Hydra render engine #104712

Closed
Bogdan Nagirniak wants to merge 142 commits from BogdanNagirniak/blender:hydra-render into main

11 changed files with 303 additions and 445 deletions
Showing only changes of commit 1f8e9641bd.

extern/usdhydra/CMakeLists.txt

@@ -27,10 +27,14 @@ set(INC
../../source/blender/io/usd
../../source/blender/io/common
../../source/blender/io/usd/intern
../../source/blender/gpu
../../source/blender/gpu/opengl
../../source/blender/gpu/intern
${CMAKE_BINARY_DIR}/source/blender/makesrna/intern
)
set(INC_SYS
${Epoxy_INCLUDE_DIRS}
${PYTHON_INCLUDE_DIRS}
${USD_INCLUDE_DIRS}
${BOOST_INCLUDE_DIR}
@@ -44,7 +48,9 @@ set(SRC
engine.h
engine.cpp
finalEngine.h
finalEngine.cpp
viewportEngine.h
viewportEngine.cpp
utils.h
@@ -66,6 +72,7 @@ set(SRC
)
set(LIB
${Epoxy_LIBRARIES}
${PYTHON_LIBRARIES}
${BOOST_LIBRARIES}
)

extern/usdhydra/engine.cpp

@@ -5,15 +5,13 @@
#include <pxr/base/plug/plugin.h>
#include <pxr/base/plug/registry.h>
#include <pxr/usd/usdGeom/tokens.h>
#include <pxr/usdImaging/usdImagingGL/engine.h>
#include "glog/logging.h"
#include "intern/usd_hierarchy_iterator.h"
#include "BKE_context.h"
#include "BKE_blender_version.h"
#include "DEG_depsgraph_query.h"
#include "engine.h"
#include "finalEngine.h"
#include "viewportEngine.h"
using namespace pxr;
@@ -21,7 +19,6 @@ namespace usdhydra {
Engine::Engine(BL::RenderEngine &b_engine, const std::string &delegateId)
: b_engine(b_engine)
, delegateId(delegateId)
{
HdRendererPluginRegistry& registry = HdRendererPluginRegistry::GetInstance();
@@ -43,38 +40,14 @@ Engine::~Engine()
renderDelegate = nullptr;
}
void Engine::exportScene(BL::Depsgraph& b_depsgraph, BL::Context& b_context)
float Engine::getRendererPercentDone()
{
Depsgraph *depsgraph = (Depsgraph *)b_depsgraph.ptr.data;
Scene *scene = DEG_get_input_scene(depsgraph);
World *world = scene->world;
DEG_graph_build_for_all_objects(depsgraph);
bContext *C = (bContext *)b_context.ptr.data;
Main *bmain = CTX_data_main(C);
USDExportParams usd_export_params;
usd_export_params.selected_objects_only = false;
usd_export_params.visible_objects_only = false;
//stage->Reload();
stage->SetMetadata(UsdGeomTokens->upAxis, VtValue(UsdGeomTokens->z));
stage->SetMetadata(UsdGeomTokens->metersPerUnit, static_cast<double>(scene->unit.scale_length));
stage->GetRootLayer()->SetDocumentation(std::string("Blender v") + BKE_blender_version_string());
/* Set up the stage for animated data. */
//if (data->params.export_animation) {
// stage->SetTimeCodesPerSecond(FPS);
// stage->SetStartTimeCode(scene->r.sfra);
// stage->SetEndTimeCode(scene->r.efra);
//}
blender::io::usd::USDHierarchyIterator iter(bmain, depsgraph, stage, usd_export_params);
iter.iterate_and_write();
iter.release_writers();
VtDictionary render_stats = renderDelegate->GetRenderStats();
auto it = render_stats.find("percentDone");
if (it == render_stats.end()) {
return 0.0;
}
return (float)it->second.UncheckedGet<double>();
}
/* ------------------------------------------------------------------------- */

extern/usdhydra/engine.h

@@ -8,11 +8,9 @@
#include <Python.h>
#include <pxr/imaging/hd/engine.h>
#include <pxr/imaging/hd/pluginRenderDelegateUniqueHandle.h>
#include <pxr/imaging/hdx/freeCameraSceneDelegate.h>
#include <pxr/usd/usd/stage.h>
#include <pxr/usdImaging/usdImagingGL/engine.h>
#include "MEM_guardedalloc.h"
#include "RNA_blender_cpp.h"
@@ -27,13 +25,9 @@ public:
virtual ~Engine();
virtual void sync(BL::Depsgraph &b_depsgraph, BL::Context &b_context, pxr::HdRenderSettingsMap &renderSettings) = 0;
pxr::UsdStageRefPtr getStage();
protected:
void exportScene(BL::Depsgraph &b_depsgraph, BL::Context &b_context);
template <typename T>
float getRendererPercentDone(T &renderer);
float getRendererPercentDone();
protected:
BL::RenderEngine b_engine;
@@ -44,60 +38,8 @@ protected:
std::unique_ptr<RenderTaskDelegate> renderTaskDelegate;
std::unique_ptr<HdxFreeCameraSceneDelegate> freeCameraDelegate;
HdEngine _engine;
std::string delegateId;
pxr::HdRenderSettingsMap renderSettings;
pxr::UsdStageRefPtr stage;
};
class FinalEngine : public Engine {
public:
using Engine::Engine;
void sync(BL::Depsgraph &b_depsgraph, BL::Context &b_context, pxr::HdRenderSettingsMap &renderSettings) override;
void render(BL::Depsgraph &b_depsgraph);
private:
void renderGL(BL::Depsgraph &b_depsgraph);
void renderLite(BL::Depsgraph &b_depsgraph);
void getResolution(BL::RenderSettings b_render, int &width, int &height);
void updateRenderResult(std::map<std::string, std::vector<float>> &render_images, const std::string &layerName, int width, int height);
void notifyStatus(float progress, const std::string &title, const std::string &info);
};
class ViewportEngine : public Engine {
public:
using Engine::Engine;
void sync(BL::Depsgraph &b_depsgraph, BL::Context &b_context, pxr::HdRenderSettingsMap &renderSettings) override;
void viewDraw(BL::Depsgraph &b_depsgraph, BL::Context &b_context);
private:
void notifyStatus(const std::string &title, const std::string &info, bool redraw);
private:
std::unique_ptr<pxr::UsdImagingGLEngine> imagingGLEngine;
pxr::UsdImagingGLRenderParams renderParams;
std::chrono::time_point<std::chrono::steady_clock> timeBegin;
};
PyObject *addPythonSubmodule_engine(PyObject *mod);
template <typename T>
float Engine::getRendererPercentDone(T &renderer)
{
float percent = 0.0;
VtDictionary render_stats = renderer.GetRenderStats();
auto it = render_stats.find("percentDone");
if (it != render_stats.end()) {
percent = (float)it->second.UncheckedGet<double>();
}
return round(percent * 10.0f) / 10.0f;
}
inline pxr::UsdStageRefPtr Engine::getStage()
{
return stage;
}
} // namespace usdhydra

extern/usdhydra/finalEngine.cpp

@@ -10,7 +10,7 @@
#include "glog/logging.h"
#include "engine.h"
#include "finalEngine.h"
#include "utils.h"
#include "sceneDelegate/scene.h"
@@ -31,65 +31,6 @@ void FinalEngine::sync(BL::Depsgraph &b_depsgraph, BL::Context &b_context, pxr::
}
void FinalEngine::render(BL::Depsgraph &b_depsgraph)
{
if (b_engine.bl_use_gpu_context()) {
renderGL(b_depsgraph);
}
else {
renderLite(b_depsgraph);
}
}
void FinalEngine::renderGL(BL::Depsgraph &b_depsgraph)
{
// TODO implement with BlenderSceneDelegate
return;
std::unique_ptr<UsdImagingGLEngine> imagingGLEngine = std::make_unique<UsdImagingGLEngine>();
if (!imagingGLEngine->SetRendererPlugin(TfToken(delegateId))) {
DLOG(ERROR) << "Error in SetRendererPlugin(" << delegateId << ")";
return;
}
for (auto const& setting : renderSettings) {
imagingGLEngine->SetRendererSetting(setting.first, setting.second);
}
BL::Scene b_scene = b_depsgraph.scene_eval();
int width, height;
getResolution(b_scene.render(), width, height);
UsdGeomCamera usdCamera = UsdAppUtilsGetCameraAtPath(stage, SdfPath(TfMakeValidIdentifier(b_scene.camera().data().name())));
GfCamera gfCamera = usdCamera.GetCamera(UsdTimeCode(b_scene.frame_current()));
GlfDrawTargetRefPtr drawTarget = GlfDrawTarget::New(GfVec2i(width, height));
drawTarget->Bind();
drawTarget->AddAttachment("color", GL_RGBA, GL_FLOAT, GL_RGBA);
imagingGLEngine->SetRenderViewport(GfVec4d(0, 0, width, height));
imagingGLEngine->SetRendererAov(HdAovTokens->color);
imagingGLEngine->SetCameraState(gfCamera.GetFrustum().ComputeViewMatrix(),
gfCamera.GetFrustum().ComputeProjectionMatrix());
UsdImagingGLRenderParams renderParams;
renderParams.frame = UsdTimeCode(b_scene.frame_current());
renderParams.clearColor = GfVec4f(1.0, 1.0, 1.0, 0.0);
imagingGLEngine->Render(stage->GetPseudoRoot(), renderParams);
map<string, vector<float>> renderImages{{"Combined", vector<float>(width * height * 4)}}; // 4 - number of channels
vector<float> &pixels = renderImages["Combined"];
glReadPixels(0, 0, width, height, GL_RGBA, GL_FLOAT, pixels.data());
drawTarget->Unbind();
updateRenderResult(renderImages, b_depsgraph.view_layer().name(), width, height);
}
void FinalEngine::renderLite(BL::Depsgraph &b_depsgraph)
{
SceneExport sceneExport(b_depsgraph);
auto resolution = sceneExport.resolution();
@@ -97,12 +38,9 @@ void FinalEngine::renderLite(BL::Depsgraph &b_depsgraph)
GfCamera gfCamera = sceneExport.gfCamera();
freeCameraDelegate->SetCamera(gfCamera);
renderTaskDelegate->SetCameraViewport(freeCameraDelegate->GetCameraId(), width, height);
TfToken aov = HdAovTokens->color;
HdAovDescriptor aovDesc = renderDelegate->GetDefaultAovDescriptor(aov);
renderTaskDelegate->SetRendererAov(aov, aovDesc);
renderTaskDelegate->SetCameraAndViewport(freeCameraDelegate->GetCameraId(), GfVec4d(0, 0, width, height));
renderTaskDelegate->SetRendererAov(HdAovTokens->color);
HdTaskSharedPtrVector tasks = renderTaskDelegate->GetTasks();
chrono::time_point<chrono::steady_clock> timeBegin = chrono::steady_clock::now(), timeCurrent;
@@ -125,7 +63,7 @@ void FinalEngine::renderLite(BL::Depsgraph &b_depsgraph)
break;
}
percentDone = getRendererPercentDone(*renderDelegate);
percentDone = getRendererPercentDone();
timeCurrent = chrono::steady_clock::now();
elapsedTime = chrono::duration_cast<chrono::milliseconds>(timeCurrent - timeBegin);
@@ -136,11 +74,11 @@
break;
}
renderTaskDelegate->GetRendererAov(HdAovTokens->color, pixels.data());
renderTaskDelegate->GetRendererAovData(HdAovTokens->color, pixels.data());
updateRenderResult(renderImages, layerName, width, height);
}
renderTaskDelegate->GetRendererAov(HdAovTokens->color, pixels.data());
renderTaskDelegate->GetRendererAovData(HdAovTokens->color, pixels.data());
updateRenderResult(renderImages, layerName, width, height);
}

extern/usdhydra/finalEngine.h (new file)

@@ -0,0 +1,24 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#pragma once
#include <chrono>
#include "engine.h"
namespace usdhydra {
class FinalEngine : public Engine {
public:
using Engine::Engine;
void sync(BL::Depsgraph &b_depsgraph, BL::Context &b_context, pxr::HdRenderSettingsMap &renderSettings) override;
void render(BL::Depsgraph &b_depsgraph);
private:
void getResolution(BL::RenderSettings b_render, int &width, int &height);
void updateRenderResult(std::map<std::string, std::vector<float>> &render_images, const std::string &layerName, int width, int height);
void notifyStatus(float progress, const std::string &title, const std::string &info);
};
} // namespace usdhydra

extern/usdhydra/renderTaskDelegate.cpp

@@ -5,158 +5,17 @@
#include <pxr/imaging/hd/renderBuffer.h>
#include <pxr/imaging/hd/renderDelegate.h>
#include <pxr/imaging/hdx/renderTask.h>
#include "renderTaskDelegate.h"
namespace usdhydra {
/* RenderTask */
RenderTask::RenderTask(HdSceneDelegate* delegate, SdfPath const& id)
: HdTask(id)
{
}
RenderTask::~RenderTask()
{
}
bool RenderTask::IsConverged() const
{
return _pass ? _pass->IsConverged() : true;
}
void RenderTask::Sync(HdSceneDelegate* delegate,
HdTaskContext* ctx,
HdDirtyBits* dirtyBits)
{
auto renderIndex = &delegate->GetRenderIndex();
if ((*dirtyBits) & HdChangeTracker::DirtyCollection) {
VtValue val = delegate->Get(GetId(), HdTokens->collection);
auto collection = val.Get<HdRprimCollection>();
// Check for cases where the collection is empty (i.e. default
// constructed). To do this, the code looks at the root paths,
// if it is empty, the collection doesn't refer to any prims at
// all.
if (collection.GetName().IsEmpty()) {
_pass.reset();
} else {
if (!_pass) {
auto renderDelegate = renderIndex->GetRenderDelegate();
_pass = renderDelegate->CreateRenderPass(renderIndex, collection);
} else {
_pass->SetRprimCollection(collection);
}
}
}
if ((*dirtyBits) & HdChangeTracker::DirtyParams) {
RenderTaskParams params;
auto value = delegate->Get(GetId(), HdTokens->params);
if (TF_VERIFY(value.IsHolding<RenderTaskParams>())) {
params = value.UncheckedGet<RenderTaskParams>();
}
_aovBindings = params.aovBindings;
_viewport = params.viewport;
_cameraId = params.camera;
}
if ((*dirtyBits) & HdChangeTracker::DirtyRenderTags) {
_renderTags = _GetTaskRenderTags(delegate);
}
if (_pass) {
_pass->Sync();
}
*dirtyBits = HdChangeTracker::Clean;
}
void RenderTask::Prepare(HdTaskContext* ctx, HdRenderIndex* renderIndex)
{
if (!_passState) {
_passState = renderIndex->GetRenderDelegate()->CreateRenderPassState();
}
// Prepare AOVS
{
// Walk the aov bindings, resolving the render index references as they're
// encountered.
for (size_t i = 0; i < _aovBindings.size(); ++i) {
if (_aovBindings[i].renderBuffer == nullptr) {
_aovBindings[i].renderBuffer = static_cast<HdRenderBuffer*>(renderIndex->GetBprim(HdPrimTypeTokens->renderBuffer, _aovBindings[i].renderBufferId));
}
}
_passState->SetAovBindings(_aovBindings);
// XXX Tasks that are not RenderTasks (OIT, ColorCorrection etc) also need
// access to AOVs, but cannot access SetupTask or RenderPassState.
//(*ctx)[HdxTokens->aovBindings] = VtValue(_aovBindings);
}
// Prepare Camera
{
auto camera = static_cast<const HdCamera*>(renderIndex->GetSprim(HdPrimTypeTokens->camera, _cameraId));
TF_VERIFY(camera);
_passState->SetCameraAndViewport(camera, _viewport);
}
_passState->Prepare(renderIndex->GetResourceRegistry());
}
void RenderTask::Execute(HdTaskContext* ctx)
{
// Bind the render state and render geometry with the rendertags (if any)
if (_pass) {
_pass->Execute(_passState, GetRenderTags());
}
}
TfTokenVector const& RenderTask::GetRenderTags() const
{
return _renderTags;
}
// --------------------------------------------------------------------------- //
// VtValue Requirements
// --------------------------------------------------------------------------- //
std::ostream& operator<<(std::ostream& out, const RenderTaskParams& pv)
{
out << "RenderTask Params:\n";
out << "camera: " << pv.camera << '\n';
out << "viewport: " << pv.viewport << '\n';
out << "aovBindings: ";
for (auto const& a : pv.aovBindings) {
out << a << " ";
}
out << '\n';
return out;
}
bool operator==(const RenderTaskParams& lhs, const RenderTaskParams& rhs)
{
return lhs.aovBindings == rhs.aovBindings &&
lhs.camera == rhs.camera &&
lhs.viewport == rhs.viewport;
}
bool operator!=(const RenderTaskParams& lhs, const RenderTaskParams& rhs)
{
return !(lhs == rhs);
}
/* RenderTaskDelegate */
RenderTaskDelegate::RenderTaskDelegate(HdRenderIndex* parentIndex, SdfPath const& delegateID)
: HdSceneDelegate(parentIndex, delegateID)
{
SdfPath renderTaskId = GetTaskID();
GetRenderIndex().InsertTask<RenderTask>(this, renderTaskId);
GetRenderIndex().InsertTask<HdxRenderTask>(this, renderTaskId);
GetRenderIndex().GetChangeTracker().MarkTaskDirty(renderTaskId, HdChangeTracker::DirtyCollection);
GetRenderIndex().GetChangeTracker().MarkTaskDirty(renderTaskId, HdChangeTracker::DirtyRenderTags);
}
@@ -166,6 +25,11 @@ SdfPath RenderTaskDelegate::GetTaskID() const
return GetDelegateID().AppendElementString("task");
}
SdfPath RenderTaskDelegate::GetAovID(TfToken const &aov) const
{
return GetDelegateID().AppendElementString("aov_" + aov.GetString());
}
VtValue RenderTaskDelegate::Get(SdfPath const& id, TfToken const& key)
{
std::cout << "RenderTaskDelegate::Get - " << id.GetAsString() << " " << key.GetString() << "\n";
@@ -180,14 +44,14 @@ VtValue RenderTaskDelegate::Get(SdfPath const& id, TfToken const& key)
return VtValue();
}
HdRenderBufferDescriptor RenderTaskDelegate::GetRenderBufferDescriptor(SdfPath const& id)
HdRenderBufferDescriptor RenderTaskDelegate::GetRenderBufferDescriptor(SdfPath const &id)
{
std::cout << "RenderTaskDelegate::GetRenderBufferDescriptor - " << id.GetAsString() << "\n";
return bufferDescriptors[id];
}
TfTokenVector RenderTaskDelegate::GetTaskRenderTags(SdfPath const& taskId)
TfTokenVector RenderTaskDelegate::GetTaskRenderTags(SdfPath const &taskId)
{
std::cout << "RenderTaskDelegate::GetTaskRenderTags - " << taskId.GetAsString() << "\n";
@@ -197,37 +61,46 @@ TfTokenVector RenderTaskDelegate::GetTaskRenderTags(SdfPath const& taskId)
bool RenderTaskDelegate::IsConverged()
{
HdTaskSharedPtr renderTask = GetRenderIndex().GetTask(GetTaskID());
return ((RenderTask &)*renderTask).IsConverged();
return ((HdxRenderTask &)*renderTask).IsConverged();
}
void RenderTaskDelegate::SetRendererAov(TfToken const &aovName, HdAovDescriptor &aovDesc)
void RenderTaskDelegate::SetRendererAov(TfToken const &aov)
{
HdAovDescriptor aovDesc = GetRenderIndex().GetRenderDelegate()->GetDefaultAovDescriptor(aov);
HdRenderBufferDescriptor desc(GfVec3i(taskParams.viewport[2] - taskParams.viewport[0], taskParams.viewport[3] - taskParams.viewport[1], 1),
aovDesc.format, aovDesc.multiSampled);
SdfPath bufferId = GetAovID(aov);
SdfPath renderBufferId = GetDelegateID().AppendElementString("aov_" + aovName.GetString());
GetRenderIndex().InsertBprim(HdPrimTypeTokens->renderBuffer, this, renderBufferId);
bufferDescriptors[renderBufferId] = desc;
GetRenderIndex().GetChangeTracker().MarkBprimDirty(renderBufferId, HdRenderBuffer::DirtyDescription);
if (bufferDescriptors.find(bufferId) == bufferDescriptors.end()) {
GetRenderIndex().InsertBprim(HdPrimTypeTokens->renderBuffer, this, bufferId);
bufferDescriptors[bufferId] = desc;
GetRenderIndex().GetChangeTracker().MarkBprimDirty(bufferId, HdRenderBuffer::DirtyDescription);
HdRenderPassAovBinding binding;
binding.aovName = aovName;
binding.renderBufferId = renderBufferId;
binding.aovSettings = aovDesc.aovSettings;
taskParams.aovBindings.push_back(binding);
HdRenderPassAovBinding binding;
binding.aovName = aov;
binding.renderBufferId = bufferId;
binding.aovSettings = aovDesc.aovSettings;
taskParams.aovBindings.push_back(binding);
GetRenderIndex().GetChangeTracker().MarkTaskDirty(GetTaskID(), HdChangeTracker::DirtyParams);
GetRenderIndex().GetChangeTracker().MarkTaskDirty(GetTaskID(), HdChangeTracker::DirtyParams);
}
else if (bufferDescriptors[bufferId] != desc) {
bufferDescriptors[bufferId] = desc;
GetRenderIndex().GetChangeTracker().MarkBprimDirty(bufferId, HdRenderBuffer::DirtyDescription);
}
}
void RenderTaskDelegate::GetRendererAov(TfToken const &aovId, void *buf)
HdRenderBuffer *RenderTaskDelegate::GetRendererAov(TfToken const &aov)
{
SdfPath renderBufferId = GetDelegateID().AppendElementString("aov_" + aovId.GetString());
HdRenderBuffer *rBuf = static_cast<HdRenderBuffer*>(GetRenderIndex().GetBprim(HdPrimTypeTokens->renderBuffer, renderBufferId));
return (HdRenderBuffer *)(GetRenderIndex().GetBprim(HdPrimTypeTokens->renderBuffer, GetAovID(aov)));
}
void *data = rBuf->Map();
memcpy(buf, data, rBuf->GetWidth() * rBuf->GetHeight() * HdDataSizeOfFormat(rBuf->GetFormat()));
rBuf->Unmap();
void RenderTaskDelegate::GetRendererAovData(TfToken const &aov, void *data)
{
HdRenderBuffer *buffer = GetRendererAov(aov);
void *bufData = buffer->Map();
memcpy(data, bufData, buffer->GetWidth() * buffer->GetHeight() * HdDataSizeOfFormat(buffer->GetFormat()));
buffer->Unmap();
}
HdTaskSharedPtrVector RenderTaskDelegate::GetTasks()
@@ -236,12 +109,13 @@ HdTaskSharedPtrVector RenderTaskDelegate::GetTasks()
return { renderTask };
}
void RenderTaskDelegate::SetCameraViewport(SdfPath const & cameraId, int width, int height)
void RenderTaskDelegate::SetCameraAndViewport(SdfPath const &cameraId, GfVec4d const &viewport)
{
taskParams.viewport = GfVec4d(0, 0, width, height);
taskParams.camera = cameraId;
GetRenderIndex().GetChangeTracker().MarkTaskDirty(GetTaskID(), HdChangeTracker::DirtyParams);
if (taskParams.viewport != viewport || taskParams.camera != cameraId) {
taskParams.viewport = viewport;
taskParams.camera = cameraId;
GetRenderIndex().GetChangeTracker().MarkTaskDirty(GetTaskID(), HdChangeTracker::DirtyParams);
}
}

extern/usdhydra/renderTaskDelegate.h

@@ -3,92 +3,36 @@
#pragma once
#include <pxr/pxr.h>
#include <pxr/imaging/hd/task.h>
#include <pxr/imaging/hd/renderPass.h>
#include <pxr/imaging/hd/renderPassState.h>
#include <pxr/imaging/hd/camera.h>
#include <pxr/imaging/hdx/renderSetupTask.h>
#include <pxr/imaging/hd/sceneDelegate.h>
#include <pxr/imaging/hd/renderIndex.h>
#include <pxr/usd/usd/stage.h>
using namespace pxr;
namespace usdhydra {
TF_DEFINE_PRIVATE_TOKENS(_tokens,
(renderBufferDescriptor)
(renderTags));
class RenderTask : public HdTask
{
public:
RenderTask(HdSceneDelegate* delegate, SdfPath const& id);
~RenderTask() override;
bool IsConverged() const;
/// Sync the render pass resources
void Sync(HdSceneDelegate* delegate,
HdTaskContext* ctx,
HdDirtyBits* dirtyBits) override;
/// Prepare the tasks resources
void Prepare(HdTaskContext* ctx,
HdRenderIndex* renderIndex) override;
/// Execute render pass task
void Execute(HdTaskContext* ctx) override;
/// Collect Render Tags used by the task.
TfTokenVector const& GetRenderTags() const override;
private:
HdRenderPassSharedPtr _pass;
HdRenderPassStateSharedPtr _passState;
TfTokenVector _renderTags;
GfVec4d _viewport;
SdfPath _cameraId;
HdRenderPassAovBindingVector _aovBindings;
};
struct RenderTaskParams
{
// Should not be empty.
HdRenderPassAovBindingVector aovBindings;
SdfPath camera;
GfVec4d viewport = GfVec4d(0.0);
};
// VtValue requirements
std::ostream& operator<<(std::ostream& out, const RenderTaskParams& pv);
bool operator==(const RenderTaskParams& lhs, const RenderTaskParams& rhs);
bool operator!=(const RenderTaskParams& lhs, const RenderTaskParams& rhs);
class RenderTaskDelegate : public HdSceneDelegate
{
public:
RenderTaskDelegate(HdRenderIndex* parentIndex, SdfPath const& delegateID);
RenderTaskDelegate(HdRenderIndex* parentIndex, SdfPath const &delegateID);
~RenderTaskDelegate() override = default;
SdfPath GetTaskID() const;
SdfPath GetAovID(TfToken const &aov) const;
VtValue Get(SdfPath const& id, TfToken const& key) override;
HdRenderBufferDescriptor GetRenderBufferDescriptor(SdfPath const& id) override;
TfTokenVector GetTaskRenderTags(SdfPath const& taskId) override;
VtValue Get(SdfPath const &id, TfToken const &key) override;
HdRenderBufferDescriptor GetRenderBufferDescriptor(SdfPath const &id) override;
TfTokenVector GetTaskRenderTags(SdfPath const &taskId) override;
bool IsConverged();
void SetRendererAov(TfToken const &aovId, HdAovDescriptor &aovDesc);
void GetRendererAov(TfToken const &id, void *buf);
void SetRendererAov(TfToken const &aovId);
HdRenderBuffer *GetRendererAov(TfToken const &id);
void GetRendererAovData(TfToken const &id, void *buf);
HdTaskSharedPtrVector GetTasks();
void SetCameraViewport(SdfPath const& cameraId, int width, int height);
void SetCameraAndViewport(SdfPath const &cameraId, GfVec4d const &viewport);
private:
RenderTaskParams taskParams;
HdxRenderTaskParams taskParams;
TfHashMap<SdfPath, HdRenderBufferDescriptor, SdfPath::Hash> bufferDescriptors;
};

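A minimal usage sketch of the reworked RenderTaskDelegate interface (illustration only, not part of the patch). It mirrors the final-render path in finalEngine.cpp above; the members freeCameraDelegate, renderTaskDelegate, renderIndex and _engine are assumed to be set up as elsewhere in this branch:

// Sketch: one final-render pass through the new delegate API.
SceneExport sceneExport(b_depsgraph);
auto res = sceneExport.resolution();  // std::pair<int, int>
int width = res.first, height = res.second;

freeCameraDelegate->SetCamera(sceneExport.gfCamera());
renderTaskDelegate->SetCameraAndViewport(freeCameraDelegate->GetCameraId(),
                                         GfVec4d(0, 0, width, height));
renderTaskDelegate->SetRendererAov(HdAovTokens->color);  // inserts/updates the color render buffer
HdTaskSharedPtrVector tasks = renderTaskDelegate->GetTasks();

{
  // Release the GIL around Hydra execution, as the patch does for the viewport path.
  TF_PY_ALLOW_THREADS_IN_SCOPE();
  _engine.Execute(renderIndex.get(), &tasks);
}

std::vector<float> pixels(width * height * 4);  // 4 = RGBA channels
while (!renderTaskDelegate->IsConverged()) {
  // Poll progress via getRendererPercentDone(), copy intermediate results with
  // GetRendererAovData(), handle cancellation - as renderLite() does above.
}
renderTaskDelegate->GetRendererAovData(HdAovTokens->color, pixels.data());
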
extern/usdhydra/sceneDelegate/scene.cpp

@@ -8,9 +8,8 @@ using namespace pxr;
namespace usdhydra {
GfCamera SceneExport::gfCamera()
GfCamera SceneExport::gfCamera(BL::Object &b_cameraObj)
{
BL::Object b_cameraObj = b_scene.camera();
BL::Camera &b_camera = (BL::Camera &)b_cameraObj.data();
auto res = resolution();
float ratio = (float)res.first / res.second;
@@ -20,11 +19,17 @@ GfCamera SceneExport::gfCamera()
gfCamera.SetHorizontalAperture(b_camera.sensor_width());
gfCamera.SetVerticalAperture(b_camera.sensor_width() / ratio);
gfCamera.SetFocalLength(b_camera.lens());
gfCamera.SetTransform(ObjectExport(b_cameraObj,b_depsgraph).transform());
gfCamera.SetTransform(ObjectExport(b_cameraObj, b_depsgraph).transform());
return gfCamera;
}
GfCamera SceneExport::gfCamera()
{
BL::Object b_cameraObj = b_scene.camera();
return gfCamera(b_cameraObj);
}
std::pair<int,int> SceneExport::resolution()
{
BL::RenderSettings b_render = b_scene.render();

extern/usdhydra/sceneDelegate/scene.h

@@ -18,6 +18,7 @@ public:
, b_scene(b_depsgraph.scene())
{}
pxr::GfCamera gfCamera();
pxr::GfCamera gfCamera(BL::Object &cameraObj);
std::pair<int, int> resolution();
std::string sceneName();
std::string layerName();

extern/usdhydra/viewportEngine.cpp

@@ -1,6 +1,8 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#include <epoxy/gl.h>
#include <pxr/base/gf/camera.h>
#include <pxr/imaging/glf/drawTarget.h>
#include <pxr/usd/usdGeom/camera.h>
@@ -9,7 +11,7 @@
#include "glog/logging.h"
#include "engine.h"
#include "viewportEngine.h"
#include "utils.h"
using namespace std;
@@ -419,37 +421,138 @@ int ViewSettings::get_height()
return border[1][1];
}
pxr::GfCamera ViewSettings::export_camera()
GfCamera ViewSettings::export_camera()
{
float tile[4] = {(float)border[0][0] / screen_width, (float)border[0][1] / screen_height,
(float)border[1][0] / screen_width, (float)border[1][1] / screen_height};
return camera_data.export_gf(tile);
}
void ViewportEngine::sync(BL::Depsgraph &b_depsgraph, BL::Context &b_context, pxr::HdRenderSettingsMap &renderSettings_)
GLTexture::GLTexture()
: textureId(0)
, width(0)
, height(0)
, channels(4)
{
// TODO implement with BlenderSceneDelegate
return;
}
renderSettings = renderSettings_;
if (!imagingGLEngine) {
stage = UsdStage::CreateInMemory();
exportScene(b_depsgraph, b_context);
GLTexture::~GLTexture()
{
if (textureId) {
free();
}
}
imagingGLEngine = std::make_unique<UsdImagingGLEngine>();
imagingGLEngine->SetRendererPlugin(TfToken(delegateId));
void GLTexture::setBuffer(pxr::HdRenderBuffer *buffer)
{
if (!textureId) {
create(buffer);
return;
}
for (auto const& pair : renderSettings) {
imagingGLEngine->SetRendererSetting(pair.first, pair.second);
if (width != buffer->GetWidth() || height != buffer->GetHeight()) {
free();
create(buffer);
return;
}
glBindTexture(GL_TEXTURE_2D, textureId);
void *data = buffer->Map();
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_RGBA, GL_FLOAT, data);
buffer->Unmap();
}
void GLTexture::create(pxr::HdRenderBuffer *buffer)
{
width = buffer->GetWidth();
height = buffer->GetHeight();
channels = HdGetComponentCount(buffer->GetFormat());
glGenTextures(1, &textureId);
glBindTexture(GL_TEXTURE_2D, textureId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
void *data = buffer->Map();
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA16F, width, height, 0, GL_RGBA, GL_FLOAT, data);
buffer->Unmap();
}
void GLTexture::free()
{
glDeleteTextures(1, &textureId);
textureId = 0;
}
void GLTexture::draw(GLfloat x, GLfloat y)
{
// INITIALIZATION
// Getting shader program
GLint shader_program;
glGetIntegerv(GL_CURRENT_PROGRAM, &shader_program);
// Generate vertex array
GLuint vertex_array;
glGenVertexArrays(1, &vertex_array);
GLint texturecoord_location = glGetAttribLocation(shader_program, "texCoord");
GLint position_location = glGetAttribLocation(shader_program, "pos");
// Generate geometry buffers for drawing textured quad
GLfloat position[8] = { x, y, x + width, y, x + width, y + height, x, y + height };
GLfloat texcoord[8] = {0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0};
GLuint vertex_buffer[2];
glGenBuffers(2, vertex_buffer);
glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer[0]);
glBufferData(GL_ARRAY_BUFFER, 32, position, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer[1]);
glBufferData(GL_ARRAY_BUFFER, 32, texcoord, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
// DRAWING
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textureId);
glBindVertexArray(vertex_array);
glEnableVertexAttribArray(texturecoord_location);
glEnableVertexAttribArray(position_location);
glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer[0]);
glVertexAttribPointer(position_location, 2, GL_FLOAT, GL_FALSE, 0, nullptr);
glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer[1]);
glVertexAttribPointer(texturecoord_location, 2, GL_FLOAT, GL_FALSE, 0, nullptr);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
glBindVertexArray(0);
glBindTexture(GL_TEXTURE_2D, 0);
// DELETING
glDeleteBuffers(2, vertex_buffer);
glDeleteVertexArrays(1, &vertex_array);
}
void ViewportEngine::sync(BL::Depsgraph &b_depsgraph, BL::Context &b_context, pxr::HdRenderSettingsMap &renderSettings)
{
if (!sceneDelegate) {
sceneDelegate = std::make_unique<BlenderSceneDelegate>(renderIndex.get(),
SdfPath::AbsoluteRootPath().AppendElementString("blenderScene"), b_depsgraph);
}
sceneDelegate->Populate();
for (auto const& setting : renderSettings) {
renderDelegate->SetRenderSetting(setting.first, setting.second);
}
}
void ViewportEngine::viewDraw(BL::Depsgraph &b_depsgraph, BL::Context &b_context)
{
// TODO implement with BlenderSceneDelegate
return;
ViewSettings viewSettings(b_context);
if (viewSettings.get_width() * viewSettings.get_height() == 0) {
return;
@@ -458,50 +561,50 @@ void ViewportEngine::viewDraw(BL::Depsgraph &b_depsgraph, BL::Context &b_context
BL::Scene b_scene = b_depsgraph.scene_eval();
GfCamera gfCamera = viewSettings.export_camera();
vector<GfVec4f> clipPlanes = gfCamera.GetClippingPlanes();
freeCameraDelegate->SetCamera(gfCamera);
renderTaskDelegate->SetCameraAndViewport(freeCameraDelegate->GetCameraId(),
GfVec4d(viewSettings.border[0][0], viewSettings.border[0][1], viewSettings.border[1][0], viewSettings.border[1][1]));
renderTaskDelegate->SetRendererAov(HdAovTokens->color);
HdTaskSharedPtrVector tasks = renderTaskDelegate->GetTasks();
for (int i = 0; i < clipPlanes.size(); i++) {
renderParams.clipPlanes.push_back((GfVec4d)clipPlanes[i]);
}
imagingGLEngine->SetCameraState(gfCamera.GetFrustum().ComputeViewMatrix(),
gfCamera.GetFrustum().ComputeProjectionMatrix());
imagingGLEngine->SetRenderViewport(GfVec4d((double)viewSettings.border[0][0], (double)viewSettings.border[0][1],
(double)viewSettings.border[1][0], (double)viewSettings.border[1][1]));
b_engine.bind_display_space_shader(b_scene);
if (getRendererPercentDone(*imagingGLEngine) == 0.0f) {
if (getRendererPercentDone() == 0.0f) {
timeBegin = chrono::steady_clock::now();
}
imagingGLEngine->Render(stage->GetPseudoRoot(), renderParams);
{
// Release the GIL before calling into hydra, in case any hydra plugins call into python.
TF_PY_ALLOW_THREADS_IN_SCOPE();
_engine.Execute(renderIndex.get(), &tasks);
}
b_engine.bind_display_space_shader(b_scene);
texture.setBuffer(renderTaskDelegate->GetRendererAov(HdAovTokens->color));
texture.draw((GLfloat)viewSettings.border[0][0], (GLfloat)viewSettings.border[0][1]);
b_engine.unbind_display_space_shader();
glClear(GL_DEPTH_BUFFER_BIT);
//glClear(GL_DEPTH_BUFFER_BIT);
chrono::time_point<chrono::steady_clock> timeCurrent = chrono::steady_clock::now();
chrono::milliseconds elapsedTime = chrono::duration_cast<chrono::milliseconds>(timeCurrent - timeBegin);
string formattedTime = formatDuration(elapsedTime);
if (!imagingGLEngine->IsConverged()) {
notifyStatus("Time: " + formattedTime + " | Done: " + to_string(int(getRendererPercentDone(*imagingGLEngine))) + "%",
"Render", true);
if (!renderTaskDelegate->IsConverged()) {
notifyStatus("Time: " + formattedTime + " | Done: " + to_string(int(getRendererPercentDone())) + "%",
"Render");
b_engine.tag_redraw();
}
else {
notifyStatus(("Time: " + formattedTime).c_str(), "Rendering Done", false);
notifyStatus(("Time: " + formattedTime).c_str(), "Rendering Done");
}
}
void ViewportEngine::notifyStatus(const string &info, const string &status, bool redraw)
void ViewportEngine::notifyStatus(const string &info, const string &status)
{
b_engine.update_stats(status.c_str(), info.c_str());
if (redraw) {
b_engine.tag_redraw();
}
}
} // namespace usdhydra

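Condensed for review, the viewport cycle that sync() and viewDraw() above implement looks like this (illustration only; sceneDelegate, renderTaskDelegate, freeCameraDelegate, renderIndex, _engine, texture and b_engine are the members declared in this patch):

// Sketch: one viewport redraw through the Blender scene delegate path.
sceneDelegate->Populate();  // sync Blender data into the render index
freeCameraDelegate->SetCamera(viewSettings.export_camera());
renderTaskDelegate->SetCameraAndViewport(
    freeCameraDelegate->GetCameraId(),
    GfVec4d(viewSettings.border[0][0], viewSettings.border[0][1],
            viewSettings.border[1][0], viewSettings.border[1][1]));
renderTaskDelegate->SetRendererAov(HdAovTokens->color);
HdTaskSharedPtrVector tasks = renderTaskDelegate->GetTasks();
_engine.Execute(renderIndex.get(), &tasks);  // render the (possibly partial) frame

b_engine.bind_display_space_shader(b_scene);
texture.setBuffer(renderTaskDelegate->GetRendererAov(HdAovTokens->color));  // upload AOV to a GL texture
texture.draw((GLfloat)viewSettings.border[0][0], (GLfloat)viewSettings.border[0][1]);
b_engine.unbind_display_space_shader();

if (!renderTaskDelegate->IsConverged()) {
  b_engine.tag_redraw();  // keep refining until the delegate converges
}
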
extern/usdhydra/viewportEngine.h (new file)

@@ -0,0 +1,47 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#pragma once
#include <chrono>
#include <epoxy/gl.h>
#include <pxr/imaging/hd/renderBuffer.h>
#include "engine.h"
namespace usdhydra {
class GLTexture
{
public:
GLTexture();
~GLTexture();
void setBuffer(pxr::HdRenderBuffer *buffer);
void draw(GLfloat x, GLfloat y);
private:
void create(pxr::HdRenderBuffer *buffer);
void free();
GLuint textureId;
int width, height, channels;
};
class ViewportEngine : public Engine {
public:
using Engine::Engine;
void sync(BL::Depsgraph &b_depsgraph, BL::Context &b_context, pxr::HdRenderSettingsMap &renderSettings) override;
void viewDraw(BL::Depsgraph &b_depsgraph, BL::Context &b_context);
private:
void notifyStatus(const std::string &title, const std::string &info);
private:
std::chrono::time_point<std::chrono::steady_clock> timeBegin;
GLTexture texture;
};
} // namespace usdhydra