BLEN-367: Fix code style #14

Merged
Bogdan Nagirniak merged 11 commits from BLEN-367 into hydra-render 2023-03-15 09:42:17 +01:00
43 changed files with 1378 additions and 1295 deletions

View File

@ -52,38 +52,38 @@ set(SRC
engine.h
engine.cc
finalEngine.h
finalEngine.cc
previewEngine.h
previewEngine.cc
viewportEngine.h
viewportEngine.cc
final_engine.h
final_engine.cc
preview_engine.h
preview_engine.cc
viewport_engine.h
viewport_engine.cc
camera.h
camera.cc
utils.h
utils.cc
renderTaskDelegate.cc
renderTaskDelegate.h
simpleLightTaskDelegate.cc
simpleLightTaskDelegate.h
render_task_delegate.cc
render_task_delegate.h
simple_light_task_delegate.cc
simple_light_task_delegate.h
sceneDelegate/blenderSceneDelegate.h
sceneDelegate/blenderSceneDelegate.cc
sceneDelegate/id.h
sceneDelegate/id.cc
sceneDelegate/object.h
sceneDelegate/object.cc
sceneDelegate/material.h
sceneDelegate/material.cc
sceneDelegate/mesh.h
sceneDelegate/mesh.cc
sceneDelegate/mtlxHydraAdapter.h
sceneDelegate/mtlxHydraAdapter.cc
sceneDelegate/light.h
sceneDelegate/light.cc
sceneDelegate/world.h
sceneDelegate/world.cc
scene_delegate/blender_scene_delegate.h
scene_delegate/blender_scene_delegate.cc
scene_delegate/id.h
scene_delegate/id.cc
scene_delegate/object.h
scene_delegate/object.cc
scene_delegate/material.h
scene_delegate/material.cc
scene_delegate/mesh.h
scene_delegate/mesh.cc
scene_delegate/mtlx_hydra_adapter.h
scene_delegate/mtlx_hydra_adapter.cc
scene_delegate/light.h
scene_delegate/light.cc
scene_delegate/world.h
scene_delegate/world.cc
)
set(LIB

View File

@ -3,23 +3,20 @@
#include "DNA_camera_types.h"
#include "DNA_screen_types.h"
#include "BKE_context.h"
#include "camera.h"
#include "utils.h"
using namespace pxr;
namespace blender::render::hydra {
CameraData::CameraData(Object *camera_obj, GfVec2i res, GfVec4f tile)
CameraData::CameraData(Object *camera_obj, pxr::GfVec2i res, pxr::GfVec4f tile)
{
Camera *camera = (Camera *)camera_obj->data;
float t_pos[2] = {tile[0], tile[1]};
float t_size[2] = {tile[2], tile[3]};
transform = gf_matrix_from_transform(camera_obj->object_to_world);
clip_range = GfRange1f(camera->clip_start, camera->clip_end);
clip_range = pxr::GfRange1f(camera->clip_start, camera->clip_end);
mode = camera->type;
if (camera->dof.flag & CAM_DOF_ENABLED) {
@ -28,42 +25,42 @@ CameraData::CameraData(Object *camera_obj, GfVec2i res, GfVec4f tile)
focus_distance = camera->dof.focus_distance;
}
else {
GfVec3f obj_pos(camera->dof.focus_object->object_to_world[0][3],
camera->dof.focus_object->object_to_world[1][3],
camera->dof.focus_object->object_to_world[2][3]);
GfVec3f cam_pos(transform[0][3], transform[1][3], transform[2][3]);
pxr::GfVec3f obj_pos(camera->dof.focus_object->object_to_world[0][3],
camera->dof.focus_object->object_to_world[1][3],
camera->dof.focus_object->object_to_world[2][3]);
pxr::GfVec3f cam_pos(transform[0][3], transform[1][3], transform[2][3]);
focus_distance = (obj_pos - cam_pos).GetLength();
}
dof_data = std::tuple(std::max(focus_distance, 0.001f),
camera->dof.aperture_fstop,
camera->dof.aperture_blades);
dof_data = std::tuple(
std::max(focus_distance, 0.001f), camera->dof.aperture_fstop, camera->dof.aperture_blades);
}
float ratio = (float)res[0] / res[1];
switch (camera->sensor_fit) {
case CAMERA_SENSOR_FIT_VERT:
lens_shift = GfVec2f(camera->shiftx / ratio, camera->shifty);
lens_shift = pxr::GfVec2f(camera->shiftx / ratio, camera->shifty);
break;
case CAMERA_SENSOR_FIT_HOR:
lens_shift = GfVec2f(camera->shiftx, camera->shifty * ratio);
lens_shift = pxr::GfVec2f(camera->shiftx, camera->shifty * ratio);
break;
case CAMERA_SENSOR_FIT_AUTO:
if (ratio > 1.0f) {
lens_shift = GfVec2f(camera->shiftx, camera->shifty * ratio);
lens_shift = pxr::GfVec2f(camera->shiftx, camera->shifty * ratio);
}
else {
lens_shift = GfVec2f(camera->shiftx / ratio, camera->shifty);
lens_shift = pxr::GfVec2f(camera->shiftx / ratio, camera->shifty);
}
break;
default:
lens_shift = GfVec2f(camera->shiftx, camera->shifty);
lens_shift = pxr::GfVec2f(camera->shiftx, camera->shifty);
break;
}
lens_shift = GfVec2f(lens_shift[0] / t_size[0] + (t_pos[0] + t_size[0] * 0.5 - 0.5) / t_size[0],
lens_shift[1] / t_size[1] + (t_pos[1] + t_size[1] * 0.5 - 0.5) / t_size[1]);
lens_shift = pxr::GfVec2f(
lens_shift[0] / t_size[0] + (t_pos[0] + t_size[0] * 0.5 - 0.5) / t_size[0],
lens_shift[1] / t_size[1] + (t_pos[1] + t_size[1] * 0.5 - 0.5) / t_size[1]);
switch (camera->type) {
case CAM_PERSP:
@ -71,48 +68,48 @@ CameraData::CameraData(Object *camera_obj, GfVec2i res, GfVec4f tile)
switch (camera->sensor_fit) {
case CAMERA_SENSOR_FIT_VERT:
sensor_size = GfVec2f(camera->sensor_y * ratio, camera->sensor_y);
sensor_size = pxr::GfVec2f(camera->sensor_y * ratio, camera->sensor_y);
break;
case CAMERA_SENSOR_FIT_HOR:
sensor_size = GfVec2f(camera->sensor_x, camera->sensor_x / ratio);
sensor_size = pxr::GfVec2f(camera->sensor_x, camera->sensor_x / ratio);
break;
case CAMERA_SENSOR_FIT_AUTO:
if (ratio > 1.0f) {
sensor_size = GfVec2f(camera->sensor_x, camera->sensor_x / ratio);
sensor_size = pxr::GfVec2f(camera->sensor_x, camera->sensor_x / ratio);
}
else {
sensor_size = GfVec2f(camera->sensor_x * ratio, camera->sensor_x);
sensor_size = pxr::GfVec2f(camera->sensor_x * ratio, camera->sensor_x);
}
break;
default:
sensor_size = GfVec2f(camera->sensor_x, camera->sensor_y);
sensor_size = pxr::GfVec2f(camera->sensor_x, camera->sensor_y);
break;
}
sensor_size = GfVec2f(sensor_size[0] * t_size[0], sensor_size[1] * t_size[1]);
sensor_size = pxr::GfVec2f(sensor_size[0] * t_size[0], sensor_size[1] * t_size[1]);
break;
case CAM_ORTHO:
focal_length = 0.0f;
switch (camera->sensor_fit) {
case CAMERA_SENSOR_FIT_VERT:
ortho_size = GfVec2f(camera->ortho_scale * ratio, camera->ortho_scale);
ortho_size = pxr::GfVec2f(camera->ortho_scale * ratio, camera->ortho_scale);
break;
case CAMERA_SENSOR_FIT_HOR:
ortho_size = GfVec2f(camera->ortho_scale, camera->ortho_scale / ratio);
ortho_size = pxr::GfVec2f(camera->ortho_scale, camera->ortho_scale / ratio);
break;
case CAMERA_SENSOR_FIT_AUTO:
if (ratio > 1.0f) {
ortho_size = GfVec2f(camera->ortho_scale, camera->ortho_scale / ratio);
ortho_size = pxr::GfVec2f(camera->ortho_scale, camera->ortho_scale / ratio);
}
else {
ortho_size = GfVec2f(camera->ortho_scale * ratio, camera->ortho_scale);
ortho_size = pxr::GfVec2f(camera->ortho_scale * ratio, camera->ortho_scale);
}
break;
default:
ortho_size = GfVec2f(camera->ortho_scale, camera->ortho_scale);
ortho_size = pxr::GfVec2f(camera->ortho_scale, camera->ortho_scale);
break;
}
ortho_size = GfVec2f(ortho_size[0] * t_size[0], ortho_size[1] * t_size[1]);
ortho_size = pxr::GfVec2f(ortho_size[0] * t_size[0], ortho_size[1] * t_size[1]);
break;
case CAM_PANO:
@ -121,28 +118,28 @@ CameraData::CameraData(Object *camera_obj, GfVec2i res, GfVec4f tile)
switch (camera->sensor_fit) {
case CAMERA_SENSOR_FIT_VERT:
sensor_size = GfVec2f(camera->sensor_y * ratio, camera->sensor_y);
sensor_size = pxr::GfVec2f(camera->sensor_y * ratio, camera->sensor_y);
break;
case CAMERA_SENSOR_FIT_HOR:
sensor_size = GfVec2f(camera->sensor_x, camera->sensor_x / ratio);
sensor_size = pxr::GfVec2f(camera->sensor_x, camera->sensor_x / ratio);
break;
case CAMERA_SENSOR_FIT_AUTO:
if (ratio > 1.0f) {
sensor_size = GfVec2f(camera->sensor_x, camera->sensor_x / ratio);
sensor_size = pxr::GfVec2f(camera->sensor_x, camera->sensor_x / ratio);
}
else {
sensor_size = GfVec2f(camera->sensor_x * ratio, camera->sensor_x);
sensor_size = pxr::GfVec2f(camera->sensor_x * ratio, camera->sensor_x);
}
break;
default:
sensor_size = GfVec2f(camera->sensor_x, camera->sensor_y);
sensor_size = pxr::GfVec2f(camera->sensor_x, camera->sensor_y);
break;
}
sensor_size = GfVec2f(sensor_size[0] * t_size[0], sensor_size[1] * t_size[1]);
sensor_size = pxr::GfVec2f(sensor_size[0] * t_size[0], sensor_size[1] * t_size[1]);
default:
focal_length = camera->lens;
sensor_size = GfVec2f(camera->sensor_y * ratio, camera->sensor_y);
sensor_size = pxr::GfVec2f(camera->sensor_y * ratio, camera->sensor_y);
}
}
@ -156,47 +153,47 @@ CameraData::CameraData(bContext *context)
// context.view3d or context.region_data
float VIEWPORT_SENSOR_SIZE = 72.0;
GfVec2i res(region->winx, region->winy);
pxr::GfVec2i res(region->winx, region->winy);
float ratio = (float)res[0] / res[1];
transform = gf_matrix_from_transform(region_data->viewmat).GetInverse();
switch (region_data->persp) {
case RV3D_PERSP: {
mode = CAM_PERSP;
clip_range = GfRange1f(view3d->clip_start, view3d->clip_end);
lens_shift = GfVec2f(0.0, 0.0);
clip_range = pxr::GfRange1f(view3d->clip_start, view3d->clip_end);
lens_shift = pxr::GfVec2f(0.0, 0.0);
focal_length = view3d->lens;
if (ratio > 1.0) {
sensor_size = GfVec2f(VIEWPORT_SENSOR_SIZE, VIEWPORT_SENSOR_SIZE / ratio);
sensor_size = pxr::GfVec2f(VIEWPORT_SENSOR_SIZE, VIEWPORT_SENSOR_SIZE / ratio);
}
else {
sensor_size = GfVec2f(VIEWPORT_SENSOR_SIZE * ratio, VIEWPORT_SENSOR_SIZE);
sensor_size = pxr::GfVec2f(VIEWPORT_SENSOR_SIZE * ratio, VIEWPORT_SENSOR_SIZE);
}
break;
}
case RV3D_ORTHO: {
mode = CAM_ORTHO;
lens_shift = GfVec2f(0.0f, 0.0f);
lens_shift = pxr::GfVec2f(0.0f, 0.0f);
float o_size = region_data->dist * VIEWPORT_SENSOR_SIZE / view3d->lens;
float o_depth = view3d->clip_end;
clip_range = GfRange1f(-o_depth * 0.5, o_depth * 0.5);
clip_range = pxr::GfRange1f(-o_depth * 0.5, o_depth * 0.5);
if (ratio > 1.0f) {
ortho_size = GfVec2f(o_size, o_size / ratio);
ortho_size = pxr::GfVec2f(o_size, o_size / ratio);
}
else {
ortho_size = GfVec2f(o_size * ratio, o_size);
ortho_size = pxr::GfVec2f(o_size * ratio, o_size);
}
break;
}
case RV3D_CAMOB: {
GfMatrix4d mat = transform;
*this = CameraData(view3d->camera, res, GfVec4f(0, 0, 1, 1));
pxr::GfMatrix4d mat = transform;
*this = CameraData(view3d->camera, res, pxr::GfVec4f(0, 0, 1, 1));
transform = mat;
// This formula was taken from previous plugin with corresponded comment
@ -206,8 +203,8 @@ CameraData::CameraData(bContext *context)
// Updating l_shift due to viewport zoom and view_camera_offset
// view_camera_offset should be multiplied by 2
lens_shift = GfVec2f((lens_shift[0] + region_data->camdx * 2) / zoom,
(lens_shift[1] + region_data->camdy * 2) / zoom);
lens_shift = pxr::GfVec2f((lens_shift[0] + region_data->camdx * 2) / zoom,
(lens_shift[1] + region_data->camdy * 2) / zoom);
if (mode == CAM_ORTHO) {
ortho_size *= zoom;
@ -223,11 +220,11 @@ CameraData::CameraData(bContext *context)
}
}
GfCamera CameraData::gf_camera(GfVec4f tile)
pxr::GfCamera CameraData::gf_camera(pxr::GfVec4f tile)
{
float t_pos[2] = {tile[0], tile[1]}, t_size[2] = {tile[2], tile[3]};
GfCamera gf_camera = GfCamera();
pxr::GfCamera gf_camera = pxr::GfCamera();
gf_camera.SetClippingRange(clip_range);
@ -238,7 +235,7 @@ GfCamera CameraData::gf_camera(GfVec4f tile)
case CAM_PERSP:
case CAM_PANO: {
/* TODO: store panoramic camera settings */
gf_camera.SetProjection(GfCamera::Projection::Perspective);
gf_camera.SetProjection(pxr::GfCamera::Projection::Perspective);
gf_camera.SetFocalLength(focal_length);
float s_size[2] = {sensor_size[0] * t_size[0], sensor_size[1] * t_size[1]};
@ -251,7 +248,7 @@ GfCamera CameraData::gf_camera(GfVec4f tile)
break;
}
case CAM_ORTHO: {
gf_camera.SetProjection(GfCamera::Projection::Orthographic);
gf_camera.SetProjection(pxr::GfCamera::Projection::Orthographic);
// Use tenths of a world unit accorging to USD docs
// https://graphics.pixar.com/usd/docs/api/class_gf_camera.html
@ -272,9 +269,9 @@ GfCamera CameraData::gf_camera(GfVec4f tile)
return gf_camera;
}
GfCamera CameraData::gf_camera()
pxr::GfCamera CameraData::gf_camera()
{
return gf_camera(GfVec4f(0, 0, 1, 1));
return gf_camera(pxr::GfVec4f(0, 0, 1, 1));
}
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -8,19 +8,21 @@
#include <pxr/base/gf/camera.h>
#include <pxr/base/gf/vec2f.h>
#include "BKE_context.h"
#include "DNA_object_types.h"
namespace blender::render::hydra {
class CameraData {
public:
public:
CameraData(bContext *context);
CameraData(Object *camera_obj, pxr::GfVec2i res, pxr::GfVec4f tile);
pxr::GfCamera gf_camera();
pxr::GfCamera gf_camera(pxr::GfVec4f tile);
private:
private:
int mode;
pxr::GfRange1f clip_range;
float focal_length;
@ -31,4 +33,4 @@ private:
std::tuple<float, float, int> dof_data;
};
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -1,49 +1,47 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#include <pxr/base/plug/plugin.h>
#include <pxr/base/plug/registry.h>
#include <pxr/imaging/hd/rendererPluginRegistry.h>
#include <pxr/imaging/hdSt/renderDelegate.h>
#include <pxr/imaging/hgi/tokens.h>
#include <pxr/base/plug/plugin.h>
#include <pxr/base/plug/registry.h>
#include <pxr/usd/usdGeom/tokens.h>
#include "glog/logging.h"
#include "engine.h"
using namespace pxr;
namespace blender::render::hydra {
Engine::Engine(RenderEngine *bl_engine, const std::string &delegateId)
: bl_engine(bl_engine)
Engine::Engine(RenderEngine *bl_engine, const std::string &delegate_id) : bl_engine(bl_engine)
{
HdRendererPluginRegistry& registry = HdRendererPluginRegistry::GetInstance();
pxr::HdRendererPluginRegistry &registry = pxr::HdRendererPluginRegistry::GetInstance();
TF_PY_ALLOW_THREADS_IN_SCOPE();
render_delegate = registry.CreateRenderDelegate(TfToken(delegateId));
pxr::TF_PY_ALLOW_THREADS_IN_SCOPE();
render_delegate = registry.CreateRenderDelegate(pxr::TfToken(delegate_id));
HdDriverVector hd_drivers;
pxr::HdDriverVector hd_drivers;
if (bl_engine->type->flag & RE_USE_GPU_CONTEXT) {
hgi = Hgi::CreatePlatformDefaultHgi();
hgi_driver.name = HgiTokens->renderDriver;
hgi_driver.driver = VtValue(hgi.get());
hgi = pxr::Hgi::CreatePlatformDefaultHgi();
hgi_driver.name = pxr::HgiTokens->renderDriver;
hgi_driver.driver = pxr::VtValue(hgi.get());
hd_drivers.push_back(&hgi_driver);
}
render_index.reset(HdRenderIndex::New(render_delegate.Get(), hd_drivers));
free_camera_delegate = std::make_unique<HdxFreeCameraSceneDelegate>(
render_index.get(), SdfPath::AbsoluteRootPath().AppendElementString("freeCamera"));
render_index.reset(pxr::HdRenderIndex::New(render_delegate.Get(), hd_drivers));
free_camera_delegate = std::make_unique<pxr::HdxFreeCameraSceneDelegate>(
render_index.get(), pxr::SdfPath::AbsoluteRootPath().AppendElementString("freeCamera"));
render_task_delegate = std::make_unique<RenderTaskDelegate>(
render_index.get(), SdfPath::AbsoluteRootPath().AppendElementString("renderTask"));
render_index.get(), pxr::SdfPath::AbsoluteRootPath().AppendElementString("renderTask"));
if (render_delegate->GetRendererDisplayName() == "GL") {
simple_light_task_delegate = std::make_unique<SimpleLightTaskDelegate>(
render_index.get(), SdfPath::AbsoluteRootPath().AppendElementString("simpleLightTask"));
render_index.get(),
pxr::SdfPath::AbsoluteRootPath().AppendElementString("simpleLightTask"));
}
engine = std::make_unique<HdEngine>();
engine = std::make_unique<pxr::HdEngine>();
}
Engine::~Engine()
@ -60,7 +58,7 @@ Engine::~Engine()
float Engine::renderer_percent_done()
{
VtDictionary render_stats = render_delegate->GetRenderStats();
pxr::VtDictionary render_stats = render_delegate->GetRenderStats();
auto it = render_stats.find("percentDone");
if (it == render_stats.end()) {
return 0.0;
@ -68,4 +66,4 @@ float Engine::renderer_percent_done()
return (float)it->second.UncheckedGet<double>();
}
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -7,44 +7,46 @@
#include <Python.h>
#include <pxr/imaging/hd/driver.h>
#include <pxr/imaging/hd/engine.h>
#include <pxr/imaging/hd/pluginRenderDelegateUniqueHandle.h>
#include <pxr/imaging/hd/driver.h>
#include <pxr/imaging/hdx/freeCameraSceneDelegate.h>
#include <pxr/imaging/hgi/hgi.h>
#include "RE_engine.h"
#include "sceneDelegate/blenderSceneDelegate.h"
#include "renderTaskDelegate.h"
#include "simpleLightTaskDelegate.h"
#include "render_task_delegate.h"
#include "scene_delegate/blender_scene_delegate.h"
#include "simple_light_task_delegate.h"
namespace blender::render::hydra {
class Engine {
public:
public:
Engine(RenderEngine *bl_engine, const std::string &render_delegate_id);
virtual ~Engine();
virtual void sync(Depsgraph *depsgraph, bContext *context, pxr::HdRenderSettingsMap &renderSettings) = 0;
virtual void sync(Depsgraph *depsgraph,
bContext *context,
pxr::HdRenderSettingsMap &render_settings) = 0;
virtual void render(Depsgraph *depsgraph) = 0;
protected:
protected:
float renderer_percent_done();
protected:
protected:
RenderEngine *bl_engine;
HdPluginRenderDelegateUniqueHandle render_delegate;
std::unique_ptr<HdRenderIndex> render_index;
pxr::HdPluginRenderDelegateUniqueHandle render_delegate;
std::unique_ptr<pxr::HdRenderIndex> render_index;
std::unique_ptr<BlenderSceneDelegate> scene_delegate;
std::unique_ptr<RenderTaskDelegate> render_task_delegate;
std::unique_ptr<HdxFreeCameraSceneDelegate> free_camera_delegate;
std::unique_ptr<pxr::HdxFreeCameraSceneDelegate> free_camera_delegate;
std::unique_ptr<SimpleLightTaskDelegate> simple_light_task_delegate;
std::unique_ptr<HdEngine> engine;
std::unique_ptr<pxr::HdEngine> engine;
HgiUniquePtr hgi;
HdDriver hgi_driver;
pxr::HgiUniquePtr hgi;
pxr::HdDriver hgi_driver;
};
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -1,239 +0,0 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#include <pxr/imaging/glf/drawTarget.h>
#include "DEG_depsgraph_query.h"
#include "BKE_lib_id.h"
#include "glog/logging.h"
#include "finalEngine.h"
#include "camera.h"
#include "utils.h"
using namespace std;
using namespace pxr;
namespace blender::render::hydra {
void FinalEngine::sync(Depsgraph *depsgraph, bContext *context, HdRenderSettingsMap &renderSettings)
{
scene_delegate = std::make_unique<BlenderSceneDelegate>(render_index.get(),
SdfPath::AbsoluteRootPath().AppendElementString("scene"), BlenderSceneDelegate::EngineType::Final);
scene_delegate->populate(depsgraph, context);
for (auto const& setting : renderSettings) {
render_delegate->SetRenderSetting(setting.first, setting.second);
}
}
void FinalEngine::render(Depsgraph *depsgraph)
{
Scene *scene = DEG_get_input_scene(depsgraph);
ViewLayer *view_layer = DEG_get_input_view_layer(depsgraph);
string scene_name(MAX_ID_FULL_NAME, 0);
BKE_id_full_name_get(scene_name.data(), (ID *)scene, 0);
string layer_name = view_layer->name;
RenderData &r = scene->r;
GfVec4f border(0, 0, 1, 1);
if (r.mode & R_BORDER) {
border = GfVec4f(r.border.xmin, r.border.ymin,
r.border.xmax - r.border.xmin, r.border.ymax - r.border.ymin);
}
GfVec2i image_res(r.xsch * r.size / 100, r.ysch * r.size / 100);
GfVec2i res(int(image_res[0] * border[2]), int(image_res[1] * border[3]));
GfCamera camera = CameraData(scene->camera, image_res, GfVec4f(0, 0, 1, 1)).gf_camera(border);
free_camera_delegate->SetCamera(camera);
render_task_delegate->SetCameraAndViewport(free_camera_delegate->GetCameraId(), GfVec4d(0, 0, res[0], res[1]));
render_task_delegate->SetRendererAov(HdAovTokens->color);
if (simple_light_task_delegate) {
simple_light_task_delegate->SetCameraPath(free_camera_delegate->GetCameraId());
}
HdTaskSharedPtrVector tasks;
if (simple_light_task_delegate) {
tasks.push_back(simple_light_task_delegate->GetTask());
}
tasks.push_back(render_task_delegate->GetTask());
chrono::time_point<chrono::steady_clock> timeBegin = chrono::steady_clock::now(), timeCurrent;
chrono::milliseconds elapsedTime;
float percentDone = 0.0;
map<string, vector<float>> renderImages{
{"Combined", vector<float>(res[0] * res[1] * 4)}}; // 4 - number of channels
vector<float> &pixels = renderImages["Combined"];
{
// Release the GIL before calling into hydra, in case any hydra plugins call into python.
TF_PY_ALLOW_THREADS_IN_SCOPE();
engine->Execute(render_index.get(), &tasks);
}
while (true) {
if (RE_engine_test_break(bl_engine)) {
break;
}
percentDone = renderer_percent_done();
timeCurrent = chrono::steady_clock::now();
elapsedTime = chrono::duration_cast<chrono::milliseconds>(timeCurrent - timeBegin);
notify_status(percentDone / 100.0, scene_name + ": " + layer_name,
"Render Time: " + format_duration(elapsedTime) + " | Done: " + to_string(int(percentDone)) + "%");
if (render_task_delegate->IsConverged()) {
break;
}
render_task_delegate->GetRendererAovData(HdAovTokens->color, pixels.data());
updateRenderResult(renderImages, layer_name, res[0], res[1]);
}
render_task_delegate->GetRendererAovData(HdAovTokens->color, pixels.data());
updateRenderResult(renderImages, layer_name, res[0], res[1]);
}
GfVec2i FinalEngine::get_resolution(Scene *scene)
{
RenderData &r = scene->r;
float border_w = 1.0, border_h = 1.0;
if (r.mode & R_BORDER) {
border_w = r.border.xmax - r.border.xmin;
border_h = r.border.ymax - r.border.ymin;
}
return GfVec2i(int(r.xsch * border_w * r.size / 100), int(r.ysch * border_h * r.size / 100));
}
void FinalEngine::updateRenderResult(map<string, vector<float>>& renderImages, const string &layerName, int width, int height)
{
RenderResult *result = RE_engine_begin_result(bl_engine, 0, 0, width, height, layerName.c_str(), nullptr);
/* TODO: only for the first render layer */
RenderLayer *layer = (RenderLayer *)result->layers.first;
for (RenderPass *pass = (RenderPass *)layer->passes.first; pass != nullptr; pass = pass->next) {
auto it_image = renderImages.find(pass->name);
if (it_image == renderImages.end()) {
continue;
}
memcpy(pass->rect, it_image->second.data(),
sizeof(float) * pass->rectx * pass->recty * pass->channels);
}
RE_engine_end_result(bl_engine, result, false, false, false);
}
void FinalEngine::notify_status(float progress, const string &title, const string &info)
{
RE_engine_update_progress(bl_engine, progress);
RE_engine_update_stats(bl_engine, title.c_str(), info.c_str());
}
void FinalEngineGL::render(Depsgraph *depsgraph)
{
Scene *scene = DEG_get_input_scene(depsgraph);
ViewLayer *view_layer = DEG_get_input_view_layer(depsgraph);
string scene_name(MAX_ID_FULL_NAME, 0);
BKE_id_full_name_get(scene_name.data(), (ID *)scene, 0);
string layer_name = view_layer->name;
RenderData &r = scene->r;
GfVec4f border(0, 0, 1, 1);
if (r.mode & R_BORDER) {
border = GfVec4f(r.border.xmin, r.border.ymin,
r.border.xmax - r.border.xmin, r.border.ymax - r.border.ymin);
}
GfVec2i image_res = {r.xsch * r.size / 100, r.ysch * r.size / 100};
GfVec2i res = {int(image_res[0] * border[2]), int(image_res[1] * border[3])};
GfCamera camera = CameraData(scene->camera, image_res, GfVec4f(0, 0, 1, 1)).gf_camera(border);
free_camera_delegate->SetCamera(camera);
render_task_delegate->SetCameraAndViewport(free_camera_delegate->GetCameraId(),
GfVec4d(0, 0, res[0], res[1]));
if (simple_light_task_delegate) {
simple_light_task_delegate->SetCameraPath(free_camera_delegate->GetCameraId());
}
HdTaskSharedPtrVector tasks;
if (simple_light_task_delegate) {
/* TODO: Uncomment this and fix GL error:
invalid operation, reported from void __cdecl pxrInternal_v0_22__pxrReserved__::HgiGLResourceBindings::BindResources(void) */
// tasks.push_back(simple_light_task_delegate->GetTask());
}
tasks.push_back(render_task_delegate->GetTask());
chrono::time_point<chrono::steady_clock> timeBegin = chrono::steady_clock::now(), timeCurrent;
chrono::milliseconds elapsedTime;
float percentDone = 0.0;
map<string, vector<float>> renderImages{
{"Combined", vector<float>(res[0] * res[1] * 4)}}; // 4 - number of channels
vector<float> &pixels = renderImages["Combined"];
GLuint FramebufferName = 0;
glGenFramebuffers(1, &FramebufferName);
glBindFramebuffer(GL_FRAMEBUFFER, FramebufferName);
// The texture we're going to render to
GLuint renderedTexture;
glGenTextures(1, &renderedTexture);
// "Bind" the newly created texture : all future texture functions will modify this texture
glBindTexture(GL_TEXTURE_2D, renderedTexture);
// Give an empty image to OpenGL ( the last "0" )
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA32F, res[0], res[1], 0, GL_RGBA, GL_FLOAT, 0);
// Poor filtering. Needed !
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
// Set "renderedTexture" as our colour attachement #0
glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, renderedTexture, 0);
// Generate vertex array
GLuint VAO;
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
{
// Release the GIL before calling into hydra, in case any hydra plugins call into python.
TF_PY_ALLOW_THREADS_IN_SCOPE();
engine->Execute(render_index.get(), &tasks);
}
while (true) {
if (RE_engine_test_break(bl_engine)) {
break;
}
percentDone = renderer_percent_done();
timeCurrent = chrono::steady_clock::now();
elapsedTime = chrono::duration_cast<chrono::milliseconds>(timeCurrent - timeBegin);
notify_status(percentDone / 100.0,
scene_name + ": " + layer_name,
"Render Time: " + format_duration(elapsedTime) +
" | Done: " + to_string(int(percentDone)) + "%");
if (render_task_delegate->IsConverged()) {
break;
}
glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_FLOAT, pixels.data());
updateRenderResult(renderImages, layer_name, res[0], res[1]);
}
glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_FLOAT, pixels.data());
updateRenderResult(renderImages, layer_name, res[0], res[1]);
}
} // namespace blender::render::hydra

View File

@ -0,0 +1,259 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#include <pxr/imaging/glf/drawTarget.h>
#include "BKE_lib_id.h"
#include "DEG_depsgraph_query.h"
#include "glog/logging.h"
#include "camera.h"
#include "final_engine.h"
#include "utils.h"
namespace blender::render::hydra {
void FinalEngine::sync(Depsgraph *depsgraph,
bContext *context,
pxr::HdRenderSettingsMap &render_settings)
{
scene_delegate = std::make_unique<BlenderSceneDelegate>(
render_index.get(),
pxr::SdfPath::AbsoluteRootPath().AppendElementString("scene"),
BlenderSceneDelegate::EngineType::FINAL);
scene_delegate->populate(depsgraph, context);
for (auto const &setting : render_settings) {
render_delegate->SetRenderSetting(setting.first, setting.second);
}
}
void FinalEngine::render(Depsgraph *depsgraph)
{
Scene *scene = DEG_get_input_scene(depsgraph);
ViewLayer *view_layer = DEG_get_input_view_layer(depsgraph);
std::string scene_name(MAX_ID_FULL_NAME, 0);
BKE_id_full_name_get(scene_name.data(), (ID *)scene, 0);
std::string layer_name = view_layer->name;
RenderData &r = scene->r;
pxr::GfVec4f border(0, 0, 1, 1);
if (r.mode & R_BORDER) {
border = pxr::GfVec4f(r.border.xmin,
r.border.ymin,
r.border.xmax - r.border.xmin,
r.border.ymax - r.border.ymin);
}
pxr::GfVec2i image_res(r.xsch * r.size / 100, r.ysch * r.size / 100);
pxr::GfVec2i res(int(image_res[0] * border[2]), int(image_res[1] * border[3]));
pxr::GfCamera camera =
CameraData(scene->camera, image_res, pxr::GfVec4f(0, 0, 1, 1)).gf_camera(border);
free_camera_delegate->SetCamera(camera);
render_task_delegate->set_camera_and_viewport(free_camera_delegate->GetCameraId(),
pxr::GfVec4d(0, 0, res[0], res[1]));
render_task_delegate->set_renderer_aov(pxr::HdAovTokens->color);
if (simple_light_task_delegate) {
simple_light_task_delegate->set_camera_path(free_camera_delegate->GetCameraId());
}
pxr::HdTaskSharedPtrVector tasks;
if (simple_light_task_delegate) {
tasks.push_back(simple_light_task_delegate->get_task());
}
tasks.push_back(render_task_delegate->get_task());
std::chrono::time_point<std::chrono::steady_clock> time_begin = std::chrono::steady_clock::now(),
time_current;
std::chrono::milliseconds elapsed_time;
float percent_done = 0.0;
std::map<std::string, std::vector<float>> render_images{
{"Combined", std::vector<float>(res[0] * res[1] * 4)}}; // 4 - number of channels
std::vector<float> &pixels = render_images["Combined"];
{
// Release the GIL before calling into hydra, in case any hydra plugins call into python.
pxr::TF_PY_ALLOW_THREADS_IN_SCOPE();
engine->Execute(render_index.get(), &tasks);
}
while (true) {
if (RE_engine_test_break(bl_engine)) {
break;
}
percent_done = renderer_percent_done();
time_current = std::chrono::steady_clock::now();
elapsed_time = std::chrono::duration_cast<std::chrono::milliseconds>(time_current -
time_begin);
notify_status(percent_done / 100.0,
scene_name + ": " + layer_name,
"Render Time: " + format_duration(elapsed_time) +
" | Done: " + std::to_string(int(percent_done)) + "%");
if (render_task_delegate->is_converged()) {
break;
}
render_task_delegate->get_renderer_aov_data(pxr::HdAovTokens->color, pixels.data());
update_render_result(render_images, layer_name, res[0], res[1]);
}
render_task_delegate->get_renderer_aov_data(pxr::HdAovTokens->color, pixels.data());
update_render_result(render_images, layer_name, res[0], res[1]);
}
pxr::GfVec2i FinalEngine::get_resolution(Scene *scene)
{
RenderData &r = scene->r;
float border_w = 1.0, border_h = 1.0;
if (r.mode & R_BORDER) {
border_w = r.border.xmax - r.border.xmin;
border_h = r.border.ymax - r.border.ymin;
}
return pxr::GfVec2i(int(r.xsch * border_w * r.size / 100),
int(r.ysch * border_h * r.size / 100));
}
void FinalEngine::update_render_result(std::map<std::string, std::vector<float>> &render_images,
const std::string &layer_name,
int width,
int height)
{
RenderResult *result = RE_engine_begin_result(
bl_engine, 0, 0, width, height, layer_name.c_str(), nullptr);
/* TODO: only for the first render layer */
RenderLayer *layer = (RenderLayer *)result->layers.first;
for (RenderPass *pass = (RenderPass *)layer->passes.first; pass != nullptr; pass = pass->next) {
auto it_image = render_images.find(pass->name);
if (it_image == render_images.end()) {
continue;
}
memcpy(pass->rect,
it_image->second.data(),
sizeof(float) * pass->rectx * pass->recty * pass->channels);
}
RE_engine_end_result(bl_engine, result, false, false, false);
}
void FinalEngine::notify_status(float progress, const std::string &title, const std::string &info)
{
RE_engine_update_progress(bl_engine, progress);
RE_engine_update_stats(bl_engine, title.c_str(), info.c_str());
}
void FinalEngineGL::render(Depsgraph *depsgraph)
{
Scene *scene = DEG_get_input_scene(depsgraph);
ViewLayer *view_layer = DEG_get_input_view_layer(depsgraph);
std::string scene_name(MAX_ID_FULL_NAME, 0);
BKE_id_full_name_get(scene_name.data(), (ID *)scene, 0);
std::string layer_name = view_layer->name;
RenderData &r = scene->r;
pxr::GfVec4f border(0, 0, 1, 1);
if (r.mode & R_BORDER) {
border = pxr::GfVec4f(r.border.xmin,
r.border.ymin,
r.border.xmax - r.border.xmin,
r.border.ymax - r.border.ymin);
}
pxr::GfVec2i image_res = {r.xsch * r.size / 100, r.ysch * r.size / 100};
pxr::GfVec2i res = {int(image_res[0] * border[2]), int(image_res[1] * border[3])};
pxr::GfCamera camera =
CameraData(scene->camera, image_res, pxr::GfVec4f(0, 0, 1, 1)).gf_camera(border);
free_camera_delegate->SetCamera(camera);
render_task_delegate->set_camera_and_viewport(free_camera_delegate->GetCameraId(),
pxr::GfVec4d(0, 0, res[0], res[1]));
if (simple_light_task_delegate) {
simple_light_task_delegate->set_camera_path(free_camera_delegate->GetCameraId());
}
pxr::HdTaskSharedPtrVector tasks;
if (simple_light_task_delegate) {
/* TODO: Uncomment this and fix GL error:
invalid operation, reported from void __cdecl
pxrInternal_v0_22__pxrReserved__::HgiGLResourceBindings::BindResources(void) */
// tasks.push_back(simple_light_task_delegate->get_task());
}
tasks.push_back(render_task_delegate->get_task());
std::chrono::time_point<std::chrono::steady_clock> time_begin = std::chrono::steady_clock::now(),
time_current;
std::chrono::milliseconds elapsed_time;
float percent_done = 0.0;
std::map<std::string, std::vector<float>> render_images{
{"Combined", std::vector<float>(res[0] * res[1] * 4)}}; // 4 - number of channels
std::vector<float> &pixels = render_images["Combined"];
GLuint framebuffer_name = 0;
glGenFramebuffers(1, &framebuffer_name);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_name);
// The texture we're going to render to
GLuint rendered_texture;
glGenTextures(1, &rendered_texture);
// "Bind" the newly created texture : all future texture functions will modify this texture
glBindTexture(GL_TEXTURE_2D, rendered_texture);
// Give an empty image to OpenGL ( the last "0" )
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA32F, res[0], res[1], 0, GL_RGBA, GL_FLOAT, 0);
// Poor filtering. Needed !
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
// Set "rendered_texture" as our colour attachement #0
glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, rendered_texture, 0);
// Generate vertex array
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
{
// Release the GIL before calling into hydra, in case any hydra plugins call into python.
pxr::TF_PY_ALLOW_THREADS_IN_SCOPE();
engine->Execute(render_index.get(), &tasks);
}
while (true) {
if (RE_engine_test_break(bl_engine)) {
break;
}
percent_done = renderer_percent_done();
time_current = std::chrono::steady_clock::now();
elapsed_time = std::chrono::duration_cast<std::chrono::milliseconds>(time_current -
time_begin);
notify_status(percent_done / 100.0,
scene_name + ": " + layer_name,
"Render Time: " + format_duration(elapsed_time) +
" | Done: " + std::to_string(int(percent_done)) + "%");
if (render_task_delegate->is_converged()) {
break;
}
glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_FLOAT, pixels.data());
update_render_result(render_images, layer_name, res[0], res[1]);
}
glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_FLOAT, pixels.data());
update_render_result(render_images, layer_name, res[0], res[1]);
}
} // namespace blender::render::hydra

View File

@ -10,18 +10,23 @@
namespace blender::render::hydra {
class FinalEngine : public Engine {
public:
public:
using Engine::Engine;
virtual void sync(Depsgraph *depsgraph, bContext *context, pxr::HdRenderSettingsMap &renderSettings) override;
virtual void sync(Depsgraph *depsgraph,
bContext *context,
pxr::HdRenderSettingsMap &render_settings) override;
virtual void render(Depsgraph *b_depsgraph) override;
protected:
protected:
pxr::GfVec2i get_resolution(Scene *scene);
void updateRenderResult(std::map<std::string, std::vector<float>> &render_images, const std::string &layerName, int width, int height);
void update_render_result(std::map<std::string, std::vector<float>> &render_images,
const std::string &layer_name,
int width,
int height);
void notify_status(float progress, const std::string &title, const std::string &info);
protected:
HdRenderSettingsMap renderSettings;
protected:
pxr::HdRenderSettingsMap render_settings;
};
class FinalEngineGL : public FinalEngine {
@ -30,4 +35,4 @@ class FinalEngineGL : public FinalEngine {
void render(Depsgraph *depsgraph) override;
};
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -1,81 +0,0 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#include "DEG_depsgraph_query.h"
#include "previewEngine.h"
#include "camera.h"
using namespace pxr;
using namespace std;
namespace blender::render::hydra {
void PreviewEngine::sync(Depsgraph *depsgraph, bContext *context, HdRenderSettingsMap &renderSettings)
{
scene_delegate = make_unique<BlenderSceneDelegate>(render_index.get(),
SdfPath::AbsoluteRootPath().AppendElementString("scene"), BlenderSceneDelegate::EngineType::Preview);
scene_delegate->populate(depsgraph, context);
for (auto const& setting : renderSettings) {
render_delegate->SetRenderSetting(setting.first, setting.second);
}
}
void PreviewEngine::render(Depsgraph *depsgraph)
{
Scene *scene = DEG_get_input_scene(depsgraph);
ViewLayer *view_layer = DEG_get_input_view_layer(depsgraph);
string layerName = view_layer->name;
GfVec2i res(scene->r.xsch, scene->r.ysch);
GfCamera camera = CameraData(scene->camera, res, GfVec4f(0, 0, 1, 1)).gf_camera(GfVec4f(0, 0, 1, 1));
free_camera_delegate->SetCamera(camera);
render_task_delegate->SetCameraAndViewport(free_camera_delegate->GetCameraId(), GfVec4d(0, 0, res[0], res[1]));
render_task_delegate->SetRendererAov(HdAovTokens->color);
HdTaskSharedPtrVector tasks;
if (simple_light_task_delegate) {
tasks.push_back(simple_light_task_delegate->GetTask());
}
tasks.push_back(render_task_delegate->GetTask());
vector<float> pixels = vector<float>(res[0] * res[1] * 4); // 4 - number of channels
{
// Release the GIL before calling into hydra, in case any hydra plugins call into python.
TF_PY_ALLOW_THREADS_IN_SCOPE();
engine->Execute(render_index.get(), &tasks);
}
while (true) {
if (RE_engine_test_break(bl_engine)) {
break;
}
if (render_task_delegate->IsConverged()) {
break;
}
render_task_delegate->GetRendererAovData(HdAovTokens->color, pixels.data());
updateRenderResult(layerName, res[0], res[1], pixels);
}
render_task_delegate->GetRendererAovData(HdAovTokens->color, pixels.data());
updateRenderResult(layerName, res[0], res[1], pixels);
}
void PreviewEngine::updateRenderResult(const string &layerName, int width, int height, vector<float> &pixels)
{
RenderResult *result = RE_engine_begin_result(bl_engine, 0, 0, width, height, layerName.c_str(), nullptr);
RenderLayer *layer = (RenderLayer *)result->layers.first;
RenderPass *pass = (RenderPass *)layer->passes.first;
memcpy(pass->rect, pixels.data(), sizeof(float) * pass->rectx * pass->recty * pass->channels);
RE_engine_end_result(bl_engine, result, false, false, false);
}
} // namespace blender::render::hydra

View File

@ -1,23 +0,0 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#pragma once
#include "finalEngine.h"
namespace blender::render::hydra {
class PreviewEngine : public FinalEngine {
public:
using FinalEngine::FinalEngine;
void sync(Depsgraph *depsgraph, bContext *context, pxr::HdRenderSettingsMap &renderSettings) override;
void render(Depsgraph *depsgraph) override;
protected:
void updateRenderResult(const std::string &layerName, int width, int height, std::vector<float> &pixels);
protected:
HdRenderSettingsMap renderSettings;
};
} // namespace blender::render::hydra

View File

@ -0,0 +1,88 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#include "DEG_depsgraph_query.h"
#include "camera.h"
#include "preview_engine.h"
namespace blender::render::hydra {
/* Build a fresh scene delegate for material-preview rendering, populate it
 * from the depsgraph and forward all render settings to the render delegate. */
void PreviewEngine::sync(Depsgraph *depsgraph,
                         bContext *context,
                         pxr::HdRenderSettingsMap &render_settings)
{
  pxr::SdfPath scene_path = pxr::SdfPath::AbsoluteRootPath().AppendElementString("scene");
  scene_delegate = std::make_unique<BlenderSceneDelegate>(
      render_index.get(), scene_path, BlenderSceneDelegate::EngineType::PREVIEW);
  scene_delegate->populate(depsgraph, context);

  /* Push every requested setting to the active render delegate. */
  for (auto const &[key, value] : render_settings) {
    render_delegate->SetRenderSetting(key, value);
  }
}
/* Render the material preview: set up camera/viewport and the color AOV,
 * execute the Hydra task list, then stream intermediate results to Blender
 * until the renderer converges or the user cancels. */
void PreviewEngine::render(Depsgraph *depsgraph)
{
  Scene *scene = DEG_get_input_scene(depsgraph);
  ViewLayer *view_layer = DEG_get_input_view_layer(depsgraph);
  const std::string layer_name = view_layer->name;

  const pxr::GfVec2i resolution(scene->r.xsch, scene->r.ysch);
  const pxr::GfVec4f full_frame(0, 0, 1, 1);
  free_camera_delegate->SetCamera(
      CameraData(scene->camera, resolution, full_frame).gf_camera(full_frame));
  render_task_delegate->set_camera_and_viewport(
      free_camera_delegate->GetCameraId(),
      pxr::GfVec4d(0, 0, resolution[0], resolution[1]));
  render_task_delegate->set_renderer_aov(pxr::HdAovTokens->color);

  pxr::HdTaskSharedPtrVector tasks;
  if (simple_light_task_delegate) {
    tasks.push_back(simple_light_task_delegate->get_task());
  }
  tasks.push_back(render_task_delegate->get_task());

  /* RGBA output buffer: 4 channels per pixel. */
  std::vector<float> pixels(resolution[0] * resolution[1] * 4);

  {
    // Release the GIL before calling into hydra, in case any hydra plugins call into python.
    pxr::TF_PY_ALLOW_THREADS_IN_SCOPE();
    engine->Execute(render_index.get(), &tasks);
  }

  /* Stream intermediate results; stop on user break or convergence. */
  while (!RE_engine_test_break(bl_engine) && !render_task_delegate->is_converged()) {
    render_task_delegate->get_renderer_aov_data(pxr::HdAovTokens->color, pixels.data());
    update_render_result(layer_name, resolution[0], resolution[1], pixels);
  }

  /* Always publish the final image, even after a break. */
  render_task_delegate->get_renderer_aov_data(pxr::HdAovTokens->color, pixels.data());
  update_render_result(layer_name, resolution[0], resolution[1], pixels);
}
/* Copy rendered pixels into Blender's render result for the given layer.
 * `pixels` must hold at least `rectx * recty * channels` floats of the
 * destination pass -- presumably width * height * 4 (RGBA); confirm against
 * the buffer allocated in render(). */
void PreviewEngine::update_render_result(const std::string &layer_name,
                                         int width,
                                         int height,
                                         std::vector<float> &pixels)
{
  RenderResult *result = RE_engine_begin_result(
      bl_engine, 0, 0, width, height, layer_name.c_str(), nullptr);
  /* Preview only fills the first pass of the first render layer. */
  RenderLayer *layer = (RenderLayer *)result->layers.first;
  RenderPass *pass = (RenderPass *)layer->passes.first;
  memcpy(pass->rect, pixels.data(), sizeof(float) * pass->rectx * pass->recty * pass->channels);
  RE_engine_end_result(bl_engine, result, false, false, false);
}
} // namespace blender::render::hydra

View File

@ -0,0 +1,28 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#pragma once
#include "final_engine.h"
namespace blender::render::hydra {
/* Engine used for Blender material-preview rendering; reuses the final
 * engine's render index and task delegates with a dedicated scene delegate. */
class PreviewEngine : public FinalEngine {
 public:
  using FinalEngine::FinalEngine;

  /* Rebuild the scene delegate and push `render_settings` to the renderer. */
  void sync(Depsgraph *depsgraph,
            bContext *context,
            pxr::HdRenderSettingsMap &render_settings) override;
  void render(Depsgraph *depsgraph) override;

 protected:
  /* Copy `pixels` into the first pass of the named render layer. */
  void update_render_result(const std::string &layer_name,
                            int width,
                            int height,
                            std::vector<float> &pixels);
  /* NOTE: no `render_settings` member here -- the previous duplicate
   * declaration shadowed FinalEngine's protected member of the same name. */
};
} // namespace blender::render::hydra

View File

@ -13,12 +13,10 @@
#include "glog/logging.h"
#include "finalEngine.h"
#include "viewportEngine.h"
#include "previewEngine.h"
#include "final_engine.h"
#include "preview_engine.h"
#include "utils.h"
using namespace std;
#include "viewport_engine.h"
namespace blender::render::hydra {
@ -47,7 +45,8 @@ static PyObject *init_func(PyObject * /*self*/, PyObject *args)
{
LOG(INFO) << "init_func";
pxr::PlugRegistry::GetInstance().RegisterPlugins(string(BKE_appdir_program_dir()) + "/blender.shared/usd");
pxr::PlugRegistry::GetInstance().RegisterPlugins(std::string(BKE_appdir_program_dir()) +
"/blender.shared/usd");
setup_usd_mtlx_environment();
@ -63,7 +62,7 @@ static PyObject *register_plugins_func(PyObject * /*self*/, PyObject *args)
LOG(INFO) << "register_plugins_func";
vector<string> plugin_dirs, path_dirs;
std::vector<std::string> plugin_dirs, path_dirs;
PyObject *pyiter, *pyitem;
pyiter = PyObject_GetIter(pyplugin_dirs);
@ -98,8 +97,8 @@ static PyObject *get_render_plugins_func(PyObject * /*self*/, PyObject *args)
{
LOG(INFO) << "get_render_plugins_func";
PlugRegistry &registry = PlugRegistry::GetInstance();
TfTokenVector plugin_ids = UsdImagingGLEngine::GetRendererPlugins();
pxr::PlugRegistry &registry = pxr::PlugRegistry::GetInstance();
pxr::TfTokenVector plugin_ids = pxr::UsdImagingGLEngine::GetRendererPlugins();
PyObject *ret = PyTuple_New(plugin_ids.size());
PyObject *val;
for (int i = 0; i < plugin_ids.size(); ++i) {
@ -108,15 +107,18 @@ static PyObject *get_render_plugins_func(PyObject * /*self*/, PyObject *args)
PyDict_SetItemString(descr, "id", val = PyUnicode_FromString(plugin_ids[i].GetText()));
Py_DECREF(val);
PyDict_SetItemString(descr, "name",
val = PyUnicode_FromString(UsdImagingGLEngine::GetRendererDisplayName(plugin_ids[i]).c_str()));
PyDict_SetItemString(
descr,
"name",
val = PyUnicode_FromString(
pxr::UsdImagingGLEngine::GetRendererDisplayName(plugin_ids[i]).c_str()));
Py_DECREF(val);
string plugin_name = plugin_ids[i];
std::string plugin_name = plugin_ids[i];
plugin_name = plugin_name.substr(0, plugin_name.size() - 6);
plugin_name[0] = tolower(plugin_name[0]);
string path = "";
PlugPluginPtr plugin = registry.GetPluginWithName(plugin_name);
std::string path = "";
pxr::PlugPluginPtr plugin = registry.GetPluginWithName(plugin_name);
if (plugin) {
path = plugin->GetPath();
}
@ -133,8 +135,8 @@ static PyObject *engine_create_func(PyObject * /*self*/, PyObject *args)
LOG(INFO) << "create_func";
PyObject *pyengine;
char *engineType, *render_delegate_id;
if (!PyArg_ParseTuple(args, "Oss", &pyengine, &engineType, &render_delegate_id)) {
char *engine_type, *render_delegate_id;
if (!PyArg_ParseTuple(args, "Oss", &pyengine, &engine_type, &render_delegate_id)) {
Py_RETURN_NONE;
}
@ -142,10 +144,10 @@ static PyObject *engine_create_func(PyObject * /*self*/, PyObject *args)
Engine *engine;
if (string(engineType) == "VIEWPORT") {
if (std::string(engine_type) == "VIEWPORT") {
engine = new ViewportEngine(bl_engine, render_delegate_id);
}
else if (string(engineType) == "PREVIEW") {
else if (std::string(engine_type) == "PREVIEW") {
engine = new PreviewEngine(bl_engine, render_delegate_id);
}
else {
@ -184,12 +186,12 @@ static PyObject *engine_sync_func(PyObject * /*self*/, PyObject *args)
Depsgraph *depsgraph = (Depsgraph *)PyLong_AsVoidPtr(pydepsgraph);
bContext *context = (bContext *)PyLong_AsVoidPtr(pycontext);
HdRenderSettingsMap settings;
pxr::HdRenderSettingsMap settings;
PyObject *pyiter = PyObject_GetIter(pysettings);
if (pyiter) {
PyObject *pykey, *pyval;
while (pykey = PyIter_Next(pyiter)) {
TfToken key(PyUnicode_AsUTF8(pykey));
pxr::TfToken key(PyUnicode_AsUTF8(pykey));
pyval = PyDict_GetItem(pysettings, pykey);
if (PyLong_Check(pyval)) {
@ -222,9 +224,9 @@ static PyObject *engine_render_func(PyObject * /*self*/, PyObject *args)
Depsgraph *depsgraph = (Depsgraph *)PyLong_AsVoidPtr(pydepsgraph);
/* Allow Blender to execute other Python scripts. */
Py_BEGIN_ALLOW_THREADS
engine->render(depsgraph);
Py_END_ALLOW_THREADS
Py_BEGIN_ALLOW_THREADS;
engine->render(depsgraph);
Py_END_ALLOW_THREADS;
Py_RETURN_NONE;
}
@ -241,40 +243,40 @@ static PyObject *engine_view_draw_func(PyObject * /*self*/, PyObject *args)
bContext *context = (bContext *)PyLong_AsVoidPtr(pycontext);
/* Allow Blender to execute other Python scripts. */
Py_BEGIN_ALLOW_THREADS
engine->render(depsgraph, context);
Py_END_ALLOW_THREADS
Py_BEGIN_ALLOW_THREADS;
engine->render(depsgraph, context);
Py_END_ALLOW_THREADS;
Py_RETURN_NONE;
}
static PyMethodDef methods[] = {
{"init", init_func, METH_VARARGS, ""},
{"register_plugins", register_plugins_func, METH_VARARGS, ""},
{"get_render_plugins", get_render_plugins_func, METH_VARARGS, ""},
{"init", init_func, METH_VARARGS, ""},
{"register_plugins", register_plugins_func, METH_VARARGS, ""},
{"get_render_plugins", get_render_plugins_func, METH_VARARGS, ""},
{"engine_create", engine_create_func, METH_VARARGS, ""},
{"engine_free", engine_free_func, METH_VARARGS, ""},
{"engine_render", engine_render_func, METH_VARARGS, ""},
{"engine_sync", engine_sync_func, METH_VARARGS, ""},
{"engine_view_draw", engine_view_draw_func, METH_VARARGS, ""},
{"engine_create", engine_create_func, METH_VARARGS, ""},
{"engine_free", engine_free_func, METH_VARARGS, ""},
{"engine_render", engine_render_func, METH_VARARGS, ""},
{"engine_sync", engine_sync_func, METH_VARARGS, ""},
{"engine_view_draw", engine_view_draw_func, METH_VARARGS, ""},
{NULL, NULL, 0, NULL},
{NULL, NULL, 0, NULL},
};
static struct PyModuleDef module = {
PyModuleDef_HEAD_INIT,
"_hydra",
"Hydra render API",
-1,
methods,
NULL,
NULL,
NULL,
NULL,
PyModuleDef_HEAD_INIT,
"_hydra",
"Hydra render API",
-1,
methods,
NULL,
NULL,
NULL,
NULL,
};
} // namespace blender::render::hydra
} // namespace blender::render::hydra
#ifdef __cplusplus
extern "C" {

View File

@ -1,124 +0,0 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#include <iostream>
#include <pxr/imaging/hd/renderBuffer.h>
#include <pxr/imaging/hd/renderDelegate.h>
#include <pxr/imaging/hdx/renderTask.h>
#include "glog/logging.h"
#include "renderTaskDelegate.h"
namespace blender::render::hydra {
RenderTaskDelegate::RenderTaskDelegate(HdRenderIndex* parentIndex, SdfPath const& delegateID)
: HdSceneDelegate(parentIndex, delegateID)
{
SdfPath renderTaskId = GetTaskID();
GetRenderIndex().InsertTask<HdxRenderTask>(this, renderTaskId);
GetRenderIndex().GetChangeTracker().MarkTaskDirty(renderTaskId, HdChangeTracker::DirtyCollection);
GetRenderIndex().GetChangeTracker().MarkTaskDirty(renderTaskId, HdChangeTracker::DirtyRenderTags);
taskParams.enableLighting = true;
taskParams.alphaThreshold = 0.1f;
}
SdfPath RenderTaskDelegate::GetTaskID() const
{
return GetDelegateID().AppendElementString("task");
}
SdfPath RenderTaskDelegate::GetAovID(TfToken const &aov) const
{
return GetDelegateID().AppendElementString("aov_" + aov.GetString());
}
VtValue RenderTaskDelegate::Get(SdfPath const& id, TfToken const& key)
{
LOG(INFO) << "RenderTaskDelegate::Get - " << id.GetAsString() << " " << key.GetString() << "\n";
if (key == HdTokens->params) {
return VtValue(taskParams);
}
if (key == HdTokens->collection) {
HdRprimCollection rprimCollection(HdTokens->geometry, HdReprSelector(HdReprTokens->smoothHull), false, TfToken());
rprimCollection.SetRootPath(SdfPath::AbsoluteRootPath());
return VtValue(rprimCollection);
}
return VtValue();
}
HdRenderBufferDescriptor RenderTaskDelegate::GetRenderBufferDescriptor(SdfPath const &id)
{
LOG(INFO) << "RenderTaskDelegate::GetRenderBufferDescriptor - " << id.GetAsString() << "\n";
return bufferDescriptors[id];
}
TfTokenVector RenderTaskDelegate::GetTaskRenderTags(SdfPath const &taskId)
{
LOG(INFO) << "RenderTaskDelegate::GetTaskRenderTags - " << taskId.GetAsString() << "\n";
return { HdRenderTagTokens->geometry };
}
bool RenderTaskDelegate::IsConverged()
{
HdTaskSharedPtr renderTask = GetRenderIndex().GetTask(GetTaskID());
return ((HdxRenderTask &)*renderTask).IsConverged();
}
void RenderTaskDelegate::SetRendererAov(TfToken const &aov)
{
HdAovDescriptor aovDesc = GetRenderIndex().GetRenderDelegate()->GetDefaultAovDescriptor(aov);
HdRenderBufferDescriptor desc(GfVec3i(taskParams.viewport[2], taskParams.viewport[3], 1),
aovDesc.format, aovDesc.multiSampled);
SdfPath bufferId = GetAovID(aov);
if (bufferDescriptors.find(bufferId) == bufferDescriptors.end()) {
GetRenderIndex().InsertBprim(HdPrimTypeTokens->renderBuffer, this, bufferId);
bufferDescriptors[bufferId] = desc;
GetRenderIndex().GetChangeTracker().MarkBprimDirty(bufferId, HdRenderBuffer::DirtyDescription);
HdRenderPassAovBinding binding;
binding.aovName = aov;
binding.renderBufferId = bufferId;
binding.aovSettings = aovDesc.aovSettings;
taskParams.aovBindings.push_back(binding);
GetRenderIndex().GetChangeTracker().MarkTaskDirty(GetTaskID(), HdChangeTracker::DirtyParams);
}
else if (bufferDescriptors[bufferId] != desc) {
bufferDescriptors[bufferId] = desc;
GetRenderIndex().GetChangeTracker().MarkBprimDirty(bufferId, HdRenderBuffer::DirtyDescription);
}
}
HdRenderBuffer *RenderTaskDelegate::GetRendererAov(TfToken const &aov)
{
return (HdRenderBuffer *)(GetRenderIndex().GetBprim(HdPrimTypeTokens->renderBuffer, GetAovID(aov)));
}
void RenderTaskDelegate::GetRendererAovData(TfToken const &aov, void *data)
{
HdRenderBuffer *buffer = GetRendererAov(aov);
void *bufData = buffer->Map();
memcpy(data, bufData, buffer->GetWidth() * buffer->GetHeight() * HdDataSizeOfFormat(buffer->GetFormat()));
buffer->Unmap();
}
HdTaskSharedPtr RenderTaskDelegate::GetTask()
{
return GetRenderIndex().GetTask(GetTaskID());
}
void RenderTaskDelegate::SetCameraAndViewport(SdfPath const &cameraId, GfVec4d const &viewport)
{
if (taskParams.viewport != viewport || taskParams.camera != cameraId) {
taskParams.viewport = viewport;
taskParams.camera = cameraId;
GetRenderIndex().GetChangeTracker().MarkTaskDirty(GetTaskID(), HdChangeTracker::DirtyParams);
}
}
} // namespace blender::render::hydra

View File

@ -1,39 +0,0 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#pragma once
#include <pxr/imaging/hdx/renderSetupTask.h>
#include <pxr/imaging/hd/sceneDelegate.h>
using namespace pxr;
namespace blender::render::hydra {
class RenderTaskDelegate : public HdSceneDelegate
{
public:
RenderTaskDelegate(HdRenderIndex* parentIndex, SdfPath const &delegateID);
~RenderTaskDelegate() override = default;
SdfPath GetTaskID() const;
SdfPath GetAovID(TfToken const &aov) const;
VtValue Get(SdfPath const &id, TfToken const &key) override;
HdRenderBufferDescriptor GetRenderBufferDescriptor(SdfPath const &id) override;
TfTokenVector GetTaskRenderTags(SdfPath const &taskId) override;
bool IsConverged();
void SetRendererAov(TfToken const &aovId);
HdRenderBuffer *GetRendererAov(TfToken const &id);
void GetRendererAovData(TfToken const &id, void *buf);
HdTaskSharedPtr GetTask();
void SetCameraAndViewport(SdfPath const &cameraId, GfVec4d const &viewport);
private:
HdxRenderTaskParams taskParams;
TfHashMap<SdfPath, HdRenderBufferDescriptor, SdfPath::Hash> bufferDescriptors;
};
} // namespace blender::render::hydra

View File

@ -0,0 +1,140 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#include <iostream>
#include <pxr/imaging/hd/renderBuffer.h>
#include <pxr/imaging/hd/renderDelegate.h>
#include <pxr/imaging/hdx/renderTask.h>
#include "glog/logging.h"
#include "render_task_delegate.h"
namespace blender::render::hydra {
/* Insert an HdxRenderTask under this delegate and mark it dirty so the first
 * sync pulls in collection and render-tag state; enable lit shading by
 * default with a small alpha-test threshold. */
RenderTaskDelegate::RenderTaskDelegate(pxr::HdRenderIndex *parent_index,
                                       pxr::SdfPath const &delegate_id)
    : pxr::HdSceneDelegate(parent_index, delegate_id)
{
  pxr::SdfPath task_id = get_task_id();
  GetRenderIndex().InsertTask<pxr::HdxRenderTask>(this, task_id);

  pxr::HdChangeTracker &change_tracker = GetRenderIndex().GetChangeTracker();
  change_tracker.MarkTaskDirty(task_id, pxr::HdChangeTracker::DirtyCollection);
  change_tracker.MarkTaskDirty(task_id, pxr::HdChangeTracker::DirtyRenderTags);

  task_params.enableLighting = true;
  task_params.alphaThreshold = 0.1f;
}
/* Path of the render task: "<delegate root>/task". */
pxr::SdfPath RenderTaskDelegate::get_task_id() const
{
  return GetDelegateID().AppendElementString("task");
}
/* Path of the render buffer for `aov`: "<delegate root>/aov_<name>". */
pxr::SdfPath RenderTaskDelegate::get_aov_id(pxr::TfToken const &aov) const
{
  return GetDelegateID().AppendElementString("aov_" + aov.GetString());
}
/* HdSceneDelegate override: serve the task's parameters and the rprim
 * collection it renders; any other key yields an empty VtValue. */
pxr::VtValue RenderTaskDelegate::Get(pxr::SdfPath const &id, pxr::TfToken const &key)
{
  LOG(INFO) << "RenderTaskDelegate::Get - " << id.GetAsString() << " " << key.GetString() << "\n";

  pxr::VtValue result;
  if (key == pxr::HdTokens->params) {
    result = pxr::VtValue(task_params);
  }
  else if (key == pxr::HdTokens->collection) {
    /* Render all geometry rprims below the absolute root with smooth-hull repr. */
    pxr::HdRprimCollection collection(pxr::HdTokens->geometry,
                                      pxr::HdReprSelector(pxr::HdReprTokens->smoothHull),
                                      false,
                                      pxr::TfToken());
    collection.SetRootPath(pxr::SdfPath::AbsoluteRootPath());
    result = pxr::VtValue(collection);
  }
  return result;
}
/* HdSceneDelegate override: descriptor of the AOV buffer with the given id.
 * NOTE(review): operator[] default-inserts an empty descriptor for unknown
 * ids -- presumably only ids created by set_renderer_aov() are queried;
 * confirm with callers. */
pxr::HdRenderBufferDescriptor RenderTaskDelegate::GetRenderBufferDescriptor(pxr::SdfPath const &id)
{
  LOG(INFO) << "RenderTaskDelegate::GetRenderBufferDescriptor - " << id.GetAsString() << "\n";
  return buffer_descriptors[id];
}
/* HdSceneDelegate override: this render task only draws plain geometry. */
pxr::TfTokenVector RenderTaskDelegate::GetTaskRenderTags(pxr::SdfPath const &task_id)
{
  LOG(INFO) << "RenderTaskDelegate::GetTaskRenderTags - " << task_id.GetAsString() << "\n";
  return {pxr::HdRenderTagTokens->geometry};
}
bool RenderTaskDelegate::is_converged()
{
pxr::HdTaskSharedPtr renderTask = GetRenderIndex().GetTask(get_task_id());
return ((pxr::HdxRenderTask &)*renderTask).IsConverged();
}
/* Ensure a render buffer (Bprim) exists for `aov` and is bound to the render
 * task. Creates and binds the buffer on first request; when the descriptor
 * changed (e.g. viewport resize), updates it and re-marks the Bprim dirty. */
void RenderTaskDelegate::set_renderer_aov(pxr::TfToken const &aov)
{
  pxr::HdAovDescriptor aov_desc = GetRenderIndex().GetRenderDelegate()->GetDefaultAovDescriptor(
      aov);
  /* Buffer dimensions follow the current task viewport; depth is always 1. */
  pxr::HdRenderBufferDescriptor desc(
      pxr::GfVec3i(task_params.viewport[2], task_params.viewport[3], 1),
      aov_desc.format,
      aov_desc.multiSampled);
  pxr::SdfPath buffer_id = get_aov_id(aov);
  if (buffer_descriptors.find(buffer_id) == buffer_descriptors.end()) {
    /* First request: create the Bprim, remember its descriptor and attach it
     * to the task through an AOV binding. */
    GetRenderIndex().InsertBprim(pxr::HdPrimTypeTokens->renderBuffer, this, buffer_id);
    buffer_descriptors[buffer_id] = desc;
    GetRenderIndex().GetChangeTracker().MarkBprimDirty(buffer_id,
                                                       pxr::HdRenderBuffer::DirtyDescription);
    pxr::HdRenderPassAovBinding binding;
    binding.aovName = aov;
    binding.renderBufferId = buffer_id;
    binding.aovSettings = aov_desc.aovSettings;
    task_params.aovBindings.push_back(binding);
    /* The task reads aovBindings through its params, so mark those dirty. */
    GetRenderIndex().GetChangeTracker().MarkTaskDirty(get_task_id(),
                                                      pxr::HdChangeTracker::DirtyParams);
  }
  else if (buffer_descriptors[buffer_id] != desc) {
    /* Existing buffer with a stale descriptor: update and re-sync it. */
    buffer_descriptors[buffer_id] = desc;
    GetRenderIndex().GetChangeTracker().MarkBprimDirty(buffer_id,
                                                       pxr::HdRenderBuffer::DirtyDescription);
  }
}
/* Return the render buffer Bprim created for `aov` (null if the render index
 * holds no buffer with that id). */
pxr::HdRenderBuffer *RenderTaskDelegate::get_renderer_aov(pxr::TfToken const &aov)
{
  /* Use a named cast instead of the old C-style pointer cast; renderBuffer
   * Bprims are HdRenderBuffer instances, so the downcast is safe. */
  return static_cast<pxr::HdRenderBuffer *>(
      GetRenderIndex().GetBprim(pxr::HdPrimTypeTokens->renderBuffer, get_aov_id(aov)));
}
/* Copy the entire contents of the AOV's render buffer into `data`; the caller
 * must provide width * height * format-size bytes of storage. */
void RenderTaskDelegate::get_renderer_aov_data(pxr::TfToken const &aov, void *data)
{
  pxr::HdRenderBuffer *buffer = get_renderer_aov(aov);
  void *mapped = buffer->Map();
  const size_t num_bytes = buffer->GetWidth() * buffer->GetHeight() *
                           pxr::HdDataSizeOfFormat(buffer->GetFormat());
  memcpy(data, mapped, num_bytes);
  buffer->Unmap();
}
/* Shared pointer to the HdxRenderTask owned by the render index. */
pxr::HdTaskSharedPtr RenderTaskDelegate::get_task()
{
  return GetRenderIndex().GetTask(get_task_id());
}
/* Point the render task at `camera_id` with the given viewport; the task's
 * params are marked dirty only when either value actually changed. */
void RenderTaskDelegate::set_camera_and_viewport(pxr::SdfPath const &camera_id,
                                                 pxr::GfVec4d const &viewport)
{
  const bool unchanged = task_params.viewport == viewport && task_params.camera == camera_id;
  if (unchanged) {
    return;
  }
  task_params.viewport = viewport;
  task_params.camera = camera_id;
  GetRenderIndex().GetChangeTracker().MarkTaskDirty(get_task_id(),
                                                    pxr::HdChangeTracker::DirtyParams);
}
} // namespace blender::render::hydra

View File

@ -0,0 +1,37 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#pragma once
#include <pxr/imaging/hd/sceneDelegate.h>
#include <pxr/imaging/hdx/renderSetupTask.h>
namespace blender::render::hydra {
/* Scene delegate owning the Hydra render task and its AOV render buffers;
 * provides task params, the rprim collection and buffer descriptors to the
 * render index. */
class RenderTaskDelegate : public pxr::HdSceneDelegate {
 public:
  RenderTaskDelegate(pxr::HdRenderIndex *parent_index, pxr::SdfPath const &delegate_id);
  ~RenderTaskDelegate() override = default;

  /* Path of the render task inside this delegate. */
  pxr::SdfPath get_task_id() const;
  /* Path of the render buffer created for the given AOV. */
  pxr::SdfPath get_aov_id(pxr::TfToken const &aov) const;

  /* HdSceneDelegate overrides. Parameter names match the snake_case
   * definitions in the .cc file (were camelCase in the old declarations). */
  pxr::VtValue Get(pxr::SdfPath const &id, pxr::TfToken const &key) override;
  pxr::HdRenderBufferDescriptor GetRenderBufferDescriptor(pxr::SdfPath const &id) override;
  pxr::TfTokenVector GetTaskRenderTags(pxr::SdfPath const &task_id) override;

  /* True when the render task reports a converged image. */
  bool is_converged();
  /* Ensure a render buffer exists for `aov` and is bound to the task. */
  void set_renderer_aov(pxr::TfToken const &aov);
  pxr::HdRenderBuffer *get_renderer_aov(pxr::TfToken const &aov);
  /* Copy the AOV buffer contents into `data` (caller-allocated). */
  void get_renderer_aov_data(pxr::TfToken const &aov, void *data);
  pxr::HdTaskSharedPtr get_task();
  void set_camera_and_viewport(pxr::SdfPath const &camera_id, pxr::GfVec4d const &viewport);

 private:
  pxr::HdxRenderTaskParams task_params;
  pxr::TfHashMap<pxr::SdfPath, pxr::HdRenderBufferDescriptor, pxr::SdfPath::Hash>
      buffer_descriptors;
};
} // namespace blender::render::hydra

View File

@ -1,77 +0,0 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#include "mtlxHydraAdapter.h"
#include <pxr/base/arch/fileSystem.h>
#include <pxr/usd/ar/resolver.h>
#include <pxr/usd/ar/resolverContextBinder.h>
#include <pxr/usd/ar/resolverScopedCache.h>
#include <pxr/usd/usdMtlx/reader.h>
#include <pxr/usd/usdMtlx/utils.h>
#include <pxr/usd/usdShade/material.h>
#include <pxr/usd/usdShade/shader.h>
#include <pxr/usdImaging/usdImaging/materialParamUtils.h>
#include <pxr/imaging/hd/material.h>
#include <pxr/imaging/hd/tokens.h>
namespace mx = MaterialX;
PXR_NAMESPACE_OPEN_SCOPE
void HdMtlxConvertToMaterialNetworkMap(std::string const &mtlxPath,
TfTokenVector const &shaderSourceTypes,
TfTokenVector const &renderContexts,
HdMaterialNetworkMap *out)
{
if (mtlxPath.empty()) {
return;
}
std::string basePath = TfGetPathName(mtlxPath);
ArResolver &resolver = ArGetResolver();
const ArResolverContext context = resolver.CreateDefaultContextForAsset(mtlxPath);
ArResolverContextBinder binder(context);
ArResolverScopedCache resolverCache;
std::string mtlxName = TfGetBaseName(mtlxPath);
std::string stageId = TfStringPrintf(
"%s%s%s.usda", basePath.c_str(), ARCH_PATH_SEP, mtlxName.c_str());
UsdStageRefPtr stage = UsdStage::CreateInMemory(stageId, context);
try {
mx::DocumentPtr doc = UsdMtlxReadDocument(mtlxPath);
UsdMtlxRead(doc, stage);
}
catch (mx::ExceptionFoundCycle &x) {
TF_RUNTIME_ERROR("MaterialX cycle found: %s\n", x.what());
return;
}
catch (mx::Exception &x) {
TF_RUNTIME_ERROR("MaterialX error: %s\n", x.what());
return;
}
if (UsdPrim materials = stage->GetPrimAtPath(SdfPath("/MaterialX/Materials"))) {
if (UsdPrimSiblingRange children = materials.GetChildren()) {
if (auto material = UsdShadeMaterial(*children.begin())) {
if (UsdShadeShader mtlxSurface = material.ComputeSurfaceSource(renderContexts)) {
UsdImagingBuildHdMaterialNetworkFromTerminal(mtlxSurface.GetPrim(),
HdMaterialTerminalTokens->surface,
shaderSourceTypes,
renderContexts,
out,
UsdTimeCode::Default());
}
}
}
}
}
PXR_NAMESPACE_CLOSE_SCOPE

View File

@ -1,20 +0,0 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#pragma once
#include <pxr/base/tf/token.h>
#include <pxr/pxr.h>
#include <string>
PXR_NAMESPACE_OPEN_SCOPE
struct HdMaterialNetworkMap;
void HdMtlxConvertToMaterialNetworkMap(std::string const &mtlxPath,
TfTokenVector const &shaderSourceTypes,
TfTokenVector const &renderContexts,
HdMaterialNetworkMap *out);
PXR_NAMESPACE_CLOSE_SCOPE

View File

@ -6,19 +6,18 @@
#include "glog/logging.h"
#include "blenderSceneDelegate.h"
using namespace pxr;
using namespace std;
#include "blender_scene_delegate.h"
namespace blender::render::hydra {
BlenderSceneDelegate::BlenderSceneDelegate(HdRenderIndex* parentIndex, SdfPath const& delegateID, BlenderSceneDelegate::EngineType engine_type)
: HdSceneDelegate(parentIndex, delegateID)
, engine_type(engine_type)
, depsgraph(nullptr)
, context(nullptr)
, view3d(nullptr)
BlenderSceneDelegate::BlenderSceneDelegate(pxr::HdRenderIndex *parent_index,
pxr::SdfPath const &delegate_id,
BlenderSceneDelegate::EngineType engine_type)
: HdSceneDelegate(parent_index, delegate_id),
engine_type(engine_type),
depsgraph(nullptr),
context(nullptr),
view3d(nullptr)
{
}
@ -26,10 +25,10 @@ void BlenderSceneDelegate::set_material(MeshData &mesh_data)
{
Material *material = mesh_data.material();
if (!material) {
mesh_data.material_id = SdfPath::EmptyPath();
mesh_data.material_id = pxr::SdfPath::EmptyPath();
return;
}
SdfPath id = MaterialData::prim_id(this, material);
pxr::SdfPath id = MaterialData::prim_id(this, material);
MaterialData *mat_data = material_data(id);
if (!mat_data) {
materials[id] = MaterialData::init(this, material);
@ -45,7 +44,7 @@ void BlenderSceneDelegate::update_material(Material *material)
MaterialData *mat_data = material_data(MaterialData::prim_id(this, material));
if (mat_data) {
mat_data->export_mtlx();
mat_data->mark_prim_dirty(IdData::DirtyBits::AllDirty);
mat_data->mark_prim_dirty(IdData::DirtyBits::ALL_DIRTY);
}
}
@ -62,7 +61,7 @@ void BlenderSceneDelegate::update_world()
else {
if (world) {
world_data = WorldData::init(this, world, context);
world_data->mark_prim_dirty(IdData::DirtyBits::AllDirty);
world_data->mark_prim_dirty(IdData::DirtyBits::ALL_DIRTY);
}
else {
world_data->remove_prim();
@ -71,7 +70,7 @@ void BlenderSceneDelegate::update_world()
}
}
bool BlenderSceneDelegate::GetVisible(SdfPath const &id)
bool BlenderSceneDelegate::GetVisible(pxr::SdfPath const &id)
{
if (id == WorldData::prim_id(this)) {
return true;
@ -80,62 +79,68 @@ bool BlenderSceneDelegate::GetVisible(SdfPath const &id)
return object_data(id)->visible;
}
SdfPath BlenderSceneDelegate::GetInstancerId(SdfPath const &primId)
pxr::SdfPath BlenderSceneDelegate::GetInstancerId(pxr::SdfPath const &prim_id)
{
LOG(INFO) << "GetInstancerId: " << primId.GetAsString();
MeshData *m_data = mesh_data(primId);
LOG(INFO) << "GetInstancerId: " << prim_id.GetAsString();
MeshData *m_data = mesh_data(prim_id);
if (m_data) {
return m_data->instancer_id;
}
return SdfPath();
return pxr::SdfPath();
}
SdfPathVector BlenderSceneDelegate::GetInstancerPrototypes(SdfPath const &instancerId)
pxr::SdfPathVector BlenderSceneDelegate::GetInstancerPrototypes(pxr::SdfPath const &instancer_id)
{
LOG(INFO) << "GetInstancerPrototypes: " << instancerId.GetString();
SdfPathVector paths;
paths.push_back(instancerId.GetParentPath());
LOG(INFO) << "GetInstancerPrototypes: " << instancer_id.GetString();
pxr::SdfPathVector paths;
paths.push_back(instancer_id.GetParentPath());
return paths;
}
VtIntArray BlenderSceneDelegate::GetInstanceIndices(SdfPath const &instancerId,
SdfPath const &prototypeId)
pxr::VtIntArray BlenderSceneDelegate::GetInstanceIndices(pxr::SdfPath const &instancer_id,
pxr::SdfPath const &prototype_id)
{
LOG(INFO) << "GetInstanceIndices: " << instancerId.GetString() << " " << prototypeId.GetString();
MeshData *m_data = mesh_data(prototypeId);
VtIntArray ret = m_data->instance_indices();
LOG(INFO) << "GetInstanceIndices: " << instancer_id.GetString() << " "
<< prototype_id.GetString();
MeshData *m_data = mesh_data(prototype_id);
pxr::VtIntArray ret = m_data->instance_indices();
return ret;
}
GfMatrix4d BlenderSceneDelegate::GetInstancerTransform(SdfPath const &instancerId)
pxr::GfMatrix4d BlenderSceneDelegate::get_instancer_transform(pxr::SdfPath const &instancer_id)
{
LOG(INFO) << "GetInstancerTransform: " << instancerId.GetString();
LOG(INFO) << "get_instancer_transform: " << instancer_id.GetString();
// TODO: add a separate object for instancer for cleaner handling code
// Actual instancer transform is get here
return GfMatrix4d(1.0);
return pxr::GfMatrix4d(1.0);
}
size_t BlenderSceneDelegate::SampleInstancerTransform(SdfPath const &instancerId, size_t maxSampleCount,
float *sampleTimes, GfMatrix4d *sampleValues)
size_t BlenderSceneDelegate::SampleInstancerTransform(pxr::SdfPath const &instancer_id,
size_t max_sample_count,
float *sample_times,
pxr::GfMatrix4d *sample_values)
{
LOG(INFO) << "SampleInstancerTransform: " << instancerId.GetString();
LOG(INFO) << "SampleInstancerTransform: " << instancer_id.GetString();
size_t ret = 0;
MeshData *m_data = mesh_data(instancerId.GetParentPath());
ret = m_data->sample_instancer_transform(maxSampleCount, sampleTimes, sampleValues);
MeshData *m_data = mesh_data(instancer_id.GetParentPath());
ret = m_data->sample_instancer_transform(max_sample_count, sample_times, sample_values);
return ret;
}
size_t BlenderSceneDelegate::SamplePrimvar(SdfPath const &id, TfToken const &key, size_t maxSampleCount,
float *sampleTimes, VtValue *sampleValues)
size_t BlenderSceneDelegate::SamplePrimvar(pxr::SdfPath const &id,
pxr::TfToken const &key,
size_t max_sample_count,
float *sample_times,
pxr::VtValue *sample_values)
{
// TODO: add a separate object for instancer for cleaner handling code
if (id.GetName() == "Instancer") {
MeshData *m_data = mesh_data(id.GetParentPath());
if (m_data) {
return m_data->sample_instancer_primvar(key, maxSampleCount, sampleTimes, sampleValues);
return m_data->sample_instancer_primvar(key, max_sample_count, sample_times, sample_values);
}
}
return HdSceneDelegate::SamplePrimvar(id, key, maxSampleCount, sampleTimes, sampleValues);
return HdSceneDelegate::SamplePrimvar(id, key, max_sample_count, sample_times, sample_values);
}
void BlenderSceneDelegate::update_collection(bool remove, bool visibility)
@ -144,14 +149,14 @@ void BlenderSceneDelegate::update_collection(bool remove, bool visibility)
/* Check and update visibility */
for (auto &obj : objects) {
if (obj.second->update_visibility(view3d)) {
obj.second->mark_prim_dirty(IdData::DirtyBits::DirtyVisibility);
obj.second->mark_prim_dirty(IdData::DirtyBits::DIRTY_VISIBILITY);
};
}
}
/* Export of new visible objects which were not exported before */
set<SdfPath> available_objects;
SdfPath id;
std::set<pxr::SdfPath> available_objects;
pxr::SdfPath id;
DEGObjectIterSettings settings = {0};
settings.depsgraph = depsgraph;
@ -164,7 +169,9 @@ void BlenderSceneDelegate::update_collection(bool remove, bool visibility)
ITER_BEGIN (DEG_iterator_objects_begin,
DEG_iterator_objects_next,
DEG_iterator_objects_end,
&data, Object *, object) {
&data,
Object *,
object) {
if (!ObjectData::supported(object)) {
continue;
@ -198,7 +205,7 @@ void BlenderSceneDelegate::update_collection(bool remove, bool visibility)
}
/* remove unused materials */
set<SdfPath> available_materials;
std::set<pxr::SdfPath> available_materials;
for (auto &obj : objects) {
MeshData *m_data = dynamic_cast<MeshData *>(obj.second.get());
if (m_data && !m_data->material_id.IsEmpty()) {
@ -216,9 +223,12 @@ void BlenderSceneDelegate::update_collection(bool remove, bool visibility)
}
}
void BlenderSceneDelegate::add_update_object(Object *object, bool geometry, bool transform, bool shading)
void BlenderSceneDelegate::add_update_object(Object *object,
bool geometry,
bool transform,
bool shading)
{
SdfPath id = ObjectData::prim_id(this, object);
pxr::SdfPath id = ObjectData::prim_id(this, object);
ObjectData *obj_data = object_data(id);
if (!obj_data) {
objects[id] = ObjectData::init(this, object);
@ -240,22 +250,22 @@ void BlenderSceneDelegate::add_update_object(Object *object, bool geometry, bool
if (m_data) {
set_material(*m_data);
}
obj_data->mark_prim_dirty(IdData::DirtyBits::AllDirty);
obj_data->mark_prim_dirty(IdData::DirtyBits::ALL_DIRTY);
return;
}
if (transform) {
obj_data->mark_prim_dirty(IdData::DirtyBits::DirtyTransform);
obj_data->mark_prim_dirty(IdData::DirtyBits::DIRTY_TRANSFORM);
}
if (shading) {
obj_data->mark_prim_dirty(IdData::DirtyBits::DirtyMaterial);
obj_data->mark_prim_dirty(IdData::DirtyBits::DIRTY_MATERIAL);
}
}
void BlenderSceneDelegate::add_update_instance(DupliObject *dupli)
{
SdfPath id = ObjectData::prim_id(this, dupli->ob);
pxr::SdfPath id = ObjectData::prim_id(this, dupli->ob);
if (!object_data(id)) {
add_update_object(dupli->ob, true, true, true);
}
@ -264,7 +274,7 @@ void BlenderSceneDelegate::add_update_instance(DupliObject *dupli)
m_data->add_instance(dupli);
}
ObjectData *BlenderSceneDelegate::object_data(SdfPath const &id)
ObjectData *BlenderSceneDelegate::object_data(pxr::SdfPath const &id)
{
auto it = objects.find(id);
if (it == objects.end()) {
@ -273,17 +283,17 @@ ObjectData *BlenderSceneDelegate::object_data(SdfPath const &id)
return it->second.get();
}
MeshData *BlenderSceneDelegate::mesh_data(SdfPath const &id)
MeshData *BlenderSceneDelegate::mesh_data(pxr::SdfPath const &id)
{
return static_cast<MeshData *>(object_data(id));
}
LightData *BlenderSceneDelegate::light_data(SdfPath const &id)
LightData *BlenderSceneDelegate::light_data(pxr::SdfPath const &id)
{
return static_cast<LightData *>(object_data(id));
}
MaterialData *BlenderSceneDelegate::material_data(SdfPath const &id)
MaterialData *BlenderSceneDelegate::material_data(pxr::SdfPath const &id)
{
auto it = materials.find(id);
if (it == materials.end()) {
@ -317,10 +327,8 @@ void BlenderSceneDelegate::populate(Depsgraph *deps, bContext *cont)
DEGIDIterData data = {0};
data.graph = depsgraph;
data.only_updated = true;
ITER_BEGIN (DEG_iterator_ids_begin,
DEG_iterator_ids_next,
DEG_iterator_ids_end,
&data, ID *, id) {
ITER_BEGIN (
DEG_iterator_ids_begin, DEG_iterator_ids_next, DEG_iterator_ids_end, &data, ID *, id) {
transform = (id->recalc & ID_RECALC_TRANSFORM) != 0;
shading = (id->recalc & (ID_RECALC_SHADING | ID_RECALC_ANIMATION)) != 0;
@ -330,14 +338,13 @@ void BlenderSceneDelegate::populate(Depsgraph *deps, bContext *cont)
switch (GS(id->name)) {
case ID_OB: {
Object *object = (Object *)id;
if (!ObjectData::supported(object)) {
break;
}
geometry |= (((ID *)object->data)->recalc & ID_RECALC_GEOMETRY) != 0;
add_update_object(object, geometry, transform, shading);
Object *object = (Object *)id;
if (!ObjectData::supported(object)) {
break;
}
break;
geometry |= (((ID *)object->data)->recalc & ID_RECALC_GEOMETRY) != 0;
add_update_object(object, geometry, transform, shading);
} break;
case ID_MA:
if (shading) {
@ -383,14 +390,14 @@ void BlenderSceneDelegate::populate(Depsgraph *deps, bContext *cont)
}
}
HdMeshTopology BlenderSceneDelegate::GetMeshTopology(SdfPath const& id)
pxr::HdMeshTopology BlenderSceneDelegate::GetMeshTopology(pxr::SdfPath const &id)
{
LOG(INFO) << "GetMeshTopology: " << id.GetString();
MeshData *m_data = mesh_data(id);
return m_data->mesh_topology();
}
VtValue BlenderSceneDelegate::Get(SdfPath const& id, TfToken const& key)
pxr::VtValue BlenderSceneDelegate::Get(pxr::SdfPath const &id, pxr::TfToken const &key)
{
LOG(INFO) << "Get: " << id.GetString() << " " << key.GetString();
ObjectData *obj_data = object_data(id);
@ -406,10 +413,11 @@ VtValue BlenderSceneDelegate::Get(SdfPath const& id, TfToken const& key)
if (mat_data) {
return mat_data->get_data(key);
}
return VtValue();
return pxr::VtValue();
}
HdPrimvarDescriptorVector BlenderSceneDelegate::GetPrimvarDescriptors(SdfPath const& id, HdInterpolation interpolation)
pxr::HdPrimvarDescriptorVector BlenderSceneDelegate::GetPrimvarDescriptors(
pxr::SdfPath const &id, pxr::HdInterpolation interpolation)
{
LOG(INFO) << "GetPrimvarDescriptors: " << id.GetString() << " " << interpolation;
if (mesh_data(id)) {
@ -421,25 +429,25 @@ HdPrimvarDescriptorVector BlenderSceneDelegate::GetPrimvarDescriptors(SdfPath co
return data->instancer_primvar_descriptors(interpolation);
}
}
HdPrimvarDescriptorVector primvars;
pxr::HdPrimvarDescriptorVector primvars;
return primvars;
}
SdfPath BlenderSceneDelegate::GetMaterialId(SdfPath const & rprimId)
pxr::SdfPath BlenderSceneDelegate::GetMaterialId(pxr::SdfPath const &rprim_id)
{
return mesh_data(rprimId)->material_id;
return mesh_data(rprim_id)->material_id;
}
VtValue BlenderSceneDelegate::GetMaterialResource(SdfPath const& id)
pxr::VtValue BlenderSceneDelegate::GetMaterialResource(pxr::SdfPath const &id)
{
MaterialData *mat_data = material_data(id);
if (mat_data) {
return mat_data->material_resource();
}
return VtValue();
return pxr::VtValue();
}
GfMatrix4d BlenderSceneDelegate::GetTransform(SdfPath const& id)
pxr::GfMatrix4d BlenderSceneDelegate::GetTransform(pxr::SdfPath const &id)
{
LOG(INFO) << "GetTransform: " << id.GetString();
ObjectData *obj_data = object_data(id);
@ -455,10 +463,11 @@ GfMatrix4d BlenderSceneDelegate::GetTransform(SdfPath const& id)
if (id == WorldData::prim_id(this)) {
return world_data->transform();
}
return GfMatrix4d();
return pxr::GfMatrix4d();
}
VtValue BlenderSceneDelegate::GetLightParamValue(SdfPath const& id, TfToken const& key)
pxr::VtValue BlenderSceneDelegate::GetLightParamValue(pxr::SdfPath const &id,
pxr::TfToken const &key)
{
LightData *l_data = light_data(id);
if (l_data) {
@ -467,7 +476,7 @@ VtValue BlenderSceneDelegate::GetLightParamValue(SdfPath const& id, TfToken cons
if (id == WorldData::prim_id(this)) {
return world_data->get_data(key);
}
return VtValue();
return pxr::VtValue();
}
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -8,22 +8,20 @@
#include "BKE_context.h"
#include "DEG_depsgraph.h"
#include "object.h"
#include "mesh.h"
#include "light.h"
#include "mesh.h"
#include "object.h"
#include "world.h"
namespace blender::render::hydra {
class BlenderSceneDelegate : public pxr::HdSceneDelegate {
public:
enum class EngineType {
Viewport = 1,
Final,
Preview
};
public:
enum class EngineType { VIEWPORT = 1, FINAL, PREVIEW };
BlenderSceneDelegate(pxr::HdRenderIndex *render_index, pxr::SdfPath const &delegateId, BlenderSceneDelegate::EngineType engine_type);
BlenderSceneDelegate(pxr::HdRenderIndex *render_index,
pxr::SdfPath const &delegate_id,
BlenderSceneDelegate::EngineType engine_type);
~BlenderSceneDelegate() override = default;
void populate(Depsgraph *depsgraph, bContext *context);
@ -33,22 +31,29 @@ public:
pxr::GfMatrix4d GetTransform(pxr::SdfPath const &id) override;
pxr::VtValue Get(pxr::SdfPath const &id, pxr::TfToken const &key) override;
pxr::VtValue GetLightParamValue(pxr::SdfPath const &id, pxr::TfToken const &key) override;
pxr::HdPrimvarDescriptorVector GetPrimvarDescriptors(pxr::SdfPath const &id, pxr::HdInterpolation interpolation) override;
pxr::SdfPath GetMaterialId(pxr::SdfPath const &rprimId) override;
pxr::VtValue GetMaterialResource(pxr::SdfPath const &materialId) override;
pxr::HdPrimvarDescriptorVector GetPrimvarDescriptors(
pxr::SdfPath const &id, pxr::HdInterpolation interpolation) override;
pxr::SdfPath GetMaterialId(pxr::SdfPath const &rprim_id) override;
pxr::VtValue GetMaterialResource(pxr::SdfPath const &material_id) override;
bool GetVisible(pxr::SdfPath const &id) override;
pxr::SdfPath GetInstancerId(pxr::SdfPath const &primId) override;
pxr::SdfPathVector GetInstancerPrototypes(pxr::SdfPath const &instancerId) override;
pxr::VtIntArray GetInstanceIndices(pxr::SdfPath const &instancerId, pxr::SdfPath const &prototypeId) override;
pxr::GfMatrix4d GetInstancerTransform(pxr::SdfPath const &instancerId);
size_t SampleInstancerTransform(pxr::SdfPath const &instancerId, size_t maxSampleCount,
float *sampleTimes, pxr::GfMatrix4d *sampleValues) override;
size_t SamplePrimvar(pxr::SdfPath const &id, pxr::TfToken const &key, size_t maxSampleCount,
float *sampleTimes, pxr::VtValue *sampleValues) override;
pxr::SdfPath GetInstancerId(pxr::SdfPath const &prim_id) override;
pxr::SdfPathVector GetInstancerPrototypes(pxr::SdfPath const &instancer_id) override;
pxr::VtIntArray GetInstanceIndices(pxr::SdfPath const &instancer_id,
pxr::SdfPath const &prototype_id) override;
pxr::GfMatrix4d get_instancer_transform(pxr::SdfPath const &instancer_id);
size_t SampleInstancerTransform(pxr::SdfPath const &instancer_id,
size_t max_sample_count,
float *sample_times,
pxr::GfMatrix4d *sample_values) override;
size_t SamplePrimvar(pxr::SdfPath const &id,
pxr::TfToken const &key,
size_t max_sample_count,
float *sample_times,
pxr::VtValue *sample_values) override;
EngineType engine_type;
private:
private:
ObjectData *object_data(pxr::SdfPath const &id);
MeshData *mesh_data(pxr::SdfPath const &id);
LightData *light_data(pxr::SdfPath const &id);
@ -61,7 +66,7 @@ private:
void update_world();
void update_collection(bool remove, bool visibility);
private:
private:
Depsgraph *depsgraph;
bContext *context;
View3D *view3d;
@ -71,4 +76,4 @@ private:
std::unique_ptr<WorldData> world_data;
};
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -3,16 +3,13 @@
#include "BKE_lib_id.h"
#include "blenderSceneDelegate.h"
#include "blender_scene_delegate.h"
#include "id.h"
using namespace pxr;
namespace blender::render::hydra {
IdData::IdData(BlenderSceneDelegate *scene_delegate, ID *id)
: scene_delegate(scene_delegate)
, id(id)
: scene_delegate(scene_delegate), id(id)
{
}
@ -23,9 +20,9 @@ std::string IdData::name()
return str;
}
VtValue IdData::get_data(TfToken const &key)
pxr::VtValue IdData::get_data(pxr::TfToken const &key)
{
return VtValue();
return pxr::VtValue();
}
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -3,8 +3,8 @@
#pragma once
#include <pxr/base/vt/value.h>
#include <pxr/base/tf/token.h>
#include <pxr/base/vt/value.h>
#include <pxr/imaging/hd/sceneDelegate.h>
#include "DNA_ID.h"
@ -22,12 +22,7 @@ class IdData {
virtual pxr::VtValue get_data(pxr::TfToken const &key);
template<class T> const T get_data(pxr::TfToken const &key);
enum class DirtyBits {
DirtyTransform = 1,
DirtyVisibility,
DirtyMaterial,
AllDirty
};
enum class DirtyBits { DIRTY_TRANSFORM = 1, DIRTY_VISIBILITY, DIRTY_MATERIAL, ALL_DIRTY };
virtual void insert_prim() = 0;
virtual void remove_prim() = 0;
@ -43,4 +38,4 @@ template<class T> const T IdData::get_data(pxr::TfToken const &key)
return get_data(key).Get<T>();
}
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -12,63 +12,61 @@
#include "BKE_light.h"
#include "DNA_light_types.h"
#include "blenderSceneDelegate.h"
#include "blender_scene_delegate.h"
#include "light.h"
using namespace pxr;
using namespace boost::algorithm;
namespace blender::render::hydra {
LightData::LightData(BlenderSceneDelegate *scene_delegate, Object *object)
: ObjectData(scene_delegate, object)
: ObjectData(scene_delegate, object)
{
Light *light = (Light *)((Object *)id)->data;
data[HdLightTokens->intensity] = scene_delegate->engine_type == BlenderSceneDelegate::EngineType::Preview
? light->energy / 1000
: light->energy;
data[pxr::HdLightTokens->intensity] = scene_delegate->engine_type ==
BlenderSceneDelegate::EngineType::PREVIEW ?
light->energy / 1000 :
light->energy;
data[HdLightTokens->color] = GfVec3f(light->r, light->g, light->b);
data[pxr::HdLightTokens->color] = pxr::GfVec3f(light->r, light->g, light->b);
switch (light->type) {
case LA_LOCAL:
data[HdLightTokens->radius] = light->area_size / 2;
data[pxr::HdLightTokens->radius] = light->area_size / 2;
break;
case LA_SUN:
data[HdLightTokens->angle] = light->sun_angle * 180.0 / M_PI;
data[pxr::HdLightTokens->angle] = light->sun_angle * 180.0 / M_PI;
break;
case LA_SPOT:
data[HdLightTokens->shapingConeAngle] = light->spotsize / 2;
data[HdLightTokens->shapingConeSoftness] = light->spotblend;
data[UsdLuxTokens->treatAsPoint] = true;
data[pxr::HdLightTokens->shapingConeAngle] = light->spotsize / 2;
data[pxr::HdLightTokens->shapingConeSoftness] = light->spotblend;
data[pxr::UsdLuxTokens->treatAsPoint] = true;
break;
case LA_AREA:
switch (light->area_shape) {
case LA_AREA_SQUARE:
data[HdLightTokens->width] = light->area_size;
data[HdLightTokens->height] = light->area_size;
data[pxr::HdLightTokens->width] = light->area_size;
data[pxr::HdLightTokens->height] = light->area_size;
break;
case LA_AREA_RECT:
data[HdLightTokens->width] = light->area_size;
data[HdLightTokens->height] = light->area_sizey;
data[pxr::HdLightTokens->width] = light->area_size;
data[pxr::HdLightTokens->height] = light->area_sizey;
break;
case LA_AREA_DISK:
data[HdLightTokens->radius] = light->area_size / 2;
data[pxr::HdLightTokens->radius] = light->area_size / 2;
break;
case LA_AREA_ELLIPSE:
data[HdLightTokens->radius] = (light->area_size + light->area_sizey) / 4;
data[pxr::HdLightTokens->radius] = (light->area_size + light->area_sizey) / 4;
break;
default:
break;
}
data[HdLightTokens->normalize] = true;
data[pxr::HdLightTokens->normalize] = true;
break;
default:
@ -76,59 +74,59 @@ LightData::LightData(BlenderSceneDelegate *scene_delegate, Object *object)
}
/* TODO: temporary value, it should be delivered through Python UI */
data[HdLightTokens->exposure] = 1.0f;
data[pxr::HdLightTokens->exposure] = 1.0f;
}
pxr::TfToken LightData::prim_type()
{
Light *light = (Light *)((Object *)id)->data;
TfToken ret;
pxr::TfToken ret;
switch (light->type) {
case LA_LOCAL:
case LA_SPOT:
ret = HdPrimTypeTokens->sphereLight;
ret = pxr::HdPrimTypeTokens->sphereLight;
break;
case LA_SUN:
ret = HdPrimTypeTokens->distantLight;
ret = pxr::HdPrimTypeTokens->distantLight;
break;
case LA_AREA:
switch (light->area_shape) {
case LA_AREA_SQUARE:
case LA_AREA_RECT:
ret = HdPrimTypeTokens->rectLight;
ret = pxr::HdPrimTypeTokens->rectLight;
break;
case LA_AREA_DISK:
case LA_AREA_ELLIPSE:
ret = HdPrimTypeTokens->diskLight;
ret = pxr::HdPrimTypeTokens->diskLight;
break;
default:
ret = HdPrimTypeTokens->rectLight;
ret = pxr::HdPrimTypeTokens->rectLight;
}
break;
default:
ret = HdPrimTypeTokens->sphereLight;
ret = pxr::HdPrimTypeTokens->sphereLight;
}
return ret;
}
VtValue LightData::get_data(TfToken const &key)
pxr::VtValue LightData::get_data(pxr::TfToken const &key)
{
LOG(INFO) << "Get data light: " << name() << " [" << key.GetString() << "]";
VtValue ret;
pxr::VtValue ret;
auto it = data.find(key);
if (it != data.end()) {
ret = it->second;
}
else {
std::string n = key.GetString();
if (contains(n, "object:visibility:")) {
if (ends_with(n, "camera") || ends_with(n, "shadow")) {
if (boost::algorithm::contains(n, "object:visibility:")) {
if (boost::algorithm::ends_with(n, "camera") || boost::algorithm::ends_with(n, "shadow")) {
ret = false;
}
else {
@ -141,14 +139,14 @@ VtValue LightData::get_data(TfToken const &key)
void LightData::insert_prim()
{
SdfPath p_id = prim_id(scene_delegate, (Object *)id);
pxr::SdfPath p_id = prim_id(scene_delegate, (Object *)id);
scene_delegate->GetRenderIndex().InsertSprim(prim_type(), scene_delegate, p_id);
LOG(INFO) << "Add light: " << name() << " id=" << p_id.GetAsString();
}
void LightData::remove_prim()
{
SdfPath p_id = prim_id(scene_delegate, (Object *)id);
pxr::SdfPath p_id = prim_id(scene_delegate, (Object *)id);
scene_delegate->GetRenderIndex().RemoveSprim(prim_type(), p_id);
LOG(INFO) << "Remove light: " << name();
}
@ -157,23 +155,23 @@ void LightData::mark_prim_dirty(DirtyBits dirty_bits)
{
/* TODO: prim_type was changed we have to do remove..add light */
HdDirtyBits bits = HdLight::Clean;
pxr::HdDirtyBits bits = pxr::HdLight::Clean;
switch (dirty_bits) {
case DirtyBits::DirtyTransform:
bits = HdLight::DirtyTransform;
case DirtyBits::DIRTY_TRANSFORM:
bits = pxr::HdLight::DirtyTransform;
break;
case DirtyBits::DirtyVisibility:
bits = HdLight::DirtyParams;
case DirtyBits::DIRTY_VISIBILITY:
bits = pxr::HdLight::DirtyParams;
break;
case DirtyBits::AllDirty:
bits = HdLight::AllDirty;
case DirtyBits::ALL_DIRTY:
bits = pxr::HdLight::AllDirty;
break;
default:
break;
}
SdfPath p_id = prim_id(scene_delegate, (Object *)id);
pxr::SdfPath p_id = prim_id(scene_delegate, (Object *)id);
scene_delegate->GetRenderIndex().GetChangeTracker().MarkSprimDirty(p_id, bits);
LOG(INFO) << "Update light: " << name() << " [" << (int)dirty_bits << "]";
}
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -3,15 +3,15 @@
#pragma once
#include "pxr/base/tf/hashmap.h"
#include <pxr/usd/sdf/assetPath.h>
#include <pxr/usd/sdf/path.h>
#include "pxr/base/tf/hashmap.h"
#include "object.h"
namespace blender::render::hydra {
class LightData: public ObjectData {
class LightData : public ObjectData {
public:
LightData(BlenderSceneDelegate *scene_delegate, Object *object);
@ -25,4 +25,4 @@ class LightData: public ObjectData {
pxr::TfToken prim_type();
};
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -3,24 +3,23 @@
#include <Python.h>
#include <pxr/imaging/hd/tokens.h>
#include <pxr/imaging/hd/material.h>
#include <pxr/imaging/hd/renderDelegate.h>
#include <pxr/imaging/hd/tokens.h>
#include "glog/logging.h"
#include "BKE_material.h"
#include "BKE_lib_id.h"
#include "BKE_material.h"
#include "blenderSceneDelegate.h"
#include "blender_scene_delegate.h"
#include "material.h"
#include "mtlxHydraAdapter.h"
using namespace pxr;
#include "mtlx_hydra_adapter.h"
namespace blender::render::hydra {
std::unique_ptr<MaterialData> MaterialData::init(BlenderSceneDelegate *scene_delegate, Material *material)
std::unique_ptr<MaterialData> MaterialData::init(BlenderSceneDelegate *scene_delegate,
Material *material)
{
return std::make_unique<MaterialData>(scene_delegate, material);
}
@ -35,13 +34,13 @@ pxr::SdfPath MaterialData::prim_id(BlenderSceneDelegate *scene_delegate, Materia
}
MaterialData::MaterialData(BlenderSceneDelegate *scene_delegate, Material *material)
: IdData(scene_delegate, (ID *)material)
: IdData(scene_delegate, (ID *)material)
{
}
VtValue MaterialData::get_data(TfToken const &key)
pxr::VtValue MaterialData::get_data(pxr::TfToken const &key)
{
VtValue ret;
pxr::VtValue ret;
if (key.GetString() == "MaterialXFilename") {
if (!mtlx_path.GetResolvedPath().empty()) {
ret = mtlx_path;
@ -54,14 +53,14 @@ pxr::VtValue MaterialData::material_resource()
{
std::string const &path = mtlx_path.GetResolvedPath();
if (!path.empty()) {
HdRenderDelegate *render_delegate = scene_delegate->GetRenderIndex().GetRenderDelegate();
TfTokenVector shader_source_types = render_delegate->GetShaderSourceTypes();
TfTokenVector render_contexts = render_delegate->GetMaterialRenderContexts();
pxr::HdRenderDelegate *render_delegate = scene_delegate->GetRenderIndex().GetRenderDelegate();
pxr::TfTokenVector shader_source_types = render_delegate->GetShaderSourceTypes();
pxr::TfTokenVector render_contexts = render_delegate->GetMaterialRenderContexts();
HdMaterialNetworkMap material_network_map;
pxr::HdMaterialNetworkMap material_network_map;
HdMtlxConvertToMaterialNetworkMap(
path, shader_source_types, render_contexts, &material_network_map);
return VtValue(material_network_map);
return pxr::VtValue(material_network_map);
}
return pxr::VtValue();
@ -87,37 +86,38 @@ void MaterialData::export_mtlx()
PyGILState_Release(gstate);
mtlx_path = SdfAssetPath(path, path);
mtlx_path = pxr::SdfAssetPath(path, path);
LOG(INFO) << "Material export: " << name() << " mtlx=" << mtlx_path.GetResolvedPath();
}
void MaterialData::insert_prim()
{
SdfPath p_id = prim_id(scene_delegate, (Material *)id);
scene_delegate->GetRenderIndex().InsertSprim(HdPrimTypeTokens->material, scene_delegate, p_id);
pxr::SdfPath p_id = prim_id(scene_delegate, (Material *)id);
scene_delegate->GetRenderIndex().InsertSprim(
pxr::HdPrimTypeTokens->material, scene_delegate, p_id);
LOG(INFO) << "Add material: " << name() << " id=" << p_id.GetAsString();
}
void MaterialData::remove_prim()
{
SdfPath p_id = prim_id(scene_delegate, (Material *)id);
scene_delegate->GetRenderIndex().RemoveSprim(HdPrimTypeTokens->material, p_id);
pxr::SdfPath p_id = prim_id(scene_delegate, (Material *)id);
scene_delegate->GetRenderIndex().RemoveSprim(pxr::HdPrimTypeTokens->material, p_id);
LOG(INFO) << "Remove material: " << name();
}
void MaterialData::mark_prim_dirty(DirtyBits dirty_bits)
{
HdDirtyBits bits = HdMaterial::Clean;
pxr::HdDirtyBits bits = pxr::HdMaterial::Clean;
switch (dirty_bits) {
case DirtyBits::AllDirty:
bits = HdMaterial::AllDirty;
case DirtyBits::ALL_DIRTY:
bits = pxr::HdMaterial::AllDirty;
break;
default:
break;
}
SdfPath p_id = prim_id(scene_delegate, (Material *)id);
pxr::SdfPath p_id = prim_id(scene_delegate, (Material *)id);
scene_delegate->GetRenderIndex().GetChangeTracker().MarkSprimDirty(p_id, bits);
LOG(INFO) << "Update material: " << name() << ", mtlx=" << mtlx_path.GetResolvedPath();
}
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -3,9 +3,9 @@
#pragma once
#include "pxr/base/tf/hashmap.h"
#include <pxr/usd/sdf/assetPath.h>
#include <pxr/usd/sdf/path.h>
#include "pxr/base/tf/hashmap.h"
#include "DNA_material_types.h"
@ -13,9 +13,10 @@
namespace blender::render::hydra {
class MaterialData: IdData {
class MaterialData : IdData {
public:
static std::unique_ptr<MaterialData> init(BlenderSceneDelegate *scene_delegate, Material *material);
static std::unique_ptr<MaterialData> init(BlenderSceneDelegate *scene_delegate,
Material *material);
static pxr::SdfPath prim_id(BlenderSceneDelegate *scene_delegate, Material *material);
MaterialData(BlenderSceneDelegate *scene_delegate, Material *material);
@ -28,10 +29,11 @@ class MaterialData: IdData {
pxr::VtValue material_resource();
void export_mtlx();
private:
private:
pxr::SdfAssetPath mtlx_path;
};
using MaterialDataMap = pxr::TfHashMap<pxr::SdfPath, std::unique_ptr<MaterialData>, pxr::SdfPath::Hash>;
using MaterialDataMap =
pxr::TfHashMap<pxr::SdfPath, std::unique_ptr<MaterialData>, pxr::SdfPath::Hash>;
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -6,21 +6,19 @@
#include "glog/logging.h"
#include "BKE_material.h"
#include "BKE_mesh.h"
#include "BKE_mesh_runtime.h"
#include "BKE_object.h"
#include "BKE_material.h"
#include "blenderSceneDelegate.h"
#include "mesh.h"
#include "../utils.h"
using namespace pxr;
#include "blender_scene_delegate.h"
#include "mesh.h"
namespace blender::render::hydra {
MeshData::MeshData(BlenderSceneDelegate *scene_delegate, Object *object)
: ObjectData(scene_delegate, object)
: ObjectData(scene_delegate, object)
{
if (object->type == OB_MESH && object->mode == OB_MODE_OBJECT &&
BLI_listbase_is_empty(&object->modifiers)) {
@ -35,19 +33,19 @@ MeshData::MeshData(BlenderSceneDelegate *scene_delegate, Object *object)
}
}
VtValue MeshData::get_data(TfToken const &key)
pxr::VtValue MeshData::get_data(pxr::TfToken const &key)
{
VtValue ret;
if (key == HdTokens->points) {
pxr::VtValue ret;
if (key == pxr::HdTokens->points) {
ret = vertices;
}
else if (key == HdTokens->normals) {
else if (key == pxr::HdTokens->normals) {
ret = normals;
}
else if (key == HdPrimvarRoleTokens->textureCoordinate) {
else if (key == pxr::HdPrimvarRoleTokens->textureCoordinate) {
ret = uvs;
}
else if (key == HdInstancerTokens->instanceTransform) {
else if (key == pxr::HdInstancerTokens->instanceTransform) {
ret = instances;
}
return ret;
@ -62,64 +60,74 @@ Material *MeshData::material()
return BKE_object_material_get_eval(object, object->actcol);
}
HdMeshTopology MeshData::mesh_topology()
pxr::HdMeshTopology MeshData::mesh_topology()
{
return HdMeshTopology(PxOsdOpenSubdivTokens->catmullClark, HdTokens->rightHanded,
face_vertex_counts, face_vertex_indices);
return pxr::HdMeshTopology(pxr::PxOsdOpenSubdivTokens->catmullClark,
pxr::HdTokens->rightHanded,
face_vertex_counts,
face_vertex_indices);
}
HdPrimvarDescriptorVector MeshData::primvar_descriptors(HdInterpolation interpolation)
pxr::HdPrimvarDescriptorVector MeshData::primvar_descriptors(pxr::HdInterpolation interpolation)
{
HdPrimvarDescriptorVector primvars;
if (interpolation == HdInterpolationVertex) {
pxr::HdPrimvarDescriptorVector primvars;
if (interpolation == pxr::HdInterpolationVertex) {
if (!vertices.empty()) {
primvars.emplace_back(HdTokens->points, interpolation, HdPrimvarRoleTokens->point);
primvars.emplace_back(pxr::HdTokens->points, interpolation, pxr::HdPrimvarRoleTokens->point);
}
}
else if (interpolation == HdInterpolationFaceVarying) {
else if (interpolation == pxr::HdInterpolationFaceVarying) {
if (!normals.empty()) {
primvars.emplace_back(HdTokens->normals, interpolation, HdPrimvarRoleTokens->normal);
primvars.emplace_back(
pxr::HdTokens->normals, interpolation, pxr::HdPrimvarRoleTokens->normal);
}
if (!uvs.empty()) {
primvars.emplace_back(HdPrimvarRoleTokens->textureCoordinate, interpolation,
HdPrimvarRoleTokens->textureCoordinate);
primvars.emplace_back(pxr::HdPrimvarRoleTokens->textureCoordinate,
interpolation,
pxr::HdPrimvarRoleTokens->textureCoordinate);
}
}
return primvars;
}
HdPrimvarDescriptorVector MeshData::instancer_primvar_descriptors(HdInterpolation interpolation)
pxr::HdPrimvarDescriptorVector MeshData::instancer_primvar_descriptors(
pxr::HdInterpolation interpolation)
{
HdPrimvarDescriptorVector primvars;
if (interpolation == HdInterpolationInstance) {
primvars.emplace_back(HdInstancerTokens->instanceTransform, interpolation,
HdPrimvarRoleTokens->none);
pxr::HdPrimvarDescriptorVector primvars;
if (interpolation == pxr::HdInterpolationInstance) {
primvars.emplace_back(
pxr::HdInstancerTokens->instanceTransform, interpolation, pxr::HdPrimvarRoleTokens->none);
}
return primvars;
}
VtIntArray MeshData::instance_indices()
pxr::VtIntArray MeshData::instance_indices()
{
VtIntArray ret(instances.size());
pxr::VtIntArray ret(instances.size());
for (size_t i = 0; i < ret.size(); ++i) {
ret[i] = i;
}
return ret;
}
size_t MeshData::sample_instancer_transform(size_t maxSampleCount, float *sampleTimes, GfMatrix4d *sampleValues)
size_t MeshData::sample_instancer_transform(size_t max_sample_count,
float *sample_times,
pxr::GfMatrix4d *sample_values)
{
*sampleTimes = 0.0f;
*sampleValues = GfMatrix4d(1.0);
*sample_times = 0.0f;
*sample_values = pxr::GfMatrix4d(1.0);
return 1;
}
size_t MeshData::sample_instancer_primvar(TfToken const &key, size_t maxSampleCount, float *sampleTimes, VtValue *sampleValues)
size_t MeshData::sample_instancer_primvar(pxr::TfToken const &key,
size_t max_sample_count,
float *sample_times,
pxr::VtValue *sample_values)
{
if (key == HdInstancerTokens->instanceTransform) {
if (maxSampleCount > 0) {
sampleTimes[0] = 0.0f;
sampleValues[0] = instances;
if (key == pxr::HdInstancerTokens->instanceTransform) {
if (max_sample_count > 0) {
sample_times[0] = 0.0f;
sample_values[0] = instances;
return 1;
}
}
@ -134,8 +142,9 @@ void MeshData::add_instance(DupliObject *dupli)
LOG(INFO) << "Add instancer: " << name() << " id=" << instancer_id.GetAsString();
}
if (instances.empty()) {
// USD hides the prototype mesh when instancing in contrary to the Blender, so we must add it back implicitly
instances.push_back(GfMatrix4d(1.0));
// USD hides the prototype mesh when instancing in contrary to the Blender, so we must add it
// back implicitly
instances.push_back(pxr::GfMatrix4d(1.0));
}
instances.push_back(transform().GetInverse() * gf_matrix_from_transform(dupli->mat));
LOG(INFO) << "Add instance: " << instancer_id.GetAsString() << " " << dupli->random_id;
@ -152,7 +161,7 @@ void MeshData::set_mesh(Mesh *mesh)
blender::Span<MLoopTri> loopTris = mesh->looptris();
/* face_vertex_counts */
face_vertex_counts = VtIntArray(tris_len, 3);
face_vertex_counts = pxr::VtIntArray(tris_len, 3);
/* face_vertex_indices */
blender::Span<MLoop> loops = mesh->loops();
@ -167,7 +176,7 @@ void MeshData::set_mesh(Mesh *mesh)
vertices.reserve(mesh->totvert);
blender::Span<blender::float3> verts = mesh->vert_positions();
for (blender::float3 v : verts) {
vertices.push_back(GfVec3f(v.x, v.y, v.z));
vertices.push_back(pxr::GfVec3f(v.x, v.y, v.z));
}
/* normals */
@ -175,9 +184,9 @@ void MeshData::set_mesh(Mesh *mesh)
if (lnors) {
normals.reserve(loopTris.size() * 3);
for (MLoopTri lt : loopTris) {
normals.push_back(GfVec3f(lnors[lt.tri[0]]));
normals.push_back(GfVec3f(lnors[lt.tri[1]]));
normals.push_back(GfVec3f(lnors[lt.tri[2]]));
normals.push_back(pxr::GfVec3f(lnors[lt.tri[0]]));
normals.push_back(pxr::GfVec3f(lnors[lt.tri[1]]));
normals.push_back(pxr::GfVec3f(lnors[lt.tri[2]]));
}
}
@ -186,9 +195,9 @@ void MeshData::set_mesh(Mesh *mesh)
if (luvs) {
uvs.reserve(loopTris.size() * 3);
for (MLoopTri lt : loopTris) {
uvs.push_back(GfVec2f(luvs[lt.tri[0]]));
uvs.push_back(GfVec2f(luvs[lt.tri[1]]));
uvs.push_back(GfVec2f(luvs[lt.tri[2]]));
uvs.push_back(pxr::GfVec2f(luvs[lt.tri[0]]));
uvs.push_back(pxr::GfVec2f(luvs[lt.tri[1]]));
uvs.push_back(pxr::GfVec2f(luvs[lt.tri[2]]));
}
}
}
@ -199,14 +208,14 @@ void MeshData::insert_prim()
return;
}
SdfPath p_id = prim_id(scene_delegate, (Object *)id);
scene_delegate->GetRenderIndex().InsertRprim(HdPrimTypeTokens->mesh, scene_delegate, p_id);
pxr::SdfPath p_id = prim_id(scene_delegate, (Object *)id);
scene_delegate->GetRenderIndex().InsertRprim(pxr::HdPrimTypeTokens->mesh, scene_delegate, p_id);
LOG(INFO) << "Add mesh: " << name() << " id=" << p_id.GetAsString();
}
void MeshData::remove_prim()
{
SdfPath p_id = prim_id(scene_delegate, (Object *)id);
pxr::SdfPath p_id = prim_id(scene_delegate, (Object *)id);
if (!scene_delegate->GetRenderIndex().HasRprim(p_id)) {
return;
}
@ -217,7 +226,7 @@ void MeshData::remove_prim()
void MeshData::mark_prim_dirty(DirtyBits dirty_bits)
{
SdfPath p_id = prim_id(scene_delegate, (Object *)id);
pxr::SdfPath p_id = prim_id(scene_delegate, (Object *)id);
if (!scene_delegate->GetRenderIndex().HasRprim(p_id)) {
/* Trying to insert prim */
insert_prim();
@ -230,19 +239,19 @@ void MeshData::mark_prim_dirty(DirtyBits dirty_bits)
return;
}
HdDirtyBits bits = HdChangeTracker::Clean;
pxr::HdDirtyBits bits = pxr::HdChangeTracker::Clean;
switch (dirty_bits) {
case DirtyBits::DirtyTransform:
bits = HdChangeTracker::DirtyTransform;
case DirtyBits::DIRTY_TRANSFORM:
bits = pxr::HdChangeTracker::DirtyTransform;
break;
case DirtyBits::DirtyVisibility:
bits = HdChangeTracker::DirtyVisibility;
case DirtyBits::DIRTY_VISIBILITY:
bits = pxr::HdChangeTracker::DirtyVisibility;
break;
case DirtyBits::DirtyMaterial:
bits = HdChangeTracker::DirtyMaterialId;
case DirtyBits::DIRTY_MATERIAL:
bits = pxr::HdChangeTracker::DirtyMaterialId;
break;
case DirtyBits::AllDirty:
bits = HdChangeTracker::AllDirty;
case DirtyBits::ALL_DIRTY:
bits = pxr::HdChangeTracker::AllDirty;
break;
default:
break;

View File

@ -12,8 +12,8 @@
namespace blender::render::hydra {
class MeshData: public ObjectData {
public:
class MeshData : public ObjectData {
public:
MeshData(BlenderSceneDelegate *scene_delegate, Object *object);
pxr::VtValue get_data(pxr::TfToken const &key) override;
@ -27,8 +27,13 @@ public:
pxr::HdPrimvarDescriptorVector primvar_descriptors(pxr::HdInterpolation interpolation);
pxr::HdPrimvarDescriptorVector instancer_primvar_descriptors(pxr::HdInterpolation interpolation);
pxr::VtIntArray instance_indices();
size_t sample_instancer_transform(size_t maxSampleCount, float *sampleTimes, pxr::GfMatrix4d *sampleValues);
size_t sample_instancer_primvar(pxr::TfToken const &key, size_t maxSampleCount, float *sampleTimes, pxr::VtValue *sampleValues);
size_t sample_instancer_transform(size_t max_sample_count,
float *sample_times,
pxr::GfMatrix4d *sample_values);
size_t sample_instancer_primvar(pxr::TfToken const &key,
size_t max_sample_count,
float *sample_times,
pxr::VtValue *sample_values);
void add_instance(DupliObject *dupli);
@ -47,6 +52,4 @@ public:
pxr::VtMatrix4dArray instances;
};
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -0,0 +1,79 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#include <pxr/base/arch/fileSystem.h>
#include <pxr/usd/ar/resolver.h>
#include <pxr/usd/ar/resolverContextBinder.h>
#include <pxr/usd/ar/resolverScopedCache.h>
#include <pxr/usd/usdMtlx/reader.h>
#include <pxr/usd/usdMtlx/utils.h>
#include <pxr/usd/usdShade/material.h>
#include <pxr/usd/usdShade/shader.h>
#include <pxr/usdImaging/usdImaging/materialParamUtils.h>
#include <pxr/imaging/hd/material.h>
#include <pxr/imaging/hd/tokens.h>
#include "mtlx_hydra_adapter.h"
namespace mx = MaterialX;
void HdMtlxConvertToMaterialNetworkMap(std::string const &mtlx_path,
pxr::TfTokenVector const &shader_source_types,
pxr::TfTokenVector const &render_contexts,
pxr::HdMaterialNetworkMap *out)
{
if (mtlx_path.empty()) {
return;
}
std::string basePath = pxr::TfGetPathName(mtlx_path);
pxr::ArResolver &resolver = pxr::ArGetResolver();
const pxr::ArResolverContext context = resolver.CreateDefaultContextForAsset(mtlx_path);
pxr::ArResolverContextBinder binder(context);
pxr::ArResolverScopedCache resolver_cache;
std::string mtlxName = pxr::TfGetBaseName(mtlx_path);
std::string stage_id = pxr::TfStringPrintf(
"%s%s%s.usda", basePath.c_str(), ARCH_PATH_SEP, mtlxName.c_str());
pxr::UsdStageRefPtr stage = pxr::UsdStage::CreateInMemory(stage_id, context);
try {
mx::DocumentPtr doc = pxr::UsdMtlxReadDocument(mtlx_path);
pxr::UsdMtlxRead(doc, stage);
}
catch (mx::ExceptionFoundCycle &x) {
Tf_PostErrorHelper(pxr::TF_CALL_CONTEXT,
pxr::TF_DIAGNOSTIC_RUNTIME_ERROR_TYPE,
"MaterialX cycle found: %s\n",
x.what());
return;
}
catch (mx::Exception &x) {
Tf_PostErrorHelper(pxr::TF_CALL_CONTEXT,
pxr::TF_DIAGNOSTIC_RUNTIME_ERROR_TYPE,
"MaterialX error: %s\n",
x.what());
return;
}
if (pxr::UsdPrim materials = stage->GetPrimAtPath(pxr::SdfPath("/MaterialX/Materials"))) {
if (pxr::UsdPrimSiblingRange children = materials.GetChildren()) {
if (auto material = pxr::UsdShadeMaterial(*children.begin())) {
if (pxr::UsdShadeShader mtlx_surface = material.ComputeSurfaceSource(render_contexts)) {
UsdImagingBuildHdMaterialNetworkFromTerminal(mtlx_surface.GetPrim(),
pxr::HdMaterialTerminalTokens->surface,
shader_source_types,
render_contexts,
out,
pxr::UsdTimeCode::Default());
}
}
}
}
}

View File

@ -0,0 +1,16 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#pragma once
#include <pxr/base/tf/token.h>
#include <pxr/pxr.h>

/* `struct pxr::HdMaterialNetworkMap;` was ill-formed: a qualified
 * elaborated-type-specifier cannot introduce a new type, and `pxr` is a
 * versioned namespace alias that cannot be reopened. Include the defining
 * header instead of forward-declaring. */
#include <pxr/imaging/hd/material.h>

#include <string>

/**
 * Convert the MaterialX document at \a mtlx_path into the Hydra material
 * network map \a out. See mtlx_hydra_adapter.cc for details.
 */
void HdMtlxConvertToMaterialNetworkMap(std::string const &mtlx_path,
                                       pxr::TfTokenVector const &shader_source_types,
                                       pxr::TfTokenVector const &render_contexts,
                                       pxr::HdMaterialNetworkMap *out);

View File

@ -3,13 +3,11 @@
#include "BKE_object.h"
#include "blenderSceneDelegate.h"
#include "object.h"
#include "mesh.h"
#include "light.h"
#include "../utils.h"
using namespace pxr;
#include "blender_scene_delegate.h"
#include "light.h"
#include "mesh.h"
#include "object.h"
namespace blender::render::hydra {
@ -60,8 +58,7 @@ pxr::SdfPath ObjectData::prim_id(BlenderSceneDelegate *scene_delegate, Object *o
}
ObjectData::ObjectData(BlenderSceneDelegate *scene_delegate, Object *object)
: IdData(scene_delegate, (ID *)object)
, visible(true)
: IdData(scene_delegate, (ID *)object), visible(true)
{
}
@ -70,7 +67,7 @@ int ObjectData::type()
return ((Object *)id)->type;
}
GfMatrix4d ObjectData::transform()
pxr::GfMatrix4d ObjectData::transform()
{
return gf_matrix_from_transform(((Object *)id)->object_to_world);
}

View File

@ -3,19 +3,19 @@
#pragma once
#include <pxr/base/gf/matrix4d.h>
#include "pxr/base/tf/hashmap.h"
#include <pxr/base/gf/matrix4d.h>
#include "DNA_object_types.h"
#include "BKE_layer.h"
#include "DNA_object_types.h"
#include "id.h"
#include "material.h"
namespace blender::render::hydra {
class ObjectData: public IdData {
public:
class ObjectData : public IdData {
public:
static bool supported(Object *object);
static std::unique_ptr<ObjectData> init(BlenderSceneDelegate *scene_delegate, Object *object);
static pxr::SdfPath prim_id(BlenderSceneDelegate *scene_delegate, Object *object);
@ -29,6 +29,7 @@ public:
bool visible;
};
using ObjectDataMap = pxr::TfHashMap<pxr::SdfPath, std::unique_ptr<ObjectData>, pxr::SdfPath::Hash>;
using ObjectDataMap =
pxr::TfHashMap<pxr::SdfPath, std::unique_ptr<ObjectData>, pxr::SdfPath::Hash>;
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -3,50 +3,49 @@
#include <filesystem>
#include <pxr/base/vt/array.h>
#include <pxr/base/gf/vec2f.h>
#include <pxr/base/gf/rotation.h>
#include <pxr/base/gf/vec2f.h>
#include <pxr/base/vt/array.h>
#include <pxr/imaging/hd/light.h>
#include <pxr/imaging/hd/tokens.h>
#include <pxr/imaging/hd/renderDelegate.h>
#include <pxr/imaging/hd/tokens.h>
#include <pxr/usd/usdLux/tokens.h>
#include "BKE_context.h"
#include "DNA_node_types.h"
#include "DNA_windowmanager_types.h"
#include "BKE_image.h"
#include "BKE_node.h"
#include "BKE_node_runtime.hh"
#include "BKE_image.h"
#include "NOD_shader.h"
#include "glog/logging.h"
#include "blenderSceneDelegate.h"
#include "world.h"
#include "../utils.h"
#include "blender_scene_delegate.h"
#include "world.h"
/* TODO: add custom tftoken "transparency"? */
using namespace pxr;
namespace blender::render::hydra {
std::unique_ptr<WorldData> WorldData::init(BlenderSceneDelegate *scene_delegate,
World *world, bContext *context)
World *world,
bContext *context)
{
return std::make_unique<WorldData>(scene_delegate, world, context);
}
SdfPath WorldData::prim_id(BlenderSceneDelegate *scene_delegate)
pxr::SdfPath WorldData::prim_id(BlenderSceneDelegate *scene_delegate)
{
return scene_delegate->GetDelegateID().AppendElementString("World");
}
WorldData::WorldData(BlenderSceneDelegate *scene_delegate, World *world, bContext *context)
: IdData(scene_delegate, (ID *)world)
: IdData(scene_delegate, (ID *)world)
{
data[UsdLuxTokens->orientToStageUpAxis] = true;
data[pxr::UsdLuxTokens->orientToStageUpAxis] = true;
if (world->use_nodes) {
/* TODO: Create nodes parsing system */
@ -68,9 +67,9 @@ WorldData::WorldData(BlenderSceneDelegate *scene_delegate, World *world, bContex
float const *strength = strength_input.default_value_typed<float>();
float const *color = color_input.default_value_typed<float>();
data[HdLightTokens->intensity] = strength[1];
data[HdLightTokens->exposure] = 1.0f;
data[HdLightTokens->color] = GfVec3f(color[0], color[1], color[2]);
data[pxr::HdLightTokens->intensity] = strength[1];
data[pxr::HdLightTokens->exposure] = 1.0f;
data[pxr::HdLightTokens->color] = pxr::GfVec3f(color[0], color[1], color[2]);
if (!color_input.directly_linked_links().is_empty()) {
bNode *color_input_node = color_input.directly_linked_links()[0]->fromnode;
@ -88,34 +87,37 @@ WorldData::WorldData(BlenderSceneDelegate *scene_delegate, World *world, bContex
std::string image_path = cache_image(bmain, scene, image, &tex->iuser, &opts, &reports);
if (!image_path.empty()) {
data[HdLightTokens->textureFile] = SdfAssetPath(image_path, image_path);
data[pxr::HdLightTokens->textureFile] = pxr::SdfAssetPath(image_path, image_path);
}
}
}
}
}
else {
data[HdLightTokens->intensity] = 1.0f;
data[HdLightTokens->exposure] = world->exposure;
data[HdLightTokens->color] = GfVec3f(world->horr, world->horg, world->horb);
data[pxr::HdLightTokens->intensity] = 1.0f;
data[pxr::HdLightTokens->exposure] = world->exposure;
data[pxr::HdLightTokens->color] = pxr::GfVec3f(world->horr, world->horg, world->horb);
}
}
GfMatrix4d WorldData::transform()
pxr::GfMatrix4d WorldData::transform()
{
GfMatrix4d transform = GfMatrix4d(GfRotation(GfVec3d(1.0, 0.0, 0.0), -90), GfVec3d());
pxr::GfMatrix4d transform = pxr::GfMatrix4d(pxr::GfRotation(pxr::GfVec3d(1.0, 0.0, 0.0), -90),
pxr::GfVec3d());
  /* TODO: do this check via RenderSettings. */
if (scene_delegate->GetRenderIndex().GetRenderDelegate()->GetRendererDisplayName() == "RPR") {
transform *= GfMatrix4d(GfRotation(GfVec3d(1.0, 0.0, 0.0), -180), GfVec3d());
transform *= GfMatrix4d(GfRotation(GfVec3d(0.0, 0.0, 1.0), 90.0), GfVec3d());
transform *= pxr::GfMatrix4d(pxr::GfRotation(pxr::GfVec3d(1.0, 0.0, 0.0), -180),
pxr::GfVec3d());
transform *= pxr::GfMatrix4d(pxr::GfRotation(pxr::GfVec3d(0.0, 0.0, 1.0), 90.0),
pxr::GfVec3d());
}
return transform;
}
VtValue WorldData::get_data(TfToken const &key)
pxr::VtValue WorldData::get_data(pxr::TfToken const &key)
{
VtValue ret;
pxr::VtValue ret;
auto it = data.find(key);
if (it != data.end()) {
ret = it->second;
@ -125,29 +127,30 @@ VtValue WorldData::get_data(TfToken const &key)
void WorldData::insert_prim()
{
SdfPath p_id = prim_id(scene_delegate);
scene_delegate->GetRenderIndex().InsertSprim(HdPrimTypeTokens->domeLight, scene_delegate, p_id);
pxr::SdfPath p_id = prim_id(scene_delegate);
scene_delegate->GetRenderIndex().InsertSprim(
pxr::HdPrimTypeTokens->domeLight, scene_delegate, p_id);
LOG(INFO) << "Add World: id=" << p_id.GetAsString();
}
void WorldData::remove_prim()
{
SdfPath p_id = prim_id(scene_delegate);
scene_delegate->GetRenderIndex().RemoveSprim(HdPrimTypeTokens->domeLight, p_id);
pxr::SdfPath p_id = prim_id(scene_delegate);
scene_delegate->GetRenderIndex().RemoveSprim(pxr::HdPrimTypeTokens->domeLight, p_id);
LOG(INFO) << "Remove World";
}
void WorldData::mark_prim_dirty(DirtyBits dirty_bits)
{
HdDirtyBits bits = HdLight::Clean;
pxr::HdDirtyBits bits = pxr::HdLight::Clean;
switch (dirty_bits) {
case DirtyBits::AllDirty:
bits = HdLight::AllDirty;
case DirtyBits::ALL_DIRTY:
bits = pxr::HdLight::AllDirty;
break;
default:
break;
}
SdfPath p_id = prim_id(scene_delegate);
pxr::SdfPath p_id = prim_id(scene_delegate);
scene_delegate->GetRenderIndex().GetChangeTracker().MarkSprimDirty(p_id, bits);
LOG(INFO) << "Update World";
}

View File

@ -5,11 +5,11 @@
#include <map>
#include "pxr/base/tf/staticTokens.h"
#include <pxr/base/gf/matrix4d.h>
#include <pxr/base/vt/value.h>
#include <pxr/usd/sdf/assetPath.h>
#include <pxr/usd/sdf/path.h>
#include <pxr/base/vt/value.h>
#include "pxr/base/tf/staticTokens.h"
#include "DNA_view3d_types.h"
#include "DNA_world_types.h"
@ -18,9 +18,11 @@
namespace blender::render::hydra {
class WorldData: public IdData {
public:
static std::unique_ptr<WorldData> init(BlenderSceneDelegate *scene_delegate, World *world, bContext *context);
class WorldData : public IdData {
public:
static std::unique_ptr<WorldData> init(BlenderSceneDelegate *scene_delegate,
World *world,
bContext *context);
static pxr::SdfPath prim_id(BlenderSceneDelegate *scene_delegate);
WorldData(BlenderSceneDelegate *scene_delegate, World *world, bContext *context);
@ -36,4 +38,4 @@ public:
std::map<pxr::TfToken, pxr::VtValue> data;
};
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -1,41 +0,0 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#include <pxr/imaging/hdx/simpleLightTask.h>
#include "simpleLightTaskDelegate.h"
namespace blender::render::hydra {
SimpleLightTaskDelegate::SimpleLightTaskDelegate(HdRenderIndex *parentIndex,
SdfPath const &delegateID)
: HdSceneDelegate(parentIndex, delegateID)
{
SdfPath taskId = GetTaskID();
GetRenderIndex().InsertTask<HdxSimpleLightTask>(this, taskId);
}
SdfPath SimpleLightTaskDelegate::GetTaskID() const
{
return GetDelegateID().AppendElementString("task");
}
HdTaskSharedPtr SimpleLightTaskDelegate::GetTask()
{
return GetRenderIndex().GetTask(GetTaskID());
}
void SimpleLightTaskDelegate::SetCameraPath(SdfPath const &cameraPath)
{
taskParams.cameraPath = cameraPath;
}
VtValue SimpleLightTaskDelegate::Get(SdfPath const &id, TfToken const &key)
{
if (key == HdTokens->params) {
return VtValue(taskParams);
}
return VtValue();
}
} // namespace blender::render::hydra

View File

@ -1,29 +0,0 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#pragma once
#include <pxr/imaging/hd/sceneDelegate.h>
#include <pxr/imaging/hdx/simpleLightTask.h>
using namespace pxr;
namespace blender::render::hydra {
class SimpleLightTaskDelegate : public HdSceneDelegate {
public:
SimpleLightTaskDelegate(HdRenderIndex *parentIndex, SdfPath const &delegateID);
~SimpleLightTaskDelegate() override = default;
SdfPath GetTaskID() const;
HdTaskSharedPtr GetTask();
void SetCameraPath(SdfPath const &);
VtValue Get(SdfPath const &id, TfToken const &key) override;
private:
HdxSimpleLightTaskParams taskParams;
};
} // namespace blender::render::hydra

View File

@ -0,0 +1,41 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#include <pxr/imaging/hdx/simpleLightTask.h>
#include "simple_light_task_delegate.h"
namespace blender::render::hydra {

/* Thin scene delegate that owns a single pxr::HdxSimpleLightTask: the task is
 * inserted into the parent render index on construction and its parameters
 * are served through Get(). */
SimpleLightTaskDelegate::SimpleLightTaskDelegate(pxr::HdRenderIndex *parent_index,
                                                 pxr::SdfPath const &delegate_id)
    : pxr::HdSceneDelegate(parent_index, delegate_id)
{
  pxr::SdfPath task_id = get_task_id();
  GetRenderIndex().InsertTask<pxr::HdxSimpleLightTask>(this, task_id);
}

/* Identifier of the single task prim: "<delegate_id>/task". */
pxr::SdfPath SimpleLightTaskDelegate::get_task_id() const
{
  return GetDelegateID().AppendElementString("task");
}

/* The task inserted by the constructor, fetched from the render index. */
pxr::HdTaskSharedPtr SimpleLightTaskDelegate::get_task()
{
  return GetRenderIndex().GetTask(get_task_id());
}

/* Set the camera used by the light task; takes effect the next time Hydra
 * pulls the task parameters via Get(). */
void SimpleLightTaskDelegate::set_camera_path(pxr::SdfPath const &camera_path)
{
  task_params.cameraPath = camera_path;
}

/* HdSceneDelegate override: only the task parameters are served here. */
pxr::VtValue SimpleLightTaskDelegate::Get(pxr::SdfPath const &id, pxr::TfToken const &key)
{
  if (key == pxr::HdTokens->params) {
    return pxr::VtValue(task_params);
  }
  return pxr::VtValue();
}

} // namespace blender::render::hydra

View File

@ -0,0 +1,27 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#pragma once
#include <pxr/imaging/hd/sceneDelegate.h>
#include <pxr/imaging/hdx/simpleLightTask.h>
namespace blender::render::hydra {

/* Scene delegate wrapping a single pxr::HdxSimpleLightTask; the task is
 * created in the constructor and its parameters are served via Get(). */
class SimpleLightTaskDelegate : public pxr::HdSceneDelegate {
 public:
  /* NOTE: parameter renamed `parentIndex` -> `parent_index` for consistency
   * with the definition in simple_light_task_delegate.cc and with this
   * code-style pass; declaration-only rename, no caller impact. */
  SimpleLightTaskDelegate(pxr::HdRenderIndex *parent_index, pxr::SdfPath const &delegate_id);
  ~SimpleLightTaskDelegate() override = default;

  /* Identifier of the task prim under this delegate. */
  pxr::SdfPath get_task_id() const;
  /* The task instance owned by the render index. */
  pxr::HdTaskSharedPtr get_task();
  /* Camera to use for light processing. */
  void set_camera_path(pxr::SdfPath const &);

  pxr::VtValue Get(pxr::SdfPath const &id, pxr::TfToken const &key) override;

 private:
  pxr::HdxSimpleLightTaskParams task_params;
};

} // namespace blender::render::hydra

View File

@ -1,71 +1,71 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
#include <sstream>
#include <chrono>
#include <filesystem>
#include <sstream>
#include <pxr/base/tf/stringUtils.h>
#include "BKE_appdir.h"
#include "BKE_image_save.h"
#include "BLI_string.h"
#include "BLI_path_util.h"
#include "BLI_string.h"
#include "DNA_camera_types.h"
#include "utils.h"
using namespace std;
using namespace pxr;
namespace blender::render::hydra {
GfMatrix4d gf_matrix_from_transform(float m[4][4])
pxr::GfMatrix4d gf_matrix_from_transform(float m[4][4])
{
return GfMatrix4d(
m[0][0], m[0][1], m[0][2], m[0][3],
m[1][0], m[1][1], m[1][2], m[1][3],
m[2][0], m[2][1], m[2][2], m[2][3],
m[3][0], m[3][1], m[3][2], m[3][3]);
pxr::GfMatrix4d ret = pxr::GfMatrix4d();
for (int i = 0; i < 4; i++) {
for (int j = 0; j < 4; j++) {
ret[i][j] = m[i][j];
}
}
return ret;
}
string format_duration(chrono::milliseconds millisecs)
std::string format_duration(std::chrono::milliseconds millisecs)
{
stringstream ss;
bool neg = millisecs < 0ms;
std::stringstream ss;
bool neg = millisecs < std::chrono::milliseconds(0);
if (neg) {
millisecs = -millisecs;
}
auto m = chrono::duration_cast<chrono::minutes>(millisecs);
auto m = std::chrono::duration_cast<std::chrono::minutes>(millisecs);
millisecs -= m;
auto s = chrono::duration_cast<chrono::seconds>(millisecs);
auto s = std::chrono::duration_cast<std::chrono::seconds>(millisecs);
millisecs -= s;
if (neg) {
ss << "-";
}
if (m < 10min) {
if (m < std::chrono::minutes(10)) {
ss << "0";
}
ss << to_string(m / 1min) << ":";
if (s < 10s) {
ss << std::to_string(m / std::chrono::minutes(1)) << ":";
if (s < std::chrono::seconds(10)) {
ss << "0";
}
ss << to_string(s / 1s) << ":";
if (millisecs < 10ms) {
ss << std::to_string(s / std::chrono::seconds(1)) << ":";
if (millisecs < std::chrono::milliseconds(10)) {
ss << "0";
}
ss << to_string(millisecs / 1ms / 10);
ss << std::to_string(millisecs / std::chrono::milliseconds(1) / 10);
return ss.str();
}
string cache_image(Main *bmain,
Scene *scene,
Image *image,
ImageUser *iuser,
ImageSaveOptions *opts,
ReportList *reports)
std::string cache_image(Main *bmain,
Scene *scene,
Image *image,
ImageUser *iuser,
ImageSaveOptions *opts,
ReportList *reports)
{
const string default_format = ".png";
const std::string default_format = ".png";
char tempfile[FILE_MAX];
@ -74,15 +74,15 @@ string cache_image(Main *bmain,
return "";
}
string image_name;
std::string image_name;
if (image->source == IMA_SRC_GENERATED) {
image_name = TfMakeValidIdentifier(image_name.append(image->id.name + 2));
image_name = pxr::TfMakeValidIdentifier(image_name.append(image->id.name + 2));
}
else {
image_name = image->filepath == NULL ? image->filepath : image->id.name + 2;
image_name = std::filesystem::path(image_name).filename().replace_extension().string();
image_name = TfMakeValidIdentifier(image_name);
image_name = pxr::TfMakeValidIdentifier(image_name);
}
image_name.append(default_format);
@ -99,20 +99,21 @@ string cache_image(Main *bmain,
return tempfile;
}
void set_env_paths(string const &name, vector<string> path_dirs)
void set_env_paths(std::string const &name, std::vector<std::string> path_dirs)
{
const char *env = BLI_getenv(name.c_str());;
stringstream ss;
const char *env = BLI_getenv(name.c_str());
;
std::stringstream ss;
int i = 0;
for (string &s : path_dirs) {
for (std::string &s : path_dirs) {
++i;
ss << s;
if (i < path_dirs.size() || env) {
#ifdef _WIN32
#ifdef _WIN32
ss << ";";
#else
#else
ss << ":";
#endif
#endif
}
}
if (env) {

View File

@ -23,4 +23,4 @@ std::string cache_image(Main *bmain,
ReportList *reports);
void set_env_paths(std::string const &name, std::vector<std::string> path_dirs);
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -7,21 +7,19 @@
#include <pxr/imaging/glf/drawTarget.h>
#include <pxr/usd/usdGeom/camera.h>
#include "DEG_depsgraph_query.h"
#include "BLI_math_matrix.h"
#include "DNA_camera_types.h"
#include "DNA_screen_types.h"
#include "DNA_vec_types.h" /* this include must be before BKE_camera.h due to "rctf" type */
#include "BKE_camera.h"
#include "BLI_math_matrix.h"
#include "DEG_depsgraph_query.h"
#include "GPU_shader.h"
#include "glog/logging.h"
#include "viewportEngine.h"
#include "camera.h"
#include "utils.h"
using namespace std;
using namespace pxr;
#include "viewport_engine.h"
namespace blender::render::hydra {
@ -31,17 +29,16 @@ struct ViewSettings {
int width();
int height();
GfCamera gf_camera();
pxr::GfCamera gf_camera();
CameraData camera_data;
int screen_width;
int screen_height;
GfVec4i border;
pxr::GfVec4i border;
};
ViewSettings::ViewSettings(bContext *context)
: camera_data(context)
ViewSettings::ViewSettings(bContext *context) : camera_data(context)
{
View3D *view3d = CTX_wm_view3d(context);
RegionView3D *region_data = (RegionView3D *)CTX_wm_region_data(context);
@ -52,7 +49,7 @@ ViewSettings::ViewSettings(bContext *context)
Scene *scene = CTX_data_scene(context);
//getting render border
// getting render border
int x1 = 0, y1 = 0;
int x2 = screen_width, y2 = screen_height;
@ -64,22 +61,29 @@ ViewSettings::ViewSettings(bContext *context)
BKE_camera_view_frame(scene, (Camera *)camera_obj->data, camera_points);
float screen_points[4][2];
for (int i = 0 ; i < 4; i++) {
float world_location[] = {camera_points[i][0], camera_points[i][1], camera_points[i][2], 1.0f};
for (int i = 0; i < 4; i++) {
float world_location[] = {
camera_points[i][0], camera_points[i][1], camera_points[i][2], 1.0f};
mul_m4_v4(camera_obj->object_to_world, world_location);
mul_m4_v4(region_data->persmat, world_location);
if (world_location[3] > 0.0) {
screen_points[i][0] = screen_width * 0.5f + screen_width * 0.5f * (world_location[0] / world_location[3]);
screen_points[i][1] = screen_height * 0.5f + screen_height * 0.5f * (world_location[1] / world_location[3]);
screen_points[i][0] = screen_width * 0.5f +
screen_width * 0.5f * (world_location[0] / world_location[3]);
screen_points[i][1] = screen_height * 0.5f +
screen_height * 0.5f * (world_location[1] / world_location[3]);
}
}
// getting camera view region
float x1_f = min({screen_points[0][0], screen_points[1][0], screen_points[2][0], screen_points[3][0]});
float x2_f = max({screen_points[0][0], screen_points[1][0], screen_points[2][0], screen_points[3][0]});
float y1_f = min({screen_points[0][1], screen_points[1][1], screen_points[2][1], screen_points[3][1]});
float y2_f = max({screen_points[0][1], screen_points[1][1], screen_points[2][1], screen_points[3][1]});
float x1_f = std::min(
{screen_points[0][0], screen_points[1][0], screen_points[2][0], screen_points[3][0]});
float x2_f = std::max(
{screen_points[0][0], screen_points[1][0], screen_points[2][0], screen_points[3][0]});
float y1_f = std::min(
{screen_points[0][1], screen_points[1][1], screen_points[2][1], screen_points[3][1]});
float y2_f = std::max(
{screen_points[0][1], screen_points[1][1], screen_points[2][1], screen_points[3][1]});
// adjusting region to border
float x = x1_f, y = y1_f;
@ -91,10 +95,10 @@ ViewSettings::ViewSettings(bContext *context)
y2 = y + scene->r.border.ymax * dy;
// adjusting to region screen resolution
x1 = max(min(x1, screen_width), 0);
x2 = max(min(x2, screen_width), 0);
y1 = max(min(y1, screen_height), 0);
y2 = max(min(y2, screen_height), 0);
x1 = std::max(std::min(x1, screen_width), 0);
x2 = std::max(std::min(x2, screen_width), 0);
y1 = std::max(std::min(y1, screen_height), 0);
y2 = std::max(std::min(y2, screen_height), 0);
}
}
else {
@ -109,7 +113,7 @@ ViewSettings::ViewSettings(bContext *context)
}
}
border = GfVec4i(x1, y1, x2 - x1, y2 - y1);
border = pxr::GfVec4i(x1, y1, x2 - x1, y2 - y1);
}
int ViewSettings::width()
@ -122,31 +126,28 @@ int ViewSettings::height()
return border[3];
}
GfCamera ViewSettings::gf_camera()
pxr::GfCamera ViewSettings::gf_camera()
{
return camera_data.gf_camera(GfVec4f(
(float)border[0] / screen_width, (float)border[1] / screen_height,
(float)border[2] / screen_width, (float)border[3] / screen_height));
return camera_data.gf_camera(pxr::GfVec4f((float)border[0] / screen_width,
(float)border[1] / screen_height,
(float)border[2] / screen_width,
(float)border[3] / screen_height));
}
GLTexture::GLTexture()
: textureId(0)
, width(0)
, height(0)
, channels(4)
GLTexture::GLTexture() : texture_id(0), width(0), height(0), channels(4)
{
}
GLTexture::~GLTexture()
{
if (textureId) {
if (texture_id) {
free();
}
}
void GLTexture::setBuffer(HdRenderBuffer *buffer)
void GLTexture::set_buffer(pxr::HdRenderBuffer *buffer)
{
if (!textureId) {
if (!texture_id) {
create(buffer);
return;
}
@ -157,21 +158,21 @@ void GLTexture::setBuffer(HdRenderBuffer *buffer)
return;
}
glBindTexture(GL_TEXTURE_2D, textureId);
glBindTexture(GL_TEXTURE_2D, texture_id);
void *data = buffer->Map();
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_RGBA, GL_FLOAT, data);
buffer->Unmap();
}
void GLTexture::create(HdRenderBuffer *buffer)
void GLTexture::create(pxr::HdRenderBuffer *buffer)
{
width = buffer->GetWidth();
height = buffer->GetHeight();
channels = HdGetComponentCount(buffer->GetFormat());
channels = pxr::HdGetComponentCount(buffer->GetFormat());
glGenTextures(1, &textureId);
glBindTexture(GL_TEXTURE_2D, textureId);
glGenTextures(1, &texture_id);
glBindTexture(GL_TEXTURE_2D, texture_id);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
@ -184,8 +185,8 @@ void GLTexture::create(HdRenderBuffer *buffer)
void GLTexture::free()
{
glDeleteTextures(1, &textureId);
textureId = 0;
glDeleteTextures(1, &texture_id);
texture_id = 0;
}
void GLTexture::draw(GLfloat x, GLfloat y)
@ -204,7 +205,7 @@ void GLTexture::draw(GLfloat x, GLfloat y)
GLint position_location = glGetAttribLocation(shader_program, "pos");
// Generate geometry buffers for drawing textured quad
GLfloat position[8] = { x, y, x + width, y, x + width, y + height, x, y + height };
GLfloat position[8] = {x, y, x + width, y, x + width, y + height, x, y + height};
GLfloat texcoord[8] = {0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0};
GLuint vertex_buffer[2];
@ -217,7 +218,7 @@ void GLTexture::draw(GLfloat x, GLfloat y)
// DRAWING
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textureId);
glBindTexture(GL_TEXTURE_2D, texture_id);
glBindVertexArray(vertex_array);
glEnableVertexAttribArray(texturecoord_location);
@ -239,75 +240,86 @@ void GLTexture::draw(GLfloat x, GLfloat y)
glDeleteVertexArrays(1, &vertex_array);
}
void ViewportEngine::sync(Depsgraph *depsgraph, bContext *context, HdRenderSettingsMap &renderSettings)
void ViewportEngine::sync(Depsgraph *depsgraph,
bContext *context,
pxr::HdRenderSettingsMap &render_settings)
{
if (!scene_delegate) {
scene_delegate = std::make_unique<BlenderSceneDelegate>(render_index.get(),
SdfPath::AbsoluteRootPath().AppendElementString("scene"), BlenderSceneDelegate::EngineType::Viewport);
scene_delegate = std::make_unique<BlenderSceneDelegate>(
render_index.get(),
pxr::SdfPath::AbsoluteRootPath().AppendElementString("scene"),
BlenderSceneDelegate::EngineType::VIEWPORT);
}
scene_delegate->populate(depsgraph, context);
for (auto const& setting : renderSettings) {
for (auto const &setting : render_settings) {
render_delegate->SetRenderSetting(setting.first, setting.second);
}
}
void ViewportEngine::render(Depsgraph *depsgraph, bContext *context)
{
ViewSettings viewSettings(context);
if (viewSettings.width() * viewSettings.height() == 0) {
ViewSettings view_settings(context);
if (view_settings.width() * view_settings.height() == 0) {
return;
};
GfCamera gfCamera = viewSettings.gf_camera();
free_camera_delegate->SetCamera(gfCamera);
render_task_delegate->SetCameraAndViewport(free_camera_delegate->GetCameraId(),
GfVec4d(viewSettings.border[0], viewSettings.border[1], viewSettings.border[2], viewSettings.border[3]));
pxr::GfCamera gf_camera = view_settings.gf_camera();
free_camera_delegate->SetCamera(gf_camera);
render_task_delegate->set_camera_and_viewport(free_camera_delegate->GetCameraId(),
pxr::GfVec4d(view_settings.border[0],
view_settings.border[1],
view_settings.border[2],
view_settings.border[3]));
if (simple_light_task_delegate) {
simple_light_task_delegate->SetCameraPath(free_camera_delegate->GetCameraId());
simple_light_task_delegate->set_camera_path(free_camera_delegate->GetCameraId());
}
if ((bl_engine->type->flag & RE_USE_GPU_CONTEXT) == 0) {
render_task_delegate->SetRendererAov(HdAovTokens->color);
render_task_delegate->set_renderer_aov(pxr::HdAovTokens->color);
}
if (renderer_percent_done() == 0.0f) {
timeBegin = chrono::steady_clock::now();
time_begin = std::chrono::steady_clock::now();
}
GPUShader *shader = GPU_shader_get_builtin_shader(GPU_SHADER_3D_IMAGE);
GPU_shader_bind(shader);
HdTaskSharedPtrVector tasks;
pxr::HdTaskSharedPtrVector tasks;
if (simple_light_task_delegate) {
tasks.push_back(simple_light_task_delegate->GetTask());
tasks.push_back(simple_light_task_delegate->get_task());
}
tasks.push_back(render_task_delegate->GetTask());
tasks.push_back(render_task_delegate->get_task());
{
/* Release the GIL before calling into hydra, in case any hydra plugins call into python. */
TF_PY_ALLOW_THREADS_IN_SCOPE();
pxr::TF_PY_ALLOW_THREADS_IN_SCOPE();
engine->Execute(render_index.get(), &tasks);
if ((bl_engine->type->flag & RE_USE_GPU_CONTEXT) == 0) {
texture.setBuffer(render_task_delegate->GetRendererAov(HdAovTokens->color));
texture.draw((GLfloat)viewSettings.border[0], (GLfloat)viewSettings.border[1]);
texture.set_buffer(render_task_delegate->get_renderer_aov(pxr::HdAovTokens->color));
texture.draw((GLfloat)view_settings.border[0], (GLfloat)view_settings.border[1]);
}
}
GPU_shader_unbind();
chrono::time_point<chrono::steady_clock> timeCurrent = chrono::steady_clock::now();
chrono::milliseconds elapsedTime = chrono::duration_cast<chrono::milliseconds>(timeCurrent - timeBegin);
std::chrono::time_point<std::chrono::steady_clock> time_current =
std::chrono::steady_clock::now();
std::chrono::milliseconds elapsed_time = std::chrono::duration_cast<std::chrono::milliseconds>(
time_current - time_begin);
string formattedTime = format_duration(elapsedTime);
std::string formatted_time = format_duration(elapsed_time);
if (!render_task_delegate->IsConverged()) {
notify_status("Time: " + formattedTime + " | Done: " + to_string(int(renderer_percent_done())) + "%", "Render");
if (!render_task_delegate->is_converged()) {
notify_status("Time: " + formatted_time +
" | Done: " + std::to_string(int(renderer_percent_done())) + "%",
"Render");
bl_engine->flag |= RE_ENGINE_DO_DRAW;
}
else {
notify_status(("Time: " + formattedTime).c_str(), "Rendering Done");
notify_status(("Time: " + formatted_time).c_str(), "Rendering Done");
}
}
@ -316,9 +328,9 @@ void ViewportEngine::render(Depsgraph *depsgraph)
/* Empty function */
}
void ViewportEngine::notify_status(const string &info, const string &status)
void ViewportEngine::notify_status(const std::string &info, const std::string &status)
{
RE_engine_update_stats(bl_engine, status.c_str(), info.c_str());
}
} // namespace blender::render::hydra
} // namespace blender::render::hydra

View File

@ -13,36 +13,37 @@
namespace blender::render::hydra {
class GLTexture
{
public:
class GLTexture {
public:
GLTexture();
~GLTexture();
void setBuffer(pxr::HdRenderBuffer *buffer);
void set_buffer(pxr::HdRenderBuffer *buffer);
void draw(GLfloat x, GLfloat y);
private:
private:
void create(pxr::HdRenderBuffer *buffer);
void free();
GLuint textureId;
GLuint texture_id;
int width, height, channels;
};
class ViewportEngine : public Engine {
public:
public:
using Engine::Engine;
void sync(Depsgraph *depsgraph, bContext *context, pxr::HdRenderSettingsMap &renderSettings) override;
void sync(Depsgraph *depsgraph,
bContext *context,
pxr::HdRenderSettingsMap &render_settings) override;
void render(Depsgraph *depsgraph) override;
void render(Depsgraph *depsgraph, bContext *context);
private:
private:
void notify_status(const std::string &title, const std::string &info);
private:
std::chrono::time_point<std::chrono::steady_clock> timeBegin;
private:
std::chrono::time_point<std::chrono::steady_clock> time_begin;
GLTexture texture;
};
} // namespace blender::render::hydra
} // namespace blender::render::hydra