BLEN-367: Fix code style #14

Merged
Bogdan Nagirniak merged 11 commits from BLEN-367 into hydra-render 2023-03-15 09:42:17 +01:00
18 changed files with 154 additions and 120 deletions
Showing only changes of commit 8bd5ccb525

View File

@@ -26,8 +26,8 @@ CameraData::CameraData(Object *camera_obj, pxr::GfVec2i res, pxr::GfVec4f tile)
}
else {
pxr::GfVec3f obj_pos(camera->dof.focus_object->object_to_world[0][3],
-camera->dof.focus_object->object_to_world[1][3],
-camera->dof.focus_object->object_to_world[2][3]);
+camera->dof.focus_object->object_to_world[1][3],
+camera->dof.focus_object->object_to_world[2][3]);
pxr::GfVec3f cam_pos(transform[0][3], transform[1][3], transform[2][3]);
focus_distance = (obj_pos - cam_pos).GetLength();
}
@@ -58,8 +58,9 @@ CameraData::CameraData(Object *camera_obj, pxr::GfVec2i res, pxr::GfVec4f tile)
break;
}
-lens_shift = pxr::GfVec2f(lens_shift[0] / t_size[0] + (t_pos[0] + t_size[0] * 0.5 - 0.5) / t_size[0],
-lens_shift[1] / t_size[1] + (t_pos[1] + t_size[1] * 0.5 - 0.5) / t_size[1]);
+lens_shift = pxr::GfVec2f(
+lens_shift[0] / t_size[0] + (t_pos[0] + t_size[0] * 0.5 - 0.5) / t_size[0],
+lens_shift[1] / t_size[1] + (t_pos[1] + t_size[1] * 0.5 - 0.5) / t_size[1]);
switch (camera->type) {
case CAM_PERSP:
@@ -203,7 +204,7 @@ CameraData::CameraData(bContext *context)
// Updating l_shift due to viewport zoom and view_camera_offset
// view_camera_offset should be multiplied by 2
lens_shift = pxr::GfVec2f((lens_shift[0] + region_data->camdx * 2) / zoom,
-(lens_shift[1] + region_data->camdy * 2) / zoom);
+(lens_shift[1] + region_data->camdy * 2) / zoom);
if (mode == CAM_ORTHO) {
ortho_size *= zoom;

View File

@@ -18,7 +18,7 @@ Engine::Engine(RenderEngine *bl_engine, const std::string &delegateId) : bl_engi
{
pxr::HdRendererPluginRegistry &registry = pxr::HdRendererPluginRegistry::GetInstance();
-//TF_PY_ALLOW_THREADS_IN_SCOPE();
+pxr::TF_PY_ALLOW_THREADS_IN_SCOPE();
render_delegate = registry.CreateRenderDelegate(pxr::TfToken(delegateId));
pxr::HdDriverVector hd_drivers;
@@ -37,7 +37,8 @@ Engine::Engine(RenderEngine *bl_engine, const std::string &delegateId) : bl_engi
render_index.get(), pxr::SdfPath::AbsoluteRootPath().AppendElementString("renderTask"));
if (render_delegate->GetRendererDisplayName() == "GL") {
simple_light_task_delegate = std::make_unique<SimpleLightTaskDelegate>(
-render_index.get(), pxr::SdfPath::AbsoluteRootPath().AppendElementString("simpleLightTask"));
+render_index.get(),
+pxr::SdfPath::AbsoluteRootPath().AppendElementString("simpleLightTask"));
}
engine = std::make_unique<pxr::HdEngine>();

View File

@@ -16,11 +16,11 @@ namespace blender::render::hydra {
void FinalEngine::sync(Depsgraph *depsgraph,
bContext *context,
-pxr::HdRenderSettingsMap &renderSettings)
+pxr::HdRenderSettingsMap &renderSettings)
{
scene_delegate = std::make_unique<BlenderSceneDelegate>(
render_index.get(),
-pxr::SdfPath::AbsoluteRootPath().AppendElementString("scene"),
+pxr::SdfPath::AbsoluteRootPath().AppendElementString("scene"),
BlenderSceneDelegate::EngineType::Final);
scene_delegate->populate(depsgraph, context);
@@ -42,17 +42,18 @@ void FinalEngine::render(Depsgraph *depsgraph)
pxr::GfVec4f border(0, 0, 1, 1);
if (r.mode & R_BORDER) {
border = pxr::GfVec4f(r.border.xmin,
-r.border.ymin,
-r.border.xmax - r.border.xmin,
-r.border.ymax - r.border.ymin);
+r.border.ymin,
+r.border.xmax - r.border.xmin,
+r.border.ymax - r.border.ymin);
}
pxr::GfVec2i image_res(r.xsch * r.size / 100, r.ysch * r.size / 100);
pxr::GfVec2i res(int(image_res[0] * border[2]), int(image_res[1] * border[3]));
-pxr::GfCamera camera = CameraData(scene->camera, image_res, pxr::GfVec4f(0, 0, 1, 1)).gf_camera(border);
+pxr::GfCamera camera =
+CameraData(scene->camera, image_res, pxr::GfVec4f(0, 0, 1, 1)).gf_camera(border);
free_camera_delegate->SetCamera(camera);
render_task_delegate->SetCameraAndViewport(free_camera_delegate->GetCameraId(),
-pxr::GfVec4d(0, 0, res[0], res[1]));
+pxr::GfVec4d(0, 0, res[0], res[1]));
render_task_delegate->SetRendererAov(pxr::HdAovTokens->color);
if (simple_light_task_delegate) {
simple_light_task_delegate->SetCameraPath(free_camera_delegate->GetCameraId());
@@ -64,7 +65,8 @@ void FinalEngine::render(Depsgraph *depsgraph)
}
tasks.push_back(render_task_delegate->GetTask());
-std::chrono::time_point<std::chrono::steady_clock> timeBegin = std::chrono::steady_clock::now(), timeCurrent;
+std::chrono::time_point<std::chrono::steady_clock> timeBegin = std::chrono::steady_clock::now(),
+timeCurrent;
std::chrono::milliseconds elapsedTime;
float percentDone = 0.0;
@@ -113,7 +115,8 @@ pxr::GfVec2i FinalEngine::get_resolution(Scene *scene)
border_w = r.border.xmax - r.border.xmin;
border_h = r.border.ymax - r.border.ymin;
}
-return pxr::GfVec2i(int(r.xsch * border_w * r.size / 100), int(r.ysch * border_h * r.size / 100));
+return pxr::GfVec2i(int(r.xsch * border_w * r.size / 100),
+int(r.ysch * border_h * r.size / 100));
}
void FinalEngine::updateRenderResult(std::map<std::string, std::vector<float>> &renderImages,
@@ -158,17 +161,18 @@ void FinalEngineGL::render(Depsgraph *depsgraph)
pxr::GfVec4f border(0, 0, 1, 1);
if (r.mode & R_BORDER) {
border = pxr::GfVec4f(r.border.xmin,
-r.border.ymin,
-r.border.xmax - r.border.xmin,
-r.border.ymax - r.border.ymin);
+r.border.ymin,
+r.border.xmax - r.border.xmin,
+r.border.ymax - r.border.ymin);
}
pxr::GfVec2i image_res = {r.xsch * r.size / 100, r.ysch * r.size / 100};
pxr::GfVec2i res = {int(image_res[0] * border[2]), int(image_res[1] * border[3])};
-pxr::GfCamera camera = CameraData(scene->camera, image_res, pxr::GfVec4f(0, 0, 1, 1)).gf_camera(border);
+pxr::GfCamera camera =
+CameraData(scene->camera, image_res, pxr::GfVec4f(0, 0, 1, 1)).gf_camera(border);
free_camera_delegate->SetCamera(camera);
render_task_delegate->SetCameraAndViewport(free_camera_delegate->GetCameraId(),
-pxr::GfVec4d(0, 0, res[0], res[1]));
+pxr::GfVec4d(0, 0, res[0], res[1]));
if (simple_light_task_delegate) {
simple_light_task_delegate->SetCameraPath(free_camera_delegate->GetCameraId());
}
@@ -182,7 +186,8 @@ void FinalEngineGL::render(Depsgraph *depsgraph)
}
tasks.push_back(render_task_delegate->GetTask());
-std::chrono::time_point<std::chrono::steady_clock> timeBegin = std::chrono::steady_clock::now(), timeCurrent;
+std::chrono::time_point<std::chrono::steady_clock> timeBegin = std::chrono::steady_clock::now(),
+timeCurrent;
std::chrono::milliseconds elapsedTime;
float percentDone = 0.0;

View File

@@ -10,11 +10,11 @@ namespace blender::render::hydra {
void PreviewEngine::sync(Depsgraph *depsgraph,
bContext *context,
-pxr::HdRenderSettingsMap &renderSettings)
+pxr::HdRenderSettingsMap &renderSettings)
{
scene_delegate = std::make_unique<BlenderSceneDelegate>(
render_index.get(),
-pxr::SdfPath::AbsoluteRootPath().AppendElementString("scene"),
+pxr::SdfPath::AbsoluteRootPath().AppendElementString("scene"),
BlenderSceneDelegate::EngineType::Preview);
scene_delegate->populate(depsgraph, context);
@@ -36,7 +36,7 @@ void PreviewEngine::render(Depsgraph *depsgraph)
free_camera_delegate->SetCamera(camera);
render_task_delegate->SetCameraAndViewport(free_camera_delegate->GetCameraId(),
-pxr::GfVec4d(0, 0, res[0], res[1]));
+pxr::GfVec4d(0, 0, res[0], res[1]));
render_task_delegate->SetRendererAov(pxr::HdAovTokens->color);
pxr::HdTaskSharedPtrVector tasks;
@@ -73,7 +73,7 @@ void PreviewEngine::render(Depsgraph *depsgraph)
void PreviewEngine::updateRenderResult(const std::string &layerName,
int width,
int height,
-std::vector<float> &pixels)
+std::vector<float> &pixels)
{
RenderResult *result = RE_engine_begin_result(
bl_engine, 0, 0, width, height, layerName.c_str(), nullptr);

View File

@@ -22,7 +22,7 @@ class PreviewEngine : public FinalEngine {
std::vector<float> &pixels);
protected:
-pxr::HdRenderSettingsMap renderSettings;
+pxr::HdRenderSettingsMap renderSettings;
};
} // namespace blender::render::hydra

View File

@@ -107,10 +107,11 @@ static PyObject *get_render_plugins_func(PyObject * /*self*/, PyObject *args)
PyDict_SetItemString(descr, "id", val = PyUnicode_FromString(plugin_ids[i].GetText()));
Py_DECREF(val);
-PyDict_SetItemString(descr,
-"name",
-val = PyUnicode_FromString(
-pxr::UsdImagingGLEngine::GetRendererDisplayName(plugin_ids[i]).c_str()));
+PyDict_SetItemString(
+descr,
+"name",
+val = PyUnicode_FromString(
+pxr::UsdImagingGLEngine::GetRendererDisplayName(plugin_ids[i]).c_str()));
Py_DECREF(val);
std::string plugin_name = plugin_ids[i];

View File

@@ -13,15 +13,16 @@
namespace blender::render::hydra {
-RenderTaskDelegate::RenderTaskDelegate(pxr::HdRenderIndex *parentIndex, pxr::SdfPath const &delegateID)
+RenderTaskDelegate::RenderTaskDelegate(pxr::HdRenderIndex *parentIndex,
+pxr::SdfPath const &delegateID)
: pxr::HdSceneDelegate(parentIndex, delegateID)
{
pxr::SdfPath renderTaskId = GetTaskID();
GetRenderIndex().InsertTask<pxr::HdxRenderTask>(this, renderTaskId);
GetRenderIndex().GetChangeTracker().MarkTaskDirty(renderTaskId,
-pxr::HdChangeTracker::DirtyCollection);
+pxr::HdChangeTracker::DirtyCollection);
GetRenderIndex().GetChangeTracker().MarkTaskDirty(renderTaskId,
-pxr::HdChangeTracker::DirtyRenderTags);
+pxr::HdChangeTracker::DirtyRenderTags);
taskParams.enableLighting = true;
taskParams.alphaThreshold = 0.1f;
@@ -44,8 +45,10 @@ pxr::VtValue RenderTaskDelegate::Get(pxr::SdfPath const &id, pxr::TfToken const
return pxr::VtValue(taskParams);
}
if (key == pxr::HdTokens->collection) {
-pxr::HdRprimCollection rprimCollection(
-pxr::HdTokens->geometry, pxr::HdReprSelector(pxr::HdReprTokens->smoothHull), false, pxr::TfToken());
+pxr::HdRprimCollection rprimCollection(pxr::HdTokens->geometry,
+pxr::HdReprSelector(pxr::HdReprTokens->smoothHull),
+false,
+pxr::TfToken());
rprimCollection.SetRootPath(pxr::SdfPath::AbsoluteRootPath());
return pxr::VtValue(rprimCollection);
}
@@ -72,16 +75,19 @@ bool RenderTaskDelegate::IsConverged()
void RenderTaskDelegate::SetRendererAov(pxr::TfToken const &aov)
{
-pxr::HdAovDescriptor aovDesc = GetRenderIndex().GetRenderDelegate()->GetDefaultAovDescriptor(aov);
-pxr::HdRenderBufferDescriptor desc(pxr::GfVec3i(taskParams.viewport[2], taskParams.viewport[3], 1),
-aovDesc.format,
-aovDesc.multiSampled);
+pxr::HdAovDescriptor aovDesc = GetRenderIndex().GetRenderDelegate()->GetDefaultAovDescriptor(
+aov);
+pxr::HdRenderBufferDescriptor desc(
+pxr::GfVec3i(taskParams.viewport[2], taskParams.viewport[3], 1),
+aovDesc.format,
+aovDesc.multiSampled);
pxr::SdfPath bufferId = GetAovID(aov);
if (bufferDescriptors.find(bufferId) == bufferDescriptors.end()) {
GetRenderIndex().InsertBprim(pxr::HdPrimTypeTokens->renderBuffer, this, bufferId);
bufferDescriptors[bufferId] = desc;
-GetRenderIndex().GetChangeTracker().MarkBprimDirty(bufferId, pxr::HdRenderBuffer::DirtyDescription);
+GetRenderIndex().GetChangeTracker().MarkBprimDirty(bufferId,
+pxr::HdRenderBuffer::DirtyDescription);
pxr::HdRenderPassAovBinding binding;
binding.aovName = aov;
@@ -89,18 +95,20 @@ void RenderTaskDelegate::SetRendererAov(pxr::TfToken const &aov)
binding.aovSettings = aovDesc.aovSettings;
taskParams.aovBindings.push_back(binding);
-GetRenderIndex().GetChangeTracker().MarkTaskDirty(GetTaskID(), pxr::HdChangeTracker::DirtyParams);
+GetRenderIndex().GetChangeTracker().MarkTaskDirty(GetTaskID(),
+pxr::HdChangeTracker::DirtyParams);
}
else if (bufferDescriptors[bufferId] != desc) {
bufferDescriptors[bufferId] = desc;
-GetRenderIndex().GetChangeTracker().MarkBprimDirty(bufferId, pxr::HdRenderBuffer::DirtyDescription);
+GetRenderIndex().GetChangeTracker().MarkBprimDirty(bufferId,
+pxr::HdRenderBuffer::DirtyDescription);
}
}
pxr::HdRenderBuffer *RenderTaskDelegate::GetRendererAov(pxr::TfToken const &aov)
{
-return (
-pxr::HdRenderBuffer *)(GetRenderIndex().GetBprim(pxr::HdPrimTypeTokens->renderBuffer, GetAovID(aov)));
+return (pxr::HdRenderBuffer *)(GetRenderIndex().GetBprim(pxr::HdPrimTypeTokens->renderBuffer,
+GetAovID(aov)));
}
void RenderTaskDelegate::GetRendererAovData(pxr::TfToken const &aov, void *data)
@@ -118,12 +126,14 @@ pxr::HdTaskSharedPtr RenderTaskDelegate::GetTask()
return GetRenderIndex().GetTask(GetTaskID());
}
-void RenderTaskDelegate::SetCameraAndViewport(pxr::SdfPath const &cameraId, pxr::GfVec4d const &viewport)
+void RenderTaskDelegate::SetCameraAndViewport(pxr::SdfPath const &cameraId,
+pxr::GfVec4d const &viewport)
{
if (taskParams.viewport != viewport || taskParams.camera != cameraId) {
taskParams.viewport = viewport;
taskParams.camera = cameraId;
-GetRenderIndex().GetChangeTracker().MarkTaskDirty(GetTaskID(), pxr::HdChangeTracker::DirtyParams);
+GetRenderIndex().GetChangeTracker().MarkTaskDirty(GetTaskID(),
+pxr::HdChangeTracker::DirtyParams);
}
}

View File

@@ -29,8 +29,9 @@ class RenderTaskDelegate : public pxr::HdSceneDelegate {
void SetCameraAndViewport(pxr::SdfPath const &cameraId, pxr::GfVec4d const &viewport);
private:
-pxr::HdxRenderTaskParams taskParams;
-pxr::TfHashMap<pxr::SdfPath, pxr::HdRenderBufferDescriptor, pxr::SdfPath::Hash> bufferDescriptors;
+pxr::HdxRenderTaskParams taskParams;
+pxr::TfHashMap<pxr::SdfPath, pxr::HdRenderBufferDescriptor, pxr::SdfPath::Hash>
+bufferDescriptors;
};
} // namespace blender::render::hydra

View File

@@ -98,7 +98,7 @@ pxr::SdfPathVector BlenderSceneDelegate::GetInstancerPrototypes(pxr::SdfPath con
}
pxr::VtIntArray BlenderSceneDelegate::GetInstanceIndices(pxr::SdfPath const &instancerId,
-pxr::SdfPath const &prototypeId)
+pxr::SdfPath const &prototypeId)
{
LOG(INFO) << "GetInstanceIndices: " << instancerId.GetString() << " " << prototypeId.GetString();
MeshData *m_data = mesh_data(prototypeId);
@@ -117,7 +117,7 @@ pxr::GfMatrix4d BlenderSceneDelegate::GetInstancerTransform(pxr::SdfPath const &
size_t BlenderSceneDelegate::SampleInstancerTransform(pxr::SdfPath const &instancerId,
size_t maxSampleCount,
float *sampleTimes,
-pxr::GfMatrix4d *sampleValues)
+pxr::GfMatrix4d *sampleValues)
{
LOG(INFO) << "SampleInstancerTransform: " << instancerId.GetString();
size_t ret = 0;
@@ -127,10 +127,10 @@ size_t BlenderSceneDelegate::SampleInstancerTransform(pxr::SdfPath const &instan
}
size_t BlenderSceneDelegate::SamplePrimvar(pxr::SdfPath const &id,
-pxr::TfToken const &key,
+pxr::TfToken const &key,
size_t maxSampleCount,
float *sampleTimes,
-pxr::VtValue *sampleValues)
+pxr::VtValue *sampleValues)
{
// TODO: add a separate object for instancer for cleaner handling code
if (id.GetName() == "Instancer") {
@@ -416,7 +416,7 @@ pxr::VtValue BlenderSceneDelegate::Get(pxr::SdfPath const &id, pxr::TfToken cons
}
pxr::HdPrimvarDescriptorVector BlenderSceneDelegate::GetPrimvarDescriptors(
-pxr::SdfPath const &id, pxr::HdInterpolation interpolation)
+pxr::SdfPath const &id, pxr::HdInterpolation interpolation)
{
LOG(INFO) << "GetPrimvarDescriptors: " << id.GetString() << " " << interpolation;
if (mesh_data(id)) {
@@ -465,7 +465,8 @@ pxr::GfMatrix4d BlenderSceneDelegate::GetTransform(pxr::SdfPath const &id)
return pxr::GfMatrix4d();
}
-pxr::VtValue BlenderSceneDelegate::GetLightParamValue(pxr::SdfPath const &id, pxr::TfToken const &key)
+pxr::VtValue BlenderSceneDelegate::GetLightParamValue(pxr::SdfPath const &id,
+pxr::TfToken const &key)
{
LightData *l_data = light_data(id);
if (l_data) {

View File

@@ -23,9 +23,9 @@ LightData::LightData(BlenderSceneDelegate *scene_delegate, Object *object)
Light *light = (Light *)((Object *)id)->data;
data[pxr::HdLightTokens->intensity] = scene_delegate->engine_type ==
-BlenderSceneDelegate::EngineType::Preview ?
-light->energy / 1000 :
-light->energy;
+BlenderSceneDelegate::EngineType::Preview ?
+light->energy / 1000 :
+light->energy;
data[pxr::HdLightTokens->color] = pxr::GfVec3f(light->r, light->g, light->b);

View File

@@ -93,7 +93,8 @@ void MaterialData::export_mtlx()
void MaterialData::insert_prim()
{
pxr::SdfPath p_id = prim_id(scene_delegate, (Material *)id);
-scene_delegate->GetRenderIndex().InsertSprim(pxr::HdPrimTypeTokens->material, scene_delegate, p_id);
+scene_delegate->GetRenderIndex().InsertSprim(
+pxr::HdPrimTypeTokens->material, scene_delegate, p_id);
LOG(INFO) << "Add material: " << name() << " id=" << p_id.GetAsString();
}

View File

@@ -63,9 +63,9 @@ Material *MeshData::material()
pxr::HdMeshTopology MeshData::mesh_topology()
{
return pxr::HdMeshTopology(pxr::PxOsdOpenSubdivTokens->catmullClark,
-pxr::HdTokens->rightHanded,
-face_vertex_counts,
-face_vertex_indices);
+pxr::HdTokens->rightHanded,
+face_vertex_counts,
+face_vertex_indices);
}
pxr::HdPrimvarDescriptorVector MeshData::primvar_descriptors(pxr::HdInterpolation interpolation)
@@ -78,23 +78,25 @@ pxr::HdPrimvarDescriptorVector MeshData::primvar_descriptors(pxr::HdInterpolatio
}
else if (interpolation == pxr::HdInterpolationFaceVarying) {
if (!normals.empty()) {
-primvars.emplace_back(pxr::HdTokens->normals, interpolation, pxr::HdPrimvarRoleTokens->normal);
+primvars.emplace_back(
+pxr::HdTokens->normals, interpolation, pxr::HdPrimvarRoleTokens->normal);
}
if (!uvs.empty()) {
primvars.emplace_back(pxr::HdPrimvarRoleTokens->textureCoordinate,
interpolation,
-pxr::HdPrimvarRoleTokens->textureCoordinate);
+pxr::HdPrimvarRoleTokens->textureCoordinate);
}
}
return primvars;
}
-pxr::HdPrimvarDescriptorVector MeshData::instancer_primvar_descriptors(pxr::HdInterpolation interpolation)
+pxr::HdPrimvarDescriptorVector MeshData::instancer_primvar_descriptors(
+pxr::HdInterpolation interpolation)
{
pxr::HdPrimvarDescriptorVector primvars;
if (interpolation == pxr::HdInterpolationInstance) {
primvars.emplace_back(
-pxr::HdInstancerTokens->instanceTransform, interpolation, pxr::HdPrimvarRoleTokens->none);
+pxr::HdInstancerTokens->instanceTransform, interpolation, pxr::HdPrimvarRoleTokens->none);
}
return primvars;
}
@@ -110,7 +112,7 @@ pxr::VtIntArray MeshData::instance_indices()
size_t MeshData::sample_instancer_transform(size_t maxSampleCount,
float *sampleTimes,
-pxr::GfMatrix4d *sampleValues)
+pxr::GfMatrix4d *sampleValues)
{
*sampleTimes = 0.0f;
*sampleValues = pxr::GfMatrix4d(1.0);
@@ -120,7 +122,7 @@ size_t MeshData::sample_instancer_transform(size_t maxSampleCount,
size_t MeshData::sample_instancer_primvar(pxr::TfToken const &key,
size_t maxSampleCount,
float *sampleTimes,
-pxr::VtValue *sampleValues)
+pxr::VtValue *sampleValues)
{
if (key == pxr::HdInstancerTokens->instanceTransform) {
if (maxSampleCount > 0) {

View File

@@ -23,9 +23,9 @@
namespace mx = MaterialX;
void HdMtlxConvertToMaterialNetworkMap(std::string const &mtlxPath,
-pxr::TfTokenVector const &shaderSourceTypes,
-pxr::TfTokenVector const &renderContexts,
-pxr::HdMaterialNetworkMap *out)
+pxr::TfTokenVector const &shaderSourceTypes,
+pxr::TfTokenVector const &renderContexts,
+pxr::HdMaterialNetworkMap *out)
{
if (mtlxPath.empty()) {
return;
@@ -48,11 +48,17 @@ void HdMtlxConvertToMaterialNetworkMap(std::string const &mtlxPath,
pxr::UsdMtlxRead(doc, stage);
}
catch (mx::ExceptionFoundCycle &x) {
-Tf_PostErrorHelper(pxr::TF_CALL_CONTEXT, pxr::TF_DIAGNOSTIC_RUNTIME_ERROR_TYPE, "MaterialX cycle found: %s\n", x.what());
+Tf_PostErrorHelper(pxr::TF_CALL_CONTEXT,
+pxr::TF_DIAGNOSTIC_RUNTIME_ERROR_TYPE,
+"MaterialX cycle found: %s\n",
+x.what());
return;
}
catch (mx::Exception &x) {
-Tf_PostErrorHelper(pxr::TF_CALL_CONTEXT, pxr::TF_DIAGNOSTIC_RUNTIME_ERROR_TYPE, "MaterialX error: %s\n", x.what());
+Tf_PostErrorHelper(pxr::TF_CALL_CONTEXT,
+pxr::TF_DIAGNOSTIC_RUNTIME_ERROR_TYPE,
+"MaterialX error: %s\n",
+x.what());
return;
}
@@ -61,11 +67,11 @@ void HdMtlxConvertToMaterialNetworkMap(std::string const &mtlxPath,
if (auto material = pxr::UsdShadeMaterial(*children.begin())) {
if (pxr::UsdShadeShader mtlxSurface = material.ComputeSurfaceSource(renderContexts)) {
UsdImagingBuildHdMaterialNetworkFromTerminal(mtlxSurface.GetPrim(),
-pxr::HdMaterialTerminalTokens->surface,
+pxr::HdMaterialTerminalTokens->surface,
shaderSourceTypes,
renderContexts,
out,
-pxr::UsdTimeCode::Default());
+pxr::UsdTimeCode::Default());
}
}
}

View File

@@ -11,6 +11,6 @@
struct pxr::HdMaterialNetworkMap;
void HdMtlxConvertToMaterialNetworkMap(std::string const &mtlxPath,
-pxr::TfTokenVector const &shaderSourceTypes,
-pxr::TfTokenVector const &renderContexts,
-pxr::HdMaterialNetworkMap *out);
+pxr::TfTokenVector const &shaderSourceTypes,
+pxr::TfTokenVector const &renderContexts,
+pxr::HdMaterialNetworkMap *out);

View File

@@ -102,12 +102,15 @@ WorldData::WorldData(BlenderSceneDelegate *scene_delegate, World *world, bContex
pxr::GfMatrix4d WorldData::transform()
{
-pxr::GfMatrix4d transform = pxr::GfMatrix4d(pxr::GfRotation(pxr::GfVec3d(1.0, 0.0, 0.0), -90), pxr::GfVec3d());
+pxr::GfMatrix4d transform = pxr::GfMatrix4d(pxr::GfRotation(pxr::GfVec3d(1.0, 0.0, 0.0), -90),
+pxr::GfVec3d());
/* TODO : do this check via RenderSettings*/
if (scene_delegate->GetRenderIndex().GetRenderDelegate()->GetRendererDisplayName() == "RPR") {
-transform *= pxr::GfMatrix4d(pxr::GfRotation(pxr::GfVec3d(1.0, 0.0, 0.0), -180), pxr::GfVec3d());
-transform *= pxr::GfMatrix4d(pxr::GfRotation(pxr::GfVec3d(0.0, 0.0, 1.0), 90.0), pxr::GfVec3d());
+transform *= pxr::GfMatrix4d(pxr::GfRotation(pxr::GfVec3d(1.0, 0.0, 0.0), -180),
+pxr::GfVec3d());
+transform *= pxr::GfMatrix4d(pxr::GfRotation(pxr::GfVec3d(0.0, 0.0, 1.0), 90.0),
+pxr::GfVec3d());
}
return transform;
}
@@ -125,7 +128,8 @@ pxr::VtValue WorldData::get_data(pxr::TfToken const &key)
void WorldData::insert_prim()
{
pxr::SdfPath p_id = prim_id(scene_delegate);
-scene_delegate->GetRenderIndex().InsertSprim(pxr::HdPrimTypeTokens->domeLight, scene_delegate, p_id);
+scene_delegate->GetRenderIndex().InsertSprim(
+pxr::HdPrimTypeTokens->domeLight, scene_delegate, p_id);
LOG(INFO) << "Add World: id=" << p_id.GetAsString();
}

View File

@@ -8,7 +8,7 @@
namespace blender::render::hydra {
SimpleLightTaskDelegate::SimpleLightTaskDelegate(pxr::HdRenderIndex *parentIndex,
-pxr::SdfPath const &delegateID)
+pxr::SdfPath const &delegateID)
: pxr::HdSceneDelegate(parentIndex, delegateID)
{
pxr::SdfPath taskId = GetTaskID();

View File

@@ -1,9 +1,9 @@
/* SPDX-License-Identifier: Apache-2.0
* Copyright 2011-2022 Blender Foundation */
+#include <chrono>
#include <filesystem>
#include <sstream>
-#include <chrono>
#include <pxr/base/tf/stringUtils.h>
@@ -18,24 +18,24 @@
namespace blender::render::hydra {
-pxr::GfMatrix4d gf_matrix_from_transform(float m[4][4])
+pxr::GfMatrix4d gf_matrix_from_transform(float m[4][4])
{
return pxr::GfMatrix4d(m[0][0],
-m[0][1],
-m[0][2],
-m[0][3],
-m[1][0],
-m[1][1],
-m[1][2],
-m[1][3],
-m[2][0],
-m[2][1],
-m[2][2],
-m[2][3],
-m[3][0],
-m[3][1],
-m[3][2],
-m[3][3]);
+m[0][1],
+m[0][2],
+m[0][3],
+m[1][0],
+m[1][1],
+m[1][2],
+m[1][3],
+m[2][0],
+m[2][1],
+m[2][2],
+m[2][3],
+m[3][0],
+m[3][1],
+m[3][2],
+m[3][3]);
}
std::string format_duration(std::chrono::milliseconds millisecs)
@@ -68,11 +68,11 @@ std::string format_duration(std::chrono::milliseconds millisecs)
}
std::string cache_image(Main *bmain,
-Scene *scene,
-Image *image,
-ImageUser *iuser,
-ImageSaveOptions *opts,
-ReportList *reports)
+Scene *scene,
+Image *image,
+ImageUser *iuser,
+ImageSaveOptions *opts,
+ReportList *reports)
{
const std::string default_format = ".png";

View File

@@ -7,19 +7,19 @@
#include <pxr/imaging/glf/drawTarget.h>
#include <pxr/usd/usdGeom/camera.h>
-#include "DNA_vec_types.h"
#include "BKE_camera.h"
#include "BLI_math_matrix.h"
#include "DEG_depsgraph_query.h"
#include "DNA_camera_types.h"
#include "DNA_screen_types.h"
+#include "DNA_vec_types.h"
#include "GPU_shader.h"
#include "glog/logging.h"
-#include "viewport_engine.h"
#include "camera.h"
#include "utils.h"
+#include "viewport_engine.h"
namespace blender::render::hydra {
@@ -129,9 +129,9 @@ int ViewSettings::height()
pxr::GfCamera ViewSettings::gf_camera()
{
return camera_data.gf_camera(pxr::GfVec4f((float)border[0] / screen_width,
-(float)border[1] / screen_height,
-(float)border[2] / screen_width,
-(float)border[3] / screen_height));
+(float)border[1] / screen_height,
+(float)border[2] / screen_width,
+(float)border[3] / screen_height));
}
GLTexture::GLTexture() : textureId(0), width(0), height(0), channels(4)
@@ -242,12 +242,12 @@ void GLTexture::draw(GLfloat x, GLfloat y)
void ViewportEngine::sync(Depsgraph *depsgraph,
bContext *context,
-pxr::HdRenderSettingsMap &renderSettings)
+pxr::HdRenderSettingsMap &renderSettings)
{
if (!scene_delegate) {
scene_delegate = std::make_unique<BlenderSceneDelegate>(
render_index.get(),
-pxr::SdfPath::AbsoluteRootPath().AppendElementString("scene"),
+pxr::SdfPath::AbsoluteRootPath().AppendElementString("scene"),
BlenderSceneDelegate::EngineType::Viewport);
}
scene_delegate->populate(depsgraph, context);
@@ -267,10 +267,10 @@ void ViewportEngine::render(Depsgraph *depsgraph, bContext *context)
pxr::GfCamera gfCamera = viewSettings.gf_camera();
free_camera_delegate->SetCamera(gfCamera);
render_task_delegate->SetCameraAndViewport(free_camera_delegate->GetCameraId(),
-pxr::GfVec4d(viewSettings.border[0],
-viewSettings.border[1],
-viewSettings.border[2],
-viewSettings.border[3]));
+pxr::GfVec4d(viewSettings.border[0],
+viewSettings.border[1],
+viewSettings.border[2],
+viewSettings.border[3]));
if (simple_light_task_delegate) {
simple_light_task_delegate->SetCameraPath(free_camera_delegate->GetCameraId());
}
@@ -305,9 +305,10 @@ void ViewportEngine::render(Depsgraph *depsgraph, bContext *context)
GPU_shader_unbind();
-std::chrono::time_point<std::chrono::steady_clock> timeCurrent = std::chrono::steady_clock::now();
-std::chrono::milliseconds elapsedTime = std::chrono::duration_cast<std::chrono::milliseconds>(timeCurrent -
-timeBegin);
+std::chrono::time_point<std::chrono::steady_clock> timeCurrent =
+std::chrono::steady_clock::now();
+std::chrono::milliseconds elapsedTime = std::chrono::duration_cast<std::chrono::milliseconds>(
+timeCurrent - timeBegin);
std::string formattedTime = format_duration(elapsedTime);