forked from blender/blender
Rename some functions #25
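The changes below rename private data members of the Hydra render engine classes to carry a trailing underscore (for example transform becomes transform_ and render_index becomes render_index_) and reorder a few declarations; public method names stay as they are. A minimal sketch of the convention, using a hypothetical class that is not part of this patch:

class ExampleDelegate {
 public:
  float focal_length() const { return focal_length_; }  /* accessors keep the plain name */

 private:
  float focal_length_;  /* private data members take the trailing underscore */
};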
@@ -9,140 +9,6 @@

 namespace blender::render::hydra {

-CameraData::CameraData(Object *camera_obj, pxr::GfVec2i res, pxr::GfVec4f tile)
-{
-  Camera *camera = (Camera *)camera_obj->data;
-
-  float t_pos[2] = {tile[0], tile[1]};
-  float t_size[2] = {tile[2], tile[3]};
-  transform = gf_matrix_from_transform(camera_obj->object_to_world);
-  clip_range = pxr::GfRange1f(camera->clip_start, camera->clip_end);
-  mode = camera->type;
-
-  if (camera->dof.flag & CAM_DOF_ENABLED) {
-    float focus_distance;
-    if (!camera->dof.focus_object) {
-      focus_distance = camera->dof.focus_distance;
-    }
-    else {
-      pxr::GfVec3f obj_pos(camera->dof.focus_object->object_to_world[0][3],
-                           camera->dof.focus_object->object_to_world[1][3],
-                           camera->dof.focus_object->object_to_world[2][3]);
-      pxr::GfVec3f cam_pos(transform[0][3], transform[1][3], transform[2][3]);
-      focus_distance = (obj_pos - cam_pos).GetLength();
-    }
-
-    dof_data = std::tuple(
-        std::max(focus_distance, 0.001f), camera->dof.aperture_fstop, camera->dof.aperture_blades);
-  }
-
-  float ratio = (float)res[0] / res[1];
-
-  switch (camera->sensor_fit) {
-    case CAMERA_SENSOR_FIT_VERT:
-      lens_shift = pxr::GfVec2f(camera->shiftx / ratio, camera->shifty);
-      break;
-    case CAMERA_SENSOR_FIT_HOR:
-      lens_shift = pxr::GfVec2f(camera->shiftx, camera->shifty * ratio);
-      break;
-    case CAMERA_SENSOR_FIT_AUTO:
-      if (ratio > 1.0f) {
-        lens_shift = pxr::GfVec2f(camera->shiftx, camera->shifty * ratio);
-      }
-      else {
-        lens_shift = pxr::GfVec2f(camera->shiftx / ratio, camera->shifty);
-      }
-      break;
-    default:
-      lens_shift = pxr::GfVec2f(camera->shiftx, camera->shifty);
-      break;
-  }
-
-  lens_shift = pxr::GfVec2f(
-      lens_shift[0] / t_size[0] + (t_pos[0] + t_size[0] * 0.5 - 0.5) / t_size[0],
-      lens_shift[1] / t_size[1] + (t_pos[1] + t_size[1] * 0.5 - 0.5) / t_size[1]);
-
-  switch (camera->type) {
-    case CAM_PERSP:
-      focal_length = camera->lens;
-
-      switch (camera->sensor_fit) {
-        case CAMERA_SENSOR_FIT_VERT:
-          sensor_size = pxr::GfVec2f(camera->sensor_y * ratio, camera->sensor_y);
-          break;
-        case CAMERA_SENSOR_FIT_HOR:
-          sensor_size = pxr::GfVec2f(camera->sensor_x, camera->sensor_x / ratio);
-          break;
-        case CAMERA_SENSOR_FIT_AUTO:
-          if (ratio > 1.0f) {
-            sensor_size = pxr::GfVec2f(camera->sensor_x, camera->sensor_x / ratio);
-          }
-          else {
-            sensor_size = pxr::GfVec2f(camera->sensor_x * ratio, camera->sensor_x);
-          }
-          break;
-        default:
-          sensor_size = pxr::GfVec2f(camera->sensor_x, camera->sensor_y);
-          break;
-      }
-      sensor_size = pxr::GfVec2f(sensor_size[0] * t_size[0], sensor_size[1] * t_size[1]);
-      break;
-
-    case CAM_ORTHO:
-      focal_length = 0.0f;
-      switch (camera->sensor_fit) {
-        case CAMERA_SENSOR_FIT_VERT:
-          ortho_size = pxr::GfVec2f(camera->ortho_scale * ratio, camera->ortho_scale);
-          break;
-        case CAMERA_SENSOR_FIT_HOR:
-          ortho_size = pxr::GfVec2f(camera->ortho_scale, camera->ortho_scale / ratio);
-          break;
-        case CAMERA_SENSOR_FIT_AUTO:
-          if (ratio > 1.0f) {
-            ortho_size = pxr::GfVec2f(camera->ortho_scale, camera->ortho_scale / ratio);
-          }
-          else {
-            ortho_size = pxr::GfVec2f(camera->ortho_scale * ratio, camera->ortho_scale);
-          }
-          break;
-        default:
-          ortho_size = pxr::GfVec2f(camera->ortho_scale, camera->ortho_scale);
-          break;
-      }
-      ortho_size = pxr::GfVec2f(ortho_size[0] * t_size[0], ortho_size[1] * t_size[1]);
-      break;
-
-    case CAM_PANO:
-      /* TODO: Recheck parameters for PANO camera */
-      focal_length = camera->lens;
-
-      switch (camera->sensor_fit) {
-        case CAMERA_SENSOR_FIT_VERT:
-          sensor_size = pxr::GfVec2f(camera->sensor_y * ratio, camera->sensor_y);
-          break;
-        case CAMERA_SENSOR_FIT_HOR:
-          sensor_size = pxr::GfVec2f(camera->sensor_x, camera->sensor_x / ratio);
-          break;
-        case CAMERA_SENSOR_FIT_AUTO:
-          if (ratio > 1.0f) {
-            sensor_size = pxr::GfVec2f(camera->sensor_x, camera->sensor_x / ratio);
-          }
-          else {
-            sensor_size = pxr::GfVec2f(camera->sensor_x * ratio, camera->sensor_x);
-          }
-          break;
-        default:
-          sensor_size = pxr::GfVec2f(camera->sensor_x, camera->sensor_y);
-          break;
-      }
-      sensor_size = pxr::GfVec2f(sensor_size[0] * t_size[0], sensor_size[1] * t_size[1]);
-
-    default:
-      focal_length = camera->lens;
-      sensor_size = pxr::GfVec2f(camera->sensor_y * ratio, camera->sensor_y);
-  }
-}
-
 CameraData::CameraData(bContext *context)
 {
   View3D *view3d = CTX_wm_view3d(context);
@@ -155,46 +21,46 @@ CameraData::CameraData(bContext *context)

   pxr::GfVec2i res(region->winx, region->winy);
   float ratio = (float)res[0] / res[1];
-  transform = gf_matrix_from_transform(region_data->viewmat).GetInverse();
+  transform_ = gf_matrix_from_transform(region_data->viewmat).GetInverse();

   switch (region_data->persp) {
     case RV3D_PERSP: {
-      mode = CAM_PERSP;
-      clip_range = pxr::GfRange1f(view3d->clip_start, view3d->clip_end);
-      lens_shift = pxr::GfVec2f(0.0, 0.0);
-      focal_length = view3d->lens;
+      mode_ = CAM_PERSP;
+      clip_range_ = pxr::GfRange1f(view3d->clip_start, view3d->clip_end);
+      lens_shift_ = pxr::GfVec2f(0.0, 0.0);
+      focal_length_ = view3d->lens;

       if (ratio > 1.0) {
-        sensor_size = pxr::GfVec2f(VIEWPORT_SENSOR_SIZE, VIEWPORT_SENSOR_SIZE / ratio);
+        sensor_size_ = pxr::GfVec2f(VIEWPORT_SENSOR_SIZE, VIEWPORT_SENSOR_SIZE / ratio);
       }
       else {
-        sensor_size = pxr::GfVec2f(VIEWPORT_SENSOR_SIZE * ratio, VIEWPORT_SENSOR_SIZE);
+        sensor_size_ = pxr::GfVec2f(VIEWPORT_SENSOR_SIZE * ratio, VIEWPORT_SENSOR_SIZE);
       }
       break;
     }

     case RV3D_ORTHO: {
-      mode = CAM_ORTHO;
-      lens_shift = pxr::GfVec2f(0.0f, 0.0f);
+      mode_ = CAM_ORTHO;
+      lens_shift_ = pxr::GfVec2f(0.0f, 0.0f);

       float o_size = region_data->dist * VIEWPORT_SENSOR_SIZE / view3d->lens;
       float o_depth = view3d->clip_end;

-      clip_range = pxr::GfRange1f(-o_depth * 0.5, o_depth * 0.5);
+      clip_range_ = pxr::GfRange1f(-o_depth * 0.5, o_depth * 0.5);

       if (ratio > 1.0f) {
-        ortho_size = pxr::GfVec2f(o_size, o_size / ratio);
+        ortho_size_ = pxr::GfVec2f(o_size, o_size / ratio);
       }
       else {
-        ortho_size = pxr::GfVec2f(o_size * ratio, o_size);
+        ortho_size_ = pxr::GfVec2f(o_size * ratio, o_size);
       }
       break;
     }

     case RV3D_CAMOB: {
-      pxr::GfMatrix4d mat = transform;
+      pxr::GfMatrix4d mat = transform_;
       *this = CameraData(view3d->camera, res, pxr::GfVec4f(0, 0, 1, 1));
-      transform = mat;
+      transform_ = mat;

       // This formula was taken from previous plugin with corresponded comment
       // See blender/intern/cycles/blender/blender_camera.cpp:blender_camera_from_view (look
@@ -203,14 +69,14 @@ CameraData::CameraData(bContext *context)

       // Updating l_shift due to viewport zoom and view_camera_offset
       // view_camera_offset should be multiplied by 2
-      lens_shift = pxr::GfVec2f((lens_shift[0] + region_data->camdx * 2) / zoom,
-                                (lens_shift[1] + region_data->camdy * 2) / zoom);
+      lens_shift_ = pxr::GfVec2f((lens_shift_[0] + region_data->camdx * 2) / zoom,
+                                 (lens_shift_[1] + region_data->camdy * 2) / zoom);

-      if (mode == CAM_ORTHO) {
-        ortho_size *= zoom;
+      if (mode_ == CAM_ORTHO) {
+        ortho_size_ *= zoom;
       }
       else {
-        sensor_size *= zoom;
+        sensor_size_ *= zoom;
       }
       break;
     }
@@ -220,25 +86,164 @@ CameraData::CameraData(bContext *context)
   }
 }

+CameraData::CameraData(Object *camera_obj, pxr::GfVec2i res, pxr::GfVec4f tile)
+{
+  Camera *camera = (Camera *)camera_obj->data;
+
+  float t_pos[2] = {tile[0], tile[1]};
+  float t_size[2] = {tile[2], tile[3]};
+  transform_ = gf_matrix_from_transform(camera_obj->object_to_world);
+  clip_range_ = pxr::GfRange1f(camera->clip_start, camera->clip_end);
+  mode_ = camera->type;
+
+  if (camera->dof.flag & CAM_DOF_ENABLED) {
+    float focus_distance;
+    if (!camera->dof.focus_object) {
+      focus_distance = camera->dof.focus_distance;
+    }
+    else {
+      pxr::GfVec3f obj_pos(camera->dof.focus_object->object_to_world[0][3],
+                           camera->dof.focus_object->object_to_world[1][3],
+                           camera->dof.focus_object->object_to_world[2][3]);
+      pxr::GfVec3f cam_pos(transform_[0][3], transform_[1][3], transform_[2][3]);
+      focus_distance = (obj_pos - cam_pos).GetLength();
+    }
+
+    dof_data_ = std::tuple(
+        std::max(focus_distance, 0.001f), camera->dof.aperture_fstop, camera->dof.aperture_blades);
+  }
+
+  float ratio = (float)res[0] / res[1];
+
+  switch (camera->sensor_fit) {
+    case CAMERA_SENSOR_FIT_VERT:
+      lens_shift_ = pxr::GfVec2f(camera->shiftx / ratio, camera->shifty);
+      break;
+    case CAMERA_SENSOR_FIT_HOR:
+      lens_shift_ = pxr::GfVec2f(camera->shiftx, camera->shifty * ratio);
+      break;
+    case CAMERA_SENSOR_FIT_AUTO:
+      if (ratio > 1.0f) {
+        lens_shift_ = pxr::GfVec2f(camera->shiftx, camera->shifty * ratio);
+      }
+      else {
+        lens_shift_ = pxr::GfVec2f(camera->shiftx / ratio, camera->shifty);
+      }
+      break;
+    default:
+      lens_shift_ = pxr::GfVec2f(camera->shiftx, camera->shifty);
+      break;
+  }
+
+  lens_shift_ = pxr::GfVec2f(
+      lens_shift_[0] / t_size[0] + (t_pos[0] + t_size[0] * 0.5 - 0.5) / t_size[0],
+      lens_shift_[1] / t_size[1] + (t_pos[1] + t_size[1] * 0.5 - 0.5) / t_size[1]);
+
+  switch (camera->type) {
+    case CAM_PERSP:
+      focal_length_ = camera->lens;
+
+      switch (camera->sensor_fit) {
+        case CAMERA_SENSOR_FIT_VERT:
+          sensor_size_ = pxr::GfVec2f(camera->sensor_y * ratio, camera->sensor_y);
+          break;
+        case CAMERA_SENSOR_FIT_HOR:
+          sensor_size_ = pxr::GfVec2f(camera->sensor_x, camera->sensor_x / ratio);
+          break;
+        case CAMERA_SENSOR_FIT_AUTO:
+          if (ratio > 1.0f) {
+            sensor_size_ = pxr::GfVec2f(camera->sensor_x, camera->sensor_x / ratio);
+          }
+          else {
+            sensor_size_ = pxr::GfVec2f(camera->sensor_x * ratio, camera->sensor_x);
+          }
+          break;
+        default:
+          sensor_size_ = pxr::GfVec2f(camera->sensor_x, camera->sensor_y);
+          break;
+      }
+      sensor_size_ = pxr::GfVec2f(sensor_size_[0] * t_size[0], sensor_size_[1] * t_size[1]);
+      break;
+
+    case CAM_ORTHO:
+      focal_length_ = 0.0f;
+      switch (camera->sensor_fit) {
+        case CAMERA_SENSOR_FIT_VERT:
+          ortho_size_ = pxr::GfVec2f(camera->ortho_scale * ratio, camera->ortho_scale);
+          break;
+        case CAMERA_SENSOR_FIT_HOR:
+          ortho_size_ = pxr::GfVec2f(camera->ortho_scale, camera->ortho_scale / ratio);
+          break;
+        case CAMERA_SENSOR_FIT_AUTO:
+          if (ratio > 1.0f) {
+            ortho_size_ = pxr::GfVec2f(camera->ortho_scale, camera->ortho_scale / ratio);
+          }
+          else {
+            ortho_size_ = pxr::GfVec2f(camera->ortho_scale * ratio, camera->ortho_scale);
+          }
+          break;
+        default:
+          ortho_size_ = pxr::GfVec2f(camera->ortho_scale, camera->ortho_scale);
+          break;
+      }
+      ortho_size_ = pxr::GfVec2f(ortho_size_[0] * t_size[0], ortho_size_[1] * t_size[1]);
+      break;
+
+    case CAM_PANO:
+      /* TODO: Recheck parameters for PANO camera */
+      focal_length_ = camera->lens;
+
+      switch (camera->sensor_fit) {
+        case CAMERA_SENSOR_FIT_VERT:
+          sensor_size_ = pxr::GfVec2f(camera->sensor_y * ratio, camera->sensor_y);
+          break;
+        case CAMERA_SENSOR_FIT_HOR:
+          sensor_size_ = pxr::GfVec2f(camera->sensor_x, camera->sensor_x / ratio);
+          break;
+        case CAMERA_SENSOR_FIT_AUTO:
+          if (ratio > 1.0f) {
+            sensor_size_ = pxr::GfVec2f(camera->sensor_x, camera->sensor_x / ratio);
+          }
+          else {
+            sensor_size_ = pxr::GfVec2f(camera->sensor_x * ratio, camera->sensor_x);
+          }
+          break;
+        default:
+          sensor_size_ = pxr::GfVec2f(camera->sensor_x, camera->sensor_y);
+          break;
+      }
+      sensor_size_ = pxr::GfVec2f(sensor_size_[0] * t_size[0], sensor_size_[1] * t_size[1]);
+
+    default:
+      focal_length_ = camera->lens;
+      sensor_size_ = pxr::GfVec2f(camera->sensor_y * ratio, camera->sensor_y);
+  }
+}
+
+pxr::GfCamera CameraData::gf_camera()
+{
+  return gf_camera(pxr::GfVec4f(0, 0, 1, 1));
+}
+
 pxr::GfCamera CameraData::gf_camera(pxr::GfVec4f tile)
 {
   float t_pos[2] = {tile[0], tile[1]}, t_size[2] = {tile[2], tile[3]};

   pxr::GfCamera gf_camera = pxr::GfCamera();

-  gf_camera.SetClippingRange(clip_range);
+  gf_camera.SetClippingRange(clip_range_);

-  float l_shift[2] = {(lens_shift[0] + t_pos[0] + t_size[0] * 0.5f - 0.5f) / t_size[0],
-                      (lens_shift[1] + t_pos[1] + t_size[1] * 0.5f - 0.5f) / t_size[1]};
+  float l_shift[2] = {(lens_shift_[0] + t_pos[0] + t_size[0] * 0.5f - 0.5f) / t_size[0],
+                      (lens_shift_[1] + t_pos[1] + t_size[1] * 0.5f - 0.5f) / t_size[1]};

-  switch (mode) {
+  switch (mode_) {
     case CAM_PERSP:
     case CAM_PANO: {
       /* TODO: store panoramic camera settings */
       gf_camera.SetProjection(pxr::GfCamera::Projection::Perspective);
-      gf_camera.SetFocalLength(focal_length);
+      gf_camera.SetFocalLength(focal_length_);

-      float s_size[2] = {sensor_size[0] * t_size[0], sensor_size[1] * t_size[1]};
+      float s_size[2] = {sensor_size_[0] * t_size[0], sensor_size_[1] * t_size[1]};

       gf_camera.SetHorizontalAperture(s_size[0]);
       gf_camera.SetVerticalAperture(s_size[1]);
@@ -252,7 +257,7 @@ pxr::GfCamera CameraData::gf_camera(pxr::GfVec4f tile)

       // Use tenths of a world unit accorging to USD docs
       // https://graphics.pixar.com/usd/docs/api/class_gf_camera.html
-      float o_size[2] = {ortho_size[0] * t_size[0] * 10, ortho_size[1] * t_size[1] * 10};
+      float o_size[2] = {ortho_size_[0] * t_size[0] * 10, ortho_size_[1] * t_size[1] * 10};

       gf_camera.SetHorizontalAperture(o_size[0]);
       gf_camera.SetVerticalAperture(o_size[1]);
@@ -265,13 +270,10 @@ pxr::GfCamera CameraData::gf_camera(pxr::GfVec4f tile)
       break;
   }

-  gf_camera.SetTransform(transform);
+  gf_camera.SetTransform(transform_);
   return gf_camera;
 }

-pxr::GfCamera CameraData::gf_camera()
-{
-  return gf_camera(pxr::GfVec4f(0, 0, 1, 1));
-}
-
 } // namespace blender::render::hydra
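A quick check of the tile remapping used in both CameraData constructors and in gf_camera(), read from the formula above rather than stated anywhere in the patch: for a full-frame tile (0, 0, 1, 1), t_pos = (0, 0) and t_size = (1, 1), so shift / t_size + (t_pos + t_size * 0.5 - 0.5) / t_size reduces to the original shift; for the left half of the frame (0, 0, 0.5, 1), a zero shift maps to (0 + 0.25 - 0.5) / 0.5 = -0.5, which re-centres the aperture on that tile.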
@@ -23,14 +23,14 @@ class CameraData {
   pxr::GfCamera gf_camera(pxr::GfVec4f tile);

  private:
-  int mode;
-  pxr::GfRange1f clip_range;
-  float focal_length;
-  pxr::GfVec2f sensor_size;
-  pxr::GfMatrix4d transform;
-  pxr::GfVec2f lens_shift;
-  pxr::GfVec2f ortho_size;
-  std::tuple<float, float, int> dof_data;
+  int mode_;
+  pxr::GfRange1f clip_range_;
+  float focal_length_;
+  pxr::GfVec2f sensor_size_;
+  pxr::GfMatrix4d transform_;
+  pxr::GfVec2f lens_shift_;
+  pxr::GfVec2f ortho_size_;
+  std::tuple<float, float, int> dof_data_;
 };

 } // namespace blender::render::hydra
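For orientation, the call sites touched later in this patch construct a CameraData from a Blender camera object and hand the resulting GfCamera to the free-camera scene delegate. A usage sketch along the lines of FinalEngine::render(), with illustrative resolution and border values:

  pxr::GfVec2i image_res(1920, 1080);  /* illustrative resolution */
  pxr::GfVec4f border(0, 0, 1, 1);     /* full-frame render border */
  pxr::GfCamera camera =
      CameraData(scene->camera, image_res, pxr::GfVec4f(0, 0, 1, 1)).gf_camera(border);
  free_camera_delegate_->SetCamera(camera);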
@@ -14,51 +14,51 @@ namespace blender::render::hydra {

 CLG_LOGREF_DECLARE_GLOBAL(LOG_EN, "rhd.en");

-Engine::Engine(RenderEngine *bl_engine, const std::string &delegate_id) : bl_engine(bl_engine)
+Engine::Engine(RenderEngine *bl_engine, const std::string &delegate_id) : bl_engine_(bl_engine)
 {
   pxr::HdRendererPluginRegistry &registry = pxr::HdRendererPluginRegistry::GetInstance();

   pxr::TF_PY_ALLOW_THREADS_IN_SCOPE();
-  render_delegate = registry.CreateRenderDelegate(pxr::TfToken(delegate_id));
+  render_delegate_ = registry.CreateRenderDelegate(pxr::TfToken(delegate_id));

   pxr::HdDriverVector hd_drivers;
   if (bl_engine->type->flag & RE_USE_GPU_CONTEXT) {
-    hgi = pxr::Hgi::CreatePlatformDefaultHgi();
-    hgi_driver.name = pxr::HgiTokens->renderDriver;
-    hgi_driver.driver = pxr::VtValue(hgi.get());
+    hgi_ = pxr::Hgi::CreatePlatformDefaultHgi();
+    hgi_driver_.name = pxr::HgiTokens->renderDriver;
+    hgi_driver_.driver = pxr::VtValue(hgi_.get());

-    hd_drivers.push_back(&hgi_driver);
+    hd_drivers.push_back(&hgi_driver_);
   }

-  render_index.reset(pxr::HdRenderIndex::New(render_delegate.Get(), hd_drivers));
-  free_camera_delegate = std::make_unique<pxr::HdxFreeCameraSceneDelegate>(
-      render_index.get(), pxr::SdfPath::AbsoluteRootPath().AppendElementString("freeCamera"));
-  render_task_delegate = std::make_unique<RenderTaskDelegate>(
-      render_index.get(), pxr::SdfPath::AbsoluteRootPath().AppendElementString("renderTask"));
-  if (render_delegate->GetRendererDisplayName() == "GL") {
-    simple_light_task_delegate = std::make_unique<SimpleLightTaskDelegate>(
-        render_index.get(),
+  render_index_.reset(pxr::HdRenderIndex::New(render_delegate_.Get(), hd_drivers));
+  free_camera_delegate_ = std::make_unique<pxr::HdxFreeCameraSceneDelegate>(
+      render_index_.get(), pxr::SdfPath::AbsoluteRootPath().AppendElementString("freeCamera"));
+  render_task_delegate_ = std::make_unique<RenderTaskDelegate>(
+      render_index_.get(), pxr::SdfPath::AbsoluteRootPath().AppendElementString("renderTask"));
+  if (render_delegate_->GetRendererDisplayName() == "GL") {
+    simple_light_task_delegate_ = std::make_unique<SimpleLightTaskDelegate>(
+        render_index_.get(),
         pxr::SdfPath::AbsoluteRootPath().AppendElementString("simpleLightTask"));
   }

-  engine = std::make_unique<pxr::HdEngine>();
+  engine_ = std::make_unique<pxr::HdEngine>();
 }

 Engine::~Engine()
 {
   scene_delegate_ = nullptr;
-  render_task_delegate = nullptr;
-  free_camera_delegate = nullptr;
-  simple_light_task_delegate = nullptr;
-  render_index = nullptr;
-  render_delegate = nullptr;
-  engine = nullptr;
-  hgi = nullptr;
+  render_task_delegate_ = nullptr;
+  free_camera_delegate_ = nullptr;
+  simple_light_task_delegate_ = nullptr;
+  render_index_ = nullptr;
+  render_delegate_ = nullptr;
+  engine_ = nullptr;
+  hgi_ = nullptr;
 }

 float Engine::renderer_percent_done()
 {
-  pxr::VtDictionary render_stats = render_delegate->GetRenderStats();
+  pxr::VtDictionary render_stats = render_delegate_->GetRenderStats();
   auto it = render_stats.find("percentDone");
   if (it == render_stats.end()) {
     return 0.0;
@@ -38,18 +38,18 @@ class Engine {
  protected:
   float renderer_percent_done();

-  RenderEngine *bl_engine;
+  RenderEngine *bl_engine_;

-  pxr::HdPluginRenderDelegateUniqueHandle render_delegate;
-  std::unique_ptr<pxr::HdRenderIndex> render_index;
+  pxr::HdPluginRenderDelegateUniqueHandle render_delegate_;
+  std::unique_ptr<pxr::HdRenderIndex> render_index_;
   std::unique_ptr<BlenderSceneDelegate> scene_delegate_;
-  std::unique_ptr<RenderTaskDelegate> render_task_delegate;
-  std::unique_ptr<pxr::HdxFreeCameraSceneDelegate> free_camera_delegate;
-  std::unique_ptr<SimpleLightTaskDelegate> simple_light_task_delegate;
-  std::unique_ptr<pxr::HdEngine> engine;
+  std::unique_ptr<RenderTaskDelegate> render_task_delegate_;
+  std::unique_ptr<pxr::HdxFreeCameraSceneDelegate> free_camera_delegate_;
+  std::unique_ptr<SimpleLightTaskDelegate> simple_light_task_delegate_;
+  std::unique_ptr<pxr::HdEngine> engine_;

-  pxr::HgiUniquePtr hgi;
-  pxr::HdDriver hgi_driver;
+  pxr::HgiUniquePtr hgi_;
+  pxr::HdDriver hgi_driver_;
 };

 } // namespace blender::render::hydra
@@ -17,13 +17,13 @@ void FinalEngine::sync(Depsgraph *depsgraph,
                        pxr::HdRenderSettingsMap &render_settings)
 {
   scene_delegate_ = std::make_unique<BlenderSceneDelegate>(
-      render_index.get(),
+      render_index_.get(),
       pxr::SdfPath::AbsoluteRootPath().AppendElementString("scene"),
       BlenderSceneDelegate::EngineType::FINAL);
   scene_delegate_->populate(depsgraph, context);

   for (auto const &setting : render_settings) {
-    render_delegate->SetRenderSetting(setting.first, setting.second);
+    render_delegate_->SetRenderSetting(setting.first, setting.second);
   }
 }

@@ -49,19 +49,19 @@ void FinalEngine::render(Depsgraph *depsgraph)
   pxr::GfCamera camera =
       CameraData(scene->camera, image_res, pxr::GfVec4f(0, 0, 1, 1)).gf_camera(border);

-  free_camera_delegate->SetCamera(camera);
-  render_task_delegate->set_camera_and_viewport(free_camera_delegate->GetCameraId(),
+  free_camera_delegate_->SetCamera(camera);
+  render_task_delegate_->set_camera_and_viewport(free_camera_delegate_->GetCameraId(),
                                                  pxr::GfVec4d(0, 0, res[0], res[1]));
-  render_task_delegate->set_renderer_aov(pxr::HdAovTokens->color);
-  if (simple_light_task_delegate) {
-    simple_light_task_delegate->set_camera_path(free_camera_delegate->GetCameraId());
+  render_task_delegate_->set_renderer_aov(pxr::HdAovTokens->color);
+  if (simple_light_task_delegate_) {
+    simple_light_task_delegate_->set_camera_path(free_camera_delegate_->GetCameraId());
   }

   pxr::HdTaskSharedPtrVector tasks;
-  if (simple_light_task_delegate) {
-    tasks.push_back(simple_light_task_delegate->get_task());
+  if (simple_light_task_delegate_) {
+    tasks.push_back(simple_light_task_delegate_->get_task());
   }
-  tasks.push_back(render_task_delegate->get_task());
+  tasks.push_back(render_task_delegate_->get_task());

   std::chrono::time_point<std::chrono::steady_clock> time_begin = std::chrono::steady_clock::now(),
                                                      time_current;
|
|||||||
{
|
{
|
||||||
// Release the GIL before calling into hydra, in case any hydra plugins call into python.
|
// Release the GIL before calling into hydra, in case any hydra plugins call into python.
|
||||||
pxr::TF_PY_ALLOW_THREADS_IN_SCOPE();
|
pxr::TF_PY_ALLOW_THREADS_IN_SCOPE();
|
||||||
engine->Execute(render_index.get(), &tasks);
|
engine_->Execute(render_index_.get(), &tasks);
|
||||||
}
|
}
|
||||||
|
|
||||||
while (true) {
|
while (true) {
|
||||||
if (RE_engine_test_break(bl_engine)) {
|
if (RE_engine_test_break(bl_engine_)) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -94,15 +94,15 @@ void FinalEngine::render(Depsgraph *depsgraph)
|
|||||||
"Render Time: " + format_duration(elapsed_time) +
|
"Render Time: " + format_duration(elapsed_time) +
|
||||||
" | Done: " + std::to_string(int(percent_done)) + "%");
|
" | Done: " + std::to_string(int(percent_done)) + "%");
|
||||||
|
|
||||||
if (render_task_delegate->is_converged()) {
|
if (render_task_delegate_->is_converged()) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
render_task_delegate->get_renderer_aov_data(pxr::HdAovTokens->color, pixels.data());
|
render_task_delegate_->get_renderer_aov_data(pxr::HdAovTokens->color, pixels.data());
|
||||||
update_render_result(render_images, layer_name, res[0], res[1]);
|
update_render_result(render_images, layer_name, res[0], res[1]);
|
||||||
}
|
}
|
||||||
|
|
||||||
render_task_delegate->get_renderer_aov_data(pxr::HdAovTokens->color, pixels.data());
|
render_task_delegate_->get_renderer_aov_data(pxr::HdAovTokens->color, pixels.data());
|
||||||
update_render_result(render_images, layer_name, res[0], res[1]);
|
update_render_result(render_images, layer_name, res[0], res[1]);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -124,7 +124,7 @@ void FinalEngine::update_render_result(std::map<std::string, std::vector<float>>
                                        int height)
 {
   RenderResult *result = RE_engine_begin_result(
-      bl_engine, 0, 0, width, height, layer_name.c_str(), nullptr);
+      bl_engine_, 0, 0, width, height, layer_name.c_str(), nullptr);

   /* TODO: only for the first render layer */
   RenderLayer *layer = (RenderLayer *)result->layers.first;
@@ -138,13 +138,13 @@ void FinalEngine::update_render_result(std::map<std::string, std::vector<float>>
            sizeof(float) * pass->rectx * pass->recty * pass->channels);
   }

-  RE_engine_end_result(bl_engine, result, false, false, false);
+  RE_engine_end_result(bl_engine_, result, false, false, false);
 }

 void FinalEngine::notify_status(float progress, const std::string &title, const std::string &info)
 {
-  RE_engine_update_progress(bl_engine, progress);
-  RE_engine_update_stats(bl_engine, title.c_str(), info.c_str());
+  RE_engine_update_progress(bl_engine_, progress);
+  RE_engine_update_stats(bl_engine_, title.c_str(), info.c_str());
 }

 void FinalEngineGL::render(Depsgraph *depsgraph)
@@ -169,21 +169,21 @@ void FinalEngineGL::render(Depsgraph *depsgraph)
   pxr::GfCamera camera =
       CameraData(scene->camera, image_res, pxr::GfVec4f(0, 0, 1, 1)).gf_camera(border);

-  free_camera_delegate->SetCamera(camera);
-  render_task_delegate->set_camera_and_viewport(free_camera_delegate->GetCameraId(),
+  free_camera_delegate_->SetCamera(camera);
+  render_task_delegate_->set_camera_and_viewport(free_camera_delegate_->GetCameraId(),
                                                  pxr::GfVec4d(0, 0, res[0], res[1]));
-  if (simple_light_task_delegate) {
-    simple_light_task_delegate->set_camera_path(free_camera_delegate->GetCameraId());
+  if (simple_light_task_delegate_) {
+    simple_light_task_delegate_->set_camera_path(free_camera_delegate_->GetCameraId());
   }

   pxr::HdTaskSharedPtrVector tasks;
-  if (simple_light_task_delegate) {
+  if (simple_light_task_delegate_) {
    /* TODO: Uncomment this and fix GL error:
       invalid operation, reported from void __cdecl
       pxrInternal_v0_22__pxrReserved__::HgiGLResourceBindings::BindResources(void) */
-    // tasks.push_back(simple_light_task_delegate->get_task());
+    // tasks.push_back(simple_light_task_delegate_->get_task());
   }
-  tasks.push_back(render_task_delegate->get_task());
+  tasks.push_back(render_task_delegate_->get_task());

   std::chrono::time_point<std::chrono::steady_clock> time_begin = std::chrono::steady_clock::now(),
                                                      time_current;
@@ -214,11 +214,11 @@ void FinalEngineGL::render(Depsgraph *depsgraph)
   {
     // Release the GIL before calling into hydra, in case any hydra plugins call into python.
    pxr::TF_PY_ALLOW_THREADS_IN_SCOPE();
-    engine->Execute(render_index.get(), &tasks);
+    engine_->Execute(render_index_.get(), &tasks);
   }

   while (true) {
-    if (RE_engine_test_break(bl_engine)) {
+    if (RE_engine_test_break(bl_engine_)) {
       break;
     }

@@ -232,7 +232,7 @@ void FinalEngineGL::render(Depsgraph *depsgraph)
                   "Render Time: " + format_duration(elapsed_time) +
                       " | Done: " + std::to_string(int(percent_done)) + "%");

-    if (render_task_delegate->is_converged()) {
+    if (render_task_delegate_->is_converged()) {
       break;
     }

@@ -12,6 +12,7 @@ namespace blender::render::hydra {
 class FinalEngine : public Engine {
  public:
   using Engine::Engine;

   virtual void sync(Depsgraph *depsgraph,
                     bContext *context,
                     pxr::HdRenderSettingsMap &render_settings) override;
@@ -25,13 +26,13 @@ class FinalEngine : public Engine {
                             int height);
   void notify_status(float progress, const std::string &title, const std::string &info);

- protected:
   pxr::HdRenderSettingsMap render_settings;
 };

 class FinalEngineGL : public FinalEngine {
  public:
   using FinalEngine::FinalEngine;

   void render(Depsgraph *depsgraph) override;
 };

@@ -11,14 +11,14 @@ namespace blender::render::hydra {

 const double LIFETIME = 180.0;

-std::unique_ptr<PreviewEngine> PreviewEngine::instance;
+std::unique_ptr<PreviewEngine> PreviewEngine::instance_;

 void PreviewEngine::schedule_free()
 {
-  instance->render_delegate->Stop();
+  instance_->render_delegate_->Stop();

   /* Register timer for schedule free PreviewEngine instance */
-  BLI_timer_register((uintptr_t)instance.get(),
+  BLI_timer_register((uintptr_t)instance_.get(),
                      free_instance,
                      nullptr,
                      nullptr,
@@ -28,27 +28,27 @@ void PreviewEngine::schedule_free()

 PreviewEngine *PreviewEngine::get_instance(RenderEngine *bl_engine, const std::string &render_delegate_id)
 {
-  if (!instance) {
-    instance = std::make_unique<PreviewEngine>(bl_engine, render_delegate_id);
+  if (!instance_) {
+    instance_ = std::make_unique<PreviewEngine>(bl_engine, render_delegate_id);
   }
-  if (BLI_timer_is_registered((uintptr_t)instance.get())) {
+  if (BLI_timer_is_registered((uintptr_t)instance_.get())) {
     /* Unregister timer while PreviewEngine is working */
-    BLI_timer_unregister((uintptr_t)instance.get());
+    BLI_timer_unregister((uintptr_t)instance_.get());
   }
-  instance->update(bl_engine, render_delegate_id);
+  instance_->update(bl_engine, render_delegate_id);

-  return instance.get();
+  return instance_.get();
 }

 double PreviewEngine::free_instance(uintptr_t uuid, void *user_data)
 {
-  if (!instance->render_task_delegate->is_converged()) {
+  if (!instance_->render_task_delegate_->is_converged()) {
     /* Restart timer if render isn't completed */
     return LIFETIME;
   }

   CLOG_INFO(LOG_EN, 2, "");
-  instance = nullptr;
+  instance_ = nullptr;
   return -1;
 }

@@ -58,7 +58,7 @@ void PreviewEngine::sync(Depsgraph *depsgraph,
 {
   if (!scene_delegate_) {
     scene_delegate_ = std::make_unique<BlenderSceneDelegate>(
-        render_index.get(),
+        render_index_.get(),
         pxr::SdfPath::AbsoluteRootPath().AppendElementString("scene"),
         BlenderSceneDelegate::EngineType::PREVIEW);
   }
@@ -66,7 +66,7 @@ void PreviewEngine::sync(Depsgraph *depsgraph,
   scene_delegate_->populate(depsgraph, context);

   for (auto const &setting : render_settings) {
-    render_delegate->SetRenderSetting(setting.first, setting.second);
+    render_delegate_->SetRenderSetting(setting.first, setting.second);
   }
 }

@@ -81,45 +81,45 @@ void PreviewEngine::render(Depsgraph *depsgraph)
   pxr::GfCamera camera =
       CameraData(scene->camera, res, pxr::GfVec4f(0, 0, 1, 1)).gf_camera(pxr::GfVec4f(0, 0, 1, 1));

-  free_camera_delegate->SetCamera(camera);
-  render_task_delegate->set_camera_and_viewport(free_camera_delegate->GetCameraId(),
+  free_camera_delegate_->SetCamera(camera);
+  render_task_delegate_->set_camera_and_viewport(free_camera_delegate_->GetCameraId(),
                                                  pxr::GfVec4d(0, 0, res[0], res[1]));
-  render_task_delegate->set_renderer_aov(pxr::HdAovTokens->color);
+  render_task_delegate_->set_renderer_aov(pxr::HdAovTokens->color);

   pxr::HdTaskSharedPtrVector tasks;
-  if (simple_light_task_delegate) {
-    tasks.push_back(simple_light_task_delegate->get_task());
+  if (simple_light_task_delegate_) {
+    tasks.push_back(simple_light_task_delegate_->get_task());
   }
-  tasks.push_back(render_task_delegate->get_task());
+  tasks.push_back(render_task_delegate_->get_task());

   std::vector<float> pixels = std::vector<float>(res[0] * res[1] * 4); // 4 - number of channels

   {
     // Release the GIL before calling into hydra, in case any hydra plugins call into python.
    pxr::TF_PY_ALLOW_THREADS_IN_SCOPE();
-    engine->Execute(render_index.get(), &tasks);
+    engine_->Execute(render_index_.get(), &tasks);
   }

   while (true) {
-    if (RE_engine_test_break(bl_engine)) {
+    if (RE_engine_test_break(bl_engine_)) {
       break;
     }

-    if (render_task_delegate->is_converged()) {
+    if (render_task_delegate_->is_converged()) {
       break;
     }

-    render_task_delegate->get_renderer_aov_data(pxr::HdAovTokens->color, pixels.data());
+    render_task_delegate_->get_renderer_aov_data(pxr::HdAovTokens->color, pixels.data());
     update_render_result(layer_name, res[0], res[1], pixels);
   }

-  render_task_delegate->get_renderer_aov_data(pxr::HdAovTokens->color, pixels.data());
+  render_task_delegate_->get_renderer_aov_data(pxr::HdAovTokens->color, pixels.data());
   update_render_result(layer_name, res[0], res[1], pixels);
 }

 void PreviewEngine::update(RenderEngine *bl_engine, const std::string &render_delegate_id)
 {
-  this->bl_engine = bl_engine;
+  this->bl_engine_ = bl_engine;
   /* TODO: recreate render_delegate when render_delegate_id is changed */
 }

@@ -129,13 +129,13 @@ void PreviewEngine::update_render_result(const std::string &layer_name,
                                          std::vector<float> &pixels)
 {
   RenderResult *result = RE_engine_begin_result(
-      bl_engine, 0, 0, width, height, layer_name.c_str(), nullptr);
+      bl_engine_, 0, 0, width, height, layer_name.c_str(), nullptr);

   RenderLayer *layer = (RenderLayer *)result->layers.first;
   RenderPass *pass = (RenderPass *)layer->passes.first;
   memcpy(pass->rect, pixels.data(), sizeof(float) * pass->rectx * pass->recty * pass->channels);

-  RE_engine_end_result(bl_engine, result, false, false, false);
+  RE_engine_end_result(bl_engine_, result, false, false, false);
 }

 } // namespace blender::render::hydra
@@ -8,22 +8,23 @@
 namespace blender::render::hydra {

 class PreviewEngine : public FinalEngine {
- private:
-  /* Singleton class instance */
-  static std::unique_ptr<PreviewEngine> instance;
-  static double free_instance(uintptr_t uuid, void *user_data);
-
  public:
-  static PreviewEngine *get_instance(RenderEngine *bl_engine, const std::string &render_delegate_id);
+  using FinalEngine::FinalEngine;
+
+  static PreviewEngine *get_instance(RenderEngine *bl_engine,
+                                     const std::string &render_delegate_id);
   static void schedule_free();

-  using FinalEngine::FinalEngine;
   void sync(Depsgraph *depsgraph,
             bContext *context,
             pxr::HdRenderSettingsMap &render_settings) override;
   void render(Depsgraph *depsgraph) override;

  private:
+  /* Singleton class instance */
+  static double free_instance(uintptr_t uuid, void *user_data);
+  static std::unique_ptr<PreviewEngine> instance_;
+
   void update(RenderEngine *bl_engine, const std::string &render_delegate_id);
   void update_render_result(const std::string &layer_name,
                             int width,
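PreviewEngine stays a lazily created singleton that is released by a timer once the render has converged (schedule_free() registers the timer, free_instance() is its callback). A hedged sketch of how a caller is expected to drive it, based only on the methods declared above; the depsgraph, context and render_settings values are assumed to come from the usual render callbacks:

  PreviewEngine *engine = PreviewEngine::get_instance(bl_engine, render_delegate_id);
  engine->sync(depsgraph, context, render_settings);
  engine->render(depsgraph);
  PreviewEngine::schedule_free();  /* frees the instance later via a BLI timer */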
@@ -22,8 +22,8 @@ RenderTaskDelegate::RenderTaskDelegate(pxr::HdRenderIndex *parent_index,
   GetRenderIndex().GetChangeTracker().MarkTaskDirty(render_task_id,
                                                     pxr::HdChangeTracker::DirtyRenderTags);

-  task_params.enableLighting = true;
-  task_params.alphaThreshold = 0.1f;
+  task_params_.enableLighting = true;
+  task_params_.alphaThreshold = 0.1f;
 }

 pxr::SdfPath RenderTaskDelegate::get_task_id() const
@@ -39,7 +39,7 @@ pxr::SdfPath RenderTaskDelegate::get_aov_id(pxr::TfToken const &aov) const
 pxr::VtValue RenderTaskDelegate::Get(pxr::SdfPath const &id, pxr::TfToken const &key)
 {
   if (key == pxr::HdTokens->params) {
-    return pxr::VtValue(task_params);
+    return pxr::VtValue(task_params_);
   }
   if (key == pxr::HdTokens->collection) {
     pxr::HdRprimCollection rprim_collection(pxr::HdTokens->geometry,
@@ -54,7 +54,7 @@ pxr::VtValue RenderTaskDelegate::Get(pxr::SdfPath const &id, pxr::TfToken const

 pxr::HdRenderBufferDescriptor RenderTaskDelegate::GetRenderBufferDescriptor(pxr::SdfPath const &id)
 {
-  return buffer_descriptors[id];
+  return buffer_descriptors_[id];
 }

 pxr::TfTokenVector RenderTaskDelegate::GetTaskRenderTags(pxr::SdfPath const &task_id)
@@ -73,14 +73,14 @@ void RenderTaskDelegate::set_renderer_aov(pxr::TfToken const &aov)
   pxr::HdAovDescriptor aov_desc = GetRenderIndex().GetRenderDelegate()->GetDefaultAovDescriptor(
       aov);
   pxr::HdRenderBufferDescriptor desc(
-      pxr::GfVec3i(task_params.viewport[2], task_params.viewport[3], 1),
+      pxr::GfVec3i(task_params_.viewport[2], task_params_.viewport[3], 1),
       aov_desc.format,
       aov_desc.multiSampled);
   pxr::SdfPath buffer_id = get_aov_id(aov);

-  if (buffer_descriptors.find(buffer_id) == buffer_descriptors.end()) {
+  if (buffer_descriptors_.find(buffer_id) == buffer_descriptors_.end()) {
     GetRenderIndex().InsertBprim(pxr::HdPrimTypeTokens->renderBuffer, this, buffer_id);
-    buffer_descriptors[buffer_id] = desc;
+    buffer_descriptors_[buffer_id] = desc;
     GetRenderIndex().GetChangeTracker().MarkBprimDirty(buffer_id,
                                                        pxr::HdRenderBuffer::DirtyDescription);

@@ -88,13 +88,13 @@ void RenderTaskDelegate::set_renderer_aov(pxr::TfToken const &aov)
     binding.aovName = aov;
     binding.renderBufferId = buffer_id;
     binding.aovSettings = aov_desc.aovSettings;
-    task_params.aovBindings.push_back(binding);
+    task_params_.aovBindings.push_back(binding);

     GetRenderIndex().GetChangeTracker().MarkTaskDirty(get_task_id(),
                                                       pxr::HdChangeTracker::DirtyParams);
   }
-  else if (buffer_descriptors[buffer_id] != desc) {
-    buffer_descriptors[buffer_id] = desc;
+  else if (buffer_descriptors_[buffer_id] != desc) {
+    buffer_descriptors_[buffer_id] = desc;
     GetRenderIndex().GetChangeTracker().MarkBprimDirty(buffer_id,
                                                        pxr::HdRenderBuffer::DirtyDescription);
   }
@@ -124,9 +124,9 @@ pxr::HdTaskSharedPtr RenderTaskDelegate::get_task()
 void RenderTaskDelegate::set_camera_and_viewport(pxr::SdfPath const &camera_id,
                                                  pxr::GfVec4d const &viewport)
 {
-  if (task_params.viewport != viewport || task_params.camera != camera_id) {
-    task_params.viewport = viewport;
-    task_params.camera = camera_id;
+  if (task_params_.viewport != viewport || task_params_.camera != camera_id) {
+    task_params_.viewport = viewport;
+    task_params_.camera = camera_id;
     GetRenderIndex().GetChangeTracker().MarkTaskDirty(get_task_id(),
                                                       pxr::HdChangeTracker::DirtyParams);
   }
@@ -16,10 +16,6 @@ class RenderTaskDelegate : public pxr::HdSceneDelegate {
   pxr::SdfPath get_task_id() const;
   pxr::SdfPath get_aov_id(pxr::TfToken const &aov) const;

-  pxr::VtValue Get(pxr::SdfPath const &id, pxr::TfToken const &key) override;
-  pxr::HdRenderBufferDescriptor GetRenderBufferDescriptor(pxr::SdfPath const &id) override;
-  pxr::TfTokenVector GetTaskRenderTags(pxr::SdfPath const &taskId) override;
-
   bool is_converged();
   void set_renderer_aov(pxr::TfToken const &aovId);
   pxr::HdRenderBuffer *get_renderer_aov(pxr::TfToken const &id);
@@ -28,10 +24,15 @@ class RenderTaskDelegate : public pxr::HdSceneDelegate {
   pxr::HdTaskSharedPtr get_task();
   void set_camera_and_viewport(pxr::SdfPath const &cameraId, pxr::GfVec4d const &viewport);

+  /* Delegate methods */
+  pxr::VtValue Get(pxr::SdfPath const &id, pxr::TfToken const &key) override;
+  pxr::HdRenderBufferDescriptor GetRenderBufferDescriptor(pxr::SdfPath const &id) override;
+  pxr::TfTokenVector GetTaskRenderTags(pxr::SdfPath const &taskId) override;
+
  private:
-  pxr::HdxRenderTaskParams task_params;
+  pxr::HdxRenderTaskParams task_params_;
   pxr::TfHashMap<pxr::SdfPath, pxr::HdRenderBufferDescriptor, pxr::SdfPath::Hash>
-      buffer_descriptors;
+      buffer_descriptors_;
 };

 } // namespace blender::render::hydra
|
@ -27,13 +27,13 @@ pxr::HdTaskSharedPtr SimpleLightTaskDelegate::get_task()
|
|||||||
|
|
||||||
void SimpleLightTaskDelegate::set_camera_path(pxr::SdfPath const &camera_path)
|
void SimpleLightTaskDelegate::set_camera_path(pxr::SdfPath const &camera_path)
|
||||||
{
|
{
|
||||||
task_params.cameraPath = camera_path;
|
task_params_.cameraPath = camera_path;
|
||||||
}
|
}
|
||||||
|
|
||||||
pxr::VtValue SimpleLightTaskDelegate::Get(pxr::SdfPath const &id, pxr::TfToken const &key)
|
pxr::VtValue SimpleLightTaskDelegate::Get(pxr::SdfPath const &id, pxr::TfToken const &key)
|
||||||
{
|
{
|
||||||
if (key == pxr::HdTokens->params) {
|
if (key == pxr::HdTokens->params) {
|
||||||
return pxr::VtValue(task_params);
|
return pxr::VtValue(task_params_);
|
||||||
}
|
}
|
||||||
return pxr::VtValue();
|
return pxr::VtValue();
|
||||||
}
|
}
|
||||||
|
@@ -18,10 +18,11 @@ class SimpleLightTaskDelegate : public pxr::HdSceneDelegate {
   pxr::HdTaskSharedPtr get_task();
   void set_camera_path(pxr::SdfPath const &);

+  /* Delegate methods */
   pxr::VtValue Get(pxr::SdfPath const &id, pxr::TfToken const &key) override;

  private:
-  pxr::HdxSimpleLightTaskParams task_params;
+  pxr::HdxSimpleLightTaskParams task_params_;
 };

 } // namespace blender::render::hydra
|
@ -132,7 +132,7 @@ pxr::GfCamera ViewSettings::gf_camera()
|
|||||||
(float)border[3] / screen_height));
|
(float)border[3] / screen_height));
|
||||||
}
|
}
|
||||||
|
|
||||||
DrawTexture::DrawTexture() : texture(nullptr), width(0), height(0), channels(4)
|
DrawTexture::DrawTexture() : texture_(nullptr), width_(0), height_(0), channels_(4)
|
||||||
{
|
{
|
||||||
float coords[8] = {0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0};
|
float coords[8] = {0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0};
|
||||||
|
|
||||||
@@ -144,72 +144,72 @@ DrawTexture::DrawTexture() : texture(nullptr), width(0), height(0), channels(4)
   GPU_vertbuf_attr_fill(vbo, 0, coords);
   GPU_vertbuf_attr_fill(vbo, 1, coords);
 
-  batch = GPU_batch_create_ex(GPU_PRIM_TRI_FAN, vbo, nullptr, GPU_BATCH_OWNS_VBO);
+  batch_ = GPU_batch_create_ex(GPU_PRIM_TRI_FAN, vbo, nullptr, GPU_BATCH_OWNS_VBO);
 }
 
 DrawTexture::~DrawTexture()
 {
-  if (texture) {
+  if (texture_) {
     free();
   }
-  GPU_batch_discard(batch);
+  GPU_batch_discard(batch_);
 }
 
 void DrawTexture::set_buffer(pxr::HdRenderBuffer *buffer)
 {
-  if (!texture) {
+  if (!texture_) {
     create(buffer);
     return;
   }
 
-  if (width != buffer->GetWidth() || height != buffer->GetHeight()) {
+  if (width_ != buffer->GetWidth() || height_ != buffer->GetHeight()) {
     free();
     create(buffer);
     return;
   }
 
   void *data = buffer->Map();
-  GPU_texture_update(texture, GPU_DATA_FLOAT, data);
+  GPU_texture_update(texture_, GPU_DATA_FLOAT, data);
   buffer->Unmap();
 }
 
 void DrawTexture::create(pxr::HdRenderBuffer *buffer)
 {
-  width = buffer->GetWidth();
-  height = buffer->GetHeight();
-  channels = pxr::HdGetComponentCount(buffer->GetFormat());
+  width_ = buffer->GetWidth();
+  height_ = buffer->GetHeight();
+  channels_ = pxr::HdGetComponentCount(buffer->GetFormat());
 
   void *data = buffer->Map();
-  texture = GPU_texture_create_2d("texHydraRenderViewport",
-                                  width,
-                                  height,
+  texture_ = GPU_texture_create_2d("texHydraRenderViewport",
+                                   width_,
+                                   height_,
                                    1,
                                    GPU_RGBA16F,
                                    GPU_TEXTURE_USAGE_GENERAL,
                                    (float *)data);
   buffer->Unmap();
 
-  GPU_texture_filter_mode(texture, true);
-  GPU_texture_mipmap_mode(texture, true, true);
+  GPU_texture_filter_mode(texture_, true);
+  GPU_texture_mipmap_mode(texture_, true, true);
 }
 
 void DrawTexture::free()
 {
-  GPU_texture_free(texture);
-  texture = nullptr;
+  GPU_texture_free(texture_);
+  texture_ = nullptr;
 }
 
 void DrawTexture::draw(GPUShader *shader, float x, float y)
 {
   int slot = GPU_shader_get_sampler_binding(shader, "image");
-  GPU_texture_bind(texture, slot);
+  GPU_texture_bind(texture_, slot);
   GPU_shader_uniform_1i(shader, "image", slot);
 
   GPU_matrix_push();
   GPU_matrix_translate_2f(x, y);
-  GPU_matrix_scale_2f(width, height);
-  GPU_batch_set_shader(batch, shader);
-  GPU_batch_draw(batch);
+  GPU_matrix_scale_2f(width_, height_);
+  GPU_batch_set_shader(batch_, shader);
+  GPU_batch_draw(batch_);
   GPU_matrix_pop();
 }
 
@@ -219,14 +219,14 @@ void ViewportEngine::sync(Depsgraph *depsgraph,
 {
   if (!scene_delegate_) {
     scene_delegate_ = std::make_unique<BlenderSceneDelegate>(
-        render_index.get(),
+        render_index_.get(),
         pxr::SdfPath::AbsoluteRootPath().AppendElementString("scene"),
         BlenderSceneDelegate::EngineType::VIEWPORT);
   }
   scene_delegate_->populate(depsgraph, context);
 
   for (auto const &setting : render_settings) {
-    render_delegate->SetRenderSetting(setting.first, setting.second);
+    render_delegate_->SetRenderSetting(setting.first, setting.second);
   }
 }
 
@@ -238,41 +238,41 @@ void ViewportEngine::render(Depsgraph *depsgraph, bContext *context)
   };
 
   pxr::GfCamera gf_camera = view_settings.gf_camera();
-  free_camera_delegate->SetCamera(gf_camera);
-  render_task_delegate->set_camera_and_viewport(free_camera_delegate->GetCameraId(),
+  free_camera_delegate_->SetCamera(gf_camera);
+  render_task_delegate_->set_camera_and_viewport(free_camera_delegate_->GetCameraId(),
                                                  pxr::GfVec4d(view_settings.border[0],
                                                               view_settings.border[1],
                                                               view_settings.border[2],
                                                               view_settings.border[3]));
-  if (simple_light_task_delegate) {
-    simple_light_task_delegate->set_camera_path(free_camera_delegate->GetCameraId());
+  if (simple_light_task_delegate_) {
+    simple_light_task_delegate_->set_camera_path(free_camera_delegate_->GetCameraId());
   }
 
-  if ((bl_engine->type->flag & RE_USE_GPU_CONTEXT) == 0) {
-    render_task_delegate->set_renderer_aov(pxr::HdAovTokens->color);
+  if ((bl_engine_->type->flag & RE_USE_GPU_CONTEXT) == 0) {
+    render_task_delegate_->set_renderer_aov(pxr::HdAovTokens->color);
   }
 
   if (renderer_percent_done() == 0.0f) {
-    time_begin = std::chrono::steady_clock::now();
+    time_begin_ = std::chrono::steady_clock::now();
   }
 
   GPUShader *shader = GPU_shader_get_builtin_shader(GPU_SHADER_3D_IMAGE);
   GPU_shader_bind(shader);
 
   pxr::HdTaskSharedPtrVector tasks;
-  if (simple_light_task_delegate) {
-    tasks.push_back(simple_light_task_delegate->get_task());
+  if (simple_light_task_delegate_) {
+    tasks.push_back(simple_light_task_delegate_->get_task());
   }
-  tasks.push_back(render_task_delegate->get_task());
+  tasks.push_back(render_task_delegate_->get_task());
 
   {
     /* Release the GIL before calling into hydra, in case any hydra plugins call into python. */
     pxr::TF_PY_ALLOW_THREADS_IN_SCOPE();
-    engine->Execute(render_index.get(), &tasks);
+    engine_->Execute(render_index_.get(), &tasks);
 
-    if ((bl_engine->type->flag & RE_USE_GPU_CONTEXT) == 0) {
-      draw_texture.set_buffer(render_task_delegate->get_renderer_aov(pxr::HdAovTokens->color));
-      draw_texture.draw(shader, view_settings.border[0], view_settings.border[1]);
+    if ((bl_engine_->type->flag & RE_USE_GPU_CONTEXT) == 0) {
+      draw_texture_.set_buffer(render_task_delegate_->get_renderer_aov(pxr::HdAovTokens->color));
+      draw_texture_.draw(shader, view_settings.border[0], view_settings.border[1]);
     }
   }
 
@@ -281,15 +281,15 @@ void ViewportEngine::render(Depsgraph *depsgraph, bContext *context)
   std::chrono::time_point<std::chrono::steady_clock> time_current =
       std::chrono::steady_clock::now();
   std::chrono::milliseconds elapsed_time = std::chrono::duration_cast<std::chrono::milliseconds>(
-      time_current - time_begin);
+      time_current - time_begin_);
 
   std::string formatted_time = format_duration(elapsed_time);
 
-  if (!render_task_delegate->is_converged()) {
+  if (!render_task_delegate_->is_converged()) {
     notify_status("Time: " + formatted_time +
                       " | Done: " + std::to_string(int(renderer_percent_done())) + "%",
                   "Render");
-    bl_engine->flag |= RE_ENGINE_DO_DRAW;
+    bl_engine_->flag |= RE_ENGINE_DO_DRAW;
   }
   else {
     notify_status(("Time: " + formatted_time).c_str(), "Rendering Done");
@@ -303,7 +303,7 @@ void ViewportEngine::render(Depsgraph *depsgraph)
 
 void ViewportEngine::notify_status(const std::string &info, const std::string &status)
 {
-  RE_engine_update_stats(bl_engine, status.c_str(), info.c_str());
+  RE_engine_update_stats(bl_engine_, status.c_str(), info.c_str());
 }
 
 } // namespace blender::render::hydra
@@ -19,6 +19,7 @@ class DrawTexture {
  public:
   DrawTexture();
   ~DrawTexture();
 
   void set_buffer(pxr::HdRenderBuffer *buffer);
   void draw(GPUShader *shader, float x, float y);
 
@@ -26,14 +27,15 @@ class DrawTexture {
   void create(pxr::HdRenderBuffer *buffer);
   void free();
 
-  GPUTexture *texture;
-  GPUBatch *batch;
-  int width, height, channels;
+  GPUTexture *texture_;
+  GPUBatch *batch_;
+  int width_, height_, channels_;
 };
 
 class ViewportEngine : public Engine {
  public:
   using Engine::Engine;
 
   void sync(Depsgraph *depsgraph,
             bContext *context,
             pxr::HdRenderSettingsMap &render_settings) override;
@@ -44,9 +46,9 @@ class ViewportEngine : public Engine {
   void notify_status(const std::string &title, const std::string &info);
 
  private:
-  std::chrono::time_point<std::chrono::steady_clock> time_begin;
+  std::chrono::time_point<std::chrono::steady_clock> time_begin_;
 
-  DrawTexture draw_texture;
+  DrawTexture draw_texture_;
 };
 
 } // namespace blender::render::hydra
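For reference, a minimal usage sketch of the DrawTexture declared above, as it is driven each viewport redraw; this is not part of the patch, uses only identifiers that already appear in the hunks in this diff, and assumes Blender's GPU module and the Hydra task delegates are set up as in ViewportEngine::render():

  /* Bind the builtin image shader, upload Hydra's color AOV into the GPU
   * texture, and draw it as a textured quad at the viewport border origin. */
  GPUShader *shader = GPU_shader_get_builtin_shader(GPU_SHADER_3D_IMAGE);
  GPU_shader_bind(shader);

  draw_texture_.set_buffer(render_task_delegate_->get_renderer_aov(pxr::HdAovTokens->color));
  draw_texture_.draw(shader, view_settings.border[0], view_settings.border[1]);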