Compositor: Port redesigned Defocus node to CPU #117174

Merged
Omar Emara merged 2 commits from OmarEmaraDev/blender:refactor-cpu-defocus-node into main 2024-01-17 13:19:19 +01:00
4 changed files with 240 additions and 156 deletions

COM_DefocusNode.cc

@@ -2,9 +2,14 @@
  *
  * SPDX-License-Identifier: GPL-2.0-or-later */
 
-#include "COM_DefocusNode.h"
+#include "DNA_scene_types.h"
+
+#include "BKE_camera.h"
+
 #include "COM_BokehImageOperation.h"
 #include "COM_ConvertDepthToRadiusOperation.h"
+#include "COM_DefocusNode.h"
+#include "COM_FastGaussianBlurOperation.h"
 #include "COM_GammaCorrectOperation.h"
 #include "COM_MathBaseOperation.h"
 #include "COM_SetValueOperation.h"
@@ -22,8 +27,6 @@ void DefocusNode::convert_to_operations(NodeConverter &converter,
 {
   const bNode *node = this->get_bnode();
   const NodeDefocus *data = (const NodeDefocus *)node->storage;
-  Scene *scene = node->id ? (Scene *)node->id : context.get_scene();
-  Object *camob = scene ? scene->camera : nullptr;
 
   NodeOperation *radius_operation;
   if (data->no_zbuf) {
@@ -48,22 +51,31 @@ void DefocusNode::convert_to_operations(NodeConverter &converter,
   }
   else {
     ConvertDepthToRadiusOperation *radius_op = new ConvertDepthToRadiusOperation();
-    radius_op->set_camera_object(camob);
-    radius_op->setf_stop(data->fstop);
-    radius_op->set_max_radius(data->maxblur);
+    radius_op->set_data(data);
+    radius_op->set_scene(get_scene(context));
     converter.add_operation(radius_op);
     converter.map_input_socket(get_input_socket(1), radius_op->get_input_socket(0));
+    converter.map_input_socket(get_input_socket(0), radius_op->get_input_socket(1));
 
-    FastGaussianBlurValueOperation *blur = new FastGaussianBlurValueOperation();
-    /* maintain close pixels so far Z values don't bleed into the foreground */
-    blur->set_overlay(FAST_GAUSS_OVERLAY_MIN);
-    converter.add_operation(blur);
-    converter.add_link(radius_op->get_output_socket(0), blur->get_input_socket(0));
-    radius_op->set_post_blur(blur);
+    GaussianXBlurOperation *blur_x_operation = new GaussianXBlurOperation();
+    converter.add_operation(blur_x_operation);
+    converter.add_link(radius_op->get_output_socket(), blur_x_operation->get_input_socket(0));
 
-    radius_operation = blur;
+    GaussianYBlurOperation *blur_y_operation = new GaussianYBlurOperation();
+    converter.add_operation(blur_y_operation);
+    converter.add_link(blur_x_operation->get_output_socket(),
+                       blur_y_operation->get_input_socket(0));
+
+    MathMinimumOperation *minimum_operation = new MathMinimumOperation();
+    converter.add_operation(minimum_operation);
+    converter.add_link(blur_y_operation->get_output_socket(),
+                       minimum_operation->get_input_socket(0));
+    converter.add_link(radius_op->get_output_socket(), minimum_operation->get_input_socket(1));
+
+    radius_op->set_blur_x_operation(blur_x_operation);
+    radius_op->set_blur_y_operation(blur_y_operation);
+
+    radius_operation = minimum_operation;
   }
 
   NodeBokehImage *bokehdata = new NodeBokehImage();
@@ -82,30 +94,14 @@ void DefocusNode::convert_to_operations(NodeConverter &converter,
   bokeh->delete_data_on_finish();
   converter.add_operation(bokeh);
 
-#ifdef COM_DEFOCUS_SEARCH
-  InverseSearchRadiusOperation *search = new InverseSearchRadiusOperation();
-  search->set_max_blur(data->maxblur);
-  converter.add_operation(search);
-  converter.add_link(radius_operation->get_output_socket(0), search->get_input_socket(0));
-#endif
-
   VariableSizeBokehBlurOperation *operation = new VariableSizeBokehBlurOperation();
-  if (data->preview) {
-    operation->set_quality(eCompositorQuality::Low);
-  }
-  else {
-    operation->set_quality(context.get_quality());
-  }
+  operation->set_quality(eCompositorQuality::High);
   operation->set_max_blur(data->maxblur);
   operation->set_threshold(data->bthresh);
   converter.add_operation(operation);
 
   converter.add_link(bokeh->get_output_socket(), operation->get_input_socket(1));
   converter.add_link(radius_operation->get_output_socket(), operation->get_input_socket(2));
-#ifdef COM_DEFOCUS_SEARCH
-  converter.add_link(search->get_output_socket(), operation->get_input_socket(3));
-#endif
 
   if (data->gamco) {
     GammaCorrectOperation *correct = new GammaCorrectOperation();
@@ -124,4 +120,9 @@ void DefocusNode::convert_to_operations(NodeConverter &converter,
   }
 }
 
+const Scene *DefocusNode::get_scene(const CompositorContext &context) const
+{
+  return get_bnode()->id ? reinterpret_cast<Scene *>(get_bnode()->id) : context.get_scene();
+}
+
 }  // namespace blender::compositor
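
Editor's note, not part of the patch: the graph above feeds the computed radius map through a Gaussian X/Y blur and then takes the per-pixel minimum with the unblurred radius before it reaches VariableSizeBokehBlurOperation. Below is a minimal standalone C++ sketch of that smooth-then-clamp step; the function and values are illustrative only and do not use the Blender API.

```cpp
#include <algorithm>
#include <cstdio>
#include <vector>

/* Blur a 1D radius map with a small normalized Gaussian kernel (sigma ~= 1),
 * clamping samples to the edges so the kernel weight stays normalized. */
static std::vector<float> gaussian_blur_1d(const std::vector<float> &src)
{
  const float kernel[5] = {0.06136f, 0.24477f, 0.38774f, 0.24477f, 0.06136f};
  std::vector<float> dst(src.size(), 0.0f);
  for (int i = 0; i < int(src.size()); i++) {
    for (int k = -2; k <= 2; k++) {
      const int j = std::clamp(i + k, 0, int(src.size()) - 1);
      dst[i] += src[j] * kernel[k + 2];
    }
  }
  return dst;
}

int main()
{
  /* Two defocused pixels (radius 8) next to in-focus pixels (radius 0). */
  const std::vector<float> radius = {0.0f, 0.0f, 8.0f, 8.0f, 0.0f, 0.0f};
  const std::vector<float> smoothed = gaussian_blur_1d(radius);
  for (int i = 0; i < int(radius.size()); i++) {
    /* The minimum keeps in-focus pixels at radius 0 while the defocused side
     * is softened toward the boundary. */
    const float result = std::min(smoothed[i], radius[i]);
    printf("%d: min(%.3f, %.3f) = %.3f\n", i, smoothed[i], radius[i], result);
  }
  return 0;
}
```

Because the minimum can only ever reduce a pixel's radius, the smoothing softens the transition into in-focus regions without letting large radii bleed onto sharp pixels, which appears to play the same role as the FAST_GAUSS_OVERLAY_MIN overlay in the removed code.
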

COM_DefocusNode.h

@@ -4,6 +4,8 @@
 
 #pragma once
 
+#include "DNA_scene_types.h"
+
 #include "COM_Node.h"
 
 namespace blender::compositor {
@@ -17,6 +19,7 @@ class DefocusNode : public Node {
   DefocusNode(bNode *editor_node);
   void convert_to_operations(NodeConverter &converter,
                              const CompositorContext &context) const override;
+  const Scene *get_scene(const CompositorContext &context) const;
 };
 
 }  // namespace blender::compositor

COM_ConvertDepthToRadiusOperation.cc

@@ -2,130 +2,213 @@
  *
  * SPDX-License-Identifier: GPL-2.0-or-later */
 
-#include "COM_ConvertDepthToRadiusOperation.h"
-#include "BKE_camera.h"
+#include "BLI_math_base.hh"
+
 #include "DNA_camera_types.h"
+#include "DNA_node_types.h"
+#include "DNA_object_types.h"
+#include "DNA_scene_types.h"
+
+#include "BKE_camera.h"
+
+#include "COM_ConvertDepthToRadiusOperation.h"
 
 namespace blender::compositor {
 
 ConvertDepthToRadiusOperation::ConvertDepthToRadiusOperation()
 {
   this->add_input_socket(DataType::Value);
+  this->add_input_socket(DataType::Color);
   this->add_output_socket(DataType::Value);
-  input_operation_ = nullptr;
-  f_stop_ = 128.0f;
-  camera_object_ = nullptr;
-  max_radius_ = 32.0f;
-  blur_post_operation_ = nullptr;
   flags_.can_be_constant = true;
 }
 
-float ConvertDepthToRadiusOperation::determine_focal_distance()
-{
-  if (camera_object_ && camera_object_->type == OB_CAMERA) {
-    Camera *camera = (Camera *)camera_object_->data;
-    cam_lens_ = camera->lens;
-    return BKE_camera_object_dof_distance(camera_object_);
-  }
-  return 10.0f;
-}
-
 void ConvertDepthToRadiusOperation::init_execution()
 {
-  float cam_sensor = DEFAULT_SENSOR_WIDTH;
-  Camera *camera = nullptr;
-  if (camera_object_ && camera_object_->type == OB_CAMERA) {
-    camera = (Camera *)camera_object_->data;
-    cam_sensor = BKE_camera_sensor_size(camera->sensor_fit, camera->sensor_x, camera->sensor_y);
-  }
-
-  input_operation_ = this->get_input_socket_reader(0);
-  float focal_distance = determine_focal_distance();
-  if (focal_distance == 0.0f) {
-    focal_distance = 1e10f; /* If the DOF is 0.0 then set it to be far away. */
-  }
-  inverse_focal_distance_ = 1.0f / focal_distance;
-  aspect_ = (this->get_width() > this->get_height()) ?
-                (this->get_height() / float(this->get_width())) :
-                (this->get_width() / float(this->get_height()));
-  aperture_ = 0.5f * (cam_lens_ / (aspect_ * cam_sensor)) / f_stop_;
-  const float minsz = MIN2(get_width(), get_height());
-  /* Equal to: `aspect * MIN2(img->x, img->y) / tan(0.5f * fov)`. */
-  dof_sp_ = minsz / ((cam_sensor / 2.0f) / cam_lens_);
-
-  if (blur_post_operation_) {
-    blur_post_operation_->set_sigma(std::min(aperture_ * 128.0f, max_radius_));
-  }
+  depth_input_operation_ = this->get_input_socket_reader(0);
+  image_input_operation_ = this->get_input_socket_reader(1);
+  f_stop = get_f_stop();
+  focal_length = get_focal_length();
+  max_radius = data_->maxblur;
+  pixels_per_meter = compute_pixels_per_meter();
+  distance_to_image_of_focus = compute_distance_to_image_of_focus();
+
+  NodeBlurData blur_data;
+  blur_data.sizex = compute_maximum_defocus_radius();
+  blur_data.sizey = blur_data.sizex;
+  blur_data.relative = false;
+  blur_data.filtertype = R_FILTER_GAUSS;
+
+  blur_x_operation_->set_data(&blur_data);
+  blur_x_operation_->set_size(1.0f);
+  blur_y_operation_->set_data(&blur_data);
+  blur_y_operation_->set_size(1.0f);
 }
 
+/* Given a depth texture, compute the radius of the circle of confusion in pixels based on equation
+ * (8) of the paper:
+ *
+ *   Potmesil, Michael, and Indranil Chakravarty. "A lens and aperture camera model for synthetic
+ *   image generation." ACM SIGGRAPH Computer Graphics 15.3 (1981): 297-305. */
 void ConvertDepthToRadiusOperation::execute_pixel_sampled(float output[4],
                                                           float x,
                                                           float y,
                                                           PixelSampler sampler)
 {
   float input_value[4];
-  float z;
-  float radius;
-  input_operation_->read_sampled(input_value, x, y, sampler);
-  z = input_value[0];
-  if (z != 0.0f) {
-    float iZ = (1.0f / z);
-
-    /* bug #6656 part 2b, do not re-scale. */
-#if 0
-    bcrad = 0.5f * fabs(aperture * (dof_sp * (cam_invfdist - iZ) - 1.0f));
-    /* Scale crad back to original maximum and blend. */
-    crad->rect[px] = bcrad + wts->rect[px] * (scf * crad->rect[px] - bcrad);
-#endif
-    radius = 0.5f * fabsf(aperture_ * (dof_sp_ * (inverse_focal_distance_ - iZ) - 1.0f));
-    /* 'bug' #6615, limit minimum radius to 1 pixel,
-     * not really a solution, but somewhat mitigates the problem. */
-    if (radius < 0.0f) {
-      radius = 0.0f;
-    }
-    if (radius > max_radius_) {
-      radius = max_radius_;
-    }
-    output[0] = radius;
-  }
-  else {
-    output[0] = 0.0f;
-  }
+  depth_input_operation_->read_sampled(input_value, x, y, sampler);
+  const float depth = input_value[0];
+
+  /* Compute `Vu` in equation (7). */
+  const float distance_to_image_of_object = (focal_length * depth) / (depth - focal_length);
+
+  /* Compute C in equation (8). Notice that the last multiplier was included in the absolute since
+   * it is negative when the object distance is less than the focal length, as noted in equation
+   * (7). */
+  float diameter = abs((distance_to_image_of_object - distance_to_image_of_focus) *
+                       (focal_length / (f_stop * distance_to_image_of_object)));
+
+  /* The diameter is in meters, so multiply by the pixels per meter. */
+  float radius = (diameter / 2.0f) * pixels_per_meter;
+
+  output[0] = math::min(max_radius, radius);
 }
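
Editor's note, not part of the patch: for reference, the quantities above map onto the paper's equations as follows, with $U$ the pixel depth, $P$ the focus distance, $f$ the focal length and $F$ the f-stop:

$$V_u = \frac{f\,U}{U - f}, \qquad V_p = \frac{f\,P}{P - f}, \qquad C = \left| (V_u - V_p)\,\frac{f}{F\,V_u} \right|$$

The code then converts the diameter $C$ from meters to pixels and halves it, clamping to the node's maximum blur: $r = \min\!\left(\text{maxblur},\ \tfrac{C}{2}\cdot\text{pixels per meter}\right)$.
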
 
 void ConvertDepthToRadiusOperation::deinit_execution()
 {
-  input_operation_ = nullptr;
+  depth_input_operation_ = nullptr;
 }
 
+/* Given a depth texture, compute the radius of the circle of confusion in pixels based on equation
+ * (8) of the paper:
+ *
+ *   Potmesil, Michael, and Indranil Chakravarty. "A lens and aperture camera model for synthetic
+ *   image generation." ACM SIGGRAPH Computer Graphics 15.3 (1981): 297-305. */
 void ConvertDepthToRadiusOperation::update_memory_buffer_partial(MemoryBuffer *output,
                                                                  const rcti &area,
                                                                  Span<MemoryBuffer *> inputs)
 {
   for (BuffersIterator<float> it = output->iterate_with(inputs, area); !it.is_end(); ++it) {
-    const float z = *it.in(0);
-    if (z == 0.0f) {
-      *it.out = 0.0f;
-      continue;
-    }
-
-    const float inv_z = (1.0f / z);
-
-    /* Bug #6656 part 2b, do not re-scale. */
-#if 0
-    bcrad = 0.5f * fabs(aperture * (dof_sp * (cam_invfdist - iZ) - 1.0f));
-    /* Scale crad back to original maximum and blend:
-     * `crad->rect[px] = bcrad + wts->rect[px] * (scf * crad->rect[px] - bcrad);` */
-#endif
-    const float radius = 0.5f *
-                         fabsf(aperture_ * (dof_sp_ * (inverse_focal_distance_ - inv_z) - 1.0f));
-    /* Bug #6615, limit minimum radius to 1 pixel,
-     * not really a solution, but somewhat mitigates the problem. */
-    *it.out = CLAMPIS(radius, 0.0f, max_radius_);
+    const float depth = *it.in(0);
+
+    /* Compute `Vu` in equation (7). */
+    const float distance_to_image_of_object = (focal_length * depth) / (depth - focal_length);
+
+    /* Compute C in equation (8). Notice that the last multiplier was included in the absolute
+     * since it is negative when the object distance is less than the focal length, as noted in
+     * equation (7). */
+    float diameter = abs((distance_to_image_of_object - distance_to_image_of_focus) *
+                         (focal_length / (f_stop * distance_to_image_of_object)));
+
+    /* The diameter is in meters, so multiply by the pixels per meter. */
+    float radius = (diameter / 2.0f) * pixels_per_meter;
+
+    *it.out = math::min(max_radius, radius);
   }
 }
 
+/* Computes the maximum possible defocus radius in pixels. */
+float ConvertDepthToRadiusOperation::compute_maximum_defocus_radius() const
+{
+  const float maximum_diameter = compute_maximum_diameter_of_circle_of_confusion();
+  const float pixels_per_meter = compute_pixels_per_meter();
+  const float radius = (maximum_diameter / 2.0f) * pixels_per_meter;
+
+  return math::min(radius, data_->maxblur);
+}
+
+/* Computes the diameter of the circle of confusion at infinity. This computes the limit in
+ * figure (5) of the paper:
+ *
+ *   Potmesil, Michael, and Indranil Chakravarty. "A lens and aperture camera model for synthetic
+ *   image generation." ACM SIGGRAPH Computer Graphics 15.3 (1981): 297-305.
+ *
+ * Notice that the diameter is asymmetric around the focus point, and we are computing the
+ * limiting diameter at infinity, while another limiting diameter exist at zero distance from the
+ * lens. This is a limitation of the implementation, as it assumes far defocusing only. */
+float ConvertDepthToRadiusOperation::compute_maximum_diameter_of_circle_of_confusion() const
+{
+  const float f_stop = get_f_stop();
+  const float focal_length = get_focal_length();
+  const float distance_to_image_of_focus = compute_distance_to_image_of_focus();
+  return math::abs((distance_to_image_of_focus / (f_stop * focal_length)) -
+                   (focal_length / f_stop));
+}
+
+/* Computes the distance in meters to the image of the focus point across a lens of the specified
+ * focal length. This computes `Vp` in equation (7) of the paper:
+ *
+ *   Potmesil, Michael, and Indranil Chakravarty. "A lens and aperture camera model for synthetic
+ *   image generation." ACM SIGGRAPH Computer Graphics 15.3 (1981): 297-305. */
+float ConvertDepthToRadiusOperation::compute_distance_to_image_of_focus() const
+{
+  const float focal_length = get_focal_length();
+  const float focus_distance = compute_focus_distance();
+
+  return (focal_length * focus_distance) / (focus_distance - focal_length);
+}
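
Editor's note, not part of the patch: the return expression above is just the thin-lens equation solved for the image distance,

$$\frac{1}{f} = \frac{1}{P} + \frac{1}{V_p} \;\Longrightarrow\; V_p = \frac{f\,P}{P - f},$$

with $P$ the focus distance returned by `compute_focus_distance()` and $f$ the focal length.
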
 
+/* Returns the focal length in meters. Fallback to 50 mm in case of an invalid camera. Ensure a
+ * minimum of 1e-6. */
+float ConvertDepthToRadiusOperation::get_focal_length() const
+{
+  const Camera *camera = get_camera();
+  return camera ? math::max(1e-6f, camera->lens / 1000.0f) : 50.0f / 1000.0f;
+}
+
+/* Computes the distance to the point that is completely in focus. */
+float ConvertDepthToRadiusOperation::compute_focus_distance() const
+{
+  return BKE_camera_object_dof_distance(get_camera_object());
+}
+
+/* Computes the number of pixels per meter of the sensor size. This is essentially the resolution
+ * over the sensor size, using the sensor fit axis. Fallback to DEFAULT_SENSOR_WIDTH in case of
+ * an invalid camera. Note that the stored sensor size is in millimeter, so convert to meters. */
+float ConvertDepthToRadiusOperation::compute_pixels_per_meter() const
+{
+  const int2 size = int2(image_input_operation_->get_width(),
+                         image_input_operation_->get_height());
+  const Camera *camera = get_camera();
+  const float default_value = size.x / (DEFAULT_SENSOR_WIDTH / 1000.0f);
+  if (!camera) {
+    return default_value;
+  }
+
+  switch (camera->sensor_fit) {
+    case CAMERA_SENSOR_FIT_HOR:
+      return size.x / (camera->sensor_x / 1000.0f);
+    case CAMERA_SENSOR_FIT_VERT:
+      return size.y / (camera->sensor_y / 1000.0f);
+    case CAMERA_SENSOR_FIT_AUTO: {
+      return size.x > size.y ? size.x / (camera->sensor_x / 1000.0f) :
+                               size.y / (camera->sensor_y / 1000.0f);
+    }
+    default:
+      break;
+  }
+
+  return default_value;
+}
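
Editor's note, not part of the patch, as a worked example with assumed values: with horizontal sensor fit, a 36 mm sensor and a 1920 pixel wide render give

$$\frac{1920}{36 / 1000} \approx 53333\ \text{pixels per meter},$$

so a circle of confusion 1 mm across on the sensor corresponds to a diameter of about 53 pixels.
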
 
+/* Returns the f-stop number. Fallback to 1e-3 for zero f-stop. */
+float ConvertDepthToRadiusOperation::get_f_stop() const
+{
+  return math::max(1e-3f, data_->fstop);
+}
+
+const Camera *ConvertDepthToRadiusOperation::get_camera() const
+{
+  const Object *camera_object = get_camera_object();
+  if (!camera_object || camera_object->type != OB_CAMERA) {
+    return nullptr;
+  }
+
+  return reinterpret_cast<Camera *>(camera_object->data);
+}
+
+const Object *ConvertDepthToRadiusOperation::get_camera_object() const
+{
+  return scene_->camera;
+}
+
 }  // namespace blender::compositor

COM_ConvertDepthToRadiusOperation.h

@@ -4,75 +4,72 @@
 
 #pragma once
 
-#include "COM_FastGaussianBlurOperation.h"
+#include "COM_GaussianXBlurOperation.h"
+#include "COM_GaussianYBlurOperation.h"
 #include "COM_MultiThreadedOperation.h"
-#include "DNA_object_types.h"
 
 namespace blender::compositor {
 
-/**
- * this program converts an input color to an output value.
- * it assumes we are in sRGB color space.
- */
 class ConvertDepthToRadiusOperation : public MultiThreadedOperation {
  private:
-  /**
-   * Cached reference to the input_program
-   */
-  SocketReader *input_operation_;
-  float f_stop_;
-  float aspect_;
-  float max_radius_;
-  float inverse_focal_distance_;
-  float aperture_;
-  float cam_lens_;
-  float dof_sp_;
-  Object *camera_object_;
+  SocketReader *depth_input_operation_;
+  SocketReader *image_input_operation_;
 
-  FastGaussianBlurValueOperation *blur_post_operation_;
+  const Scene *scene_;
+  const NodeDefocus *data_;
+
+  float f_stop;
+  float max_radius;
+  float focal_length;
+  float pixels_per_meter;
+  float distance_to_image_of_focus;
+
+  GaussianXBlurOperation *blur_x_operation_;
+  GaussianYBlurOperation *blur_y_operation_;
 
  public:
-  /**
-   * Default constructor
-   */
   ConvertDepthToRadiusOperation();
 
-  /**
-   * The inner loop of this operation.
-   */
   void execute_pixel_sampled(float output[4], float x, float y, PixelSampler sampler) override;
 
-  /**
-   * Initialize the execution
-   */
   void init_execution() override;
 
-  /**
-   * Deinitialize the execution
-   */
   void deinit_execution() override;
 
-  void setf_stop(float f_stop)
+  void set_data(const NodeDefocus *data)
   {
-    f_stop_ = f_stop;
+    data_ = data;
   }
 
-  void set_max_radius(float max_radius)
+  void set_scene(const Scene *scene)
   {
-    max_radius_ = max_radius;
+    scene_ = scene;
   }
 
-  void set_camera_object(Object *camera)
+  void set_blur_x_operation(GaussianXBlurOperation *blur_x_operation)
   {
-    camera_object_ = camera;
+    blur_x_operation_ = blur_x_operation;
   }
 
-  float determine_focal_distance();
-
-  void set_post_blur(FastGaussianBlurValueOperation *operation)
+  void set_blur_y_operation(GaussianYBlurOperation *blur_y_operation)
   {
-    blur_post_operation_ = operation;
+    blur_y_operation_ = blur_y_operation;
   }
 
   void update_memory_buffer_partial(MemoryBuffer *output,
                                     const rcti &area,
                                     Span<MemoryBuffer *> inputs) override;
+
+ private:
+  float compute_maximum_defocus_radius() const;
+  float compute_maximum_diameter_of_circle_of_confusion() const;
+  float compute_distance_to_image_of_focus() const;
+  float get_focal_length() const;
+  float compute_focus_distance() const;
+  float compute_pixels_per_meter() const;
+  float get_f_stop() const;
+  const Camera *get_camera() const;
+  const Object *get_camera_object() const;
 };
 
 }  // namespace blender::compositor