WIP: Brush assets project #106303

Draft
Julian Eisel wants to merge 352 commits from brush-assets-project into main

4 changed files with 240 additions and 156 deletions
Showing only changes of commit 6454d989fd

COM_DefocusNode.cc

@@ -2,9 +2,14 @@
*
* SPDX-License-Identifier: GPL-2.0-or-later */
#include "COM_DefocusNode.h"
#include "DNA_scene_types.h"
#include "BKE_camera.h"
#include "COM_BokehImageOperation.h"
#include "COM_ConvertDepthToRadiusOperation.h"
#include "COM_DefocusNode.h"
#include "COM_FastGaussianBlurOperation.h"
#include "COM_GammaCorrectOperation.h"
#include "COM_MathBaseOperation.h"
#include "COM_SetValueOperation.h"
@@ -22,8 +27,6 @@ void DefocusNode::convert_to_operations(NodeConverter &converter,
{
const bNode *node = this->get_bnode();
const NodeDefocus *data = (const NodeDefocus *)node->storage;
Scene *scene = node->id ? (Scene *)node->id : context.get_scene();
Object *camob = scene ? scene->camera : nullptr;
NodeOperation *radius_operation;
if (data->no_zbuf) {
@@ -48,22 +51,31 @@ void DefocusNode::convert_to_operations(NodeConverter &converter,
}
else {
ConvertDepthToRadiusOperation *radius_op = new ConvertDepthToRadiusOperation();
radius_op->set_camera_object(camob);
radius_op->setf_stop(data->fstop);
radius_op->set_max_radius(data->maxblur);
radius_op->set_data(data);
radius_op->set_scene(get_scene(context));
converter.add_operation(radius_op);
converter.map_input_socket(get_input_socket(1), radius_op->get_input_socket(0));
converter.map_input_socket(get_input_socket(0), radius_op->get_input_socket(1));
FastGaussianBlurValueOperation *blur = new FastGaussianBlurValueOperation();
/* maintain close pixels so far Z values don't bleed into the foreground */
blur->set_overlay(FAST_GAUSS_OVERLAY_MIN);
converter.add_operation(blur);
GaussianXBlurOperation *blur_x_operation = new GaussianXBlurOperation();
converter.add_operation(blur_x_operation);
converter.add_link(radius_op->get_output_socket(), blur_x_operation->get_input_socket(0));
converter.add_link(radius_op->get_output_socket(0), blur->get_input_socket(0));
radius_op->set_post_blur(blur);
GaussianYBlurOperation *blur_y_operation = new GaussianYBlurOperation();
converter.add_operation(blur_y_operation);
converter.add_link(blur_x_operation->get_output_socket(),
blur_y_operation->get_input_socket(0));
radius_operation = blur;
MathMinimumOperation *minimum_operation = new MathMinimumOperation();
converter.add_operation(minimum_operation);
converter.add_link(blur_y_operation->get_output_socket(),
minimum_operation->get_input_socket(0));
converter.add_link(radius_op->get_output_socket(), minimum_operation->get_input_socket(1));
radius_op->set_blur_x_operation(blur_x_operation);
radius_op->set_blur_y_operation(blur_y_operation);
radius_operation = minimum_operation;
}
NodeBokehImage *bokehdata = new NodeBokehImage();
@@ -82,30 +94,14 @@ void DefocusNode::convert_to_operations(NodeConverter &converter,
bokeh->delete_data_on_finish();
converter.add_operation(bokeh);
#ifdef COM_DEFOCUS_SEARCH
InverseSearchRadiusOperation *search = new InverseSearchRadiusOperation();
search->set_max_blur(data->maxblur);
converter.add_operation(search);
converter.add_link(radius_operation->get_output_socket(0), search->get_input_socket(0));
#endif
VariableSizeBokehBlurOperation *operation = new VariableSizeBokehBlurOperation();
if (data->preview) {
operation->set_quality(eCompositorQuality::Low);
}
else {
operation->set_quality(context.get_quality());
}
operation->set_quality(eCompositorQuality::High);
operation->set_max_blur(data->maxblur);
operation->set_threshold(data->bthresh);
converter.add_operation(operation);
converter.add_link(bokeh->get_output_socket(), operation->get_input_socket(1));
converter.add_link(radius_operation->get_output_socket(), operation->get_input_socket(2));
#ifdef COM_DEFOCUS_SEARCH
converter.add_link(search->get_output_socket(), operation->get_input_socket(3));
#endif
if (data->gamco) {
GammaCorrectOperation *correct = new GammaCorrectOperation();
@@ -124,4 +120,9 @@ void DefocusNode::convert_to_operations(NodeConverter &converter,
}
}
const Scene *DefocusNode::get_scene(const CompositorContext &context) const
{
return get_bnode()->id ? reinterpret_cast<Scene *>(get_bnode()->id) : context.get_scene();
}
} // namespace blender::compositor
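
Note on the new wiring above: the computed radius now passes through separate Gaussian X and Y blur operations, and the blurred result is combined with the unblurred radius by a per-pixel minimum, in place of the old FastGaussianBlurValueOperation with FAST_GAUSS_OVERLAY_MIN. A minimal standalone sketch of that idea in plain C++ (not the compositor API; a box filter stands in for the Gaussian passes):

/* Standalone illustration (not Blender's compositor API): blur a radius buffer with a
 * separable filter and clamp each pixel to its original value, mirroring the
 * GaussianX/Y blur + MathMinimum wiring above. */
#include <algorithm>
#include <vector>

static std::vector<float> blur_radius_keep_minimum(const std::vector<float> &radius,
                                                   const int width,
                                                   const int height,
                                                   const int size)
{
  std::vector<float> horizontal(radius.size(), 0.0f);
  std::vector<float> result(radius.size(), 0.0f);
  /* Horizontal pass. */
  for (int y = 0; y < height; y++) {
    for (int x = 0; x < width; x++) {
      float sum = 0.0f;
      for (int dx = -size; dx <= size; dx++) {
        sum += radius[y * width + std::clamp(x + dx, 0, width - 1)];
      }
      horizontal[y * width + x] = sum / (2 * size + 1);
    }
  }
  /* Vertical pass, then take the minimum with the original radius. The minimum keeps
   * the blurred radius from exceeding the original value, so large background radii
   * do not bleed into in-focus foreground pixels. */
  for (int y = 0; y < height; y++) {
    for (int x = 0; x < width; x++) {
      float sum = 0.0f;
      for (int dy = -size; dy <= size; dy++) {
        sum += horizontal[std::clamp(y + dy, 0, height - 1) * width + x];
      }
      result[y * width + x] = std::min(sum / (2 * size + 1), radius[y * width + x]);
    }
  }
  return result;
}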

COM_DefocusNode.h

@@ -4,6 +4,8 @@
#pragma once
#include "DNA_scene_types.h"
#include "COM_Node.h"
namespace blender::compositor {
@@ -17,6 +19,7 @@ class DefocusNode : public Node {
DefocusNode(bNode *editor_node);
void convert_to_operations(NodeConverter &converter,
const CompositorContext &context) const override;
const Scene *get_scene(const CompositorContext &context) const;
};
} // namespace blender::compositor

COM_ConvertDepthToRadiusOperation.cc

@@ -2,130 +2,213 @@
*
* SPDX-License-Identifier: GPL-2.0-or-later */
#include "COM_ConvertDepthToRadiusOperation.h"
#include "BKE_camera.h"
#include "BLI_math_base.hh"
#include "DNA_camera_types.h"
#include "DNA_node_types.h"
#include "DNA_object_types.h"
#include "DNA_scene_types.h"
#include "BKE_camera.h"
#include "COM_ConvertDepthToRadiusOperation.h"
namespace blender::compositor {
ConvertDepthToRadiusOperation::ConvertDepthToRadiusOperation()
{
this->add_input_socket(DataType::Value);
this->add_input_socket(DataType::Color);
this->add_output_socket(DataType::Value);
input_operation_ = nullptr;
f_stop_ = 128.0f;
camera_object_ = nullptr;
max_radius_ = 32.0f;
blur_post_operation_ = nullptr;
flags_.can_be_constant = true;
}
float ConvertDepthToRadiusOperation::determine_focal_distance()
{
if (camera_object_ && camera_object_->type == OB_CAMERA) {
Camera *camera = (Camera *)camera_object_->data;
cam_lens_ = camera->lens;
return BKE_camera_object_dof_distance(camera_object_);
}
return 10.0f;
}
void ConvertDepthToRadiusOperation::init_execution()
{
float cam_sensor = DEFAULT_SENSOR_WIDTH;
Camera *camera = nullptr;
depth_input_operation_ = this->get_input_socket_reader(0);
image_input_operation_ = this->get_input_socket_reader(1);
if (camera_object_ && camera_object_->type == OB_CAMERA) {
camera = (Camera *)camera_object_->data;
cam_sensor = BKE_camera_sensor_size(camera->sensor_fit, camera->sensor_x, camera->sensor_y);
}
f_stop = get_f_stop();
focal_length = get_focal_length();
max_radius = data_->maxblur;
pixels_per_meter = compute_pixels_per_meter();
distance_to_image_of_focus = compute_distance_to_image_of_focus();
input_operation_ = this->get_input_socket_reader(0);
float focal_distance = determine_focal_distance();
if (focal_distance == 0.0f) {
focal_distance = 1e10f; /* If the DOF is 0.0 then set it to be far away. */
}
inverse_focal_distance_ = 1.0f / focal_distance;
aspect_ = (this->get_width() > this->get_height()) ?
(this->get_height() / float(this->get_width())) :
(this->get_width() / float(this->get_height()));
aperture_ = 0.5f * (cam_lens_ / (aspect_ * cam_sensor)) / f_stop_;
const float minsz = MIN2(get_width(), get_height());
/* Equal to: `aspect * MIN2(img->x, img->y) / tan(0.5f * fov)`. */
dof_sp_ = minsz / ((cam_sensor / 2.0f) / cam_lens_);
NodeBlurData blur_data;
blur_data.sizex = compute_maximum_defocus_radius();
blur_data.sizey = blur_data.sizex;
blur_data.relative = false;
blur_data.filtertype = R_FILTER_GAUSS;
if (blur_post_operation_) {
blur_post_operation_->set_sigma(std::min(aperture_ * 128.0f, max_radius_));
}
blur_x_operation_->set_data(&blur_data);
blur_x_operation_->set_size(1.0f);
blur_y_operation_->set_data(&blur_data);
blur_y_operation_->set_size(1.0f);
}
/* Given a depth texture, compute the radius of the circle of confusion in pixels based on equation
* (8) of the paper:
*
* Potmesil, Michael, and Indranil Chakravarty. "A lens and aperture camera model for synthetic
* image generation." ACM SIGGRAPH Computer Graphics 15.3 (1981): 297-305. */
void ConvertDepthToRadiusOperation::execute_pixel_sampled(float output[4],
float x,
float y,
PixelSampler sampler)
{
float input_value[4];
float z;
float radius;
input_operation_->read_sampled(input_value, x, y, sampler);
z = input_value[0];
if (z != 0.0f) {
float iZ = (1.0f / z);
depth_input_operation_->read_sampled(input_value, x, y, sampler);
const float depth = input_value[0];
/* bug #6656 part 2b, do not re-scale. */
#if 0
bcrad = 0.5f * fabs(aperture * (dof_sp * (cam_invfdist - iZ) - 1.0f));
/* Scale crad back to original maximum and blend. */
crad->rect[px] = bcrad + wts->rect[px] * (scf * crad->rect[px] - bcrad);
#endif
radius = 0.5f * fabsf(aperture_ * (dof_sp_ * (inverse_focal_distance_ - iZ) - 1.0f));
/* 'bug' #6615, limit minimum radius to 1 pixel,
* not really a solution, but somewhat mitigates the problem. */
if (radius < 0.0f) {
radius = 0.0f;
}
if (radius > max_radius_) {
radius = max_radius_;
}
output[0] = radius;
}
else {
output[0] = 0.0f;
}
/* Compute `Vu` in equation (7). */
const float distance_to_image_of_object = (focal_length * depth) / (depth - focal_length);
/* Compute C in equation (8). Notice that the last multiplier was included in the absolute since
* it is negative when the object distance is less than the focal length, as noted in equation
* (7). */
float diameter = abs((distance_to_image_of_object - distance_to_image_of_focus) *
(focal_length / (f_stop * distance_to_image_of_object)));
/* The diameter is in meters, so multiply by the pixels per meter. */
float radius = (diameter / 2.0f) * pixels_per_meter;
output[0] = math::min(max_radius, radius);
}
void ConvertDepthToRadiusOperation::deinit_execution()
{
input_operation_ = nullptr;
depth_input_operation_ = nullptr;
}
/* Given a depth texture, compute the radius of the circle of confusion in pixels based on equation
* (8) of the paper:
*
* Potmesil, Michael, and Indranil Chakravarty. "A lens and aperture camera model for synthetic
* image generation." ACM SIGGRAPH Computer Graphics 15.3 (1981): 297-305. */
void ConvertDepthToRadiusOperation::update_memory_buffer_partial(MemoryBuffer *output,
const rcti &area,
Span<MemoryBuffer *> inputs)
{
for (BuffersIterator<float> it = output->iterate_with(inputs, area); !it.is_end(); ++it) {
const float z = *it.in(0);
if (z == 0.0f) {
*it.out = 0.0f;
continue;
}
const float depth = *it.in(0);
const float inv_z = (1.0f / z);
/* Compute `Vu` in equation (7). */
const float distance_to_image_of_object = (focal_length * depth) / (depth - focal_length);
/* Bug #6656 part 2b, do not re-scale. */
#if 0
bcrad = 0.5f * fabs(aperture * (dof_sp * (cam_invfdist - iZ) - 1.0f));
/* Scale crad back to original maximum and blend:
* `crad->rect[px] = bcrad + wts->rect[px] * (scf * crad->rect[px] - bcrad);` */
#endif
const float radius = 0.5f *
fabsf(aperture_ * (dof_sp_ * (inverse_focal_distance_ - inv_z) - 1.0f));
/* Bug #6615, limit minimum radius to 1 pixel,
* not really a solution, but somewhat mitigates the problem. */
*it.out = CLAMPIS(radius, 0.0f, max_radius_);
/* Compute C in equation (8). Notice that the last multiplier was included in the absolute
* since it is negative when the object distance is less than the focal length, as noted in
* equation (7). */
float diameter = abs((distance_to_image_of_object - distance_to_image_of_focus) *
(focal_length / (f_stop * distance_to_image_of_object)));
/* The diameter is in meters, so multiply by the pixels per meter. */
float radius = (diameter / 2.0f) * pixels_per_meter;
*it.out = math::min(max_radius, radius);
}
}
/* Computes the maximum possible defocus radius in pixels. */
float ConvertDepthToRadiusOperation::compute_maximum_defocus_radius() const
{
const float maximum_diameter = compute_maximum_diameter_of_circle_of_confusion();
const float pixels_per_meter = compute_pixels_per_meter();
const float radius = (maximum_diameter / 2.0f) * pixels_per_meter;
return math::min(radius, data_->maxblur);
}
/* Computes the diameter of the circle of confusion at infinity. This computes the limit in
* figure (5) of the paper:
*
* Potmesil, Michael, and Indranil Chakravarty. "A lens and aperture camera model for synthetic
* image generation." ACM SIGGRAPH Computer Graphics 15.3 (1981): 297-305.
*
* Notice that the diameter is asymmetric around the focus point, and we are computing the
* limiting diameter at infinity, while another limiting diameter exist at zero distance from the
* lens. This is a limitation of the implementation, as it assumes far defocusing only. */
float ConvertDepthToRadiusOperation::compute_maximum_diameter_of_circle_of_confusion() const
{
const float f_stop = get_f_stop();
const float focal_length = get_focal_length();
const float distance_to_image_of_focus = compute_distance_to_image_of_focus();
return math::abs((distance_to_image_of_focus / (f_stop * focal_length)) -
(focal_length / f_stop));
}
/* Computes the distance in meters to the image of the focus point across a lens of the specified
* focal length. This computes `Vp` in equation (7) of the paper:
*
* Potmesil, Michael, and Indranil Chakravarty. "A lens and aperture camera model for synthetic
* image generation." ACM SIGGRAPH Computer Graphics 15.3 (1981): 297-305. */
float ConvertDepthToRadiusOperation::compute_distance_to_image_of_focus() const
{
const float focal_length = get_focal_length();
const float focus_distance = compute_focus_distance();
return (focal_length * focus_distance) / (focus_distance - focal_length);
}
/* Returns the focal length in meters. Fallback to 50 mm in case of an invalid camera. Ensure a
* minimum of 1e-6. */
float ConvertDepthToRadiusOperation::get_focal_length() const
{
const Camera *camera = get_camera();
return camera ? math::max(1e-6f, camera->lens / 1000.0f) : 50.0f / 1000.0f;
}
/* Computes the distance to the point that is completely in focus. */
float ConvertDepthToRadiusOperation::compute_focus_distance() const
{
return BKE_camera_object_dof_distance(get_camera_object());
}
/* Computes the number of pixels per meter of the sensor size. This is essentially the resolution
* over the sensor size, using the sensor fit axis. Fallback to DEFAULT_SENSOR_WIDTH in case of
* an invalid camera. Note that the stored sensor size is in millimeter, so convert to meters. */
float ConvertDepthToRadiusOperation::compute_pixels_per_meter() const
{
const int2 size = int2(image_input_operation_->get_width(),
image_input_operation_->get_height());
const Camera *camera = get_camera();
const float default_value = size.x / (DEFAULT_SENSOR_WIDTH / 1000.0f);
if (!camera) {
return default_value;
}
switch (camera->sensor_fit) {
case CAMERA_SENSOR_FIT_HOR:
return size.x / (camera->sensor_x / 1000.0f);
case CAMERA_SENSOR_FIT_VERT:
return size.y / (camera->sensor_y / 1000.0f);
case CAMERA_SENSOR_FIT_AUTO: {
return size.x > size.y ? size.x / (camera->sensor_x / 1000.0f) :
size.y / (camera->sensor_y / 1000.0f);
}
default:
break;
}
return default_value;
}
/* Returns the f-stop number. Fallback to 1e-3 for zero f-stop. */
float ConvertDepthToRadiusOperation::get_f_stop() const
{
return math::max(1e-3f, data_->fstop);
}
const Camera *ConvertDepthToRadiusOperation::get_camera() const
{
const Object *camera_object = get_camera_object();
if (!camera_object || camera_object->type != OB_CAMERA) {
return nullptr;
}
return reinterpret_cast<Camera *>(camera_object->data);
}
const Object *ConvertDepthToRadiusOperation::get_camera_object() const
{
return scene_->camera;
}
} // namespace blender::compositor
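
For reference, the relations the rewritten ConvertDepthToRadiusOperation implements, written out with the code's names (F = focal_length and P = the focus distance, both in meters; n = f_stop; U = the per-pixel depth), following equations (7) and (8) of the cited Potmesil & Chakravarty paper as used in the code above:

\[ V_u = \frac{F\,U}{U - F}, \qquad V_p = \frac{F\,P}{P - F} \]
\[ C = \left| (V_u - V_p)\,\frac{F}{n\,V_u} \right|, \qquad r = \min\!\left(\frac{C}{2}\cdot\text{pixels per meter},\ r_{\max}\right) \]

Here V_u and V_p are the image-side distances of the object and of the focus plane, C is the circle-of-confusion diameter in meters, and r is the radius in pixels written to the output.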

COM_ConvertDepthToRadiusOperation.h

@@ -4,75 +4,72 @@
#pragma once
#include "COM_FastGaussianBlurOperation.h"
#include "COM_GaussianXBlurOperation.h"
#include "COM_GaussianYBlurOperation.h"
#include "COM_MultiThreadedOperation.h"
#include "DNA_object_types.h"
namespace blender::compositor {
/**
* this program converts an input color to an output value.
* it assumes we are in sRGB color space.
*/
class ConvertDepthToRadiusOperation : public MultiThreadedOperation {
private:
/**
* Cached reference to the input_program
*/
SocketReader *input_operation_;
float f_stop_;
float aspect_;
float max_radius_;
float inverse_focal_distance_;
float aperture_;
float cam_lens_;
float dof_sp_;
Object *camera_object_;
SocketReader *depth_input_operation_;
SocketReader *image_input_operation_;
FastGaussianBlurValueOperation *blur_post_operation_;
const Scene *scene_;
const NodeDefocus *data_;
float f_stop;
float max_radius;
float focal_length;
float pixels_per_meter;
float distance_to_image_of_focus;
GaussianXBlurOperation *blur_x_operation_;
GaussianYBlurOperation *blur_y_operation_;
public:
/**
* Default constructor
*/
ConvertDepthToRadiusOperation();
/**
* The inner loop of this operation.
*/
void execute_pixel_sampled(float output[4], float x, float y, PixelSampler sampler) override;
/**
* Initialize the execution
*/
void init_execution() override;
/**
* Deinitialize the execution
*/
void deinit_execution() override;
void setf_stop(float f_stop)
void set_data(const NodeDefocus *data)
{
f_stop_ = f_stop;
data_ = data;
}
void set_max_radius(float max_radius)
void set_scene(const Scene *scene)
{
max_radius_ = max_radius;
scene_ = scene;
}
void set_camera_object(Object *camera)
void set_blur_x_operation(GaussianXBlurOperation *blur_x_operation)
{
camera_object_ = camera;
blur_x_operation_ = blur_x_operation;
}
float determine_focal_distance();
void set_post_blur(FastGaussianBlurValueOperation *operation)
void set_blur_y_operation(GaussianYBlurOperation *blur_y_operation)
{
blur_post_operation_ = operation;
blur_y_operation_ = blur_y_operation;
}
void update_memory_buffer_partial(MemoryBuffer *output,
const rcti &area,
Span<MemoryBuffer *> inputs) override;
private:
float compute_maximum_defocus_radius() const;
float compute_maximum_diameter_of_circle_of_confusion() const;
float compute_distance_to_image_of_focus() const;
float get_focal_length() const;
float compute_focus_distance() const;
float compute_pixels_per_meter() const;
float get_f_stop() const;
const Camera *get_camera() const;
const Object *get_camera_object() const;
};
} // namespace blender::compositor
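
As a self-contained sketch of how the pieces above combine per pixel (a hypothetical helper, not part of this patch; all distances in meters, mirroring update_memory_buffer_partial):

/* Hypothetical standalone helper, not part of the patch: computes the defocus radius in
 * pixels for one depth sample, following the same thin-lens model as the code above. */
#include <algorithm>
#include <cmath>

static float defocus_radius_in_pixels(const float depth,
                                      const float focal_length,
                                      const float f_stop,
                                      const float focus_distance,
                                      const float pixels_per_meter,
                                      const float max_radius)
{
  /* Image-side distances of the object and of the focus plane (thin-lens equation). */
  const float image_of_object = (focal_length * depth) / (depth - focal_length);
  const float image_of_focus = (focal_length * focus_distance) /
                               (focus_distance - focal_length);
  /* Circle-of-confusion diameter, then converted to a pixel radius and clamped. */
  const float diameter = std::abs((image_of_object - image_of_focus) *
                                  (focal_length / (f_stop * image_of_object)));
  const float radius = (diameter / 2.0f) * pixels_per_meter;
  return std::min(radius, max_radius);
}

For example, with a 50 mm lens at f/2.8 focused at 2 m, a point 10 m away rendered at 1920 px across a 36 mm sensor comes out at roughly a 10 px radius.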