MacOS: Enable support for EDR rendering #105662

Merged
Brecht Van Lommel merged 26 commits from Jason-Fielder/blender:macos_EDR_support into main 2023-08-09 14:25:23 +02:00
14 changed files with 49 additions and 13 deletions
Showing only changes of commit 14cd9d6bd9

View File

@@ -55,6 +55,8 @@ class RENDER_PT_color_management(RenderButtonsPanel, Panel):
'BLENDER_WORKBENCH_NEXT'}
def draw(self, context):
import gpu
layout = self.layout
layout.use_property_split = True
layout.use_property_decorate = False # No animation.
@@ -75,7 +77,9 @@ class RENDER_PT_color_management(RenderButtonsPanel, Panel):
col = flow.column()
col.prop(view, "exposure")
col.prop(view, "gamma")
col.prop(view, "use_hdr_view")
if gpu.capabilities.hdr_support_get():
brecht marked this conversation as resolved Outdated

It's a bit of a hack as we can't automatically check for this in general, but I think graying it out for Filmic will help avoid some confusion:

```
if gpu.capabilities.hdr_support_get():
    sub = col.row()
    sub.active = view.view_transform != "Filmic"
    sub.prop(view, "use_hdr_view")
```
col.prop(view, "use_hdr_view")
col.separator()

View File

@@ -6680,9 +6680,6 @@ void uiTemplateColormanagedViewSettings(uiLayout *layout,
uiItemR(col, &view_transform_ptr, "exposure", 0, nullptr, ICON_NONE);
uiItemR(col, &view_transform_ptr, "gamma", 0, nullptr, ICON_NONE);
col = uiLayoutColumn(layout, false);
uiItemR(col, &view_transform_ptr, "use_hdr_view", 0, nullptr, ICON_NONE);
col = uiLayoutColumn(layout, false);
uiItemR(col, &view_transform_ptr, "use_curve_mapping", 0, nullptr, ICON_NONE);
if (view_settings->flag & COLORMANAGE_VIEW_USE_CURVES) {

View File

@@ -74,6 +74,7 @@
#include "BIF_glutil.h"
#include "GPU_capabilities.h"
fclem marked this conversation as resolved Outdated

Unneeded change

#include "GPU_shader.h"
#include "RE_engine.h"
@@ -678,7 +679,8 @@ static bool ed_preview_draw_rect(ScrArea *area, int split, int first, rcti *rect
/* Use floating point texture for material preview if High Dynamic Range enabled. */
brecht marked this conversation as resolved Outdated

In main it now always draws a float buffer, so this change can be removed.

Scene *scene = RE_GetScene(re);
if (scene && ((scene->view_settings.flag & COLORMANAGE_VIEW_USE_HDR) != 0)) {
if (GPU_HDR_support() && scene &&
((scene->view_settings.flag & COLORMANAGE_VIEW_USE_HDR) != 0)) {
float *rect_float = static_cast<float *>(MEM_mallocN(
rres.rectx * rres.recty * sizeof(float) * 4, "ed_preview_draw_rect_float"));
float fx = rect->xmin + offx;

View File

@@ -62,6 +62,7 @@
#include "GPU_batch.h"
#include "GPU_batch_presets.h"
#include "GPU_capabilities.h"
#include "GPU_framebuffer.h"
#include "GPU_immediate.h"
#include "GPU_immediate_util.h"
@@ -1890,7 +1891,7 @@ ImBuf *ED_view3d_draw_offscreen_imbuf(Depsgraph *depsgraph,
/* Determine desired offscreen format depending on HDR availability. */
bool use_hdr = false;
if (scene && ((scene->view_settings.flag & COLORMANAGE_VIEW_USE_HDR) != 0)) {
use_hdr = true;
use_hdr = GPU_HDR_support();
}
eGPUTextureFormat desired_format = (use_hdr) ? GPU_RGBA16F : GPU_RGBA8;

View File

@@ -52,6 +52,7 @@ bool GPU_compute_shader_support(void);
bool GPU_shader_storage_buffer_objects_support(void);
bool GPU_shader_image_load_store_support(void);
bool GPU_shader_draw_parameters_support(void);
bool GPU_HDR_support(void);
brecht marked this conversation as resolved Outdated

Would use GPU_hdr_support even if the style guide doesn't require it

bool GPU_mem_stats_supported(void);
void GPU_mem_stats_get(int *totalmem, int *freemem);

View File

@@ -182,6 +182,11 @@ bool GPU_shader_draw_parameters_support()
return GCaps.shader_draw_parameters_support;
}
bool GPU_HDR_support()
{
return GCaps.hdr_viewport_support;
}
int GPU_max_shader_storage_buffer_bindings()
{
return GCaps.max_shader_storage_buffer_bindings;

View File

@@ -48,6 +48,7 @@ struct GPUCapabilities {
bool shader_image_load_store_support = false;
bool shader_draw_parameters_support = false;
bool transform_feedback_support = false;
bool hdr_viewport_support = false;
/* OpenGL related workarounds. */
bool mip_render_workaround = false;

View File

@@ -428,6 +428,8 @@ static void gpu_viewport_draw_colormanaged(GPUViewport *viewport,
GPUTexture *color_overlay = viewport->color_overlay_tx[view];
bool use_ocio = false;
bool use_hdr = GPU_HDR_support() &&
((viewport->view_settings.flag & COLORMANAGE_VIEW_USE_HDR) != 0);
if (viewport->do_color_management && display_colorspace) {
/* During the binding process the last used VertexFormat is tested and can assert as it is not
@@ -455,8 +457,7 @@ static void gpu_viewport_draw_colormanaged(GPUViewport *viewport,
GPU_batch_program_set_builtin(batch, GPU_SHADER_2D_IMAGE_OVERLAYS_MERGE);
GPU_batch_uniform_1i(batch, "overlay", do_overlay_merge);
GPU_batch_uniform_1i(batch, "display_transform", display_colorspace);
GPU_batch_uniform_1i(
batch, "use_extended", viewport->view_settings.flag & COLORMANAGE_VIEW_USE_HDR);
GPU_batch_uniform_1i(batch, "use_extended", use_hdr);
}
GPU_texture_bind(color, 0);

View File

@@ -405,6 +405,7 @@ void MTLBackend::capabilities_init(MTLContext *ctx)
GCaps.compute_shader_support = true;
GCaps.shader_storage_buffer_objects_support = true;
GCaps.shader_draw_parameters_support = true;
GCaps.hdr_viewport_support = true;
brecht marked this conversation as resolved Outdated

Is it possible to check if the monitor supports HDR?


Thanks @brecht, working through your feedback now, hopefully we can work towards getting the first prototype of this in soon.

Regarding whether it is possible to check monitor support, I believe we should be able to add a dynamic parameter to GHOST_DisplayManager* for this, although I'd be conscious of whether this would throw up potential issues if the setup changes after initialisation.

For example, an HDR-compatible display might be plugged in after the fact.

I believe the property `[NSScreen maximumPotentialExtendedDynamicRangeColorComponentValue]` should be able to tell you whether a display can support EDR/HDR up-front.

https://developer.apple.com/documentation/appkit/nsscreen/3180381-maximumpotentialextendeddynamicr

Perhaps it would make sense to add a `supports_hdr` flag to `GHOST_DisplaySetting` and use this data for the active display?
Does Blender currently do any context re-initialisation upon switching the window between displays?


I was thinking about this mostly for the purpose of graying out the setting in the user interface. For that there could be `bool GHOST_HasHDRDisplay()`, returning a cached value that is updated on certain events from the operating system.

Not sure it's worth doing that as an optimization to allocate a smaller buffer when HDR is not used. We don't reinitialize the context when moving the window between displays.
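
A minimal sketch of what such a cached GHOST-level query could look like; this is purely illustrative and not part of this patch. Only `GHOST_HasHDRDisplay()` is named in the discussion, and the update hook used here is a hypothetical name:

```
/* Illustrative only -- not part of this patch. The platform backend would call
 * the hypothetical GHOST_DisplayHDRChanged() hook on OS display-configuration
 * events; on macOS the value could be derived from the NSScreen property
 * maximumPotentialExtendedDynamicRangeColorComponentValue mentioned above. */
static bool g_has_hdr_display = false;

void GHOST_DisplayHDRChanged(bool has_hdr_display)
{
  g_has_hdr_display = has_hdr_display;
}

bool GHOST_HasHDRDisplay(void)
{
  return g_has_hdr_display;
}
```

UI code could then gray out the HDR toggle based on `GHOST_HasHDRDisplay()` without re-querying the OS on every redraw.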


Yeah, this makes sense; I'll look into getting something put together. In this case, perhaps it makes sense to keep the buffer allocation static, using the GPU capability `GCaps.hdr_viewport_support` to demonstrate the potential, and then have `GHOST_HasHDRDisplay()` be dynamic based on actual availability to override the UI/feature toggle.

This should mean all workflows are supported, and dynamic changes to connected displays do not affect behaviour.


Yes, that seems fine.


@brecht On reflection after looking into this, I'm not sure if this makes sense to change. Similarly, I'm also unsure if greying out the HDR toggle should be done either. There are two main thoughts that jump to mind:

  1. Determining HDR/EDR availability of the monitor can be more complex than perceived. While macOS devices are able to return values for EDR availability, whether the toggle has a clear visual change will also depend on the user's configured display transform. Similarly, different HDR monitors from other manufacturers may also behave differently. They may still be able to display extended range content in some form, or offer better precision within the 0.0 to 1.0 range.

i.e. there are potentially multiple dimensions to consider, bit depth and EDR, which are both means of displaying higher-precision content, but not necessarily both present at one time.

  2. While Filmic transforms may still have a color range which is clamped between 0.0 and 1.0, there is still an argument to be made for utilising a higher-precision buffer format. While the range clamp toggle controls clamping, an HDR- or EDR-enabled monitor may still be able to display "Filmic" content with higher quality whenever the HDR feature is used. The button won't change anything, but the underlying change to a higher-bit-depth target may reduce possible banding if displayed on a monitor with some form of HDR capability.

Perhaps the key issue is in communicating what the toggle button should actually represent. As it stands, given HDR is enabled anyway if supported by the backend, the button acts as a visualisation toggle for over-brightness.

As per other apps such as Affinity Photo, my suggestion for this initial implementation may simply be to present the switch as an "EDR" toggle on macOS ("Enable Extended Dynamic Range"), which implies allowing an extended range and is more in line with what is happening.

Then, as the HDR workflow project as a whole evolves and moves to other platforms, this can be improved once ideas for how the implementation should be handled mature.

In most of these cases, it feels the best solution is to keep the representation in the application as simple as possible, while allowing the application to output the higher-precision formats and letting users configure their own displays accordingly.


If we can't reliably or easily determine whether the monitor supports HDR/EDR, then I'm fine not graying out the HDR option for that reason. It's just a bit nicer but not that important.

I can see that for Filmic it could make a difference, but we also do dithering by default, which negates that in practice. I think the fact that the button does nothing at all for Filmic is the bigger UI issue that users will struggle with (in fact developers here have gotten confused wondering why it didn't work). What do you think about adding a text label explaining this and not graying it out? For example just:

```
Filmic does not generate HDR colors.
```

I also still suggest putting the HDR option in a "Display" subpanel of "Color Management", as suggested before.

I prefer the name HDR over EDR, even if there are some side benefits with extended precision. I would rather not introduce the term EDR for Blender users.

Okay, thanks for the feedback. I think it should be okay to keep the current patch status with the greying out for Filmic in this case, so that it better represents user expectations.

> What do you think about adding a text label explaining this and not graying out. For example..

Greying out may still be reasonable alongside this. A note would likely be useful to highlight that not all transforms will have a visual change, especially if they are internally remapping a range between 0.0 and 1.0. I can tag this note onto the description just to help set user expectations.

> I also still suggest to put the HDR option in a "Display" subpanel of "Color Management" as suggested before.

In this case, I think the last bit to do is your final suggestion here; I can add a Display sub-panel and update the PR.

> I prefer the name HDR over EDR, even if there are some side benefits with extended precision. I would rather not introduce the term EDR for Blender users.

This makes sense, I imagine the implementation will evolve over time as HDR workflows are implemented across the board as part of the longer-term effort.

Thanks!

GCaps.geometry_shader_support = false;

View File

@@ -234,6 +234,7 @@ static void detect_workarounds()
GCaps.shader_image_load_store_support = false;
GCaps.shader_draw_parameters_support = false;
GCaps.shader_storage_buffer_objects_support = false;
GCaps.hdr_viewport_support = false;
GLContext::base_instance_support = false;
GLContext::clear_texture_support = false;
GLContext::copy_image_support = false;
@@ -538,6 +539,7 @@ void GLBackend::capabilities_init()
epoxy_gl_version() >= 43;
GCaps.geometry_shader_support = true;
GCaps.max_samplers = GCaps.max_textures;
GCaps.hdr_viewport_support = false;
if (GCaps.compute_shader_support) {
glGetIntegeri_v(GL_MAX_COMPUTE_WORK_GROUP_COUNT, 0, &GCaps.max_work_group_count[0]);

View File

@@ -42,6 +42,8 @@
#include "BKE_image_format.h"
#include "BKE_main.h"
#include "GPU_capabilities.h"
#include "RNA_define.h"
#include "SEQ_iterator.h"
@@ -4075,7 +4077,8 @@ bool IMB_colormanagement_setup_glsl_draw_from_space(
const float gamma = applied_view_settings->gamma;
const float scale = (exposure == 0.0f) ? 1.0f : powf(2.0f, exposure);
const float exponent = (gamma == 1.0f) ? 1.0f : 1.0f / max_ff(FLT_EPSILON, gamma);
const bool use_extended = (applied_view_settings->flag & COLORMANAGE_VIEW_USE_HDR) != 0;
const bool use_extended = GPU_HDR_support() &&
(applied_view_settings->flag & COLORMANAGE_VIEW_USE_HDR) != 0;
OCIO_ConstConfigRcPtr *config = OCIO_getCurrentConfig();

View File

@@ -1293,7 +1293,7 @@ static void rna_def_colormanage(BlenderRNA *brna)
RNA_def_property_ui_text(prop,
"High Dynamic Range",
"Enable high dynamic range with extended colorspace in viewport, "
"uncapping display brightness for rendered content.");
"uncapping display brightness for rendered content");
brecht marked this conversation as resolved Outdated

Can we extend this tooltip with some more info?

```
Enable high dynamic range display in rendered viewport, uncapping display brightness. This requires a monitor with HDR support and a view transform designed for HDR.
```
RNA_def_property_update(prop, NC_WINDOW, "rna_ColorManagedColorspaceSettings_reload_update");
/* ** Color-space ** */

View File

@@ -228,6 +228,19 @@ static PyObject *pygpu_shader_image_load_store_support_get(PyObject *UNUSED(self
{
return PyBool_FromLong(GPU_shader_image_load_store_support());
}
PyDoc_STRVAR(pygpu_hdr_support_get_doc,
".. function:: hdr_support_get()\n"
"\n"
" Return whether GPU backend supports High Dynamic range for viewport.\n"
"\n"
" :return: HDR support available.\n"
" :rtype: bool\n");
static PyObject *pygpu_hdr_support_get(PyObject *UNUSED(self))
{
return PyBool_FromLong(GPU_HDR_support());
}
/** \} */
/* -------------------------------------------------------------------- */
@@ -297,6 +310,10 @@ static PyMethodDef pygpu_capabilities__tp_methods[] = {
(PyCFunction)pygpu_shader_image_load_store_support_get,
METH_NOARGS,
pygpu_shader_image_load_store_support_get_doc},
{"hdr_support_get",
(PyCFunction)pygpu_hdr_support_get,
METH_NOARGS,
pygpu_hdr_support_get_doc},
{NULL, NULL, 0, NULL},
};

View File

@@ -38,6 +38,7 @@
#include "ED_view3d.h"
#include "GPU_batch_presets.h"
#include "GPU_capabilities.h"
#include "GPU_context.h"
#include "GPU_debug.h"
#include "GPU_framebuffer.h"
@@ -663,7 +664,7 @@ static void wm_draw_region_buffer_create(Scene *scene,
/* Determine desired offscreen format. */
brecht marked this conversation as resolved Outdated

I see this code is repeated 3 times. Better to extract it into a static function.

bool use_hdr = false;
if (scene && ((scene->view_settings.flag & COLORMANAGE_VIEW_USE_HDR) != 0)) {
use_hdr = true;
use_hdr = GPU_HDR_support();
}
eGPUTextureFormat desired_format = (use_hdr) ? GPU_RGBA16F : GPU_RGBA8;
@@ -1165,7 +1166,7 @@ static void wm_draw_window(bContext *C, wmWindow *win)
bool use_hdr = false;
Scene *scene = WM_window_get_active_scene(win);
if (scene && ((scene->view_settings.flag & COLORMANAGE_VIEW_USE_HDR) != 0)) {
use_hdr = true;
use_hdr = GPU_HDR_support();
}
eGPUTextureFormat desired_format = (use_hdr) ? GPU_RGBA16F : GPU_RGBA8;
@@ -1328,7 +1329,7 @@ uint8_t *WM_window_pixels_read_from_offscreen(bContext *C, wmWindow *win, int r_
bool use_hdr = false;
Scene *scene = WM_window_get_active_scene(win);
if (scene && ((scene->view_settings.flag & COLORMANAGE_VIEW_USE_HDR) != 0)) {
use_hdr = true;
use_hdr = GPU_HDR_support();
}
eGPUTextureFormat desired_format = (use_hdr) ? GPU_RGBA16F : GPU_RGBA8;
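
Since fclem noted above that the HDR format selection in this file is repeated three times, here is a minimal sketch of the kind of static helper being suggested. The function name is hypothetical and this is not part of the patch:

```
/* Hypothetical helper, not part of this patch: centralise the repeated check
 * from wm_draw_region_buffer_create(), wm_draw_window() and
 * WM_window_pixels_read_from_offscreen(). Relies on headers already included
 * in this file (GPU_capabilities.h for GPU_HDR_support()). */
static eGPUTextureFormat wm_draw_offscreen_format(const Scene *scene)
{
  /* Use a half-float offscreen target only when the scene view settings
   * request HDR and the active GPU backend reports viewport HDR support. */
  const bool use_hdr = GPU_HDR_support() && scene &&
                       (scene->view_settings.flag & COLORMANAGE_VIEW_USE_HDR) != 0;
  return use_hdr ? GPU_RGBA16F : GPU_RGBA8;
}
```

Each call site would then reduce to `eGPUTextureFormat desired_format = wm_draw_offscreen_format(scene);`.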