Geometry Nodes: Matrix socket type, attribute type, and initial nodes #116166

Merged
Hans Goudey merged 47 commits from HooglyBoogly/blender:nodes-matrix-socket into main 2024-02-13 18:59:46 +01:00
27 changed files with 208 additions and 120 deletions
Showing only changes of commit d610d0c5e6 - Show all commits

View File

@ -252,12 +252,18 @@ PYGETTEXT_KEYWORDS = (() +
for it in ("BKE_report", "BKE_reportf", "BKE_reports_prepend", "BKE_reports_prependf",
"CTX_wm_operator_poll_msg_set", "WM_report", "WM_reportf")) +
# bmesh operator errors
tuple(("{}\\((?:[^\"',]+,){{3}}\\s*" + _msg_re + r"\s*\)").format(it)
for it in ("BMO_error_raise",)) +
# Modifier errors
tuple(("{}\\((?:[^\"',]+,){{2}}\\s*" + _msg_re + r"\s*(?:\)|,)").format(it)
for it in ("BKE_modifier_set_error",)) +
# Compositor error messages
tuple((r"\.{}\(\s*" + _msg_re + r"\s*\)").format(it)
for it in ("set_info_message",)) +
# This one is a tad more risky, but in practice we would not expect a name/uid string parameter
# (the second one in those functions) to ever contain a comma, so this should be fine.
tuple(("{}\\((?:[^,]+,){{2}}\\s*" + _msg_re + r"\s*(?:\)|,)").format(it)
@ -297,13 +303,8 @@ PYGETTEXT_KEYWORDS = (() +
# Geometry Nodes field inputs
((r"FieldInput\(CPPType::get<.*?>\(\),\s*" + _msg_re + r"\s*\)"),) +
# bUnitDef unit names.
# NOTE: regex is a bit more complex than it would need to be. Since the actual
# identifier (`B_UNIT_DEF_`) is at the end, if it's simpler/too general it
# becomes extremely slow to process some (unrelated) source files.
((r"\{(?:(?:\s*\"[^\",]+\"\s*,)|(?:\s*\"\\\"\",)|(?:\s*nullptr\s*,)){4}\s*" +
_msg_re + r"\s*,(?:(?:\s*\"[^\"',]+\"\s*,)|(?:\s*nullptr\s*,))(?:[^,]+,){2}"
+ "(?:\|?\s*B_UNIT_DEF_[_A-Z]+\s*)+\}"),) +
# bUnitDef unit names
((r"/\*name_display\*/\s*" + _msg_re + r"\s*,"),) +
tuple((r"{}\(\s*" + _msg_re + r"\s*,\s*(?:" +
r"\s*,\s*)?(?:".join(_ctxt_re_gen(i) for i in range(PYGETTEXT_MAX_MULTI_CTXT)) + r")?\s*\)").format(it)

View File

@ -13,7 +13,10 @@ from bpy.props import (
EnumProperty,
StringProperty,
)
from bpy.app.translations import pgettext_tip as tip_
from bpy.app.translations import (
pgettext_tip as tip_,
contexts as i18n_contexts,
)
class ANIM_OT_keying_set_export(Operator):
@ -238,6 +241,7 @@ class NLA_OT_bake(Operator):
)
bake_types: EnumProperty(
name="Bake Data",
translation_context=i18n_contexts.id_action,
description="Which data's transformations to bake",
options={'ENUM_FLAG'},
items=(

View File

@ -5,7 +5,10 @@
from bpy.types import (
Panel,
)
from bpy.app.translations import pgettext_iface as iface_
from bpy.app.translations import (
pgettext_iface as iface_,
contexts as i18n_contexts,
)
class PHYSICS_PT_geometry_nodes(Panel):
@ -35,16 +38,16 @@ class PHYSICS_PT_geometry_nodes(Panel):
calc_text = iface_("Calculate to Frame")
bake_text = iface_("Bake")
layout.operator("object.simulation_nodes_cache_calculate_to_frame", text=calc_text).selected = True
layout.operator("object.simulation_nodes_cache_calculate_to_frame", text=calc_text, translate=False).selected = True
row = layout.row(align=True)
row.operator("object.simulation_nodes_cache_bake", text=bake_text).selected = True
row.operator("object.simulation_nodes_cache_bake", text=bake_text, translate=False).selected = True
row.operator("object.simulation_nodes_cache_delete", text="", icon='TRASH').selected = True
layout.use_property_split = True
layout.use_property_decorate = False
ob = context.object
layout.prop(ob, "use_simulation_cache", text="Cache")
layout.prop(ob, "use_simulation_cache", text="Cache", text_ctxt=i18n_contexts.id_simulation)
classes = (

View File

@ -322,7 +322,10 @@ class SEQUENCER_PT_sequencer_overlay(Panel):
layout.separator()
layout.prop_menu_enum(overlay_settings, "waveform_display_type")
layout.label(text="Waveforms")
layout.row().prop(overlay_settings, "waveform_display_type", expand=True)
layout.label(text="Waveform Style")
layout.row().prop(overlay_settings, "waveform_display_style", expand=True)
class SEQUENCER_MT_view_cache(Menu):

View File

@ -702,7 +702,7 @@ class VIEW3D_HT_header(Header):
icon = snap_items[elem].icon
break
else:
text = "Mix"
text = iface_("Mix", i18n_contexts.editor_view3d)
icon = 'NONE'
del snap_items, snap_elements
@ -714,6 +714,7 @@ class VIEW3D_HT_header(Header):
panel="VIEW3D_PT_snapping",
icon=icon,
text=text,
translate=False,
)
# Proportional editing

View File

@ -39,9 +39,11 @@ enum {
FFMPEG_PRESET_AV1 = 8,
};
struct AVFrame;
struct RenderData;
struct ReportList;
struct Scene;
struct SwsContext;
int BKE_ffmpeg_start(void *context_v,
const Scene *scene,
@ -73,4 +75,8 @@ bool BKE_ffmpeg_alpha_channel_is_supported(const RenderData *rd);
void *BKE_ffmpeg_context_create(void);
void BKE_ffmpeg_context_free(void *context_v);
SwsContext *BKE_ffmpeg_sws_get_context(
int width, int height, int av_src_format, int av_dst_format, int sws_flags);
void BKE_ffmpeg_sws_scale_frame(SwsContext *ctx, AVFrame *dst, const AVFrame *src);
#endif

View File

@ -110,7 +110,9 @@ void CurveComponent::ensure_owns_direct_data()
{
BLI_assert(this->is_mutable());
if (ownership_ != GeometryOwnershipType::Owned) {
curves_ = BKE_curves_copy_for_eval(curves_);
if (curves_) {
curves_ = BKE_curves_copy_for_eval(curves_);
}
ownership_ = GeometryOwnershipType::Owned;
}
}

View File

@ -94,7 +94,9 @@ void GreasePencilComponent::ensure_owns_direct_data()
{
BLI_assert(this->is_mutable());
if (ownership_ != GeometryOwnershipType::Owned) {
grease_pencil_ = BKE_grease_pencil_copy_for_eval(grease_pencil_);
if (grease_pencil_) {
grease_pencil_ = BKE_grease_pencil_copy_for_eval(grease_pencil_);
}
ownership_ = GeometryOwnershipType::Owned;
}
}

View File

@ -105,7 +105,9 @@ void MeshComponent::ensure_owns_direct_data()
{
BLI_assert(this->is_mutable());
if (ownership_ != GeometryOwnershipType::Owned) {
mesh_ = BKE_mesh_copy_for_eval(mesh_);
if (mesh_) {
mesh_ = BKE_mesh_copy_for_eval(mesh_);
}
ownership_ = GeometryOwnershipType::Owned;
}
}

View File

@ -94,7 +94,9 @@ void PointCloudComponent::ensure_owns_direct_data()
{
BLI_assert(this->is_mutable());
if (ownership_ != GeometryOwnershipType::Owned) {
pointcloud_ = BKE_pointcloud_copy_for_eval(pointcloud_);
if (pointcloud_) {
pointcloud_ = BKE_pointcloud_copy_for_eval(pointcloud_);
}
ownership_ = GeometryOwnershipType::Owned;
}
}

View File

@ -87,7 +87,9 @@ void VolumeComponent::ensure_owns_direct_data()
{
BLI_assert(this->is_mutable());
if (ownership_ != GeometryOwnershipType::Owned) {
volume_ = BKE_volume_copy_for_eval(volume_);
if (volume_) {
volume_ = BKE_volume_copy_for_eval(volume_);
}
ownership_ = GeometryOwnershipType::Owned;
}
}

View File

@ -81,7 +81,10 @@
/* clang-format on */
/* Define a single unit. */
/* Define a single unit.
* When changing the format, please check that the PYGETTEXT_KEYWORDS regex
* used to extract the unit names for translation still works
* in scripts/modules/bl_i18n_utils/settings.py. */
struct bUnitDef {
const char *name;
/** Abused a bit for the display name. */

View File

@ -420,17 +420,7 @@ static AVFrame *generate_video_frame(FFMpegContext *context, const uint8_t *pixe
/* Convert to the output pixel format, if it's different from Blender's internal one. */
if (context->img_convert_frame != nullptr) {
BLI_assert(context->img_convert_ctx != NULL);
# if defined(FFMPEG_SWSCALE_THREADING)
sws_scale_frame(context->img_convert_ctx, context->current_frame, rgb_frame);
# else
sws_scale(context->img_convert_ctx,
(const uint8_t *const *)rgb_frame->data,
rgb_frame->linesize,
0,
codec->height,
context->current_frame->data,
context->current_frame->linesize);
# endif
BKE_ffmpeg_sws_scale_frame(context->img_convert_ctx, context->current_frame, rgb_frame);
}
return context->current_frame;
@ -677,10 +667,8 @@ static const AVCodec *get_av1_encoder(
return codec;
}
static SwsContext *get_threaded_sws_context(int width,
int height,
AVPixelFormat src_format,
AVPixelFormat dst_format)
SwsContext *BKE_ffmpeg_sws_get_context(
int width, int height, int av_src_format, int av_dst_format, int sws_flags)
{
# if defined(FFMPEG_SWSCALE_THREADING)
/* sws_getContext does not allow passing flags that ask for multi-threaded
@ -691,11 +679,11 @@ static SwsContext *get_threaded_sws_context(int width,
}
av_opt_set_int(c, "srcw", width, 0);
av_opt_set_int(c, "srch", height, 0);
av_opt_set_int(c, "src_format", src_format, 0);
av_opt_set_int(c, "src_format", av_src_format, 0);
av_opt_set_int(c, "dstw", width, 0);
av_opt_set_int(c, "dsth", height, 0);
av_opt_set_int(c, "dst_format", dst_format, 0);
av_opt_set_int(c, "sws_flags", SWS_BICUBIC, 0);
av_opt_set_int(c, "dst_format", av_dst_format, 0);
av_opt_set_int(c, "sws_flags", sws_flags, 0);
av_opt_set_int(c, "threads", BLI_system_thread_count(), 0);
if (sws_init_context(c, nullptr, nullptr) < 0) {
@ -705,11 +693,11 @@ static SwsContext *get_threaded_sws_context(int width,
# else
SwsContext *c = sws_getContext(width,
height,
src_format,
AVPixelFormat(av_src_format),
width,
height,
dst_format,
SWS_BICUBIC,
AVPixelFormat(av_dst_format),
sws_flags,
nullptr,
nullptr,
nullptr);
@ -717,6 +705,14 @@ static SwsContext *get_threaded_sws_context(int width,
return c;
}
/* Scale/convert `src` into `dst` using the given swscale context.
 * When FFmpeg was built with threaded swscale support, use the frame-based
 * sws_scale_frame() API; otherwise fall back to the classic slice-based
 * sws_scale() call covering all `src->height` rows at once. */
void BKE_ffmpeg_sws_scale_frame(SwsContext *ctx, AVFrame *dst, const AVFrame *src)
{
# if defined(FFMPEG_SWSCALE_THREADING)
/* Multi-threaded path (newer swscale API). */
sws_scale_frame(ctx, dst, src);
# else
/* Single-threaded path: convert the full frame in one slice. */
sws_scale(ctx, src->data, src->linesize, 0, src->height, dst->data, dst->linesize);
# endif
}
/* prepare a video stream for the output file */
@ -955,8 +951,8 @@ static AVStream *alloc_video_stream(FFMpegContext *context,
else {
/* Output pixel format is different, allocate frame for conversion. */
context->img_convert_frame = alloc_picture(AV_PIX_FMT_RGBA, c->width, c->height);
context->img_convert_ctx = get_threaded_sws_context(
c->width, c->height, AV_PIX_FMT_RGBA, c->pix_fmt);
context->img_convert_ctx = BKE_ffmpeg_sws_get_context(
c->width, c->height, AV_PIX_FMT_RGBA, c->pix_fmt, SWS_BICUBIC);
}
avcodec_parameters_from_context(st->codecpar, c);

View File

@ -151,7 +151,7 @@ void World::sync_volume()
}
if (gpumat && (GPU_material_status(gpumat) == GPU_MAT_SUCCESS)) {
has_volume_ = true;
has_volume_ = GPU_material_has_volume_output(gpumat);
has_volume_scatter_ = GPU_material_flag_get(gpumat, GPU_MATFLAG_VOLUME_SCATTER);
has_volume_absorption_ = GPU_material_flag_get(gpumat, GPU_MATFLAG_VOLUME_ABSORPTION);
}

View File

@ -25,8 +25,6 @@ ClosureDiffuse g_diffuse_data;
ClosureTranslucent g_translucent_data;
ClosureReflection g_reflection_data;
ClosureRefraction g_refraction_data;
ClosureVolumeScatter g_volume_scatter_data;
ClosureVolumeAbsorption g_volume_absorption_data;
/* Random number per sampled closure type. */
float g_diffuse_rand;
float g_translucent_rand;
@ -82,12 +80,9 @@ void closure_weights_reset()
g_refraction_data.roughness = 0.0;
g_refraction_data.ior = 0.0;
g_volume_scatter_data.weight = 0.0;
g_volume_scatter_data.scattering = vec3(0.0);
g_volume_scatter_data.anisotropy = 0.0;
g_volume_absorption_data.weight = 0.0;
g_volume_absorption_data.absorption = vec3(0.0);
g_volume_scattering = vec3(0.0);
g_volume_anisotropy = 0.0;
g_volume_absorption = vec3(0.0);
#if defined(GPU_FRAGMENT_SHADER)
g_diffuse_rand = g_translucent_rand = g_reflection_rand = g_refraction_rand = g_closure_rand;
@ -143,14 +138,14 @@ Closure closure_eval(ClosureTransparency transparency)
Closure closure_eval(ClosureVolumeScatter volume_scatter)
{
g_volume_scattering += volume_scatter.scattering;
g_volume_anisotropy += volume_scatter.anisotropy;
g_volume_scattering += volume_scatter.scattering * volume_scatter.weight;
g_volume_anisotropy += volume_scatter.anisotropy * volume_scatter.weight;
return Closure(0);
}
Closure closure_eval(ClosureVolumeAbsorption volume_absorption)
{
g_volume_absorption += volume_absorption.absorption;
g_volume_absorption += volume_absorption.absorption * volume_absorption.weight;
return Closure(0);
}

View File

@ -18,6 +18,8 @@
#include "BLF_api.h"
#include "BLT_translation.h"
#include "BKE_colortools.h"
#include "BKE_context.hh"
#include "BKE_curve.hh"
@ -1151,7 +1153,7 @@ void DRW_draw_region_engine_info(int xoffset, int *yoffset, int line_height)
BLF_shadow(font_id, 5, blender::float4{0.0f, 0.0f, 0.0f, 1.0f});
BLF_shadow_offset(font_id, 1, -1);
const char *buf_step = data->info;
const char *buf_step = IFACE_(data->info);
do {
const char *buf = buf_step;
buf_step = BLI_strchr_or_end(buf, '\n');

View File

@ -5301,8 +5301,15 @@ static void CurveProfile_buttons_layout(uiLayout *layout, PointerRNA *ptr, RNAUp
/* There is probably potential to use simpler "uiItemR" functions here, but automatic updating
* after a preset is selected would be more complicated. */
uiLayout *row = uiLayoutRow(layout, true);
bt = uiDefBlockBut(
block, CurveProfile_buttons_presets, profile, "Preset", 0, 0, UI_UNIT_X, UI_UNIT_X, "");
bt = uiDefBlockBut(block,
CurveProfile_buttons_presets,
profile,
IFACE_("Preset"),
0,
0,
UI_UNIT_X,
UI_UNIT_X,
"");
UI_but_funcN_set(bt, rna_update_cb, MEM_dupallocN(cb), nullptr);
/* Show a "re-apply" preset button when it has been changed from the preset. */
@ -5313,7 +5320,7 @@ static void CurveProfile_buttons_layout(uiLayout *layout, PointerRNA *ptr, RNAUp
UI_BTYPE_BUT,
0,
ICON_NONE,
"Apply Preset",
IFACE_("Apply Preset"),
0,
0,
UI_UNIT_X,

View File

@ -294,7 +294,7 @@ void AssetCatalogTreeViewItem::build_context_menu(bContext &C, uiLayout &column)
uiItemFullO(&column,
"ASSET_OT_catalog_new",
"New Catalog",
IFACE_("New Catalog"),
ICON_NONE,
nullptr,
WM_OP_INVOKE_DEFAULT,
@ -306,14 +306,14 @@ void AssetCatalogTreeViewItem::build_context_menu(bContext &C, uiLayout &column)
BLI_uuid_format(catalog_id_str_buffer, catalog_item_.get_catalog_id());
uiItemFullO(&column,
"ASSET_OT_catalog_delete",
"Delete Catalog",
IFACE_("Delete Catalog"),
ICON_NONE,
nullptr,
WM_OP_INVOKE_DEFAULT,
UI_ITEM_NONE,
&props);
RNA_string_set(&props, "catalog_id", catalog_id_str_buffer);
uiItemO(&column, "Rename", ICON_NONE, "UI_OT_view_item_rename");
uiItemO(&column, IFACE_("Rename"), ICON_NONE, "UI_OT_view_item_rename");
/* Doesn't actually exist right now, but could be defined in Python. Reason that this isn't done
* in Python yet is that catalogs are not exposed in BPY, and we'd somehow pass the clicked on

View File

@ -421,13 +421,18 @@ static void draw_seq_waveform_overlay(TimelineDrawContext *timeline_ctx,
Scene *scene = timeline_ctx->scene;
Sequence *seq = strip_ctx->seq;
const bool half_style = (timeline_ctx->sseq->timeline_overlay.flag &
SEQ_TIMELINE_WAVEFORMS_HALF) != 0;
const float frames_per_pixel = BLI_rctf_size_x(&v2d->cur) / timeline_ctx->region->winx;
const float samples_per_frame = SOUND_WAVE_SAMPLES_PER_SECOND / FPS;
const float samples_per_pixel = samples_per_frame * frames_per_pixel;
/* The y coordinate for the middle of the strip. */
const float y_zero = (strip_ctx->bottom + strip_ctx->strip_content_top) / 2.0f;
/* The length from the middle of the strip to the top/bottom. */
const float y_scale = (strip_ctx->strip_content_top - strip_ctx->bottom) / 2.0f;
/* The y coordinate of signal level zero. */
const float y_zero = half_style ? strip_ctx->bottom :
(strip_ctx->bottom + strip_ctx->strip_content_top) / 2.0f;
/* The y range of unit signal level. */
const float y_scale = half_style ? strip_ctx->strip_content_top - strip_ctx->bottom :
(strip_ctx->strip_content_top - strip_ctx->bottom) / 2.0f;
/* Align strip start with nearest pixel to prevent waveform flickering. */
const float strip_start_aligned = align_frame_with_pixel(strip_ctx->left_handle,
@ -512,6 +517,25 @@ static void draw_seq_waveform_overlay(TimelineDrawContext *timeline_ctx,
CLAMP_MIN(value_min, -1.0f);
}
/* We are drawing only half of the waveform, mirroring the lower part upwards.
* If both min and max are on the same side of zero line, we want to draw a bar
* between them. If min and max cross zero, we want to fill bar from zero to max
* of those. */
if (half_style) {
bool pos_min = value_min > 0.0f;
bool pos_max = value_max > 0.0f;
float abs_min = std::abs(value_min);
float abs_max = std::abs(value_max);
if (pos_min == pos_max) {
value_min = std::min(abs_min, abs_max);
value_max = std::max(abs_min, abs_max);
}
else {
value_min = 0;
value_max = std::max(abs_min, abs_max);
}
}
float x1 = draw_start_frame + i * frames_per_pixel;
float x2 = draw_start_frame + (i + 1) * frames_per_pixel;
float y_min = y_zero + value_min * y_scale;

View File

@ -63,6 +63,7 @@
#ifdef WITH_FFMPEG
# include "BKE_global.h" /* ENDIAN_ORDER */
# include "BKE_writeffmpeg.hh"
extern "C" {
# include <libavcodec/avcodec.h>
@ -694,16 +695,12 @@ static int startffmpeg(anim *anim)
1);
}
anim->img_convert_ctx = sws_getContext(anim->x,
anim->y,
anim->pCodecCtx->pix_fmt,
anim->x,
anim->y,
AV_PIX_FMT_RGBA,
SWS_BILINEAR | SWS_PRINT_INFO | SWS_FULL_CHR_H_INT,
nullptr,
nullptr,
nullptr);
anim->img_convert_ctx = BKE_ffmpeg_sws_get_context(anim->x,
anim->y,
anim->pCodecCtx->pix_fmt,
AV_PIX_FMT_RGBA,
SWS_BILINEAR | SWS_PRINT_INFO |
SWS_FULL_CHR_H_INT);
if (!anim->img_convert_ctx) {
fprintf(stderr, "Can't transform color space??? Bailing out...\n");
@ -846,32 +843,48 @@ static void ffmpeg_postprocess(anim *anim, AVFrame *input, ImBuf *ibuf)
}
}
sws_scale(anim->img_convert_ctx,
(const uint8_t *const *)input->data,
input->linesize,
0,
anim->y,
anim->pFrameRGB->data,
anim->pFrameRGB->linesize);
/* If final destination image layout matches that of decoded RGB frame (including
* any line padding done by ffmpeg for SIMD alignment), we can directly
* decode into that, doing the vertical flip in the same step. Otherwise have
* to do a separate flip. */
const int ibuf_linesize = ibuf->x * 4;
const int rgb_linesize = anim->pFrameRGB->linesize[0];
bool scale_to_ibuf = (rgb_linesize == ibuf_linesize);
/* swscale on arm64 before ffmpeg 6.0 (libswscale major version 7)
* could not handle negative line sizes. That has been fixed in all major
* ffmpeg releases in early 2023, but easier to just check for "below 7". */
# if (defined(__aarch64__) || defined(_M_ARM64)) && (LIBSWSCALE_VERSION_MAJOR < 7)
scale_to_ibuf = false;
# endif
uint8_t *rgb_data = anim->pFrameRGB->data[0];
if (scale_to_ibuf) {
/* Decode RGB and do vertical flip directly into destination image, by using negative
* line size. */
anim->pFrameRGB->linesize[0] = -ibuf_linesize;
anim->pFrameRGB->data[0] = ibuf->byte_buffer.data + (ibuf->y - 1) * ibuf_linesize;
BKE_ffmpeg_sws_scale_frame(anim->img_convert_ctx, anim->pFrameRGB, input);
anim->pFrameRGB->linesize[0] = rgb_linesize;
anim->pFrameRGB->data[0] = rgb_data;
}
else {
/* Decode, then do vertical flip into destination. */
BKE_ffmpeg_sws_scale_frame(anim->img_convert_ctx, anim->pFrameRGB, input);
/* Use negative line size to do vertical image flip. */
const int src_linesize[4] = {-rgb_linesize, 0, 0, 0};
const uint8_t *const src[4] = {
rgb_data + (anim->y - 1) * rgb_linesize, nullptr, nullptr, nullptr};
int dst_size = av_image_get_buffer_size(AVPixelFormat(anim->pFrameRGB->format),
anim->pFrameRGB->width,
anim->pFrameRGB->height,
1);
av_image_copy_to_buffer(
ibuf->byte_buffer.data, dst_size, src, src_linesize, AV_PIX_FMT_RGBA, anim->x, anim->y, 1);
}
/* Copy the valid bytes from the aligned buffer vertically flipped into ImBuf */
int aligned_stride = anim->pFrameRGB->linesize[0];
const uint8_t *const src[4] = {
anim->pFrameRGB->data[0] + (anim->y - 1) * aligned_stride, nullptr, nullptr, nullptr};
/* NOTE: Negative linesize is used to copy and flip image at once with function
* `av_image_copy_to_buffer`. This could cause issues in future and image may need to be flipped
* explicitly. */
const int src_linesize[4] = {-anim->pFrameRGB->linesize[0], 0, 0, 0};
int dst_size = av_image_get_buffer_size(
AVPixelFormat(anim->pFrameRGB->format), anim->pFrameRGB->width, anim->pFrameRGB->height, 1);
av_image_copy_to_buffer((uint8_t *)ibuf->byte_buffer.data,
dst_size,
src,
src_linesize,
AV_PIX_FMT_RGBA,
anim->x,
anim->y,
1);
if (filter_y) {
IMB_filtery(ibuf);
}

View File

@ -82,6 +82,7 @@ void USDCameraReader::read_object_data(Main *bmain, const double motionSampleTim
/* Call UncheckedGet() to silence compiler warnings.
* Clamp to 1e-6 matching range defined in RNA. */
bcam->clip_start = max_ff(1e-6f, clippingRangeVal.UncheckedGet<pxr::GfVec2f>()[0]);
bcam->clip_end = clippingRangeVal.UncheckedGet<pxr::GfVec2f>()[1];
bcam->dof.focus_distance = focalDistanceVal.Get<float>();

View File

@ -623,6 +623,8 @@ typedef enum eSpaceSeq_SequencerTimelineOverlay_Flag {
SEQ_TIMELINE_ALL_WAVEFORMS = (1 << 7),
/** Draw no wave-forms. */
SEQ_TIMELINE_NO_WAVEFORMS = (1 << 8),
/** Draw only upper part of the waveform, showing absolute signal value. */
SEQ_TIMELINE_WAVEFORMS_HALF = (1 << 9),
SEQ_TIMELINE_SHOW_STRIP_NAME = (1 << 14),
SEQ_TIMELINE_SHOW_STRIP_SOURCE = (1 << 15),
SEQ_TIMELINE_SHOW_STRIP_DURATION = (1 << 16),
@ -712,7 +714,6 @@ typedef enum eSpaceSeq_Flag {
SPACE_SEQ_FLAG_UNUSED_4 = (1 << 4),
SPACE_SEQ_FLAG_UNUSED_5 = (1 << 5),
SEQ_USE_ALPHA = (1 << 6), /* use RGBA display mode for preview */
SPACE_SEQ_FLAG_UNUSED_9 = (1 << 9),
SPACE_SEQ_FLAG_UNUSED_10 = (1 << 10),
SEQ_SHOW_MARKERS = (1 << 11), /* show markers region */
SEQ_ZOOM_TO_FIT = (1 << 12),

View File

@ -5767,21 +5767,17 @@ static void rna_def_space_sequencer_timeline_overlay(BlenderRNA *brna)
RNA_def_struct_ui_text(srna, "Timeline Overlay Settings", "");
static const EnumPropertyItem waveform_type_display_items[] = {
{SEQ_TIMELINE_NO_WAVEFORMS,
"NO_WAVEFORMS",
0,
"Waveforms Off",
"Don't display waveforms for any sound strips"},
{SEQ_TIMELINE_ALL_WAVEFORMS,
"ALL_WAVEFORMS",
0,
"Waveforms On",
"On",
"Display waveforms for all sound strips"},
{0,
"DEFAULT_WAVEFORMS",
{0, "DEFAULT_WAVEFORMS", 0, "Strip", "Display waveforms depending on strip setting"},
{SEQ_TIMELINE_NO_WAVEFORMS,
"NO_WAVEFORMS",
0,
"Use Strip Option",
"Display waveforms depending on strip setting"},
"Off",
"Don't display waveforms for any sound strips"},
{0, nullptr, 0, nullptr, nullptr},
};
@ -5791,6 +5787,22 @@ static void rna_def_space_sequencer_timeline_overlay(BlenderRNA *brna)
RNA_def_property_ui_text(prop, "Waveform Display", "How Waveforms are displayed");
RNA_def_property_update(prop, NC_SPACE | ND_SPACE_SEQUENCER, nullptr);
static const EnumPropertyItem waveform_style_display_items[] = {
{0, "FULL_WAVEFORMS", 0, "Full", "Display full waveform"},
{SEQ_TIMELINE_WAVEFORMS_HALF,
"HALF_WAVEFORMS",
0,
"Half",
"Display upper half of the absolute value waveform"},
{0, nullptr, 0, nullptr, nullptr},
};
prop = RNA_def_property(srna, "waveform_display_style", PROP_ENUM, PROP_NONE);
RNA_def_property_enum_bitflag_sdna(prop, nullptr, "flag");
RNA_def_property_enum_items(prop, waveform_style_display_items);
RNA_def_property_ui_text(prop, "Waveform Style", "How Waveforms are displayed");
RNA_def_property_update(prop, NC_SPACE | ND_SPACE_SEQUENCER, nullptr);
prop = RNA_def_property(srna, "show_fcurves", PROP_BOOLEAN, PROP_NONE);
RNA_def_property_boolean_sdna(prop, nullptr, "flag", SEQ_TIMELINE_SHOW_FCURVES);
RNA_def_property_ui_text(prop, "Show F-Curves", "Display strip opacity/volume curve");

View File

@ -42,7 +42,7 @@ static void node_declare(NodeDeclarationBuilder &b)
node_storage(node).mode = GEO_NODE_EXTRUDE_MESH_FACES;
});
b.add_output<decl::Geometry>("Mesh").propagate_all();
b.add_output<decl::Bool>("Top").field_on_all();
b.add_output<decl::Bool>("Top").field_on_all().translation_context(BLT_I18NCONTEXT_ID_NODETREE);
b.add_output<decl::Bool>("Side").field_on_all();
}

View File

@ -52,8 +52,9 @@ static void node_declare(NodeDeclarationBuilder &b)
.subtype(PROP_DISTANCE)
.description("Height of the generated cone");
b.add_output<decl::Geometry>("Mesh");
b.add_output<decl::Bool>("Top").field_on_all();
b.add_output<decl::Bool>("Bottom").field_on_all();
b.add_output<decl::Bool>("Top").field_on_all().translation_context(BLT_I18NCONTEXT_ID_NODETREE);
b.add_output<decl::Bool>("Bottom").field_on_all().translation_context(
BLT_I18NCONTEXT_ID_NODETREE);
b.add_output<decl::Bool>("Side").field_on_all();
b.add_output<decl::Vector>("UV Map").field_on_all();
}

View File

@ -47,9 +47,10 @@ static void node_declare(NodeDeclarationBuilder &b)
.subtype(PROP_DISTANCE)
.description("The height of the cylinder");
b.add_output<decl::Geometry>("Mesh");
b.add_output<decl::Bool>("Top").field_on_all();
b.add_output<decl::Bool>("Top").field_on_all().translation_context(BLT_I18NCONTEXT_ID_NODETREE);
b.add_output<decl::Bool>("Side").field_on_all();
b.add_output<decl::Bool>("Bottom").field_on_all();
b.add_output<decl::Bool>("Bottom").field_on_all().translation_context(
BLT_I18NCONTEXT_ID_NODETREE);
b.add_output<decl::Vector>("UV Map").field_on_all();
}

View File

@ -927,11 +927,15 @@ static void seq_update_sound_modifiers(Sequence *seq)
BKE_sound_update_sequence_handle(seq->scene_sound, sound_handle);
}
/* Return true when either the scene or the strip's sound data-block has been
 * tagged for a recalculation that affects audio (audio update or copy-on-write).
 *
 * NOTE(fix): `&` binds tighter than `|` in C/C++, so the original expression
 * `recalc & ID_RECALC_AUDIO | ID_RECALC_COPY_ON_WRITE` evaluated as
 * `(recalc & ID_RECALC_AUDIO) | ID_RECALC_COPY_ON_WRITE`, OR-ing in the
 * nonzero constant unconditionally — the function always returned true and
 * every strip's sound was updated on every evaluation. The flags must be
 * combined first, then masked against the recalc bits. */
static bool must_update_strip_sound(Scene *scene, Sequence *seq)
{
  return (scene->id.recalc & (ID_RECALC_AUDIO | ID_RECALC_COPY_ON_WRITE)) != 0 ||
         (seq->sound->id.recalc & (ID_RECALC_AUDIO | ID_RECALC_COPY_ON_WRITE)) != 0;
}
static void seq_update_sound_strips(Scene *scene, Sequence *seq)
{
if (seq->sound == nullptr || ((scene->id.recalc & ID_RECALC_AUDIO) == 0 &&
(seq->sound->id.recalc & ID_RECALC_AUDIO) == 0))
{
if (seq->sound == nullptr || !must_update_strip_sound(scene, seq)) {
return;
}
/* Ensure strip is playing correct sound. */