From 51ba7d6227fc00708a0bcb25efe0a43c9f3d0ec2 Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Sun, 10 Sep 2023 18:29:00 +0100 Subject: [PATCH 01/14] Initial working version --- io_scene_fbx/export_fbx_bin.py | 133 +++++++++++++++++++------- io_scene_fbx/fbx_utils.py | 167 +++++++++++++++++++++++---------- 2 files changed, 215 insertions(+), 85 deletions(-) diff --git a/io_scene_fbx/export_fbx_bin.py b/io_scene_fbx/export_fbx_bin.py index 2b2a393f5..8fa6480c0 100644 --- a/io_scene_fbx/export_fbx_bin.py +++ b/io_scene_fbx/export_fbx_bin.py @@ -1985,8 +1985,8 @@ def fbx_data_animation_elements(root, scene_data): fps = scene.render.fps / scene.render.fps_base - def keys_to_ktimes(keys): - return (int(v) for v in convert_sec_to_ktime_iter((f / fps for f, _v in keys))) + def keys_to_ktimes(keys_array): + return (keys_array / fps * FBX_KTIME).astype(np.int64) # Animation stacks. for astack_key, alayers, alayer_key, name, f_start, f_end in animations: @@ -2026,18 +2026,18 @@ def fbx_data_animation_elements(root, scene_data): acn_tmpl = elem_props_template_init(scene_data.templates, b"AnimationCurveNode") acn_props = elem_properties(acurvenode) - for fbx_item, (acurve_key, def_value, keys, _acurve_valid) in acurves.items(): + for fbx_item, (acurve_key, def_value, (keys, values), _acurve_valid) in acurves.items(): elem_props_template_set(acn_tmpl, acn_props, "p_number", fbx_item.encode(), def_value, animatable=True) # Only create Animation curve if needed! - if keys: + nbr_keys = len(keys) + if nbr_keys: acurve = elem_data_single_int64(root, b"AnimationCurve", get_fbx_uuid_from_key(acurve_key)) acurve.add_string(fbx_name_class(b"", b"AnimCurve")) acurve.add_string(b"") # key attributes... - nbr_keys = len(keys) # flags... keyattr_flags = ( 1 << 2 | # interpolation mode, 1 = constant, 2 = linear, 3 = cubic. @@ -2053,7 +2053,7 @@ def fbx_data_animation_elements(root, scene_data): elem_data_single_float64(acurve, b"Default", def_value) elem_data_single_int32(acurve, b"KeyVer", FBX_ANIM_KEY_VERSION) elem_data_single_int64_array(acurve, b"KeyTime", keys_to_ktimes(keys)) - elem_data_single_float32_array(acurve, b"KeyValueFloat", (v for _f, v in keys)) + elem_data_single_float32_array(acurve, b"KeyValueFloat", values.astype(np.float32, copy=False)) elem_data_single_int32_array(acurve, b"KeyAttrFlags", keyattr_flags) elem_data_single_float32_array(acurve, b"KeyAttrDataFloat", keyattr_datafloat) elem_data_single_int32_array(acurve, b"KeyAttrRefCount", (nbr_keys,)) @@ -2254,36 +2254,97 @@ def fbx_animations_do(scene_data, ref_id, f_start, f_end, start_zero, objects=No dupli_parent_bdata = {dup.get_parent().bdata for dup in animdata_ob if dup.is_dupli} has_animated_duplis = bool(dupli_parent_bdata) - currframe = f_start - while currframe <= f_end: - real_currframe = currframe - f_start if start_zero else currframe - scene.frame_set(int(currframe), subframe=currframe - int(currframe)) + # Initialize keyframe times array. Each AnimationCurveNodeWrapper will share the same instance. + currframes = np.arange(f_start, np.nextafter(f_end, np.inf), step=bake_step) + real_currframes = currframes - f_start if start_zero else currframes - if has_animated_duplis: - # Changing the scene's frame invalidates existing dupli instances. To get the updated matrices of duplis for - # this frame, we must get the duplis from the depsgraph again. - for dup in depsgraph.object_instances: - if (parent := dup.parent) and parent.original in dupli_parent_bdata: - # ObjectWrapper caches its instances. 
Attempting to create a new instance updates the existing - # ObjectWrapper instance with the current frame's matrix and then returns the existing instance. - ObjectWrapper(dup) - for ob_obj, (anim_loc, anim_rot, anim_scale) in animdata_ob.items(): - # We compute baked loc/rot/scale for all objects (rot being euler-compat with previous value!). - p_rot = p_rots.get(ob_obj, None) - loc, rot, scale, _m, _mr = ob_obj.fbx_object_tx(scene_data, rot_euler_compat=p_rot) - p_rots[ob_obj] = rot - anim_loc.add_keyframe(real_currframe, loc) - anim_rot.add_keyframe(real_currframe, tuple(convert_rad_to_deg_iter(rot))) - anim_scale.add_keyframe(real_currframe, scale) - for anim_shape, me, shape in animdata_shapes.values(): - anim_shape.add_keyframe(real_currframe, (shape.value * 100.0,)) - for anim_camera_lens, anim_camera_focus_distance, camera in animdata_cameras.values(): - anim_camera_lens.add_keyframe(real_currframe, (camera.lens,)) - anim_camera_focus_distance.add_keyframe(real_currframe, (camera.dof.focus_distance * 1000 * gscale,)) - currframe += bake_step + # Get all animated values + def frame_values_gen(): + # Iterate through each frame and yield the values for that frame. + int_currframes = currframes.astype(int) + subframes = currframes - int_currframes + for real_currframe, int_currframe, subframe in zip(real_currframes.data, int_currframes.data, subframes.data): + scene.frame_set(int_currframe, subframe=subframe) + if has_animated_duplis: + # Changing the scene's frame invalidates existing dupli instances. To get the updated matrices of duplis + # for this frame, we must get the duplis from the depsgraph again. + for dup in depsgraph.object_instances: + if (parent := dup.parent) and parent.original in dupli_parent_bdata: + # ObjectWrapper caches its instances. Attempting to create a new instance updates the existing + # ObjectWrapper instance with the current frame's matrix and then returns the existing instance. + ObjectWrapper(dup) + for ob_obj in animdata_ob: + # We compute baked loc/rot/scale for all objects (rot being euler-compat with previous value!). + p_rot = p_rots.get(ob_obj, None) + loc, rot, scale, _m, _mr = ob_obj.fbx_object_tx(scene_data, rot_euler_compat=p_rot) + p_rots[ob_obj] = rot + yield from loc + yield from rot + yield from scale + for anim_shape, me, shape in animdata_shapes.values(): + yield shape.value + for anim_camera_lens, anim_camera_focus_distance, camera in animdata_cameras.values(): + yield camera.lens + yield camera.dof.focus_distance + + # Calculating the total expected number of values reduces memory allocations while iterating and ensures the array + # ends up the size we're expecting. + num_ob_loc_values = num_ob_rot_values = num_ob_scale_values = 3 + num_values_per_ob = num_ob_loc_values + num_ob_rot_values + num_ob_scale_values + num_ob_values = len(animdata_ob) * num_values_per_ob + num_shape_values = len(animdata_shapes) + num_values_per_camera = 2 + num_camera_values = len(animdata_cameras) * num_values_per_camera + num_values_per_frame = num_ob_values + num_shape_values + num_camera_values + num_frames = len(real_currframes) + total_num_values = num_frames * num_values_per_frame + all_values = np.fromiter(frame_values_gen(), dtype=np.float64, count=total_num_values) scene.frame_set(back_currframe, subframe=0.0) + # View as each column being the values for a single frame and each row being all values for a single property in a + # curve. + all_values = all_values.reshape(num_frames, -1).T + # Split into views of the arrays for each curve type. 
+ split_at = [num_ob_values, num_shape_values, num_camera_values] + # The last value isn't needed, because the last split is assumed to go to the end of the array. + split_at = split_at[:-1] + # For uneven splits, np.split takes indices to split at, which can be acquired through a cumulative sum across the + # list. + split_at = np.cumsum(split_at) + all_ob_values, all_shape_key_values, all_camera_values = np.split(all_values, split_at) + + # Set location/rotation/scale curves + # Further split into views of the arrays for each object. + num_animdata_ob = len(animdata_ob) + all_ob_values = np.split(all_ob_values, num_animdata_ob) if num_animdata_ob else () + for (anim_loc, anim_rot, anim_scale), ob_values in zip(animdata_ob.values(), all_ob_values): + # Further split into views of the location, rotation and scaling arrays. + loc_xyz, rot_xyz, sca_xyz = np.split(ob_values, 3) + # In-place convert to degrees. + np.rad2deg(rot_xyz, out=rot_xyz) + + anim_loc.set_keyframes(real_currframes, loc_xyz) + anim_rot.set_keyframes(real_currframes, rot_xyz) + anim_scale.set_keyframes(real_currframes, sca_xyz) + + # Set shape key curves + for (anim_shape, _me, _shape), shape_key_values in zip(animdata_shapes.values(), all_shape_key_values): + # In-place convert from Blender Shape Key Value to FBX Deform Percent. + shape_key_values *= 100.0 + anim_shape.set_keyframes(real_currframes, shape_key_values) + + # Set camera curves + # Further split into views of the arrays for each camera. + num_animdata_cameras = len(animdata_cameras) + all_camera_values = np.split(all_camera_values, num_animdata_cameras) if num_animdata_cameras else () + for (anim_camera_lens, anim_camera_focus_distance, camera), camera_values in zip(animdata_cameras.values(), all_camera_values): + lens_values, focus_distance_values = camera_values + # In-place convert from Blender to FBX + focus_distance_values *= (1000 * gscale) + anim_camera_lens.set_keyframes(real_currframes, lens_values) + anim_camera_focus_distance.set_keyframes(real_currframes, focus_distance_values) + animations = {} # And now, produce final data (usable by FBX export code) @@ -2848,8 +2909,8 @@ def fbx_data_from_scene(scene, depsgraph, settings): for _alayer_key, alayer in astack.values(): for _acnode_key, acnode, _acnode_name in alayer.values(): nbr_acnodes += 1 - for _acurve_key, _dval, acurve, acurve_valid in acnode.values(): - if acurve: + for _acurve_key, _dval, (acurve_keys, acurve_values), acurve_valid in acnode.values(): + if len(acurve_keys): nbr_acurves += 1 templates[b"AnimationStack"] = fbx_template_def_animstack(scene, settings, nbr_users=nbr_astacks) @@ -2983,8 +3044,8 @@ def fbx_data_from_scene(scene, depsgraph, settings): connections.append((b"OO", acurvenode_id, alayer_id, None)) # Animcurvenode -> object property. connections.append((b"OP", acurvenode_id, elem_id, fbx_prop.encode())) - for fbx_item, (acurve_key, default_value, acurve, acurve_valid) in acurves.items(): - if acurve: + for fbx_item, (acurve_key, default_value, (acurve_keys, acurve_values), acurve_valid) in acurves.items(): + if len(acurve_keys): # Animcurve -> Animcurvenode. connections.append((b"OP", get_fbx_uuid_from_key(acurve_key), acurvenode_id, fbx_item.encode())) diff --git a/io_scene_fbx/fbx_utils.py b/io_scene_fbx/fbx_utils.py index f874bb49b..44c162916 100644 --- a/io_scene_fbx/fbx_utils.py +++ b/io_scene_fbx/fbx_utils.py @@ -1234,8 +1234,10 @@ class AnimationCurveNodeWrapper: and easy API to handle those. 
""" __slots__ = ( - 'elem_keys', '_keys', 'default_values', 'fbx_group', 'fbx_gname', 'fbx_props', - 'force_keying', 'force_startend_keying') + 'elem_keys', 'default_values', 'fbx_group', 'fbx_gname', 'fbx_props', + 'force_keying', 'force_startend_keying', + '_frame_times_array', '_frame_values_array', '_frame_write_mask_array', + ) kinds = { 'LCL_TRANSLATION': ("Lcl Translation", "T", ("X", "Y", "Z")), @@ -1254,7 +1256,9 @@ class AnimationCurveNodeWrapper: self.fbx_props = [self.kinds[kind][2]] self.force_keying = force_keying self.force_startend_keying = force_startend_keying - self._keys = [] # (frame, values, write_flags) + self._frame_times_array = None + self._frame_values_array = None + self._frame_write_mask_array = None if default_values is not ...: assert(len(default_values) == len(self.fbx_props[0])) self.default_values = default_values @@ -1263,7 +1267,7 @@ class AnimationCurveNodeWrapper: def __bool__(self): # We are 'True' if we do have some validated keyframes... - return bool(self._keys) and (True in ((True in k[2]) for k in self._keys)) + return self._frame_write_mask_array is not None and bool(np.any(self._frame_write_mask_array)) def add_group(self, elem_key, fbx_group, fbx_gname, fbx_props): """ @@ -1276,19 +1280,30 @@ class AnimationCurveNodeWrapper: self.fbx_gname.append(fbx_gname) self.fbx_props.append(fbx_props) - def add_keyframe(self, frame, values): + def set_keyframes(self, keyframe_times, keyframe_values): """ - Add a new keyframe to all curves of the group. + Set all keyframe times and values of the group. + Values can be a 2D array where each row is a separate curve. """ - assert(len(values) == len(self.fbx_props[0])) - self._keys.append((frame, values, [True] * len(values))) # write everything by default. + # View 1D keyframe_values as 2D with a single row, so that the same iterative code can be used for both 1D and + # 2D inputs. + if len(keyframe_values.shape) == 1: + keyframe_values = keyframe_values[np.newaxis] + # There must be a time for each column of values. + assert(len(keyframe_times) == keyframe_values.shape[1]) + # There must be as many rows of values as there are properties. + assert(len(self.fbx_props[0]) == len(keyframe_values)) + write_mask = np.full_like(keyframe_values, True, dtype=bool) # write everything by default + self._frame_times_array = keyframe_times + self._frame_values_array = keyframe_values + self._frame_write_mask_array = write_mask def simplify(self, fac, step, force_keep=False): """ Simplifies sampled curves by only enabling samples when: * their values relatively differ from the previous sample ones. """ - if not self._keys: + if self._frame_times_array is None: return if fac == 0.0: @@ -1297,36 +1312,87 @@ class AnimationCurveNodeWrapper: # So that, with default factor and step values (1), we get: min_reldiff_fac = fac * 1.0e-3 # min relative value evolution: 0.1% of current 'order of magnitude'. min_absdiff_fac = 0.1 # A tenth of reldiff... - keys = self._keys - p_currframe, p_key, p_key_write = keys[0] - p_keyed = list(p_key) - are_keyed = [False] * len(p_key) - for currframe, key, key_write in keys: - for idx, (val, p_val) in enumerate(zip(key, p_key)): - key_write[idx] = False - p_keyedval = p_keyed[idx] - if val == p_val: - # Never write keyframe when value is exactly the same as prev one! 
- continue - # This is contracted form of relative + absolute-near-zero difference: - # absdiff = abs(a - b) - # if absdiff < min_reldiff_fac * min_absdiff_fac: - # return False - # return (absdiff / ((abs(a) + abs(b)) / 2)) > min_reldiff_fac - # Note that we ignore the '/ 2' part here, since it's not much significant for us. - if abs(val - p_val) > (min_reldiff_fac * max(abs(val) + abs(p_val), min_absdiff_fac)): - # If enough difference from previous sampled value, key this value *and* the previous one! - key_write[idx] = True - p_key_write[idx] = True - p_keyed[idx] = val - are_keyed[idx] = True - elif abs(val - p_keyedval) > (min_reldiff_fac * max((abs(val) + abs(p_keyedval)), min_absdiff_fac)): - # Else, if enough difference from previous keyed value, key this value only! - key_write[idx] = True - p_keyed[idx] = val - are_keyed[idx] = True - p_currframe, p_key, p_key_write = currframe, key, key_write + for values, write_mask in zip(self._frame_values_array, self._frame_write_mask_array): + # Initialise to no frames written. + write_mask[:] = False + + # Create views of the 'previous' and 'current' + p_key_write_mask = write_mask[:-1] + key_write_mask = write_mask[1:] + p_val = values[:-1] + val = values[1:] + abs_values = np.abs(values) + p_val_abs = abs_values[:-1] + val_abs = abs_values[1:] + + # This is contracted form of relative + absolute-near-zero difference: + # absdiff = abs(a - b) + # if absdiff < min_reldiff_fac * min_absdiff_fac: + # return False + # return (absdiff / ((abs(a) + abs(b)) / 2)) > min_reldiff_fac + # Note that we ignore the '/ 2' part here, since it's not much significant for us. + enough_diff_prev_sampled_mask = ( + np.abs(val - p_val) > (min_reldiff_fac * np.maximum(val_abs + p_val_abs, min_absdiff_fac)) + ) + # If enough difference from previous sampled value, key this value *and* the previous one! + # Unless it is forcefully keyed later, this is the only way that the first value can be keyed. + p_key_write_mask[enough_diff_prev_sampled_mask] = True + key_write_mask[enough_diff_prev_sampled_mask] = True + + # The other case where we key a value is if there is enough difference between it and the previous keyed + # value. + # Values that equal their previous value are skipped and the remaining values to check are those which are + # currently not keyed + not_keyed_mask = ~key_write_mask + check_diff_mask = np.logical_and(not_keyed_mask, p_val != val) + val_check_idx = np.flatnonzero(check_diff_mask) + val_check = val[val_check_idx] + val_abs_check = val_abs[val_check_idx] + + # For each frame, get the index of the previous keyed value. + prev_keyframe_indices = np.arange(1, len(values)) + # The first 'previous keyframe' defaults to values[0], even if it's not actually keyed. + prev_keyframe_indices[not_keyed_mask] = 0 + # Accumulative maximum fills in the zeroed indices with the closest previous non-zero index. + prev_keyframe_indices = np.maximum.accumulate(prev_keyframe_indices) + # Extract only the indices that need checking. + prev_keyframe_indices = prev_keyframe_indices[val_check_idx] + p_kf_vals = values[prev_keyframe_indices] + p_kf_vals_abs = np.abs(p_kf_vals) + + # We check our relative + absolute-near-zero difference again, but against the previous keyed value this + # time. 
+ enough_diff_prev_keyed_mask = ( + np.abs(val_check - p_kf_vals) + > (min_reldiff_fac * np.maximum(val_abs_check + p_kf_vals_abs, min_absdiff_fac)) + ) + if np.any(enough_diff_prev_keyed_mask): + # If there are any that are different enough from the previous keyed value, then we have to check them + # all iteratively because keying a new value can change the previous keyed value of some elements, which + # can change whether a value is different enough from its previous keyed value. + last_new_kf_idx = -1 + last_new_kf_val = -1 + last_new_kf_val_abs = -1 + # Accessing .data, the memoryview of the array iteratively or by individual index is faster than doing + # the same with the array itself. + key_write_mv = key_write_mask.data + zipped = zip(val_check_idx.data, val_check.data, val_abs_check.data, prev_keyframe_indices.data, + enough_diff_prev_keyed_mask.data) + for cur_idx, cur_val, abs_cur_val, p_kf_idx, enough_diff in zipped: + if last_new_kf_idx > p_kf_idx: + # The previous keyframe is new and was not included when enough_diff_prev_keyed_mask was + # calculated, so whether the current value is different enough from the previous keyframe needs + # to be calculated. + # Check if the relative + absolute-near-zero difference is enough to key this frame. + enough_diff = (abs(cur_val - last_new_kf_val) + > (min_reldiff_fac * max(abs_cur_val + last_new_kf_val_abs, min_absdiff_fac))) + if enough_diff: + # The current index needs to be keyed. + last_new_kf_idx = cur_idx + last_new_kf_val = cur_val + last_new_kf_val_abs = abs_cur_val + key_write_mv[cur_idx] = True # If we write nothing (action doing nothing) and are in 'force_keep' mode, we key everything! :P # See T41766. @@ -1334,25 +1400,27 @@ class AnimationCurveNodeWrapper: # are not animated, but are children of animated ones, so added an option to systematically force writing # one key in this case. # See T41719, T41605, T41254... - if self.force_keying or (force_keep and not self): - are_keyed[:] = [True] * len(are_keyed) + if self.force_keying or (force_keep and not np.any(self._frame_write_mask_array)): + are_keyed = [True] * len(self._frame_write_mask_array) + else: + are_keyed = np.any(self._frame_write_mask_array, axis=1) # If we did key something, ensure first and last sampled values are keyed as well. if self.force_startend_keying: - for idx, is_keyed in enumerate(are_keyed): + for is_keyed, write_mask in zip(are_keyed, self._frame_write_mask_array): if is_keyed: - keys[0][2][idx] = keys[-1][2][idx] = True + write_mask[:1] = True + write_mask[-1:] = True def get_final_data(self, scene, ref_id, force_keep=False): """ Yield final anim data for this 'curvenode' (for all curvenodes defined). force_keep is to force to keep a curve even if it only has one valid keyframe. """ - curves = [[] for k in self._keys[0][1]] - for currframe, key, key_write in self._keys: - for curve, val, wrt in zip(curves, key, key_write): - if wrt: - curve.append((currframe, val)) + curves = [ + (self._frame_times_array[write_mask], values[write_mask]) + for values, write_mask in zip(self._frame_values_array, self._frame_write_mask_array) + ] force_keep = force_keep or self.force_keying for elem_key, fbx_group, fbx_gname, fbx_props in \ @@ -1363,8 +1431,9 @@ class AnimationCurveNodeWrapper: fbx_item = FBX_ANIM_PROPSGROUP_NAME + "|" + fbx_item curve_key = get_blender_anim_curve_key(scene, ref_id, elem_key, fbx_group, fbx_item) # (curve key, default value, keyframes, write flag). 
- group[fbx_item] = (curve_key, def_val, c, - True if (len(c) > 1 or (len(c) > 0 and force_keep)) else False) + times = c[0] + write_flag = len(times) > (0 if force_keep else 1) + group[fbx_item] = (curve_key, def_val, c, write_flag) yield elem_key, group_key, group, fbx_group, fbx_gname -- 2.30.2 From 3f9b11c9cb966cf16fb7c11e67d83d78a4ed07de Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Mon, 11 Sep 2023 03:11:47 +0100 Subject: [PATCH 02/14] Revert to using `bool(self)` in force_keep check --- io_scene_fbx/fbx_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/io_scene_fbx/fbx_utils.py b/io_scene_fbx/fbx_utils.py index 44c162916..dca0df2d5 100644 --- a/io_scene_fbx/fbx_utils.py +++ b/io_scene_fbx/fbx_utils.py @@ -1400,7 +1400,7 @@ class AnimationCurveNodeWrapper: # are not animated, but are children of animated ones, so added an option to systematically force writing # one key in this case. # See T41719, T41605, T41254... - if self.force_keying or (force_keep and not np.any(self._frame_write_mask_array)): + if self.force_keying or (force_keep and not self): are_keyed = [True] * len(self._frame_write_mask_array) else: are_keyed = np.any(self._frame_write_mask_array, axis=1) -- 2.30.2 From 4232afbc6a98632fe76ae3200664c8cd93e3fce6 Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Mon, 11 Sep 2023 12:24:38 +0100 Subject: [PATCH 03/14] Comment updates and rename variables for better consistency --- io_scene_fbx/export_fbx_bin.py | 2 + io_scene_fbx/fbx_utils.py | 115 +++++++++++++++++---------------- 2 files changed, 61 insertions(+), 56 deletions(-) diff --git a/io_scene_fbx/export_fbx_bin.py b/io_scene_fbx/export_fbx_bin.py index 8fa6480c0..16e2948d1 100644 --- a/io_scene_fbx/export_fbx_bin.py +++ b/io_scene_fbx/export_fbx_bin.py @@ -2255,6 +2255,8 @@ def fbx_animations_do(scene_data, ref_id, f_start, f_end, start_zero, objects=No has_animated_duplis = bool(dupli_parent_bdata) # Initialize keyframe times array. Each AnimationCurveNodeWrapper will share the same instance. + # `np.arange` excludes the `stop` argument like when using `range`, so we use np.nextafter to get the next + # representable value after f_end and use that as the `stop` argument instead. currframes = np.arange(f_start, np.nextafter(f_end, np.inf), step=bake_step) real_currframes = currframes - f_start if start_zero else currframes diff --git a/io_scene_fbx/fbx_utils.py b/io_scene_fbx/fbx_utils.py index dca0df2d5..7296f7c7e 100644 --- a/io_scene_fbx/fbx_utils.py +++ b/io_scene_fbx/fbx_utils.py @@ -1283,9 +1283,9 @@ class AnimationCurveNodeWrapper: def set_keyframes(self, keyframe_times, keyframe_values): """ Set all keyframe times and values of the group. - Values can be a 2D array where each row is a separate curve. + Values can be a 2D array where each row is the values for a separate curve. """ - # View 1D keyframe_values as 2D with a single row, so that the same iterative code can be used for both 1D and + # View 1D keyframe_values as 2D with a single row, so that the same code can be used for both 1D and # 2D inputs. if len(keyframe_values.shape) == 1: keyframe_values = keyframe_values[np.newaxis] @@ -1304,6 +1304,7 @@ class AnimationCurveNodeWrapper: * their values relatively differ from the previous sample ones. """ if self._frame_times_array is None: + # Keyframes have not been added yet. return if fac == 0.0: @@ -1313,18 +1314,19 @@ class AnimationCurveNodeWrapper: min_reldiff_fac = fac * 1.0e-3 # min relative value evolution: 0.1% of current 'order of magnitude'. 
min_absdiff_fac = 0.1 # A tenth of reldiff... - for values, write_mask in zip(self._frame_values_array, self._frame_write_mask_array): + for values, frame_write_mask in zip(self._frame_values_array, self._frame_write_mask_array): # Initialise to no frames written. - write_mask[:] = False + frame_write_mask[:] = False - # Create views of the 'previous' and 'current' - p_key_write_mask = write_mask[:-1] - key_write_mask = write_mask[1:] - p_val = values[:-1] - val = values[1:] abs_values = np.abs(values) - p_val_abs = abs_values[:-1] - val_abs = abs_values[1:] + + # Create views of the 'previous' and 'current' mask and values. + p_val = values[:-1] + abs_p_val = abs_values[:-1] + p_write_mask = frame_write_mask[:-1] + val = values[1:] + abs_val = abs_values[1:] + write_mask = frame_write_mask[1:] # This is contracted form of relative + absolute-near-zero difference: # absdiff = abs(a - b) @@ -1332,67 +1334,68 @@ class AnimationCurveNodeWrapper: # return False # return (absdiff / ((abs(a) + abs(b)) / 2)) > min_reldiff_fac # Note that we ignore the '/ 2' part here, since it's not much significant for us. - enough_diff_prev_sampled_mask = ( - np.abs(val - p_val) > (min_reldiff_fac * np.maximum(val_abs + p_val_abs, min_absdiff_fac)) + enough_diff_p_val_mask = ( + np.abs(val - p_val) > (min_reldiff_fac * np.maximum(abs_val + abs_p_val, min_absdiff_fac)) ) # If enough difference from previous sampled value, key this value *and* the previous one! # Unless it is forcefully keyed later, this is the only way that the first value can be keyed. - p_key_write_mask[enough_diff_prev_sampled_mask] = True - key_write_mask[enough_diff_prev_sampled_mask] = True + p_write_mask[enough_diff_p_val_mask] = True + write_mask[enough_diff_p_val_mask] = True # The other case where we key a value is if there is enough difference between it and the previous keyed # value. - # Values that equal their previous value are skipped and the remaining values to check are those which are - # currently not keyed - not_keyed_mask = ~key_write_mask + # Values that equal their previous value can be skipped, so the remaining values to check are those which + # are currently not keyed. + not_keyed_mask = ~write_mask check_diff_mask = np.logical_and(not_keyed_mask, p_val != val) val_check_idx = np.flatnonzero(check_diff_mask) val_check = val[val_check_idx] - val_abs_check = val_abs[val_check_idx] + abs_val_check = abs_val[val_check_idx] - # For each frame, get the index of the previous keyed value. - prev_keyframe_indices = np.arange(1, len(values)) - # The first 'previous keyframe' defaults to values[0], even if it's not actually keyed. - prev_keyframe_indices[not_keyed_mask] = 0 - # Accumulative maximum fills in the zeroed indices with the closest previous non-zero index. - prev_keyframe_indices = np.maximum.accumulate(prev_keyframe_indices) + # For each current value, get the index of the previous keyed value in `values`. + p_keyed_idx = np.arange(1, len(values)) + # The first 'previous keyed value' defaults to values[0], even if it's not actually keyed. + p_keyed_idx[not_keyed_mask] = 0 + # Accumulative maximum fills in the zeroed indices with the closest previous non-zero index because the + # indices must be increasing. + p_keyed_idx = np.maximum.accumulate(p_keyed_idx) # Extract only the indices that need checking. 
- prev_keyframe_indices = prev_keyframe_indices[val_check_idx] - p_kf_vals = values[prev_keyframe_indices] - p_kf_vals_abs = np.abs(p_kf_vals) + p_keyed_idx_check = p_keyed_idx[val_check_idx] + p_keyed_val_check = values[p_keyed_idx_check] + abs_p_keyed_val_check = np.abs(p_keyed_val_check) - # We check our relative + absolute-near-zero difference again, but against the previous keyed value this + # We check the relative + absolute-near-zero difference again, but against the previous keyed value this # time. - enough_diff_prev_keyed_mask = ( - np.abs(val_check - p_kf_vals) - > (min_reldiff_fac * np.maximum(val_abs_check + p_kf_vals_abs, min_absdiff_fac)) + enough_diff_p_keyed_val_mask = ( + np.abs(val_check - p_keyed_val_check) + > (min_reldiff_fac * np.maximum(abs_val_check + abs_p_keyed_val_check, min_absdiff_fac)) ) - if np.any(enough_diff_prev_keyed_mask): + if np.any(enough_diff_p_keyed_val_mask): # If there are any that are different enough from the previous keyed value, then we have to check them # all iteratively because keying a new value can change the previous keyed value of some elements, which - # can change whether a value is different enough from its previous keyed value. - last_new_kf_idx = -1 - last_new_kf_val = -1 - last_new_kf_val_abs = -1 - # Accessing .data, the memoryview of the array iteratively or by individual index is faster than doing + # changes their relative + absolute-near-zero difference. + new_p_keyed_idx = -1 + new_p_keyed_val = -1 + new_p_keyed_val_abs = -1 + # Accessing .data, the memoryview of the array, iteratively or by individual index is faster than doing # the same with the array itself. - key_write_mv = key_write_mask.data - zipped = zip(val_check_idx.data, val_check.data, val_abs_check.data, prev_keyframe_indices.data, - enough_diff_prev_keyed_mask.data) - for cur_idx, cur_val, abs_cur_val, p_kf_idx, enough_diff in zipped: - if last_new_kf_idx > p_kf_idx: - # The previous keyframe is new and was not included when enough_diff_prev_keyed_mask was - # calculated, so whether the current value is different enough from the previous keyframe needs - # to be calculated. - # Check if the relative + absolute-near-zero difference is enough to key this frame. - enough_diff = (abs(cur_val - last_new_kf_val) - > (min_reldiff_fac * max(abs_cur_val + last_new_kf_val_abs, min_absdiff_fac))) + key_write_mv = write_mask.data + zipped = zip(val_check_idx.data, val_check.data, abs_val_check.data, p_keyed_idx_check.data, + enough_diff_p_keyed_val_mask.data) + for cur_idx, cur_val, abs_cur_val, old_p_keyed_idx, enough_diff in zipped: + if new_p_keyed_idx > old_p_keyed_idx: + # The previous keyed value is new and was not included when enough_diff_p_keyed_val_mask was + # calculated, so whether the current value is different enough needs to be calculated. + # Check if the relative + absolute-near-zero difference is enough to key this value. + enough_diff = (abs(cur_val - new_p_keyed_val) + > (min_reldiff_fac * max(abs_cur_val + new_p_keyed_val_abs, min_absdiff_fac))) if enough_diff: - # The current index needs to be keyed. - last_new_kf_idx = cur_idx - last_new_kf_val = cur_val - last_new_kf_val_abs = abs_cur_val + # The current value needs to be keyed. key_write_mv[cur_idx] = True + # Update the index and values for this newly keyed value. + new_p_keyed_idx = cur_idx + new_p_keyed_val = cur_val + new_p_keyed_val_abs = abs_cur_val # If we write nothing (action doing nothing) and are in 'force_keep' mode, we key everything! :P # See T41766. 
@@ -1407,10 +1410,10 @@ class AnimationCurveNodeWrapper: # If we did key something, ensure first and last sampled values are keyed as well. if self.force_startend_keying: - for is_keyed, write_mask in zip(are_keyed, self._frame_write_mask_array): + for is_keyed, frame_write_mask in zip(are_keyed, self._frame_write_mask_array): if is_keyed: - write_mask[:1] = True - write_mask[-1:] = True + frame_write_mask[:1] = True + frame_write_mask[-1:] = True def get_final_data(self, scene, ref_id, force_keep=False): """ -- 2.30.2 From 7c7ab219730f020705d8574f54e83a4f2261d6c4 Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Mon, 11 Sep 2023 23:15:09 +0100 Subject: [PATCH 04/14] Simplify iterables and p_rots in frame_values_gen --- io_scene_fbx/export_fbx_bin.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/io_scene_fbx/export_fbx_bin.py b/io_scene_fbx/export_fbx_bin.py index 16e2948d1..f0d053910 100644 --- a/io_scene_fbx/export_fbx_bin.py +++ b/io_scene_fbx/export_fbx_bin.py @@ -2265,6 +2265,13 @@ def fbx_animations_do(scene_data, ref_id, f_start, f_end, start_zero, objects=No # Iterate through each frame and yield the values for that frame. int_currframes = currframes.astype(int) subframes = currframes - int_currframes + # Create simpler iterables that return only the values we care about. + animdata_shapes_only = [shape for _anim_shape, _me, shape in animdata_shapes.values()] + animdata_cameras_only = [camera for _anim_camera_lens, _anim_camera_focus_distance, camera + in animdata_cameras.values()] + # Previous frame's rotation for each object in animdata_ob, this will be updated each frame. + animdata_ob_p_rots = p_rots.values() + # Iterating .data, the memoryview of an array, is faster than iterating the array directly. for real_currframe, int_currframe, subframe in zip(real_currframes.data, int_currframes.data, subframes.data): scene.frame_set(int_currframe, subframe=subframe) @@ -2276,17 +2283,18 @@ def fbx_animations_do(scene_data, ref_id, f_start, f_end, start_zero, objects=No # ObjectWrapper caches its instances. Attempting to create a new instance updates the existing # ObjectWrapper instance with the current frame's matrix and then returns the existing instance. ObjectWrapper(dup) - for ob_obj in animdata_ob: + next_p_rots = [] + for ob_obj, p_rot in zip(animdata_ob, animdata_ob_p_rots): # We compute baked loc/rot/scale for all objects (rot being euler-compat with previous value!). 
- p_rot = p_rots.get(ob_obj, None) loc, rot, scale, _m, _mr = ob_obj.fbx_object_tx(scene_data, rot_euler_compat=p_rot) - p_rots[ob_obj] = rot + next_p_rots.append(rot) yield from loc yield from rot yield from scale - for anim_shape, me, shape in animdata_shapes.values(): + animdata_ob_p_rots = next_p_rots + for shape in animdata_shapes_only: yield shape.value - for anim_camera_lens, anim_camera_focus_distance, camera in animdata_cameras.values(): + for camera in animdata_cameras_only: yield camera.lens yield camera.dof.focus_distance -- 2.30.2 From 892a192b6b1d6061418ef040ecba761a84c95e01 Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Mon, 11 Sep 2023 23:16:22 +0100 Subject: [PATCH 05/14] Make curves unpacked iteration variable names consistent --- io_scene_fbx/export_fbx_bin.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/io_scene_fbx/export_fbx_bin.py b/io_scene_fbx/export_fbx_bin.py index f0d053910..857968bef 100644 --- a/io_scene_fbx/export_fbx_bin.py +++ b/io_scene_fbx/export_fbx_bin.py @@ -2919,8 +2919,8 @@ def fbx_data_from_scene(scene, depsgraph, settings): for _alayer_key, alayer in astack.values(): for _acnode_key, acnode, _acnode_name in alayer.values(): nbr_acnodes += 1 - for _acurve_key, _dval, (acurve_keys, acurve_values), acurve_valid in acnode.values(): - if len(acurve_keys): + for _acurve_key, _dval, (keys, _values), acurve_valid in acnode.values(): + if len(keys): nbr_acurves += 1 templates[b"AnimationStack"] = fbx_template_def_animstack(scene, settings, nbr_users=nbr_astacks) @@ -3054,8 +3054,8 @@ def fbx_data_from_scene(scene, depsgraph, settings): connections.append((b"OO", acurvenode_id, alayer_id, None)) # Animcurvenode -> object property. connections.append((b"OP", acurvenode_id, elem_id, fbx_prop.encode())) - for fbx_item, (acurve_key, default_value, (acurve_keys, acurve_values), acurve_valid) in acurves.items(): - if len(acurve_keys): + for fbx_item, (acurve_key, default_value, (keys, values), acurve_valid) in acurves.items(): + if len(keys): # Animcurve -> Animcurvenode. connections.append((b"OP", get_fbx_uuid_from_key(acurve_key), acurvenode_id, fbx_item.encode())) -- 2.30.2 From b3d4aa8d6496c077c28e2769711ff030e5d858f3 Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Tue, 12 Sep 2023 03:34:07 +0100 Subject: [PATCH 06/14] fbx_animations_do comments and non-functional refactoring --- io_scene_fbx/export_fbx_bin.py | 67 +++++++++++++++++----------------- 1 file changed, 33 insertions(+), 34 deletions(-) diff --git a/io_scene_fbx/export_fbx_bin.py b/io_scene_fbx/export_fbx_bin.py index 857968bef..548324d2c 100644 --- a/io_scene_fbx/export_fbx_bin.py +++ b/io_scene_fbx/export_fbx_bin.py @@ -2260,17 +2260,20 @@ def fbx_animations_do(scene_data, ref_id, f_start, f_end, start_zero, objects=No currframes = np.arange(f_start, np.nextafter(f_end, np.inf), step=bake_step) real_currframes = currframes - f_start if start_zero else currframes - # Get all animated values + # Generator that yields the animated values of each frame in order. def frame_values_gen(): - # Iterate through each frame and yield the values for that frame. + # Precalculate integer frames and subframes. int_currframes = currframes.astype(int) subframes = currframes - int_currframes + # Create simpler iterables that return only the values we care about. 
animdata_shapes_only = [shape for _anim_shape, _me, shape in animdata_shapes.values()] animdata_cameras_only = [camera for _anim_camera_lens, _anim_camera_focus_distance, camera in animdata_cameras.values()] # Previous frame's rotation for each object in animdata_ob, this will be updated each frame. animdata_ob_p_rots = p_rots.values() + + # Iterate through each frame and yield the values for that frame. # Iterating .data, the memoryview of an array, is faster than iterating the array directly. for real_currframe, int_currframe, subframe in zip(real_currframes.data, int_currframes.data, subframes.data): scene.frame_set(int_currframe, subframe=subframe) @@ -2298,59 +2301,55 @@ def fbx_animations_do(scene_data, ref_id, f_start, f_end, start_zero, objects=No yield camera.lens yield camera.dof.focus_distance - # Calculating the total expected number of values reduces memory allocations while iterating and ensures the array - # ends up the size we're expecting. - num_ob_loc_values = num_ob_rot_values = num_ob_scale_values = 3 - num_values_per_ob = num_ob_loc_values + num_ob_rot_values + num_ob_scale_values - num_ob_values = len(animdata_ob) * num_values_per_ob - num_shape_values = len(animdata_shapes) - num_values_per_camera = 2 - num_camera_values = len(animdata_cameras) * num_values_per_camera + # Providing `count` to np.fromiter pre-allocates the array, avoiding extra memory allocations while iterating. + num_ob_values = len(animdata_ob) * 9 # Location, rotation and scale, each of which have x, y, and z components + num_shape_values = len(animdata_shapes) # Only 1 value per shape key + num_camera_values = len(animdata_cameras) * 2 # Focal length (`.lens`) and focus distance num_values_per_frame = num_ob_values + num_shape_values + num_camera_values num_frames = len(real_currframes) - total_num_values = num_frames * num_values_per_frame - all_values = np.fromiter(frame_values_gen(), dtype=np.float64, count=total_num_values) + all_values_flat = np.fromiter(frame_values_gen(), dtype=float, count=num_frames * num_values_per_frame) + + # Restore the scene's current frame. scene.frame_set(back_currframe, subframe=0.0) - # View as each column being the values for a single frame and each row being all values for a single property in a - # curve. - all_values = all_values.reshape(num_frames, -1).T + # View such that each column is all values for a single frame and each row is all values for a single curve. + all_values = all_values_flat.reshape(num_frames, num_values_per_frame).T # Split into views of the arrays for each curve type. split_at = [num_ob_values, num_shape_values, num_camera_values] + # For unequal sized splits, np.split takes indices to split at, which can be acquired through a cumulative sum + # across the list. # The last value isn't needed, because the last split is assumed to go to the end of the array. - split_at = split_at[:-1] - # For uneven splits, np.split takes indices to split at, which can be acquired through a cumulative sum across the - # list. - split_at = np.cumsum(split_at) + split_at = np.cumsum(split_at[:-1]) all_ob_values, all_shape_key_values, all_camera_values = np.split(all_values, split_at) - # Set location/rotation/scale curves - # Further split into views of the arrays for each object. 
- num_animdata_ob = len(animdata_ob) - all_ob_values = np.split(all_ob_values, num_animdata_ob) if num_animdata_ob else () - for (anim_loc, anim_rot, anim_scale), ob_values in zip(animdata_ob.values(), all_ob_values): - # Further split into views of the location, rotation and scaling arrays. + # Set location/rotation/scale curves. + # Split into equal sized views of the arrays for each object. + split_into = len(animdata_ob) + per_ob_values = np.split(all_ob_values, split_into) if split_into > 0 else () + for (anim_loc, anim_rot, anim_scale), ob_values in zip(animdata_ob.values(), per_ob_values): + # Split again into equal sized views of the location, rotation and scaling arrays. loc_xyz, rot_xyz, sca_xyz = np.split(ob_values, 3) - # In-place convert to degrees. + # In-place convert from Blender rotation to FBX rotation. np.rad2deg(rot_xyz, out=rot_xyz) anim_loc.set_keyframes(real_currframes, loc_xyz) anim_rot.set_keyframes(real_currframes, rot_xyz) anim_scale.set_keyframes(real_currframes, sca_xyz) - # Set shape key curves + # Set shape key curves. + # There's only one array per shape key, so there's no need to split `all_shape_key_values`. for (anim_shape, _me, _shape), shape_key_values in zip(animdata_shapes.values(), all_shape_key_values): # In-place convert from Blender Shape Key Value to FBX Deform Percent. shape_key_values *= 100.0 anim_shape.set_keyframes(real_currframes, shape_key_values) - # Set camera curves - # Further split into views of the arrays for each camera. - num_animdata_cameras = len(animdata_cameras) - all_camera_values = np.split(all_camera_values, num_animdata_cameras) if num_animdata_cameras else () - for (anim_camera_lens, anim_camera_focus_distance, camera), camera_values in zip(animdata_cameras.values(), all_camera_values): - lens_values, focus_distance_values = camera_values - # In-place convert from Blender to FBX + # Set camera curves. + # Split into equal sized views of the arrays for each camera. + split_into = len(animdata_cameras) + per_camera_values = np.split(all_camera_values, split_into) if split_into > 0 else () + zipped = zip(animdata_cameras.values(), per_camera_values) + for (anim_camera_lens, anim_camera_focus_distance, _camera), (lens_values, focus_distance_values) in zipped: + # In-place convert from Blender focus distance to FBX. focus_distance_values *= (1000 * gscale) anim_camera_lens.set_keyframes(real_currframes, lens_values) anim_camera_focus_distance.set_keyframes(real_currframes, focus_distance_values) -- 2.30.2 From 5af95962e0bfc36f4cada82deb77a26ba93b9cab Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Tue, 12 Sep 2023 15:08:41 +0100 Subject: [PATCH 07/14] Restore original `simplify` code This will be slower, but means the patch can be split into two parts. --- io_scene_fbx/fbx_utils.py | 112 +++++++++++--------------------------- 1 file changed, 32 insertions(+), 80 deletions(-) diff --git a/io_scene_fbx/fbx_utils.py b/io_scene_fbx/fbx_utils.py index 7296f7c7e..6232533b3 100644 --- a/io_scene_fbx/fbx_utils.py +++ b/io_scene_fbx/fbx_utils.py @@ -1314,88 +1314,42 @@ class AnimationCurveNodeWrapper: min_reldiff_fac = fac * 1.0e-3 # min relative value evolution: 0.1% of current 'order of magnitude'. min_absdiff_fac = 0.1 # A tenth of reldiff... + are_keyed = [] for values, frame_write_mask in zip(self._frame_values_array, self._frame_write_mask_array): # Initialise to no frames written. frame_write_mask[:] = False - abs_values = np.abs(values) + # Create views of the 'previous' and 'current' mask and values. 
The memoryview, .data, of each array is used + # for its iteration and indexing performance compared to the array. + key = values[1:].data + p_key = values[:-1].data + key_write = frame_write_mask[1:].data + p_key_write = frame_write_mask[:-1].data - # Create views of the 'previous' and 'current' mask and values. - p_val = values[:-1] - abs_p_val = abs_values[:-1] - p_write_mask = frame_write_mask[:-1] - val = values[1:] - abs_val = abs_values[1:] - write_mask = frame_write_mask[1:] - - # This is contracted form of relative + absolute-near-zero difference: - # absdiff = abs(a - b) - # if absdiff < min_reldiff_fac * min_absdiff_fac: - # return False - # return (absdiff / ((abs(a) + abs(b)) / 2)) > min_reldiff_fac - # Note that we ignore the '/ 2' part here, since it's not much significant for us. - enough_diff_p_val_mask = ( - np.abs(val - p_val) > (min_reldiff_fac * np.maximum(abs_val + abs_p_val, min_absdiff_fac)) - ) - # If enough difference from previous sampled value, key this value *and* the previous one! - # Unless it is forcefully keyed later, this is the only way that the first value can be keyed. - p_write_mask[enough_diff_p_val_mask] = True - write_mask[enough_diff_p_val_mask] = True - - # The other case where we key a value is if there is enough difference between it and the previous keyed - # value. - # Values that equal their previous value can be skipped, so the remaining values to check are those which - # are currently not keyed. - not_keyed_mask = ~write_mask - check_diff_mask = np.logical_and(not_keyed_mask, p_val != val) - val_check_idx = np.flatnonzero(check_diff_mask) - val_check = val[val_check_idx] - abs_val_check = abs_val[val_check_idx] - - # For each current value, get the index of the previous keyed value in `values`. - p_keyed_idx = np.arange(1, len(values)) - # The first 'previous keyed value' defaults to values[0], even if it's not actually keyed. - p_keyed_idx[not_keyed_mask] = 0 - # Accumulative maximum fills in the zeroed indices with the closest previous non-zero index because the - # indices must be increasing. - p_keyed_idx = np.maximum.accumulate(p_keyed_idx) - # Extract only the indices that need checking. - p_keyed_idx_check = p_keyed_idx[val_check_idx] - p_keyed_val_check = values[p_keyed_idx_check] - abs_p_keyed_val_check = np.abs(p_keyed_val_check) - - # We check the relative + absolute-near-zero difference again, but against the previous keyed value this - # time. - enough_diff_p_keyed_val_mask = ( - np.abs(val_check - p_keyed_val_check) - > (min_reldiff_fac * np.maximum(abs_val_check + abs_p_keyed_val_check, min_absdiff_fac)) - ) - if np.any(enough_diff_p_keyed_val_mask): - # If there are any that are different enough from the previous keyed value, then we have to check them - # all iteratively because keying a new value can change the previous keyed value of some elements, which - # changes their relative + absolute-near-zero difference. - new_p_keyed_idx = -1 - new_p_keyed_val = -1 - new_p_keyed_val_abs = -1 - # Accessing .data, the memoryview of the array, iteratively or by individual index is faster than doing - # the same with the array itself. 
- key_write_mv = write_mask.data - zipped = zip(val_check_idx.data, val_check.data, abs_val_check.data, p_keyed_idx_check.data, - enough_diff_p_keyed_val_mask.data) - for cur_idx, cur_val, abs_cur_val, old_p_keyed_idx, enough_diff in zipped: - if new_p_keyed_idx > old_p_keyed_idx: - # The previous keyed value is new and was not included when enough_diff_p_keyed_val_mask was - # calculated, so whether the current value is different enough needs to be calculated. - # Check if the relative + absolute-near-zero difference is enough to key this value. - enough_diff = (abs(cur_val - new_p_keyed_val) - > (min_reldiff_fac * max(abs_cur_val + new_p_keyed_val_abs, min_absdiff_fac))) - if enough_diff: - # The current value needs to be keyed. - key_write_mv[cur_idx] = True - # Update the index and values for this newly keyed value. - new_p_keyed_idx = cur_idx - new_p_keyed_val = cur_val - new_p_keyed_val_abs = abs_cur_val + p_keyedval = values[0] + is_keyed = False + for idx, (val, p_val) in enumerate(zip(key, p_key)): + if val == p_val: + # Never write keyframe when value is exactly the same as prev one! + continue + # This is contracted form of relative + absolute-near-zero difference: + # absdiff = abs(a - b) + # if absdiff < min_reldiff_fac * min_absdiff_fac: + # return False + # return (absdiff / ((abs(a) + abs(b)) / 2)) > min_reldiff_fac + # Note that we ignore the '/ 2' part here, since it's not much significant for us. + if abs(val - p_val) > (min_reldiff_fac * max(abs(val) + abs(p_val), min_absdiff_fac)): + # If enough difference from previous sampled value, key this value *and* the previous one! + key_write[idx] = True + p_key_write[idx] = True + p_keyedval = val + is_keyed = True + elif abs(val - p_keyedval) > (min_reldiff_fac * max((abs(val) + abs(p_keyedval)), min_absdiff_fac)): + # Else, if enough difference from previous keyed value, key this value only! + key_write[idx] = True + p_keyedval = val + is_keyed = True + are_keyed.append(is_keyed) # If we write nothing (action doing nothing) and are in 'force_keep' mode, we key everything! :P # See T41766. @@ -1404,9 +1358,7 @@ class AnimationCurveNodeWrapper: # one key in this case. # See T41719, T41605, T41254... if self.force_keying or (force_keep and not self): - are_keyed = [True] * len(self._frame_write_mask_array) - else: - are_keyed = np.any(self._frame_write_mask_array, axis=1) + are_keyed[:] = [True] * len(are_keyed) # If we did key something, ensure first and last sampled values are keyed as well. if self.force_startend_keying: -- 2.30.2 From 67b97a59a4c3d6bb9a06eaf816120aa0474724ea Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Wed, 13 Sep 2023 04:25:19 +0100 Subject: [PATCH 08/14] Remove unused real_currframes iteration from frame_values_gen --- io_scene_fbx/export_fbx_bin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/io_scene_fbx/export_fbx_bin.py b/io_scene_fbx/export_fbx_bin.py index 548324d2c..f66c6c912 100644 --- a/io_scene_fbx/export_fbx_bin.py +++ b/io_scene_fbx/export_fbx_bin.py @@ -2275,7 +2275,7 @@ def fbx_animations_do(scene_data, ref_id, f_start, f_end, start_zero, objects=No # Iterate through each frame and yield the values for that frame. # Iterating .data, the memoryview of an array, is faster than iterating the array directly. 
- for real_currframe, int_currframe, subframe in zip(real_currframes.data, int_currframes.data, subframes.data): + for int_currframe, subframe in zip(int_currframes.data, subframes.data): scene.frame_set(int_currframe, subframe=subframe) if has_animated_duplis: -- 2.30.2 From 46062a745f9fc2e1fd0e5a9d2530127caf45250f Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Wed, 13 Sep 2023 04:54:21 +0100 Subject: [PATCH 09/14] Convert from Blender to FBX time once per fbx_animations_do instead of for every exported curve --- io_scene_fbx/export_fbx_bin.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/io_scene_fbx/export_fbx_bin.py b/io_scene_fbx/export_fbx_bin.py index f66c6c912..0d22f9ab9 100644 --- a/io_scene_fbx/export_fbx_bin.py +++ b/io_scene_fbx/export_fbx_bin.py @@ -1981,12 +1981,6 @@ def fbx_data_animation_elements(root, scene_data): animations = scene_data.animations if not animations: return - scene = scene_data.scene - - fps = scene.render.fps / scene.render.fps_base - - def keys_to_ktimes(keys_array): - return (keys_array / fps * FBX_KTIME).astype(np.int64) # Animation stacks. for astack_key, alayers, alayer_key, name, f_start, f_end in animations: @@ -2052,7 +2046,7 @@ def fbx_data_animation_elements(root, scene_data): # And now, the *real* data! elem_data_single_float64(acurve, b"Default", def_value) elem_data_single_int32(acurve, b"KeyVer", FBX_ANIM_KEY_VERSION) - elem_data_single_int64_array(acurve, b"KeyTime", keys_to_ktimes(keys)) + elem_data_single_int64_array(acurve, b"KeyTime", astype_view_signedness(keys, np.int64)) elem_data_single_float32_array(acurve, b"KeyValueFloat", values.astype(np.float32, copy=False)) elem_data_single_int32_array(acurve, b"KeyAttrFlags", keyattr_flags) elem_data_single_float32_array(acurve, b"KeyAttrDataFloat", keyattr_datafloat) @@ -2258,7 +2252,11 @@ def fbx_animations_do(scene_data, ref_id, f_start, f_end, start_zero, objects=No # `np.arange` excludes the `stop` argument like when using `range`, so we use np.nextafter to get the next # representable value after f_end and use that as the `stop` argument instead. currframes = np.arange(f_start, np.nextafter(f_end, np.inf), step=bake_step) + + # Convert from Blender time to FBX time. + fps = scene.render.fps / scene.render.fps_base real_currframes = currframes - f_start if start_zero else currframes + real_currframes = (real_currframes / fps * FBX_KTIME).astype(np.int64) # Generator that yields the animated values of each frame in order. def frame_values_gen(): -- 2.30.2 From d3febb760249fb08498011606c88bab4c38e0a92 Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Wed, 13 Sep 2023 05:14:48 +0100 Subject: [PATCH 10/14] Refactor: Deduplicate animation simplifying and final data creation --- io_scene_fbx/export_fbx_bin.py | 47 ++++++++++------------------------ 1 file changed, 13 insertions(+), 34 deletions(-) diff --git a/io_scene_fbx/export_fbx_bin.py b/io_scene_fbx/export_fbx_bin.py index 0d22f9ab9..69b89ccd4 100644 --- a/io_scene_fbx/export_fbx_bin.py +++ b/io_scene_fbx/export_fbx_bin.py @@ -2320,19 +2320,23 @@ def fbx_animations_do(scene_data, ref_id, f_start, f_end, start_zero, objects=No split_at = np.cumsum(split_at[:-1]) all_ob_values, all_shape_key_values, all_camera_values = np.split(all_values, split_at) + all_anims = [] + # Set location/rotation/scale curves. # Split into equal sized views of the arrays for each object. 
split_into = len(animdata_ob) per_ob_values = np.split(all_ob_values, split_into) if split_into > 0 else () - for (anim_loc, anim_rot, anim_scale), ob_values in zip(animdata_ob.values(), per_ob_values): + for anims, ob_values in zip(animdata_ob.values(), per_ob_values): # Split again into equal sized views of the location, rotation and scaling arrays. loc_xyz, rot_xyz, sca_xyz = np.split(ob_values, 3) # In-place convert from Blender rotation to FBX rotation. np.rad2deg(rot_xyz, out=rot_xyz) + anim_loc, anim_rot, anim_scale = anims anim_loc.set_keyframes(real_currframes, loc_xyz) anim_rot.set_keyframes(real_currframes, rot_xyz) anim_scale.set_keyframes(real_currframes, sca_xyz) + all_anims.extend(anims) # Set shape key curves. # There's only one array per shape key, so there's no need to split `all_shape_key_values`. @@ -2340,6 +2344,7 @@ def fbx_animations_do(scene_data, ref_id, f_start, f_end, start_zero, objects=No # In-place convert from Blender Shape Key Value to FBX Deform Percent. shape_key_values *= 100.0 anim_shape.set_keyframes(real_currframes, shape_key_values) + all_anims.append(anim_shape) # Set camera curves. # Split into equal sized views of the arrays for each camera. @@ -2351,46 +2356,20 @@ def fbx_animations_do(scene_data, ref_id, f_start, f_end, start_zero, objects=No focus_distance_values *= (1000 * gscale) anim_camera_lens.set_keyframes(real_currframes, lens_values) anim_camera_focus_distance.set_keyframes(real_currframes, focus_distance_values) + all_anims.append(anim_camera_lens) + all_anims.append(anim_camera_focus_distance) animations = {} # And now, produce final data (usable by FBX export code) - # Objects-like loc/rot/scale... - for ob_obj, anims in animdata_ob.items(): - for anim in anims: - anim.simplify(simplify_fac, bake_step, force_keep) - if not anim: - continue - for obj_key, group_key, group, fbx_group, fbx_gname in anim.get_final_data(scene, ref_id, force_keep): - anim_data = animations.setdefault(obj_key, ("dummy_unused_key", {})) - anim_data[1][fbx_group] = (group_key, group, fbx_gname) - - # And meshes' shape keys. - for channel_key, (anim_shape, me, shape) in animdata_shapes.items(): - final_keys = {} - anim_shape.simplify(simplify_fac, bake_step, force_keep) - if not anim_shape: + for anim in all_anims: + anim.simplify(simplify_fac, bake_step, force_keep) + if not anim: continue - for elem_key, group_key, group, fbx_group, fbx_gname in anim_shape.get_final_data(scene, ref_id, force_keep): - anim_data = animations.setdefault(elem_key, ("dummy_unused_key", {})) + for obj_key, group_key, group, fbx_group, fbx_gname in anim.get_final_data(scene, ref_id, force_keep): + anim_data = animations.setdefault(obj_key, ("dummy_unused_key", {})) anim_data[1][fbx_group] = (group_key, group, fbx_gname) - # And cameras' lens and focus distance keys. 
- for cam_key, (anim_camera_lens, anim_camera_focus_distance, camera) in animdata_cameras.items(): - final_keys = {} - anim_camera_lens.simplify(simplify_fac, bake_step, force_keep) - anim_camera_focus_distance.simplify(simplify_fac, bake_step, force_keep) - if anim_camera_lens: - for elem_key, group_key, group, fbx_group, fbx_gname in \ - anim_camera_lens.get_final_data(scene, ref_id, force_keep): - anim_data = animations.setdefault(elem_key, ("dummy_unused_key", {})) - anim_data[1][fbx_group] = (group_key, group, fbx_gname) - if anim_camera_focus_distance: - for elem_key, group_key, group, fbx_group, fbx_gname in \ - anim_camera_focus_distance.get_final_data(scene, ref_id, force_keep): - anim_data = animations.setdefault(elem_key, ("dummy_unused_key", {})) - anim_data[1][fbx_group] = (group_key, group, fbx_gname) - astack_key = get_blender_anim_stack_key(scene, ref_id) alayer_key = get_blender_anim_layer_key(scene, ref_id) name = (get_blenderID_name(ref_id) if ref_id else scene.name).encode() -- 2.30.2 From e79677145481f915386612e517fdb166dff7d7cc Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Mon, 18 Sep 2023 00:55:09 +0100 Subject: [PATCH 11/14] FBX IO: Speed up animation simplification using NumPy --- io_scene_fbx/fbx_utils.py | 112 +++++++++++++++++++++++++++----------- 1 file changed, 80 insertions(+), 32 deletions(-) diff --git a/io_scene_fbx/fbx_utils.py b/io_scene_fbx/fbx_utils.py index 6232533b3..7296f7c7e 100644 --- a/io_scene_fbx/fbx_utils.py +++ b/io_scene_fbx/fbx_utils.py @@ -1314,42 +1314,88 @@ class AnimationCurveNodeWrapper: min_reldiff_fac = fac * 1.0e-3 # min relative value evolution: 0.1% of current 'order of magnitude'. min_absdiff_fac = 0.1 # A tenth of reldiff... - are_keyed = [] for values, frame_write_mask in zip(self._frame_values_array, self._frame_write_mask_array): # Initialise to no frames written. frame_write_mask[:] = False - # Create views of the 'previous' and 'current' mask and values. The memoryview, .data, of each array is used - # for its iteration and indexing performance compared to the array. - key = values[1:].data - p_key = values[:-1].data - key_write = frame_write_mask[1:].data - p_key_write = frame_write_mask[:-1].data + abs_values = np.abs(values) - p_keyedval = values[0] - is_keyed = False - for idx, (val, p_val) in enumerate(zip(key, p_key)): - if val == p_val: - # Never write keyframe when value is exactly the same as prev one! - continue - # This is contracted form of relative + absolute-near-zero difference: - # absdiff = abs(a - b) - # if absdiff < min_reldiff_fac * min_absdiff_fac: - # return False - # return (absdiff / ((abs(a) + abs(b)) / 2)) > min_reldiff_fac - # Note that we ignore the '/ 2' part here, since it's not much significant for us. - if abs(val - p_val) > (min_reldiff_fac * max(abs(val) + abs(p_val), min_absdiff_fac)): - # If enough difference from previous sampled value, key this value *and* the previous one! - key_write[idx] = True - p_key_write[idx] = True - p_keyedval = val - is_keyed = True - elif abs(val - p_keyedval) > (min_reldiff_fac * max((abs(val) + abs(p_keyedval)), min_absdiff_fac)): - # Else, if enough difference from previous keyed value, key this value only! - key_write[idx] = True - p_keyedval = val - is_keyed = True - are_keyed.append(is_keyed) + # Create views of the 'previous' and 'current' mask and values. 
+ p_val = values[:-1] + abs_p_val = abs_values[:-1] + p_write_mask = frame_write_mask[:-1] + val = values[1:] + abs_val = abs_values[1:] + write_mask = frame_write_mask[1:] + + # This is contracted form of relative + absolute-near-zero difference: + # absdiff = abs(a - b) + # if absdiff < min_reldiff_fac * min_absdiff_fac: + # return False + # return (absdiff / ((abs(a) + abs(b)) / 2)) > min_reldiff_fac + # Note that we ignore the '/ 2' part here, since it's not much significant for us. + enough_diff_p_val_mask = ( + np.abs(val - p_val) > (min_reldiff_fac * np.maximum(abs_val + abs_p_val, min_absdiff_fac)) + ) + # If enough difference from previous sampled value, key this value *and* the previous one! + # Unless it is forcefully keyed later, this is the only way that the first value can be keyed. + p_write_mask[enough_diff_p_val_mask] = True + write_mask[enough_diff_p_val_mask] = True + + # The other case where we key a value is if there is enough difference between it and the previous keyed + # value. + # Values that equal their previous value can be skipped, so the remaining values to check are those which + # are currently not keyed. + not_keyed_mask = ~write_mask + check_diff_mask = np.logical_and(not_keyed_mask, p_val != val) + val_check_idx = np.flatnonzero(check_diff_mask) + val_check = val[val_check_idx] + abs_val_check = abs_val[val_check_idx] + + # For each current value, get the index of the previous keyed value in `values`. + p_keyed_idx = np.arange(1, len(values)) + # The first 'previous keyed value' defaults to values[0], even if it's not actually keyed. + p_keyed_idx[not_keyed_mask] = 0 + # Accumulative maximum fills in the zeroed indices with the closest previous non-zero index because the + # indices must be increasing. + p_keyed_idx = np.maximum.accumulate(p_keyed_idx) + # Extract only the indices that need checking. + p_keyed_idx_check = p_keyed_idx[val_check_idx] + p_keyed_val_check = values[p_keyed_idx_check] + abs_p_keyed_val_check = np.abs(p_keyed_val_check) + + # We check the relative + absolute-near-zero difference again, but against the previous keyed value this + # time. + enough_diff_p_keyed_val_mask = ( + np.abs(val_check - p_keyed_val_check) + > (min_reldiff_fac * np.maximum(abs_val_check + abs_p_keyed_val_check, min_absdiff_fac)) + ) + if np.any(enough_diff_p_keyed_val_mask): + # If there are any that are different enough from the previous keyed value, then we have to check them + # all iteratively because keying a new value can change the previous keyed value of some elements, which + # changes their relative + absolute-near-zero difference. + new_p_keyed_idx = -1 + new_p_keyed_val = -1 + new_p_keyed_val_abs = -1 + # Accessing .data, the memoryview of the array, iteratively or by individual index is faster than doing + # the same with the array itself. + key_write_mv = write_mask.data + zipped = zip(val_check_idx.data, val_check.data, abs_val_check.data, p_keyed_idx_check.data, + enough_diff_p_keyed_val_mask.data) + for cur_idx, cur_val, abs_cur_val, old_p_keyed_idx, enough_diff in zipped: + if new_p_keyed_idx > old_p_keyed_idx: + # The previous keyed value is new and was not included when enough_diff_p_keyed_val_mask was + # calculated, so whether the current value is different enough needs to be calculated. + # Check if the relative + absolute-near-zero difference is enough to key this value. 
+ enough_diff = (abs(cur_val - new_p_keyed_val) + > (min_reldiff_fac * max(abs_cur_val + new_p_keyed_val_abs, min_absdiff_fac))) + if enough_diff: + # The current value needs to be keyed. + key_write_mv[cur_idx] = True + # Update the index and values for this newly keyed value. + new_p_keyed_idx = cur_idx + new_p_keyed_val = cur_val + new_p_keyed_val_abs = abs_cur_val # If we write nothing (action doing nothing) and are in 'force_keep' mode, we key everything! :P # See T41766. @@ -1358,7 +1404,9 @@ class AnimationCurveNodeWrapper: # one key in this case. # See T41719, T41605, T41254... if self.force_keying or (force_keep and not self): - are_keyed[:] = [True] * len(are_keyed) + are_keyed = [True] * len(self._frame_write_mask_array) + else: + are_keyed = np.any(self._frame_write_mask_array, axis=1) # If we did key something, ensure first and last sampled values are keyed as well. if self.force_startend_keying: -- 2.30.2 From 0fb53e49f093dd55ab05c5e836012ec8572c4d6b Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Thu, 5 Oct 2023 06:30:31 +0100 Subject: [PATCH 12/14] Big comments and variable renaming update --- io_scene_fbx/fbx_utils.py | 214 +++++++++++++++++++++++++------------- 1 file changed, 140 insertions(+), 74 deletions(-) diff --git a/io_scene_fbx/fbx_utils.py b/io_scene_fbx/fbx_utils.py index 19a7a5d8a..198dfb5bb 100644 --- a/io_scene_fbx/fbx_utils.py +++ b/io_scene_fbx/fbx_utils.py @@ -1318,88 +1318,154 @@ class AnimationCurveNodeWrapper: min_reldiff_fac = fac * 1.0e-3 # min relative value evolution: 0.1% of current 'order of magnitude'. min_absdiff_fac = 0.1 # A tenth of reldiff... - for values, frame_write_mask in zip(self._frame_values_array, self._frame_write_mask_array): - # Initialise to no frames written. - frame_write_mask[:] = False + # Initialise to no values enabled for writing. + self._frame_write_mask_array[:] = False - abs_values = np.abs(values) + # Values are enabled for writing if they differ enough from either of their adjacent values or if they differ + # enough from the closest previous value that is enabled due to either of these conditions. + for sampled_values, enabled_mask in zip(self._frame_values_array, self._frame_write_mask_array): + # Create overlapping views of the 'previous' (all but the last) and 'current' (all but the first) + # `sampled_values` and `enabled_mask`. + # Calculate absolute values from `sampled_values` so that the 'previous' and 'current' absolute arrays can + # be views into the same array instead of separately calculated arrays. + abs_sampled_values = np.abs(sampled_values) + # 'previous' views. + p_val_view = sampled_values[:-1] + p_abs_val_view = abs_sampled_values[:-1] + p_enabled_mask_view = enabled_mask[:-1] + # 'current' views. + c_val_view = sampled_values[1:] + c_abs_val_view = abs_sampled_values[1:] + c_enabled_mask_view = enabled_mask[1:] - # Create views of the 'previous' and 'current' mask and values. - p_val = values[:-1] - abs_p_val = abs_values[:-1] - p_write_mask = frame_write_mask[:-1] - val = values[1:] - abs_val = abs_values[1:] - write_mask = frame_write_mask[1:] - - # This is contracted form of relative + absolute-near-zero difference: - # absdiff = abs(a - b) - # if absdiff < min_reldiff_fac * min_absdiff_fac: + # If enough difference from previous sampled value, enable the current value *and* the previous one! + # The difference check is symmetrical, so this will compare each value to both of its adjacent values. 
+ # Unless it is forcefully enabled later, this is the only way that the first value can be enabled. + # This is a contracted form of relative + absolute-near-zero difference: + # def is_different(a, b): + # abs_diff = abs(a - b) + # if abs_diff < min_reldiff_fac * min_absdiff_fac: # return False - # return (absdiff / ((abs(a) + abs(b)) / 2)) > min_reldiff_fac + # return (abs_diff / ((abs(a) + abs(b)) / 2)) > min_reldiff_fac # Note that we ignore the '/ 2' part here, since it's not much significant for us. - enough_diff_p_val_mask = ( - np.abs(val - p_val) > (min_reldiff_fac * np.maximum(abs_val + abs_p_val, min_absdiff_fac)) - ) - # If enough difference from previous sampled value, key this value *and* the previous one! - # Unless it is forcefully keyed later, this is the only way that the first value can be keyed. - p_write_mask[enough_diff_p_val_mask] = True - write_mask[enough_diff_p_val_mask] = True + # Contracted form using only builtin Python functions: + # return abs(a - b) > (min_reldiff_fac * max(abs(a) + abs(b), min_absdiff_fac)) + abs_diff = np.abs(c_val_view - p_val_view) + different_if_greater_than = min_reldiff_fac * np.maximum(c_abs_val_view + p_abs_val_view, min_absdiff_fac) + enough_diff_p_val_mask = abs_diff > different_if_greater_than + # Enable both the current values *and* the previous values where `enough_diff_p_val_mask` is True. Some + # values may get set to True twice because the views overlap, but this is not a problem. + p_enabled_mask_view[enough_diff_p_val_mask] = True + c_enabled_mask_view[enough_diff_p_val_mask] = True - # The other case where we key a value is if there is enough difference between it and the previous keyed - # value. - # Values that equal their previous value can be skipped, so the remaining values to check are those which - # are currently not keyed. - not_keyed_mask = ~write_mask - check_diff_mask = np.logical_and(not_keyed_mask, p_val != val) - val_check_idx = np.flatnonzero(check_diff_mask) - val_check = val[val_check_idx] - abs_val_check = abs_val[val_check_idx] - - # For each current value, get the index of the previous keyed value in `values`. - p_keyed_idx = np.arange(1, len(values)) - # The first 'previous keyed value' defaults to values[0], even if it's not actually keyed. - p_keyed_idx[not_keyed_mask] = 0 - # Accumulative maximum fills in the zeroed indices with the closest previous non-zero index because the - # indices must be increasing. - p_keyed_idx = np.maximum.accumulate(p_keyed_idx) - # Extract only the indices that need checking. - p_keyed_idx_check = p_keyed_idx[val_check_idx] - p_keyed_val_check = values[p_keyed_idx_check] - abs_p_keyed_val_check = np.abs(p_keyed_val_check) - - # We check the relative + absolute-near-zero difference again, but against the previous keyed value this - # time. - enough_diff_p_keyed_val_mask = ( - np.abs(val_check - p_keyed_val_check) - > (min_reldiff_fac * np.maximum(abs_val_check + abs_p_keyed_val_check, min_absdiff_fac)) - ) - if np.any(enough_diff_p_keyed_val_mask): - # If there are any that are different enough from the previous keyed value, then we have to check them - # all iteratively because keying a new value can change the previous keyed value of some elements, which - # changes their relative + absolute-near-zero difference. - new_p_keyed_idx = -1 - new_p_keyed_val = -1 - new_p_keyed_val_abs = -1 + # Else, if enough difference from previous enabled value, enable the current value only! 
+ # For each 'current' value, get the index of the nearest previous enabled value in `sampled_values` (or + # itself if the value is enabled). + # Start with an array that is the index of the 'current' value in `sampled_values`. The 'current' values are + # all but the first value, so the indices will be from 1 to `len(sampled_values)` exclusive. + # Let len(sampled_values) == 9: + # [1, 2, 3, 4, 5, 6, 7, 8] + p_enabled_idx_in_sampled_values = np.arange(1, len(sampled_values)) + # Replace the indices of all disabled values with 0 in preparation of filling them in with the index of the + # nearest previous enabled value. We choose to replace with 0 so that if there is no nearest previous + # enabled value, we instead default to `values[0]`. + c_val_disabled_mask = ~c_enabled_mask_view + # Let `c_val_disabled_mask` be: + # [F, F, T, F, F, T, T, T] + # Set indices to 0 where `c_val_disabled_mask` is True: + # [1, 2, 3, 4, 5, 6, 7, 8] + # v v v v + # [1, 2, 0, 4, 5, 0, 0, 0] + p_enabled_idx_in_sampled_values[c_val_disabled_mask] = 0 + # Accumulative maximum travels across the array from left to right, filling in the zeroed indices with the + # maximum value so far, which will be the closest previous enabled index because the non-zero indices are + # strictly increasing. + # [1, 2, 0, 4, 5, 0, 0, 0] + # v v v v + # [1, 2, 2, 4, 5, 5, 5, 5] + p_enabled_idx_in_sampled_values = np.maximum.accumulate(p_enabled_idx_in_sampled_values) + # Only disabled values need to be checked against their nearest previous enabled values. + # We can additionally ignore all values which equal their immediately previous value because those values + # will never be enabled if they were not enabled by the earlier difference check against immediately + # previous values. + p_enabled_diff_to_check_mask = np.logical_and(c_val_disabled_mask, p_val_view != c_val_view) + # Convert from a mask to indices because we need the indices later and because the array of indices will + # usually be smaller than the mask array making it faster to index other arrays with. + p_enabled_diff_to_check_idx = np.flatnonzero(p_enabled_diff_to_check_mask) + # `p_enabled_idx_in_sampled_values` from earlier: + # [1, 2, 2, 4, 5, 5, 5, 5] + # `p_enabled_diff_to_check_mask` assuming no values equal their immediately previous value: + # [F, F, T, F, F, T, T, T] + # `p_enabled_diff_to_check_idx`: + # [ 2, 5, 6, 7] + # `p_enabled_idx_in_sampled_values_to_check`: + # [ 2, 5, 5, 5] + p_enabled_idx_in_sampled_values_to_check = p_enabled_idx_in_sampled_values[p_enabled_diff_to_check_idx] + # Get the 'current' disabled values that need to be checked. + c_val_to_check = c_val_view[p_enabled_diff_to_check_idx] + c_abs_val_to_check = c_abs_val_view[p_enabled_diff_to_check_idx] + # Get the nearest previous enabled value for each value to be checked. + nearest_p_enabled_val = sampled_values[p_enabled_idx_in_sampled_values_to_check] + abs_nearest_p_enabled_val = np.abs(nearest_p_enabled_val) + # Check the relative + absolute-near-zero difference again, but against the nearest previous enabled value + # this time. 
+ abs_diff = np.abs(c_val_to_check - nearest_p_enabled_val) + different_if_greater_than = (min_reldiff_fac + * np.maximum(c_abs_val_to_check + abs_nearest_p_enabled_val, min_absdiff_fac)) + enough_diff_p_enabled_val_mask = abs_diff > different_if_greater_than + # If there are any that are different enough from the previous enabled value, then we have to check them all + # iteratively because enabling a new value can change the nearest previous enabled value of some elements, + # which changes their relative + absolute-near-zero difference: + # `p_enabled_diff_to_check_idx`: + # [2, 5, 6, 7] + # `p_enabled_idx_in_sampled_values_to_check`: + # [2, 5, 5, 5] + # Let `enough_diff_p_enabled_val_mask` be: + # [F, F, T, T] + # The first index that is newly enabled is 6: + # [2, 5,>6<,5] + # But 6 > 5, so the next value's nearest previous enabled index is also affected: + # [2, 5, 6,>6<] + # We had calculated a newly enabled index of 7 too, but that was calculated against the old nearest previous + # enabled index of 5, which has now been updated to 6, so whether 7 is enabled or not needs to be + # recalculated: + # [F, F, T, ?] + if np.any(enough_diff_p_enabled_val_mask): # Accessing .data, the memoryview of the array, iteratively or by individual index is faster than doing # the same with the array itself. - key_write_mv = write_mask.data - zipped = zip(val_check_idx.data, val_check.data, abs_val_check.data, p_keyed_idx_check.data, - enough_diff_p_keyed_val_mask.data) - for cur_idx, cur_val, abs_cur_val, old_p_keyed_idx, enough_diff in zipped: - if new_p_keyed_idx > old_p_keyed_idx: - # The previous keyed value is new and was not included when enough_diff_p_keyed_val_mask was - # calculated, so whether the current value is different enough needs to be calculated. - # Check if the relative + absolute-near-zero difference is enough to key this value. - enough_diff = (abs(cur_val - new_p_keyed_val) - > (min_reldiff_fac * max(abs_cur_val + new_p_keyed_val_abs, min_absdiff_fac))) + zipped = zip(p_enabled_diff_to_check_idx.data, + c_val_to_check.data, + c_abs_val_to_check.data, + p_enabled_idx_in_sampled_values_to_check.data, + enough_diff_p_enabled_val_mask.data) + # While iterating, we could set updated values into `enough_diff_p_enabled_val_mask` as we go and then + # update `enabled_mask` in bulk after the iteration, but if we're going to update an array while + # iterating, we may as well update `enabled_mask` directly instead and skip the bulk update. + # Additionally, the number of `True` writes to `enabled_mask` is usually much less than the number of + # updates that would be required to `enough_diff_p_enabled_val_mask`. + c_enabled_mask_view_mv = c_enabled_mask_view.data + + # While iterating, keep track of the most recent newly enabled index, so we can tell when we need to + # recalculate whether the current value needs to be enabled. + new_p_enabled_idx = -1 + # Keep track of its value too for performance. + new_p_enabled_val = -1 + new_abs_p_enabled_val = -1 + for cur_idx, c_val, c_abs_val, old_p_enabled_idx, enough_diff in zipped: + if new_p_enabled_idx > old_p_enabled_idx: + # The nearest previous enabled value is newly enabled and was not included when + # `enough_diff_p_enabled_val_mask` was calculated, so whether the current value is different + # enough needs to be recalculated using the newly enabled value. + # Check if the relative + absolute-near-zero difference is enough to enable this value. 
+ enough_diff = (abs(c_val - new_p_enabled_val) + > (min_reldiff_fac * max(c_abs_val + new_abs_p_enabled_val, min_absdiff_fac))) if enough_diff: - # The current value needs to be keyed. - key_write_mv[cur_idx] = True - # Update the index and values for this newly keyed value. - new_p_keyed_idx = cur_idx - new_p_keyed_val = cur_val - new_p_keyed_val_abs = abs_cur_val + # The current value needs to be enabled. + c_enabled_mask_view_mv[cur_idx] = True + # Update the index and values for this newly enabled value. + new_p_enabled_idx = cur_idx + new_p_enabled_val = c_val + new_abs_p_enabled_val = c_abs_val # If we write nothing (action doing nothing) and are in 'force_keep' mode, we key everything! :P # See T41766. -- 2.30.2 From f5e36f772979669465e28164bc1464e51cba446b Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Fri, 6 Oct 2023 16:12:40 +0100 Subject: [PATCH 13/14] Missed a variable rename in comments in last commit --- io_scene_fbx/fbx_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/io_scene_fbx/fbx_utils.py b/io_scene_fbx/fbx_utils.py index 198dfb5bb..fd3d37db7 100644 --- a/io_scene_fbx/fbx_utils.py +++ b/io_scene_fbx/fbx_utils.py @@ -1368,7 +1368,7 @@ class AnimationCurveNodeWrapper: p_enabled_idx_in_sampled_values = np.arange(1, len(sampled_values)) # Replace the indices of all disabled values with 0 in preparation of filling them in with the index of the # nearest previous enabled value. We choose to replace with 0 so that if there is no nearest previous - # enabled value, we instead default to `values[0]`. + # enabled value, we instead default to `sampled_values[0]`. c_val_disabled_mask = ~c_enabled_mask_view # Let `c_val_disabled_mask` be: # [F, F, T, F, F, T, T, T] -- 2.30.2 From e553a919af925371be3e69baa1c1462efff5d5b5 Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Fri, 6 Oct 2023 16:39:11 +0100 Subject: [PATCH 14/14] Increase FBX IO Version --- io_scene_fbx/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/io_scene_fbx/__init__.py b/io_scene_fbx/__init__.py index 25dd97054..72ae5c995 100644 --- a/io_scene_fbx/__init__.py +++ b/io_scene_fbx/__init__.py @@ -5,7 +5,7 @@ bl_info = { "name": "FBX format", "author": "Campbell Barton, Bastien Montagne, Jens Restemeier, @Mysteryem", - "version": (5, 8, 6), + "version": (5, 8, 7), "blender": (3, 6, 0), "location": "File > Import-Export", "description": "FBX IO meshes, UVs, vertex colors, materials, textures, cameras, lamps and actions", -- 2.30.2
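
For reference, a minimal standalone sketch of the per-object splitting done in the fbx_animations_do hunk above; the array sizes and contents here are made up for illustration, and the final set_keyframes step is only described in a comment rather than reproduced:

    import numpy as np

    num_objects = 2   # made-up size
    num_frames = 4    # made-up size
    # One row per animated channel (loc XYZ, rot XYZ in radians, scale XYZ for each object),
    # one column per sampled frame.
    all_ob_values = np.zeros((num_objects * 9, num_frames), dtype=np.float64)

    per_ob_values = np.split(all_ob_values, num_objects) if num_objects > 0 else ()
    for ob_values in per_ob_values:
        # Split again into equal sized views of the location, rotation and scaling rows.
        loc_xyz, rot_xyz, sca_xyz = np.split(ob_values, 3)
        # In-place convert from Blender rotation (radians) to FBX rotation (degrees).
        np.rad2deg(rot_xyz, out=rot_xyz)
        # Each (3, num_frames) view is then passed to the matching AnimationCurveNodeWrapper
        # via set_keyframes, together with the shared keyframe times array.

Because np.split returns views, the in-place degree conversion also updates the backing all_ob_values array, which is why no copies are needed here.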
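
Likewise, a small sketch of the relative + absolute-near-zero difference check that PATCH 11/12 vectorise in simplify(); `fac` and the sample values below are arbitrary illustration values, not taken from the patches:

    import numpy as np

    fac = 1.0                        # the simplify factor argument; 1.0 is only an illustrative value
    min_reldiff_fac = fac * 1.0e-3   # min relative value evolution: 0.1% of current 'order of magnitude'
    min_absdiff_fac = 0.1            # a tenth of reldiff

    def is_different(a, b):
        # Reference scalar form, as given in the simplify() comments.
        abs_diff = abs(a - b)
        if abs_diff < min_reldiff_fac * min_absdiff_fac:
            return False
        return (abs_diff / ((abs(a) + abs(b)) / 2)) > min_reldiff_fac

    # Contracted form, with the '/ 2' deliberately dropped, vectorized over adjacent samples.
    sampled_values = np.array([1.0, 1.0, 1.2, 1.2001, 5.0])
    abs_sampled_values = np.abs(sampled_values)
    p_val, c_val = sampled_values[:-1], sampled_values[1:]
    p_abs_val, c_abs_val = abs_sampled_values[:-1], abs_sampled_values[1:]
    enough_diff_p_val_mask = (
        np.abs(c_val - p_val) > (min_reldiff_fac * np.maximum(c_abs_val + p_abs_val, min_absdiff_fac))
    )
    # Wherever the mask is True, both the 'current' and the 'previous' sample are enabled for writing.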
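
And a standalone reproduction of the nearest-previous-enabled-index trick explained in the PATCH 12 comments, using the same worked example mask from those comments; only the variable names are shortened:

    import numpy as np

    len_sampled_values = 9
    # Mask of 'current' samples (all but the first) still disabled after the adjacent-difference pass;
    # this is the example mask from the PATCH 12 comments.
    c_val_disabled_mask = np.array([False, False, True, False, False, True, True, True])

    # Each 'current' sample starts as its own index into sampled_values: [1, 2, 3, 4, 5, 6, 7, 8]
    p_enabled_idx = np.arange(1, len_sampled_values)
    # Zero the disabled entries:                                          [1, 2, 0, 4, 5, 0, 0, 0]
    p_enabled_idx[c_val_disabled_mask] = 0
    # Accumulative maximum fills each zero with the closest previous
    # enabled (non-zero) index:                                           [1, 2, 2, 4, 5, 5, 5, 5]
    p_enabled_idx = np.maximum.accumulate(p_enabled_idx)
    assert p_enabled_idx.tolist() == [1, 2, 2, 4, 5, 5, 5, 5]

Entries that then turn out to be different enough from their nearest previous enabled value still go through the small iterative fix-up loop in the patch, because enabling one value can change the nearest previous enabled index of the values that follow it.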