From 2ffbeeb8e744d7ab3cc303620241175b82131086 Mon Sep 17 00:00:00 2001
From: Thomas Barlow
Date: Sun, 20 Aug 2023 18:05:45 +0100
Subject: [PATCH 01/12] Use only the first anim curve per property channel

The FBX SDK documentation specifies that the FBX animation system's
default implementation only uses the first curve assigned to a channel.
Additional curves per channel are supported by the FBX specification,
but the handling of these extra curves is considered the responsibility
of the application that created them.

This patch changes animation import to discard any extra animation
curves for a property channel once the first curve has been found. When
this occurs, a warning is printed to the system console.
---
 io_scene_fbx/import_fbx.py | 17 ++++++++++++++++-
 1 file changed, 16 insertions(+), 1 deletion(-)

diff --git a/io_scene_fbx/import_fbx.py b/io_scene_fbx/import_fbx.py
index deeb9cc45..b2681a41e 100644
--- a/io_scene_fbx/import_fbx.py
+++ b/io_scene_fbx/import_fbx.py
@@ -575,9 +575,24 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo
     from itertools import chain
 
     fbx_curves = []
+    used_channels = set()
+    warn_multiple_curves_per_channel = False
     for curves, fbxprop in cnodes.values():
         for (fbx_acdata, _blen_data), channel in curves.values():
-            fbx_curves.append((fbxprop, channel, fbx_acdata))
+            channel_id = (fbxprop, channel)
+            if channel_id in used_channels:
+                # The FBX animation system's default implementation only uses the first curve assigned to a channel.
+                # Additional curves per channel are allowed by the FBX specification, but the handling of these curves
+                # is considered the responsibility of the application that created them. Note that each curve node is
+                # expected to have a unique set of channels, so these additional curves with the same channel would have
+                # to belong to separate curve nodes. See the FBX SDK documentation for FbxAnimCurveNode.
+                warn_multiple_curves_per_channel = True
+            else:
+                used_channels.add(channel_id)
+                fbx_curves.append((fbxprop, channel, fbx_acdata))
+    if warn_multiple_curves_per_channel:
+        print("WARNING: Multiple animation curves per animated property channel were found for %s. All but the first "
+              "curve for each property channel have been discarded." % action.name)
 
     # Leave if no curves are attached (if a blender curve is attached to scale but without keys it defaults to 0).
if len(fbx_curves) == 0: -- 2.30.2 From f91d5766259d95bd991d0abede5bc8f2652587e2 Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Wed, 30 Aug 2023 00:32:55 +0100 Subject: [PATCH 02/12] initial implementation (and revert only using the first anim curve per channel) --- io_scene_fbx/import_fbx.py | 817 +++++++++++++++++++++++++++++++------ 1 file changed, 703 insertions(+), 114 deletions(-) diff --git a/io_scene_fbx/import_fbx.py b/io_scene_fbx/import_fbx.py index b2681a41e..bb76f8f33 100644 --- a/io_scene_fbx/import_fbx.py +++ b/io_scene_fbx/import_fbx.py @@ -524,6 +524,593 @@ def blen_read_object_transform_preprocess(fbx_props, fbx_obj, rot_alt_mat, use_p # --------- # Animation +def _transformation_curves_gen(item, values_arrays, channel_keys): + from operator import setitem + from functools import partial + if item.is_bone: + bl_obj = item.bl_obj.pose.bones[item.bl_bone] + else: + bl_obj = item.bl_obj + + rot_mode = bl_obj.rotation_mode + transform_data = item.fbx_transform_data + rot_eul_prev = bl_obj.rotation_euler.copy() + rot_quat_prev = bl_obj.rotation_quaternion.copy() + + # Pre-compute inverted local rest matrix of the bone, if relevant. + restmat_inv = item.get_bind_matrix().inverted_safe() if item.is_bone else None + + transform_prop_to_attr = { + b'Lcl Translation': transform_data.loc, + b'Lcl Rotation': transform_data.rot, + b'Lcl Scaling': transform_data.sca, + } + + # Pre-get/calculate these to reduce the work done inside the hot loop. + anim_compensation_matrix = item.anim_compensation_matrix + do_anim_compensation_matrix = bool(anim_compensation_matrix) + + pre_matrix = item.pre_matrix + do_pre_matrix = bool(pre_matrix) + + post_matrix = item.post_matrix + do_post_matrix = bool(post_matrix) + + do_restmat_inv = bool(restmat_inv) + + # Create a setter into transform_data for each values array. e.g. a values array for 'Lcl Scaling' with channel == 2 + # would set transform_data.sca[2]. + # TODO: Might be faster to create a list of each transform_prop_to_attr[fbx_prop] and a list of channels, then zip + # both and in the main loop, do transform_data_attr[channel] = value + setters = [partial(setitem, transform_prop_to_attr[fbx_prop], channel) for fbx_prop, channel in channel_keys] + zipped_values_iterators = zip(*(iter(arr.data) for arr in values_arrays)) + + # todo: Rather than having to get the Matrix/Quaternion methods upon each call within the loop, we can instead get + # them in advance. + # Before the loop: + # `mat_decompose = Matrix.decompose` + # then within the loop: + # `mat_decompose(mat)` + + for values in zipped_values_iterators: + for setter, value in zip(setters, values): + setter(value) + + mat, _, _ = blen_read_object_transform_do(transform_data) + + # compensate for changes in the local matrix during processing + if do_anim_compensation_matrix: + mat = mat @ anim_compensation_matrix + + # apply pre- and post matrix + # post-matrix will contain any correction for lights, camera and bone orientation + # pre-matrix will contain any correction for a parent's correction matrix or the global matrix + if do_pre_matrix: + mat = pre_matrix @ mat + if do_post_matrix: + mat = mat @ post_matrix + + # And now, remove that rest pose matrix from current mat (also in parent space). + if do_restmat_inv: + mat = restmat_inv @ mat + + # Now we have a virtual matrix of transform from AnimCurves, we can insert keyframes! 
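+        # A note on the quaternion handling below: a quaternion q and its negation -q encode the same
+        # rotation, so each new quaternion key is compared against the previous one (via the dot product)
+        # and negated when needed, keeping the four quaternion FCurves continuous. For example, if the
+        # previous key was (w=1.0, x=0.0, y=0.0, z=0.0) and the new key decomposes to roughly
+        # (-1.0, 0.0, 0.0, -0.01), the new key is flipped to (1.0, 0.0, 0.0, 0.01) before being yielded.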
+ loc, rot, sca = mat.decompose() + if rot_mode == 'QUATERNION': + if rot_quat_prev.dot(rot) < 0.0: + rot = -rot + rot_quat_prev = rot + elif rot_mode == 'AXIS_ANGLE': + vec, ang = rot.to_axis_angle() + rot = ang, vec.x, vec.y, vec.z + else: # Euler + rot = rot.to_euler(rot_mode, rot_eul_prev) + rot_eul_prev = rot + + # Yield order matches the order that the location/rotation/scale FCurves are created in. + yield from loc + yield from rot + yield from sca + + +def _combine_same_property_curves(times_and_values_tuples): + """Combine multiple sorted animation curves, that affect the same property, into a single sorted animation curve.""" + if len(times_and_values_tuples) > 1: + # TODO: Print a warning to the console that more than one curve was found + # The FBX animation system's default implementation only uses the first curve assigned to a channel. + # Additional curves per channel are allowed by the FBX specification, but the handling of these curves is + # considered the responsibility of the application that created them. Note that each curve node is expected to + # have a unique set of channels, so these additional curves with the same channel would have to belong to + # separate curve nodes. See the FBX SDK documentation for FbxAnimCurveNode. + + # Concatenate all the times into one array and all the values into one array. + all_times = np.concatenate([t[0] for t in times_and_values_tuples]) + all_values = np.concatenate([t[1] for t in times_and_values_tuples]) + # Get the unique, sorted times and the index in all_times of the first occurrence of each unique value. + sorted_unique_times, unique_indices_in_all_times = np.unique(all_times, return_index=True) + + values_of_sorted_unique_times = all_values[unique_indices_in_all_times] + return sorted_unique_times, values_of_sorted_unique_times + + # # Get the indices that would sort all_times. + # # Use a stable algorithm so that if there are any duplicate times, they maintain their original order. + # perm = np.argsort(kind='stable') + # # Use the indices to sort both all_times and all_values. + # all_times = all_times[perm] + # all_values = all_values[perm] + else: + return times_and_values_tuples[0] + + +def _combine_curve_keyframes(times_and_values_tuples, initial_values): + """Combine multiple sorted animation curves, that affect different properties, such that every animation curve + contains the keyframes from every other curve, interpolating the values for the newly inserted keyframes in each + curve. 
+ + Currently, linear interpolation is assumed, but FBX does store how keyframes should be interpolated, so correctly + interpolating the keyframe values is a TODO.""" + # all_times = [] + # #all_values = [] + # #all_curve_idx = [] + # for i, (times, values) in enumerate(times_and_values_tuples): + # all_times.append(times) + # #all_values.append(values) + # #all_curve_idx.append(np.full_like(times, i)) + # all_times = np.concatenate(all_times) + # #all_curve_idx = np.concatenate(all_curve_idx) + # perm = np.argsort(all_times, kind='stable') + # + # sorted_all_times = all_times[perm] + # + # # Get the index in sorted_all_times of each time in all_times + # all_times_indices = np.empty(len(perm), dtype=np.intp) + # all_times_indices[perm] = np.arange(len(perm)) + + all_times = [t[0] for t in times_and_values_tuples] + + # Get sorted unique times and the index in sorted_all_times of each time in all_times + sorted_all_times, all_times_indices = np.unique(np.concatenate(all_times), return_inverse=True) + + #sorted_all_values = all_values[perm] + #sorted_curve_idx = all_curve_idx[perm] + # An alternative would be to concatenated filled arrays with the index of each array and then index that by perm, + # then a mask for each array can be found by checking for values that equal the index of that array. + values_arrays = [] + times_start = 0 + for (times, values), initial_value in zip(times_and_values_tuples, initial_values): + times_end = times_start + len(times) + # The index in sorted_all_times of each value in times + times_indices = all_times_indices[times_start:times_end] + # Update times_start for the next array + times_start = times_end + + # todo: Not sure the best way to mask out the values here, will need investigating + + #times_extended = sorted_all_times.copy() + + needs_interpolation_mask = np.full(len(sorted_all_times), True) + needs_interpolation_mask[times_indices] = False + #imported_times_mask = ~needs_interpolation_mask + + # # Need to find the before and after times for each time that needs interpolation + # # Times are sorted, so the smallest and largest are simply the first and last values. + # min_time_value = values[0] + # max_time_value = values[-1] + + # todo: It's possible we can get the same result faster by doing the interpolation manually, since we can figure + # out the before and after values for each time that needs interpolating (this would also likely make it easier + # to update the function to support other interpolation than just linear). + interp_values = np.interp(sorted_all_times[needs_interpolation_mask], times, values, left=initial_value) + + # Alt, though we typically expect there to be few times that need interpolation: + # extended_values = np.interp(sorted_all_times, times, values, left=initial_value) + + extended_values = np.empty_like(values, shape=len(sorted_all_times)) + extended_values[needs_interpolation_mask] = interp_values + extended_values[times_indices] = values + + values_arrays.append(extended_values) + + # FIXME: If we have two curves with the same times, aren't they going to break because they're going to try and + # interpolate the same times? + # times1 = [1,2,3,4] + # times2 = [1,2,3,4] + # sorted_all_times = [1,1,2,2,3,3,4,4]... 
not good + + # # Manual linear interpolation (it may be easier to extend for other interpolation): + # # Get the index of the previous and next elements that are not interpolated + # prev_value_indices = np.arange(len(sorted_all_times), dtype=np.intp) + # next_value_indices = prev_value_indices.copy() + # prev_value_indices[needs_interpolation_mask] = times_indices[0] + # next_value_indices[needs_interpolation_mask] = times_indices[-1] + # prev_value_indices = np.maximum.accumulate(prev_value_indices) + # next_value_indices = np.flip(np.minimum.accumulate(np.flip(next_value_indices))) + # + # # TODO: May be faster to simply not index by needs_interpolation_mask every time and then only index by + # # needs_interpolation_mask at the end. + # prev_times = sorted_all_times[prev_value_indices][needs_interpolation_mask] + # prev_values = sorted_all_values[prev_value_indices][needs_interpolation_mask] + # next_times = sorted_all_times[next_value_indices][needs_interpolation_mask] + # next_values = sorted_all_values[next_value_indices][needs_interpolation_mask] + # + # interp_times = sorted_all_times[needs_interpolation_mask] + # ifac = (interp_times - prev_times) / (next_times - prev_times) + # interp_values = ifac * (next_values - prev_values) + prev_values + + # ifac = (sorted_all_times[needs_interpolation_mask] - prev_times) / () + # + # values_extended = + # + # min_before = np.full_like(sorted_all_times, times[0]) + # max_after = np.full_like(sorted_all_times, times[-1]) + # + # # FIXME: But we need the indices so we can get the before and after values and interpolate between those... + # before_times = times_extended.copy() + # before_times[needs_interpolation_mask] = min_time + # before_times = np.maximum.accumulate(before_times) + # + # after_times = times_extended.copy() + # after_times[needs_interpolation_mask] = max_time + # after_times = np.flip(np.minimum.accumulate(np.flip(after_times))) + # + # before_times[needs_interpolation_mask] + # + # + # + # + # times_full = sorted_all_times.copy() + # values_full = np.full_like(sorted_all_times, np.nan) + # values_full[sorted_curve_idx == i] = values + return sorted_all_times, values_arrays + + +def blen_read_invalid_animation_curve(key_times, key_values): + """FBX will parse animation curves even when their keyframe times are invalid (not strictly increasing). It's + unclear exactly how FBX handles invalid curves, but this matches in some cases and is how the FBX IO addon has been + handling invalid keyframe times for a long time. + + Notably, this function will also correctly parse valid animation curves, though is much slower than the trivial, + regular way. + + The returned keyframe times are guaranteed to be strictly increasing.""" + sorted_unique_times = np.unique(key_times) + + # Unsure if this can be vectorized with numpy, so using iteration for now. + def index_gen(): + idx = 0 + key_times_len = len(key_times) + # Iterating .data, the memoryview of the array, is faster than iterating the array directly. + for curr_fbxktime in sorted_unique_times.data: + if key_times[idx] < curr_fbxktime: + if idx >= 0: + idx += 1 + if idx >= key_times_len: + idx = -1 + yield idx + + indices = np.fromiter(index_gen(), dtype=np.int64, count=len(sorted_unique_times)) + indexed_times = key_times[indices] + indexed_values = key_values[indices] + + # Interpolate the value for each time in sorted_unique_times according to the times and values at each index and + # the previous index. 
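+    # For a sorted unique time t whose indexed time is key_times[i] (where i = indices[n] is nonzero),
+    # this is a standard lerp between the previous key and the indexed key, i.e.:
+    #     value(t) = key_values[i-1] + (key_values[i] - key_values[i-1])
+    #                * (t - key_times[i-1]) / (key_times[i] - key_times[i-1])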
+ interpolated_values = np.empty_like(indexed_values) + + # Where the index is 0, there's no previous value to interpolate from, so we set the value without + # interpolating. + # Because the indices are in increasing order, all zeroes must be at the start, so we can find the index of the + # last zero and use that to index with a slice instead of a boolean array for performance. + # Equivalent to, but as a slice: + # idx_zero_mask = indices == 0 + # idx_nonzero_mask = ~idx_zero_mask + first_nonzero_idx = np.searchsorted(indices, 0, side='right') + idx_zero_slice = slice(0, first_nonzero_idx) # [:first_nonzero_idx] + idx_nonzero_slice = slice(first_nonzero_idx, None) # [first_nonzero_idx:] + + interpolated_values[idx_zero_slice] = indexed_values[idx_zero_slice] + + indexed_times_nonzero_idx = indexed_times[idx_nonzero_slice] + indexed_values_nonzero_idx = indexed_values[idx_nonzero_slice] + indices_nonzero = indices[idx_nonzero_slice] + + prev_indices_nonzero = indices_nonzero - 1 + prev_indexed_times_nonzero_idx = key_times[prev_indices_nonzero] + prev_indexed_values_nonzero_idx = key_values[prev_indices_nonzero] + + ifac_a = sorted_unique_times[idx_nonzero_slice] - prev_indexed_times_nonzero_idx + ifac_b = indexed_times_nonzero_idx - prev_indexed_times_nonzero_idx + # If key_times contains two (or more) duplicate times in a row, then values in `ifac_b` can be zero which would + # result in division by zero. + # Use the `np.errstate` context manager to suppress printing the RuntimeWarning to the system console. + with np.errstate(divide='ignore'): + ifac = ifac_a / ifac_b + interpolated_values[idx_nonzero_slice] = ((indexed_values_nonzero_idx - prev_indexed_values_nonzero_idx) * ifac + + prev_indexed_values_nonzero_idx) + + # If the time to interpolate at is larger than the time in indexed_times, then the value has been extrapolated. + # Extrapolated values are excluded. + valid_mask = indexed_times >= sorted_unique_times + + key_times = sorted_unique_times[valid_mask] + key_values = interpolated_values[valid_mask] + + return key_times, key_values + + +def _convert_fbx_time_to_blender_time(key_times, blen_start_offset, fbx_start_offset, fps): + # todo: Could move this into blen_store_keyframes since it probably doesn't need to be used anywhere else + from .fbx_utils import FBX_KTIME + timefac = fps / FBX_KTIME + + # Convert from FBX timing to Blender timing. + # Cannot subtract in-place because key_times could be read directly from FBX and could be used by multiple Actions. + key_times = key_times - fbx_start_offset + # timefac is a Python float, so the new array will be a np.float64 array. + key_times = key_times * timefac + + key_times += blen_start_offset + + return key_times + + +def blen_read_single_animation_curve(fbx_curve): + """Read a single animation curve from FBX data. + + The parsed keyframe times are guaranteed to be strictly increasing.""" + # TODO: Remove these, we can do all time conversion at the very end, just before combining times and values into a + # single array + # from .fbx_utils import FBX_KTIME + # timefac = fps / FBX_KTIME + + key_times = parray_as_ndarray(elem_prop_first(elem_find_first(fbx_curve, b'KeyTime'))) + key_values = parray_as_ndarray(elem_prop_first(elem_find_first(fbx_curve, b'KeyValueFloat'))) + + assert(len(key_values) == len(key_times)) + + # The FBX SDK specifies that only one key per time is allowed and that the keys are sorted in time order. 
+    # https://help.autodesk.com/view/FBX/2020/ENU/?guid=FBX_Developer_Help_cpp_ref_class_fbx_anim_curve_html
+    all_times_strictly_increasing = (key_times[1:] > key_times[:-1]).all()
+
+    if all_times_strictly_increasing:
+        return key_times, key_values
+    else:
+        # todo: Print a warning to the console that the animation curve was invalid.
+
+        # FBX will still read animation curves even if they are invalid.
+        return blen_read_invalid_animation_curve(key_times, key_values)
+
+    # todo: When we have transformation curves (or more than one curve per channel (optional support)), separately
+    #  combine singular parsed curves and fill in the gaps with linear interpolation. .concatenate and .unique the
+    #  key_times arrays with return_inverse=True. Use the lengths of each key_times array and their order in the
+    #  concatenation to get the index of each of their elements in the sorted, unique concatenation.
+    #  For each key_times array, create an all True array and use those indices to set values to False.
+    #  Copy the sorted, unique concatenation and use this new mask to effectively delete all times that didn't come
+    #  from this key_times array. Use .maximum.accumulate and a reversed .minimum.accumulate to get the first time
+    #  before and first time after each time that needs its value to be interpolated. These two arrays get the start
+    #  and end times to interpolate from. For each time that needs its value to be interpolated, get the values for
+    #  the start and end times and then use those and the times that need their values interpolated to calculate the
+    #  interpolated values.
+    #  Care will need to be taken for times where there is no first value before or where there is no first value
+    #  after, in which case interpolation can't take place and we'll either need to set values at the very start and
+    #  end, or otherwise fill the values that can't be interpolated with a default value or the first/last value in
+    #  key_times.
+
+    if not all_times_strictly_increasing:
+        # We try to match how FBX behaves when it encounters an invalid KeyTime array. This doesn't quite match when
+        # the maximum value is not the last value (FBX discards some keyframes whereas we don't), but it's close
+        # enough.
+
+        # Start the curve from the index of the smallest KeyTime value.
+        min_idx = np.argmin(key_times) if key_times.size else 0
+        key_times = key_times[min_idx:]
+        key_values = key_values[min_idx:]
+
+        max_idx = np.argmax(key_times) if key_times.size else 0
+        # If the largest KeyTime value is at the last index then it's simple.
+        if max_idx == key_times.size - 1:
+            # Set each element to the maximum of itself and all elements before it.
+            key_times = np.maximum.accumulate(key_times)
+        else:
+            # This works the same as Blender's original animation curve parser, but without the conversion from FBX
+            # time to Blender time, and modified to operate on a single curve rather than multiple curves.
+
+            # todo: Maybe these should be sorted instead?
+            # todo: Maybe these should be np.maximum.accumulate-d instead?
+            # Sorted unique key times
+            sorted_unique_times = np.unique(key_times)
+
+            # TODO: How is this different from np.searchsorted on np.maximum.accumulate-d times? Can we use it to find
+            #  the points at which idx will increase and then np.cumsum those increases?
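+            # (One difference, at least: idx advances at most once per unique time, so duplicate times
+            #  make the two diverge. E.g. for key_times = [0, 0, 1], sorted_unique_times is [0, 1] and
+            #  this generator yields indices [0, 1], whereas
+            #  np.searchsorted(np.maximum.accumulate(key_times), sorted_unique_times, side='left')
+            #  would give [0, 2].)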
+            def parse_invalid_curve_times_to_indices():
+                idx = 0
+                times = key_times.data
+                num_times = len(times)
+
+                for curr_fbxktime in sorted_unique_times.data:
+                    curr_time = times[idx]
+                    if curr_time < curr_fbxktime:
+                        if idx >= 0:
+                            idx += 1
+                            if idx >= num_times:
+                                # We have reached our last element for this curve, stay on it from now on...
+                                idx = -1
+                    yield idx
+
+            key_time_indices = np.fromiter(parse_invalid_curve_times_to_indices(), dtype=np.int64)
+            key_times = key_times[key_time_indices]
+            key_values = key_values[key_time_indices]
+
+            # Filter out invalid times
+            valid_mask = key_times >= sorted_unique_times
+            key_times = key_times[valid_mask]
+            key_values = key_values[valid_mask]
+
+            interpolated_values = np.empty_like(key_values)
+            interpolated_values[:1] = key_values[:1]
+            ifac = (sorted_unique_times[1:] - key_times[:-1]) / (key_times[1:] - key_times[:-1])
+            interpolated_values[1:] = (key_values[1:] - key_values[:-1]) * ifac + key_values[:-1]
+            key_values = interpolated_values
+
+            # def parse_curve_fallback_gen():
+            #     idx = 0
+            #     times = key_times.data
+            #     num_times = len(times)
+            #     values = key_values.data
+            #     # Sorted unique times
+            #     sorted_unique_times = np.unique(key_times)
+            #     for curr_fbxktime in sorted_unique_times.data:
+            #         curr_time = times[idx]
+            #         if curr_time < curr_fbxktime:
+            #             if idx >= 0:
+            #                 idx += 1
+            #                 if idx >= num_times:
+            #                     # We have reached our last element for this curve, stay on it from now on...
+            #                     idx = -1
+            #             curr_time = times[idx]
+            #         if curr_time >= curr_fbxktime:
+            #             if idx == 0:
+            #                 curr_value = values[idx]
+            #             else:
+            #                 # Interpolate between this key and the previous one
+            #                 prev_time = times[idx - 1]
+            #                 ifac = (curr_fbxktime - prev_time) / (curr_time - prev_time)
+            #                 curr_value = (values[idx] - values[idx - 1]) * ifac + values[idx - 1]
+            #         yield curr_fbxktime, curr_value
+            # structured_dtype = np.dtype([("time", key_times.dtype), ("value", key_values.dtype)])
+            # times_and_values = np.fromiter(parse_curve_fallback_gen(), dtype=structured_dtype)
+            # key_times = times_and_values["time"]
+            # key_values = times_and_values["value"]
+
+    # # todo: Get some printable attribute from fbx_curve, don't print the entire fbx_curve
+    # print("WARNING: Invalid animation keyframe times for %s. The keyframe times that are not strictly increasing"
+    #       " and the keyframes before the first keyframe chronologically have been discarded." % str(fbx_curve))
+    # # We could sort the key times, but starting from the minimum value and then taking an accumulative maximum
+    # # better matches FBX and older Blender behaviour when the times are not in order.
+    # # FIXME: min_idx thing doesn't work, things get weird when the start and end aren't the min and max times...
+    # min_idx = np.amin(key_times) if key_times.size else 0
+    # """
+    # Ok, so moving the max_idx to not be last has odd effects...
+    # Given frames [0, 10, ..., 90, 100] and values [0, ..., 100]. Moving the last time into the:
+    # second-last position: Frames -> [0, ..., 90], values -> [0, ..., 80, 85] (not 90??)
+    #                [..., 80, _90_, (100)]
+    #        Perhaps [..., 80, ????, (90)]
+    #        Interp  [..., 80, 85, (90)]
+    #        Frames  [..., 80, 90, (100)]
+    #        Blender appears to agree with Unity in this case, but Blender doesn't exclude the () element
+    #        whereas Unity does.
+    #        Interp  [..., 80, 85, 90]
+    #        Frames  [..., 80, 90, 100]
+    # third-last position: Frames -> [0, ..., 70, 90], values -> [0, ..., 70, 76.6666] (not 80??)
+ # [..., 70, _80_, (90), (100)] + # Perhaps [..., 70, ????, ????, (90)] + # Interp [..., 70, 76.6, 83.3, (90)] + # Frames [..., 70, (80), 90, (100)] + # + # Blender sets frame 100 to 80 instead, which would have been the next value after 70, but Blender + # doesn't exclude the () elements whereas Unity does. + # Perhaps [..., 70, (??), ????, (80)] + # Interp [..., 70,(73.3),76.6, (80)] + # Frames [..., 70, 80, 90, 100] + # fourth-last position: Frames -> [0, ..., 60, 90], values -> [0, ..., 60, 67.5] + # [..., 60, _70_, (80), (90), (100)] + # Perhaps [..., 60, ????, ????, ????, (90)] + # Interp [..., 60, 67.5, 75, 82.5, (90)] + # Frames [..., 60, (70), (80), 90, (100)] + # + # Blender sets frame 100 to 70 instead, which would have been the next value after 60, but Blender + # doesn't exclude the () elements whereas Unity does. + # Perhaps [..., 60, (??), (??), ????, (70)] + # Interp [..., 60,(62.5),(65), 67.5, (70)] + # Frames [..., 60, 70, 80, 90, 100] + # TODO: Try changing the 90 value to something else and see if the interpolations still hold. + # """ + # max_idx = np.amax(key_times) if key_times.size else 0 # max idx might also get reduced by min_idx? + # last_idx = len(key_times) - 1 + # if max_idx != last_idx: + # max_idx = last_idx - 2 # Seems to exclude the last two keyframes for some reason... + # key_times = key_times[min_idx:max_idx] + # key_values = key_values[min_idx:max_idx] + # key_times = np.maximum.accumulate(key_times) + # # TODO: As an alternative to finding the duplicates ourselves, we could just return key_times and key_values + # # as they are from here, letting Blender remove the duplicates when calling .update(). + # # Yes, we should leave it to Blender, imagine a transform channel with values [10, 2, 5, 20] at times + # # [1, 12, 12, 40], and then another transform channel with times at [6, 30]. The first channel will need to + # # interpolate for the missing times of 6 and 30. + # unique_mask = np.empty_like(key_times, dtype=bool) + # # Remove keyframes at duplicate times. Blender would do this when calling .update() on the FCurve. Where there + # # are duplicate times, Blender only keeps the last duplicate. + # # Note that this may produce different results to software that uses the FBX SDK, because it does not remove + # # duplicates. + # # Because key_times is now in ascending order, unique elements are the last element and elements that are + # # pairwise not equal. + # # 1,1,1,2,3,3,4,5,5 + # # F,F,T,T,F,T,T,F + # np.not_equal(key_times[:-1], key_times[1:], out=unique_mask[:-1]) + # # The last element is always kept: + # # F,F,T,T,F,T,T,F,T + # unique_mask[-1:] = True + # + # # # FIXME: This currently gets the first unique time, but if we were to import even duplicate times, when we + # # # .update() the FCurve, Blender keeps only the *last* unique time. + # # # Remove duplicates. Because key_times is now in ascending order, unique elements are the first element and + # # # elements that are pairwise not equal. + # # # 1,1,1,2,3,3,4,5 + # # # F,F,T,T,F,T,T + # # # The first element is always unique: + # # # T,F,F,T,T,F,T,T + # # unique_mask[:1] = True + # # np.not_equal(key_times[:-1], key_times[1:], out=unique_mask[1:]) + # # + # # #indices = np.where(unique_mask, np.arange(len(unique_mask), 0)) + # # #indices = np.maximum.accumulate(indices) + # # + # # Use the mask to get only the times (and their values) that are strictly increasing. 
+ # key_times = key_times[unique_mask] + # key_values = key_values[unique_mask] + + # Convert from FBX timing to Blender timing. + # Cannot subtract in-place because this curve could be used in multiple Actions. + key_times = key_times - fbx_start_offset + # timefac is a Python float, so the new array will be a np.float64 array. + key_times = key_times * timefac + key_times += blen_start_offset + + return key_times, key_values + + +def blen_store_keyframes(blen_fcurve, key_times, key_values): + """Set all keyframe times and values for a newly created FCurve. + Linear interpolation is currently assumed.""" + # The fcurve must be newly created and thus have no keyframe_points. + assert(len(blen_fcurve.keyframe_points) == 0) + num_keys = len(key_times) + + # Compatible with C float type + bl_keyframe_dtype = np.single + # Compatible with C char type + bl_enum_dtype = np.byte + + # TODO: get this value once and store it as a global variable + linear_enum_value = bpy.types.Keyframe.bl_rna.properties['interpolation'].enum_items['LINEAR'].value + + # Stack the arrays into a flattened array of flattened (frame, value) pairs + # Same as `np.column_stack(key_times, key_values).ravel()`, but allows specifying the dtype. + full_key_frame_array = np.concatenate((key_times.reshape(-1, 1), key_values.reshape(-1, 1)), + dtype=bl_keyframe_dtype, casting='unsafe', axis=1).ravel() + + # Add the keyframe points to the FCurve and then set the 'co' and 'interpolation' of each point. + blen_fcurve.keyframe_points.add(num_keys) + blen_fcurve.keyframe_points.foreach_set('co', full_key_frame_array.ravel()) + blen_fcurve.keyframe_points.foreach_set('interpolation', np.full(num_keys, linear_enum_value, dtype=bl_enum_dtype)) + + # Since we inserted our keyframes in 'ultra-fast' mode, we have to update the fcurves now. + blen_fcurve.update() + + +# TODO: Remove this function def blen_read_animations_curves_iter(fbx_curves, blen_start_offset, fbx_start_offset, fps): """ Get raw FBX AnimCurve list, and yield values for all curves at each singular curves' keyframes, @@ -572,42 +1159,54 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo taking any pre_ and post_ matrix into account to transform from fbx into blender space. """ from bpy.types import Object, PoseBone, ShapeKey, Material, Camera + # todo: Remove this import from itertools import chain - fbx_curves = [] - used_channels = set() - warn_multiple_curves_per_channel = False + fbx_curves: dict[bytes, dict[int, list[FBXElem]]] = {} for curves, fbxprop in cnodes.values(): + channels_dict = fbx_curves.setdefault(fbxprop, {}) for (fbx_acdata, _blen_data), channel in curves.values(): - channel_id = (fbxprop, channel) - if channel_id in used_channels: - # The FBX animation system's default implementation only uses the first curve assigned to a channel. - # Additional curves per channel are allowed by the FBX specification, but the handling of these curves - # is considered the responsibility of the application that created them. Note that each curve node is - # expected to have a unique set of channels, so these additional curves with the same channel would have - # to belong to separate curve nodes. See the FBX SDK documentation for FbxAnimCurveNode. - warn_multiple_curves_per_channel = True - else: - used_channels.add(channel_id) - fbx_curves.append((fbxprop, channel, fbx_acdata)) - if warn_multiple_curves_per_channel: - print("WARNING: Multiple animation curves per animated property channel were found for %s. 
All but the first" - "curve for each property channel has been discarded." % action.name) + channels_dict.setdefault(channel, []).append(fbx_acdata) + + # fbx_curves = [] + # used_channels = set() + # warn_multiple_curves_per_channel = False + # for curves, fbxprop in cnodes.values(): + # channels_dict = fbx_curves_props_channels.setdefault(fbxprop, {}) + # for (fbx_acdata, _blen_data), channel in curves.values(): + # channels_dict.setdefault(channel, []).append(fbx_acdata) + # + # for (fbx_acdata, _blen_data), channel in curves.values(): + # channel_id = (fbxprop, channel) + # if channel_id in used_channels: + # # The FBX animation system's default implementation only uses the first curve assigned to a channel. + # # Additional curves per channel are allowed by the FBX specification, but the handling of these curves + # # is considered the responsibility of the application that created them. Note that each curve node is + # # expected to have a unique set of channels, so these additional curves with the same channel would have + # # to belong to separate curve nodes. See the FBX SDK documentation for FbxAnimCurveNode. + # warn_multiple_curves_per_channel = True + # else: + # used_channels.add(channel_id) + # fbx_curves.append((fbxprop, channel, fbx_acdata)) + # if warn_multiple_curves_per_channel: + # print("WARNING: Multiple animation curves per animated property channel were found for %s. All but the first" + # "curve for each property channel has been discarded." % action.name) # Leave if no curves are attached (if a blender curve is attached to scale but without keys it defaults to 0). if len(fbx_curves) == 0: return - blen_curves = [] - props = [] - keyframes = {} + # todo: Remove these + # blen_curves = [] + # props = [] + # keyframes = {} - # Add each keyframe to the keyframe dict - def store_keyframe(fc, frame, value): - fc_key = (fc.data_path, fc.array_index) - if not keyframes.get(fc_key): - keyframes[fc_key] = [] - keyframes[fc_key].extend((frame, value)) + # # Add each keyframe to the keyframe dict + # def store_keyframe(fc, frame, value): + # fc_key = (fc.data_path, fc.array_index) + # if not keyframes.get(fc_key): + # keyframes[fc_key] = [] + # keyframes[fc_key].extend((frame, value)) if isinstance(item, Material): grpname = item.name @@ -642,115 +1241,105 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo for prop, nbr_channels, grpname in props for channel in range(nbr_channels)] if isinstance(item, Material): - for frame, values in blen_read_animations_curves_iter(fbx_curves, anim_offset, 0, fps): - value = [0,0,0] - for v, (fbxprop, channel, _fbx_acdata) in values: - assert(fbxprop == b'DiffuseColor') + for fbxprop, channel_to_curves in fbx_curves.items(): + assert(fbxprop == b'DiffuseColor') + for channel, curves in channel_to_curves.items(): assert(channel in {0, 1, 2}) - value[channel] = v - - for fc, v in zip(blen_curves, value): - store_keyframe(fc, frame, v) + blen_curve = blen_curves[channel] + parsed_curves = tuple(map(blen_read_single_animation_curve, curves)) + fbx_key_times, values = _combine_same_property_curves(parsed_curves) + bl_key_times = _convert_fbx_time_to_blender_time(fbx_key_times, anim_offset, 0, fps) + blen_store_keyframes(blen_curve, bl_key_times, values) elif isinstance(item, ShapeKey): deform_values = shape_key_deforms.setdefault(item, []) - for frame, values in blen_read_animations_curves_iter(fbx_curves, anim_offset, 0, fps): - value = 0.0 - for v, (fbxprop, channel, _fbx_acdata) in values: - assert(fbxprop == 
b'DeformPercent') + for fbxprop, channel_to_curves in fbx_curves.items(): + assert(fbxprop == b'DeformPercent') + for channel, curves in channel_to_curves.items(): assert(channel == 0) - value = v / 100.0 - deform_values.append(value) - - for fc, v in zip(blen_curves, (value,)): - store_keyframe(fc, frame, v) + blen_curve = blen_curves[channel] + parsed_curves = tuple(map(blen_read_single_animation_curve, curves)) + fbx_key_times, values = _combine_same_property_curves(parsed_curves) + bl_key_times = _convert_fbx_time_to_blender_time(fbx_key_times, anim_offset, 0, fps) + # A fully activated shape key in FBX DeformPercent is 100.0 whereas it is 1.0 in Blender. + values = values / 100.0 + blen_store_keyframes(blen_curve, bl_key_times, values) + # Store the minimum and maximum shape key values, so that the shape key's slider range can be expanded if + # necessary after reading all animations. + deform_values.append(values.min()) + deform_values.append(values.max()) elif isinstance(item, Camera): - for frame, values in blen_read_animations_curves_iter(fbx_curves, anim_offset, 0, fps): - focal_length = 0.0 - focus_distance = 0.0 - for v, (fbxprop, channel, _fbx_acdata) in values: - assert(fbxprop == b'FocalLength' or fbxprop == b'FocusDistance' ) + for fbxprop, channel_to_curves in fbx_curves.items(): + is_focus_distance = fbxprop == b'FocusDistance' + assert(fbxprop == b'FocalLength' or is_focus_distance) + for channel, curves in channel_to_curves.items(): assert(channel == 0) - if (fbxprop == b'FocalLength' ): - focal_length = v - elif(fbxprop == b'FocusDistance'): - focus_distance = v / 1000 * global_scale - - for fc, v in zip(blen_curves, (focal_length, focus_distance)): - store_keyframe(fc, frame, v) + # The indices are determined by the creation of the `props` list above. + blen_curve = blen_curves[1 if is_focus_distance else 0] + parsed_curves = tuple(map(blen_read_single_animation_curve, curves)) + fbx_key_times, values = _combine_same_property_curves(parsed_curves) + bl_key_times = _convert_fbx_time_to_blender_time(fbx_key_times, anim_offset, 0, fps) + if is_focus_distance: + # Remap the imported values from FBX to Blender. + values = values / 1000.0 + values *= global_scale + blen_store_keyframes(blen_curve, bl_key_times, values) else: # Object or PoseBone: - if item.is_bone: - bl_obj = item.bl_obj.pose.bones[item.bl_bone] - else: - bl_obj = item.bl_obj - transform_data = item.fbx_transform_data - rot_eul_prev = bl_obj.rotation_euler.copy() - rot_quat_prev = bl_obj.rotation_quaternion.copy() - # Pre-compute inverted local rest matrix of the bone, if relevant. - restmat_inv = item.get_bind_matrix().inverted_safe() if item.is_bone else None + # Each transformation curve needs to have keyframes at the times of every other transformation curve + # (interpolating missing values), so that we can construct a matrix at every keyframe. 
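+        # For example, if 'Lcl Translation' channel 0 is keyed at times {0, 20} and 'Lcl Rotation'
+        # channel 2 is keyed at time {10}, both curves are extended to cover {0, 10, 20}, with the
+        # missing values filled in by interpolation (see _combine_curve_keyframes).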
+        transform_data = item.fbx_transform_data
+
+        transform_prop_to_attr = {
+            b'Lcl Translation': transform_data.loc,
+            b'Lcl Rotation': transform_data.rot,
+            b'Lcl Scaling': transform_data.sca,
+        }
 
-        for frame, values in blen_read_animations_curves_iter(fbx_curves, anim_offset, 0, fps):
-            for v, (fbxprop, channel, _fbx_acdata) in values:
-                if fbxprop == b'Lcl Translation':
-                    transform_data.loc[channel] = v
-                elif fbxprop == b'Lcl Rotation':
-                    transform_data.rot[channel] = v
-                elif fbxprop == b'Lcl Scaling':
-                    transform_data.sca[channel] = v
-            mat, _, _ = blen_read_object_transform_do(transform_data)
+        times_and_values_tuples = []
+        initial_values = []
+        channel_keys = []
+        for fbxprop, channel_to_curves in fbx_curves.items():
+            if fbxprop not in transform_prop_to_attr:
+                # Currently, we only care about transformation curves.
+                continue
+            for channel, curves in channel_to_curves.items():
+                assert(channel in {0, 1, 2})
+                parsed_curves = tuple(map(blen_read_single_animation_curve, curves))
+                fbx_key_times, values = _combine_same_property_curves(parsed_curves)
 
-            # compensate for changes in the local matrix during processing
-            if item.anim_compensation_matrix:
-                mat = mat @ item.anim_compensation_matrix
+                channel_keys.append((fbxprop, channel))
 
-            # apply pre- and post matrix
-            # post-matrix will contain any correction for lights, camera and bone orientation
-            # pre-matrix will contain any correction for a parent's correction matrix or the global matrix
-            if item.pre_matrix:
-                mat = item.pre_matrix @ mat
-            if item.post_matrix:
-                mat = mat @ item.post_matrix
+                initial_values.append(transform_prop_to_attr[fbxprop][channel])
 
-            # And now, remove that rest pose matrix from current mat (also in parent space).
-            if restmat_inv:
-                mat = restmat_inv @ mat
+                times_and_values_tuples.append((fbx_key_times, values))
 
-            # Now we have a virtual matrix of transform from AnimCurves, we can insert keyframes!
- loc, rot, sca = mat.decompose() - if rot_mode == 'QUATERNION': - if rot_quat_prev.dot(rot) < 0.0: - rot = -rot - rot_quat_prev = rot - elif rot_mode == 'AXIS_ANGLE': - vec, ang = rot.to_axis_angle() - rot = ang, vec.x, vec.y, vec.z - else: # Euler - rot = rot.to_euler(rot_mode, rot_eul_prev) - rot_eul_prev = rot + combined_fbx_times, values_arrays = _combine_curve_keyframes(times_and_values_tuples, initial_values) - # Add each keyframe and its value to the keyframe dict - for fc, value in zip(blen_curves, chain(loc, rot, sca)): - store_keyframe(fc, frame, value) + bl_key_times = _convert_fbx_time_to_blender_time(combined_fbx_times, anim_offset, 0, fps) - # Add all keyframe points to the fcurves at once and modify them after - for fc_key, key_values in keyframes.items(): - data_path, index = fc_key + flattened_channel_values_gen = _transformation_curves_gen(item, values_arrays, channel_keys) - # Add all keyframe points at once - fcurve = action.fcurves.find(data_path=data_path, index=index) - num_keys = len(key_values) // 2 - fcurve.keyframe_points.add(num_keys) - fcurve.keyframe_points.foreach_set('co', key_values) - linear_enum_value = bpy.types.Keyframe.bl_rna.properties['interpolation'].enum_items['LINEAR'].value - fcurve.keyframe_points.foreach_set('interpolation', (linear_enum_value,) * num_keys) + num_loc_channels = 3 + num_rot_channels = 4 if rot_mode in {'QUATERNION', 'AXIS_ANGLE'} else 3 # Variations of EULER are all 3 + num_sca_channels = 3 + num_channels = num_loc_channels + num_rot_channels + num_sca_channels + num_frames = len(combined_fbx_times) + full_length = num_channels * num_frames - # Since we inserted our keyframes in 'ultra-fast' mode, we have to update the fcurves now. - for fc in blen_curves: - fc.update() + # TODO: It may be beneficial to iterate into np.float64 since the generator yields Python floats + flattened_channel_values = np.fromiter(flattened_channel_values_gen, dtype=np.single, count=full_length) + # Reshape to one row per frame and then view the transpose so that each row corresponds to a single channel. + # e.g. 
+ # loc_channels = channel_values[:num_loc_channels] + # rot_channels = channel_values[num_loc_channels:num_loc_channels + num_rot_channels] + # sca_channels = channel_values[num_loc_channels + num_rot_channels:] + channel_values = flattened_channel_values.reshape(num_frames, num_channels).T + + for blen_curve, values in zip(blen_curves, channel_values): + # TODO: The bl_key_times is used more than once, meaning we duplicate some of the work + blen_store_keyframes(blen_curve, bl_key_times, values) def blen_read_animations(fbx_tmpl_astack, fbx_tmpl_alayer, stacks, scene, anim_offset, global_scale): -- 2.30.2 From ff192f6f3f835cfdc2edef405de346e8701173e8 Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Wed, 30 Aug 2023 21:15:38 +0100 Subject: [PATCH 03/12] Optimise and tidy up combining curves with interpolation --- io_scene_fbx/import_fbx.py | 238 ++++++++++++++++++++++--------------- 1 file changed, 142 insertions(+), 96 deletions(-) diff --git a/io_scene_fbx/import_fbx.py b/io_scene_fbx/import_fbx.py index bb76f8f33..8b929ac3b 100644 --- a/io_scene_fbx/import_fbx.py +++ b/io_scene_fbx/import_fbx.py @@ -642,6 +642,142 @@ def _combine_same_property_curves(times_and_values_tuples): return times_and_values_tuples[0] +def _interpolate_curves_linear(sorted_all_times, times_indices, times, values, initial_value): + # Find the indices of all times that need their values to be interpolated + needs_interpolation_mask = np.full(len(sorted_all_times), True) + needs_interpolation_mask[times_indices] = False + needs_interpolation_idx = np.flatnonzero(needs_interpolation_mask) + + if not needs_interpolation_idx.size: + # No indices need their values interpolated. + # This can happen when a curve contains all keyframe times of all the other curves, a notable case would be + # when all the imported curves have the same keyframe times. + return values + + # Create the extended values array that will contain `values` and the extra interpolated values for times in + # `sorted_all_times` that are not in `times`. + extended_values = np.empty_like(values, shape=len(sorted_all_times)) + + # Set the non-interpolated values + extended_values[times_indices] = values + + # We can use the fact that sorted_all_times, times_indices and times are all sorted and unique to perform linear + # interpolation with a better scaling time complexity than np.interp, but np.interp is a C-compiled function and + # will pretty much always outperform a step-by-step linear interpolation by calling various NumPy functions. + interp_values = np.interp(sorted_all_times[needs_interpolation_idx], times, values, left=initial_value) + + extended_values[needs_interpolation_idx] = interp_values + extended_values[times_indices] = values + return extended_values + + +def _interpolate_curves(sorted_all_times, times_indices, _times, values, initial_value): + extended_values = np.empty_like(values, shape=len(sorted_all_times)) + + # Because times was sorted, we can get the region within extended_values or sorted_all_times from the first + # time in `times` to the last time in `times`. + # Elements within this region may need interpolation. + # Elements outside this region would result in extrapolation, which we do not do, instead setting an + # `initial_value` or maintaining the last value in `values` + interp_start_full_incl = times_indices[0] + interp_end_full_excl = times_indices[-1] + 1 + + # Fill in the times that would result in extrapolation with their fixed values. 
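+    # E.g. with times = [10, 20] and sorted_all_times = [0, 10, 15, 20, 30], the value at time 0 becomes
+    # initial_value, the value at time 30 holds values[-1], and only time 15 is actually interpolated.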
+ extended_values[:interp_start_full_incl] = initial_value + extended_values[interp_end_full_excl:] = values[-1] + + # Get the regions of extended_values and sorted_all_times where interpolation will take place. + extended_values_interp_region = extended_values[interp_start_full_incl:interp_end_full_excl] + all_times_interp_region = sorted_all_times[interp_start_full_incl:interp_end_full_excl] + + # The index in `extended_values_interp_region` of each value in `times` + interp_region_times_indices = times_indices - times_indices[0] + + # Fill in the times that already have values. + # Same as `extended_values[times_indices] = values`. + extended_values_interp_region[interp_region_times_indices] = values + + # Construct a mask of the values within the interp_region that need interpolation + needs_interpolation_mask = np.full(len(extended_values_interp_region), True, dtype=bool) + needs_interpolation_mask[interp_region_times_indices] = False + # When the number of elements needing interpolation is much smaller than the total number of elements, it can be + # faster to calculate indices from the mask and then index using the indices instead of indexing using the mask. + needs_interpolation_idx = np.flatnonzero(needs_interpolation_mask) + + if not needs_interpolation_idx.size: + # No times need interpolating, we're done. + return extended_values + + # Because both `all_times_sorted` and `times` are sorted, the index in `all_times_sorted` of each value in + # `times` must be increasing. Using this fact, we can find the index of the previous and next non-interpolated + # time for each interpolated time, by taking min/max accumulations across the indices of the non-interpolated + # times. + # This performs similarly to doing a binary search with np.searchsorted when `times` and `interp_times` are + # small, but np.searchsorted scales worse with larger `times` and `interp_times`: + # interp_times = all_times_interp_region[needs_interpolation_idx] + # prev_indices = np.searchsorted(times, interp_times) + # # This only works because `times` and `interp_times` are disjoint. + # next_indices = prev_indices + 1 + # prev_times = times[prev_indices] + # next_times = times[next_indices] + # prev_values = values[prev_indices] + # next_values = values[next_indices] + # First create arrays of indices. + prev_indices = np.arange(len(extended_values_interp_region)) + next_indices = prev_indices.copy() + + # Example prev_indices + # [0, 1, 2, 3, 4, 5, 6, 7] + # Example needs_interpolation_mask: + # [F, F, T, F, T, T, F, F] + # Set interpolated times indices to zero (using needs_interpolation_idx for performance): + # [0, 1, 0, 3, 0, 0, 6, 7] + # maximum.accumulate: + # [0, 1, 1, 3, 4, 4, 6, 7] + # Extract the values at each index requiring interpolation (using needs_interpolation_idx for performance): + # [ 1, 4, 4, ] + # The extracted indices are the indices of the previous non-interpolated time/value. + prev_indices[needs_interpolation_idx] = 0 + prev_indices = np.maximum.accumulate(prev_indices)[needs_interpolation_idx] + + # The same as prev_value_indices, but using minimum and accumulating from right to left. 
+ # Example next_indices: + # [0, 1, 2, 3, 4, 5, 6, 7] + # Example needs_interpolation_mask: + # [F, F, T, F, T, T, F, F] + # Set interpolated times indices to the maximum index (using needs_interpolation_idx for performance): + # [0, 1, 7, 3, 7, 7, 6, 7] + # minimum.accumulate from right to left by creating a flipped view, running minimum.accumulate and then creating + # a flipped view of the result: + # flip: + # [7, 6, 7, 7, 3, 7, 1, 0] + # minimum.accumulate: + # [7, 6, 6, 6, 3, 3, 1, 0] + # flip: + # [0, 1, 3, 3, 6, 6, 6, 7] + # Extract the values at each index requiring interpolation (using needs_interpolation_idx for performance): + # [ 3, 6, 6, ] + # The extracted indices are the indices of the next non-interpolated time/value. + next_indices[needs_interpolation_idx] = len(extended_values_interp_region) - 1 + next_indices = np.flip(np.minimum.accumulate(np.flip(next_indices)))[needs_interpolation_idx] + + prev_times = all_times_interp_region[prev_indices] + next_times = all_times_interp_region[next_indices] + prev_values = extended_values_interp_region[prev_indices] + next_values = extended_values_interp_region[next_indices] + + # This linear interpolation is an example intended to be replaced with other kinds of interpolation once they are + # supported. + # - Begin linear interpolation + interp_times = all_times_interp_region[needs_interpolation_idx] + ifac = (interp_times - prev_times) / (next_times - prev_times) + interp_values = ifac * (next_values - prev_values) + prev_values + # - End linear interpolation + + extended_values_interp_region[needs_interpolation_idx] = interp_values + return extended_values + + def _combine_curve_keyframes(times_and_values_tuples, initial_values): """Combine multiple sorted animation curves, that affect different properties, such that every animation curve contains the keyframes from every other curve, interpolating the values for the newly inserted keyframes in each @@ -649,118 +785,28 @@ def _combine_curve_keyframes(times_and_values_tuples, initial_values): Currently, linear interpolation is assumed, but FBX does store how keyframes should be interpolated, so correctly interpolating the keyframe values is a TODO.""" - # all_times = [] - # #all_values = [] - # #all_curve_idx = [] - # for i, (times, values) in enumerate(times_and_values_tuples): - # all_times.append(times) - # #all_values.append(values) - # #all_curve_idx.append(np.full_like(times, i)) - # all_times = np.concatenate(all_times) - # #all_curve_idx = np.concatenate(all_curve_idx) - # perm = np.argsort(all_times, kind='stable') - # - # sorted_all_times = all_times[perm] - # - # # Get the index in sorted_all_times of each time in all_times - # all_times_indices = np.empty(len(perm), dtype=np.intp) - # all_times_indices[perm] = np.arange(len(perm)) + if len(times_and_values_tuples) == 1: + # Nothing to do when there is only a single curve. + return times_and_values_tuples[0] all_times = [t[0] for t in times_and_values_tuples] # Get sorted unique times and the index in sorted_all_times of each time in all_times sorted_all_times, all_times_indices = np.unique(np.concatenate(all_times), return_inverse=True) - #sorted_all_values = all_values[perm] - #sorted_curve_idx = all_curve_idx[perm] # An alternative would be to concatenated filled arrays with the index of each array and then index that by perm, # then a mask for each array can be found by checking for values that equal the index of that array. 
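+    # E.g. for two curves with 2 and 3 keys: ids = np.concatenate([np.zeros(2), np.ones(3)])[perm];
+    # the mask for the first curve would then be ids == 0. (Just a sketch of the alternative; `perm`
+    # would be the argsort of the concatenated times.)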
values_arrays = [] times_start = 0 for (times, values), initial_value in zip(times_and_values_tuples, initial_values): times_end = times_start + len(times) - # The index in sorted_all_times of each value in times + # The index in `sorted_all_times` of each value in `times`. times_indices = all_times_indices[times_start:times_end] - # Update times_start for the next array + # Update times_start for the next array. times_start = times_end - # todo: Not sure the best way to mask out the values here, will need investigating - - #times_extended = sorted_all_times.copy() - - needs_interpolation_mask = np.full(len(sorted_all_times), True) - needs_interpolation_mask[times_indices] = False - #imported_times_mask = ~needs_interpolation_mask - - # # Need to find the before and after times for each time that needs interpolation - # # Times are sorted, so the smallest and largest are simply the first and last values. - # min_time_value = values[0] - # max_time_value = values[-1] - - # todo: It's possible we can get the same result faster by doing the interpolation manually, since we can figure - # out the before and after values for each time that needs interpolating (this would also likely make it easier - # to update the function to support other interpolation than just linear). - interp_values = np.interp(sorted_all_times[needs_interpolation_mask], times, values, left=initial_value) - - # Alt, though we typically expect there to be few times that need interpolation: - # extended_values = np.interp(sorted_all_times, times, values, left=initial_value) - - extended_values = np.empty_like(values, shape=len(sorted_all_times)) - extended_values[needs_interpolation_mask] = interp_values - extended_values[times_indices] = values - + extended_values = _interpolate_curves_linear(sorted_all_times, times_indices, times, values, initial_value) values_arrays.append(extended_values) - - # FIXME: If we have two curves with the same times, aren't they going to break because they're going to try and - # interpolate the same times? - # times1 = [1,2,3,4] - # times2 = [1,2,3,4] - # sorted_all_times = [1,1,2,2,3,3,4,4]... not good - - # # Manual linear interpolation (it may be easier to extend for other interpolation): - # # Get the index of the previous and next elements that are not interpolated - # prev_value_indices = np.arange(len(sorted_all_times), dtype=np.intp) - # next_value_indices = prev_value_indices.copy() - # prev_value_indices[needs_interpolation_mask] = times_indices[0] - # next_value_indices[needs_interpolation_mask] = times_indices[-1] - # prev_value_indices = np.maximum.accumulate(prev_value_indices) - # next_value_indices = np.flip(np.minimum.accumulate(np.flip(next_value_indices))) - # - # # TODO: May be faster to simply not index by needs_interpolation_mask every time and then only index by - # # needs_interpolation_mask at the end. 
- # prev_times = sorted_all_times[prev_value_indices][needs_interpolation_mask] - # prev_values = sorted_all_values[prev_value_indices][needs_interpolation_mask] - # next_times = sorted_all_times[next_value_indices][needs_interpolation_mask] - # next_values = sorted_all_values[next_value_indices][needs_interpolation_mask] - # - # interp_times = sorted_all_times[needs_interpolation_mask] - # ifac = (interp_times - prev_times) / (next_times - prev_times) - # interp_values = ifac * (next_values - prev_values) + prev_values - - # ifac = (sorted_all_times[needs_interpolation_mask] - prev_times) / () - # - # values_extended = - # - # min_before = np.full_like(sorted_all_times, times[0]) - # max_after = np.full_like(sorted_all_times, times[-1]) - # - # # FIXME: But we need the indices so we can get the before and after values and interpolate between those... - # before_times = times_extended.copy() - # before_times[needs_interpolation_mask] = min_time - # before_times = np.maximum.accumulate(before_times) - # - # after_times = times_extended.copy() - # after_times[needs_interpolation_mask] = max_time - # after_times = np.flip(np.minimum.accumulate(np.flip(after_times))) - # - # before_times[needs_interpolation_mask] - # - # - # - # - # times_full = sorted_all_times.copy() - # values_full = np.full_like(sorted_all_times, np.nan) - # values_full[sorted_curve_idx == i] = values return sorted_all_times, values_arrays -- 2.30.2 From 6124ee60affdae83ed4a335522c23a3d31e55b5b Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Wed, 30 Aug 2023 22:12:48 +0100 Subject: [PATCH 04/12] Remove the expansive information on how other interpolation can be set up --- io_scene_fbx/import_fbx.py | 158 +++---------------------------------- 1 file changed, 10 insertions(+), 148 deletions(-) diff --git a/io_scene_fbx/import_fbx.py b/io_scene_fbx/import_fbx.py index 8b929ac3b..c0440513f 100644 --- a/io_scene_fbx/import_fbx.py +++ b/io_scene_fbx/import_fbx.py @@ -642,142 +642,6 @@ def _combine_same_property_curves(times_and_values_tuples): return times_and_values_tuples[0] -def _interpolate_curves_linear(sorted_all_times, times_indices, times, values, initial_value): - # Find the indices of all times that need their values to be interpolated - needs_interpolation_mask = np.full(len(sorted_all_times), True) - needs_interpolation_mask[times_indices] = False - needs_interpolation_idx = np.flatnonzero(needs_interpolation_mask) - - if not needs_interpolation_idx.size: - # No indices need their values interpolated. - # This can happen when a curve contains all keyframe times of all the other curves, a notable case would be - # when all the imported curves have the same keyframe times. - return values - - # Create the extended values array that will contain `values` and the extra interpolated values for times in - # `sorted_all_times` that are not in `times`. - extended_values = np.empty_like(values, shape=len(sorted_all_times)) - - # Set the non-interpolated values - extended_values[times_indices] = values - - # We can use the fact that sorted_all_times, times_indices and times are all sorted and unique to perform linear - # interpolation with a better scaling time complexity than np.interp, but np.interp is a C-compiled function and - # will pretty much always outperform a step-by-step linear interpolation by calling various NumPy functions. 
- interp_values = np.interp(sorted_all_times[needs_interpolation_idx], times, values, left=initial_value) - - extended_values[needs_interpolation_idx] = interp_values - extended_values[times_indices] = values - return extended_values - - -def _interpolate_curves(sorted_all_times, times_indices, _times, values, initial_value): - extended_values = np.empty_like(values, shape=len(sorted_all_times)) - - # Because times was sorted, we can get the region within extended_values or sorted_all_times from the first - # time in `times` to the last time in `times`. - # Elements within this region may need interpolation. - # Elements outside this region would result in extrapolation, which we do not do, instead setting an - # `initial_value` or maintaining the last value in `values` - interp_start_full_incl = times_indices[0] - interp_end_full_excl = times_indices[-1] + 1 - - # Fill in the times that would result in extrapolation with their fixed values. - extended_values[:interp_start_full_incl] = initial_value - extended_values[interp_end_full_excl:] = values[-1] - - # Get the regions of extended_values and sorted_all_times where interpolation will take place. - extended_values_interp_region = extended_values[interp_start_full_incl:interp_end_full_excl] - all_times_interp_region = sorted_all_times[interp_start_full_incl:interp_end_full_excl] - - # The index in `extended_values_interp_region` of each value in `times` - interp_region_times_indices = times_indices - times_indices[0] - - # Fill in the times that already have values. - # Same as `extended_values[times_indices] = values`. - extended_values_interp_region[interp_region_times_indices] = values - - # Construct a mask of the values within the interp_region that need interpolation - needs_interpolation_mask = np.full(len(extended_values_interp_region), True, dtype=bool) - needs_interpolation_mask[interp_region_times_indices] = False - # When the number of elements needing interpolation is much smaller than the total number of elements, it can be - # faster to calculate indices from the mask and then index using the indices instead of indexing using the mask. - needs_interpolation_idx = np.flatnonzero(needs_interpolation_mask) - - if not needs_interpolation_idx.size: - # No times need interpolating, we're done. - return extended_values - - # Because both `all_times_sorted` and `times` are sorted, the index in `all_times_sorted` of each value in - # `times` must be increasing. Using this fact, we can find the index of the previous and next non-interpolated - # time for each interpolated time, by taking min/max accumulations across the indices of the non-interpolated - # times. - # This performs similarly to doing a binary search with np.searchsorted when `times` and `interp_times` are - # small, but np.searchsorted scales worse with larger `times` and `interp_times`: - # interp_times = all_times_interp_region[needs_interpolation_idx] - # prev_indices = np.searchsorted(times, interp_times) - # # This only works because `times` and `interp_times` are disjoint. - # next_indices = prev_indices + 1 - # prev_times = times[prev_indices] - # next_times = times[next_indices] - # prev_values = values[prev_indices] - # next_values = values[next_indices] - # First create arrays of indices. 
- prev_indices = np.arange(len(extended_values_interp_region)) - next_indices = prev_indices.copy() - - # Example prev_indices - # [0, 1, 2, 3, 4, 5, 6, 7] - # Example needs_interpolation_mask: - # [F, F, T, F, T, T, F, F] - # Set interpolated times indices to zero (using needs_interpolation_idx for performance): - # [0, 1, 0, 3, 0, 0, 6, 7] - # maximum.accumulate: - # [0, 1, 1, 3, 4, 4, 6, 7] - # Extract the values at each index requiring interpolation (using needs_interpolation_idx for performance): - # [ 1, 4, 4, ] - # The extracted indices are the indices of the previous non-interpolated time/value. - prev_indices[needs_interpolation_idx] = 0 - prev_indices = np.maximum.accumulate(prev_indices)[needs_interpolation_idx] - - # The same as prev_value_indices, but using minimum and accumulating from right to left. - # Example next_indices: - # [0, 1, 2, 3, 4, 5, 6, 7] - # Example needs_interpolation_mask: - # [F, F, T, F, T, T, F, F] - # Set interpolated times indices to the maximum index (using needs_interpolation_idx for performance): - # [0, 1, 7, 3, 7, 7, 6, 7] - # minimum.accumulate from right to left by creating a flipped view, running minimum.accumulate and then creating - # a flipped view of the result: - # flip: - # [7, 6, 7, 7, 3, 7, 1, 0] - # minimum.accumulate: - # [7, 6, 6, 6, 3, 3, 1, 0] - # flip: - # [0, 1, 3, 3, 6, 6, 6, 7] - # Extract the values at each index requiring interpolation (using needs_interpolation_idx for performance): - # [ 3, 6, 6, ] - # The extracted indices are the indices of the next non-interpolated time/value. - next_indices[needs_interpolation_idx] = len(extended_values_interp_region) - 1 - next_indices = np.flip(np.minimum.accumulate(np.flip(next_indices)))[needs_interpolation_idx] - - prev_times = all_times_interp_region[prev_indices] - next_times = all_times_interp_region[next_indices] - prev_values = extended_values_interp_region[prev_indices] - next_values = extended_values_interp_region[next_indices] - - # This linear interpolation is an example intended to be replaced with other kinds of interpolation once they are - # supported. - # - Begin linear interpolation - interp_times = all_times_interp_region[needs_interpolation_idx] - ifac = (interp_times - prev_times) / (next_times - prev_times) - interp_values = ifac * (next_values - prev_values) + prev_values - # - End linear interpolation - - extended_values_interp_region[needs_interpolation_idx] = interp_values - return extended_values - - def _combine_curve_keyframes(times_and_values_tuples, initial_values): """Combine multiple sorted animation curves, that affect different properties, such that every animation curve contains the keyframes from every other curve, interpolating the values for the newly inserted keyframes in each @@ -791,21 +655,19 @@ def _combine_curve_keyframes(times_and_values_tuples, initial_values): all_times = [t[0] for t in times_and_values_tuples] - # Get sorted unique times and the index in sorted_all_times of each time in all_times - sorted_all_times, all_times_indices = np.unique(np.concatenate(all_times), return_inverse=True) + # Get the combined sorted unique times of all the curves. + sorted_all_times = np.unique(np.concatenate(all_times)) - # An alternative would be to concatenated filled arrays with the index of each array and then index that by perm, - # then a mask for each array can be found by checking for values that equal the index of that array. 
values_arrays = [] - times_start = 0 for (times, values), initial_value in zip(times_and_values_tuples, initial_values): - times_end = times_start + len(times) - # The index in `sorted_all_times` of each value in `times`. - times_indices = all_times_indices[times_start:times_end] - # Update times_start for the next array. - times_start = times_end - - extended_values = _interpolate_curves_linear(sorted_all_times, times_indices, times, values, initial_value) + if sorted_all_times.size == times.size: + # `sorted_all_times` will always contain all values in `times` and both `times` and `sorted_all_times` must + # be strictly increasing, so if both arrays have the same size, they must be identical. + extended_values = values + else: + # For now, linear interpolation is assumed. NumPy conveniently has a fast C-compiled function for this. + # Efficiently implementing other FBX supported interpolation will most likely be much more complicated. + extended_values = np.interp(sorted_all_times, times, values, left=initial_value) values_arrays.append(extended_values) return sorted_all_times, values_arrays -- 2.30.2 From 285fea09d6e109e65943450b3830289fb7e2ef45 Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Wed, 30 Aug 2023 23:39:49 +0100 Subject: [PATCH 05/12] Faster generator for yielding indices of invalid animation curves --- io_scene_fbx/import_fbx.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/io_scene_fbx/import_fbx.py b/io_scene_fbx/import_fbx.py index c0440513f..1fa03119a 100644 --- a/io_scene_fbx/import_fbx.py +++ b/io_scene_fbx/import_fbx.py @@ -686,10 +686,11 @@ def blen_read_invalid_animation_curve(key_times, key_values): # Unsure if this can be vectorized with numpy, so using iteration for now. def index_gen(): idx = 0 + key_times_data = key_times.data key_times_len = len(key_times) # Iterating .data, the memoryview of the array, is faster than iterating the array directly. for curr_fbxktime in sorted_unique_times.data: - if key_times[idx] < curr_fbxktime: + if key_times_data[idx] < curr_fbxktime: if idx >= 0: idx += 1 if idx >= key_times_len: -- 2.30.2 From b3c8b483d8077f317a7e424198d3ddfa17e91b9c Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Thu, 31 Aug 2023 01:12:03 +0100 Subject: [PATCH 06/12] Move fbx to Blender keyframe time conversion into blen_store_keyframes Add faster blen_store_keyframes_multi for when multiple fcurves have the same keyframe times. --- io_scene_fbx/import_fbx.py | 145 +++++++++++-------------------------- 1 file changed, 44 insertions(+), 101 deletions(-) diff --git a/io_scene_fbx/import_fbx.py b/io_scene_fbx/import_fbx.py index 1fa03119a..b4b92a9ae 100644 --- a/io_scene_fbx/import_fbx.py +++ b/io_scene_fbx/import_fbx.py @@ -50,6 +50,8 @@ from .fbx_utils import ( expand_shape_key_range, ) +LINEAR_INTERPOLATION_VALUE = bpy.types.Keyframe.bl_rna.properties['interpolation'].enum_items['LINEAR'].value + # global singleton, assign on execution fbx_elem_nil = None @@ -766,11 +768,6 @@ def blen_read_single_animation_curve(fbx_curve): """Read a single animation curve from FBX data. 
The parsed keyframe times are guaranteed to be strictly increasing.""" - # TODO: Remove these, we can do all time conversion at the very end, just before combining times and values into a - # single array - # from .fbx_utils import FBX_KTIME - # timefac = fps / FBX_KTIME - key_times = parray_as_ndarray(elem_prop_first(elem_find_first(fbx_curve, b'KeyTime'))) key_values = parray_as_ndarray(elem_prop_first(elem_find_first(fbx_curve, b'KeyValueFloat'))) @@ -788,30 +785,6 @@ def blen_read_single_animation_curve(fbx_curve): # FBX will still read animation curves even if they are invalid. return blen_read_invalid_animation_curve(key_times, key_values) - # todo When we have transformation curves (or more than one curve per channel (optional support)) separately combine - # singular parsed curves and fill in the gaps with linear interpolation. .concatenate and .unique the key_times - # arrays with return_inverse=True. Use the lengths of each key_times array and their order in the concatenation to - # get the index of each of their elements in the sorted, unique concatenation. - # For each key_times array, create an all True array and use those indices to set values to False. - # Copy the sorted, unique concatenation and use this new mask to effectively delete all times that didn't come from - # this key_times array. Use .maximum.accumulate and a reversed .minimum.accumulate to get the first time before and - # first time after each time that needs its value to be interpolated. These two arrays get the start and end times - # to interpolate from. For each time that needs its value to be interpolated, get the values for the start and end - # times and then use those and the times that needs their values interpolated to calculate the interpolated values. - # Care will need to be taken for times where there is no first value before or where there is no first value after, - # in which case interpolation can't take place and we'll either need to start set values at the very start and end - # or otherwise fill the values that can't be interpolated with a default value or the first/last value in - # key_times. - - if not all_times_strictly_increasing: - # We try to match how FBX behaves when it encounters an invalid KeyTime array. This doesn't quite match when the - # maximum value is not the last value (FBX discards some keyframes whereas we don't), but it's close enough. - - # Start the curve from the index of the smallest KeyTime value. - min_idx = np.amin(key_times) if key_times.size else 0 - key_times = key_times[min_idx:] - key_values = key_values[min_idx:] - max_idx = np.amax(key_times) if key_times.size else 0 # If the largest KeyTime value is at the last index then it's simple. if max_idx == key_times.size - 1: @@ -990,76 +963,45 @@ def blen_read_single_animation_curve(fbx_curve): return key_times, key_values -def blen_store_keyframes(blen_fcurve, key_times, key_values): +def blen_store_keyframes(fbx_key_times, blen_fcurve, key_values, blen_start_offset, fps, fbx_start_offset=0): """Set all keyframe times and values for a newly created FCurve. + Linear interpolation is currently assumed. 
+ + This is a convenience function for calling blen_store_keyframes_multi with only a single fcurve and values array.""" + blen_store_keyframes_multi(fbx_key_times, [(blen_fcurve, key_values)], blen_start_offset, fps, fbx_start_offset) + + +def blen_store_keyframes_multi(fbx_key_times, fcurve_and_key_values_pairs, blen_start_offset, fps, fbx_start_offset=0): + """Set all keyframe times and values for multiple pairs of newly created FCurves and keyframe values arrays, where + each pair has the same keyframe times. Linear interpolation is currently assumed.""" - # The fcurve must be newly created and thus have no keyframe_points. - assert(len(blen_fcurve.keyframe_points) == 0) - num_keys = len(key_times) + bl_key_times = _convert_fbx_time_to_blender_time(fbx_key_times, blen_start_offset, fbx_start_offset, fps) + num_keys = len(bl_key_times) # Compatible with C float type bl_keyframe_dtype = np.single # Compatible with C char type bl_enum_dtype = np.byte - # TODO: get this value once and store it as a global variable - linear_enum_value = bpy.types.Keyframe.bl_rna.properties['interpolation'].enum_items['LINEAR'].value + # The keyframe_points 'co' are accessed as flattened pairs of (time, value). + # The key times are the same for each (blen_fcurve, key_values) pair, so only the values need to be updatedfor each array of values. + keyframe_points_co = np.empty(len(bl_key_times) * 2, dtype=bl_keyframe_dtype) + keyframe_points_co[0::2] = bl_key_times - # Stack the arrays into a flattened array of flattened (frame, value) pairs - # Same as `np.column_stack(key_times, key_values).ravel()`, but allows specifying the dtype. - full_key_frame_array = np.concatenate((key_times.reshape(-1, 1), key_values.reshape(-1, 1)), - dtype=bl_keyframe_dtype, casting='unsafe', axis=1).ravel() + interpolation_array = np.full(num_keys, LINEAR_INTERPOLATION_VALUE, dtype=bl_enum_dtype) - # Add the keyframe points to the FCurve and then set the 'co' and 'interpolation' of each point. - blen_fcurve.keyframe_points.add(num_keys) - blen_fcurve.keyframe_points.foreach_set('co', full_key_frame_array.ravel()) - blen_fcurve.keyframe_points.foreach_set('interpolation', np.full(num_keys, linear_enum_value, dtype=bl_enum_dtype)) + for blen_fcurve, key_values in fcurve_and_key_values_pairs: + # The fcurve must be newly created and thus have no keyframe_points. + assert(len(blen_fcurve.keyframe_points) == 0) + keyframe_points_co[1::2] = key_values - # Since we inserted our keyframes in 'ultra-fast' mode, we have to update the fcurves now. - blen_fcurve.update() + # Add the keyframe points to the FCurve and then set the 'co' and 'interpolation' of each point. + blen_fcurve.keyframe_points.add(num_keys) + blen_fcurve.keyframe_points.foreach_set('co', keyframe_points_co) + blen_fcurve.keyframe_points.foreach_set('interpolation', interpolation_array) - -# TODO: Remove this function -def blen_read_animations_curves_iter(fbx_curves, blen_start_offset, fbx_start_offset, fps): - """ - Get raw FBX AnimCurve list, and yield values for all curves at each singular curves' keyframes, - together with (blender) timing, in frames. - blen_start_offset is expected in frames, while fbx_start_offset is expected in FBX ktime. - """ - # As a first step, assume linear interpolation between key frames, we'll (try to!) handle more - # of FBX curves later. 
- from .fbx_utils import FBX_KTIME - timefac = fps / FBX_KTIME - - curves = tuple([0, - elem_prop_first(elem_find_first(c[2], b'KeyTime')), - elem_prop_first(elem_find_first(c[2], b'KeyValueFloat')), - c] - for c in fbx_curves) - - allkeys = sorted({item for sublist in curves for item in sublist[1]}) - for curr_fbxktime in allkeys: - curr_values = [] - for item in curves: - idx, times, values, fbx_curve = item - - if times[idx] < curr_fbxktime: - if idx >= 0: - idx += 1 - if idx >= len(times): - # We have reached our last element for this curve, stay on it from now on... - idx = -1 - item[0] = idx - - if times[idx] >= curr_fbxktime: - if idx == 0: - curr_values.append((values[idx], fbx_curve)) - else: - # Interpolate between this key and the previous one. - ifac = (curr_fbxktime - times[idx - 1]) / (times[idx] - times[idx - 1]) - curr_values.append(((values[idx] - values[idx - 1]) * ifac + values[idx - 1], fbx_curve)) - curr_blenkframe = (curr_fbxktime - fbx_start_offset) * timefac + blen_start_offset - yield (curr_blenkframe, curr_values) + # Since we inserted our keyframes in 'ultra-fast' mode, we have to update the fcurves now. + blen_fcurve.update() def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, global_scale, shape_key_deforms): @@ -1155,10 +1097,11 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo for channel, curves in channel_to_curves.items(): assert(channel in {0, 1, 2}) blen_curve = blen_curves[channel] + parsed_curves = tuple(map(blen_read_single_animation_curve, curves)) fbx_key_times, values = _combine_same_property_curves(parsed_curves) - bl_key_times = _convert_fbx_time_to_blender_time(fbx_key_times, anim_offset, 0, fps) - blen_store_keyframes(blen_curve, bl_key_times, values) + + blen_store_keyframes(fbx_key_times, blen_curve, values, anim_offset, fps) elif isinstance(item, ShapeKey): deform_values = shape_key_deforms.setdefault(item, []) @@ -1167,12 +1110,13 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo for channel, curves in channel_to_curves.items(): assert(channel == 0) blen_curve = blen_curves[channel] + parsed_curves = tuple(map(blen_read_single_animation_curve, curves)) fbx_key_times, values = _combine_same_property_curves(parsed_curves) - bl_key_times = _convert_fbx_time_to_blender_time(fbx_key_times, anim_offset, 0, fps) # A fully activated shape key in FBX DeformPercent is 100.0 whereas it is 1.0 in Blender. values = values / 100.0 - blen_store_keyframes(blen_curve, bl_key_times, values) + + blen_store_keyframes(fbx_key_times, blen_curve, values, anim_offset, fps) # Store the minimum and maximum shape key values, so that the shape key's slider range can be expanded if # necessary after reading all animations. deform_values.append(values.min()) @@ -1186,14 +1130,14 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo assert(channel == 0) # The indices are determined by the creation of the `props` list above. blen_curve = blen_curves[1 if is_focus_distance else 0] + parsed_curves = tuple(map(blen_read_single_animation_curve, curves)) fbx_key_times, values = _combine_same_property_curves(parsed_curves) - bl_key_times = _convert_fbx_time_to_blender_time(fbx_key_times, anim_offset, 0, fps) if is_focus_distance: # Remap the imported values from FBX to Blender. 
values = values / 1000.0 values *= global_scale - blen_store_keyframes(blen_curve, bl_key_times, values) + blen_store_keyframes(fbx_key_times, blen_curve, values, anim_offset, fps) else: # Object or PoseBone: transform_data = item.fbx_transform_data @@ -1223,13 +1167,14 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo initial_values.append(transform_prop_to_attr[fbxprop][channel]) times_and_values_tuples.append((fbx_key_times, values)) + if not times_and_values_tuples: + # If `times_and_values_tuples` is empty, all the imported animation curves are for properties other than + # transformation (e.g. animated custom properties), so there is nothing to do until support for these other + # properties is added. + return combined_fbx_times, values_arrays = _combine_curve_keyframes(times_and_values_tuples, initial_values) - bl_key_times = _convert_fbx_time_to_blender_time(combined_fbx_times, anim_offset, 0, fps) - - flattened_channel_values_gen = _transformation_curves_gen(item, values_arrays, channel_keys) - num_loc_channels = 3 num_rot_channels = 4 if rot_mode in {'QUATERNION', 'AXIS_ANGLE'} else 3 # Variations of EULER are all 3 num_sca_channels = 3 @@ -1237,7 +1182,7 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo num_frames = len(combined_fbx_times) full_length = num_channels * num_frames - # TODO: It may be beneficial to iterate into np.float64 since the generator yields Python floats + flattened_channel_values_gen = _transformation_curves_gen(item, values_arrays, channel_keys) flattened_channel_values = np.fromiter(flattened_channel_values_gen, dtype=np.single, count=full_length) # Reshape to one row per frame and then view the transpose so that each row corresponds to a single channel. # e.g. @@ -1246,9 +1191,7 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo # sca_channels = channel_values[num_loc_channels + num_rot_channels:] channel_values = flattened_channel_values.reshape(num_frames, num_channels).T - for blen_curve, values in zip(blen_curves, channel_values): - # TODO: The bl_key_times is used more than once, meaning we duplicate some of the work - blen_store_keyframes(blen_curve, bl_key_times, values) + blen_store_keyframes_multi(combined_fbx_times, zip(blen_curves, channel_values), anim_offset, fps) def blen_read_animations(fbx_tmpl_astack, fbx_tmpl_alayer, stacks, scene, anim_offset, global_scale): -- 2.30.2 From 3332d1936ae10618f2d885fc078a16e5a5c43558 Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Thu, 31 Aug 2023 01:12:44 +0100 Subject: [PATCH 07/12] Cleanup --- io_scene_fbx/import_fbx.py | 215 ------------------------------------- 1 file changed, 215 deletions(-) diff --git a/io_scene_fbx/import_fbx.py b/io_scene_fbx/import_fbx.py index b4b92a9ae..2f36b8509 100644 --- a/io_scene_fbx/import_fbx.py +++ b/io_scene_fbx/import_fbx.py @@ -785,183 +785,6 @@ def blen_read_single_animation_curve(fbx_curve): # FBX will still read animation curves even if they are invalid. return blen_read_invalid_animation_curve(key_times, key_values) - max_idx = np.amax(key_times) if key_times.size else 0 - # If the largest KeyTime value is at the last index then it's simple. 
- if max_idx == key_times.size - 1: - # Set each element to the maximum of itself and all elements before it - key_times = np.maximum.accumulate(key_times) - else: - # This works the same as Blender's original animation curve parser, without the conversion from FBX time to - # Blender time and modified to operate on a single curve rather than multiple - - # todo: Maybe these should be sorted instead? - # todo: Maybe these should be np.maximum.accumulate-d instead? - # Sorted unique key times - sorted_unique_times = np.unique(key_times) - - # TODO: How is this different from np.searchsorted on np.maximum.acccumulate-d times? Can we use it to find - # the points at which idx will increase and then np.cumsum those increases? - def parse_invalid_curve_times_to_indices(): - idx = 0 - times = key_times.data - num_times = len(times) - - for curr_fbxktime in sorted_unique_times.data: - curr_time = times[idx] - if curr_time < curr_fbxktime: - if idx >= 0: - idx += 1 - if idx >= num_times: - # We have reached our last element for this curve, stay on it from now on... - idx = -1 - yield idx - - key_time_indices = np.fromiter(parse_invalid_curve_times_to_indices(), dtype=np.int64) - key_times = key_times[key_time_indices] - key_values = key_values[key_values] - - # Filter out invalid times - valid_mask = key_times >= sorted_unique_times - key_times = key_times[valid_mask] - key_values = key_values[valid_mask] - - interpolated_values = np.empty_like(key_values) - interpolated_values[:1] = key_values[:1] - ifac = (sorted_unique_times[1:] - key_times[:-1]) / (key_values[1:] - key_times[:-1]) - interpolated_values[1:] = (key_values[1:] - key_values[:-1]) * ifac + key_values[:-1] - key_values = interpolated_values - - # def parse_curve_fallback_gen(): - # idx = 0 - # times = key_times.data - # num_times = len(times) - # values = key_values.data - # # Sorted unique times - # sorted_unique_times = np.unique(key_times) - # for curr_fbxktime in sorted_unique_times.data: - # curr_time = times[idx] - # if curr_time < curr_fbxktime: - # if idx >= 0: - # idx += 1 - # if idx >= num_times: - # # We have reached our last element for this curve, stay on it from now on... - # idx = -1 - # curr_time = times[idx] - # if curr_time >= curr_fbxktime: - # if idx == 0: - # curr_value = values[idx] - # else: - # # Interpolate between this key and the previous one - # prev_time = times[idx - 1] - # ifac = (curr_fbxktime - prev_time) / (curr_time - prev_time) - # prev_value = - # curr_value = (values[idx] - values[idx - 1]) * ifac + values[idx - 1] - # yield curr_fbxktime, curr_value - # structured_dtype = np.dtype([("time", key_times.dtype), ("value", key_values.dtype)]) - # times_and_values = np.fromiter(parse_curve_fallback_gen(), dtype=structured_dtype) - # key_times = times_and_values["time"] - # key_values = times_and_values["values"] - - - - - - # # todo: Get some printable attribute from fbx_curve, don't print the entire fbx_curve - # print("WARNING: Invalid animation keyframe times for %s. The key frame times that are not strictly increasing" - # " and the keyframes before the first keyframe chronologically have been discarded." % str(fbx_curve)) - # # We could sort the key times, but starting from the minimum value and then taking an accumulative maximum - # # better matches FBX and older Blender behaviour when the times are not in order. - # # FIXME: min_idx thing doesn't work, things get weird when the start and end aren't the min and max times... 
- # min_idx = np.amin(key_times) if key_times.size else 0 - # """ - # Ok, so moving the max_idx to not be last has odd effects... - # Given frames [0, 10, ..., 90, 100] and values [0, ..., 100]. Moving the last time into the: - # second-last position: Frames -> [0, ..., 90], values -> [0, ..., 80, 85] (not 90??) - # [..., 80, _90_, (100)] - # Perhaps [..., 80, ????, (90)] - # Interp [..., 80, 85, (90)] - # Frames [..., 80, 90, (100)] - # Blender appears to agree with Unity in this case, but Blender doesn't exclude the () element - # whereas Unity does. - # Interp [..., 80, 85, 90] - # Frames [..., 80, 90, 100] - # third-last position: Frames -> [0, ..., 70, 90], values -> [0, ..., 70, 76.6666] (not 80??) - # [..., 70, _80_, (90), (100)] - # Perhaps [..., 70, ????, ????, (90)] - # Interp [..., 70, 76.6, 83.3, (90)] - # Frames [..., 70, (80), 90, (100)] - # - # Blender sets frame 100 to 80 instead, which would have been the next value after 70, but Blender - # doesn't exclude the () elements whereas Unity does. - # Perhaps [..., 70, (??), ????, (80)] - # Interp [..., 70,(73.3),76.6, (80)] - # Frames [..., 70, 80, 90, 100] - # fourth-last position: Frames -> [0, ..., 60, 90], values -> [0, ..., 60, 67.5] - # [..., 60, _70_, (80), (90), (100)] - # Perhaps [..., 60, ????, ????, ????, (90)] - # Interp [..., 60, 67.5, 75, 82.5, (90)] - # Frames [..., 60, (70), (80), 90, (100)] - # - # Blender sets frame 100 to 70 instead, which would have been the next value after 60, but Blender - # doesn't exclude the () elements whereas Unity does. - # Perhaps [..., 60, (??), (??), ????, (70)] - # Interp [..., 60,(62.5),(65), 67.5, (70)] - # Frames [..., 60, 70, 80, 90, 100] - # TODO: Try changing the 90 value to something else and see if the interpolations still hold. - # """ - # max_idx = np.amax(key_times) if key_times.size else 0 # max idx might also get reduced by min_idx? - # last_idx = len(key_times) - 1 - # if max_idx != last_idx: - # max_idx = last_idx - 2 # Seems to exclude the last two keyframes for some reason... - # key_times = key_times[min_idx:max_idx] - # key_values = key_values[min_idx:max_idx] - # key_times = np.maximum.accumulate(key_times) - # # TODO: As an alternative to finding the duplicates ourselves, we could just return key_times and key_values - # # as they are from here, letting Blender remove the duplicates when calling .update(). - # # Yes, we should leave it to Blender, imagine a transform channel with values [10, 2, 5, 20] at times - # # [1, 12, 12, 40], and then another transform channel with times at [6, 30]. The first channel will need to - # # interpolate for the missing times of 6 and 30. - # unique_mask = np.empty_like(key_times, dtype=bool) - # # Remove keyframes at duplicate times. Blender would do this when calling .update() on the FCurve. Where there - # # are duplicate times, Blender only keeps the last duplicate. - # # Note that this may produce different results to software that uses the FBX SDK, because it does not remove - # # duplicates. - # # Because key_times is now in ascending order, unique elements are the last element and elements that are - # # pairwise not equal. - # # 1,1,1,2,3,3,4,5,5 - # # F,F,T,T,F,T,T,F - # np.not_equal(key_times[:-1], key_times[1:], out=unique_mask[:-1]) - # # The last element is always kept: - # # F,F,T,T,F,T,T,F,T - # unique_mask[-1:] = True - # - # # # FIXME: This currently gets the first unique time, but if we were to import even duplicate times, when we - # # # .update() the FCurve, Blender keeps only the *last* unique time. 
- # # # Remove duplicates. Because key_times is now in ascending order, unique elements are the first element and - # # # elements that are pairwise not equal. - # # # 1,1,1,2,3,3,4,5 - # # # F,F,T,T,F,T,T - # # # The first element is always unique: - # # # T,F,F,T,T,F,T,T - # # unique_mask[:1] = True - # # np.not_equal(key_times[:-1], key_times[1:], out=unique_mask[1:]) - # # - # # #indices = np.where(unique_mask, np.arange(len(unique_mask), 0)) - # # #indices = np.maximum.accumulate(indices) - # # - # # Use the mask to get only the times (and their values) that are strictly increasing. - # key_times = key_times[unique_mask] - # key_values = key_values[unique_mask] - - # Convert from FBX timing to Blender timing. - # Cannot subtract in-place because this curve could be used in multiple Actions. - key_times = key_times - fbx_start_offset - # timefac is a Python float, so the new array will be a np.float64 array. - key_times = key_times * timefac - key_times += blen_start_offset - - return key_times, key_values - def blen_store_keyframes(fbx_key_times, blen_fcurve, key_values, blen_start_offset, fps, fbx_start_offset=0): """Set all keyframe times and values for a newly created FCurve. @@ -1010,8 +833,6 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo taking any pre_ and post_ matrix into account to transform from fbx into blender space. """ from bpy.types import Object, PoseBone, ShapeKey, Material, Camera - # todo: Remove this import - from itertools import chain fbx_curves: dict[bytes, dict[int, list[FBXElem]]] = {} for curves, fbxprop in cnodes.values(): @@ -1019,46 +840,10 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo for (fbx_acdata, _blen_data), channel in curves.values(): channels_dict.setdefault(channel, []).append(fbx_acdata) - # fbx_curves = [] - # used_channels = set() - # warn_multiple_curves_per_channel = False - # for curves, fbxprop in cnodes.values(): - # channels_dict = fbx_curves_props_channels.setdefault(fbxprop, {}) - # for (fbx_acdata, _blen_data), channel in curves.values(): - # channels_dict.setdefault(channel, []).append(fbx_acdata) - # - # for (fbx_acdata, _blen_data), channel in curves.values(): - # channel_id = (fbxprop, channel) - # if channel_id in used_channels: - # # The FBX animation system's default implementation only uses the first curve assigned to a channel. - # # Additional curves per channel are allowed by the FBX specification, but the handling of these curves - # # is considered the responsibility of the application that created them. Note that each curve node is - # # expected to have a unique set of channels, so these additional curves with the same channel would have - # # to belong to separate curve nodes. See the FBX SDK documentation for FbxAnimCurveNode. - # warn_multiple_curves_per_channel = True - # else: - # used_channels.add(channel_id) - # fbx_curves.append((fbxprop, channel, fbx_acdata)) - # if warn_multiple_curves_per_channel: - # print("WARNING: Multiple animation curves per animated property channel were found for %s. All but the first" - # "curve for each property channel has been discarded." % action.name) - # Leave if no curves are attached (if a blender curve is attached to scale but without keys it defaults to 0). 
if len(fbx_curves) == 0: return - # todo: Remove these - # blen_curves = [] - # props = [] - # keyframes = {} - - # # Add each keyframe to the keyframe dict - # def store_keyframe(fc, frame, value): - # fc_key = (fc.data_path, fc.array_index) - # if not keyframes.get(fc_key): - # keyframes[fc_key] = [] - # keyframes[fc_key].extend((frame, value)) - if isinstance(item, Material): grpname = item.name props = [("diffuse_color", 3, grpname or "Diffuse Color")] -- 2.30.2 From fded8b6eb56e460879b4588b3d9af55b2c886fd8 Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Thu, 31 Aug 2023 02:16:47 +0100 Subject: [PATCH 08/12] Small optimisations and more documentation for _transformation_curves_gen --- io_scene_fbx/import_fbx.py | 44 ++++++++++++++++++++++---------------- 1 file changed, 25 insertions(+), 19 deletions(-) diff --git a/io_scene_fbx/import_fbx.py b/io_scene_fbx/import_fbx.py index 2f36b8509..0419b1019 100644 --- a/io_scene_fbx/import_fbx.py +++ b/io_scene_fbx/import_fbx.py @@ -16,7 +16,7 @@ if "bpy" in locals(): import bpy from bpy.app.translations import pgettext_tip as tip_ -from mathutils import Matrix, Euler, Vector +from mathutils import Matrix, Euler, Vector, Quaternion # Also imported in .fbx_utils, so importing here is unlikely to further affect Blender startup time. import numpy as np @@ -527,8 +527,16 @@ def blen_read_object_transform_preprocess(fbx_props, fbx_obj, rot_alt_mat, use_p # --------- # Animation def _transformation_curves_gen(item, values_arrays, channel_keys): + """Yields flattened location/rotation/scaling values for imported PoseBone/Object Lcl Translation/Rotation/Scaling + animation curve values. + + The value arrays must have the same lengths where each index of each array corresponds to a single keyframe. + + Each value array must have a corresponding channel key tuple that identifies the fbx property + (b'Lcl Translation'/b'Lcl Rotation'/b'Lcl Scaling') and the channel (x/y/z as 0/1/2) of that property.""" from operator import setitem from functools import partial + if item.is_bone: bl_obj = item.bl_obj.pose.bones[item.bl_bone] else: @@ -548,6 +556,11 @@ def _transformation_curves_gen(item, values_arrays, channel_keys): b'Lcl Scaling': transform_data.sca, } + # Create a setter into transform_data for each values array. e.g. a values array for 'Lcl Scaling' with channel == 2 + # would set transform_data.sca[2]. + setters = [partial(setitem, transform_prop_to_attr[fbx_prop], channel) for fbx_prop, channel in channel_keys] + frame_values_it = zip(*(iter(arr.data) for arr in values_arrays)) + # Pre-get/calculate these to reduce the work done inside the hot loop. anim_compensation_matrix = item.anim_compensation_matrix do_anim_compensation_matrix = bool(anim_compensation_matrix) @@ -560,24 +573,17 @@ def _transformation_curves_gen(item, values_arrays, channel_keys): do_restmat_inv = bool(restmat_inv) - # Create a setter into transform_data for each values array. e.g. a values array for 'Lcl Scaling' with channel == 2 - # would set transform_data.sca[2]. 
- # TODO: Might be faster to create a list of each transform_prop_to_attr[fbx_prop] and a list of channels, then zip - # both and in the main loop, do transform_data_attr[channel] = value - setters = [partial(setitem, transform_prop_to_attr[fbx_prop], channel) for fbx_prop, channel in channel_keys] - zipped_values_iterators = zip(*(iter(arr.data) for arr in values_arrays)) + decompose = Matrix.decompose + to_axis_angle = Quaternion.to_axis_angle + to_euler = Quaternion.to_euler - # todo: Rather than having to get the Matrix/Quaternion methods upon each call within the loop, we can instead get - # them in advance. - # Before the loop: - # `mat_decompose = Matrix.decompose` - # then within the loop: - # `mat_decompose(mat)` - - for values in zipped_values_iterators: - for setter, value in zip(setters, values): + # Iterate through the values for each frame. + for frame_values in frame_values_it: + # Set each value into its corresponding attribute in transform_data. + for setter, value in zip(setters, frame_values): setter(value) + # Calculate the updated matrix for this frame. mat, _, _ = blen_read_object_transform_do(transform_data) # compensate for changes in the local matrix during processing @@ -597,16 +603,16 @@ def _transformation_curves_gen(item, values_arrays, channel_keys): mat = restmat_inv @ mat # Now we have a virtual matrix of transform from AnimCurves, we can insert keyframes! - loc, rot, sca = mat.decompose() + loc, rot, sca = decompose(mat) if rot_mode == 'QUATERNION': if rot_quat_prev.dot(rot) < 0.0: rot = -rot rot_quat_prev = rot elif rot_mode == 'AXIS_ANGLE': - vec, ang = rot.to_axis_angle() + vec, ang = to_axis_angle(rot) rot = ang, vec.x, vec.y, vec.z else: # Euler - rot = rot.to_euler(rot_mode, rot_eul_prev) + rot = to_euler(rot, rot_mode, rot_eul_prev) rot_eul_prev = rot # Yield order matches the order that the location/rotation/scale FCurves are created in. -- 2.30.2 From d83d71e45b2ecc6d0b5d9ebbd5d414f0870dd4a1 Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Thu, 31 Aug 2023 03:21:05 +0100 Subject: [PATCH 09/12] Combine use of _combine_same_property_curves and blen_read_single_animation_curve into a single function --- io_scene_fbx/import_fbx.py | 48 +++++++++++++++++--------------------- 1 file changed, 22 insertions(+), 26 deletions(-) diff --git a/io_scene_fbx/import_fbx.py b/io_scene_fbx/import_fbx.py index 0419b1019..b08d370ed 100644 --- a/io_scene_fbx/import_fbx.py +++ b/io_scene_fbx/import_fbx.py @@ -621,33 +621,35 @@ def _transformation_curves_gen(item, values_arrays, channel_keys): yield from sca -def _combine_same_property_curves(times_and_values_tuples): - """Combine multiple sorted animation curves, that affect the same property, into a single sorted animation curve.""" - if len(times_and_values_tuples) > 1: - # TODO: Print a warning to the console that more than one curve was found +def blen_read_animation_channel_curves(curves): + """Read one or (rarely) more animation curves, that affect the same channel of the same property, from FBX data. + + When there are multiple curves, they will be combined into a single sorted animation curve. + + Though, it is expected that there will almost never be more than a single curve to read because multiple curves + affecting the same channel of the same property is not part of FBX's default animation system. 
+ + Returns an array of sorted, unique FBX keyframe times and an array of values for each of those keyframe times.""" + if len(curves) > 1: + times_and_values_tuples = list(map(blen_read_single_animation_curve, curves)) # The FBX animation system's default implementation only uses the first curve assigned to a channel. # Additional curves per channel are allowed by the FBX specification, but the handling of these curves is # considered the responsibility of the application that created them. Note that each curve node is expected to # have a unique set of channels, so these additional curves with the same channel would have to belong to # separate curve nodes. See the FBX SDK documentation for FbxAnimCurveNode. - # Concatenate all the times into one array and all the values into one array. - all_times = np.concatenate([t[0] for t in times_and_values_tuples]) - all_values = np.concatenate([t[1] for t in times_and_values_tuples]) + # Combine the curves together to produce a single array of sorted keyframe times and a single array of values. + # The arrays are concatenated in reverse so that if there are duplicate times in the read curves, then only the + # value of the last occurrence is kept. + all_times = np.concatenate([t[0] for t in reversed(times_and_values_tuples)]) + all_values = np.concatenate([t[1] for t in reversed(times_and_values_tuples)]) # Get the unique, sorted times and the index in all_times of the first occurrence of each unique value. sorted_unique_times, unique_indices_in_all_times = np.unique(all_times, return_index=True) values_of_sorted_unique_times = all_values[unique_indices_in_all_times] return sorted_unique_times, values_of_sorted_unique_times - - # # Get the indices that would sort all_times. - # # Use a stable algorithm so that if there are any duplicate times, they maintain their original order. - # perm = np.argsort(kind='stable') - # # Use the indices to sort both all_times and all_values. - # all_times = all_times[perm] - # all_values = all_values[perm] else: - return times_and_values_tuples[0] + return blen_read_single_animation_curve(curves[0]) def _combine_curve_keyframes(times_and_values_tuples, initial_values): @@ -888,10 +890,7 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo for channel, curves in channel_to_curves.items(): assert(channel in {0, 1, 2}) blen_curve = blen_curves[channel] - - parsed_curves = tuple(map(blen_read_single_animation_curve, curves)) - fbx_key_times, values = _combine_same_property_curves(parsed_curves) - + fbx_key_times, values = blen_read_animation_channel_curves(curves) blen_store_keyframes(fbx_key_times, blen_curve, values, anim_offset, fps) elif isinstance(item, ShapeKey): @@ -902,12 +901,11 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo assert(channel == 0) blen_curve = blen_curves[channel] - parsed_curves = tuple(map(blen_read_single_animation_curve, curves)) - fbx_key_times, values = _combine_same_property_curves(parsed_curves) + fbx_key_times, values = blen_read_animation_channel_curves(curves) # A fully activated shape key in FBX DeformPercent is 100.0 whereas it is 1.0 in Blender. values = values / 100.0 - blen_store_keyframes(fbx_key_times, blen_curve, values, anim_offset, fps) + # Store the minimum and maximum shape key values, so that the shape key's slider range can be expanded if # necessary after reading all animations. 
deform_values.append(values.min()) @@ -922,8 +920,7 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo # The indices are determined by the creation of the `props` list above. blen_curve = blen_curves[1 if is_focus_distance else 0] - parsed_curves = tuple(map(blen_read_single_animation_curve, curves)) - fbx_key_times, values = _combine_same_property_curves(parsed_curves) + fbx_key_times, values = blen_read_animation_channel_curves(curves) if is_focus_distance: # Remap the imported values from FBX to Blender. values = values / 1000.0 @@ -950,8 +947,7 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo continue for channel, curves in channel_to_curves.items(): assert(channel in {0, 1, 2}) - parsed_curves = tuple(map(blen_read_single_animation_curve, curves)) - fbx_key_times, values = _combine_same_property_curves(parsed_curves) + fbx_key_times, values = blen_read_animation_channel_curves(curves) channel_keys.append((fbxprop, channel)) -- 2.30.2 From cf782b4b21d426e9ffdf0c8e0ebadfeadb8729e1 Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Thu, 31 Aug 2023 04:50:36 +0100 Subject: [PATCH 10/12] Cleanup --- io_scene_fbx/import_fbx.py | 51 ++++++++++++++++++++++---------------- 1 file changed, 30 insertions(+), 21 deletions(-) diff --git a/io_scene_fbx/import_fbx.py b/io_scene_fbx/import_fbx.py index b08d370ed..e2d3520e9 100644 --- a/io_scene_fbx/import_fbx.py +++ b/io_scene_fbx/import_fbx.py @@ -530,7 +530,7 @@ def _transformation_curves_gen(item, values_arrays, channel_keys): """Yields flattened location/rotation/scaling values for imported PoseBone/Object Lcl Translation/Rotation/Scaling animation curve values. - The value arrays must have the same lengths where each index of each array corresponds to a single keyframe. + The value arrays must have the same lengths, where each index of each array corresponds to a single keyframe. Each value array must have a corresponding channel key tuple that identifies the fbx property (b'Lcl Translation'/b'Lcl Rotation'/b'Lcl Scaling') and the channel (x/y/z as 0/1/2) of that property.""" @@ -561,7 +561,7 @@ def _transformation_curves_gen(item, values_arrays, channel_keys): setters = [partial(setitem, transform_prop_to_attr[fbx_prop], channel) for fbx_prop, channel in channel_keys] frame_values_it = zip(*(iter(arr.data) for arr in values_arrays)) - # Pre-get/calculate these to reduce the work done inside the hot loop. + # Pre-get/calculate these to slightly reduce the work done inside the loop. anim_compensation_matrix = item.anim_compensation_matrix do_anim_compensation_matrix = bool(anim_compensation_matrix) @@ -602,7 +602,7 @@ def _transformation_curves_gen(item, values_arrays, channel_keys): if do_restmat_inv: mat = restmat_inv @ mat - # Now we have a virtual matrix of transform from AnimCurves, we can insert keyframes! + # Now we have a virtual matrix of transform from AnimCurves, we can yield keyframe values! loc, rot, sca = decompose(mat) if rot_mode == 'QUATERNION': if rot_quat_prev.dot(rot) < 0.0: @@ -622,12 +622,13 @@ def _transformation_curves_gen(item, values_arrays, channel_keys): def blen_read_animation_channel_curves(curves): - """Read one or (rarely) more animation curves, that affect the same channel of the same property, from FBX data. + """Read one or (very rarely) more animation curves, that affect a single same channel of a single property, from FBX + data. When there are multiple curves, they will be combined into a single sorted animation curve. 
- Though, it is expected that there will almost never be more than a single curve to read because multiple curves - affecting the same channel of the same property is not part of FBX's default animation system. + It is expected that there will almost never be more than a single curve to read because FBX's default animation + system only uses the first curve assigned to a channel. Returns an array of sorted, unique FBX keyframe times and an array of values for each of those keyframe times.""" if len(curves) > 1: @@ -652,7 +653,7 @@ def blen_read_animation_channel_curves(curves): return blen_read_single_animation_curve(curves[0]) -def _combine_curve_keyframes(times_and_values_tuples, initial_values): +def _combine_curve_keyframe_times(times_and_values_tuples, initial_values): """Combine multiple sorted animation curves, that affect different properties, such that every animation curve contains the keyframes from every other curve, interpolating the values for the newly inserted keyframes in each curve. @@ -711,14 +712,13 @@ def blen_read_invalid_animation_curve(key_times, key_values): indexed_times = key_times[indices] indexed_values = key_values[indices] - # Interpolate the value for each time in sorted_unique_times according to the times and values at each index and - # the previous index. + # Interpolate the value for each time in sorted_unique_times according to the times and values at each index and the + # previous index. interpolated_values = np.empty_like(indexed_values) - # Where the index is 0, there's no previous value to interpolate from, so we set the value without - # interpolating. - # Because the indices are in increasing order, all zeroes must be at the start, so we can find the index of the - # last zero and use that to index with a slice instead of a boolean array for performance. + # Where the index is 0, there's no previous value to interpolate from, so we set the value without interpolating. + # Because the indices are in increasing order, all zeroes must be at the start, so we can find the index of the last + # zero and use that to index with a slice instead of a boolean array for performance. # Equivalent to, but as a slice: # idx_zero_mask = indices == 0 # idx_nonzero_mask = ~idx_zero_mask @@ -757,7 +757,6 @@ def blen_read_invalid_animation_curve(key_times, key_values): def _convert_fbx_time_to_blender_time(key_times, blen_start_offset, fbx_start_offset, fps): - # todo: Could move this into blen_store_keyframes since it probably doesn't need to be used anywhere else from .fbx_utils import FBX_KTIME timefac = fps / FBX_KTIME @@ -788,8 +787,6 @@ def blen_read_single_animation_curve(fbx_curve): if all_times_strictly_increasing: return key_times, key_values else: - # todo: Print something to the console warning that the animation curve was invalid. - # FBX will still read animation curves even if they are invalid. return blen_read_invalid_animation_curve(key_times, key_values) @@ -815,8 +812,10 @@ def blen_store_keyframes_multi(fbx_key_times, fcurve_and_key_values_pairs, blen_ bl_enum_dtype = np.byte # The keyframe_points 'co' are accessed as flattened pairs of (time, value). - # The key times are the same for each (blen_fcurve, key_values) pair, so only the values need to be updatedfor each array of values. + # The key times are the same for each (blen_fcurve, key_values) pair, so only the values need to be updated for each + # array of values. keyframe_points_co = np.empty(len(bl_key_times) * 2, dtype=bl_keyframe_dtype) + # Even indices are times. 
keyframe_points_co[0::2] = bl_key_times interpolation_array = np.full(num_keys, LINEAR_INTERPOLATION_VALUE, dtype=bl_enum_dtype) @@ -824,6 +823,8 @@ def blen_store_keyframes_multi(fbx_key_times, fcurve_and_key_values_pairs, blen_ for blen_fcurve, key_values in fcurve_and_key_values_pairs: # The fcurve must be newly created and thus have no keyframe_points. assert(len(blen_fcurve.keyframe_points) == 0) + + # Odd indices are values. keyframe_points_co[1::2] = key_values # Add the keyframe points to the FCurve and then set the 'co' and 'interpolation' of each point. @@ -906,8 +907,8 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo values = values / 100.0 blen_store_keyframes(fbx_key_times, blen_curve, values, anim_offset, fps) - # Store the minimum and maximum shape key values, so that the shape key's slider range can be expanded if - # necessary after reading all animations. + # Store the minimum and maximum shape key values, so that the shape key's slider range can be expanded + # if necessary after reading all animations. deform_values.append(values.min()) deform_values.append(values.max()) @@ -956,12 +957,16 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo times_and_values_tuples.append((fbx_key_times, values)) if not times_and_values_tuples: # If `times_and_values_tuples` is empty, all the imported animation curves are for properties other than - # transformation (e.g. animated custom properties), so there is nothing to do until support for these other + # transformation (e.g. animated custom properties), so there is nothing to do until support for those other # properties is added. return - combined_fbx_times, values_arrays = _combine_curve_keyframes(times_and_values_tuples, initial_values) + # Combine the keyframe times of all the transformation curves so that each curve has a value at every time. + combined_fbx_times, values_arrays = _combine_curve_keyframe_times(times_and_values_tuples, initial_values) + # Convert from FBX Lcl Translation/Lcl Rotation/Lcl Scaling to the Blender location/rotation/scaling properties + # of this Object/PoseBone. + # The number of fcurves for the Blender properties varies depending on the rotation mode. num_loc_channels = 3 num_rot_channels = 4 if rot_mode in {'QUATERNION', 'AXIS_ANGLE'} else 3 # Variations of EULER are all 3 num_sca_channels = 3 @@ -969,8 +974,10 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo num_frames = len(combined_fbx_times) full_length = num_channels * num_frames + # Do the conversion. flattened_channel_values_gen = _transformation_curves_gen(item, values_arrays, channel_keys) flattened_channel_values = np.fromiter(flattened_channel_values_gen, dtype=np.single, count=full_length) + # Reshape to one row per frame and then view the transpose so that each row corresponds to a single channel. # e.g. # loc_channels = channel_values[:num_loc_channels] @@ -978,6 +985,8 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo # sca_channels = channel_values[num_loc_channels + num_rot_channels:] channel_values = flattened_channel_values.reshape(num_frames, num_channels).T + # Each channel has the same keyframe times, so the combined times can be passed once along with all the curves + # and values arrays. 
blen_store_keyframes_multi(combined_fbx_times, zip(blen_curves, channel_values), anim_offset, fps) -- 2.30.2 From ae09d491d88a6082ef45dd68c26e0e23d0a13581 Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Thu, 31 Aug 2023 13:38:24 +0100 Subject: [PATCH 11/12] Cleanup: comments --- io_scene_fbx/import_fbx.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/io_scene_fbx/import_fbx.py b/io_scene_fbx/import_fbx.py index e2d3520e9..525e469e3 100644 --- a/io_scene_fbx/import_fbx.py +++ b/io_scene_fbx/import_fbx.py @@ -622,10 +622,11 @@ def _transformation_curves_gen(item, values_arrays, channel_keys): def blen_read_animation_channel_curves(curves): - """Read one or (very rarely) more animation curves, that affect a single same channel of a single property, from FBX + """Read one or (very rarely) more animation curves, that affect a single channel of a single property, from FBX data. - When there are multiple curves, they will be combined into a single sorted animation curve. + When there are multiple curves, they will be combined into a single sorted animation curve with later curves taking + precedence when the curves contain duplicate times. It is expected that there will almost never be more than a single curve to read because FBX's default animation system only uses the first curve assigned to a channel. @@ -654,7 +655,7 @@ def blen_read_animation_channel_curves(curves): def _combine_curve_keyframe_times(times_and_values_tuples, initial_values): - """Combine multiple sorted animation curves, that affect different properties, such that every animation curve + """Combine multiple parsed animation curves, that affect different channels, such that every animation curve contains the keyframes from every other curve, interpolating the values for the newly inserted keyframes in each curve. @@ -705,6 +706,7 @@ def blen_read_invalid_animation_curve(key_times, key_values): if idx >= 0: idx += 1 if idx >= key_times_len: + # We have reached our last element for this curve, stay on it from now on... idx = -1 yield idx @@ -712,8 +714,8 @@ def blen_read_invalid_animation_curve(key_times, key_values): indexed_times = key_times[indices] indexed_values = key_values[indices] - # Interpolate the value for each time in sorted_unique_times according to the times and values at each index and the - # previous index. + # Linear interpolate the value for each time in sorted_unique_times according to the times and values at each index + # and the previous index. interpolated_values = np.empty_like(indexed_values) # Where the index is 0, there's no previous value to interpolate from, so we set the value without interpolating. @@ -763,7 +765,7 @@ def _convert_fbx_time_to_blender_time(key_times, blen_start_offset, fbx_start_of # Convert from FBX timing to Blender timing. # Cannot subtract in-place because key_times could be read directly from FBX and could be used by multiple Actions. key_times = key_times - fbx_start_offset - # timefac is a Python float, so the new array will be a np.float64 array. + # FBX times are integers and timefac is a Python float, so the new array will be a np.float64 array. 
key_times = key_times * timefac key_times += blen_start_offset -- 2.30.2 From 464e036073e3b8601faf4b73745f7734e3ac3fe1 Mon Sep 17 00:00:00 2001 From: Thomas Barlow Date: Thu, 31 Aug 2023 23:56:17 +0100 Subject: [PATCH 12/12] Cleanup: Remove explicit iter use in frame_values_it and add comments --- io_scene_fbx/import_fbx.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/io_scene_fbx/import_fbx.py b/io_scene_fbx/import_fbx.py index 525e469e3..7d18ef494 100644 --- a/io_scene_fbx/import_fbx.py +++ b/io_scene_fbx/import_fbx.py @@ -559,7 +559,12 @@ def _transformation_curves_gen(item, values_arrays, channel_keys): # Create a setter into transform_data for each values array. e.g. a values array for 'Lcl Scaling' with channel == 2 # would set transform_data.sca[2]. setters = [partial(setitem, transform_prop_to_attr[fbx_prop], channel) for fbx_prop, channel in channel_keys] - frame_values_it = zip(*(iter(arr.data) for arr in values_arrays)) + # Create an iterator that gets one value from each array. Each iterated tuple will be all the imported + # Lcl Translation/Lcl Rotation/Lcl Scaling values for a single frame, in that order. + # Note that an FBX animation does not have to animate all the channels, so only the animated channels of each + # property will be present. + # .data, the memoryview of an np.ndarray, is faster to iterate than the ndarray itself. + frame_values_it = zip(*(arr.data for arr in values_arrays)) # Pre-get/calculate these to slightly reduce the work done inside the loop. anim_compensation_matrix = item.anim_compensation_matrix -- 2.30.2