FBX IO: Speed up animation import using NumPy #104856

Merged
Thomas Barlow merged 12 commits from Mysteryem/blender-addons:fbx_import_anim_numpy_p1 into main 2023-09-04 22:07:45 +02:00
Showing only changes of commit ff192f6f3f

@@ -642,6 +642,142 @@ def _combine_same_property_curves(times_and_values_tuples):
return times_and_values_tuples[0]

def _interpolate_curves_linear(sorted_all_times, times_indices, times, values, initial_value):
# Find the indices of all times that need their values to be interpolated
needs_interpolation_mask = np.full(len(sorted_all_times), True)
needs_interpolation_mask[times_indices] = False
needs_interpolation_idx = np.flatnonzero(needs_interpolation_mask)
if not needs_interpolation_idx.size:
# No indices need their values interpolated.
# This can happen when a curve contains all the keyframe times of all the other curves; a notable case is
# when all the imported curves have the same keyframe times.
return values
# Create the extended values array that will contain `values` and the extra interpolated values for times in
# `sorted_all_times` that are not in `times`.
extended_values = np.empty_like(values, shape=len(sorted_all_times))
# Set the non-interpolated values
extended_values[times_indices] = values
# We could use the fact that sorted_all_times, times_indices and times are all sorted and unique to perform
# linear interpolation with a better scaling time complexity than np.interp, but np.interp is a C-compiled
# function and will pretty much always outperform a step-by-step linear interpolation built from multiple
# NumPy function calls.
interp_values = np.interp(sorted_all_times[needs_interpolation_idx], times, values, left=initial_value)
extended_values[needs_interpolation_idx] = interp_values
return extended_values
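
# A quick sanity check of the function above with toy data (example arrays, not importer output):
#   sorted_all_times = np.array([0.0, 10.0, 20.0, 30.0])
#   times_indices = np.array([1, 3])
#   times = np.array([10.0, 30.0])
#   values = np.array([1.0, 3.0])
#   _interpolate_curves_linear(sorted_all_times, times_indices, times, values, 0.0)
#   # -> array([0., 1., 2., 3.]); the 0.0 comes from `initial_value` (np.interp's `left`) and the 2.0 is
#   # linearly interpolated between the keyframes at times 10.0 and 30.0.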

def _interpolate_curves(sorted_all_times, times_indices, _times, values, initial_value):
extended_values = np.empty_like(values, shape=len(sorted_all_times))
# Because `times` is sorted, we can get the region within `extended_values`/`sorted_all_times` that spans
# from the first time in `times` to the last time in `times`.
# Elements within this region may need interpolation.
# Elements outside this region would require extrapolation, which we do not do; instead they are set to
# `initial_value` (before the first time) or to the last value in `values` (after the last time).
interp_start_full_incl = times_indices[0]
interp_end_full_excl = times_indices[-1] + 1
# Fill in the times that would result in extrapolation with their fixed values.
extended_values[:interp_start_full_incl] = initial_value
extended_values[interp_end_full_excl:] = values[-1]
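# For example (toy data): with sorted_all_times = [0, 10, 20, 30, 40] and times = [10, 30], so that
# times_indices = [1, 3], the slice [:1] is filled with initial_value, the slice [4:] with values[-1], and
# only indices 1 to 3 fall inside the interpolation region.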
# Get the regions of extended_values and sorted_all_times where interpolation will take place.
extended_values_interp_region = extended_values[interp_start_full_incl:interp_end_full_excl]
all_times_interp_region = sorted_all_times[interp_start_full_incl:interp_end_full_excl]
# The index in `extended_values_interp_region` of each value in `times`
interp_region_times_indices = times_indices - times_indices[0]
# Fill in the times that already have values.
# Same as `extended_values[times_indices] = values`.
extended_values_interp_region[interp_region_times_indices] = values
# Construct a mask of the values within the interp_region that need interpolation
needs_interpolation_mask = np.full(len(extended_values_interp_region), True, dtype=bool)
needs_interpolation_mask[interp_region_times_indices] = False
# When the number of elements needing interpolation is much smaller than the total number of elements, it can be
# faster to calculate indices from the mask and then index using the indices instead of indexing using the mask.
needs_interpolation_idx = np.flatnonzero(needs_interpolation_mask)
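# (e.g. for an array `arr` of this length, `arr[needs_interpolation_idx]` only touches the few elements that
# need interpolation, whereas `arr[needs_interpolation_mask]` has to scan the entire boolean mask first)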
if not needs_interpolation_idx.size:
# No times need interpolating, we're done.
return extended_values
# Because both `sorted_all_times` and `times` are sorted, the index in `sorted_all_times` of each value in
# `times` must be increasing. Using this fact, we can find the index of the previous and next non-interpolated
# time for each interpolated time, by taking min/max accumulations across the indices of the non-interpolated
# times.
# This performs similarly to doing a binary search with np.searchsorted when `times` and `interp_times` are
# small, but np.searchsorted scales worse with larger `times` and `interp_times`:
# interp_times = all_times_interp_region[needs_interpolation_idx]
# # Because `times` and `interp_times` are disjoint, searchsorted returns the index of the next
# # non-interpolated time, so the previous non-interpolated time is at the index immediately before it.
# next_indices = np.searchsorted(times, interp_times)
# prev_indices = next_indices - 1
# prev_times = times[prev_indices]
# next_times = times[next_indices]
# prev_values = values[prev_indices]
# next_values = values[next_indices]
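# e.g. (toy data) with times = [10., 30.] and interp_times = [20.], np.searchsorted(times, interp_times)
# returns [1], the index of the next keyframe time (30.), hence the `- 1` for the previous one (10.).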
# First create arrays of indices.
prev_indices = np.arange(len(extended_values_interp_region))
next_indices = prev_indices.copy()
# Example prev_indices
# [0, 1, 2, 3, 4, 5, 6, 7]
# Example needs_interpolation_mask:
# [F, F, T, F, T, T, F, F]
# Set interpolated times indices to zero (using needs_interpolation_idx for performance):
# [0, 1, 0, 3, 0, 0, 6, 7]
# maximum.accumulate:
# [0, 1, 1, 3, 3, 3, 6, 7]
# Extract the values at each index requiring interpolation (using needs_interpolation_idx for performance):
# [ 1, 3, 3, ]
# The extracted indices are the indices of the previous non-interpolated time/value.
prev_indices[needs_interpolation_idx] = 0
prev_indices = np.maximum.accumulate(prev_indices)[needs_interpolation_idx]
# The same as for prev_indices, but using minimum and accumulating from right to left.
# Example next_indices:
# [0, 1, 2, 3, 4, 5, 6, 7]
# Example needs_interpolation_mask:
# [F, F, T, F, T, T, F, F]
# Set interpolated times indices to the maximum index (using needs_interpolation_idx for performance):
# [0, 1, 7, 3, 7, 7, 6, 7]
# minimum.accumulate from right to left by creating a flipped view, running minimum.accumulate and then creating
# a flipped view of the result:
# flip:
# [7, 6, 7, 7, 3, 7, 1, 0]
# minimum.accumulate:
# [7, 6, 6, 6, 3, 3, 1, 0]
# flip:
# [0, 1, 3, 3, 6, 6, 6, 7]
# Extract the values at each index requiring interpolation (using needs_interpolation_idx for performance):
# [ 3, 6, 6, ]
# The extracted indices are the indices of the next non-interpolated time/value.
next_indices[needs_interpolation_idx] = len(extended_values_interp_region) - 1
next_indices = np.flip(np.minimum.accumulate(np.flip(next_indices)))[needs_interpolation_idx]
prev_times = all_times_interp_region[prev_indices]
next_times = all_times_interp_region[next_indices]
prev_values = extended_values_interp_region[prev_indices]
next_values = extended_values_interp_region[next_indices]
# This linear interpolation is an example intended to be replaced with other kinds of interpolation once they are
# supported.
# - Begin linear interpolation
interp_times = all_times_interp_region[needs_interpolation_idx]
ifac = (interp_times - prev_times) / (next_times - prev_times)
interp_values = ifac * (next_values - prev_values) + prev_values
# - End linear interpolation
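# e.g. (toy numbers) prev_time = 10, next_time = 30, interp_time = 20 gives ifac = 0.5, so the interpolated
# value lands halfway between prev_value and next_value.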
extended_values_interp_region[needs_interpolation_idx] = interp_values
return extended_values
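
# The prev/next neighbour lookup above can be checked in isolation with the toy arrays from the comments
# (example data only), with numpy imported as np:
#   mask = np.array([False, False, True, False, True, True, False, False])
#   idx = np.flatnonzero(mask)                           # [2, 4, 5]
#   prev = np.arange(len(mask)); prev[idx] = 0
#   np.maximum.accumulate(prev)[idx]                     # -> [1, 3, 3]
#   nxt = np.arange(len(mask)); nxt[idx] = len(mask) - 1
#   np.flip(np.minimum.accumulate(np.flip(nxt)))[idx]    # -> [3, 6, 6]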

def _combine_curve_keyframes(times_and_values_tuples, initial_values):
"""Combine multiple sorted animation curves that affect different properties, such that every animation curve
contains the keyframes from every other curve, interpolating the values for the newly inserted keyframes in each
@@ -649,118 +785,28 @@ def _combine_curve_keyframes(times_and_values_tuples, initial_values):
Currently, linear interpolation is assumed, but FBX does store how keyframes should be interpolated, so correctly
interpolating the keyframe values is a TODO."""
# all_times = []
# #all_values = []
# #all_curve_idx = []
# for i, (times, values) in enumerate(times_and_values_tuples):
# all_times.append(times)
# #all_values.append(values)
# #all_curve_idx.append(np.full_like(times, i))
# all_times = np.concatenate(all_times)
# #all_curve_idx = np.concatenate(all_curve_idx)
# perm = np.argsort(all_times, kind='stable')
#
# sorted_all_times = all_times[perm]
#
# # Get the index in sorted_all_times of each time in all_times
# all_times_indices = np.empty(len(perm), dtype=np.intp)
# all_times_indices[perm] = np.arange(len(perm))
if len(times_and_values_tuples) == 1:
# Nothing to do when there is only a single curve.
return times_and_values_tuples[0]
all_times = [t[0] for t in times_and_values_tuples]
# Get sorted unique times and the index in sorted_all_times of each time in all_times
sorted_all_times, all_times_indices = np.unique(np.concatenate(all_times), return_inverse=True)
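# For example (toy data), two curves with times [1, 3] and [1, 2, 3]:
#   np.unique(np.concatenate([[1, 3], [1, 2, 3]]), return_inverse=True)
#   # -> (array([1, 2, 3]), array([0, 2, 0, 1, 2]))
# i.e. sorted_all_times is deduplicated and all_times_indices maps each concatenated time back to its index
# in sorted_all_times.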
#sorted_all_values = all_values[perm]
#sorted_curve_idx = all_curve_idx[perm]
# An alternative would be to concatenate arrays filled with the index of each source array, index that by perm,
# and then find a mask for each source array by checking for values equal to that array's index.
values_arrays = []
times_start = 0
for (times, values), initial_value in zip(times_and_values_tuples, initial_values):
times_end = times_start + len(times)
# The index in `sorted_all_times` of each value in `times`.
times_indices = all_times_indices[times_start:times_end]
# Update times_start for the next array.
times_start = times_end
# todo: Not sure the best way to mask out the values here, will need investigating
#times_extended = sorted_all_times.copy()
needs_interpolation_mask = np.full(len(sorted_all_times), True)
needs_interpolation_mask[times_indices] = False
#imported_times_mask = ~needs_interpolation_mask
# # Need to find the before and after times for each time that needs interpolation
# # Times are sorted, so the smallest and largest are simply the first and last values.
# min_time_value = values[0]
# max_time_value = values[-1]
# todo: It's possible we can get the same result faster by doing the interpolation manually, since we can figure
# out the before and after values for each time that needs interpolating (this would also likely make it easier
# to update the function to support other interpolation than just linear).
interp_values = np.interp(sorted_all_times[needs_interpolation_mask], times, values, left=initial_value)
# Alt, though we typically expect there to be few times that need interpolation:
# extended_values = np.interp(sorted_all_times, times, values, left=initial_value)
extended_values = np.empty_like(values, shape=len(sorted_all_times))
extended_values[needs_interpolation_mask] = interp_values
extended_values[times_indices] = values
extended_values = _interpolate_curves_linear(sorted_all_times, times_indices, times, values, initial_value)
values_arrays.append(extended_values)
# FIXME: If we have two curves with the same times, aren't they going to break because they're going to try and
# interpolate the same times?
# times1 = [1,2,3,4]
# times2 = [1,2,3,4]
# sorted_all_times = [1,1,2,2,3,3,4,4]... not good
# # Manual linear interpolation (it may be easier to extend for other interpolation):
# # Get the index of the previous and next elements that are not interpolated
# prev_value_indices = np.arange(len(sorted_all_times), dtype=np.intp)
# next_value_indices = prev_value_indices.copy()
# prev_value_indices[needs_interpolation_mask] = times_indices[0]
# next_value_indices[needs_interpolation_mask] = times_indices[-1]
# prev_value_indices = np.maximum.accumulate(prev_value_indices)
# next_value_indices = np.flip(np.minimum.accumulate(np.flip(next_value_indices)))
#
# # TODO: May be faster to simply not index by needs_interpolation_mask every time and then only index by
# # needs_interpolation_mask at the end.
# prev_times = sorted_all_times[prev_value_indices][needs_interpolation_mask]
# prev_values = sorted_all_values[prev_value_indices][needs_interpolation_mask]
# next_times = sorted_all_times[next_value_indices][needs_interpolation_mask]
# next_values = sorted_all_values[next_value_indices][needs_interpolation_mask]
#
# interp_times = sorted_all_times[needs_interpolation_mask]
# ifac = (interp_times - prev_times) / (next_times - prev_times)
# interp_values = ifac * (next_values - prev_values) + prev_values
# ifac = (sorted_all_times[needs_interpolation_mask] - prev_times) / ()
#
# values_extended =
#
# min_before = np.full_like(sorted_all_times, times[0])
# max_after = np.full_like(sorted_all_times, times[-1])
#
# # FIXME: But we need the indices so we can get the before and after values and interpolate between those...
# before_times = times_extended.copy()
# before_times[needs_interpolation_mask] = min_time
# before_times = np.maximum.accumulate(before_times)
#
# after_times = times_extended.copy()
# after_times[needs_interpolation_mask] = max_time
# after_times = np.flip(np.minimum.accumulate(np.flip(after_times)))
#
# before_times[needs_interpolation_mask]
#
#
#
#
# times_full = sorted_all_times.copy()
# values_full = np.full_like(sorted_all_times, np.nan)
# values_full[sorted_curve_idx == i] = values
return sorted_all_times, values_arrays
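
# End-to-end sketch with two toy curves (hypothetical data) and initial values of 0.0:
#   curve_a = (np.array([0.0, 20.0]), np.array([0.0, 2.0]))
#   curve_b = (np.array([10.0, 20.0]), np.array([5.0, 7.0]))
#   times, (vals_a, vals_b) = _combine_curve_keyframes([curve_a, curve_b], [0.0, 0.0])
#   # times  -> [0.0, 10.0, 20.0]
#   # vals_a -> [0.0, 1.0, 2.0]   (the value at time 10.0 is interpolated between 0.0 and 2.0)
#   # vals_b -> [0.0, 5.0, 7.0]   (the value at time 0.0 comes from the initial value because it precedes
#   #                              curve_b's first keyframe)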