FBX IO: Speed up animation import using NumPy #104856
@@ -785,183 +785,6 @@ def blen_read_single_animation_curve(fbx_curve):
     # FBX will still read animation curves even if they are invalid.
     return blen_read_invalid_animation_curve(key_times, key_values)
-
-    max_idx = np.amax(key_times) if key_times.size else 0
-    # If the largest KeyTime value is at the last index then it's simple.
-    if max_idx == key_times.size - 1:
-        # Set each element to the maximum of itself and all elements before it
-        key_times = np.maximum.accumulate(key_times)
-    else:
-        # This works the same as Blender's original animation curve parser, without the conversion from FBX time to
-        # Blender time and modified to operate on a single curve rather than multiple
-
-        # todo: Maybe these should be sorted instead?
-        # todo: Maybe these should be np.maximum.accumulate-d instead?
-        # Sorted unique key times
-        sorted_unique_times = np.unique(key_times)
-
-        # TODO: How is this different from np.searchsorted on np.maximum.acccumulate-d times? Can we use it to find
-        # the points at which idx will increase and then np.cumsum those increases?
-        def parse_invalid_curve_times_to_indices():
-            idx = 0
-            times = key_times.data
-            num_times = len(times)
-
-            for curr_fbxktime in sorted_unique_times.data:
-                curr_time = times[idx]
-                if curr_time < curr_fbxktime:
-                    if idx >= 0:
-                        idx += 1
-                        if idx >= num_times:
-                            # We have reached our last element for this curve, stay on it from now on...
-                            idx = -1
-                yield idx
-
-        key_time_indices = np.fromiter(parse_invalid_curve_times_to_indices(), dtype=np.int64)
-        key_times = key_times[key_time_indices]
-        key_values = key_values[key_values]
-
-        # Filter out invalid times
-        valid_mask = key_times >= sorted_unique_times
-        key_times = key_times[valid_mask]
-        key_values = key_values[valid_mask]
-
-        interpolated_values = np.empty_like(key_values)
-        interpolated_values[:1] = key_values[:1]
-        ifac = (sorted_unique_times[1:] - key_times[:-1]) / (key_values[1:] - key_times[:-1])
-        interpolated_values[1:] = (key_values[1:] - key_values[:-1]) * ifac + key_values[:-1]
-        key_values = interpolated_values
-
-        # def parse_curve_fallback_gen():
-        #     idx = 0
-        #     times = key_times.data
-        #     num_times = len(times)
-        #     values = key_values.data
-        #     # Sorted unique times
-        #     sorted_unique_times = np.unique(key_times)
-        #     for curr_fbxktime in sorted_unique_times.data:
-        #         curr_time = times[idx]
-        #         if curr_time < curr_fbxktime:
-        #             if idx >= 0:
-        #                 idx += 1
-        #                 if idx >= num_times:
-        #                     # We have reached our last element for this curve, stay on it from now on...
-        #                     idx = -1
-        #                 curr_time = times[idx]
-        #         if curr_time >= curr_fbxktime:
-        #             if idx == 0:
-        #                 curr_value = values[idx]
-        #             else:
-        #                 # Interpolate between this key and the previous one
-        #                 prev_time = times[idx - 1]
-        #                 ifac = (curr_fbxktime - prev_time) / (curr_time - prev_time)
-        #                 prev_value =
-        #                 curr_value = (values[idx] - values[idx - 1]) * ifac + values[idx - 1]
-        #         yield curr_fbxktime, curr_value
-        # structured_dtype = np.dtype([("time", key_times.dtype), ("value", key_values.dtype)])
-        # times_and_values = np.fromiter(parse_curve_fallback_gen(), dtype=structured_dtype)
-        # key_times = times_and_values["time"]
-        # key_values = times_and_values["values"]
-
-
-
-
-
-    # # todo: Get some printable attribute from fbx_curve, don't print the entire fbx_curve
-    # print("WARNING: Invalid animation keyframe times for %s. The key frame times that are not strictly increasing"
-    #       " and the keyframes before the first keyframe chronologically have been discarded." % str(fbx_curve))
-    # # We could sort the key times, but starting from the minimum value and then taking an accumulative maximum
-    # # better matches FBX and older Blender behaviour when the times are not in order.
-    # # FIXME: min_idx thing doesn't work, things get weird when the start and end aren't the min and max times...
-    # min_idx = np.amin(key_times) if key_times.size else 0
-    # """
-    # Ok, so moving the max_idx to not be last has odd effects...
-    # Given frames [0, 10, ..., 90, 100] and values [0, ..., 100]. Moving the last time into the:
-    # second-last position: Frames -> [0, ..., 90], values -> [0, ..., 80, 85] (not 90??)
-    #         [..., 80, _90_, (100)]
-    # Perhaps [..., 80, ????, (90)]
-    # Interp  [..., 80, 85, (90)]
-    # Frames  [..., 80, 90, (100)]
-    # Blender appears to agree with Unity in this case, but Blender doesn't exclude the (<value>) element
-    # whereas Unity does.
-    # Interp  [..., 80, 85, 90]
-    # Frames  [..., 80, 90, 100]
-    # third-last position: Frames -> [0, ..., 70, 90], values -> [0, ..., 70, 76.6666] (not 80??)
-    #         [..., 70, _80_, (90), (100)]
-    # Perhaps [..., 70, ????, ????, (90)]
-    # Interp  [..., 70, 76.6, 83.3, (90)]
-    # Frames  [..., 70, (80), 90, (100)]
-    #
-    # Blender sets frame 100 to 80 instead, which would have been the next value after 70, but Blender
-    # doesn't exclude the (<value>) elements whereas Unity does.
-    # Perhaps [..., 70, (??), ????, (80)]
-    # Interp  [..., 70,(73.3),76.6, (80)]
-    # Frames  [..., 70, 80, 90, 100]
-    # fourth-last position: Frames -> [0, ..., 60, 90], values -> [0, ..., 60, 67.5]
-    #         [..., 60, _70_, (80), (90), (100)]
-    # Perhaps [..., 60, ????, ????, ????, (90)]
-    # Interp  [..., 60, 67.5, 75, 82.5, (90)]
-    # Frames  [..., 60, (70), (80), 90, (100)]
-    #
-    # Blender sets frame 100 to 70 instead, which would have been the next value after 60, but Blender
-    # doesn't exclude the (<value>) elements whereas Unity does.
-    # Perhaps [..., 60, (??), (??), ????, (70)]
-    # Interp  [..., 60,(62.5),(65), 67.5, (70)]
-    # Frames  [..., 60, 70, 80, 90, 100]
-    # TODO: Try changing the 90 value to something else and see if the interpolations still hold.
-    # """
-    # max_idx = np.amax(key_times) if key_times.size else 0  # max idx might also get reduced by min_idx?
-    # last_idx = len(key_times) - 1
-    # if max_idx != last_idx:
-    #     max_idx = last_idx - 2  # Seems to exclude the last two keyframes for some reason...
-    # key_times = key_times[min_idx:max_idx]
-    # key_values = key_values[min_idx:max_idx]
-    # key_times = np.maximum.accumulate(key_times)
-    # # TODO: As an alternative to finding the duplicates ourselves, we could just return key_times and key_values
-    # # as they are from here, letting Blender remove the duplicates when calling .update().
-    # # Yes, we should leave it to Blender, imagine a transform channel with values [10, 2, 5, 20] at times
-    # # [1, 12, 12, 40], and then another transform channel with times at [6, 30]. The first channel will need to
-    # # interpolate for the missing times of 6 and 30.
-    # unique_mask = np.empty_like(key_times, dtype=bool)
-    # # Remove keyframes at duplicate times. Blender would do this when calling .update() on the FCurve. Where there
-    # # are duplicate times, Blender only keeps the last duplicate.
-    # # Note that this may produce different results to software that uses the FBX SDK, because it does not remove
-    # # duplicates.
-    # # Because key_times is now in ascending order, unique elements are the last element and elements that are
-    # # pairwise not equal.
-    # # 1,1,1,2,3,3,4,5,5
-    # #  F,F,T,T,F,T,T,F
-    # np.not_equal(key_times[:-1], key_times[1:], out=unique_mask[:-1])
-    # # The last element is always kept:
-    # # F,F,T,T,F,T,T,F,T
-    # unique_mask[-1:] = True
-    #
-    # # # FIXME: This currently gets the first unique time, but if we were to import even duplicate times, when we
-    # # # .update() the FCurve, Blender keeps only the *last* unique time.
-    # # # Remove duplicates. Because key_times is now in ascending order, unique elements are the first element and
-    # # # elements that are pairwise not equal.
-    # # # 1,1,1,2,3,3,4,5
-    # # # F,F,T,T,F,T,T
-    # # # The first element is always unique:
-    # # # T,F,F,T,T,F,T,T
-    # # unique_mask[:1] = True
-    # # np.not_equal(key_times[:-1], key_times[1:], out=unique_mask[1:])
-    # #
-    # # #indices = np.where(unique_mask, np.arange(len(unique_mask), 0))
-    # # #indices = np.maximum.accumulate(indices)
-    # #
-    # # Use the mask to get only the times (and their values) that are strictly increasing.
-    # key_times = key_times[unique_mask]
-    # key_values = key_values[unique_mask]
-
-    # Convert from FBX timing to Blender timing.
-    # Cannot subtract in-place because this curve could be used in multiple Actions.
-    key_times = key_times - fbx_start_offset
-    # timefac is a Python float, so the new array will be a np.float64 array.
-    key_times = key_times * timefac
-    key_times += blen_start_offset
-
-    return key_times, key_values
 
 
 def blen_store_keyframes(fbx_key_times, blen_fcurve, key_values, blen_start_offset, fps, fbx_start_offset=0):
     """Set all keyframe times and values for a newly created FCurve.
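Review note on the hunk above: the two NumPy techniques the removed WIP code converges on, clamping key times to a running maximum and then keeping only the last key at each duplicated time, are easy to demonstrate in isolation. A minimal, runnable sketch with illustrative data (not code from this patch):

```python
import numpy as np

# Out-of-order, partly duplicated key times (illustrative values only).
key_times = np.array([0, 30, 20, 50, 50, 40, 60], dtype=np.int64)
key_values = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0])

# Clamp each time to the running maximum so the times become non-decreasing,
# i.e. "the maximum of itself and all elements before it".
key_times = np.maximum.accumulate(key_times)  # [0, 30, 30, 50, 50, 50, 60]

# Keep only the last key at each duplicated time, which is what Blender's
# FCurve.update() would otherwise do: a key is the last of its run when it
# differs from its successor; the final key is always kept.
unique_mask = np.empty_like(key_times, dtype=bool)
np.not_equal(key_times[:-1], key_times[1:], out=unique_mask[:-1])
unique_mask[-1:] = True  # slice assignment so empty arrays are handled too

key_times = key_times[unique_mask]    # [0, 30, 50, 60]
key_values = key_values[unique_mask]  # [1.0, 3.0, 6.0, 7.0]
```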
@@ -1010,8 +833,6 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo
     taking any pre_ and post_ matrix into account to transform from fbx into blender space.
     """
     from bpy.types import Object, PoseBone, ShapeKey, Material, Camera
-    # todo: Remove this import
-    from itertools import chain
 
     fbx_curves: dict[bytes, dict[int, list[FBXElem]]] = {}
     for curves, fbxprop in cnodes.values():
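A further note on the interpolation lines removed in the first hunk: as written they index `key_values` with itself and compute `ifac` with a denominator that mixes values and times, so they could never have run correctly. A corrected sketch of the vectorized lerp they were after, using `np.interp` as a stand-in (illustrative names and data, not this patch's final code):

```python
import numpy as np

# Strictly increasing key times and their values (illustrative data).
key_times = np.array([0.0, 10.0, 30.0])
key_values = np.array([0.0, 1.0, 5.0])
# Times at which values are needed, e.g. the key times of another curve
# animating the same transform.
sample_times = np.array([0.0, 5.0, 10.0, 20.0, 30.0])

# Equivalent to lerping each sample between its surrounding keys with
# ifac = (t - t_prev) / (t_next - t_prev); note the denominator uses
# times only, unlike the removed draft.
sampled_values = np.interp(sample_times, key_times, key_values)
# -> [0.0, 0.5, 1.0, 3.0, 5.0]
```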
@@ -1019,46 +840,10 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo
         for (fbx_acdata, _blen_data), channel in curves.values():
             channels_dict.setdefault(channel, []).append(fbx_acdata)
-
-    # fbx_curves = []
-    # used_channels = set()
-    # warn_multiple_curves_per_channel = False
-    # for curves, fbxprop in cnodes.values():
-    #     channels_dict = fbx_curves_props_channels.setdefault(fbxprop, {})
-    #     for (fbx_acdata, _blen_data), channel in curves.values():
-    #         channels_dict.setdefault(channel, []).append(fbx_acdata)
-    #
-    #     for (fbx_acdata, _blen_data), channel in curves.values():
-    #         channel_id = (fbxprop, channel)
-    #         if channel_id in used_channels:
-    #             # The FBX animation system's default implementation only uses the first curve assigned to a channel.
-    #             # Additional curves per channel are allowed by the FBX specification, but the handling of these curves
-    #             # is considered the responsibility of the application that created them. Note that each curve node is
-    #             # expected to have a unique set of channels, so these additional curves with the same channel would have
-    #             # to belong to separate curve nodes. See the FBX SDK documentation for FbxAnimCurveNode.
-    #             warn_multiple_curves_per_channel = True
-    #         else:
-    #             used_channels.add(channel_id)
-    #             fbx_curves.append((fbxprop, channel, fbx_acdata))
-    # if warn_multiple_curves_per_channel:
-    #     print("WARNING: Multiple animation curves per animated property channel were found for %s. All but the first"
-    #           "curve for each property channel has been discarded." % action.name)
 
     # Leave if no curves are attached (if a blender curve is attached to scale but without keys it defaults to 0).
     if len(fbx_curves) == 0:
         return
-
-    # todo: Remove these
-    # blen_curves = []
-    # props = []
-    # keyframes = {}
-
-    # # Add each keyframe to the keyframe dict
-    # def store_keyframe(fc, frame, value):
-    #     fc_key = (fc.data_path, fc.array_index)
-    #     if not keyframes.get(fc_key):
-    #         keyframes[fc_key] = []
-    #     keyframes[fc_key].extend((frame, value))
 
     if isinstance(item, Material):
         grpname = item.name
         props = [("diffuse_color", 3, grpname or "Diffuse Color")]
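Review note on the hunk above: the commented-out draft describes keeping only the first curve per (property, channel) pair, matching the FBX SDK's default evaluation of FbxAnimCurveNode. A self-contained sketch of that dedup under the same data layout (the function name is made up here and is not part of the patch):

```python
def first_curve_per_channel(cnodes):
    """Keep only the first FBX animation curve for each (property, channel).

    `cnodes` maps ids to (curves, fbxprop) pairs, where `curves` maps ids to
    ((fbx_acdata, blen_data), channel), mirroring the loops in the hunk above.
    """
    fbx_curves = []
    used_channels = set()
    warn_multiple_curves_per_channel = False
    for curves, fbxprop in cnodes.values():
        for (fbx_acdata, _blen_data), channel in curves.values():
            channel_id = (fbxprop, channel)
            if channel_id in used_channels:
                # FBX's default implementation only evaluates the first curve
                # assigned to a channel; handling extras is left to the
                # application that created them.
                warn_multiple_curves_per_channel = True
            else:
                used_channels.add(channel_id)
                fbx_curves.append((fbxprop, channel, fbx_acdata))
    if warn_multiple_curves_per_channel:
        print("WARNING: Multiple animation curves per animated property channel"
              " were found. All but the first curve for each channel have been"
              " discarded.")
    return fbx_curves
```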
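Closing note: the FBX-to-Blender time conversion deleted from `blen_read_single_animation_curve` survives in `blen_store_keyframes`, judging by its signature. In isolation it amounts to the following sketch, assuming `timefac` is `fps / FBX_KTIME`, where FBX_KTIME is FBX's fixed time resolution of 46186158000 ktime units per second:

```python
import numpy as np

FBX_KTIME = 46186158000  # FBX ktime units per second

def fbx_times_to_blender_frames(key_times, fps, blen_start_offset, fbx_start_offset=0):
    """Convert an array of FBX key times to Blender frame numbers."""
    timefac = fps / FBX_KTIME
    # Not done in-place: the same key_times array may be shared by curves
    # used in multiple Actions.
    frames = (key_times - fbx_start_offset) * timefac
    return frames + blen_start_offset

# One second of FBX time at 24 fps, starting at frame 1 -> [1.0, 25.0]
print(fbx_times_to_blender_frames(np.array([0, FBX_KTIME]), 24.0, 1.0))
```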