FBX IO: Speed up animation simplification using NumPy #104904

Merged
Thomas Barlow merged 17 commits from Mysteryem/blender-addons:fbx_anim_export_numpy_simplify into blender-v4.0-release 2023-10-06 17:53:02 +02:00
2 changed files with 215 additions and 85 deletions
Showing only changes of commit 51ba7d6227

io_scene_fbx/export_fbx_bin.py

@@ -1985,8 +1985,8 @@ def fbx_data_animation_elements(root, scene_data):
     fps = scene.render.fps / scene.render.fps_base

-    def keys_to_ktimes(keys):
-        return (int(v) for v in convert_sec_to_ktime_iter((f / fps for f, _v in keys)))
+    def keys_to_ktimes(keys_array):
+        return (keys_array / fps * FBX_KTIME).astype(np.int64)

     # Animation stacks.
     for astack_key, alayers, alayer_key, name, f_start, f_end in animations:
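Note: the new keys_to_ktimes receives the curve's keyframe times as a NumPy array and does the frame-to-KTime conversion in one vectorised expression. A minimal standalone sketch of that conversion (fps is taken as a parameter here rather than the enclosing function's local, and FBX_KTIME is assumed to be the addon's KTime-ticks-per-second constant, 46186158000):

import numpy as np

FBX_KTIME = 46186158000  # assumed: FBX ticks per second, as defined in the addon's fbx_utils

def keys_to_ktimes(keys_array, fps):
    # frame -> seconds (frame / fps) -> KTime ticks, truncated to int64 like the old per-value int() cast
    return (keys_array / fps * FBX_KTIME).astype(np.int64)

frames = np.arange(1.0, 11.0)             # illustrative keyframe times, in frames
ktimes = keys_to_ktimes(frames, fps=24.0)
assert ktimes.dtype == np.int64 and len(ktimes) == len(frames)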
@@ -2026,18 +2026,18 @@ def fbx_data_animation_elements(root, scene_data):
                 acn_tmpl = elem_props_template_init(scene_data.templates, b"AnimationCurveNode")
                 acn_props = elem_properties(acurvenode)

-                for fbx_item, (acurve_key, def_value, keys, _acurve_valid) in acurves.items():
+                for fbx_item, (acurve_key, def_value, (keys, values), _acurve_valid) in acurves.items():
                     elem_props_template_set(acn_tmpl, acn_props, "p_number", fbx_item.encode(),
                                             def_value, animatable=True)

                     # Only create Animation curve if needed!
-                    if keys:
+                    nbr_keys = len(keys)
+                    if nbr_keys:
                         acurve = elem_data_single_int64(root, b"AnimationCurve", get_fbx_uuid_from_key(acurve_key))
                         acurve.add_string(fbx_name_class(b"", b"AnimCurve"))
                         acurve.add_string(b"")

                         # key attributes...
-                        nbr_keys = len(keys)
                         # flags...
                         keyattr_flags = (
                             1 << 2 |  # interpolation mode, 1 = constant, 2 = linear, 3 = cubic.

@@ -2053,7 +2053,7 @@ def fbx_data_animation_elements(root, scene_data):
                         elem_data_single_float64(acurve, b"Default", def_value)
                         elem_data_single_int32(acurve, b"KeyVer", FBX_ANIM_KEY_VERSION)
                         elem_data_single_int64_array(acurve, b"KeyTime", keys_to_ktimes(keys))
-                        elem_data_single_float32_array(acurve, b"KeyValueFloat", (v for _f, v in keys))
+                        elem_data_single_float32_array(acurve, b"KeyValueFloat", values.astype(np.float32, copy=False))
                         elem_data_single_int32_array(acurve, b"KeyAttrFlags", keyattr_flags)
                         elem_data_single_float32_array(acurve, b"KeyAttrDataFloat", keyattr_datafloat)
                         elem_data_single_int32_array(acurve, b"KeyAttrRefCount", (nbr_keys,))
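The values.astype(np.float32, copy=False) call above relies on astype returning the input array itself when no dtype conversion is needed, so already-float32 data is passed through without an extra allocation; a small illustration:

import numpy as np

values_f64 = np.array([0.0, 1.0, 2.0])           # float64: a conversion (and copy) is required
values_f32 = values_f64.astype(np.float32)       # new float32 array

# With copy=False, astype returns the same object when the dtype already matches.
assert values_f32.astype(np.float32, copy=False) is values_f32
assert values_f64.astype(np.float32, copy=False) is not values_f64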
@@ -2254,36 +2254,97 @@ def fbx_animations_do(scene_data, ref_id, f_start, f_end, start_zero, objects=None, force_keep=False):
     dupli_parent_bdata = {dup.get_parent().bdata for dup in animdata_ob if dup.is_dupli}
     has_animated_duplis = bool(dupli_parent_bdata)

-    currframe = f_start
-    while currframe <= f_end:
-        real_currframe = currframe - f_start if start_zero else currframe
-        scene.frame_set(int(currframe), subframe=currframe - int(currframe))
+    # Initialize keyframe times array. Each AnimationCurveNodeWrapper will share the same instance.
+    currframes = np.arange(f_start, np.nextafter(f_end, np.inf), step=bake_step)
+    real_currframes = currframes - f_start if start_zero else currframes
+
+    # Get all animated values
+    def frame_values_gen():
+        # Iterate through each frame and yield the values for that frame.
+        int_currframes = currframes.astype(int)
+        subframes = currframes - int_currframes
+        for real_currframe, int_currframe, subframe in zip(real_currframes.data, int_currframes.data, subframes.data):
+            scene.frame_set(int_currframe, subframe=subframe)

             if has_animated_duplis:
-            # Changing the scene's frame invalidates existing dupli instances. To get the updated matrices of duplis for
-            # this frame, we must get the duplis from the depsgraph again.
+                # Changing the scene's frame invalidates existing dupli instances. To get the updated matrices of duplis
+                # for this frame, we must get the duplis from the depsgraph again.
                 for dup in depsgraph.object_instances:
                     if (parent := dup.parent) and parent.original in dupli_parent_bdata:
                         # ObjectWrapper caches its instances. Attempting to create a new instance updates the existing
                         # ObjectWrapper instance with the current frame's matrix and then returns the existing instance.
                         ObjectWrapper(dup)

-        for ob_obj, (anim_loc, anim_rot, anim_scale) in animdata_ob.items():
+            for ob_obj in animdata_ob:
                 # We compute baked loc/rot/scale for all objects (rot being euler-compat with previous value!).
                 p_rot = p_rots.get(ob_obj, None)
                 loc, rot, scale, _m, _mr = ob_obj.fbx_object_tx(scene_data, rot_euler_compat=p_rot)
                 p_rots[ob_obj] = rot
-            anim_loc.add_keyframe(real_currframe, loc)
-            anim_rot.add_keyframe(real_currframe, tuple(convert_rad_to_deg_iter(rot)))
-            anim_scale.add_keyframe(real_currframe, scale)
+                yield from loc
+                yield from rot
+                yield from scale
             for anim_shape, me, shape in animdata_shapes.values():
-            anim_shape.add_keyframe(real_currframe, (shape.value * 100.0,))
+                yield shape.value
             for anim_camera_lens, anim_camera_focus_distance, camera in animdata_cameras.values():
-            anim_camera_lens.add_keyframe(real_currframe, (camera.lens,))
-            anim_camera_focus_distance.add_keyframe(real_currframe, (camera.dof.focus_distance * 1000 * gscale,))
-        currframe += bake_step
+                yield camera.lens
+                yield camera.dof.focus_distance
+
+    # Calculating the total expected number of values reduces memory allocations while iterating and ensures the array
+    # ends up the size we're expecting.
+    num_ob_loc_values = num_ob_rot_values = num_ob_scale_values = 3
+    num_values_per_ob = num_ob_loc_values + num_ob_rot_values + num_ob_scale_values
+    num_ob_values = len(animdata_ob) * num_values_per_ob
+    num_shape_values = len(animdata_shapes)
+    num_values_per_camera = 2
+    num_camera_values = len(animdata_cameras) * num_values_per_camera
+    num_values_per_frame = num_ob_values + num_shape_values + num_camera_values
+    num_frames = len(real_currframes)
+    total_num_values = num_frames * num_values_per_frame
+
+    all_values = np.fromiter(frame_values_gen(), dtype=np.float64, count=total_num_values)

     scene.frame_set(back_currframe, subframe=0.0)

+    # View as each column being the values for a single frame and each row being all values for a single property in a
+    # curve.
+    all_values = all_values.reshape(num_frames, -1).T
+
+    # Split into views of the arrays for each curve type.
+    split_at = [num_ob_values, num_shape_values, num_camera_values]
+    # The last value isn't needed, because the last split is assumed to go to the end of the array.
+    split_at = split_at[:-1]
+    # For uneven splits, np.split takes indices to split at, which can be acquired through a cumulative sum across the
+    # list.
+    split_at = np.cumsum(split_at)
+    all_ob_values, all_shape_key_values, all_camera_values = np.split(all_values, split_at)
+
+    # Set location/rotation/scale curves
+    # Further split into views of the arrays for each object.
+    num_animdata_ob = len(animdata_ob)
+    all_ob_values = np.split(all_ob_values, num_animdata_ob) if num_animdata_ob else ()
+    for (anim_loc, anim_rot, anim_scale), ob_values in zip(animdata_ob.values(), all_ob_values):
+        # Further split into views of the location, rotation and scaling arrays.
+        loc_xyz, rot_xyz, sca_xyz = np.split(ob_values, 3)
+        # In-place convert to degrees.
+        np.rad2deg(rot_xyz, out=rot_xyz)
+
+        anim_loc.set_keyframes(real_currframes, loc_xyz)
+        anim_rot.set_keyframes(real_currframes, rot_xyz)
+        anim_scale.set_keyframes(real_currframes, sca_xyz)
+
+    # Set shape key curves
+    for (anim_shape, _me, _shape), shape_key_values in zip(animdata_shapes.values(), all_shape_key_values):
+        # In-place convert from Blender Shape Key Value to FBX Deform Percent.
+        shape_key_values *= 100.0
+        anim_shape.set_keyframes(real_currframes, shape_key_values)
+
+    # Set camera curves
+    # Further split into views of the arrays for each camera.
+    num_animdata_cameras = len(animdata_cameras)
+    all_camera_values = np.split(all_camera_values, num_animdata_cameras) if num_animdata_cameras else ()
+    for (anim_camera_lens, anim_camera_focus_distance, camera), camera_values in zip(animdata_cameras.values(), all_camera_values):
+        lens_values, focus_distance_values = camera_values
+        # In-place convert from Blender to FBX
+        focus_distance_values *= (1000 * gscale)
+        anim_camera_lens.set_keyframes(real_currframes, lens_values)
+        anim_camera_focus_distance.set_keyframes(real_currframes, focus_distance_values)
+
     animations = {}

     # And now, produce final data (usable by FBX export code)
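A simplified, self-contained model of the baking layout introduced above, with made-up counts and random stand-in values (frame_values_gen here only mimics the real generator, which steps the scene to each frame):

import numpy as np

num_frames = 5
num_objects = 2
values_per_object = 9                       # 3 loc + 3 rot + 3 scale
num_values_per_frame = num_objects * values_per_object

def frame_values_gen():
    # Stand-in for evaluating the scene at each frame and yielding every animated float for that frame.
    for _frame in range(num_frames):
        yield from np.random.rand(num_values_per_frame)

# One flat array, sized up front so np.fromiter never has to grow its buffer.
all_values = np.fromiter(frame_values_gen(), dtype=np.float64,
                         count=num_frames * num_values_per_frame)

# Rows become per-property curves, columns become frames.
all_values = all_values.reshape(num_frames, -1).T

# np.split with a count (or cumulative offsets) yields views per object, then per channel.
per_object = np.split(all_values, num_objects)
loc, rot, sca = np.split(per_object[0], 3)
assert loc.shape == (3, num_frames)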
@@ -2848,8 +2909,8 @@ def fbx_data_from_scene(scene, depsgraph, settings):
         for _alayer_key, alayer in astack.values():
             for _acnode_key, acnode, _acnode_name in alayer.values():
                 nbr_acnodes += 1
-                for _acurve_key, _dval, acurve, acurve_valid in acnode.values():
-                    if acurve:
+                for _acurve_key, _dval, (acurve_keys, acurve_values), acurve_valid in acnode.values():
+                    if len(acurve_keys):
                         nbr_acurves += 1

     templates[b"AnimationStack"] = fbx_template_def_animstack(scene, settings, nbr_users=nbr_astacks)
@@ -2983,8 +3044,8 @@ def fbx_data_from_scene(scene, depsgraph, settings):
                 connections.append((b"OO", acurvenode_id, alayer_id, None))
                 # Animcurvenode -> object property.
                 connections.append((b"OP", acurvenode_id, elem_id, fbx_prop.encode()))
-                for fbx_item, (acurve_key, default_value, acurve, acurve_valid) in acurves.items():
-                    if acurve:
+                for fbx_item, (acurve_key, default_value, (acurve_keys, acurve_values), acurve_valid) in acurves.items():
+                    if len(acurve_keys):
                         # Animcurve -> Animcurvenode.
                         connections.append((b"OP", get_fbx_uuid_from_key(acurve_key), acurvenode_id, fbx_item.encode()))
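For clarity: the per-curve keyframe payload threaded through these two loops is now a pair of parallel NumPy arrays (as built by AnimationCurveNodeWrapper.get_final_data in the second file) instead of a list of (frame, value) tuples, which is why the old truthiness checks became explicit len() checks; an illustrative sketch:

import numpy as np

# Old layout (per curve): list of (frame, value) pairs.
keys_old = [(1.0, 0.0), (2.0, 0.5), (3.0, 1.0)]

# New layout (per curve): parallel keyframe-time and value arrays.
acurve_keys, acurve_values = np.array([1.0, 2.0, 3.0]), np.array([0.0, 0.5, 1.0])

# A NumPy array with more than one element has no unambiguous truth value,
# hence `if len(acurve_keys):` rather than `if acurve_keys:`.
assert len(acurve_keys) == len(acurve_values) == 3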

io_scene_fbx/fbx_utils.py

@@ -1234,8 +1234,10 @@ class AnimationCurveNodeWrapper:
     and easy API to handle those.
     """
     __slots__ = (
-        'elem_keys', '_keys', 'default_values', 'fbx_group', 'fbx_gname', 'fbx_props',
-        'force_keying', 'force_startend_keying')
+        'elem_keys', 'default_values', 'fbx_group', 'fbx_gname', 'fbx_props',
+        'force_keying', 'force_startend_keying',
+        '_frame_times_array', '_frame_values_array', '_frame_write_mask_array',
+    )

     kinds = {
         'LCL_TRANSLATION': ("Lcl Translation", "T", ("X", "Y", "Z")),
@@ -1254,7 +1256,9 @@ class AnimationCurveNodeWrapper:
         self.fbx_props = [self.kinds[kind][2]]
         self.force_keying = force_keying
         self.force_startend_keying = force_startend_keying
-        self._keys = []  # (frame, values, write_flags)
+        self._frame_times_array = None
+        self._frame_values_array = None
+        self._frame_write_mask_array = None
         if default_values is not ...:
             assert(len(default_values) == len(self.fbx_props[0]))
             self.default_values = default_values
@@ -1263,7 +1267,7 @@ class AnimationCurveNodeWrapper:

     def __bool__(self):
         # We are 'True' if we do have some validated keyframes...
-        return bool(self._keys) and (True in ((True in k[2]) for k in self._keys))
+        return self._frame_write_mask_array is not None and bool(np.any(self._frame_write_mask_array))

     def add_group(self, elem_key, fbx_group, fbx_gname, fbx_props):
         """
@@ -1276,19 +1280,30 @@ class AnimationCurveNodeWrapper:
         self.fbx_gname.append(fbx_gname)
         self.fbx_props.append(fbx_props)

-    def add_keyframe(self, frame, values):
+    def set_keyframes(self, keyframe_times, keyframe_values):
         """
-        Add a new keyframe to all curves of the group.
+        Set all keyframe times and values of the group.
+        Values can be a 2D array where each row is a separate curve.
         """
-        assert(len(values) == len(self.fbx_props[0]))
-        self._keys.append((frame, values, [True] * len(values)))  # write everything by default.
+        # View 1D keyframe_values as 2D with a single row, so that the same iterative code can be used for both 1D and
+        # 2D inputs.
+        if len(keyframe_values.shape) == 1:
+            keyframe_values = keyframe_values[np.newaxis]
+        # There must be a time for each column of values.
+        assert(len(keyframe_times) == keyframe_values.shape[1])
+        # There must be as many rows of values as there are properties.
+        assert(len(self.fbx_props[0]) == len(keyframe_values))
+        write_mask = np.full_like(keyframe_values, True, dtype=bool)  # write everything by default
+        self._frame_times_array = keyframe_times
+        self._frame_values_array = keyframe_values
+        self._frame_write_mask_array = write_mask

     def simplify(self, fac, step, force_keep=False):
         """
         Simplifies sampled curves by only enabling samples when:
             * their values relatively differ from the previous sample ones.
         """
-        if not self._keys:
+        if self._frame_times_array is None:
             return

         if fac == 0.0:
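A small sketch of the shapes set_keyframes expects, with illustrative sizes (anim_loc and anim_shape stand for wrapper instances from the export code, so their calls are left commented out):

import numpy as np

num_frames = 4
keyframe_times = np.arange(num_frames, dtype=np.float64)

# A 3-property group (e.g. Lcl Translation): one row of values per property.
loc_values = np.zeros((3, num_frames))
# anim_loc.set_keyframes(keyframe_times, loc_values)

# A single-property group (e.g. a shape key): a 1D array is accepted and viewed
# as a single row internally via keyframe_values[np.newaxis].
shape_values = np.zeros(num_frames)
assert shape_values[np.newaxis].shape == (1, num_frames)
# anim_shape.set_keyframes(keyframe_times, shape_values)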
@@ -1297,36 +1312,87 @@ class AnimationCurveNodeWrapper:
         # So that, with default factor and step values (1), we get:
         min_reldiff_fac = fac * 1.0e-3  # min relative value evolution: 0.1% of current 'order of magnitude'.
         min_absdiff_fac = 0.1  # A tenth of reldiff...

-        keys = self._keys
-        p_currframe, p_key, p_key_write = keys[0]
-        p_keyed = list(p_key)
-        are_keyed = [False] * len(p_key)
-        for currframe, key, key_write in keys:
-            for idx, (val, p_val) in enumerate(zip(key, p_key)):
-                key_write[idx] = False
-                p_keyedval = p_keyed[idx]
-                if val == p_val:
-                    # Never write keyframe when value is exactly the same as prev one!
-                    continue
+        for values, write_mask in zip(self._frame_values_array, self._frame_write_mask_array):
+            # Initialise to no frames written.
+            write_mask[:] = False
+
+            # Create views of the 'previous' and 'current'
+            p_key_write_mask = write_mask[:-1]
+            key_write_mask = write_mask[1:]
+            p_val = values[:-1]
+            val = values[1:]
+
+            abs_values = np.abs(values)
+            p_val_abs = abs_values[:-1]
+            val_abs = abs_values[1:]
+
             # This is contracted form of relative + absolute-near-zero difference:
             #     absdiff = abs(a - b)
             #     if absdiff < min_reldiff_fac * min_absdiff_fac:
             #         return False
             #     return (absdiff / ((abs(a) + abs(b)) / 2)) > min_reldiff_fac
             # Note that we ignore the '/ 2' part here, since it's not much significant for us.
-                if abs(val - p_val) > (min_reldiff_fac * max(abs(val) + abs(p_val), min_absdiff_fac)):
-                    # If enough difference from previous sampled value, key this value *and* the previous one!
-                    key_write[idx] = True
-                    p_key_write[idx] = True
-                    p_keyed[idx] = val
-                    are_keyed[idx] = True
-                elif abs(val - p_keyedval) > (min_reldiff_fac * max((abs(val) + abs(p_keyedval)), min_absdiff_fac)):
-                    # Else, if enough difference from previous keyed value, key this value only!
-                    key_write[idx] = True
-                    p_keyed[idx] = val
-                    are_keyed[idx] = True
-            p_currframe, p_key, p_key_write = currframe, key, key_write
+            enough_diff_prev_sampled_mask = (
+                np.abs(val - p_val) > (min_reldiff_fac * np.maximum(val_abs + p_val_abs, min_absdiff_fac))
+            )
+            # If enough difference from previous sampled value, key this value *and* the previous one!
+            # Unless it is forcefully keyed later, this is the only way that the first value can be keyed.
+            p_key_write_mask[enough_diff_prev_sampled_mask] = True
+            key_write_mask[enough_diff_prev_sampled_mask] = True
+
+            # The other case where we key a value is if there is enough difference between it and the previous keyed
+            # value.
+            # Values that equal their previous value are skipped and the remaining values to check are those which are
+            # currently not keyed
+            not_keyed_mask = ~key_write_mask
+            check_diff_mask = np.logical_and(not_keyed_mask, p_val != val)
+            val_check_idx = np.flatnonzero(check_diff_mask)
+            val_check = val[val_check_idx]
+            val_abs_check = val_abs[val_check_idx]
+
+            # For each frame, get the index of the previous keyed value.
+            prev_keyframe_indices = np.arange(1, len(values))
+            # The first 'previous keyframe' defaults to values[0], even if it's not actually keyed.
+            prev_keyframe_indices[not_keyed_mask] = 0
+            # Accumulative maximum fills in the zeroed indices with the closest previous non-zero index.
+            prev_keyframe_indices = np.maximum.accumulate(prev_keyframe_indices)
+            # Extract only the indices that need checking.
+            prev_keyframe_indices = prev_keyframe_indices[val_check_idx]
+            p_kf_vals = values[prev_keyframe_indices]
+            p_kf_vals_abs = np.abs(p_kf_vals)
+
+            # We check our relative + absolute-near-zero difference again, but against the previous keyed value this
+            # time.
+            enough_diff_prev_keyed_mask = (
+                np.abs(val_check - p_kf_vals)
+                > (min_reldiff_fac * np.maximum(val_abs_check + p_kf_vals_abs, min_absdiff_fac))
+            )
+
+            if np.any(enough_diff_prev_keyed_mask):
+                # If there are any that are different enough from the previous keyed value, then we have to check them
+                # all iteratively because keying a new value can change the previous keyed value of some elements, which
+                # can change whether a value is different enough from its previous keyed value.
+                last_new_kf_idx = -1
+                last_new_kf_val = -1
+                last_new_kf_val_abs = -1
+                # Accessing .data, the memoryview of the array, iteratively or by individual index is faster than doing
+                # the same with the array itself.
+                key_write_mv = key_write_mask.data
+                zipped = zip(val_check_idx.data, val_check.data, val_abs_check.data, prev_keyframe_indices.data,
+                             enough_diff_prev_keyed_mask.data)
+                for cur_idx, cur_val, abs_cur_val, p_kf_idx, enough_diff in zipped:
+                    if last_new_kf_idx > p_kf_idx:
+                        # The previous keyframe is new and was not included when enough_diff_prev_keyed_mask was
+                        # calculated, so whether the current value is different enough from the previous keyframe needs
+                        # to be calculated.
+                        # Check if the relative + absolute-near-zero difference is enough to key this frame.
+                        enough_diff = (abs(cur_val - last_new_kf_val)
+                                       > (min_reldiff_fac * max(abs_cur_val + last_new_kf_val_abs, min_absdiff_fac)))
+                    if enough_diff:
+                        # The current index needs to be keyed.
+                        last_new_kf_idx = cur_idx
+                        last_new_kf_val = cur_val
+                        last_new_kf_val_abs = abs_cur_val
+                        key_write_mv[cur_idx] = True

         # If we write nothing (action doing nothing) and are in 'force_keep' mode, we key everything! :P
         # See T41766.
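The np.maximum.accumulate step above is the heart of the vectorised 'previous keyed value' lookup; a standalone sketch with an illustrative write mask:

import numpy as np

write_mask = np.array([False, True, False, False, True, False, False])
key_write_mask = write_mask[1:]                 # masks for values[1:]
not_keyed_mask = ~key_write_mask

# Candidate index for each values[1:] entry is its own index into `values`...
prev_keyframe_indices = np.arange(1, len(write_mask))
# ...zeroed where that entry is not keyed (index 0 is the fallback 'previous keyframe')...
prev_keyframe_indices[not_keyed_mask] = 0
# ...then a cumulative maximum forward-fills every zero with the closest earlier keyed index.
prev_keyframe_indices = np.maximum.accumulate(prev_keyframe_indices)
print(prev_keyframe_indices)                    # [1 1 1 4 4 4]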
@@ -1334,25 +1400,27 @@ class AnimationCurveNodeWrapper:
         # are not animated, but are children of animated ones, so added an option to systematically force writing
         # one key in this case.
         # See T41719, T41605, T41254...
-        if self.force_keying or (force_keep and not self):
-            are_keyed[:] = [True] * len(are_keyed)
+        if self.force_keying or (force_keep and not np.any(self._frame_write_mask_array)):
+            are_keyed = [True] * len(self._frame_write_mask_array)
+        else:
+            are_keyed = np.any(self._frame_write_mask_array, axis=1)

         # If we did key something, ensure first and last sampled values are keyed as well.
         if self.force_startend_keying:
-            for idx, is_keyed in enumerate(are_keyed):
+            for is_keyed, write_mask in zip(are_keyed, self._frame_write_mask_array):
                 if is_keyed:
-                    keys[0][2][idx] = keys[-1][2][idx] = True
+                    write_mask[:1] = True
+                    write_mask[-1:] = True

     def get_final_data(self, scene, ref_id, force_keep=False):
         """
         Yield final anim data for this 'curvenode' (for all curvenodes defined).
         force_keep is to force to keep a curve even if it only has one valid keyframe.
         """
-        curves = [[] for k in self._keys[0][1]]
-        for currframe, key, key_write in self._keys:
-            for curve, val, wrt in zip(curves, key, key_write):
-                if wrt:
-                    curve.append((currframe, val))
+        curves = [
+            (self._frame_times_array[write_mask], values[write_mask])
+            for values, write_mask in zip(self._frame_values_array, self._frame_write_mask_array)
+        ]

         force_keep = force_keep or self.force_keying
         for elem_key, fbx_group, fbx_gname, fbx_props in \
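get_final_data now extracts the kept keyframes with boolean-mask indexing, producing one (times, values) pair per curve; a minimal illustration with made-up data:

import numpy as np

frame_times = np.array([0.0, 1.0, 2.0, 3.0])
values = np.array([0.0, 0.0, 1.0, 1.0])
write_mask = np.array([True, False, True, True])

curve = (frame_times[write_mask], values[write_mask])
assert np.array_equal(curve[0], [0.0, 2.0, 3.0])
assert np.array_equal(curve[1], [0.0, 1.0, 1.0])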
@@ -1363,8 +1431,9 @@ class AnimationCurveNodeWrapper:
                 fbx_item = FBX_ANIM_PROPSGROUP_NAME + "|" + fbx_item
                 curve_key = get_blender_anim_curve_key(scene, ref_id, elem_key, fbx_group, fbx_item)
                 # (curve key, default value, keyframes, write flag).
-                group[fbx_item] = (curve_key, def_val, c,
-                                   True if (len(c) > 1 or (len(c) > 0 and force_keep)) else False)
+                times = c[0]
+                write_flag = len(times) > (0 if force_keep else 1)
+                group[fbx_item] = (curve_key, def_val, c, write_flag)
             yield elem_key, group_key, group, fbx_group, fbx_gname
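The new write_flag expression is equivalent to the old conditional; a quick exhaustive check:

# old: True if (len(c) > 1 or (len(c) > 0 and force_keep)) else False
# new: len(times) > (0 if force_keep else 1)
for force_keep in (False, True):
    for nbr_keys in (0, 1, 2):
        old = nbr_keys > 1 or (nbr_keys > 0 and force_keep)
        new = nbr_keys > (0 if force_keep else 1)
        assert old == new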