Fix #104714: Missing shape keys in FBX export when original mesh data cannot be used #104890
@@ -5,7 +5,7 @@
 bl_info = {
     "name": "FBX format",
     "author": "Campbell Barton, Bastien Montagne, Jens Restemeier, @Mysteryem",
-    "version": (5, 7, 4),
+    "version": (5, 8, 0),
     "blender": (3, 6, 0),
     "location": "File > Import-Export",
     "description": "FBX IO meshes, UVs, vertex colors, materials, textures, cameras, lamps and actions",
@@ -552,14 +552,20 @@ def fbx_data_element_custom_properties(props, bid):
 
 def fbx_data_empty_elements(root, empty, scene_data):
     """
-    Write the Empty data block (you can control its FBX datatype with the 'fbx_type' string custom property).
+    Write the Empty data block (you can control its FBX datatype with the 'fbx_type' string custom property) or Armature
+    NodeAttribute.
     """
     empty_key = scene_data.data_empties[empty]
 
     null = elem_data_single_int64(root, b"NodeAttribute", get_fbx_uuid_from_key(empty_key))
     null.add_string(fbx_name_class(empty.name.encode(), b"NodeAttribute"))
-    val = empty.bdata.get('fbx_type', None)
-    null.add_string(val.encode() if val and isinstance(val, str) else b"Null")
+    bdata = empty.bdata
+    if bdata.type == 'EMPTY':
+        val = bdata.get('fbx_type', None)
+        fbx_type = val.encode() if val and isinstance(val, str) else b"Null"
+    else:
+        fbx_type = b"Null"
+    null.add_string(fbx_type)
 
     elem_data_single_string(null, b"TypeFlags", b"Null")
 
@@ -567,7 +573,10 @@ def fbx_data_empty_elements(root, empty, scene_data):
     props = elem_properties(null)
     elem_props_template_finalize(tmpl, props)
 
-    # No custom properties, already saved with object (Model).
+    # Empty/Armature Object custom properties have already been saved with the Model.
+    # Only Armature data custom properties need to be saved here with the NodeAttribute.
+    if bdata.type == 'ARMATURE':
+        fbx_data_element_custom_properties(props, bdata.data)
 
 
 def fbx_data_light_elements(root, lamp, scene_data):
@@ -1981,12 +1990,6 @@ def fbx_data_animation_elements(root, scene_data):
     animations = scene_data.animations
     if not animations:
         return
-    scene = scene_data.scene
-
-    fps = scene.render.fps / scene.render.fps_base
-
-    def keys_to_ktimes(keys):
-        return (int(v) for v in convert_sec_to_ktime_iter((f / fps for f, _v in keys)))
 
     # Animation stacks.
     for astack_key, alayers, alayer_key, name, f_start, f_end in animations:
@@ -2026,18 +2029,18 @@ def fbx_data_animation_elements(root, scene_data):
                 acn_tmpl = elem_props_template_init(scene_data.templates, b"AnimationCurveNode")
                 acn_props = elem_properties(acurvenode)
 
-                for fbx_item, (acurve_key, def_value, keys, _acurve_valid) in acurves.items():
+                for fbx_item, (acurve_key, def_value, (keys, values), _acurve_valid) in acurves.items():
                     elem_props_template_set(acn_tmpl, acn_props, "p_number", fbx_item.encode(),
                                             def_value, animatable=True)
 
                     # Only create Animation curve if needed!
-                    if keys:
+                    nbr_keys = len(keys)
+                    if nbr_keys:
                         acurve = elem_data_single_int64(root, b"AnimationCurve", get_fbx_uuid_from_key(acurve_key))
                         acurve.add_string(fbx_name_class(b"", b"AnimCurve"))
                         acurve.add_string(b"")
 
                         # key attributes...
-                        nbr_keys = len(keys)
                         # flags...
                         keyattr_flags = (
                             1 << 2 |  # interpolation mode, 1 = constant, 2 = linear, 3 = cubic.
@@ -2052,8 +2055,8 @@ def fbx_data_animation_elements(root, scene_data):
                         # And now, the *real* data!
                         elem_data_single_float64(acurve, b"Default", def_value)
                         elem_data_single_int32(acurve, b"KeyVer", FBX_ANIM_KEY_VERSION)
-                        elem_data_single_int64_array(acurve, b"KeyTime", keys_to_ktimes(keys))
-                        elem_data_single_float32_array(acurve, b"KeyValueFloat", (v for _f, v in keys))
+                        elem_data_single_int64_array(acurve, b"KeyTime", astype_view_signedness(keys, np.int64))
+                        elem_data_single_float32_array(acurve, b"KeyValueFloat", values.astype(np.float32, copy=False))
                         elem_data_single_int32_array(acurve, b"KeyAttrFlags", keyattr_flags)
                         elem_data_single_float32_array(acurve, b"KeyAttrDataFloat", keyattr_datafloat)
                         elem_data_single_int32_array(acurve, b"KeyAttrRefCount", (nbr_keys,))
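
Aside (illustration only, not part of the diff): the removed `keys_to_ktimes` generator and the new array path encode the same conversion, frames -> seconds -> FBX ticks. A minimal standalone sketch, assuming the standard FBX tick rate of 46,186,158,000 units per second (the `FBX_KTIME` constant in fbx_utils) and a hypothetical `frames_to_ktimes` name:

    import numpy as np

    FBX_KTIME = 46186158000  # FBX time units per second (assumed from fbx_utils)

    def frames_to_ktimes(frames, fps):
        # Vectorized: seconds = frames / fps, scaled to FBX ticks, truncated to int64,
        # matching the KeyTime array written into an AnimationCurve element.
        return (np.asarray(frames, dtype=np.float64) / fps * FBX_KTIME).astype(np.int64)
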
@@ -2254,75 +2257,128 @@ def fbx_animations_do(scene_data, ref_id, f_start, f_end, start_zero, objects=None):
     dupli_parent_bdata = {dup.get_parent().bdata for dup in animdata_ob if dup.is_dupli}
     has_animated_duplis = bool(dupli_parent_bdata)
 
-    currframe = f_start
-    while currframe <= f_end:
-        real_currframe = currframe - f_start if start_zero else currframe
-        scene.frame_set(int(currframe), subframe=currframe - int(currframe))
-
-        if has_animated_duplis:
-            # Changing the scene's frame invalidates existing dupli instances. To get the updated matrices of duplis for
-            # this frame, we must get the duplis from the depsgraph again.
-            for dup in depsgraph.object_instances:
-                if (parent := dup.parent) and parent.original in dupli_parent_bdata:
-                    # ObjectWrapper caches its instances. Attempting to create a new instance updates the existing
-                    # ObjectWrapper instance with the current frame's matrix and then returns the existing instance.
-                    ObjectWrapper(dup)
-        for ob_obj, (anim_loc, anim_rot, anim_scale) in animdata_ob.items():
-            # We compute baked loc/rot/scale for all objects (rot being euler-compat with previous value!).
-            p_rot = p_rots.get(ob_obj, None)
-            loc, rot, scale, _m, _mr = ob_obj.fbx_object_tx(scene_data, rot_euler_compat=p_rot)
-            p_rots[ob_obj] = rot
-            anim_loc.add_keyframe(real_currframe, loc)
-            anim_rot.add_keyframe(real_currframe, tuple(convert_rad_to_deg_iter(rot)))
-            anim_scale.add_keyframe(real_currframe, scale)
-        for anim_shape, me, shape in animdata_shapes.values():
-            anim_shape.add_keyframe(real_currframe, (shape.value * 100.0,))
-        for anim_camera_lens, anim_camera_focus_distance, camera in animdata_cameras.values():
-            anim_camera_lens.add_keyframe(real_currframe, (camera.lens,))
-            anim_camera_focus_distance.add_keyframe(real_currframe, (camera.dof.focus_distance * 1000 * gscale,))
-        currframe += bake_step
+    # Initialize keyframe times array. Each AnimationCurveNodeWrapper will share the same instance.
+    # `np.arange` excludes the `stop` argument like when using `range`, so we use np.nextafter to get the next
+    # representable value after f_end and use that as the `stop` argument instead.
+    currframes = np.arange(f_start, np.nextafter(f_end, np.inf), step=bake_step)
+
+    # Convert from Blender time to FBX time.
+    fps = scene.render.fps / scene.render.fps_base
+    real_currframes = currframes - f_start if start_zero else currframes
+    real_currframes = (real_currframes / fps * FBX_KTIME).astype(np.int64)
+
+    # Generator that yields the animated values of each frame in order.
+    def frame_values_gen():
+        # Precalculate integer frames and subframes.
+        int_currframes = currframes.astype(int)
+        subframes = currframes - int_currframes
+
+        # Create simpler iterables that return only the values we care about.
+        animdata_shapes_only = [shape for _anim_shape, _me, shape in animdata_shapes.values()]
+        animdata_cameras_only = [camera for _anim_camera_lens, _anim_camera_focus_distance, camera
+                                 in animdata_cameras.values()]
+        # Previous frame's rotation for each object in animdata_ob, this will be updated each frame.
+        animdata_ob_p_rots = p_rots.values()
+
+        # Iterate through each frame and yield the values for that frame.
+        # Iterating .data, the memoryview of an array, is faster than iterating the array directly.
+        for int_currframe, subframe in zip(int_currframes.data, subframes.data):
+            scene.frame_set(int_currframe, subframe=subframe)
+
+            if has_animated_duplis:
+                # Changing the scene's frame invalidates existing dupli instances. To get the updated matrices of duplis
+                # for this frame, we must get the duplis from the depsgraph again.
+                for dup in depsgraph.object_instances:
+                    if (parent := dup.parent) and parent.original in dupli_parent_bdata:
+                        # ObjectWrapper caches its instances. Attempting to create a new instance updates the existing
+                        # ObjectWrapper instance with the current frame's matrix and then returns the existing instance.
+                        ObjectWrapper(dup)
+            next_p_rots = []
+            for ob_obj, p_rot in zip(animdata_ob, animdata_ob_p_rots):
+                # We compute baked loc/rot/scale for all objects (rot being euler-compat with previous value!).
+                loc, rot, scale, _m, _mr = ob_obj.fbx_object_tx(scene_data, rot_euler_compat=p_rot)
+                next_p_rots.append(rot)
+                yield from loc
+                yield from rot
+                yield from scale
+            animdata_ob_p_rots = next_p_rots
+            for shape in animdata_shapes_only:
+                yield shape.value
+            for camera in animdata_cameras_only:
+                yield camera.lens
+                yield camera.dof.focus_distance
+
+    # Providing `count` to np.fromiter pre-allocates the array, avoiding extra memory allocations while iterating.
+    num_ob_values = len(animdata_ob) * 9  # Location, rotation and scale, each of which have x, y, and z components
+    num_shape_values = len(animdata_shapes)  # Only 1 value per shape key
+    num_camera_values = len(animdata_cameras) * 2  # Focal length (`.lens`) and focus distance
+    num_values_per_frame = num_ob_values + num_shape_values + num_camera_values
+    num_frames = len(real_currframes)
+    all_values_flat = np.fromiter(frame_values_gen(), dtype=float, count=num_frames * num_values_per_frame)
+
+    # Restore the scene's current frame.
     scene.frame_set(back_currframe, subframe=0.0)
 
+    # View such that each column is all values for a single frame and each row is all values for a single curve.
+    all_values = all_values_flat.reshape(num_frames, num_values_per_frame).T
+    # Split into views of the arrays for each curve type.
+    split_at = [num_ob_values, num_shape_values, num_camera_values]
+    # For unequal sized splits, np.split takes indices to split at, which can be acquired through a cumulative sum
+    # across the list.
+    # The last value isn't needed, because the last split is assumed to go to the end of the array.
+    split_at = np.cumsum(split_at[:-1])
+    all_ob_values, all_shape_key_values, all_camera_values = np.split(all_values, split_at)
+
+    all_anims = []
+
+    # Set location/rotation/scale curves.
+    # Split into equal sized views of the arrays for each object.
+    split_into = len(animdata_ob)
+    per_ob_values = np.split(all_ob_values, split_into) if split_into > 0 else ()
+    for anims, ob_values in zip(animdata_ob.values(), per_ob_values):
+        # Split again into equal sized views of the location, rotation and scaling arrays.
+        loc_xyz, rot_xyz, sca_xyz = np.split(ob_values, 3)
+        # In-place convert from Blender rotation to FBX rotation.
+        np.rad2deg(rot_xyz, out=rot_xyz)
+
+        anim_loc, anim_rot, anim_scale = anims
+        anim_loc.set_keyframes(real_currframes, loc_xyz)
+        anim_rot.set_keyframes(real_currframes, rot_xyz)
+        anim_scale.set_keyframes(real_currframes, sca_xyz)
+        all_anims.extend(anims)
+
+    # Set shape key curves.
+    # There's only one array per shape key, so there's no need to split `all_shape_key_values`.
+    for (anim_shape, _me, _shape), shape_key_values in zip(animdata_shapes.values(), all_shape_key_values):
+        # In-place convert from Blender Shape Key Value to FBX Deform Percent.
+        shape_key_values *= 100.0
+        anim_shape.set_keyframes(real_currframes, shape_key_values)
+        all_anims.append(anim_shape)
+
+    # Set camera curves.
+    # Split into equal sized views of the arrays for each camera.
+    split_into = len(animdata_cameras)
+    per_camera_values = np.split(all_camera_values, split_into) if split_into > 0 else ()
+    zipped = zip(animdata_cameras.values(), per_camera_values)
+    for (anim_camera_lens, anim_camera_focus_distance, _camera), (lens_values, focus_distance_values) in zipped:
+        # In-place convert from Blender focus distance to FBX.
+        focus_distance_values *= (1000 * gscale)
+        anim_camera_lens.set_keyframes(real_currframes, lens_values)
+        anim_camera_focus_distance.set_keyframes(real_currframes, focus_distance_values)
+        all_anims.append(anim_camera_lens)
+        all_anims.append(anim_camera_focus_distance)
+
     animations = {}
 
     # And now, produce final data (usable by FBX export code)
-    # Objects-like loc/rot/scale...
-    for ob_obj, anims in animdata_ob.items():
-        for anim in anims:
-            anim.simplify(simplify_fac, bake_step, force_keep)
-            if not anim:
-                continue
-            for obj_key, group_key, group, fbx_group, fbx_gname in anim.get_final_data(scene, ref_id, force_keep):
-                anim_data = animations.setdefault(obj_key, ("dummy_unused_key", {}))
-                anim_data[1][fbx_group] = (group_key, group, fbx_gname)
-
-    # And meshes' shape keys.
-    for channel_key, (anim_shape, me, shape) in animdata_shapes.items():
-        final_keys = {}
-        anim_shape.simplify(simplify_fac, bake_step, force_keep)
-        if not anim_shape:
+    for anim in all_anims:
+        anim.simplify(simplify_fac, bake_step, force_keep)
+        if not anim:
             continue
-        for elem_key, group_key, group, fbx_group, fbx_gname in anim_shape.get_final_data(scene, ref_id, force_keep):
-            anim_data = animations.setdefault(elem_key, ("dummy_unused_key", {}))
+        for obj_key, group_key, group, fbx_group, fbx_gname in anim.get_final_data(scene, ref_id, force_keep):
+            anim_data = animations.setdefault(obj_key, ("dummy_unused_key", {}))
             anim_data[1][fbx_group] = (group_key, group, fbx_gname)
 
-    # And cameras' lens and focus distance keys.
-    for cam_key, (anim_camera_lens, anim_camera_focus_distance, camera) in animdata_cameras.items():
-        final_keys = {}
-        anim_camera_lens.simplify(simplify_fac, bake_step, force_keep)
-        anim_camera_focus_distance.simplify(simplify_fac, bake_step, force_keep)
-        if anim_camera_lens:
-            for elem_key, group_key, group, fbx_group, fbx_gname in \
-                    anim_camera_lens.get_final_data(scene, ref_id, force_keep):
-                anim_data = animations.setdefault(elem_key, ("dummy_unused_key", {}))
-                anim_data[1][fbx_group] = (group_key, group, fbx_gname)
-        if anim_camera_focus_distance:
-            for elem_key, group_key, group, fbx_group, fbx_gname in \
-                    anim_camera_focus_distance.get_final_data(scene, ref_id, force_keep):
-                anim_data = animations.setdefault(elem_key, ("dummy_unused_key", {}))
-                anim_data[1][fbx_group] = (group_key, group, fbx_gname)
-
     astack_key = get_blender_anim_stack_key(scene, ref_id)
     alayer_key = get_blender_anim_layer_key(scene, ref_id)
     name = (get_blenderID_name(ref_id) if ref_id else scene.name).encode()
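
Aside (illustration only): the baking strategy above is "one flat value stream, pre-allocated once, then reshaped into per-curve rows". A toy version of the same np.fromiter / reshape / transpose / np.split pipeline, with made-up sizes:

    import numpy as np

    num_frames, values_per_frame = 4, 3  # e.g. 3 curves sampled on 4 frames

    def frame_values_gen():
        for frame in range(num_frames):
            for curve in range(values_per_frame):
                yield frame * 10.0 + curve

    # `count` pre-allocates the output array, so no intermediate list is built.
    flat = np.fromiter(frame_values_gen(), dtype=float, count=num_frames * values_per_frame)
    # After the transpose, each row holds all values of one curve.
    per_curve = flat.reshape(num_frames, values_per_frame).T
    # np.split takes cut indices; np.cumsum turns group sizes into those indices.
    group_sizes = [2, 1]
    first_group, second_group = np.split(per_curve, np.cumsum(group_sizes[:-1]))
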
@@ -2871,8 +2927,8 @@ def fbx_data_from_scene(scene, depsgraph, settings):
         for _alayer_key, alayer in astack.values():
             for _acnode_key, acnode, _acnode_name in alayer.values():
                 nbr_acnodes += 1
-                for _acurve_key, _dval, acurve, acurve_valid in acnode.values():
-                    if acurve:
+                for _acurve_key, _dval, (keys, _values), acurve_valid in acnode.values():
+                    if len(keys):
                         nbr_acurves += 1
 
     templates[b"AnimationStack"] = fbx_template_def_animstack(scene, settings, nbr_users=nbr_astacks)
@@ -3006,8 +3062,8 @@ def fbx_data_from_scene(scene, depsgraph, settings):
                 connections.append((b"OO", acurvenode_id, alayer_id, None))
                 # Animcurvenode -> object property.
                 connections.append((b"OP", acurvenode_id, elem_id, fbx_prop.encode()))
-                for fbx_item, (acurve_key, default_value, acurve, acurve_valid) in acurves.items():
-                    if acurve:
+                for fbx_item, (acurve_key, default_value, (keys, values), acurve_valid) in acurves.items():
+                    if len(keys):
                         # Animcurve -> Animcurvenode.
                         connections.append((b"OP", get_fbx_uuid_from_key(acurve_key), acurvenode_id, fbx_item.encode()))
 
@@ -1234,8 +1234,10 @@ class AnimationCurveNodeWrapper:
     and easy API to handle those.
     """
     __slots__ = (
-        'elem_keys', '_keys', 'default_values', 'fbx_group', 'fbx_gname', 'fbx_props',
-        'force_keying', 'force_startend_keying')
+        'elem_keys', 'default_values', 'fbx_group', 'fbx_gname', 'fbx_props',
+        'force_keying', 'force_startend_keying',
+        '_frame_times_array', '_frame_values_array', '_frame_write_mask_array',
+    )
 
     kinds = {
         'LCL_TRANSLATION': ("Lcl Translation", "T", ("X", "Y", "Z")),
@@ -1254,7 +1256,9 @@ class AnimationCurveNodeWrapper:
         self.fbx_props = [self.kinds[kind][2]]
         self.force_keying = force_keying
         self.force_startend_keying = force_startend_keying
-        self._keys = []  # (frame, values, write_flags)
+        self._frame_times_array = None
+        self._frame_values_array = None
+        self._frame_write_mask_array = None
         if default_values is not ...:
             assert(len(default_values) == len(self.fbx_props[0]))
             self.default_values = default_values
@@ -1263,7 +1267,7 @@ class AnimationCurveNodeWrapper:
 
     def __bool__(self):
         # We are 'True' if we do have some validated keyframes...
-        return bool(self._keys) and (True in ((True in k[2]) for k in self._keys))
+        return self._frame_write_mask_array is not None and bool(np.any(self._frame_write_mask_array))
 
     def add_group(self, elem_key, fbx_group, fbx_gname, fbx_props):
         """
@@ -1276,19 +1280,31 @@ class AnimationCurveNodeWrapper:
         self.fbx_gname.append(fbx_gname)
         self.fbx_props.append(fbx_props)
 
-    def add_keyframe(self, frame, values):
+    def set_keyframes(self, keyframe_times, keyframe_values):
         """
-        Add a new keyframe to all curves of the group.
+        Set all keyframe times and values of the group.
+        Values can be a 2D array where each row is the values for a separate curve.
         """
-        assert(len(values) == len(self.fbx_props[0]))
-        self._keys.append((frame, values, [True] * len(values)))  # write everything by default.
+        # View 1D keyframe_values as 2D with a single row, so that the same code can be used for both 1D and
+        # 2D inputs.
+        if len(keyframe_values.shape) == 1:
+            keyframe_values = keyframe_values[np.newaxis]
+        # There must be a time for each column of values.
+        assert(len(keyframe_times) == keyframe_values.shape[1])
+        # There must be as many rows of values as there are properties.
+        assert(len(self.fbx_props[0]) == len(keyframe_values))
+        write_mask = np.full_like(keyframe_values, True, dtype=bool)  # write everything by default
+        self._frame_times_array = keyframe_times
+        self._frame_values_array = keyframe_values
+        self._frame_write_mask_array = write_mask
 
     def simplify(self, fac, step, force_keep=False):
         """
        Simplifies sampled curves by only enabling samples when:
        * their values relatively differ from the previous sample ones.
        """
-        if not self._keys:
+        if self._frame_times_array is None:
+            # Keyframes have not been added yet.
            return
 
        if fac == 0.0:
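
Aside (illustration only): `set_keyframes` accepts one curve as a 1D array or several curves as a 2D array; the `np.newaxis` view makes both cases uniform. A minimal sketch with made-up data:

    import numpy as np

    times = np.array([0, 10, 20], dtype=np.int64)  # keyframe times, one per frame
    values = np.array([1.0, 1.0, 2.0])             # a single curve, 1D
    values_2d = values[np.newaxis]                 # shape (1, 3): rows = curves, columns = frames
    assert values_2d.shape[1] == len(times)
    write_mask = np.full_like(values_2d, True, dtype=bool)  # everything keyed by default
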
@@ -1297,15 +1313,22 @@ class AnimationCurveNodeWrapper:
         # So that, with default factor and step values (1), we get:
         min_reldiff_fac = fac * 1.0e-3  # min relative value evolution: 0.1% of current 'order of magnitude'.
         min_absdiff_fac = 0.1  # A tenth of reldiff...
-        keys = self._keys
 
-        p_currframe, p_key, p_key_write = keys[0]
-        p_keyed = list(p_key)
-        are_keyed = [False] * len(p_key)
-        for currframe, key, key_write in keys:
+        are_keyed = []
+        for values, frame_write_mask in zip(self._frame_values_array, self._frame_write_mask_array):
+            # Initialise to no frames written.
+            frame_write_mask[:] = False
+
+            # Create views of the 'previous' and 'current' mask and values. The memoryview, .data, of each array is used
+            # for its iteration and indexing performance compared to the array.
+            key = values[1:].data
+            p_key = values[:-1].data
+            key_write = frame_write_mask[1:].data
+            p_key_write = frame_write_mask[:-1].data
+
+            p_keyedval = values[0]
+            is_keyed = False
             for idx, (val, p_val) in enumerate(zip(key, p_key)):
-                key_write[idx] = False
-                p_keyedval = p_keyed[idx]
                 if val == p_val:
                     # Never write keyframe when value is exactly the same as prev one!
                     continue
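
Aside (illustration only): the rewritten `simplify` compares each frame with its predecessor through two shifted views of the same array instead of carrying explicit "previous" state through the loop:

    import numpy as np

    values = np.array([1.0, 1.0, 2.0, 2.0])
    key, p_key = values[1:].data, values[:-1].data   # memoryviews over shifted views
    changed = [val != p_val for val, p_val in zip(key, p_key)]
    # changed == [False, True, False]: entry i compares frame i+1 against frame i
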
@@ -1319,14 +1342,14 @@ class AnimationCurveNodeWrapper:
                     # If enough difference from previous sampled value, key this value *and* the previous one!
                     key_write[idx] = True
                     p_key_write[idx] = True
-                    p_keyed[idx] = val
-                    are_keyed[idx] = True
+                    p_keyedval = val
+                    is_keyed = True
                 elif abs(val - p_keyedval) > (min_reldiff_fac * max((abs(val) + abs(p_keyedval)), min_absdiff_fac)):
                     # Else, if enough difference from previous keyed value, key this value only!
                     key_write[idx] = True
-                    p_keyed[idx] = val
-                    are_keyed[idx] = True
-            p_currframe, p_key, p_key_write = currframe, key, key_write
+                    p_keyedval = val
+                    is_keyed = True
+            are_keyed.append(is_keyed)
 
         # If we write nothing (action doing nothing) and are in 'force_keep' mode, we key everything! :P
         # See T41766.
@@ -1339,20 +1362,20 @@ class AnimationCurveNodeWrapper:
 
         # If we did key something, ensure first and last sampled values are keyed as well.
         if self.force_startend_keying:
-            for idx, is_keyed in enumerate(are_keyed):
+            for is_keyed, frame_write_mask in zip(are_keyed, self._frame_write_mask_array):
                 if is_keyed:
-                    keys[0][2][idx] = keys[-1][2][idx] = True
+                    frame_write_mask[:1] = True
+                    frame_write_mask[-1:] = True
 
     def get_final_data(self, scene, ref_id, force_keep=False):
         """
         Yield final anim data for this 'curvenode' (for all curvenodes defined).
         force_keep is to force to keep a curve even if it only has one valid keyframe.
         """
-        curves = [[] for k in self._keys[0][1]]
-        for currframe, key, key_write in self._keys:
-            for curve, val, wrt in zip(curves, key, key_write):
-                if wrt:
-                    curve.append((currframe, val))
+        curves = [
+            (self._frame_times_array[write_mask], values[write_mask])
+            for values, write_mask in zip(self._frame_values_array, self._frame_write_mask_array)
+        ]
 
         force_keep = force_keep or self.force_keying
         for elem_key, fbx_group, fbx_gname, fbx_props in \
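
Aside (illustration only): `get_final_data` now extracts the keyed frames with boolean mask indexing, yielding compact copies of just the frames whose write flag survived `simplify`:

    import numpy as np

    times = np.array([0, 10, 20, 30], dtype=np.int64)
    values = np.array([1.0, 1.0, 5.0, 5.0])
    write_mask = np.array([True, False, True, True])
    kept = (times[write_mask], values[write_mask])   # ([0, 20, 30], [1.0, 5.0, 5.0])
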
@@ -1363,8 +1386,9 @@ class AnimationCurveNodeWrapper:
                     fbx_item = FBX_ANIM_PROPSGROUP_NAME + "|" + fbx_item
                 curve_key = get_blender_anim_curve_key(scene, ref_id, elem_key, fbx_group, fbx_item)
                 # (curve key, default value, keyframes, write flag).
-                group[fbx_item] = (curve_key, def_val, c,
-                                   True if (len(c) > 1 or (len(c) > 0 and force_keep)) else False)
+                times = c[0]
+                write_flag = len(times) > (0 if force_keep else 1)
+                group[fbx_item] = (curve_key, def_val, c, write_flag)
             yield elem_key, group_key, group, fbx_group, fbx_gname
 
@@ -2826,8 +2826,13 @@ class FbxImportHelperNode:
                              elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))
 
         if settings.use_custom_props:
+            # Read Armature Object custom props from the Node
             blen_read_custom_properties(self.fbx_elem, arm, settings)
 
+            if self.fbx_data_elem:
+                # Read Armature Data custom props from the NodeAttribute
+                blen_read_custom_properties(self.fbx_data_elem, arm_data, settings)
+
         # instance in scene
         view_layer.active_layer_collection.collection.objects.link(arm)
         arm.select_set(True)
@@ -5,7 +5,7 @@
 bl_info = {
     'name': 'glTF 2.0 format',
     'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
-    "version": (4, 0, 15),
+    "version": (4, 0, 17),
    'blender': (4, 0, 0),
    'location': 'File > Import-Export',
    'description': 'Import-Export as glTF 2.0',
@@ -15,9 +15,9 @@ def export_clearcoat(blender_material, export_settings):
     clearcoat_extension = {}
     clearcoat_roughness_slots = ()
 
-    clearcoat_socket = gltf2_blender_get.get_socket(blender_material, 'Clearcoat')
-    clearcoat_roughness_socket = gltf2_blender_get.get_socket(blender_material, 'Clearcoat Roughness')
-    clearcoat_normal_socket = gltf2_blender_get.get_socket(blender_material, 'Clearcoat Normal')
+    clearcoat_socket = gltf2_blender_get.get_socket(blender_material, 'Coat')
+    clearcoat_roughness_socket = gltf2_blender_get.get_socket(blender_material, 'Coat Roughness')
+    clearcoat_normal_socket = gltf2_blender_get.get_socket(blender_material, 'Coat Normal')
 
     if isinstance(clearcoat_socket, bpy.types.NodeSocket) and not clearcoat_socket.is_linked:
         clearcoat_extension['clearcoatFactor'] = clearcoat_socket.default_value
@@ -228,15 +228,15 @@ def __get_image_data_mapping(sockets, default_sockets, results, export_settings):
         # some sockets need channel rewriting (gltf pbr defines fixed channels for some attributes)
         if socket.name == 'Metallic':
             dst_chan = Channel.B
-        elif socket.name == 'Roughness':
+        elif socket.name == 'Roughness' and socket.node.type == "BSDF_PRINCIPLED":
             dst_chan = Channel.G
         elif socket.name == 'Occlusion':
             dst_chan = Channel.R
         elif socket.name == 'Alpha':
             dst_chan = Channel.A
-        elif socket.name == 'Clearcoat':
+        elif socket.name == 'Coat':
             dst_chan = Channel.R
-        elif socket.name == 'Clearcoat Roughness':
+        elif socket.name == 'Coat Roughness':
             dst_chan = Channel.G
         elif socket.name == 'Thickness':  # For KHR_materials_volume
             dst_chan = Channel.G
@@ -130,19 +130,19 @@ def pbr_metallic_roughness(mh: MaterialHelper):
     clearcoat(
         mh,
         location=locs['clearcoat'],
-        clearcoat_socket=pbr_node.inputs['Clearcoat'],
+        clearcoat_socket=pbr_node.inputs['Coat'],
     )
 
     clearcoat_roughness(
         mh,
         location=locs['clearcoat_roughness'],
-        roughness_socket=pbr_node.inputs['Clearcoat Roughness'],
+        roughness_socket=pbr_node.inputs['Coat Roughness'],
     )
 
     clearcoat_normal(
         mh,
         location=locs['clearcoat_normal'],
-        normal_socket=pbr_node.inputs['Clearcoat Normal'],
+        normal_socket=pbr_node.inputs['Coat Normal'],
     )
 
     transmission(
@@ -24,7 +24,7 @@ from itertools import chain
 
 from .interface import NWConnectionListInputs, NWConnectionListOutputs
 
-from .utils.constants import blend_types, geo_combine_operations, operations, navs, get_nodes_from_category, rl_outputs
+from .utils.constants import blend_types, geo_combine_operations, operations, navs, get_texture_node_types, rl_outputs
 from .utils.draw import draw_callback_nodeoutline
 from .utils.paths import match_files_to_socket_names, split_into_components
 from .utils.nodes import (node_mid_pt, autolink, node_at_pos, get_active_tree, get_nodes_links, is_viewer_socket,
@@ -507,14 +507,19 @@ class NWPreviewNode(Operator, NWBase):
                 return True
         return False
 
+    @classmethod
+    def get_output_sockets(cls, node_tree):
+        return [item for item in node_tree.interface.items_tree if item.item_type == 'SOCKET' and item.in_out in {'OUTPUT', 'BOTH'}]
+
     def ensure_viewer_socket(self, node, socket_type, connect_socket=None):
         # check if a viewer output already exists in a node group otherwise create
         if hasattr(node, "node_tree"):
-            index = None
-            if len(node.node_tree.outputs):
+            viewer_socket = None
+            output_sockets = self.get_output_sockets(node.node_tree)
+            if len(output_sockets):
                 free_socket = None
-                for i, socket in enumerate(node.node_tree.outputs):
-                    if is_viewer_socket(socket) and is_visible_socket(node.outputs[i]) and socket.type == socket_type:
+                for socket in output_sockets:
+                    if is_viewer_socket(socket) and socket.socket_type == socket_type:
                         # if viewer output is already used but leads to the same socket we can still use it
                         is_used = self.is_socket_used_other_mats(socket)
                         if is_used:
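
Aside (illustration only): in Blender 4.0 a group's sockets live in `node_tree.interface.items_tree`, mixed with panel items, so they are filtered by `item_type` and `in_out` as the hunk above does. A standalone sketch of that traversal (hypothetical helper name):

    def iter_output_sockets(node_tree):
        # Interface items include panels as well as sockets; keep only output sockets.
        for item in node_tree.interface.items_tree:
            if item.item_type == 'SOCKET' and item.in_out in {'OUTPUT', 'BOTH'}:
                yield item
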
@@ -525,19 +530,18 @@ class NWPreviewNode(Operator, NWBase):
                             links = groupout_input.links
                             if connect_socket not in [link.from_socket for link in links]:
                                 continue
-                        index = i
+                        viewer_socket = socket
                         break
                     if not free_socket:
-                        free_socket = i
-            if not index and free_socket:
-                index = free_socket
+                        free_socket = socket
+            if not viewer_socket and free_socket:
+                viewer_socket = free_socket
 
-            if not index:
+            if not viewer_socket:
                 # create viewer socket
-                node.node_tree.outputs.new(socket_type, viewer_socket_name)
-                index = len(node.node_tree.outputs) - 1
-                node.node_tree.outputs[index].NWViewerSocket = True
-            return index
+                viewer_socket = node.node_tree.interface.new_socket(viewer_socket_name, in_out={'OUTPUT'}, socket_type=socket_type)
+                viewer_socket.NWViewerSocket = True
+            return viewer_socket
 
     def init_shader_variables(self, space, shader_type):
         if shader_type == 'OBJECT':
@@ -582,10 +586,9 @@ class NWPreviewNode(Operator, NWBase):
             next_node = link.from_node
             external_socket = link.from_socket
             if hasattr(next_node, "node_tree"):
-                for socket_index, s in enumerate(next_node.outputs):
-                    if s == external_socket:
+                for socket_index, socket in enumerate(next_node.node_tree.interface.items_tree):
+                    if socket.identifier == external_socket.identifier:
                         break
-                socket = next_node.node_tree.outputs[socket_index]
                 if is_viewer_socket(socket) and socket not in sockets:
                     sockets.append(socket)
                 # continue search inside of node group but restrict socket to where we came from
@@ -599,11 +602,17 @@ class NWPreviewNode(Operator, NWBase):
             if hasattr(node, "node_tree"):
                 if node.node_tree is None:
                     continue
-                for socket in node.node_tree.outputs:
+                for socket in cls.get_output_sockets(node.node_tree):
                     if is_viewer_socket(socket) and (socket not in sockets):
                         sockets.append(socket)
                 cls.scan_nodes(node.node_tree, sockets)
 
+    @classmethod
+    def remove_socket(cls, tree, socket):
+        interface = tree.interface
+        interface.remove(socket)
+        interface.active_index = min(interface.active_index, len(interface.items_tree) - 1)
+
     def link_leads_to_used_socket(self, link):
         # return True if link leads to a socket that is already used in this material
         socket = get_internal_socket(link.to_socket)
@@ -710,22 +719,22 @@ class NWPreviewNode(Operator, NWBase):
                 link_end = output_socket
                 while tree.nodes.active != active:
                     node = tree.nodes.active
-                    index = self.ensure_viewer_socket(
+                    viewer_socket = self.ensure_viewer_socket(
                         node, 'NodeSocketGeometry', connect_socket=active.outputs[out_i] if node.node_tree.nodes.active == active else None)
-                    link_start = node.outputs[index]
-                    node_socket = node.node_tree.outputs[index]
+                    link_start = node.outputs[viewer_socket_name]
+                    node_socket = viewer_socket
                     if node_socket in delete_sockets:
                         delete_sockets.remove(node_socket)
                     connect_sockets(link_start, link_end)
                     # Iterate
-                    link_end = self.ensure_group_output(node.node_tree).inputs[index]
+                    link_end = self.ensure_group_output(node.node_tree).inputs[viewer_socket_name]
                     tree = tree.nodes.active.node_tree
                 connect_sockets(active.outputs[out_i], link_end)
 
             # Delete sockets
             for socket in delete_sockets:
                 tree = socket.id_data
-                tree.outputs.remove(socket)
+                self.remove_socket(tree, socket)
 
             nodes.active = active
             active.select = True
@@ -733,15 +742,12 @@ class NWPreviewNode(Operator, NWBase):
             return {'FINISHED'}
 
         # What follows is code for the shader editor
-        output_types = [x.nodetype for x in
-                        get_nodes_from_category('Output', context)]
         valid = False
         if active:
-            if active.rna_type.identifier not in output_types:
-                for out in active.outputs:
-                    if is_visible_socket(out):
-                        valid = True
-                        break
+            for out in active.outputs:
+                if is_visible_socket(out):
+                    valid = True
+                    break
         if valid:
             # get material_output node
             materialout = None  # placeholder node
@@ -786,15 +792,15 @@ class NWPreviewNode(Operator, NWBase):
             link_end = output_socket
             while tree.nodes.active != active:
                 node = tree.nodes.active
-                index = self.ensure_viewer_socket(
+                viewer_socket = self.ensure_viewer_socket(
                     node, socket_type, connect_socket=active.outputs[out_i] if node.node_tree.nodes.active == active else None)
-                link_start = node.outputs[index]
-                node_socket = node.node_tree.outputs[index]
+                link_start = node.outputs[viewer_socket_name]
+                node_socket = viewer_socket
                 if node_socket in delete_sockets:
                     delete_sockets.remove(node_socket)
                 connect_sockets(link_start, link_end)
                 # Iterate
-                link_end = self.ensure_group_output(node.node_tree).inputs[index]
+                link_end = self.ensure_group_output(node.node_tree).inputs[viewer_socket_name]
                 tree = tree.nodes.active.node_tree
             connect_sockets(active.outputs[out_i], link_end)
 
@@ -802,7 +808,7 @@ class NWPreviewNode(Operator, NWBase):
             for socket in delete_sockets:
                 if not self.is_socket_used_other_mats(socket):
                     tree = socket.id_data
-                    tree.outputs.remove(socket)
+                    self.remove_socket(tree, socket)
 
             nodes.active = active
             active.select = True
@@ -1820,8 +1826,7 @@ class NWAddTextureSetup(Operator, NWBase):
     def execute(self, context):
         nodes, links = get_nodes_links(context)
 
-        texture_types = [x.nodetype for x in
-                         get_nodes_from_category('Texture', context)]
+        texture_types = get_texture_node_types()
         selected_nodes = [n for n in nodes if n.select]
 
         for node in selected_nodes:
@@ -3,7 +3,6 @@
 # SPDX-License-Identifier: GPL-2.0-or-later
 
 from collections import namedtuple
-from nodeitems_utils import node_categories_iter
 
 
 #################
@@ -160,11 +159,23 @@ draw_color_sets = {
 }
 
 
-def get_nodes_from_category(category_name, context):
-    for category in node_categories_iter(context):
-        if category.name == category_name:
-            return sorted(category.items(context), key=lambda node: node.label)
+def get_texture_node_types():
+    return [
+        "ShaderNodeTexBrick",
+        "ShaderNodeTexChecker",
+        "ShaderNodeTexEnvironment",
+        "ShaderNodeTexGradient",
+        "ShaderNodeTexIES",
+        "ShaderNodeTexImage",
+        "ShaderNodeTexMagic",
+        "ShaderNodeTexMusgrave",
+        "ShaderNodeTexNoise",
+        "ShaderNodeTexPointDensity",
+        "ShaderNodeTexSky",
+        "ShaderNodeTexVoronoi",
+        "ShaderNodeTexWave",
+        "ShaderNodeTexWhiteNoise"
+    ]
 
 def nice_hotkey_name(punc):
     # convert the ugly string name into the actual character
@@ -170,25 +170,18 @@ def get_internal_socket(socket):
     # get the internal socket from a socket inside or outside the group
     node = socket.node
     if node.type == 'GROUP_OUTPUT':
-        source_iterator = node.inputs
-        iterator = node.id_data.outputs
+        iterator = node.id_data.interface.items_tree
     elif node.type == 'GROUP_INPUT':
-        source_iterator = node.outputs
-        iterator = node.id_data.inputs
+        iterator = node.id_data.interface.items_tree
     elif hasattr(node, "node_tree"):
-        if socket.is_output:
-            source_iterator = node.outputs
-            iterator = node.node_tree.outputs
-        else:
-            source_iterator = node.inputs
-            iterator = node.node_tree.inputs
+        iterator = node.node_tree.interface.items_tree
     else:
         return None
 
-    for i, s in enumerate(source_iterator):
-        if s == socket:
-            break
-    return iterator[i]
+    for s in iterator:
+        if s.identifier == socket.identifier:
+            return s
+    return iterator[0]
 
 
 def is_viewer_link(link, output_node):
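
Aside (illustration only): the rewrite above matches sockets by their stable `identifier` string, since a node's sockets and the tree's interface items are distinct wrappers that do not compare equal in 4.0. A standalone sketch (hypothetical helper name):

    def find_interface_item(node_tree, socket):
        # Interface items and node sockets share the same identifier string.
        for item in node_tree.interface.items_tree:
            if item.identifier == socket.identifier:
                return item
        return None
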
@@ -9,7 +9,7 @@ Pose Library based on the Asset Browser.
 bl_info = {
     "name": "Pose Library",
     "description": "Pose Library based on the Asset Browser.",
-    "author": "Sybren A. Stüvel",
+    "author": "Sybren A. Stüvel, Julian Eisel",
     "version": (2, 0),
     "blender": (3, 0, 0),
     "location": "Asset Browser -> Animations, and 3D Viewport -> Animation panel",
@@ -9,6 +9,7 @@ Pose Library - GUI definition.
 import bpy
 from bpy.types import (
     AssetHandle,
+    AssetRepresentation,
     Context,
     Menu,
     Panel,
|
|||||||
return PoseLibraryPanel.poll(context)
|
return PoseLibraryPanel.poll(context)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def asset_poll(cls, asset: AssetHandle) -> bool:
|
def asset_poll(cls, asset: AssetRepresentation) -> bool:
|
||||||
return asset.file_data.id_type == 'ACTION'
|
return asset.id_type == 'ACTION'
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def draw_context_menu(cls, _context: Context, _asset: AssetHandle, layout: UILayout):
|
def draw_context_menu(cls, _context: Context, _asset: AssetRepresentation, layout: UILayout):
|
||||||
# Make sure these operator properties match those used in `VIEW3D_PT_pose_library_legacy`.
|
# Make sure these operator properties match those used in `VIEW3D_PT_pose_library_legacy`.
|
||||||
layout.operator("poselib.apply_pose_asset", text="Apply Pose").flipped = False
|
layout.operator("poselib.apply_pose_asset", text="Apply Pose").flipped = False
|
||||||
layout.operator("poselib.apply_pose_asset", text="Apply Pose Flipped").flipped = True
|
layout.operator("poselib.apply_pose_asset", text="Apply Pose Flipped").flipped = True
|
||||||
|
@ -40,6 +40,7 @@ initial_load_order = [
|
|||||||
'utils.mechanism',
|
'utils.mechanism',
|
||||||
'utils.animation',
|
'utils.animation',
|
||||||
'utils.metaclass',
|
'utils.metaclass',
|
||||||
|
'utils.objects',
|
||||||
'feature_sets',
|
'feature_sets',
|
||||||
'rigs',
|
'rigs',
|
||||||
'rigs.utils',
|
'rigs.utils',
|
||||||
@ -708,6 +709,14 @@ def register():
|
|||||||
get=color_set_get, set=color_set_set, search=color_set_search
|
get=color_set_get, set=color_set_set, search=color_set_search
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Object properties
|
||||||
|
obj_store = bpy.types.Object
|
||||||
|
|
||||||
|
obj_store.rigify_owner_rig = PointerProperty(
|
||||||
|
type=bpy.types.Object,
|
||||||
|
name="Rigify Owner Rig",
|
||||||
|
description="Rig that owns this object and may delete or overwrite it upon re-generation")
|
||||||
|
|
||||||
prefs = RigifyPreferences.get_instance()
|
prefs = RigifyPreferences.get_instance()
|
||||||
prefs.register_feature_sets(True)
|
prefs.register_feature_sets(True)
|
||||||
prefs.update_external_rigs()
|
prefs.update_external_rigs()
|
||||||
@ -772,6 +781,10 @@ def unregister():
|
|||||||
del coll_store.rigify_color_set_id
|
del coll_store.rigify_color_set_id
|
||||||
del coll_store.rigify_color_set_name
|
del coll_store.rigify_color_set_name
|
||||||
|
|
||||||
|
obj_store: typing.Any = bpy.types.Object
|
||||||
|
|
||||||
|
del obj_store.rigify_owner_rig
|
||||||
|
|
||||||
# Classes.
|
# Classes.
|
||||||
for cls in classes:
|
for cls in classes:
|
||||||
unregister_class(cls)
|
unregister_class(cls)
|
||||||
|
@@ -21,6 +21,7 @@ from . import base_rig
 from itertools import count
 
 if TYPE_CHECKING:
+    from .utils.objects import ArtifactManager
     from .rig_ui_template import ScriptGenerator
 
 
|
|||||||
obj: ArmatureObject
|
obj: ArmatureObject
|
||||||
|
|
||||||
script: 'ScriptGenerator'
|
script: 'ScriptGenerator'
|
||||||
|
artifacts: 'ArtifactManager'
|
||||||
|
|
||||||
rig_list: List[base_rig.BaseRig]
|
rig_list: List[base_rig.BaseRig]
|
||||||
root_rigs: List[base_rig.BaseRig]
|
root_rigs: List[base_rig.BaseRig]
|
||||||
|
@ -24,6 +24,7 @@ from .utils.rig import get_rigify_type, get_rigify_target_rig,\
|
|||||||
get_rigify_rig_basename, get_rigify_force_widget_update, get_rigify_finalize_script,\
|
get_rigify_rig_basename, get_rigify_force_widget_update, get_rigify_finalize_script,\
|
||||||
get_rigify_mirror_widgets, get_rigify_colors
|
get_rigify_mirror_widgets, get_rigify_colors
|
||||||
from .utils.action_layers import ActionLayerBuilder
|
from .utils.action_layers import ActionLayerBuilder
|
||||||
|
from .utils.objects import ArtifactManager
|
||||||
|
|
||||||
from . import base_generate
|
from . import base_generate
|
||||||
from . import rig_ui_template
|
from . import rig_ui_template
|
||||||
@ -135,6 +136,8 @@ class Generator(base_generate.BaseGenerator):
|
|||||||
if obj_found:
|
if obj_found:
|
||||||
self.saved_visible_layers = {coll.name: coll.is_visible for coll in obj.data.collections}
|
self.saved_visible_layers = {coll.name: coll.is_visible for coll in obj.data.collections}
|
||||||
|
|
||||||
|
self.artifacts.generate_init_existing(obj)
|
||||||
|
|
||||||
def __find_legacy_collection(self) -> bpy.types.Collection:
|
def __find_legacy_collection(self) -> bpy.types.Collection:
|
||||||
"""For backwards comp, matching by name to find a legacy collection.
|
"""For backwards comp, matching by name to find a legacy collection.
|
||||||
(For before there was a Widget Collection PointerProperty)
|
(For before there was a Widget Collection PointerProperty)
|
||||||
@ -217,8 +220,12 @@ class Generator(base_generate.BaseGenerator):
|
|||||||
|
|
||||||
validate_collection_references(self.metarig)
|
validate_collection_references(self.metarig)
|
||||||
|
|
||||||
if ROOT_COLLECTION not in collections:
|
coll = collections.get(ROOT_COLLECTION)
|
||||||
|
|
||||||
|
if not coll:
|
||||||
coll = collections.new(ROOT_COLLECTION)
|
coll = collections.new(ROOT_COLLECTION)
|
||||||
|
|
||||||
|
if coll.rigify_ui_row <= 0:
|
||||||
coll.rigify_ui_row = 2 + choose_next_uid(collections, 'rigify_ui_row', min_value=1)
|
coll.rigify_ui_row = 2 + choose_next_uid(collections, 'rigify_ui_row', min_value=1)
|
||||||
|
|
||||||
def __duplicate_rig(self):
|
def __duplicate_rig(self):
|
||||||
@ -452,6 +459,8 @@ class Generator(base_generate.BaseGenerator):
|
|||||||
self.__unhide_rig_object(obj)
|
self.__unhide_rig_object(obj)
|
||||||
|
|
||||||
# Collect data from the existing rig
|
# Collect data from the existing rig
|
||||||
|
self.artifacts = ArtifactManager(self)
|
||||||
|
|
||||||
self.__save_rig_data(obj, obj_found)
|
self.__save_rig_data(obj, obj_found)
|
||||||
|
|
||||||
# Select the chosen working collection in case it changed
|
# Select the chosen working collection in case it changed
|
||||||
@ -633,6 +642,8 @@ class Generator(base_generate.BaseGenerator):
|
|||||||
|
|
||||||
obj.data.collections.active_index = 0
|
obj.data.collections.active_index = 0
|
||||||
|
|
||||||
|
self.artifacts.generate_cleanup()
|
||||||
|
|
||||||
###########################################
|
###########################################
|
||||||
# Restore active collection
|
# Restore active collection
|
||||||
view_layer.active_layer_collection = self.layer_collection
|
view_layer.active_layer_collection = self.layer_collection
|
||||||
|
@ -8,6 +8,7 @@ from collections import OrderedDict
 from typing import Union, Optional, Any

 from .utils.animation import SCRIPT_REGISTER_BAKE, SCRIPT_UTILITIES_BAKE
+from .utils.mechanism import quote_property

 from . import base_generate
@ -918,6 +919,157 @@ class RigLayers(bpy.types.Panel):
 '''


+class PanelExpression(object):
+    """A runtime expression involving bone properties"""
+
+    _rigify_expr: str
+
+    def __init__(self, expr: str):
+        self._rigify_expr = expr
+
+    def __repr__(self):
+        return self._rigify_expr
+
+    def __add__(self, other):
+        return PanelExpression(f"({self._rigify_expr} + {repr(other)})")
+
+    def __sub__(self, other):
+        return PanelExpression(f"({self._rigify_expr} - {repr(other)})")
+
+    def __mul__(self, other):
+        return PanelExpression(f"({self._rigify_expr} * {repr(other)})")
+
+    def __matmul__(self, other):
+        return PanelExpression(f"({self._rigify_expr} @ {repr(other)})")
+
+    def __truediv__(self, other):
+        return PanelExpression(f"({self._rigify_expr} / {repr(other)})")
+
+    def __floordiv__(self, other):
+        return PanelExpression(f"({self._rigify_expr} // {repr(other)})")
+
+    def __mod__(self, other):
+        return PanelExpression(f"({self._rigify_expr} % {repr(other)})")
+
+    def __lshift__(self, other):
+        return PanelExpression(f"({self._rigify_expr} << {repr(other)})")
+
+    def __rshift__(self, other):
+        return PanelExpression(f"({self._rigify_expr} >> {repr(other)})")
+
+    def __and__(self, other):
+        return PanelExpression(f"({self._rigify_expr} & {repr(other)})")
+
+    def __xor__(self, other):
+        return PanelExpression(f"({self._rigify_expr} ^ {repr(other)})")
+
+    def __or__(self, other):
+        return PanelExpression(f"({self._rigify_expr} | {repr(other)})")
+
+    def __radd__(self, other):
+        return PanelExpression(f"({repr(other)} + {self._rigify_expr})")
+
+    def __rsub__(self, other):
+        return PanelExpression(f"({repr(other)} - {self._rigify_expr})")
+
+    def __rmul__(self, other):
+        return PanelExpression(f"({repr(other)} * {self._rigify_expr})")
+
+    def __rmatmul__(self, other):
+        return PanelExpression(f"({repr(other)} @ {self._rigify_expr})")
+
+    def __rtruediv__(self, other):
+        return PanelExpression(f"({repr(other)} / {self._rigify_expr})")
+
+    def __rfloordiv__(self, other):
+        return PanelExpression(f"({repr(other)} // {self._rigify_expr})")
+
+    def __rmod__(self, other):
+        return PanelExpression(f"({repr(other)} % {self._rigify_expr})")
+
+    def __rlshift__(self, other):
+        return PanelExpression(f"({repr(other)} << {self._rigify_expr})")
+
+    def __rrshift__(self, other):
+        return PanelExpression(f"({repr(other)} >> {self._rigify_expr})")
+
+    def __rand__(self, other):
+        return PanelExpression(f"({repr(other)} & {self._rigify_expr})")
+
+    def __rxor__(self, other):
+        return PanelExpression(f"({repr(other)} ^ {self._rigify_expr})")
+
+    def __ror__(self, other):
+        return PanelExpression(f"({repr(other)} | {self._rigify_expr})")
+
+    def __neg__(self):
+        return PanelExpression(f"-{self._rigify_expr}")
+
+    def __pos__(self):
+        return PanelExpression(f"+{self._rigify_expr}")
+
+    def __abs__(self):
+        return PanelExpression(f"abs({self._rigify_expr})")
+
+    def __invert__(self):
+        return PanelExpression(f"~{self._rigify_expr}")
+
+    def __int__(self):
+        return PanelExpression(f"int({self._rigify_expr})")
+
+    def __float__(self):
+        return PanelExpression(f"float({self._rigify_expr})")
+
+    def __round__(self, digits=None):
+        return PanelExpression(f"round({self._rigify_expr}, {digits})")
+
+    def __trunc__(self):
+        return PanelExpression(f"trunc({self._rigify_expr})")
+
+    def __floor__(self):
+        return PanelExpression(f"floor({self._rigify_expr})")
+
+    def __ceil__(self):
+        return PanelExpression(f"ceil({self._rigify_expr})")
+
+    def __lt__(self, other):
+        return PanelExpression(f"({self._rigify_expr} < {repr(other)})")
+
+    def __le__(self, other):
+        return PanelExpression(f"({self._rigify_expr} <= {repr(other)})")
+
+    def __eq__(self, other):
+        return PanelExpression(f"({self._rigify_expr} == {repr(other)})")
+
+    def __ne__(self, other):
+        return PanelExpression(f"({self._rigify_expr} != {repr(other)})")
+
+    def __gt__(self, other):
+        return PanelExpression(f"({self._rigify_expr} > {repr(other)})")
+
+    def __ge__(self, other):
+        return PanelExpression(f"({self._rigify_expr} >= {repr(other)})")
+
+    def __bool__(self):
+        raise NotImplementedError("This object wraps an expression, not a value; casting to boolean is meaningless")
+
+
+class PanelReferenceExpression(PanelExpression):
+    """
+    A runtime expression referencing an object.
+    @DynamicAttrs
+    """
+
+    def __getitem__(self, item):
+        return PanelReferenceExpression(self._rigify_expr + quote_property(item))
+
+    def __getattr__(self, item):
+        return PanelReferenceExpression(self._rigify_expr + '.' + quote_property(item))
+
+    def get(self, item, default=None):
+        return PanelReferenceExpression(f"{self._rigify_expr}.get({repr(item)}, {repr(default)})")
+
+
 def quote_parameters(positional: list[Any], named: dict[str, Any]):
     """Quote the given positional and named parameters as a code string."""
     positional_list = [repr(v) for v in positional]
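The PanelExpression class works by overloading nearly every Python operator to return a new expression object whose repr() is the equivalent source text, so rig code can compose UI conditions with ordinary Python syntax and have them serialized into the generated rig UI script. A minimal sketch of that behavior (bone and property names are hypothetical, and quote_property is assumed to render a key as a ["key"] subscript):

    bone = PanelReferenceExpression('pose_bones["thigh_ik.L"]')
    expr = (bone["IK_Stretch"] * 2 + 1) > 0.5
    print(expr)
    # (((pose_bones["thigh_ik.L"]["IK_Stretch"] * 2) + 1) > 0.5)

Note that __bool__ deliberately raises, so accidentally evaluating an expression with `if expr:` at generation time fails loudly instead of silently baking a constant into the script.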
@ -1036,6 +1188,51 @@ class PanelLayout(object):
         """Add a split layout to the panel."""
         return self.add_nested_layout('split', params)

+    @staticmethod
+    def expr_bone(bone_name: str):
+        """Returns an expression referencing the specified pose bone."""
+        return PanelReferenceExpression(f"pose_bones[%r]" % bone_name)
+
+    @staticmethod
+    def expr_and(*expressions):
+        """Returns a boolean and expression of its parameters."""
+        return PanelExpression("(" + " and ".join(repr(e) for e in expressions) + ")")
+
+    @staticmethod
+    def expr_or(*expressions):
+        """Returns a boolean or expression of its parameters."""
+        return PanelExpression("(" + " or ".join(repr(e) for e in expressions) + ")")
+
+    @staticmethod
+    def expr_if_else(condition, true_expr, false_expr):
+        """Returns a conditional expression."""
+        return PanelExpression(f"({repr(true_expr)} if {repr(condition)} else {repr(false_expr)})")
+
+    @staticmethod
+    def expr_call(func: str, *expressions):
+        """Returns an expression calling the specified function with given parameters."""
+        return PanelExpression(func + "(" + ", ".join(repr(e) for e in expressions) + ")")
+
+    def set_layout_property(self, prop_name: str, prop_value: Any):
+        assert self.index > 0  # Don't change properties on the root layout
+        self.add_line("%s.%s = %r" % (self.layout, prop_name, prop_value))
+
+    @property
+    def active(self):
+        raise NotImplementedError("This is a write only property")
+
+    @active.setter
+    def active(self, value):
+        self.set_layout_property('active', value)
+
+    @property
+    def enabled(self):
+        raise NotImplementedError("This is a write only property")
+
+    @enabled.setter
+    def enabled(self, value):
+        self.set_layout_property('enabled', value)
+
+
 class BoneSetPanelLayout(PanelLayout):
     """Panel restricted to a certain set of bones."""
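These expr_* helpers are the entry points rig code uses to build such expressions, and the write-only active/enabled properties feed them into set_layout_property. A hedged sketch of how they fit together (bone and property names are hypothetical; panel.split() is the nested-layout method from the hunk above):

    master = panel.expr_bone('torso')            # pose_bones['torso']
    sub = panel.split()
    sub.enabled = panel.expr_or(master['head_follow'], master['neck_follow'])
    # The generated rig UI script then contains a line roughly like:
    #   layout_3.enabled = (pose_bones['torso']['head_follow'] or pose_bones['torso']['neck_follow'])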
@ -4,7 +4,9 @@

 import bpy
 import math
+import json

+from typing import Optional
 from mathutils import Vector, Matrix

 from ...utils.rig import is_rig_base_bone

@ -13,12 +15,15 @@ from ...utils.bones import put_bone, align_bone_orientation
 from ...utils.naming import make_derived_name
 from ...utils.misc import matrix_from_axis_roll, matrix_from_axis_pair
 from ...utils.widgets import adjust_widget_transform_mesh
+from ...utils.animation import add_fk_ik_snap_buttons
+from ...utils.mechanism import driver_var_transform

 from ..widgets import create_foot_widget, create_ball_socket_widget

 from ...base_rig import stage
+from ...rig_ui_template import PanelLayout

-from .limb_rigs import BaseLimbRig
+from .limb_rigs import BaseLimbRig, SCRIPT_UTILITIES_OP_SNAP_IK_FK


 DEG_360 = math.pi * 2
@ -33,6 +38,7 @@ class Rig(BaseLimbRig):
     pivot_type: str
     heel_euler_order: str
     use_ik_toe: bool
+    use_toe_roll: bool

     ik_matrix: Matrix
     roll_matrix: Matrix
@ -55,6 +61,7 @@ class Rig(BaseLimbRig):
         self.pivot_type = self.params.foot_pivot_type
         self.heel_euler_order = 'ZXY' if self.main_axis == 'x' else 'XZY'
         self.use_ik_toe = self.params.extra_ik_toe
+        self.use_toe_roll = self.params.extra_toe_roll

         if self.use_ik_toe:
             self.fk_name_suffix_cutoff = 3
@ -116,6 +123,37 @@ class Rig(BaseLimbRig):
         list[str]
     ]

+    ####################################################
+    # UI
+
+    def add_global_buttons(self, panel, rig_name):
+        super().add_global_buttons(panel, rig_name)
+
+        ik_chain, tail_chain, fk_chain = self.get_ik_fk_position_chains()
+
+        add_leg_snap_ik_to_fk(
+            panel,
+            master=self.bones.ctrl.master,
+            fk_bones=fk_chain, ik_bones=ik_chain, tail_bones=tail_chain,
+            ik_ctrl_bones=self.get_ik_control_chain(),
+            ik_extra_ctrls=self.get_extra_ik_controls(),
+            heel_control=self.bones.ctrl.heel,
+            rig_name=rig_name
+        )
+
+    def add_ik_only_buttons(self, panel, rig_name):
+        super().add_ik_only_buttons(panel, rig_name)
+
+        if self.use_toe_roll:
+            bone = self.bones.ctrl.heel
+
+            self.make_property(
+                bone, 'Toe_Roll', default=0.0,
+                description='Pivot on the tip of the toe when rolling forward with the heel control'
+            )
+
+            panel.custom_prop(bone, 'Toe_Roll', text='Roll On Toe', slider=True)
+
     ####################################################
     # IK controls
@ -282,7 +320,18 @@ class Rig(BaseLimbRig):
         put_bone(self.obj, rock1, heel_bone.tail, matrix=self.roll_matrix, scale=0.5)
         put_bone(self.obj, rock2, heel_bone.head, matrix=self.roll_matrix, scale=0.5)

-        return [rock2, rock1, roll2, roll1, result]
+        if self.use_toe_roll:
+            roll3 = self.copy_bone(toe, make_derived_name(heel, 'mch', '_roll3'), scale=0.3)
+
+            toe_pos = Vector(self.get_bone(toe).tail)
+            toe_pos.z = self.get_bone(roll2).head.z
+
+            put_bone(self.obj, roll3, toe_pos, matrix=self.roll_matrix)
+
+            return [rock2, rock1, roll2, roll3, roll1, result]
+
+        else:
+            return [rock2, rock1, roll2, roll1, result]

     @stage.parent_bones
     def parent_roll_mch_chain(self):
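The new roll3 pivot sits at the tip of the toe but is dropped to the height of the roll2 pivot, so the forward roll rotates around the toe tip without leaving the floor plane. A standalone sketch of that placement with made-up coordinates:

    from mathutils import Vector

    toe_tail = Vector((0.0, -0.18, 0.02))    # hypothetical toe tip position
    roll2_head = Vector((0.0, -0.05, 0.0))   # hypothetical roll2 pivot on the floor

    toe_pos = Vector(toe_tail)
    toe_pos.z = roll2_head.z                 # keep the pivot at floor height
    print(toe_pos)                           # <Vector (0.0000, -0.1800, 0.0000)>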
@ -295,7 +344,36 @@ class Rig(BaseLimbRig):
         self.rig_roll_mch_bones(self.bones.mch.heel, self.bones.ctrl.heel, self.bones.org.heel)

     def rig_roll_mch_bones(self, chain: list[str], heel: str, org_heel: str):
-        rock2, rock1, roll2, roll1, result = chain
+        if self.use_toe_roll:
+            rock2, rock1, roll2, roll3, roll1, result = chain
+
+            # Interpolate rotation in Euler space via drivers to simplify Snap With Roll
+            self.make_driver(
+                roll3, 'rotation_euler', index=0,
+                expression='max(0,x*i)' if self.main_axis == 'x' else 'x*i',
+                variables={
+                    'x': driver_var_transform(
+                        self.obj, heel, type='ROT_X', space='LOCAL',
+                        rotation_mode=self.heel_euler_order,
+                    ),
+                    'i': (heel, 'Toe_Roll'),
+                }
+            )
+
+            self.make_driver(
+                roll3, 'rotation_euler', index=2,
+                expression='max(0,z*i)' if self.main_axis == 'z' else 'z*i',
+                variables={
+                    'z': driver_var_transform(
+                        self.obj, heel, type='ROT_Z', space='LOCAL',
+                        rotation_mode=self.heel_euler_order,
+                    ),
+                    'i': (heel, 'Toe_Roll'),
+                }
+            )
+
+        else:
+            rock2, rock1, roll2, roll1, result = chain

         # This order is required for correct working of the constraints
         for bone in chain:
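The two drivers scale the heel control's local Euler rotation by the Toe_Roll slider, clamping the main-axis component so only forward roll transfers to the toe pivot. A plain-Python sketch of what the driver expressions evaluate to (axis handling hedged to mirror the 'max(0,x*i)' / 'z*i' expressions above):

    import math

    def toe_roll_rotation(heel_rot_x, heel_rot_z, toe_roll, main_axis='x'):
        # Mirrors the generated driver expressions on rotation_euler[0] and [2].
        rx = max(0.0, heel_rot_x * toe_roll) if main_axis == 'x' else heel_rot_x * toe_roll
        rz = max(0.0, heel_rot_z * toe_roll) if main_axis == 'z' else heel_rot_z * toe_roll
        return rx, rz

    # With the slider at 0.5, half of a 40 degree forward heel roll moves to the toe pivot:
    print(toe_roll_rotation(math.radians(40), 0.0, 0.5))   # (0.349..., 0.0)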
@ -392,14 +470,170 @@ class Rig(BaseLimbRig):
             description="Generate a separate IK toe control for better IK/FK snapping"
         )

+        params.extra_toe_roll = bpy.props.BoolProperty(
+            name='Toe Tip Roll',
+            default=False,
+            description="Generate a slider to pivot forward heel roll on the tip rather than the base of the toe"
+        )
+
     @classmethod
     def parameters_ui(cls, layout, params, end='Foot'):
         layout.prop(params, 'foot_pivot_type')
         layout.prop(params, 'extra_ik_toe')
+        layout.prop(params, 'extra_toe_roll')

         super().parameters_ui(layout, params, end)


+##########################
+# Leg IK to FK operator ##
+##########################
+
+SCRIPT_REGISTER_OP_LEG_SNAP_IK_FK = [
+    'POSE_OT_rigify_leg_roll_ik2fk', 'POSE_OT_rigify_leg_roll_ik2fk_bake']
+
+SCRIPT_UTILITIES_OP_LEG_SNAP_IK_FK = SCRIPT_UTILITIES_OP_SNAP_IK_FK + ['''
+#######################
+## Leg Snap IK to FK ##
+#######################
+
+class RigifyLegRollIk2FkBase(RigifyLimbIk2FkBase):
+    heel_control: StringProperty(name="Heel")
+    use_roll: bpy.props.BoolVectorProperty(
+        name="Use Roll", size=3, default=(True, True, False),
+        description="Specifies which rotation axes of the heel roll control to use"
+    )
+
+    MODES = {
+        'ZXY': ((0, 2), (1, 0, 2)),
+        'XZY': ((2, 0), (2, 0, 1)),
+    }
+
+    def save_frame_state(self, context, obj):
+        return get_chain_transform_matrices(obj, self.fk_bone_list + self.ctrl_bone_list[-1:])
+
+    def assign_extra_controls(self, context, obj, all_matrices, ik_bones, ctrl_bones):
+        for extra in self.extra_ctrl_list:
+            set_transform_from_matrix(
+                obj, extra, Matrix.Identity(4), space='LOCAL', keyflags=self.keyflags
+            )
+
+        if any(self.use_roll):
+            foot_matrix = all_matrices[len(ik_bones) - 1]
+            ctrl_matrix = all_matrices[len(self.fk_bone_list)]
+            heel_bone = obj.pose.bones[self.heel_control]
+            foot_bone = ctrl_bones[-1]
+
+            # Relative rotation of heel from orientation of master IK control
+            # to actual foot orientation.
+            heel_rest = convert_pose_matrix_via_rest_delta(ctrl_matrix, foot_bone, heel_bone)
+            heel_rot = convert_pose_matrix_via_rest_delta(foot_matrix, ik_bones[-1], heel_bone)
+
+            # Decode the euler decomposition mode
+            rot_mode = heel_bone.rotation_mode
+            indices, use_map = self.MODES[rot_mode]
+            use_roll = [self.use_roll[i] for i in use_map]
+            roll, turn = indices
+
+            # If the last rotation (yaw) is unused, move it to be first for better result
+            if not use_roll[turn]:
+                rot_mode = rot_mode[1:] + rot_mode[0:1]
+
+            local_rot = (heel_rest.inverted() @ heel_rot).to_euler(rot_mode)
+
+            heel_bone.rotation_euler = [
+                (val if use else 0) for val, use in zip(local_rot, use_roll)
+            ]
+
+            if self.keyflags is not None:
+                keyframe_transform_properties(
+                    obj, bone_name, self.keyflags, no_loc=True, no_rot=no_rot, no_scale=True
+                )
+
+            if 'Toe_Roll' in heel_bone and self.tail_bone_list:
+                toe_matrix = all_matrices[len(ik_bones)]
+                toe_bone = obj.pose.bones[self.tail_bone_list[0]]
+
+                # Compute relative rotation of heel determined by toe
+                heel_rot_toe = convert_pose_matrix_via_rest_delta(toe_matrix, toe_bone, heel_bone)
+                toe_rot = (heel_rest.inverted() @ heel_rot_toe).to_euler(rot_mode)
+
+                # Determine how much of the already computed heel rotation seems to be applied
+                heel_rot = list(heel_bone.rotation_euler)
+                heel_rot[roll] = max(0.0, heel_rot[roll])
+
+                # This relies on toe roll interpolation being done in Euler space
+                ratios = [
+                    toe_rot[i] / heel_rot[i] for i in (roll, turn)
+                    if use_roll[i] and heel_rot[i] * toe_rot[i] > 0
+                ]
+
+                val = min(1.0, max(0.0, min(ratios) if ratios else 0.0))
+                if val < 1e-5:
+                    val = 0.0
+
+                set_custom_property_value(
+                    obj, heel_bone.name, 'Toe_Roll', val, keyflags=self.keyflags)
+
+    def draw(self, context):
+        row = self.layout.row(align=True)
+        row.label(text="Use:")
+        row.prop(self, 'use_roll', index=0, text="Rock", toggle=True)
+        row.prop(self, 'use_roll', index=1, text="Roll", toggle=True)
+        row.prop(self, 'use_roll', index=2, text="Yaw", toggle=True)
+
+
+class POSE_OT_rigify_leg_roll_ik2fk(
+        RigifyLegRollIk2FkBase, RigifySingleUpdateMixin, bpy.types.Operator):
+    bl_options = {'REGISTER', 'UNDO', 'INTERNAL'}
+    bl_idname = "pose.rigify_leg_roll_ik2fk_" + rig_id
+    bl_label = "Snap IK->FK With Roll"
+    bl_description = "Snap the IK chain to FK result, using foot roll to preserve the current IK "\
+                     "control orientation as much as possible"
+
+    def invoke(self, context, event):
+        self.init_invoke(context)
+        return self.execute(context)
+
+
+class POSE_OT_rigify_leg_roll_ik2fk_bake(
+        RigifyLegRollIk2FkBase, RigifyBakeKeyframesMixin, bpy.types.Operator):
+    bl_idname = "pose.rigify_leg_roll_ik2fk_bake_" + rig_id
+    bl_label = "Apply Snap IK->FK To Keyframes"
+    bl_description = "Snap the IK chain keyframes to FK result, using foot roll to preserve the "\
+                     "current IK control orientation as much as possible"
+
+    def execute_scan_curves(self, context, obj):
+        self.bake_add_bone_frames(self.fk_bone_list, TRANSFORM_PROPS_ALL)
+        self.bake_add_bone_frames(self.ctrl_bone_list[-1:], TRANSFORM_PROPS_ROTATION)
+        return self.bake_get_all_bone_curves(
+            self.ctrl_bone_list + self.extra_ctrl_list, TRANSFORM_PROPS_ALL)
+''']
+
+
+def add_leg_snap_ik_to_fk(panel: PanelLayout, *, master: Optional[str] = None,
+                          fk_bones=(), ik_bones=(), tail_bones=(),
+                          ik_ctrl_bones=(), ik_extra_ctrls=(), heel_control, rig_name=''):
+    panel.use_bake_settings()
+    panel.script.add_utilities(SCRIPT_UTILITIES_OP_LEG_SNAP_IK_FK)
+    panel.script.register_classes(SCRIPT_REGISTER_OP_LEG_SNAP_IK_FK)
+
+    assert len(fk_bones) == len(ik_bones) + len(tail_bones)
+
+    op_props = {
+        'prop_bone': master,
+        'fk_bones': json.dumps(fk_bones),
+        'ik_bones': json.dumps(ik_bones),
+        'ctrl_bones': json.dumps(ik_ctrl_bones),
+        'tail_bones': json.dumps(tail_bones),
+        'extra_ctrls': json.dumps(ik_extra_ctrls),
+        'heel_control': heel_control,
+    }
+
+    add_fk_ik_snap_buttons(
+        panel, 'pose.rigify_leg_roll_ik2fk_{rig_id}', 'pose.rigify_leg_roll_ik2fk_bake_{rig_id}',
+        label='IK->FK With Roll', rig_name=rig_name, properties=op_props,
+    )
+
+
 def create_sample(obj):
     # generated by rigify.utils.write_metarig
     bpy.ops.object.mode_set(mode='EDIT')
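For context, a hedged sketch of calling the helper directly (all bone names are hypothetical, not taken from the patch). The '{rig_id}' placeholders in the idnames are substituted per generated rig, and the JSON-encoded lists must satisfy the assert that len(fk_bones) == len(ik_bones) + len(tail_bones):

    add_leg_snap_ik_to_fk(
        panel,
        master='thigh_parent.L',
        fk_bones=['thigh_fk.L', 'shin_fk.L', 'foot_fk.L', 'toe_fk.L'],
        ik_bones=['thigh_ik.L', 'MCH-shin_ik.L', 'MCH-foot_ik.L'],
        tail_bones=['toe_ik.L'],
        ik_ctrl_bones=['foot_ik.L'],
        ik_extra_ctrls=[],
        heel_control='foot_heel_ik.L',
        rig_name='Leg.L',
    )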
@ -622,7 +622,8 @@ class BaseLimbRig(BaseRig):
         self.make_property(self.prop_bone, 'IK_Stretch', default=1.0, description='IK Stretch')
         panel.custom_prop(self.prop_bone, 'IK_Stretch', text='IK Stretch', slider=True)

-        self.make_property(self.prop_bone, 'pole_vector', default=False, description='Use a pole target control')
+        self.make_property(self.prop_bone, 'pole_vector', default=0, min=0, max=1,
+                           description='Use a pole target control')

         self.add_ik_only_buttons(panel, rig_name)
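The pole_vector property changes from a boolean to an integer clamped to 0..1, presumably so the switch reads as a numeric slider/driver input rather than a checkbox (the patch itself only shows the signature change, so the motivation is an assumption). At the raw bpy level the difference looks roughly like this, given a pose bone pbone:

    pbone["pole_vector"] = 0                 # int custom property; a 0..1 range renders as a slider
    ui = pbone.id_properties_ui("pole_vector")
    ui.update(min=0, max=1, description='Use a pole target control')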

rigify/ui.py

@ -16,7 +16,7 @@ from typing import TYPE_CHECKING, Callable, Any
 from mathutils import Color

 from .utils.errors import MetarigError
-from .utils.layers import ROOT_COLLECTION, validate_collection_references
+from .utils.layers import ROOT_COLLECTION, SPECIAL_COLLECTIONS, validate_collection_references
 from .utils.rig import write_metarig, get_rigify_type, get_rigify_target_rig, \
     get_rigify_colors, get_rigify_params
 from .utils.widgets import write_widget
@ -1017,6 +1017,14 @@ class Generate(bpy.types.Operator):

     def execute(self, context):
         metarig = verify_armature_obj(context.object)

+        for bcoll in metarig.data.collections:
+            if bcoll.rigify_ui_row > 0 and bcoll.name not in SPECIAL_COLLECTIONS:
+                break
+        else:
+            self.report({'ERROR'}, 'No bone collections have UI buttons assigned - all bones would be invisible.')
+            return {'CANCELLED'}
+
         try:
             generate.generate_rig(context, metarig)
         except MetarigError as rig_exception:
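The guard relies on Python's for-else: the else branch runs only when the loop completes without hitting break, i.e. when no non-special collection with a UI row was found. A standalone illustration with made-up data:

    collections = [("ROOT", 1), ("DEF", 0), ("Torso", 0)]   # (name, rigify_ui_row)
    special = {"ROOT", "DEF", "MCH", "ORG"}

    for name, ui_row in collections:
        if ui_row > 0 and name not in special:
            break                 # found a collection that will get a UI button
    else:
        print("No bone collections have UI buttons assigned")  # loop never broke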
@ -29,6 +29,8 @@ DEF_COLLECTION = "DEF"
 ORG_COLLECTION = "ORG"
 MCH_COLLECTION = "MCH"

+SPECIAL_COLLECTIONS = (ROOT_COLLECTION, DEF_COLLECTION, MCH_COLLECTION, ORG_COLLECTION)
+
 REFS_TOGGLE_SUFFIX = '_layers_extra'
 REFS_LIST_SUFFIX = "_coll_refs"

rigify/utils/objects.py (new file)

@ -0,0 +1,206 @@
+# SPDX-FileCopyrightText: 2019-2022 Blender Foundation
+#
+# SPDX-License-Identifier: GPL-2.0-or-later
+
+import bpy
+
+from typing import TYPE_CHECKING
+from bpy.types import LayerCollection, Collection, Object
+
+from .misc import ArmatureObject
+from .naming import strip_org
+
+from mathutils import Matrix
+
+if TYPE_CHECKING:
+    from ..generate import Generator
+    from ..base_rig import BaseRig
+
+
+# noinspection SpellCheckingInspection
+def create_object_data(obj_type, name):
+    if obj_type == 'EMPTY':
+        return None
+    if obj_type == 'MESH':
+        return bpy.data.meshes.new(name)
+    if obj_type in ('CURVE', 'SURFACE', 'FONT'):
+        return bpy.data.curves.new(name, obj_type)
+    if obj_type == 'META':
+        return bpy.data.metaballs.new(name)
+    if obj_type == 'CURVES':
+        return bpy.data.hair_curves.new(name)
+    if obj_type == 'POINTCLOUD':
+        return bpy.data.pointclouds.new(name)
+    if obj_type == 'VOLUME':
+        return bpy.data.volumes.new(name)
+    if obj_type == 'GREASEPENCIL':
+        return bpy.data.grease_pencils.new(name)
+    if obj_type == 'ARMATURE':
+        return bpy.data.armatures.new(name)
+    if obj_type == 'LATTICE':
+        return bpy.data.lattices.new(name)
+    raise ValueError(f"Invalid object type {obj_type}")
+
+
+class ArtifactManager:
+    generator: 'Generator'
+
+    collection: Collection | None
+    layer_collection: LayerCollection | None
+
+    used_artifacts: list[Object]
+    temp_artifacts: list[Object]
+
+    artifact_reuse_table: dict[tuple[str, ...], Object]
+
+    def __init__(self, generator: 'Generator'):
+        self.generator = generator
+        self.collection = None
+        self.layer_collection = None
+        self.used_artifacts = []
+        self.temp_artifacts = []
+        self.artifact_reuse_table = {}
+
+    def _make_name(self, owner: 'BaseRig', name: str):
+        return self.generator.obj.name + ":" + strip_org(owner.base_bone) + ":" + name
+
+    def create_new(self, owner: 'BaseRig', obj_type: str, name: str):
+        """
+        Creates an artifact object of the specified type and name. If it already exists, all
+        references are updated to point to the new instance, and the existing one is deleted.
+
+        Parameters:
+            owner: rig component that requests the object.
+            obj_type: type of the object to create.
+            name: unique name of the object within the rig component.
+        Returns:
+            Object that was created.
+        """
+        return self.find_or_create(owner, obj_type, name, recreate=True)[1]
+
+    def find_or_create(self, owner: 'BaseRig', obj_type: str, name: str, *, recreate=False):
+        """
+        Creates or reuses an artifact object of the specified type.
+
+        Parameters:
+            owner: rig component that requests the object.
+            obj_type: type of the object to create.
+            name: unique name of the object within the rig component.
+            recreate: instructs that the object should be re-created from scratch even if it exists.
+        Returns:
+            (bool, Object) tuple, with the boolean specifying if the object already existed.
+        """
+
+        obj_name = self._make_name(owner, name)
+        key = (owner.base_bone, name)
+
+        obj = self.artifact_reuse_table.get(key)
+
+        # If the existing object has incorrect type, delete it
+        if obj and obj.type != obj_type:
+            if obj in self.used_artifacts:
+                owner.raise_error(f"duplicate reuse of artifact object {obj.name}")
+
+            print(f"RIGIFY: incompatible artifact object {obj.name} type: {obj.type} instead of {obj_type}")
+            del self.artifact_reuse_table[key]
+            bpy.data.objects.remove(obj)
+            obj = None
+
+        # Reuse the existing object
+        if obj:
+            if obj in self.used_artifacts:
+                owner.raise_error(f"duplicate reuse of artifact object {obj.name}")
+
+            if recreate:
+                # Forcefully re-create and replace the existing object
+                obj.name += '-OLD'
+                if data := obj.data:
+                    data.name += '-OLD'
+
+                new_obj = bpy.data.objects.new(obj_name, create_object_data(obj_type, obj_name))
+
+                obj.user_remap(new_obj)
+                self.artifact_reuse_table[key] = new_obj
+                bpy.data.objects.remove(obj)
+                obj = new_obj
+
+            # Ensure the existing object is visible
+            obj.hide_viewport = False
+            obj.hide_set(False, view_layer=self.generator.view_layer)
+
+            if not obj.visible_get(view_layer=self.generator.view_layer):
+                owner.raise_error(f"could not un-hide existing artifact object {obj.name}")
+
+            # Try renaming the existing object
+            obj.name = obj_name
+            if data := obj.data:
+                data.name = obj_name
+
+            found = True
+
+        # Create an object from scratch
+        else:
+            obj = bpy.data.objects.new(obj_name, create_object_data(obj_type, obj_name))
+
+            self.generator.collection.objects.link(obj)
+            self.artifact_reuse_table[key] = obj
+
+            found = False
+
+        self.used_artifacts.append(obj)
+
+        obj.rigify_owner_rig = self.generator.obj
+        obj["rigify_artifact_id"] = key
+
+        obj.parent = self.generator.obj
+        obj.parent_type = 'OBJECT'
+        obj.matrix_parent_inverse = Matrix.Identity(4)
+        obj.matrix_basis = Matrix.Identity(4)
+
+        return found, obj
+
+    def new_temporary(self, owner: 'BaseRig', obj_type: str, name="temp"):
+        """
+        Creates a new temporary object of the specified type.
+        The object will be removed after generation finishes.
+        """
+        obj_name = "TEMP:" + self._make_name(owner, name)
+        obj = bpy.data.objects.new(obj_name, create_object_data(obj_type, obj_name))
+        obj.rigify_owner_rig = self.generator.obj
+        obj["rigify_artifact_id"] = 'temporary'
+        self.generator.collection.objects.link(obj)
+        self.temp_artifacts.append(obj)
+        return obj
+
+    def remove_temporary(self, obj):
+        """
+        Immediately removes a temporary object previously created using new_temporary.
+        """
+        self.temp_artifacts.remove(obj)
+        bpy.data.objects.remove(obj)
+
+    def generate_init_existing(self, armature: ArmatureObject):
+        for obj in bpy.data.objects:
+            if obj.rigify_owner_rig != armature:
+                continue
+
+            aid = obj["rigify_artifact_id"]
+            if isinstance(aid, list) and all(isinstance(x, str) for x in aid):
+                self.artifact_reuse_table[tuple(aid)] = obj
+            else:
+                print(f"RIGIFY: removing orphan artifact {obj.name}")
+                bpy.data.objects.remove(obj)
+
+    def generate_cleanup(self):
+        for obj in self.temp_artifacts:
+            bpy.data.objects.remove(obj)
+
+        self.temp_artifacts = []
+
+        for key, obj in list(self.artifact_reuse_table.items()):
+            if obj in self.used_artifacts:
+                obj.hide_viewport = True
+                obj.hide_render = True
+            else:
+                del self.artifact_reuse_table[key]
+                bpy.data.objects.remove(obj)
@ -267,8 +267,15 @@ def upgrade_metarig_layers(metarig: ArmatureObject):
     default_layers = [i == 1 for i in range(32)]
     default_map = {
         'faces.super_face': ['primary', 'secondary'],
+        'limbs.arm': ['fk', 'tweak'],
+        'limbs.front_paw': ['fk', 'tweak'],
+        'limbs.leg': ['fk', 'tweak'],
+        'limbs.paw': ['fk', 'tweak'],
+        'limbs.rear_paw': ['fk', 'tweak'],
         'limbs.simple_tentacle': ['tweak'],
         'limbs.super_finger': ['tweak'],
+        'limbs.super_limb': ['fk', 'tweak'],
+        'spines.basic_spine': ['fk', 'tweak'],
     }

     for pose_bone in metarig.pose.bones:
@ -5,7 +5,7 @@
 bl_info = {
     "name": "Manage UI translations",
     "author": "Bastien Montagne",
-    "version": (1, 3, 4),
+    "version": (2, 0, 0),
     "blender": (4, 0, 0),
     "location": "Main \"File\" menu, text editor, any UI control",
     "description": "Allows managing UI translations directly from Blender "
@ -17,32 +17,32 @@ bl_info = {
 }


+from . import (
+    settings,
+    edit_translation,
+    update_repo,
+    update_addon,
+    update_ui,
+)
 if "bpy" in locals():
     import importlib
     importlib.reload(settings)
     importlib.reload(edit_translation)
-    importlib.reload(update_svn)
+    importlib.reload(update_repo)
     importlib.reload(update_addon)
     importlib.reload(update_ui)
-else:
-    import bpy
-    from . import (
-        settings,
-        edit_translation,
-        update_svn,
-        update_addon,
-        update_ui,
-    )
+import bpy


-classes = settings.classes + edit_translation.classes + update_svn.classes + update_addon.classes + update_ui.classes
+classes = settings.classes + edit_translation.classes + update_repo.classes + update_addon.classes + update_ui.classes


 def register():
     for cls in classes:
         bpy.utils.register_class(cls)

-    bpy.types.WindowManager.i18n_update_svn_settings = \
+    bpy.types.WindowManager.i18n_update_settings = \
         bpy.props.PointerProperty(type=update_ui.I18nUpdateTranslationSettings)

     # Init addon's preferences (unfortunately, as we are using an external storage for the properties,

@ -58,4 +58,4 @@ def unregister():
     for cls in classes:
         bpy.utils.unregister_class(cls)

-    del bpy.types.WindowManager.i18n_update_svn_settings
+    del bpy.types.WindowManager.i18n_update_settings
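The restructure keeps Blender's standard add-on reload idiom: the submodule imports now always run at the top, while the "bpy" in locals() check still detects a script reload (module globals persist across re-execution) and refreshes the submodules with importlib. Condensed, with a hypothetical module name:

    from . import mymodule

    if "bpy" in locals():          # true only when the add-on is being reloaded
        import importlib
        importlib.reload(mymodule)

    import bpy                     # defines 'bpy' for the next reload check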
@ -110,19 +110,10 @@ class UI_AP_i18n_settings(AddonPreferences):
         set=lambda self, val: _setattr(self._settings, "WARN_MSGID_NOT_CAPITALIZED", val),
     )

-    GETTEXT_MSGFMT_EXECUTABLE: StringProperty(
-        name="Gettext 'msgfmt' executable",
-        description="The gettext msgfmt 'compiler'. You’ll likely have to edit it if you’re under Windows",
-        subtype='FILE_PATH',
-        default="msgfmt",
-        get=lambda self: self._settings.GETTEXT_MSGFMT_EXECUTABLE,
-        set=lambda self, val: setattr(self._settings, "GETTEXT_MSGFMT_EXECUTABLE", val),
-    )
-
     FRIBIDI_LIB: StringProperty(
         name="Fribidi Library",
         description="The FriBidi C compiled library (.so under Linux, .dll under windows...), you’ll likely have "
-                    "to edit it if you’re under Windows, e.g. using the one included in svn's libraries repository",
+                    "to edit it if you’re under Windows, e.g. using the one included in Blender libraries repository",
         subtype='FILE_PATH',
         default="libfribidi.so.0",
         get=lambda self: self._settings.FRIBIDI_LIB,

@ -178,7 +169,6 @@ class UI_AP_i18n_settings(AddonPreferences):
         layout.label(text="WARNING: preferences are lost when add-on is disabled, be sure to use \"Save Persistent\" "
                           "if you want to keep your settings!")
         layout.prop(self, "WARN_MSGID_NOT_CAPITALIZED")
-        layout.prop(self, "GETTEXT_MSGFMT_EXECUTABLE")
         layout.prop(self, "FRIBIDI_LIB")
         layout.prop(self, "SOURCE_DIR")
         layout.prop(self, "I18N_DIR")
@ -132,7 +132,7 @@ class UI_OT_i18n_addon_translation_update(Operator):
         _cached_enum_addons[:] = []
         if not hasattr(self, "settings"):
             self.settings = settings.settings
-        i18n_sett = context.window_manager.i18n_update_svn_settings
+        i18n_sett = context.window_manager.i18n_update_settings

         module_name, mod = validate_module(self, context)

@ -220,7 +220,7 @@ class UI_OT_i18n_addon_translation_import(Operator):
         _cached_enum_addons[:] = []
         if not hasattr(self, "settings"):
             self.settings = settings.settings
-        i18n_sett = context.window_manager.i18n_update_svn_settings
+        i18n_sett = context.window_manager.i18n_update_settings

         module_name, mod = validate_module(self, context)
         if not (module_name and mod):

@ -323,7 +323,7 @@ class UI_OT_i18n_addon_translation_export(Operator):
         _cached_enum_addons[:] = []
         if not hasattr(self, "settings"):
             self.settings = settings.settings
-        i18n_sett = context.window_manager.i18n_update_svn_settings
+        i18n_sett = context.window_manager.i18n_update_settings

         module_name, mod = validate_module(self, context)
         if not (module_name and mod):
@ -28,7 +28,7 @@ import tempfile

 # Operators ###################################################################

-def i18n_updatetranslation_svn_branches_callback(pot, lng, settings):
+def i18n_updatetranslation_work_repo_callback(pot, lng, settings):
     if not lng['use']:
         return
     if os.path.isfile(lng['po_path']):

@ -40,10 +40,10 @@ def i18n_updatetranslation_work_repo_callback(pot, lng, settings):
     print("{} PO written!".format(lng['uid']))


-class UI_OT_i18n_updatetranslation_svn_branches(Operator):
-    """Update i18n svn's branches (po files)"""
-    bl_idname = "ui.i18n_updatetranslation_svn_branches"
-    bl_label = "Update I18n Branches"
+class UI_OT_i18n_updatetranslation_work_repo(Operator):
+    """Update i18n working repository (po files)"""
+    bl_idname = "ui.i18n_updatetranslation_work_repo"
+    bl_label = "Update I18n Work Repo"

     use_skip_pot_gen: BoolProperty(
         name="Skip POT",

@ -54,7 +54,7 @@ class UI_OT_i18n_updatetranslation_work_repo(Operator):
     def execute(self, context):
         if not hasattr(self, "settings"):
             self.settings = settings.settings
-        i18n_sett = context.window_manager.i18n_update_svn_settings
+        i18n_sett = context.window_manager.i18n_update_settings
         self.settings.FILE_NAME_POT = i18n_sett.pot_path

         context.window_manager.progress_begin(0, len(i18n_sett.langs) + 1)

@ -88,7 +88,7 @@ class UI_OT_i18n_updatetranslation_work_repo(Operator):
         with concurrent.futures.ProcessPoolExecutor() as exctr:
             pot = utils_i18n.I18nMessages(kind='PO', src=self.settings.FILE_NAME_POT, settings=self.settings)
             num_langs = len(i18n_sett.langs)
-            for progress, _ in enumerate(exctr.map(i18n_updatetranslation_svn_branches_callback,
+            for progress, _ in enumerate(exctr.map(i18n_updatetranslation_work_repo_callback,
                                                    (pot,) * num_langs,
                                                    [dict(lng.items()) for lng in i18n_sett.langs],
                                                    (self.settings,) * num_langs,

@ -102,7 +102,7 @@ class UI_OT_i18n_updatetranslation_work_repo(Operator):
         return wm.invoke_props_dialog(self)


-def i18n_cleanuptranslation_svn_branches_callback(lng, settings):
+def i18n_cleanuptranslation_work_repo_callback(lng, settings):
     if not lng['use']:
         print("Skipping {} language ({}).".format(lng['name'], lng['uid']))
         return

@ -115,15 +115,15 @@ def i18n_cleanuptranslation_work_repo_callback(lng, settings):
             ("Errors in this po, solved as best as possible!\n\t" + "\n\t".join(errs) if errs else "") + "\n")


-class UI_OT_i18n_cleanuptranslation_svn_branches(Operator):
-    """Clean up i18n svn's branches (po files)"""
-    bl_idname = "ui.i18n_cleanuptranslation_svn_branches"
-    bl_label = "Clean up I18n Branches"
+class UI_OT_i18n_cleanuptranslation_work_repo(Operator):
+    """Clean up i18n working repository (po files)"""
+    bl_idname = "ui.i18n_cleanuptranslation_work_repo"
+    bl_label = "Clean up I18n Work Repo"

     def execute(self, context):
         if not hasattr(self, "settings"):
             self.settings = settings.settings
-        i18n_sett = context.window_manager.i18n_update_svn_settings
+        i18n_sett = context.window_manager.i18n_update_settings
         # 'DEFAULT' and en_US are always valid, fully-translated "languages"!
         stats = {"DEFAULT": 1.0, "en_US": 1.0}

@ -131,7 +131,7 @@ class UI_OT_i18n_cleanuptranslation_work_repo(Operator):
         context.window_manager.progress_update(0)
         with concurrent.futures.ProcessPoolExecutor() as exctr:
             num_langs = len(i18n_sett.langs)
-            for progress, _ in enumerate(exctr.map(i18n_cleanuptranslation_svn_branches_callback,
+            for progress, _ in enumerate(exctr.map(i18n_cleanuptranslation_work_repo_callback,
                                                    [dict(lng.items()) for lng in i18n_sett.langs],
                                                    (self.settings,) * num_langs,
                                                    chunksize=4)):

@ -142,7 +142,7 @@ class UI_OT_i18n_cleanuptranslation_work_repo(Operator):
         return {'FINISHED'}


-def i18n_updatetranslation_svn_trunk_callback(lng, settings):
+def i18n_updatetranslation_blender_repo_callback(lng, settings):
     reports = []
     if lng['uid'] in settings.IMPORT_LANGUAGES_SKIP:
         reports.append("Skipping {} language ({}), edit settings if you want to enable it.".format(lng['name'], lng['uid']))
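All of these operators share the same fan-out pattern: exctr.map() receives one language dict per worker invocation, with the shared pot and settings arguments repeated to match. A self-contained sketch of the pattern (the worker body is reduced to returning the language uid):

    import concurrent.futures

    def work(pot, lng, settings):
        return lng["uid"]

    if __name__ == "__main__":
        langs = [{"uid": "fr_FR"}, {"uid": "de_DE"}]
        with concurrent.futures.ProcessPoolExecutor() as exctr:
            num = len(langs)
            print(list(exctr.map(work, ("POT",) * num, langs, ("CFG",) * num)))
        # -> ['fr_FR', 'de_DE']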
@ -156,32 +156,21 @@ def i18n_updatetranslation_svn_trunk_callback(lng, settings):
|
|||||||
"Cleaned up {} commented messages.\n".format(lng['name'], lng['uid'], po.clean_commented()) +
|
"Cleaned up {} commented messages.\n".format(lng['name'], lng['uid'], po.clean_commented()) +
|
||||||
("Errors in this po, solved as best as possible!\n\t" + "\n\t".join(errs) if errs else ""))
|
("Errors in this po, solved as best as possible!\n\t" + "\n\t".join(errs) if errs else ""))
|
||||||
if lng['uid'] in settings.IMPORT_LANGUAGES_RTL:
|
if lng['uid'] in settings.IMPORT_LANGUAGES_RTL:
|
||||||
po.write(kind="PO", dest=lng['po_path_trunk'][:-3] + "_raw.po")
|
|
||||||
po.rtl_process()
|
po.rtl_process()
|
||||||
po.write(kind="PO", dest=lng['po_path_trunk'])
|
po.write(kind="PO_COMPACT", dest=lng['po_path_blender'])
|
||||||
po.write(kind="PO_COMPACT", dest=lng['po_path_git'])
|
|
||||||
ret = po.write(kind="MO", dest=lng['mo_path_trunk'])
|
|
||||||
if (ret.stdout):
|
|
||||||
reports.append(ret.stdout.decode().rstrip("\n"))
|
|
||||||
if (ret.stderr):
|
|
||||||
stderr_str = ret.stderr.decode().rstrip("\n")
|
|
||||||
if ret.returncode != 0:
|
|
||||||
reports.append("ERROR: " + stderr_str)
|
|
||||||
else:
|
|
||||||
reports.append(stderr_str)
|
|
||||||
po.update_info()
|
po.update_info()
|
||||||
return lng['uid'], po.nbr_trans_msgs / po.nbr_msgs, reports
|
return lng['uid'], po.nbr_trans_msgs / po.nbr_msgs, reports
|
||||||
|
|
||||||
|
|
||||||
class UI_OT_i18n_updatetranslation_svn_trunk(Operator):
|
class UI_OT_i18n_updatetranslation_blender_repo(Operator):
|
||||||
"""Update i18n svn's branches (po files)"""
|
"""Update i18n data (po files) in Blneder source code repository"""
|
||||||
bl_idname = "ui.i18n_updatetranslation_svn_trunk"
|
bl_idname = "ui.i18n_updatetranslation_blender_repo"
|
||||||
bl_label = "Update I18n Trunk"
|
bl_label = "Update I18n Blender Repo"
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self, context):
|
||||||
if not hasattr(self, "settings"):
|
if not hasattr(self, "settings"):
|
||||||
self.settings = settings.settings
|
self.settings = settings.settings
|
||||||
i18n_sett = context.window_manager.i18n_update_svn_settings
|
i18n_sett = context.window_manager.i18n_update_settings
|
||||||
# 'DEFAULT' and en_US are always valid, fully-translated "languages"!
|
# 'DEFAULT' and en_US are always valid, fully-translated "languages"!
|
||||||
stats = {"DEFAULT": 1.0, "en_US": 1.0}
|
stats = {"DEFAULT": 1.0, "en_US": 1.0}
|
||||||
|
|
||||||
@ -189,7 +178,7 @@ class UI_OT_i18n_updatetranslation_svn_trunk(Operator):
|
|||||||
context.window_manager.progress_update(0)
|
context.window_manager.progress_update(0)
|
||||||
with concurrent.futures.ProcessPoolExecutor() as exctr:
|
with concurrent.futures.ProcessPoolExecutor() as exctr:
|
||||||
num_langs = len(i18n_sett.langs)
|
num_langs = len(i18n_sett.langs)
|
||||||
for progress, (lng_uid, stats_val, reports) in enumerate(exctr.map(i18n_updatetranslation_svn_trunk_callback,
|
for progress, (lng_uid, stats_val, reports) in enumerate(exctr.map(i18n_updatetranslation_blender_repo_callback,
|
||||||
[dict(lng.items()) for lng in i18n_sett.langs],
|
[dict(lng.items()) for lng in i18n_sett.langs],
|
||||||
(self.settings,) * num_langs,
|
(self.settings,) * num_langs,
|
||||||
chunksize=4)):
|
chunksize=4)):
|
||||||
@ -197,61 +186,31 @@ class UI_OT_i18n_updatetranslation_svn_trunk(Operator):
|
|||||||
stats[lng_uid] = stats_val
|
stats[lng_uid] = stats_val
|
||||||
print("".join(reports) + "\n")
|
print("".join(reports) + "\n")
|
||||||
|
|
||||||
# Copy pot file from branches to trunk.
|
|
||||||
shutil.copy2(self.settings.FILE_NAME_POT, self.settings.TRUNK_PO_DIR)
|
|
||||||
|
|
||||||
print("Generating languages' menu...")
|
print("Generating languages' menu...")
|
||||||
context.window_manager.progress_update(progress + 2)
|
context.window_manager.progress_update(progress + 2)
|
||||||
# First complete our statistics by checking po files we did not touch this time!
|
|
||||||
po_to_uid = {os.path.basename(lng.po_path): lng.uid for lng in i18n_sett.langs}
|
|
||||||
for po_path in os.listdir(self.settings.TRUNK_PO_DIR):
|
|
||||||
uid = po_to_uid.get(po_path, None)
|
|
||||||
po_path = os.path.join(self.settings.TRUNK_PO_DIR, po_path)
|
|
||||||
if uid and uid not in stats:
|
|
||||||
po = utils_i18n.I18nMessages(uid=uid, kind='PO', src=po_path, settings=self.settings)
|
|
||||||
stats[uid] = po.nbr_trans_msgs / po.nbr_msgs if po.nbr_msgs > 0 else 0
|
|
||||||
languages_menu_lines = utils_languages_menu.gen_menu_file(stats, self.settings)
|
languages_menu_lines = utils_languages_menu.gen_menu_file(stats, self.settings)
|
||||||
with open(os.path.join(self.settings.TRUNK_MO_DIR, self.settings.LANGUAGES_FILE), 'w', encoding="utf8") as f:
|
with open(os.path.join(self.settings.BLENDER_I18N_ROOT, self.settings.LANGUAGES_FILE), 'w', encoding="utf8") as f:
|
||||||
f.write("\n".join(languages_menu_lines))
|
|
||||||
with open(os.path.join(self.settings.GIT_I18N_ROOT, self.settings.LANGUAGES_FILE), 'w', encoding="utf8") as f:
|
|
||||||
f.write("\n".join(languages_menu_lines))
|
f.write("\n".join(languages_menu_lines))
|
||||||
context.window_manager.progress_end()
|
context.window_manager.progress_end()
|
||||||
|
|
||||||
return {'FINISHED'}
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
class UI_OT_i18n_updatetranslation_svn_statistics(Operator):
|
class UI_OT_i18n_updatetranslation_statistics(Operator):
|
||||||
"""Create or extend a 'i18n_info.txt' Text datablock"""
|
"""Create or extend a 'i18n_info.txt' Text datablock"""
|
||||||
"""(it will contain statistics and checks about current branches and/or trunk)"""
|
"""(it will contain statistics and checks about current working repository PO files)"""
|
||||||
bl_idname = "ui.i18n_updatetranslation_svn_statistics"
|
bl_idname = "ui.i18n_updatetranslation_statistics"
|
||||||
bl_label = "Update I18n Statistics"
|
bl_label = "Update I18n Statistics"
|
||||||
|
|
||||||
use_branches: BoolProperty(
|
|
||||||
name="Check Branches",
|
|
||||||
description="Check po files in branches",
|
|
||||||
default=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
use_trunk: BoolProperty(
|
|
||||||
name="Check Trunk",
|
|
||||||
description="Check po files in trunk",
|
|
||||||
default=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
report_name = "i18n_info.txt"
|
report_name = "i18n_info.txt"
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self, context):
|
||||||
if not hasattr(self, "settings"):
|
if not hasattr(self, "settings"):
|
||||||
self.settings = settings.settings
|
self.settings = settings.settings
|
||||||
i18n_sett = context.window_manager.i18n_update_svn_settings
|
i18n_sett = context.window_manager.i18n_update_settings
|
||||||
|
|
||||||
buff = io.StringIO()
|
buff = io.StringIO()
|
||||||
lst = []
|
lst = [(lng, lng.po_path) for lng in i18n_sett.langs]
|
||||||
if self.use_branches:
|
|
||||||
lst += [(lng, lng.po_path) for lng in i18n_sett.langs]
|
|
||||||
if self.use_trunk:
|
|
||||||
lst += [(lng, lng.po_path_trunk) for lng in i18n_sett.langs
|
|
||||||
if lng.uid not in self.settings.IMPORT_LANGUAGES_SKIP]
|
|
||||||
|
|
||||||
context.window_manager.progress_begin(0, len(lst))
|
context.window_manager.progress_begin(0, len(lst))
|
||||||
context.window_manager.progress_update(0)
|
context.window_manager.progress_update(0)
|
@ -278,7 +237,7 @@ class UI_OT_i18n_updatetranslation_svn_statistics(Operator):
data = text.as_string()
data = data + "\n" + buff.getvalue()
text.from_string(data)
self.report({'INFO'}, "Info written to {} text datablock!".format(self.report_name))
self.report({'INFO'}, "Info written to %s text datablock!" % self.report_name)
context.window_manager.progress_end()

return {'FINISHED'}
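The create-or-extend behavior promised by the docstring rests on bpy.data.texts together with as_string()/from_string(), as the hunk above shows. A small self-contained sketch of that pattern:

import bpy

def append_to_text_datablock(name, new_content):
    # Create the Text datablock if it does not exist yet, then append to it.
    text = bpy.data.texts.get(name)
    if text is None:
        text = bpy.data.texts.new(name)
    data = text.as_string()
    text.from_string(data + "\n" + new_content if data else new_content)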
@ -289,8 +248,8 @@ class UI_OT_i18n_updatetranslation_svn_statistics(Operator):


classes = (
UI_OT_i18n_updatetranslation_svn_branches,
UI_OT_i18n_updatetranslation_work_repo,
UI_OT_i18n_cleanuptranslation_svn_branches,
UI_OT_i18n_cleanuptranslation_work_repo,
UI_OT_i18n_updatetranslation_svn_trunk,
UI_OT_i18n_updatetranslation_blender_repo,
UI_OT_i18n_updatetranslation_svn_statistics,
UI_OT_i18n_updatetranslation_statistics,
)
@ -58,29 +58,15 @@ class I18nUpdateTranslationLanguage(PropertyGroup):
)

po_path: StringProperty(
name="PO File Path",
name="PO Work File Path",
description="Path to the relevant po file in branches",
description="Path to the relevant po file in the work repository",
subtype='FILE_PATH',
default="",
)

po_path_trunk: StringProperty(
po_path_blender: StringProperty(
name="PO Trunk File Path",
name="PO Blender File Path",
description="Path to the relevant po file in trunk",
description="Path to the relevant po file in Blender's source repository",
subtype='FILE_PATH',
default="",
)

mo_path_trunk: StringProperty(
name="MO File Path",
description="Path to the relevant mo file",
subtype='FILE_PATH',
default="",
)

po_path_git: StringProperty(
name="PO Git Master File Path",
description="Path to the relevant po file in Blender's translations git repository",
subtype='FILE_PATH',
default="",
)
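The renamed po_path_blender property follows the usual PropertyGroup pattern for FILE_PATH strings. A self-contained sketch of how such a property is declared and attached to the window manager; the DemoPaths name and demo_paths attribute are made up for illustration:

import bpy
from bpy.props import StringProperty, PointerProperty

class DemoPaths(bpy.types.PropertyGroup):
    # Hypothetical stand-in for I18nUpdateTranslationLanguage's path properties.
    po_path: StringProperty(
        name="PO Work File Path",
        description="Path to the relevant po file in the work repository",
        subtype='FILE_PATH',  # draws with a file-browser button in the UI
        default="",
    )

def register():
    bpy.utils.register_class(DemoPaths)
    # Attach an instance to the window manager, as the add-on does with its settings.
    bpy.types.WindowManager.demo_paths = PointerProperty(type=DemoPaths)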
@ -92,7 +78,7 @@ class I18nUpdateTranslationSettings(PropertyGroup):
langs: CollectionProperty(
name="Languages",
type=I18nUpdateTranslationLanguage,
description="Languages to update in branches",
description="Languages to update in work repository",
)

active_lang: IntProperty(
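langs (a CollectionProperty) and active_lang (an IntProperty) work as a pair: template_list displays the collection and writes the selected row index into the int property. A minimal sketch of that pairing, with illustrative Demo* names:

import bpy
from bpy.props import CollectionProperty, IntProperty, PointerProperty

class DemoLang(bpy.types.PropertyGroup):
    # PropertyGroup already provides a "name" StringProperty, enough for a demo row.
    pass

class DemoSettings(bpy.types.PropertyGroup):
    langs: CollectionProperty(type=DemoLang)  # the rows template_list displays
    active_lang: IntProperty(default=0)       # index of the highlighted row

def register():
    bpy.utils.register_class(DemoLang)        # register the item type first
    bpy.utils.register_class(DemoSettings)
    bpy.types.WindowManager.demo_i18n = PointerProperty(type=DemoSettings)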
@ -140,34 +126,35 @@ class UI_PT_i18n_update_translations_settings(Panel):

def draw(self, context):
layout = self.layout
i18n_sett = context.window_manager.i18n_update_svn_settings
i18n_sett = context.window_manager.i18n_update_settings

if not i18n_sett.is_init and bpy.ops.ui.i18n_updatetranslation_svn_init_settings.poll():
if not i18n_sett.is_init and bpy.ops.ui.i18n_updatetranslation_init_settings.poll():
# Cannot call the operator from here, this code might run while `pyrna_write_check()` returns False
# (which prevents any operator call from Python), during initialization of Blender.
UI_OT_i18n_updatetranslation_svn_init_settings.execute_static(context, settings.settings)
UI_OT_i18n_updatetranslation_init_settings.execute_static(context, settings.settings)

if not i18n_sett.is_init:
layout.label(text="Could not init languages data!")
layout.label(text="Please edit the preferences of the UI Translate add-on")
layout.operator("ui.i18n_updatetranslation_svn_init_settings", text="Init Settings")
layout.operator("ui.i18n_updatetranslation_init_settings", text="Init Settings")
else:
split = layout.split(factor=0.75)
split.template_list("UI_UL_i18n_languages", "", i18n_sett, "langs", i18n_sett, "active_lang", rows=8)
col = split.column()
col.operator("ui.i18n_updatetranslation_svn_init_settings", text="Reset Settings")
col.operator("ui.i18n_updatetranslation_init_settings", text="Reset Settings")
deselect = any(l.use for l in i18n_sett.langs)
op = col.operator("ui.i18n_updatetranslation_svn_settings_select",
op = col.operator("ui.i18n_updatetranslation_settings_select",
text="Deselect All" if deselect else "Select All")
op.use_invert = False
op.use_select = not deselect
col.operator("ui.i18n_updatetranslation_svn_settings_select", text="Invert Selection").use_invert = True
col.operator("ui.i18n_updatetranslation_settings_select", text="Invert Selection").use_invert = True
col.separator()
col.operator("ui.i18n_updatetranslation_svn_branches", text="Update Branches")
col.operator("ui.i18n_updatetranslation_work_repo", text="Update Work Repo")
col.operator("ui.i18n_updatetranslation_svn_trunk", text="Update Trunk")
col.operator("ui.i18n_cleanuptranslation_work_repo", text="Clean up Work Repo")
col.separator()
col.operator("ui.i18n_cleanuptranslation_svn_branches", text="Clean up Branches")
col.operator("ui.i18n_updatetranslation_blender_repo", text="Update Blender Repo")
col.operator("ui.i18n_updatetranslation_svn_statistics", text="Statistics")
col.separator()
col.operator("ui.i18n_updatetranslation_statistics", text="Statistics")

if i18n_sett.active_lang >= 0 and i18n_sett.active_lang < len(i18n_sett.langs):
lng = i18n_sett.langs[i18n_sett.active_lang]
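As the Select All / Invert Selection buttons above show, layout.operator() returns an OperatorProperties block whose fields are set before the button is pressed. A compact sketch of that idiom inside a hypothetical panel (the DEMO_PT class and its placement are invented for illustration):

import bpy

class DEMO_PT_select_buttons(bpy.types.Panel):
    bl_label = "Demo Select Buttons"
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = "Demo"

    def draw(self, context):
        layout = self.layout
        # layout.operator() returns the properties the button will invoke with.
        op = layout.operator("ui.i18n_updatetranslation_settings_select", text="Select All")
        op.use_select = True
        op.use_invert = False
        # One-liner form when a single property is set:
        layout.operator("ui.i18n_updatetranslation_settings_select",
                        text="Invert Selection").use_invert = True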
@ -177,9 +164,7 @@ class UI_PT_i18n_update_translations_settings(Panel):
row.label(text="[{}]: \"{}\" ({})".format(lng.uid, iface_(lng.name), lng.num_id), translate=False)
row.prop(lng, "use", text="")
col.prop(lng, "po_path")
col.prop(lng, "po_path_trunk")
col.prop(lng, "po_path_blender")
col.prop(lng, "mo_path_trunk")
col.prop(lng, "po_path_git")
layout.separator()
layout.prop(i18n_sett, "pot_path")

@ -196,10 +181,10 @@ class UI_PT_i18n_update_translations_settings(Panel):

# Operators ###################################################################

class UI_OT_i18n_updatetranslation_svn_init_settings(Operator):
class UI_OT_i18n_updatetranslation_init_settings(Operator):
"""Init settings for i18n svn's update operators"""
"""Init settings for i18n files update operators"""

bl_idname = "ui.i18n_updatetranslation_svn_init_settings"
bl_idname = "ui.i18n_updatetranslation_init_settings"
bl_label = "Init I18n Update Settings"
bl_option = {'REGISTER'}

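The init-settings operator splits its logic into a static method so the panel's draw() code can run it even when operator dispatch is forbidden (the pyrna_write_check() case noted above). A stripped-down sketch of that structure; the class name and the _state dict are stand-ins for the real settings storage:

import bpy

class DemoInitOperator(bpy.types.Operator):
    # Illustrative skeleton of UI_OT_i18n_updatetranslation_init_settings.
    bl_idname = "ui.demo_init_settings"
    bl_label = "Demo Init Settings"
    bl_options = {'REGISTER'}

    _state = {"is_init": False}  # stand-in for the real i18n_update_settings

    @staticmethod
    def execute_static(context, self_settings):
        # Shared body: callable directly from draw() code, no operator dispatch.
        DemoInitOperator._state["is_init"] = True

    def execute(self, context):
        self.execute_static(context, getattr(self, "settings", None))
        if not self._state["is_init"]:
            return {'CANCELLED'}
        return {'FINISHED'}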
@ -209,28 +194,27 @@ class UI_OT_i18n_updatetranslation_svn_init_settings(Operator):

@staticmethod
def execute_static(context, self_settings):
i18n_sett = context.window_manager.i18n_update_svn_settings
i18n_sett = context.window_manager.i18n_update_settings

# First, create the list of languages from settings.
i18n_sett.langs.clear()
root_br = self_settings.BRANCHES_DIR
root_work = self_settings.WORK_DIR
root_tr_po = self_settings.TRUNK_PO_DIR
root_blender_po = self_settings.BLENDER_I18N_PO_DIR
root_git_po = self_settings.GIT_I18N_PO_DIR
print(root_work)
root_tr_mo = os.path.join(self_settings.TRUNK_DIR, self_settings.MO_PATH_TEMPLATE, self_settings.MO_FILE_NAME)
print(root_blender_po)
if not (os.path.isdir(root_br) and os.path.isdir(root_tr_po)):
print(self_settings.FILE_NAME_POT)
if not (os.path.isdir(root_work) and os.path.isdir(root_blender_po)):
i18n_sett.is_init = False
return;
for can_use, uid, num_id, name, isocode, po_path_branch in utils_i18n.list_po_dir(root_br, self_settings):
for can_use, uid, num_id, name, isocode, po_path_work in utils_i18n.list_po_dir(root_work, self_settings):
lng = i18n_sett.langs.add()
lng.use = can_use
lng.uid = uid
lng.num_id = num_id
lng.name = name
if can_use:
lng.po_path = po_path_branch
lng.po_path = po_path_work
lng.po_path_trunk = os.path.join(root_tr_po, isocode + ".po")
lng.po_path_blender = os.path.join(root_blender_po, isocode + ".po")
lng.mo_path_trunk = root_tr_mo.format(isocode)
lng.po_path_git = os.path.join(root_git_po, isocode + ".po")

i18n_sett.pot_path = self_settings.FILE_NAME_POT
i18n_sett.is_init = True
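utils_i18n.list_po_dir is part of the i18n tools and not shown in this diff, but the call site makes its yield shape visible: (can_use, uid, num_id, name, isocode, po_path). A purely hypothetical re-implementation for a flat directory of <isocode>.po files, only to illustrate the loop above; the real function derives uid/num_id/name from the add-on's language settings, which are faked here from the file name:

import os

def list_po_dir_sketch(root_work):
    # Hypothetical: yields tuples shaped like the loop above expects.
    for num_id, fname in enumerate(sorted(os.listdir(root_work))):
        if not fname.endswith(".po"):
            continue
        isocode = fname[:-3]
        po_path = os.path.join(root_work, fname)
        can_use = os.path.getsize(po_path) > 0  # invented usability check
        yield can_use, isocode, num_id, isocode, isocode, po_path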
@ -241,18 +225,17 @@ class UI_OT_i18n_updatetranslation_svn_init_settings(Operator):

self.execute_static(context, self.settings)

if context.window_manager.i18n_update_svn_settings.is_init is False:
if context.window_manager.i18n_update_settings.is_init is False:
return {'CANCELLED'}
return {'FINISHED'}


class UI_OT_i18n_updatetranslation_svn_settings_select(Operator):
class UI_OT_i18n_updatetranslation_settings_select(Operator):
"""(De)select (or invert selection of) all languages for i18n svn's update operators"""
"""(De)select (or invert selection of) all languages for i18n files update operators"""

bl_idname = "ui.i18n_updatetranslation_svn_settings_select"
bl_idname = "ui.i18n_updatetranslation_settings_select"
bl_label = "Init I18n Update Select Languages"

# Operator Arguments
use_select: BoolProperty(
name="Select All",
description="Select all if True, else deselect all",
@ -264,7 +247,6 @@ class UI_OT_i18n_updatetranslation_svn_settings_select(Operator):
description="Inverse selection (overrides 'Select All' when True)",
default=False,
)
# /End Operator Arguments

@classmethod
def poll(cls, context):
@ -272,10 +254,10 @@ class UI_OT_i18n_updatetranslation_svn_settings_select(Operator):

def execute(self, context):
if self.use_invert:
for lng in context.window_manager.i18n_update_svn_settings.langs:
for lng in context.window_manager.i18n_update_settings.langs:
lng.use = not lng.use
else:
for lng in context.window_manager.i18n_update_svn_settings.langs:
for lng in context.window_manager.i18n_update_settings.langs:
lng.use = self.use_select
return {'FINISHED'}

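The execute() above is a plain select/deselect/invert toggle over the per-language use flags. The same logic as a pure function, easy to unit-test outside Blender:

def apply_selection(flags, use_select, use_invert):
    # Return new 'use' flags: inverted if use_invert, else all set to use_select.
    if use_invert:
        return [not f for f in flags]
    return [use_select for _f in flags]

assert apply_selection([True, False], use_select=False, use_invert=True) == [False, True]
assert apply_selection([True, False], use_select=True, use_invert=False) == [True, True]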
@ -285,6 +267,6 @@ classes = (
I18nUpdateTranslationSettings,
UI_UL_i18n_languages,
UI_PT_i18n_update_translations_settings,
UI_OT_i18n_updatetranslation_svn_init_settings,
UI_OT_i18n_updatetranslation_init_settings,
UI_OT_i18n_updatetranslation_svn_settings_select,
UI_OT_i18n_updatetranslation_settings_select,
)
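Finally, the updated classes tuples feed Blender's usual registration pattern. A sketch of the register/unregister pair such an add-on module typically exposes; the empty tuple here is a stand-in for the classes assembled above:

import bpy

classes = ()  # the renamed operator/panel/property-group classes

def register():
    for cls in classes:
        bpy.utils.register_class(cls)

def unregister():
    # Unregister in reverse order so dependents are removed first.
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)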