WIP: X3D HAnim with Blender bones (no animation yet present) #23
@@ -18,6 +18,7 @@ material_cache = {}

conversion_scale = 1.0

EPSILON = 0.0000001 # Very crude.

TIME_MULTIPLIER = 250


def imageConvertCompat(path):
@@ -359,6 +360,9 @@ class vrmlNode(object):

'node_type',
'parent',
'children',
'skeleton', # TODO no joints, segments or sites yet.

Cedric Steiert commented: Usually todos follow this pattern: `TODO: i want to be awesome!`

'skinCoord', # this is intentionally NOT skin_coord, but I don't know if HAnim should be here at all

Cedric Steiert commented: snake_case: `skin_coord`

'skin',
'parent',
'array_data',
'reference',
@@ -1594,8 +1598,11 @@ def translateTexTransform(node, ancestry):


def getFinalMatrix(node, mtx, ancestry, global_matrix):

transform_nodes = [node_tx for node_tx in ancestry if node_tx.getSpec() == 'Transform']
if node.getSpec() == 'Transform':
transform_nodes = [node_tx for node_tx in ancestry if node_tx.getSpec() in ('Transform', 'HAnimHumanoid', 'HAnimJoint', 'HAnimSite', 'HAnimDisplacer')]
if node.getSpec() in ('Transform', 'HAnimHumanoid', 'HAnimJoint', 'HAnimSite', 'HAnimDisplacer'):
# This comment is here so I can quick replace the above in testing

Cedric Steiert commented: dead code, either remove the commented out code pieces or explain why it's currently not used

#transform_nodes = [node_tx for node_tx in ancestry if node_tx.getSpec() == 'Transform']
#if node.getSpec() == 'Transform':
transform_nodes.append(node)
transform_nodes.reverse()
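For context, a minimal sketch (not this PR's exact code) of how the reversed `transform_nodes` list could be folded into one matrix; it assumes a helper `translateTransform(node_tx, ancestry)` that returns a 4x4 `mathutils.Matrix` for a single Transform/HAnim node:

```python
import mathutils

def accumulate_final_matrix(transform_nodes, ancestry, global_matrix, mtx=None):
    # Sketch only: translateTransform() is assumed to exist in the importer.
    if mtx is None:
        mtx = mathutils.Matrix()              # identity
    for node_tx in transform_nodes:           # reversed: innermost node first
        mtx = translateTransform(node_tx, ancestry) @ mtx
    return global_matrix @ mtx                # importer's global matrix applied last
```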
@@ -1639,6 +1646,16 @@ def set_new_float_color_attribute(bpymesh, color_data, name: str = "ColorPerCorn

bpymesh.color_attributes.new(name, 'FLOAT_COLOR', 'CORNER')
bpymesh.color_attributes[name].data.foreach_set("color", color_data)


# TODO not tested
def set_new_float_color_attribute_curve(bpycurve, color_data, name: str = "ColorPerCorner", convert_to_linear: bool = True):
if (convert_to_linear):
# convert color spaces to account for api changes from legacy to newer api
color_data = [srgb_to_linear(col_val) for col_val in color_data]
mat = bpy.data.materials.new(name="ColorMaterial")
mat.color = (1, 0, 0, 1)
bpycurve.materials.append(mat)  # was `curve_obj.data.materials.append(mat)`, but `curve_obj` is undefined here; the curve data passed in is `bpycurve`
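As a hedged usage sketch of the mesh variant above (assuming a mesh that already has faces, so corner/loop data exists):

```python
import bpy

mesh = bpy.data.meshes.new("demo")
mesh.from_pydata([(0, 0, 0), (1, 0, 0), (1, 1, 0)], [], [(0, 1, 2)])  # one triangle

corner_count = len(mesh.loops)                   # 3 corners for one triangle
flat_rgba = [1.0, 0.0, 0.0, 1.0] * corner_count  # red, fully opaque, one RGBA per corner
attr = mesh.color_attributes.new("ColorPerCorner", 'FLOAT_COLOR', 'CORNER')
attr.data.foreach_set("color", flat_rgba)
```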
# Assumes that the mesh has polygons.
|
||||
def importMesh_ApplyColors(bpymesh, geom, ancestry):
|
||||
colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
|
||||
@@ -1907,6 +1924,80 @@ def importMesh_TriangleFanSet(geom, ancestry):
|
||||
bpymesh.polygons.foreach_set("vertices", [x for x in triangles()])
|
||||
return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
|
||||
|
||||
# TODO, not for this release
|
||||
def processColors_IndexedLineSet(geom, ancestry, bpycurve, lines):
|
||||
colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
|
||||
index = geom.getFieldAsArray('coordIndex', 0, ancestry)
|
||||
if colors:
|
||||
cco = []
|
||||
if colors.getSpec() == 'ColorRGBA':
|
||||
rgb = colors.getFieldAsArray('color', 4, ancestry)
|
||||
else:
|
||||
# Array of arrays; no need to flatten
|
||||
rgb = [c + [1.0] for c in colors.getFieldAsArray('color', 3, ancestry)]
|
||||
|
||||
color_per_vertex = geom.getFieldAsBool('colorPerVertex', True, ancestry)
|
||||
color_index = geom.getFieldAsArray('colorIndex', 0, ancestry)
|
||||
has_color_index = len(color_index) != 0
|
||||
has_valid_color_index = index.count(-1) == color_index.count(-1)
|
||||
|
||||
# rebuild a corrupted colorIndex field (assuming the end of face markers -1 are missing)
|
||||
if has_color_index and not has_valid_color_index:
|
||||
# remove all -1 beforehand to ensure clean working copy
|
||||
color_index = [x for x in color_index if x != -1]
|
||||
# copy all -1 from coordIndex to colorIndex
|
||||
for i, v in enumerate(index):
|
||||
if v == -1:
|
||||
color_index.insert(i, -1)
|
||||
|
||||
if color_per_vertex and has_color_index: # Color per vertex with index
|
||||
cco = [cco for f in processPerVertexIndex(color_index)
|
||||
for v in f
|
||||
for cco in rgb[v]]
|
||||
elif color_per_vertex: # Color per vertex without index
|
||||
# Use the vertex value by default; if the lengths mismatch, fall back to the positional value to access the rgb value.

# Far from ideal, but should work in most cases.
|
||||
try:
|
||||
cco = [cco for f in lines
|
||||
for v in f
|
||||
for cco in rgb[v]]
|
||||
except IndexError:
|
||||
print("reattempting reading color_per_vertex without index by using positional value because vertex value failed")
|
||||
cco = [cco for f in lines
|
||||
for (i, v) in enumerate(f)
|
||||
for cco in rgb[i]]
|
||||
elif color_index: # Color per face with index
|
||||
cco = [cco for (i, f) in enumerate(lines)
|
||||
for j in f
|
||||
for cco in rgb[color_index[i]]]
|
||||
elif len(lines) > len(rgb): # Static color per face without index, when all lines have the same color.
|
||||
# Exported from SOLIDWORKS, see: `blender/blender-addons#105398`.
|
||||
cco = [cco for (i, f) in enumerate(lines)
|
||||
for j in f
|
||||
for cco in rgb[0]]
|
||||
else: # Color per face without index
|
||||
cco = [cco for (i, f) in enumerate(lines)
|
||||
for j in f
|
||||
for cco in rgb[i]]
|
||||
|
||||
for i, spline in enumerate(bpycurve.splines):
|
||||
# Example: Color based on spline index
|
||||
# TODO do RGBA
|
||||
if cco is not None:
|
||||
color = cco[i * 3:(i+1) * 3]
|
||||
color.append(1) # TODO include transparency
|
||||
else:
|
||||
color = (1, 0, 0, 1)
|
||||
print(f"Color = {color}")
|
||||
# Apply color to the spline directly
|
||||
spline.material_index = i # Assign a material index (optional)
|
||||
|
||||
# Create a material for the spline
|
||||
if len(bpy.data.materials) <= i:
|
||||
mat = bpy.data.materials.new(name="ColorMaterial_" + str(i))
|
||||
mat.diffuse_color = color
|
||||
bpycurve.materials.append(mat)
|
||||
|
||||
|
||||
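To illustrate the colorIndex repair in processColors_IndexedLineSet above, a small standalone example with hypothetical data:

```python
# Hypothetical data: two polylines; the end-of-face markers (-1) are missing in colorIndex.
coord_index = [0, 1, 2, -1, 3, 4, -1]
color_index = [0, 0, 0, 1, 1]                 # corrupted: no -1 markers

color_index = [x for x in color_index if x != -1]   # clean working copy
for i, v in enumerate(coord_index):
    if v == -1:
        color_index.insert(i, -1)             # copy the markers over from coordIndex

print(color_index)   # [0, 0, 0, -1, 1, 1, -1] -- markers restored
```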
def importMesh_IndexedFaceSet(geom, ancestry):
|
||||
# Saw the following structure in X3Ds: the first mesh has a huge set
|
||||
@@ -2503,6 +2594,8 @@ def importMesh_IndexedLineSet(geom, ancestry):
|
||||
nu.points.add(len(line) - 1) # the new nu has 1 point to begin with
|
||||
for il, pt in zip(line, nu.points):
|
||||
pt.co[0:3] = points[il]
|
||||
# TODO
|
||||
# processColors_IndexedLineSet(geom, ancestry, bpycurve, lines)
|
||||
|
||||
return bpycurve
|
||||
|
||||
@@ -2850,6 +2943,8 @@ def appearance_LoadImageTextureFile(ima_urls, node):
|
||||
bpyima = None
|
||||
for f in ima_urls:
|
||||
dirname = os.path.dirname(node.getFilename())
|
||||
if f.startswith('"'):
|
||||
f = f[1:-1] # strip the leading and trailing quotes (TODO: verify this handles both)
|
||||
bpyima = image_utils.load_image(f, dirname,
|
||||
place_holder=False,
|
||||
recursive=False,
|
||||
@@ -3163,7 +3258,7 @@ def importShape_ProcessObject(
|
||||
# Can transform data or object, better the object so we can instance
|
||||
# the data
|
||||
# bpymesh.transform(getFinalMatrix(node))
|
||||
bpyob = node.blendObject = bpy.data.objects.new(vrmlname, bpydata)
|
||||
bpyob = node.blendData = node.blendObject = bpy.data.objects.new(vrmlname, bpydata)
|
||||
bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
|
||||
bpycollection.objects.link(bpyob)
|
||||
bpyob.select_set(True)
|
||||
@@ -3171,6 +3266,8 @@ def importShape_ProcessObject(
|
||||
if DEBUG:
|
||||
bpyob["source_line_no"] = geom.lineno
|
||||
|
||||
return bpyob
|
||||
|
||||
|
||||
def importText(geom, ancestry):
|
||||
fmt = geom.getChildBySpec('FontStyle')
|
||||
@@ -3253,6 +3350,7 @@ def importShape(bpycollection, node, ancestry, global_matrix):
|
||||
|
||||
bpydata = None
|
||||
geom_spec = geom.getSpec()
|
||||
coord = geom.getChildBySpec('Coordinate')
|
||||
|
||||
# ccw is handled by every geometry importer separately; some
|
||||
# geometries are easier to flip than others
|
||||
@@ -3266,14 +3364,127 @@ def importShape(bpycollection, node, ancestry, global_matrix):
|
||||
|
||||
# There are no geometry importers that can legally return
|
||||
# no object. It's either a bpy object, or an exception
|
||||
importShape_ProcessObject(
|
||||
bpypo = importShape_ProcessObject(
|
||||
bpycollection, vrmlname, bpydata, geom, geom_spec,
|
||||
node, bpymat, tex_has_alpha, texmtx,
|
||||
ancestry, global_matrix)
|
||||
|
||||
if bpypo is None:
|
||||
print('ImportX3D warning: importShape_ProcessObject did not return a shape for HAnim "%s"' % vrmlname)
|
||||
else:
|
||||
print('\tImportX3D warning: unsupported type "%s"' % geom_spec)
|
||||
bpypo = None
|
||||
return [ geom, bpypo, coord ]
|
||||
|
||||
|
||||
def importHAnimHumanoid(bpycollection, node, ancestry, global_matrix, joints, segments, jointSkin):
|
||||
Cedric Steiert commented: dead code
|
||||
vrmlname = node.getDefName()
|
||||
# print(vrmlname)
|
||||
prefix = ''
|
||||
if vrmlname:
|
||||
first_underscore = vrmlname.find('_')
|
||||
if first_underscore > 0:
|
||||
prefix = vrmlname[:first_underscore+1]
|
||||
else:
|
||||
vrmlname = 'HAnimHumanoid'
|
||||
|
||||
Cedric Steiert commented: like above, could get simplified using `or`
|
||||
# Create armature and object
|
||||
armature_data = bpy.data.armatures.new(prefix+"humanoid_root")
|
||||
skeleton = bpy.data.objects.new(vrmlname, armature_data)
|
||||
skeleton.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
|
||||
|
||||
# Link object to collection and make it active
|
||||
bpycollection.objects.link(skeleton)
|
||||
bpy.context.view_layer.objects.active = skeleton
|
||||
skeleton.select_set(True)
|
||||
|
||||
# Enter edit mode
|
||||
Cedric Steiert commented: like above, could get simplified using `or`
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
# Store reference to the object on the node
|
||||
bpyob = node.blendData = node.blendObject = skeleton
|
||||
|
||||
|
||||
# Process children joints, including USE, if present
|
||||
child = node.getChildBySpec('HAnimJoint') # 'HAnimJoint'
|
||||
if child:
|
||||
Cedric Steiert commented: better move the newline after the validation section
|
||||
first_joint_name = child.getDefName() or child.getFieldAsString('name', None, ancestry)
|
||||
joint_center = child.getFieldAsFloatTuple('center', (0.0, 0.0, 0.0), ancestry)
|
||||
if DEBUG:
|
||||
Cedric Steiert commented: is unpacking needed or just the conversion to tuple? `joints.append((child_bone_name, tuple(child_center), tuple(parent_center), skinCoordWeight, skinCoordIndex))`
|
||||
print(f"Joint {first_joint_name} {joint_center}")
|
||||
importHAnimJoint(joints, segments, child, ancestry, first_joint_name, parent_center=joint_center[:])
|
||||
|
||||
# Create bones for each joint
|
||||
for joint_name, joint_start, joint_end, skinCoordWeight, skinCoordIndex in joints:
|
||||
if not joint_name:
|
||||
joint_name = vrmlname
|
||||
new_segment = armature_data.edit_bones.new(joint_name)
|
||||
child.blendData = child.blendObject = new_segment
|
||||
matrix_world_inv = skeleton.matrix_world.inverted()
|
||||
new_segment.head = joint_end
|
||||
new_segment.tail = joint_start
|
||||
# if joint_name != vrmlname:
|
||||
jointSkin[joint_name] = {
|
||||
'skinCoordWeight' : skinCoordWeight,
|
||||
'skinCoordIndex' : skinCoordIndex
|
||||
}
|
||||
for segment in segments:
|
||||
parent_joint, child_joint = segment
|
||||
if parent_joint in skeleton.data.edit_bones:
|
||||
parent = skeleton.data.edit_bones[parent_joint] # some things don't have a parent
|
||||
else:
|
||||
parent = None
|
||||
|
||||
if child_joint in skeleton.data.edit_bones:
|
||||
child = skeleton.data.edit_bones[child_joint]
|
||||
else:
|
||||
child = armature_data.edit_bones.new(child_joint)
|
||||
child.parent = parent
|
||||
else:
|
||||
print("Couldn't find child HAnimJoint")
|
||||
|
||||
|
||||
return skeleton
|
||||
|
||||
|
||||
def importHAnimJoints(joints, segments, children, ancestry, parent_bone_name, parent_center=[0, 0, 0]):
|
||||
for child in children:
|
||||
child_bone_name = child.getDefName() or child.getFieldAsString('name', None, ancestry) or parent_bone_name
|
||||
segments.append((parent_bone_name, child_bone_name))
|
||||
importHAnimJoint(joints, segments, child, ancestry, parent_bone_name, parent_center)
|
||||
|
||||
def importHAnimJoint(joints, segments, child, ancestry, parent_bone_name=None, parent_center=[0, 0, 0]):
|
||||
if child:
|
||||
child_bone_name = child.getDefName()
|
||||
if not child_bone_name:
|
||||
child_bone_name = child.getFieldAsString('name', None, ancestry)
|
||||
if not child_bone_name:
|
||||
child_bone_name = 'Armature'
|
||||
child_center = child.getFieldAsFloatTuple('center', None, ancestry)
|
||||
skinCoordWeight = child.getFieldAsArray('skinCoordWeight', 0, ancestry)
|
||||
skinCoordIndex = child.getFieldAsArray('skinCoordIndex', 0, ancestry)
|
||||
# I don't understand reviewer's comment:
|
||||
# "better move the nl after the validation section"
|
||||
if skinCoordWeight is None:
|
||||
skinCoordWeight = ()
|
||||
if skinCoordIndex is None:
|
||||
skinCoordIndex = ()
|
||||
|
||||
if not child_center:
|
||||
child_center = [0, 0, 0]
|
||||
joints.append((child_bone_name, tuple(child_center), tuple(parent_center), skinCoordWeight, skinCoordIndex))
|
||||
|
||||
# print(f"Joint IHAJ {joints[-1]}")
|
||||
children = child.getChildrenBySpec('HAnimJoint')
|
||||
if children:
|
||||
importHAnimJoints(joints, segments, children, ancestry, child_bone_name, child_center)
|
||||
else:
|
||||
childname = child.getFieldAsString('name', '', ancestry)
|
||||
if DEBUG:
|
||||
print(f"Didn't find children, {children} for {childname}")
|
||||
else:
|
||||
print(f"Didn't find child, {child}")
|
||||
|
||||
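To make the recursion's output concrete, a hypothetical two-joint skeleton would leave the shared lists looking roughly like this (names, centers and skin data are invented for illustration):

```python
joints = [
    # (bone_name, child_center, parent_center, skinCoordWeight, skinCoordIndex)
    ('humanoid_root', (0.0, 0.9, 0.0), (0.0, 0.9, 0.0), (), ()),
    ('sacroiliac',    (0.0, 0.92, 0.0), (0.0, 0.9, 0.0), (1.0, 0.5), (42, 43)),
]

segments = [
    # (parent_bone_name, child_bone_name): later turned into bone parenting
    ('humanoid_root', 'sacroiliac'),
]
```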
# -----------------------------------------------------------------------------------
|
||||
# Lighting
|
||||
|
||||
@@ -3416,10 +3627,24 @@ def importTransform(bpycollection, node, ancestry, global_matrix):
|
||||
|
||||
bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
|
||||
|
||||
# so they are not too annoying
|
||||
# so the EMPTY is not too annoying
|
||||
bpyob.empty_display_type = 'PLAIN_AXES'
|
||||
bpyob.empty_display_size = 0.2
|
||||
|
||||
def importHAnimSegment(bpycollection, node, ancestry, global_matrix):
|
||||
name = node.getDefName() or node.getFieldAsString('name', None, ancestry) or 'HAnimSegment'
|
||||
|
||||
bpyob = node.blendData = node.blendObject = bpy.data.objects.new(name, None)
|
||||
bpycollection.objects.link(bpyob)
|
||||
bpyob.select_set(True)
|
||||
|
||||
bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
|
||||
|
||||
bpyob.empty_display_type = 'PLAIN_AXES'
|
||||
Cedric Steiert commented: `curoff += 3`
|
||||
# so the EMPTY is not too annoying
|
||||
bpyob.empty_display_size = 0.2
|
||||
|
||||
return bpyob
|
||||
|
||||
#def importTimeSensor(node):
|
||||
def action_fcurve_ensure(action, data_path, array_index):
|
||||
@@ -3429,7 +3654,6 @@ def action_fcurve_ensure(action, data_path, array_index):
|
||||
|
||||
return action.fcurves.new(data_path=data_path, index=array_index)
|
||||
|
||||
|
||||
def translatePositionInterpolator(node, action, ancestry):
|
||||
key = node.getFieldAsArray('key', 0, ancestry)
|
||||
keyValue = node.getFieldAsArray('keyValue', 3, ancestry)
|
||||
@@ -3437,26 +3661,62 @@ def translatePositionInterpolator(node, action, ancestry):
|
||||
loc_x = action_fcurve_ensure(action, "location", 0)
|
||||
loc_y = action_fcurve_ensure(action, "location", 1)
|
||||
loc_z = action_fcurve_ensure(action, "location", 2)
|
||||
if DEBUG:
|
||||
print (f"key {key} keyValue {keyValue} {action}")
|
||||
|
||||
for i, time in enumerate(key):
|
||||
try:
|
||||
x, y, z = keyValue[i]
|
||||
except:
|
||||
if DEBUG:
|
||||
print (f"i {i} x {x} y {y} z {z}")
|
||||
except: # There are 4 possible exceptions here, so just use a wildcard
|
||||
continue
|
||||
|
||||
loc_x.keyframe_points.insert(time, x)
|
||||
loc_y.keyframe_points.insert(time, y)
|
||||
loc_z.keyframe_points.insert(time, z)
|
||||
loc_x.keyframe_points.insert(time*TIME_MULTIPLIER, x)
|
||||
loc_y.keyframe_points.insert(time*TIME_MULTIPLIER, y)
|
||||
loc_z.keyframe_points.insert(time*TIME_MULTIPLIER, z)
|
||||
|
||||
for fcu in (loc_x, loc_y, loc_z):
|
||||
for kf in fcu.keyframe_points:
|
||||
kf.interpolation = 'BEZIER'
|
||||
|
||||
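For reference, a minimal sketch of the frame mapping used throughout these interpolators: X3D interpolator keys run from 0.0 to 1.0 and are scaled by TIME_MULTIPLIER (250 in this PR) into Blender frame numbers.

```python
TIME_MULTIPLIER = 250  # frames per unit of interpolator key time (value used in this PR)

def x3d_key_to_frame(key_time):
    """Map an X3D interpolator key (0.0-1.0) to a Blender frame number."""
    return key_time * TIME_MULTIPLIER

# key 0.0 -> frame 0.0, key 0.5 -> frame 125.0, key 1.0 -> frame 250.0
```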
Cedric Steiert commented: could get indented further, as `pose_bone` is None if `skeleton` is None
|
||||
def translateCoordinateInterpolator(node, action, ancestry):
|
||||
key = node.getFieldAsArray('key', 0, ancestry)
|
||||
keyValue = node.getFieldAsArray('keyValue', 0, ancestry)
|
||||
offset = int(len(keyValue) / len(key) / 3) # number of values divided by number of key times, divided by the 3 axes
|
||||
if DEBUG:
|
||||
print(f"ci {offset} = {len(keyValue)} / {len(key)}")
|
||||
loc_x = action_fcurve_ensure(action, "location", 0)
|
||||
loc_y = action_fcurve_ensure(action, "location", 1)
|
||||
loc_z = action_fcurve_ensure(action, "location", 2)
|
||||
|
||||
curoff = 0
|
||||
for i, time in enumerate(key): # loop through time
|
||||
# 0 1 2
|
||||
for off in range(offset): # for each data point
|
||||
# 0 1 2 up to offset
|
||||
# curoff = i*offset+off
|
||||
#print(f" coordinate index {off} num coordinates {offset} time index {i} time {time} current offset {curoff}")
|
||||
# then a vec3f
|
||||
x = keyValue[curoff+0]
|
||||
y = keyValue[curoff+1]
|
||||
z = keyValue[curoff+2]
|
||||
loc_x.keyframe_points.insert(time*TIME_MULTIPLIER, x)
|
||||
loc_y.keyframe_points.insert(time*TIME_MULTIPLIER, y)
|
||||
loc_z.keyframe_points.insert(time*TIME_MULTIPLIER, z)
|
||||
|
||||
curoff += 3
|
||||
|
||||
for fcu in (loc_x, loc_y, loc_z):
|
||||
for kf in fcu.keyframe_points:
|
||||
kf.interpolation = 'LINEAR'
|
||||
|
||||
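A quick worked example of the offset arithmetic above, with hypothetical sizes:

```python
key = [0.0, 0.25, 0.5, 1.0]      # 4 key times (hypothetical)
points_per_key = 5               # 5 coordinate points animated per key
keyValue = [0.0] * (len(key) * points_per_key * 3)   # flat x, y, z triples

offset = int(len(keyValue) / len(key) / 3)
assert offset == points_per_key  # 60 values / 4 keys / 3 axes = 5 points
```

curoff then advances by 3 for every point read, walking the flat keyValue list in step with the nested loops.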
|
||||
def translateOrientationInterpolator(node, action, ancestry):
|
||||
def translateOrientationInterpolator(node, action, ancestry, to_node):
|
||||
key = node.getFieldAsArray('key', 0, ancestry)
|
||||
keyValue = node.getFieldAsArray('keyValue', 4, ancestry)
|
||||
|
||||
Cedric Steiert commented: which errors are expected? Currently it's a wildcard, thus ideally provide the exception type
|
||||
node.rotation_mode = 'XYZ'
|
||||
|
||||
rot_x = action_fcurve_ensure(action, "rotation_euler", 0)
|
||||
rot_y = action_fcurve_ensure(action, "rotation_euler", 1)
|
||||
rot_z = action_fcurve_ensure(action, "rotation_euler", 2)
|
||||
@@ -3464,22 +3724,35 @@ def translateOrientationInterpolator(node, action, ancestry):
|
||||
for i, time in enumerate(key):
|
||||
try:
|
||||
x, y, z, w = keyValue[i]
|
||||
except:
|
||||
except: # There are 4 possible exceptions here, so just use a wildcard
|
||||
continue
|
||||
|
||||
mtx = translateRotation((x, y, z, w))
|
||||
eul = mtx.to_euler()
|
||||
rot_x.keyframe_points.insert(time, eul.x)
|
||||
rot_y.keyframe_points.insert(time, eul.y)
|
||||
rot_z.keyframe_points.insert(time, eul.z)
|
||||
rot_x.keyframe_points.insert(time*TIME_MULTIPLIER, eul.x)
|
||||
rot_y.keyframe_points.insert(time*TIME_MULTIPLIER, eul.y)
|
||||
rot_z.keyframe_points.insert(time*TIME_MULTIPLIER, eul.z)
|
||||
|
||||
for fcu in (rot_x, rot_y, rot_z):
|
||||
for kf in fcu.keyframe_points:
|
||||
kf.interpolation = 'LINEAR'
|
||||
kf.interpolation = 'BEZIER'
|
||||
|
||||
def translateBoneOrientationInterpolator(node, action, ancestry, to_id=None, skeleton=None):
|
||||
key = node.getFieldAsArray('key', 0, ancestry)
|
||||
keyValue = node.getFieldAsArray('keyValue', 4, ancestry)
|
||||
|
||||
pose_bone = None
|
||||
if skeleton:
|
||||
pose_bone = skeleton.pose.bones.get(to_id)
|
||||
if pose_bone:
|
||||
pose_bone.rotation_mode = 'AXIS_ANGLE'
|
||||
for time, (x, y, z, w) in zip(key, keyValue):
|
||||
pose_bone.rotation_axis_angle = (w, x, y, z)
|
||||
pose_bone.keyframe_insert(data_path="rotation_axis_angle", frame=time * TIME_MULTIPLIER)
|
||||
|
||||
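A small standalone sketch of the axis-angle ordering above: X3D stores an orientation as (x, y, z, angle), while Blender pose bones expect (angle, x, y, z). The variable named `w` in the code above holds the angle; it is renamed here only for clarity.

```python
import math

x3d_rotation = (0.0, 0.0, 1.0, math.pi / 2)   # X3D SFRotation: axis x, y, z, then angle
x, y, z, angle = x3d_rotation
blender_axis_angle = (angle, x, y, z)          # Blender's rotation_axis_angle order

# With a real pose bone (assumed to exist), keyframing would look like:
# pose_bone.rotation_mode = 'AXIS_ANGLE'
# pose_bone.rotation_axis_angle = blender_axis_angle
# pose_bone.keyframe_insert(data_path="rotation_axis_angle", frame=time * TIME_MULTIPLIER)
```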
|
||||
# Untested!
|
||||
def translateScalarInterpolator(node, action, ancestry):
|
||||
def translateScaleInterpolator(node, action, ancestry):
|
||||
key = node.getFieldAsArray('key', 0, ancestry)
|
||||
keyValue = node.getFieldAsArray('keyValue', 4, ancestry)
|
||||
|
||||
@@ -3490,13 +3763,26 @@ def translateScalarInterpolator(node, action, ancestry):
|
||||
for i, time in enumerate(key):
|
||||
try:
|
||||
x, y, z = keyValue[i]
|
||||
except:
|
||||
except: # There are 4 possible exceptions here, so just use a wildcard
|
||||
continue
|
||||
|
||||
sca_x.keyframe_points.new(time, x)
|
||||
sca_y.keyframe_points.new(time, y)
|
||||
sca_z.keyframe_points.new(time, z)
|
||||
sca_x.keyframe_points.insert(time*TIME_MULTIPLIER, x)
|
||||
sca_y.keyframe_points.insert(time*TIME_MULTIPLIER, y)
|
||||
Cedric Steiert commented: print needed?
|
||||
sca_z.keyframe_points.insert(time*TIME_MULTIPLIER, z)
|
||||
|
||||
def translateScalarInterpolator(node, action, ancestry, to_node, data_path):
|
||||
key = node.getFieldAsArray('key', 0, ancestry)
|
||||
keyValue = node.getFieldAsArray('keyValue', 0, ancestry)
|
||||
|
||||
scalar = action_fcurve_ensure(action, data_path, 0)
|
||||
|
||||
for i, time in enumerate(key):
|
||||
try:
|
||||
s = keyValue[i]
|
||||
except: # There are 4 possible exceptions here, so just use a wildcard
|
||||
continue
|
||||
|
||||
scalar.keyframe_points.insert(time*TIME_MULTIPLIER, s)
|
||||
|
||||
def translateTimeSensor(node, action, ancestry):
|
||||
"""
|
||||
@@ -3527,14 +3813,7 @@ def translateTimeSensor(node, action, ancestry):
|
||||
if loop:
|
||||
time_cu.extend = Blender.IpoCurve.ExtendTypes.CYCLIC # or - EXTRAP, CYCLIC_EXTRAP, CONST,
|
||||
|
||||
|
||||
def importRoute(node, ancestry):
|
||||
"""
|
||||
Animation route only at the moment
|
||||
"""
|
||||
|
||||
if not hasattr(node, 'fields'):
|
||||
return
|
||||
def importRouteFromTo(node, from_id, from_type, to_id, to_type, ancestry, skeleton, hasMesh):
|
||||
|
||||
routeIpoDict = node.getRouteIpoDict()
|
||||
|
||||
@@ -3542,11 +3821,63 @@ def importRoute(node, ancestry):
|
||||
try:
|
||||
action = routeIpoDict[act_id]
|
||||
except:
|
||||
action = routeIpoDict[act_id] = bpy.data.actions.new('web3d_ipo')
|
||||
action = routeIpoDict[act_id] = bpy.data.actions.new(act_id)
|
||||
#print(f"return action {act_id} {action}")
|
||||
return action
|
||||
|
||||
# for getting definitions
|
||||
defDict = node.getDefDict()
|
||||
|
||||
if from_type == 'value_changed':
|
||||
if to_type in ('set_translation', 'set_position'): # set translation may need some matrix multiplication
|
||||
action = getIpo(to_id)
|
||||
set_data_from_node = defDict[from_id]
|
||||
if DEBUG:
|
||||
print(f"Trying to create a position interpolator for something from {from_id} to {to_id} (may need something special?)")
|
||||
translatePositionInterpolator(set_data_from_node, action, ancestry)
|
||||
|
||||
if to_type in {'rotation', "set_rotation"} and defDict[to_id].getSpec() == 'TextureTransform':
|
||||
action = getIpo(to_id)
|
||||
set_data_from_node = defDict[from_id]
|
||||
to_node = defDict[to_id]
|
||||
Cedric Steiert commented: any specific error to be expected?
|
||||
translateScalarInterpolator(set_data_from_node, action, ancestry, to_node, "rotation")
|
||||
elif to_type in {'set_orientation', 'rotation', "set_rotation"}:
|
||||
action = getIpo(to_id)
|
||||
set_data_from_node = defDict[from_id]
|
||||
to_node = defDict[to_id]
|
||||
if skeleton and skeleton.pose.bones.get(to_id):
|
||||
Cedric Steiert commented: could be simplified using `or`
|
||||
# print(f"Creating animation for joint {to_id}")
|
||||
translateBoneOrientationInterpolator(set_data_from_node, action, ancestry, to_id, skeleton)
|
||||
if not hasMesh:
|
||||
Cedric Steiert commented: print needed?
|
||||
# print(f"Creating orientation animation for {to_id}")
|
||||
Cedric Steiert commented: wi?
|
||||
translateOrientationInterpolator(set_data_from_node, action, ancestry, to_node)
|
||||
|
||||
if to_type == 'set_scale':
|
||||
action = getIpo(to_id)
|
||||
set_data_from_node = defDict[from_id]
|
||||
translateScaleInterpolator(set_data_from_node, action, ancestry)
|
||||
|
||||
if to_type == 'set_point':
|
||||
action = getIpo(to_id)
|
||||
set_data_from_node = defDict[from_id]
|
||||
translateCoordinateInterpolator(set_data_from_node, action, ancestry)
|
||||
|
||||
elif from_type == 'bindTime':
|
||||
action = getIpo(from_id)
|
||||
time_node = defDict[to_id]
|
||||
translateTimeSensor(time_node, action, ancestry)
|
||||
|
||||
def importRoute(node, ancestry, skeleton=None, hasMesh=None):
|
||||
"""
|
||||
Animation route only at the moment
|
||||
"""
|
||||
|
||||
if node.getFieldAsString("fromNode", None, ancestry) and node.getFieldAsString("toNode", None, ancestry) and node.getFieldAsString("fromField", None, ancestry) and node.getFieldAsString("toField", None, ancestry):
|
||||
pass
|
||||
elif not hasattr(node, 'fields'):
|
||||
# print(f"return not hasattr fields")
|
||||
return
|
||||
|
||||
"""
|
||||
Handles routing nodes to each other
|
||||
|
||||
@@ -3557,50 +3888,36 @@ ROUTE vpTs.fraction_changed TO vpOI.set_fraction
|
||||
ROUTE champFly001.bindTime TO vpTs.set_startTime
|
||||
"""
|
||||
|
||||
#from_id, from_type = node.id[1].split('.')
|
||||
#to_id, to_type = node.id[3].split('.')
|
||||
|
||||
#value_changed
|
||||
set_position_node = None
|
||||
set_orientation_node = None
|
||||
time_node = None
|
||||
|
||||
for field in node.fields:
|
||||
if field and field[0] == 'ROUTE':
|
||||
try:
|
||||
if len(node.fields) <= 0:
|
||||
from_id = node.getFieldAsString("fromNode", None, ancestry)
|
||||
from_type = node.getFieldAsString("fromField", None, ancestry)
|
||||
to_id = node.getFieldAsString("toNode", None, ancestry)
|
||||
to_type = node.getFieldAsString("toField", None, ancestry)
|
||||
if from_id and from_type and to_id and to_type:
|
||||
# print(f"ROUTE from {from_id}.{from_type} to {to_id}.{to_type}")
|
||||
importRouteFromTo(node, from_id, from_type, to_id, to_type, ancestry, skeleton, hasMesh)
|
||||
else:
|
||||
for field in node.fields:
|
||||
# print(f"return field {field}")
|
||||
if field and field[0] == 'ROUTE':
|
||||
from_id, from_type = field[1].split('.')
|
||||
to_id, to_type = field[3].split('.')
|
||||
except:
|
||||
print("Warning, invalid ROUTE", field)
|
||||
continue
|
||||
|
||||
if from_type == 'value_changed':
|
||||
if to_type == 'set_position':
|
||||
action = getIpo(to_id)
|
||||
set_data_from_node = defDict[from_id]
|
||||
translatePositionInterpolator(set_data_from_node, action, ancestry)
|
||||
|
||||
if to_type in {'set_orientation', 'rotation'}:
|
||||
action = getIpo(to_id)
|
||||
set_data_from_node = defDict[from_id]
|
||||
translateOrientationInterpolator(set_data_from_node, action, ancestry)
|
||||
|
||||
if to_type == 'set_scale':
|
||||
action = getIpo(to_id)
|
||||
set_data_from_node = defDict[from_id]
|
||||
translateScalarInterpolator(set_data_from_node, action, ancestry)
|
||||
|
||||
elif from_type == 'bindTime':
|
||||
action = getIpo(from_id)
|
||||
time_node = defDict[to_id]
|
||||
translateTimeSensor(time_node, action, ancestry)
|
||||
# print(f"ROUTE from {from_id}.{from_type} to {to_id}.{to_type}")
|
||||
importRouteFromTo(node, from_id, from_type, to_id, to_type, ancestry, skeleton, hasMesh)
|
||||
|
||||
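For reference, a small sketch of how a classic VRML ROUTE field is split into the from/to pairs handled above (the field contents are hypothetical):

```python
field = ['ROUTE', 'JoeRotInterp.value_changed', 'TO', 'Joe_l_shoulder.set_rotation']

from_id, from_type = field[1].split('.')   # 'JoeRotInterp', 'value_changed'
to_id, to_type = field[3].split('.')       # 'Joe_l_shoulder', 'set_rotation'
```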
def importSkinWeights(obj, joint, jointCoord, end):
|
||||
group = obj.vertex_groups.get(joint) or obj.vertex_groups.new(name=joint)
|
||||
# print(f"Created group {joint}")
|
||||
# print(f"Index {end} joint {joint}")
|
||||
for weight_index in range(len(jointCoord['skinCoordIndex'])):
|
||||
# print(f"Index {end} joint {joint} {jointCoord['skinCoordIndex'][weight_index]} weight {jointCoord['skinCoordWeight'][weight_index]}")
|
||||
group.add([jointCoord['skinCoordIndex'][weight_index]], jointCoord['skinCoordWeight'][weight_index], 'REPLACE')
|
||||
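A hedged sketch of what importSkinWeights does for one joint, with a stand-in mesh object and hypothetical index/weight data:

```python
import bpy

# Build a tiny stand-in mesh object (the real importer gets this from all_shapes).
mesh = bpy.data.meshes.new("skin_demo")
mesh.from_pydata([(0, 0, 0), (1, 0, 0), (0, 1, 0)] * 5, [], [])
obj = bpy.data.objects.new("skin_demo", mesh)
bpy.context.collection.objects.link(obj)

# Hypothetical HAnimJoint skin data: vertex indices and their matching weights.
joint_coord = {'skinCoordIndex': [10, 11, 12], 'skinCoordWeight': [1.0, 0.75, 0.25]}

group = obj.vertex_groups.get("l_shoulder") or obj.vertex_groups.new(name="l_shoulder")
for idx, weight in zip(joint_coord['skinCoordIndex'], joint_coord['skinCoordWeight']):
    group.add([idx], weight, 'REPLACE')   # one vertex index at a time, like the loop above
```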
|
||||
def load_web3d(
|
||||
bpycontext,
|
||||
filepath,
|
||||
*,
|
||||
PREF_FLAT=False,
|
||||
PREF_FLAT=False, # So Transforms will be imported
|
||||
PREF_CIRCLE_DIV=16,
|
||||
file_unit='M',
|
||||
global_scale=1.0,
|
||||
@@ -3637,6 +3954,33 @@ def load_web3d(
|
||||
# fill with tuples - (node, [parents-parent, parent])
|
||||
all_nodes = root_node.getSerialized([], [])
|
||||
Cedric Steiert commented: dead code & print needed?
|
||||
|
||||
all_shapes = []
|
||||
skeleton = None
|
||||
Cedric Steiert commented: print needed?
|
||||
meshobj = None
|
||||
shape = None
|
||||
site = None
|
||||
displacers = {}
|
||||
skinCoord = None
|
||||
hAnimJoint = None
|
||||
hAnimSegment = None
|
||||
hAnimSite = None
|
||||
group = None
|
||||
|
||||
# collect shapes for sites
|
||||
for node, ancestry in all_nodes:
|
||||
Cedric Steiert commented: dead code
|
||||
spec = node.getSpec()
|
||||
|
||||
if spec.endswith('Shape'):
|
||||
shape = importShape(bpycollection, node, ancestry, global_matrix)
|
||||
if shape:
|
||||
if shape[1]:
|
||||
bpy.context.view_layer.objects.active = shape[1]
|
||||
shape.append(site)
|
||||
site = None
|
||||
all_shapes.append(shape)
|
||||
Cedric Steiert commented: dead code & print needed above?
|
||||
elif spec.endswith('HAnimSite'):
|
||||
site = node
|
||||
|
||||
for node, ancestry in all_nodes:
|
||||
Cedric Steiert commented: more readable, and an extra unnecessary check gets saved:
```python
if meshobj:
    if child_joint not in imported:
        ...
    if parent_joint not in imported:
        ...
```
|
||||
#if 'castle.wrl' not in node.getFilename():
|
||||
# continue
|
||||
@@ -3652,27 +3996,104 @@ def load_web3d(
|
||||
# Note, include this function so the VRML/X3D importer can be extended
|
||||
# by an external script. - gets first pick
|
||||
pass
|
||||
if spec == 'Shape':
|
||||
importShape(bpycollection, node, ancestry, global_matrix)
|
||||
elif spec in {'PointLight', 'DirectionalLight', 'SpotLight'}:
|
||||
if spec in {'PointLight', 'DirectionalLight', 'SpotLight'}:
|
||||
Cedric Steiert commented: dead code
|
||||
importLamp(bpycollection, node, spec, ancestry, global_matrix)
|
||||
elif spec == 'Viewpoint':
|
||||
importViewpoint(bpycollection, node, ancestry, global_matrix)
|
||||
elif spec == 'HAnimHumanoid':
|
||||
joints = []
|
||||
segments = []
|
||||
jointSkin = {}
|
||||
skeleton = importHAnimHumanoid(bpycollection, node, ancestry, global_matrix, joints, segments, jointSkin)
|
||||
skinCoord = node.getChildBySpec('Coordinate')
|
||||
if skinCoord:
|
||||
#if skinCoord.getFieldAsString("containerField", None, ancestry) == "skinCoord":
|
||||
if DEBUG:
|
||||
print(f"Skin coord is {skinCoord}")
|
||||
for shape in all_shapes:
|
||||
if shape:
|
||||
if DEBUG:
|
||||
print(f"Skin mesh is found")
|
||||
if shape[0] and shape[1] and shape[2] and skinCoord.getRealNode().getDefName() == shape[2].getRealNode().getDefName():
|
||||
if DEBUG:
|
||||
print("Got mesh obj")
|
||||
meshobj = shape[1]
|
||||
meshobj.modifiers.new(name='ArmatureToMesh', type='ARMATURE')
|
||||
meshobj.modifiers['ArmatureToMesh'].object = skeleton
|
||||
else:
|
||||
print(f"DEFs match? missing shape[:]? skinCoord.getRealNode().getDefName() == shape[2].getRealNode().getDefName()")
|
||||
else:
|
||||
print(f"no shape {shape} ? all shapes is {all_shapes}")
|
||||
|
||||
else:
|
||||
print("No skinCoord, no skin weights, no skin animation")
|
||||
|
||||
|
||||
if DEBUG:
|
||||
print(f"mesh is {meshobj}")
|
||||
#bpy.ops.object.mode_set(mode="EDIT")
|
||||
bpy.ops.object.mode_set(mode="OBJECT")
|
||||
imported = []
|
||||
if DEBUG:
|
||||
print(f"Number of segments {len(segments)}")
|
||||
for segment in segments:
|
||||
parent_joint, child_joint = segment
|
||||
# print(f"Segment {parent_joint} {child_joint} loading weights")
|
||||
if meshobj:
|
||||
if child_joint not in imported:
|
||||
importSkinWeights(meshobj, child_joint, jointSkin[child_joint], "child")
|
||||
imported.append(child_joint)
|
||||
if parent_joint not in imported:
|
||||
importSkinWeights(meshobj, parent_joint, jointSkin[parent_joint], "parent")
|
||||
imported.append(parent_joint)
|
||||
|
||||
elif spec in ('HAnimSegment'):
|
||||
child_segment_name = node.getDefName()
|
||||
Cedric Steiert commented: second check of skeleton is not needed, as it's above already
|
||||
hAnimSegment = importHAnimSegment(bpycollection, node, ancestry, global_matrix)
|
||||
attachMesh(all_shapes, child_segment_name, hAnimSegment) # mesh is in all_shapes
|
||||
hAnimSegment.parent = skeleton
|
||||
hAnimSegment.parent_bone = parent_joint_name
|
||||
hAnimSegment.parent_type = 'BONE'
|
||||
elif spec in ('HAnimDisplacer'):
|
||||
# TODO Intended to be implemented
|
||||
#if meshobj:
|
||||
#importHAnimDisplacer(node, ancestry, meshobj, displacers)
|
||||
pass
|
||||
elif spec in ('HAnimHumanoid'):
|
||||
humanoid_name = node.getDefName()
|
||||
Cedric Steiert commented: dead code
|
||||
hAnimHumanoid = importTransform(bpycollection, node, ancestry, global_matrix)
|
||||
elif spec in ('HAnimJoint'):
|
||||
parent_joint_name = node.getDefName()
|
||||
hAnimJoint = importTransform(bpycollection, node, ancestry, global_matrix)
|
||||
elif spec in ('Group'):
|
||||
group_name = node.getDefName()
|
||||
group = importTransform(bpycollection, node, ancestry, global_matrix)
|
||||
elif spec in ('HAnimSite'):
|
||||
site_name = node.getDefName()
|
||||
hAnimSite = importTransform(bpycollection, node, ancestry, global_matrix)
|
||||
attachMesh(all_shapes, site_name, hAnimSite) # mesh is in all_shapes
|
||||
elif spec == 'Transform':
|
||||
# Only use transform nodes when we are not importing a flat object hierarchy
|
||||
if PREF_FLAT == False:
|
||||
importTransform(bpycollection, node, ancestry, global_matrix)
|
||||
'''
|
||||
'''
|
||||
# These are dealt with later within importRoute
|
||||
elif spec=='PositionInterpolator':
|
||||
action = bpy.data.ipos.new('web3d_ipo', 'Object')
|
||||
translatePositionInterpolator(node, action)
|
||||
'''
|
||||
translatePositionInterpolator(node, action, ancestry)
|
||||
'''
|
||||
|
||||
# After we import all nodes, route events - anim paths
|
||||
if skeleton:
|
||||
bpy.ops.object.mode_set(mode='POSE')
|
||||
for node, ancestry in all_nodes:
|
||||
importRoute(node, ancestry)
|
||||
importRoute(node, ancestry, skeleton, meshobj)
|
||||
bpy.context.scene.frame_set(0)
|
||||
|
||||
if not skeleton and shape is not None and shape[1]:
|
||||
bpy.context.view_layer.objects.active = shape[1]
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
for node, ancestry in all_nodes:
|
||||
if node.isRoot():
|
||||
# we know that all nodes referenced from will be in
|
||||
@@ -3684,16 +4105,42 @@ def load_web3d(
|
||||
|
||||
# Assign anim curves
|
||||
node = defDict[key]
|
||||
# print(f"key {key} action {action} node {node}")
|
||||
bone = None
|
||||
if skeleton:
|
||||
if key in skeleton.pose.bones:
|
||||
bone = skeleton.pose.bones[key]
|
||||
else:
|
||||
print(f"There's no pose bone associated with key {key}, probably using a regular interpolator")
|
||||
else:
|
||||
print(f"There's no skeleton")
|
||||
if node.blendData is None: # Add an object if we need one for animation
|
||||
bpyob = node.blendData = node.blendObject = bpy.data.objects.new('AnimOb', None) # , name)
|
||||
bpycollection.objects.link(bpyob)
|
||||
bpyob.select_set(True)
|
||||
|
||||
if node.blendData.animation_data is None:
|
||||
node.blendData.animation_data_create()
|
||||
|
||||
node.blendData.animation_data.action = action
|
||||
# print(f"Adding some blendData to the node for {key}. Did you forget to add it?")
|
||||
node.blendData = node.blendObject = bpy.data.objects.new(key, None)
|
||||
bpycollection.objects.link(node.blendObject)
|
||||
node.blendObject.select_set(True)
|
||||
|
||||
if hasattr(node.blendData, "animation_data"):
|
||||
if not node.blendData.animation_data:
|
||||
#print(f"Adding animation data for {node.blendData.name} 2 ")
|
||||
node.blendData.animation_data_create()
|
||||
else:
|
||||
#print(f"Node {node.blendData.name} has animation_data")
|
||||
pass
|
||||
if not node.blendData.animation_data.action:
|
||||
#print(f"Setting an action {node.blendData.name}")
|
||||
node.blendData.animation_data.action = action
|
||||
else:
|
||||
# print(f"Node {node.blendData.name} has actionnode. {node.blendData.animation_data.action}")
|
||||
pass
|
||||
# to disable NLA, comment out these 3 lines
|
||||
# print(f"Adding an nla_track to {node.blendData.name} {key}")
|
||||
track = node.blendData.animation_data.nla_tracks.new()
|
||||
track.name = "NLATRACK "+key
|
||||
node.blendData.animation_data.nla_tracks[track.name].strips.new(name=key, start=0, action=bpy.data.actions[key])
|
||||
else:
|
||||
# print(f"Node {node.blendData.name} has blendData, but no animation_data")
|
||||
pass
|
||||
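A self-contained sketch of the NLA wiring used above; the object, action and track names are placeholders, and the action is given one dummy F-Curve so the strip has something to hold:

```python
import bpy

obj = bpy.data.objects.new("AnimOb", None)            # placeholder animated object
bpy.context.collection.objects.link(obj)

action = bpy.data.actions.new("JoeRotInterp")
fcu = action.fcurves.new(data_path="location", index=0)
fcu.keyframe_points.insert(0, 0.0)
fcu.keyframe_points.insert(250, 1.0)                  # one TIME_MULTIPLIER-long curve

obj.animation_data_create()
track = obj.animation_data.nla_tracks.new()
track.name = "NLATRACK JoeRotInterp"
track.strips.new(name="JoeRotInterp", start=0, action=action)
```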
# Add in hierarchy
|
||||
if PREF_FLAT is False:
|
||||
child_dict = {}
|
||||
@@ -3721,13 +4168,36 @@ def load_web3d(
|
||||
|
||||
# Parent
|
||||
for parent, children in child_dict.items():
|
||||
for c in children:
|
||||
c.parent = parent
|
||||
if parent and children:
|
||||
for c in children:
|
||||
if c:
|
||||
if type(c) == type(parent):
|
||||
Cedric Steiert commented: what's the effect, why is this change needed?
|
||||
c.parent = parent
|
||||
else:
|
||||
if isinstance(c, bpy.types.EditBone):
|
||||
if DEBUG:
|
||||
print(f"Child is EditBone")
|
||||
if isinstance(parent, bpy.types.EditBone):
|
||||
if DEBUG:
|
||||
print(f"Parent is EditBone")
|
||||
c.parent = skeleton # Armature object
|
||||
print(f"Can't handle parent-child relationship, child {c} type {type(c)}, parent {parent} type {type(parent)}")
|
||||
else:
|
||||
print("Not a child")
|
||||
else:
|
||||
print("Children or parent may be None")
|
||||
|
||||
# update deps
|
||||
bpycontext.view_layer.update()
|
||||
del child_dict
|
||||
|
||||
def attachMesh(all_shapes, parent_name, parent_obj):
|
||||
# print(f"Found parent {parent_name}")
|
||||
for shape in all_shapes:
|
||||
if shape and shape[0] and shape[1] and shape[2] and shape[3]:
|
||||
if shape[3].getDefName() and parent_name == shape[3].getDefName(): # shape[3] is shape's parent node
|
||||
meshobj = shape[1]
|
||||
meshobj.parent = parent_obj
|
||||
|
||||
def load_with_profiler(
|
||||
context,
|
||||
@@ -3756,7 +4226,7 @@ def load(context,
|
||||
|
||||
# loadWithProfiler(operator, context, filepath, global_matrix)
|
||||
load_web3d(context, filepath,
|
||||
PREF_FLAT=True,
|
||||
PREF_FLAT=False, # So Transforms will be imported
|
||||
PREF_CIRCLE_DIV=16,
|
||||
global_scale=global_scale,
|
||||
global_matrix=global_matrix,
|
||||
|
Ideally be a tad more concise, maybe something along the lines of `TIME_MULTIPLIER`.