Pose Library: Update to use the asset shelf (when enabled) #104546
@@ -433,6 +433,8 @@ class add_mesh_bolt(Operator, AddObjectHelper):
             obj.data.use_auto_smooth = use_auto_smooth
             if use_smooth:
                 bpy.ops.object.shade_smooth()
+            else:
+                bpy.ops.object.shade_flat()

             bpy.data.meshes.remove(mesh)

@@ -700,6 +700,8 @@ class AntAddLandscape(bpy.types.Operator):

         if self.smooth_mesh:
             bpy.ops.object.shade_smooth()
+        else:
+            bpy.ops.object.shade_flat()

         if not self.at_cursor:
             new_ob.location = (0.0, 0.0, 0.0)
@@ -746,6 +748,8 @@ class AntAddLandscape(bpy.types.Operator):

         if self.smooth_mesh:
             bpy.ops.object.shade_smooth()
+        else:
+            bpy.ops.object.shade_flat()

         if not self.at_cursor:
             wobj.location = (0.0, 0.0, 0.0)
@@ -226,6 +226,8 @@ class AntLandscapeRegenerate(bpy.types.Operator):

                 if ob['smooth_mesh']:
                     bpy.ops.object.shade_smooth()
+                else:
+                    bpy.ops.object.shade_flat()

                 # Landscape Material
                 if ob['land_material'] != "" and ob['land_material'] in bpy.data.materials:
@@ -269,6 +271,8 @@ class AntLandscapeRegenerate(bpy.types.Operator):

                 if ob['smooth_mesh']:
                     bpy.ops.object.shade_smooth()
+                else:
+                    bpy.ops.object.shade_flat()

                 # Water Material
                 if ob['water_material'] != "" and ob['water_material'] in bpy.data.materials:
@@ -1003,6 +1007,8 @@ class Eroder(bpy.types.Operator):

         if self.smooth:
             bpy.ops.object.shade_smooth()
+        else:
+            bpy.ops.object.shade_flat()
         self.stats.time()
         self.stats.memory()
         if self.showmeshstats:
@@ -1093,17 +1093,17 @@ def make_track_chunk(ID, ob, ob_pos, ob_rot, ob_size):
        for i, frame in enumerate(kframes):
            position = [fc.evaluate(frame) for fc in fcurves if fc is not None and fc.data_path == 'location']
            if not position:
-               position.append(ob_pos)
+               position = ob_pos
            track_chunk.add_variable("tcb_frame", _3ds_uint(int(frame)))
            track_chunk.add_variable("tcb_flags", _3ds_ushort())
            track_chunk.add_variable("position", _3ds_point_3d(position))

    elif ID == ROT_TRACK_TAG: # Rotation
        for i, frame in enumerate(kframes):
+           quat = ob_rot
            rotation = [fc.evaluate(frame) for fc in fcurves if fc is not None and fc.data_path == 'rotation_euler']
-           if not rotation:
-               rotation.append(ob_rot)
-           quat = mathutils.Euler(rotation).to_quaternion()
+           if rotation:
+               quat = mathutils.Euler(rotation).to_quaternion()
            axis_angle = quat.angle, quat.axis[0], quat.axis[1], quat.axis[2]
            track_chunk.add_variable("tcb_frame", _3ds_uint(int(frame)))
            track_chunk.add_variable("tcb_flags", _3ds_ushort())
@@ -1113,7 +1113,7 @@ def make_track_chunk(ID, ob, ob_pos, ob_rot, ob_size):
        for i, frame in enumerate(kframes):
            size = [fc.evaluate(frame) for fc in fcurves if fc is not None and fc.data_path == 'scale']
            if not size:
-               size.append(ob_size)
+               size = ob_size
            track_chunk.add_variable("tcb_frame", _3ds_uint(int(frame)))
            track_chunk.add_variable("tcb_flags", _3ds_ushort())
            track_chunk.add_variable("scale", _3ds_point_3d(size))
@@ -1122,7 +1122,7 @@ def make_track_chunk(ID, ob, ob_pos, ob_rot, ob_size):
        for i, frame in enumerate(kframes):
            roll = [fc.evaluate(frame) for fc in fcurves if fc is not None and fc.data_path == 'rotation_euler']
            if not roll:
-               roll.append(ob_rot)
+               roll = ob_rot.to_euler()
            track_chunk.add_variable("tcb_frame", _3ds_uint(int(frame)))
            track_chunk.add_variable("tcb_flags", _3ds_ushort())
            track_chunk.add_variable("roll", _3ds_float(round(math.degrees(roll[1]), 4)))
@@ -1146,7 +1146,7 @@ def make_track_chunk(ID, ob, ob_pos, ob_rot, ob_size):
        for i, frame in enumerate(kframes):
            color = [fc.evaluate(frame) for fc in fcurves if fc is not None and fc.data_path == 'color']
            if not color:
-               color.append(ob.data.color[:3])
+               color = ob.data.color[:3]
            track_chunk.add_variable("tcb_frame", _3ds_uint(int(frame)))
            track_chunk.add_variable("tcb_flags", _3ds_ushort())
            track_chunk.add_variable("color", _3ds_float_color(color))
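Note on the four track hunks above: they all use the same pattern — sample the object's F-Curves at each keyframe and, when no matching channel exists, assign the object's static transform directly instead of appending it to the sampled list. A standalone sketch of that pattern; the object name and frame number are hypothetical and not taken from the diff:

```python
import bpy

ob = bpy.data.objects["Cube"]  # assumed animated object
fcurves = ob.animation_data.action.fcurves
frame = 12

# Sample only the 'location' channels at this frame.
position = [fc.evaluate(frame) for fc in fcurves if fc is not None and fc.data_path == 'location']
if not position:
    # No location F-Curves: fall back to the object's static location.
    position = ob.location.copy()
```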
@@ -33,8 +33,8 @@ COLOR_F = 0x0010 # color defined as 3 floats
 COLOR_24 = 0x0011 # color defined as 3 bytes
 LIN_COLOR_24 = 0x0012 # linear byte color
 LIN_COLOR_F = 0x0013 # linear float color
-PCT_SHORT = 0x30 # percentage short
-PCT_FLOAT = 0x31 # percentage float
+PCT_SHORT = 0x0030 # percentage short
+PCT_FLOAT = 0x0031 # percentage float
 MASTERSCALE = 0x0100 # Master scale factor

 # >----- sub defines of OBJECTINFO
@@ -224,13 +224,6 @@ def read_string(file):
 # IMPORT #
 ##########

-def process_next_object_chunk(file, previous_chunk):
-    new_chunk = Chunk()
-
-    while (previous_chunk.bytes_read < previous_chunk.length):
-        # read the next chunk
-        read_chunk(file, new_chunk)
-
 def skip_to_end(file, skip_chunk):
     buffer_size = skip_chunk.length - skip_chunk.bytes_read
     binary_format = '%ic' % buffer_size
@@ -454,6 +447,11 @@ def process_next_chunk(context, file, previous_chunk, imported_objects, CONSTRAI
                smoothface = myContextMesh_smooth[f]
                if smoothface > 0:
                    bmesh.polygons[f].use_smooth = True
+               else:
+                   bmesh.polygons[f].use_smooth = False
+       else:
+           for poly in bmesh.polygons:
+               poly.use_smooth = False

        if contextMatrix:
            if WORLD_MATRIX:
@@ -568,7 +566,8 @@ def process_next_chunk(context, file, previous_chunk, imported_objects, CONSTRAI
                                 (uoffset, voffset, 0), angle, tintcolor, mapto)

    def apply_constrain(vec):
-       consize = mathutils.Vector(vec) * (CONSTRAIN * 0.1) if CONSTRAIN != 0.0 else mathutils.Vector(vec)
+       convector = mathutils.Vector.Fill(3, (CONSTRAIN * 0.1))
+       consize = mathutils.Vector(vec) * convector if CONSTRAIN != 0.0 else mathutils.Vector(vec)
        return consize

    def calc_target(location, target):
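Note: the new `apply_constrain` multiplies by a filled vector rather than a plain float; with `mathutils`, `Vector * Vector` is a component-wise product. A small sketch outside the importer, with illustrative values only (the module is Blender's `mathutils`, also published on PyPI):

```python
from mathutils import Vector

CONSTRAIN = 10.0                               # hypothetical constraint size
vec = (1.0, 2.0, 3.0)
convector = Vector.Fill(3, CONSTRAIN * 0.1)    # Vector((1.0, 1.0, 1.0))
# Component-wise product, matching the new code path.
consize = Vector(vec) * convector if CONSTRAIN != 0.0 else Vector(vec)
print(consize)                                 # Vector((1.0, 2.0, 3.0))
```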
@@ -576,8 +575,8 @@ def process_next_chunk(context, file, previous_chunk, imported_objects, CONSTRAI
        tilt = 0.0
        pos = location + target # Target triangulation
        if abs(location[0] - target[0]) > abs(location[1] - target[1]):
-           foc = math.copysign(math.sqrt(pow(pos[0],2) + pow(pos[1],2)),pos[0])
-           dia = math.copysign(math.sqrt(pow(foc,2) + pow(target[2],2)),pos[0])
+           foc = math.copysign(math.sqrt(pow(pos[0],2) + pow(pos[1],2)), pos[0])
+           dia = math.copysign(math.sqrt(pow(foc,2) + pow(target[2],2)), pos[0])
            pitch = math.radians(90) - math.copysign(math.acos(foc / dia), pos[2])
            if location[0] > target[0]:
                tilt = math.copysign(pitch, pos[0])
@@ -586,8 +585,8 @@ def process_next_chunk(context, file, previous_chunk, imported_objects, CONSTRAI
                tilt = -1 * (math.copysign(pitch, pos[0]))
            pan = -1 * (math.radians(90) - math.atan(pos[1] / foc))
        elif abs(location[1] - target[1]) > abs(location[0] - target[0]):
-           foc = math.copysign(math.sqrt(pow(pos[1],2) + pow(pos[0],2)),pos[1])
-           dia = math.copysign(math.sqrt(pow(foc,2) + pow(target[2],2)),pos[1])
+           foc = math.copysign(math.sqrt(pow(pos[1],2) + pow(pos[0],2)), pos[1])
+           dia = math.copysign(math.sqrt(pow(foc,2) + pow(target[2],2)), pos[1])
            pitch = math.radians(90) - math.copysign(math.acos(foc / dia), pos[2])
            if location[1] > target[1]:
                tilt = math.copysign(pitch, pos[1])
@@ -1122,12 +1121,16 @@ def process_next_chunk(context, file, previous_chunk, imported_objects, CONSTRAI
            new_chunk.bytes_read += read_str_len

        elif new_chunk.ID == OBJECT_INSTANCE_NAME:
-           object_name, read_str_len = read_string(file)
+           instance_name, read_str_len = read_string(file)
            if child.name == '$$$DUMMY':
-               child.name = object_name
-           else:
-               child.name += "." + object_name
-           object_dictionary[object_name] = child
+               child.name = instance_name
+           else: # Child is an instance
+               child = child.copy()
+               child.name = object_name + "." + instance_name
+               context.view_layer.active_layer_collection.collection.objects.link(child)
+               object_dict[object_id] = child
+               object_list[-1] = child
+           object_dictionary[child.name] = child
            new_chunk.bytes_read += read_str_len

        elif new_chunk.ID == OBJECT_PIVOT: # Pivot
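Note: the new instance branch duplicates the object and links the copy into the active collection. A minimal sketch of those two bpy calls outside the importer; the object and instance names are hypothetical:

```python
import bpy

child = bpy.data.objects["Mesh"]          # assumed existing object
instance = child.copy()                   # new object datablock sharing the same mesh data
instance.name = child.name + "." + "Inst001"
bpy.context.view_layer.active_layer_collection.collection.objects.link(instance)
```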
@@ -1344,10 +1347,9 @@ def process_next_chunk(context, file, previous_chunk, imported_objects, CONSTRAI
            elif parent not in object_dict:
                if ob.parent != object_list[parent]:
                    ob.parent = object_list[parent]
-               elif ob.parent != object_dict[parent]:
-                   ob.parent = object_dict.get(parent)
            else:
-               print("\tWarning: Cannot assign self to parent ", ob.name)
+               if ob.parent != object_dict[parent]:
+                   ob.parent = object_dict.get(parent)

            #pivot_list[ind] += pivot_list[parent] # Not sure this is correct, should parent space matrix be applied before combining?

@@ -1357,6 +1359,7 @@ def process_next_chunk(context, file, previous_chunk, imported_objects, CONSTRAI
        for ob in objs:
            if parent is not None:
                ob.parent = parent
+   parent_dictionary.clear()

    # If hierarchy
    hierarchy = dict(zip(childs_list, parent_list))
@@ -5,7 +5,7 @@
 bl_info = {
     "name": "FBX format",
     "author": "Campbell Barton, Bastien Montagne, Jens Restemeier, @Mysteryem",
-    "version": (5, 3, 4),
+    "version": (5, 5, 0),
     "blender": (3, 6, 0),
     "location": "File > Import-Export",
     "description": "FBX IO meshes, UVs, vertex colors, materials, textures, cameras, lamps and actions",
@@ -49,6 +49,9 @@ from .fbx_utils import (
     units_blender_to_fbx_factor, units_convertor, units_convertor_iter,
     matrix4_to_array, similar_values, shape_difference_exclude_similar, astype_view_signedness, fast_first_axis_unique,
     fast_first_axis_flat,
+    # Attribute helpers.
+    MESH_ATTRIBUTE_CORNER_EDGE, MESH_ATTRIBUTE_SHARP_EDGE, MESH_ATTRIBUTE_EDGE_VERTS, MESH_ATTRIBUTE_CORNER_VERT,
+    MESH_ATTRIBUTE_SHARP_FACE, MESH_ATTRIBUTE_POSITION, MESH_ATTRIBUTE_MATERIAL_INDEX,
     # Mesh transform helpers.
     vcos_transformed, nors_transformed,
     # UUID from key.
@@ -888,13 +891,13 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):

    elem_data_single_int32(geom, b"GeometryVersion", FBX_GEOMETRY_VERSION)

+   attributes = me.attributes
+
    # Vertex cos.
-   co_bl_dtype = np.single
-   co_fbx_dtype = np.float64
-   t_co = np.empty(len(me.vertices) * 3, dtype=co_bl_dtype)
-   me.vertices.foreach_get("co", t_co)
-   elem_data_single_float64_array(geom, b"Vertices", vcos_transformed(t_co, geom_mat_co, co_fbx_dtype))
-   del t_co
+   pos_fbx_dtype = np.float64
+   t_pos = MESH_ATTRIBUTE_POSITION.to_ndarray(attributes)
+   elem_data_single_float64_array(geom, b"Vertices", vcos_transformed(t_pos, geom_mat_co, pos_fbx_dtype))
+   del t_pos

    # Polygon indices.
    #
@@ -907,29 +910,26 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):

    # dtypes matching the C data. Matching the C datatype avoids iteration and casting of every element in foreach_get's
    # C code.
-   bl_vertex_index_dtype = bl_edge_index_dtype = bl_loop_index_dtype = np.uintc
+   bl_loop_index_dtype = np.uintc

-   # Start vertex indices of loops. May contain elements for loops added for the export of loose edges.
-   t_lvi = np.empty(len(me.loops), dtype=bl_vertex_index_dtype)
+   # Start vertex indices of loops (corners). May contain elements for loops added for the export of loose edges.
+   t_lvi = MESH_ATTRIBUTE_CORNER_VERT.to_ndarray(attributes)

    # Loop start indices of polygons. May contain elements for the polygons added for the export of loose edges.
    t_ls = np.empty(len(me.polygons), dtype=bl_loop_index_dtype)

    # Vertex indices of edges (unsorted, unlike Mesh.edge_keys), flattened into an array twice the length of the number
    # of edges.
-   t_ev = np.empty(len(me.edges) * 2, dtype=bl_vertex_index_dtype)
+   t_ev = MESH_ATTRIBUTE_EDGE_VERTS.to_ndarray(attributes)
    # Each edge has two vertex indices, so it's useful to view the array as 2d where each element on the first axis is a
    # pair of vertex indices
    t_ev_pair_view = t_ev.view()
    t_ev_pair_view.shape = (-1, 2)

-   # Edge indices of loops. May contain elements for loops added for the export of loose edges.
-   t_lei = np.empty(len(me.loops), dtype=bl_edge_index_dtype)
+   # Edge indices of loops (corners). May contain elements for loops added for the export of loose edges.
+   t_lei = MESH_ATTRIBUTE_CORNER_EDGE.to_ndarray(attributes)

-   me.loops.foreach_get("vertex_index", t_lvi)
    me.polygons.foreach_get("loop_start", t_ls)
-   me.edges.foreach_get("vertices", t_ev)
-   me.loops.foreach_get("edge_index", t_lei)

    # Add "fake" faces for loose edges. Each "fake" face consists of two loops creating a new 2-sided polygon.
    if scene_data.settings.use_mesh_edges:
@@ -1025,12 +1025,18 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
    # Smoothing.
    if smooth_type in {'FACE', 'EDGE'}:
        ps_fbx_dtype = np.int32
-       poly_use_smooth_dtype = bool
-       edge_use_sharp_dtype = bool
        _map = b""
        if smooth_type == 'FACE':
-           t_ps = np.empty(len(me.polygons), dtype=poly_use_smooth_dtype)
-           me.polygons.foreach_get("use_smooth", t_ps)
+           # The FBX integer values are usually interpreted as boolean where 0 is False (sharp) and 1 is True
+           # (smooth).
+           # The values may also be used to represent smoothing group bitflags, but this does not seem well-supported.
+           t_ps = MESH_ATTRIBUTE_SHARP_FACE.get_ndarray(attributes)
+           if t_ps is not None:
+               # FBX sharp is False, but Blender sharp is True, so invert.
+               t_ps = np.logical_not(t_ps)
+           else:
+               # The mesh has no "sharp_face" attribute, so every face is smooth.
+               t_ps = np.ones(len(me.polygons), dtype=ps_fbx_dtype)
            _map = b"ByPolygon"
        else: # EDGE
            _map = b"ByEdge"
@@ -1045,37 +1051,40 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
            mesh_t_ls_view = t_ls[:mesh_poly_nbr]
            mesh_t_lei_view = t_lei[:mesh_loop_nbr]

-           # - Get sharp edges from flat shaded faces
-           # Get the 'use_smooth' attribute of all polygons.
-           p_use_smooth_mask = np.empty(mesh_poly_nbr, dtype=poly_use_smooth_dtype)
-           me.polygons.foreach_get('use_smooth', p_use_smooth_mask)
-           # Invert to get all flat shaded polygons.
-           p_flat_mask = np.invert(p_use_smooth_mask, out=p_use_smooth_mask)
-           # Convert flat shaded polygons to flat shaded loops by repeating each element by the number of sides of
-           # that polygon.
-           # Polygon sides can be calculated from the element-wise difference of loop starts appended by the number
-           # of loops. Alternatively, polygon sides can be retrieved directly from the 'loop_total' attribute of
-           # polygons, but since we already have t_ls, it tends to be quicker to calculate from t_ls when above
-           # around 10_000 polygons.
-           polygon_sides = np.diff(mesh_t_ls_view, append=mesh_loop_nbr)
-           p_flat_loop_mask = np.repeat(p_flat_mask, polygon_sides)
-           # Convert flat shaded loops to flat shaded (sharp) edge indices.
-           # Note that if an edge is in multiple loops that are part of flat shaded faces, its edge index will end
-           # up in sharp_edge_indices_from_polygons multiple times.
-           sharp_edge_indices_from_polygons = mesh_t_lei_view[p_flat_loop_mask]
-
-           # - Get sharp edges from edges marked as sharp
-           e_use_sharp_mask = np.empty(mesh_edge_nbr, dtype=edge_use_sharp_dtype)
-           me.edges.foreach_get('use_edge_sharp', e_use_sharp_mask)
-
            # - Get sharp edges from edges used by more than two loops (and therefore more than two faces)
            e_more_than_two_faces_mask = np.bincount(mesh_t_lei_view, minlength=mesh_edge_nbr) > 2

-           # - Combine with edges that are sharp because they're in more than two faces
-           e_use_sharp_mask = np.logical_or(e_use_sharp_mask, e_more_than_two_faces_mask, out=e_use_sharp_mask)
+           # - Get sharp edges from the "sharp_edge" attribute. The attribute may not exist, in which case, there
+           # are no edges marked as sharp.
+           e_use_sharp_mask = MESH_ATTRIBUTE_SHARP_EDGE.get_ndarray(attributes)
+           if e_use_sharp_mask is not None:
+               # - Combine with edges that are sharp because they're in more than two faces
+               e_use_sharp_mask = np.logical_or(e_use_sharp_mask, e_more_than_two_faces_mask, out=e_use_sharp_mask)
+           else:
+               e_use_sharp_mask = e_more_than_two_faces_mask

-           # - Combine with edges that are sharp because a polygon they're in has flat shading
-           e_use_sharp_mask[sharp_edge_indices_from_polygons] = True
+           # - Get sharp edges from flat shaded faces
+           p_flat_mask = MESH_ATTRIBUTE_SHARP_FACE.get_ndarray(attributes)
+           if p_flat_mask is not None:
+               # Convert flat shaded polygons to flat shaded loops by repeating each element by the number of sides
+               # of that polygon.
+               # Polygon sides can be calculated from the element-wise difference of loop starts appended by the
+               # number of loops. Alternatively, polygon sides can be retrieved directly from the 'loop_total'
+               # attribute of polygons, but since we already have t_ls, it tends to be quicker to calculate from
+               # t_ls.
+               polygon_sides = np.diff(mesh_t_ls_view, append=mesh_loop_nbr)
+               p_flat_loop_mask = np.repeat(p_flat_mask, polygon_sides)
+               # Convert flat shaded loops to flat shaded (sharp) edge indices.
+               # Note that if an edge is in multiple loops that are part of flat shaded faces, its edge index will
+               # end up in sharp_edge_indices_from_polygons multiple times.
+               sharp_edge_indices_from_polygons = mesh_t_lei_view[p_flat_loop_mask]
+
+               # - Combine with edges that are sharp because a polygon they're in has flat shading
+               e_use_sharp_mask[sharp_edge_indices_from_polygons] = True
+               del sharp_edge_indices_from_polygons
+               del p_flat_loop_mask
+               del polygon_sides
+               del p_flat_mask

            # - Convert sharp edges to sharp edge keys (t_pvi)
            ek_use_sharp_mask = e_use_sharp_mask[t_pvi_edge_indices]
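Note: the "flat faces to sharp edges" conversion above survives unchanged, only its input now comes from the "sharp_face" attribute. A standalone NumPy sketch of that conversion with illustrative values (the array contents are hypothetical):

```python
import numpy as np

t_ls = np.array([0, 3, 7], dtype=np.uintc)                         # loop start of each polygon
t_lei = np.array([0, 1, 2, 3, 4, 5, 6, 2, 7, 8], dtype=np.uintc)   # edge index of each loop
loop_nbr = len(t_lei)
p_flat_mask = np.array([False, True, False])                       # polygon 1 is flat shaded

polygon_sides = np.diff(t_ls, append=loop_nbr)                     # [3, 4, 3]
p_flat_loop_mask = np.repeat(p_flat_mask, polygon_sides)           # one bool per loop
sharp_edge_indices = t_lei[p_flat_loop_mask]                       # edges used by the flat polygon
print(sharp_edge_indices)                                          # [3 4 5 6]
```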
@@ -1084,11 +1093,6 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
            t_ps = np.invert(ek_use_sharp_mask, out=ek_use_sharp_mask)
            del ek_use_sharp_mask
            del e_use_sharp_mask
-           del sharp_edge_indices_from_polygons
-           del p_flat_loop_mask
-           del polygon_sides
-           del p_flat_mask
-           del p_use_smooth_mask
            del mesh_t_lei_view
            del mesh_t_ls_view
        else:
@@ -1109,19 +1113,25 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
        ec_fbx_dtype = np.float64
        if t_pvi_edge_indices.size:
            ec_bl_dtype = np.single
-           t_ec_raw = np.empty(len(me.edges), dtype=ec_bl_dtype)
-           me.edges.foreach_get('crease', t_ec_raw)
+           edge_creases = me.edge_creases
+           if edge_creases:
+               t_ec_raw = np.empty(len(me.edges), dtype=ec_bl_dtype)
+               edge_creases.data.foreach_get("value", t_ec_raw)

                # Convert to t_pvi edge-keys.
                t_ec_ek_raw = t_ec_raw[t_pvi_edge_indices]

                # Blender squares those values before sending them to OpenSubdiv, when other software don't,
                # so we need to compensate that to get similar results through FBX...
                # Use the precision of the fbx dtype for the calculation since it's usually higher precision.
                t_ec_ek_raw = t_ec_ek_raw.astype(ec_fbx_dtype, copy=False)
                t_ec = np.square(t_ec_ek_raw, out=t_ec_ek_raw)
                del t_ec_ek_raw
                del t_ec_raw
+           else:
+               # todo: Blender edge creases are optional now, we may be able to avoid writing the array to FBX when
+               # there are no edge creases.
+               t_ec = np.zeros(t_pvi_edge_indices.shape, dtype=ec_fbx_dtype)
        else:
            t_ec = np.empty(0, dtype=ec_fbx_dtype)

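Note: the crease path keeps the pre-existing compensation — Blender squares crease values before handing them to OpenSubdiv, so the exporter squares them for FBX as well. A plain-NumPy sketch with illustrative values:

```python
import numpy as np

t_ec_raw = np.array([0.0, 0.5, 1.0], dtype=np.single)      # hypothetical per-edge creases
t_ec = np.square(t_ec_raw.astype(np.float64, copy=False))  # compensate for Blender's squaring
print(t_ec)                                                 # [0.   0.25 1.  ]
```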
@@ -1336,7 +1346,7 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
        elem_data_single_string(lay_uv, b"MappingInformationType", b"ByPolygonVertex")
        elem_data_single_string(lay_uv, b"ReferenceInformationType", b"IndexToDirect")

-       uvlayer.data.foreach_get("uv", t_luv)
+       uvlayer.uv.foreach_get("vector", t_luv)

        # t_luv_fast_pair_view is a view in a dtype that compares elements by individual bytes, but float types have
        # separate byte representations of positive and negative zero. For uniqueness, these should be considered
@@ -1412,11 +1422,13 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
        elem_data_single_int32(lay_ma, b"Version", FBX_GEOMETRY_MATERIAL_VERSION)
        elem_data_single_string(lay_ma, b"Name", b"")
        nbr_mats = len(me_fbxmaterials_idx)
-       if nbr_mats > 1:
-           bl_pm_dtype = np.uintc
+       multiple_fbx_mats = nbr_mats > 1
+       # If a mesh does not have more than one material its material_index attribute can be ignored.
+       # If a mesh has multiple materials but all its polygons are assigned to the first material, its
+       # material_index attribute may not exist.
+       t_pm = None if not multiple_fbx_mats else MESH_ATTRIBUTE_MATERIAL_INDEX.get_ndarray(attributes)
+       if t_pm is not None:
            fbx_pm_dtype = np.int32
-           t_pm = np.empty(len(me.polygons), dtype=bl_pm_dtype)
-           me.polygons.foreach_get("material_index", t_pm)

            # We have to validate mat indices, and map them to FBX indices.
            # Note a mat might not be in me_fbxmaterials_idx (e.g. node mats are ignored).
@@ -1428,7 +1440,10 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):

            # Set material indices that are out of bounds to the default material index
            mat_idx_limit = len(me_blmaterials)
-           t_pm[t_pm >= mat_idx_limit] = def_me_blmaterial_idx
+           # Material indices shouldn't be negative, but they technically could be. Viewing as unsigned before
+           # checking for indices that are too large means that a single >= check will pick up both negative
+           # indices and indices that are too large.
+           t_pm[t_pm.view("u%i" % t_pm.itemsize) >= mat_idx_limit] = def_me_blmaterial_idx

            # Map to FBX indices. Materials not in me_fbxmaterials_idx will be set to the default material index.
            blmat_fbx_idx = np.fromiter((me_fbxmaterials_idx.get(m, def_ma) for m in me_blmaterials),
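Note: the unsigned-view bounds check added above works with plain NumPy; the sketch below uses hypothetical per-polygon material indices to show how one comparison catches both negative and too-large values:

```python
import numpy as np

t_pm = np.array([0, 2, -1, 7], dtype=np.int32)   # hypothetical material indices
mat_idx_limit = 3
def_idx = 0

# Viewing as the unsigned type of the same width turns -1 into 4294967295,
# so a single >= comparison flags negative and out-of-range indices alike.
out_of_bounds = t_pm.view("u%i" % t_pm.itemsize) >= mat_idx_limit
t_pm[out_of_bounds] = def_idx
print(t_pm)  # [0 2 0 0]
```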
@@ -1442,11 +1457,18 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
            # indices??? *sigh*).
            elem_data_single_string(lay_ma, b"ReferenceInformationType", b"IndexToDirect")
            elem_data_single_int32_array(lay_ma, b"Materials", t_pm)
-           del t_pm
        else:
            elem_data_single_string(lay_ma, b"MappingInformationType", b"AllSame")
            elem_data_single_string(lay_ma, b"ReferenceInformationType", b"IndexToDirect")
-           elem_data_single_int32_array(lay_ma, b"Materials", [0])
+           if multiple_fbx_mats:
+               # There's no material_index attribute, so every material index is effectively zero.
+               # In the order of the mesh's materials, get the FBX index of the first material that is exported.
+               all_same_idx = next(me_fbxmaterials_idx[m] for m in me_blmaterials if m in me_fbxmaterials_idx)
+           else:
+               # There's only one fbx material, so the index will always be zero.
+               all_same_idx = 0
+           elem_data_single_int32_array(lay_ma, b"Materials", [all_same_idx])
+       del t_pm

        # And the "layer TOC"...

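Note: the "AllSame" fallback above picks the FBX index of the first Blender material that was actually exported. A tiny sketch with hypothetical data, mirroring the `next(...)` expression:

```python
me_blmaterials = ["MatNodeOnly", "MatA", "MatB"]          # hypothetical material slots
me_fbxmaterials_idx = {"MatA": 0, "MatB": 1}              # "MatNodeOnly" was skipped by the exporter
all_same_idx = next(me_fbxmaterials_idx[m] for m in me_blmaterials if m in me_fbxmaterials_idx)
print(all_same_idx)  # 0
```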
@@ -1719,6 +1741,14 @@ def fbx_data_video_elements(root, vid, scene_data):
    #~ else:
    #~ elem_data_single_bytes(fbx_vid, b"Content", b"")

+   # Blender currently has no UI for editing custom properties on Images, but the importer will import Image custom
+   # properties from either a Video Node or a Texture Node, preferring a Video node if one exists. We'll propagate
+   # these custom properties only to Video Nodes because that is most likely where they were imported from, and Texture
+   # Nodes are more like Blender's Shader Nodes than Images, which is what we're exporting here.
+   if scene_data.settings.use_custom_props:
+       fbx_data_element_custom_properties(props, vid)
+
+

 def fbx_data_armature_elements(root, arm_obj, scene_data):
    """
@@ -2627,10 +2657,10 @@ def fbx_data_from_scene(scene, depsgraph, settings):
            # Get and cache only the cos that we need
            @cache
            def sk_cos(shape_key):
-               _cos = np.empty(len(me.vertices) * 3, dtype=co_bl_dtype)
                if shape_key == sk_base:
-                   me.vertices.foreach_get("co", _cos)
+                   _cos = MESH_ATTRIBUTE_POSITION.to_ndarray(me.attributes)
                else:
+                   _cos = np.empty(len(me.vertices) * 3, dtype=co_bl_dtype)
                    shape_key.data.foreach_get("co", _cos)
                return vcos_transformed(_cos, geom_mat_co, co_fbx_dtype)

@@ -9,6 +9,8 @@ import time
 from collections import namedtuple
 from collections.abc import Iterable
 from itertools import zip_longest, chain
+from dataclasses import dataclass, field
+from typing import Callable
 import numpy as np

 import bpy
@@ -412,8 +414,13 @@ def nors_transformed(raw_nors, m=None, dtype=None):


 def astype_view_signedness(arr, new_dtype):
-    """Unsafely views arr as new_dtype if the itemsize and byteorder of arr matches but the signedness does not,
-    otherwise calls np.ndarray.astype with copy=False.
+    """Unsafely views arr as new_dtype if the itemsize and byteorder of arr matches but the signedness does not.
+
+    Safely views arr as new_dtype if both arr and new_dtype have the same itemsize, byteorder and signedness, but could
+    have a different character code, e.g. 'i' and 'l'. np.ndarray.astype with copy=False does not normally create this
+    view, but Blender can be picky about the character code used, so this function will create the view.
+
+    Otherwise, calls np.ndarray.astype with copy=False.

     The benefit of copy=False is that if the array can be safely viewed as the new type, then a view is made, instead of
     a copy with the new type.
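Note: the view-versus-copy behaviour described in the docstring above can be demonstrated with plain NumPy; the values below are illustrative:

```python
import numpy as np

arr = np.array([0, 1, 2**31 - 1], dtype=np.int32)
view = arr.view(np.uint32)                    # same bytes, opposite signedness, no copy
assert view.base is arr                       # it is a view of the original array

copied = arr.astype(np.uint32, copy=False)    # astype cannot avoid a copy across signedness
assert copied.base is None                    # a new array was allocated
```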
@@ -434,13 +441,14 @@ def astype_view_signedness(arr, new_dtype):
    # else is left to .astype.
    arr_kind = arr_dtype.kind
    new_kind = new_dtype.kind
+   # Signed and unsigned int are opposite in terms of signedness. Other types don't have signedness.
+   integer_kinds = {'i', 'u'}
    if (
-       # Signed and unsigned int are opposite in terms of signedness. Other types don't have signedness.
-       ((arr_kind == 'i' and new_kind == 'u') or (arr_kind == 'u' and new_kind == 'i'))
+       arr_kind in integer_kinds and new_kind in integer_kinds
        and arr_dtype.itemsize == new_dtype.itemsize
        and arr_dtype.byteorder == new_dtype.byteorder
    ):
-       # new_dtype has opposite signedness and matching itemsize and byteorder, so return a view of the new type.
+       # arr and new_dtype have signedness and matching itemsize and byteorder, so return a view of the new type.
        return arr.view(new_dtype)
    else:
        return arr.astype(new_dtype, copy=False)
@@ -592,6 +600,147 @@ def ensure_object_not_in_edit_mode(context, obj):
     return True


+# ##### Attribute utils. #####
+AttributeDataTypeInfo = namedtuple("AttributeDataTypeInfo", ["dtype", "foreach_attribute", "item_size"])
+_attribute_data_type_info_lookup = {
+    'FLOAT': AttributeDataTypeInfo(np.single, "value", 1),
+    'INT': AttributeDataTypeInfo(np.intc, "value", 1),
+    'FLOAT_VECTOR': AttributeDataTypeInfo(np.single, "vector", 3),
+    'FLOAT_COLOR': AttributeDataTypeInfo(np.single, "color", 4),  # color_srgb is an alternative
+    'BYTE_COLOR': AttributeDataTypeInfo(np.single, "color", 4),  # color_srgb is an alternative
+    'STRING': AttributeDataTypeInfo(None, "value", 1),  # Not usable with foreach_get/set
+    'BOOLEAN': AttributeDataTypeInfo(bool, "value", 1),
+    'FLOAT2': AttributeDataTypeInfo(np.single, "vector", 2),
+    'INT8': AttributeDataTypeInfo(np.intc, "value", 1),
+    'INT32_2D': AttributeDataTypeInfo(np.intc, "value", 2),
+}
+
+
+def attribute_get(attributes, name, data_type, domain):
+    """Get an attribute by its name, data_type and domain.
+
+    Returns None if no attribute with this name, data_type and domain exists."""
+    attr = attributes.get(name)
+    if not attr:
+        return None
+    if attr.data_type == data_type and attr.domain == domain:
+        return attr
+    # It shouldn't normally happen, but it's possible there are multiple attributes with the same name, but different
+    # data_types or domains.
+    for attr in attributes:
+        if attr.name == name and attr.data_type == data_type and attr.domain == domain:
+            return attr
+    return None
+
+
+def attribute_foreach_set(attribute, array_or_list, foreach_attribute=None):
+    """Set every value of an attribute with foreach_set."""
+    if foreach_attribute is None:
+        foreach_attribute = _attribute_data_type_info_lookup[attribute.data_type].foreach_attribute
+    attribute.data.foreach_set(foreach_attribute, array_or_list)
+
+
+def attribute_to_ndarray(attribute, foreach_attribute=None):
+    """Create a NumPy ndarray from an attribute."""
+    data = attribute.data
+    data_type_info = _attribute_data_type_info_lookup[attribute.data_type]
+    ndarray = np.empty(len(data) * data_type_info.item_size, dtype=data_type_info.dtype)
+    if foreach_attribute is None:
+        foreach_attribute = data_type_info.foreach_attribute
+    data.foreach_get(foreach_attribute, ndarray)
+    return ndarray
+
+
+@dataclass
+class AttributeDescription:
+    """Helper class to reduce duplicate code for handling built-in Blender attributes."""
+    name: str
+    # Valid identifiers can be found in bpy.types.Attribute.bl_rna.properties["data_type"].enum_items
+    data_type: str
+    # Valid identifiers can be found in bpy.types.Attribute.bl_rna.properties["domain"].enum_items
+    domain: str
+    # Some attributes are required to exist if certain conditions are met. If a required attribute does not exist when
+    # attempting to get it, an AssertionError is raised.
+    is_required_check: Callable[[bpy.types.AttributeGroup], bool] = None
+    # NumPy dtype that matches the internal C data of this attribute.
+    dtype: np.dtype = field(init=False)
+    # The default attribute name to use with foreach_get and foreach_set.
+    foreach_attribute: str = field(init=False)
+    # The number of elements per value of the attribute when flattened into a 1-dimensional list/array.
+    item_size: int = field(init=False)
+
+    def __post_init__(self):
+        data_type_info = _attribute_data_type_info_lookup[self.data_type]
+        self.dtype = data_type_info.dtype
+        self.foreach_attribute = data_type_info.foreach_attribute
+        self.item_size = data_type_info.item_size
+
+    def is_required(self, attributes):
+        """Check if the attribute is required to exist in the provided attributes."""
+        is_required_check = self.is_required_check
+        return is_required_check and is_required_check(attributes)
+
+    def get(self, attributes):
+        """Get the attribute.
+
+        If the attribute is required, but does not exist, an AssertionError is raised, otherwise None is returned."""
+        attr = attribute_get(attributes, self.name, self.data_type, self.domain)
+        if not attr and self.is_required(attributes):
+            raise AssertionError("Required attribute '%s' with type '%s' and domain '%s' not found in %r"
+                                 % (self.name, self.data_type, self.domain, attributes))
+        return attr
+
+    def ensure(self, attributes):
+        """Get the attribute, creating it if it does not exist.
+
+        Raises a RuntimeError if the attribute could not be created, which should only happen when attempting to create
+        an attribute with a reserved name, but with the wrong data_type or domain. See usage of
+        BuiltinCustomDataLayerProvider in Blender source for most reserved names.
+
+        There is no guarantee that the returned attribute has the desired name because the name could already be in use
+        by another attribute with a different data_type and/or domain."""
+        attr = self.get(attributes)
+        if attr:
+            return attr
+
+        attr = attributes.new(self.name, self.data_type, self.domain)
+        if not attr:
+            raise RuntimeError("Could not create attribute '%s' with type '%s' and domain '%s' in %r"
+                               % (self.name, self.data_type, self.domain, attributes))
+        return attr
+
+    def foreach_set(self, attributes, array_or_list, foreach_attribute=None):
+        """Get the attribute, creating it if it does not exist, and then set every value in the attribute."""
+        attribute_foreach_set(self.ensure(attributes), array_or_list, foreach_attribute)
+
+    def get_ndarray(self, attributes, foreach_attribute=None):
+        """Get the attribute and if it exists, return a NumPy ndarray containing its data, otherwise return None."""
+        attr = self.get(attributes)
+        return attribute_to_ndarray(attr, foreach_attribute) if attr else None
+
+    def to_ndarray(self, attributes, foreach_attribute=None):
+        """Get the attribute and if it exists, return a NumPy ndarray containing its data, otherwise return a
+        zero-length ndarray."""
+        ndarray = self.get_ndarray(attributes, foreach_attribute)
+        return ndarray if ndarray is not None else np.empty(0, dtype=self.dtype)
+
+
+# Built-in Blender attributes
+# Only attributes used by the importer/exporter are included here.
+# See usage of BuiltinCustomDataLayerProvider in Blender source to find most built-in attributes.
+MESH_ATTRIBUTE_MATERIAL_INDEX = AttributeDescription("material_index", 'INT', 'FACE')
+MESH_ATTRIBUTE_POSITION = AttributeDescription("position", 'FLOAT_VECTOR', 'POINT',
+                                               is_required_check=lambda attributes: bool(attributes.id_data.vertices))
+MESH_ATTRIBUTE_SHARP_EDGE = AttributeDescription("sharp_edge", 'BOOLEAN', 'EDGE')
+MESH_ATTRIBUTE_EDGE_VERTS = AttributeDescription(".edge_verts", 'INT32_2D', 'EDGE',
+                                                 is_required_check=lambda attributes: bool(attributes.id_data.edges))
+MESH_ATTRIBUTE_CORNER_VERT = AttributeDescription(".corner_vert", 'INT', 'CORNER',
+                                                  is_required_check=lambda attributes: bool(attributes.id_data.loops))
+MESH_ATTRIBUTE_CORNER_EDGE = AttributeDescription(".corner_edge", 'INT', 'CORNER',
+                                                  is_required_check=lambda attributes: bool(attributes.id_data.loops))
+MESH_ATTRIBUTE_SHARP_FACE = AttributeDescription("sharp_face", 'BOOLEAN', 'FACE')
+
+
 # ##### UIDs code. #####

 # ID class (mere int).
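Note: a minimal usage sketch of the new `AttributeDescription` helpers, assuming the add-on is enabled so its package is importable and that a mesh named "Cube" exists (both are assumptions, not part of the diff):

```python
import bpy
import numpy as np
from io_scene_fbx.fbx_utils import (
    MESH_ATTRIBUTE_POSITION, MESH_ATTRIBUTE_CORNER_VERT, MESH_ATTRIBUTE_SHARP_EDGE)

me = bpy.data.meshes["Cube"]
attributes = me.attributes

# Flat float32 array of vertex positions, length == len(me.vertices) * 3.
positions = MESH_ATTRIBUTE_POSITION.to_ndarray(attributes)

# ".corner_vert" is required whenever the mesh has loops, so this never comes back empty for such meshes.
corner_verts = MESH_ATTRIBUTE_CORNER_VERT.to_ndarray(attributes)

# "sharp_edge" is optional; get_ndarray() returns None when the mesh has no such attribute.
sharp = MESH_ATTRIBUTE_SHARP_EDGE.get_ndarray(attributes)
if sharp is None:
    sharp = np.zeros(len(me.edges), dtype=bool)
```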
@@ -41,6 +41,12 @@ from .fbx_utils import (
     nors_transformed,
     parray_as_ndarray,
     astype_view_signedness,
+    MESH_ATTRIBUTE_MATERIAL_INDEX,
+    MESH_ATTRIBUTE_POSITION,
+    MESH_ATTRIBUTE_EDGE_VERTS,
+    MESH_ATTRIBUTE_CORNER_VERT,
+    MESH_ATTRIBUTE_SHARP_FACE,
+    MESH_ATTRIBUTE_SHARP_EDGE,
 )

 # global singleton, assign on execution
@@ -1199,12 +1205,14 @@ def blen_read_geom_layer_material(fbx_obj, mesh):
    layer_id = b'Materials'
    fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, layer_id))

-   blen_data = mesh.polygons
+   blen_data = MESH_ATTRIBUTE_MATERIAL_INDEX.ensure(mesh.attributes).data
+   fbx_item_size = 1
+   assert(fbx_item_size == MESH_ATTRIBUTE_MATERIAL_INDEX.item_size)
    blen_read_geom_array_mapped_polygon(
-       mesh, blen_data, "material_index", np.uintc,
+       mesh, blen_data, MESH_ATTRIBUTE_MATERIAL_INDEX.foreach_attribute, MESH_ATTRIBUTE_MATERIAL_INDEX.dtype,
        fbx_layer_data, None,
        fbx_layer_mapping, fbx_layer_ref,
-       1, 1, layer_id,
+       1, fbx_item_size, layer_id,
    )


@@ -1227,7 +1235,7 @@ def blen_read_geom_layer_uv(fbx_obj, mesh):
                  "" % (layer_id, fbx_layer_name, mesh.name))
            continue

-       blen_data = uv_lay.data
+       blen_data = uv_lay.uv

        # some valid files omit this data
        if fbx_layer_data is None:
@@ -1235,7 +1243,7 @@ def blen_read_geom_layer_uv(fbx_obj, mesh):
            continue

        blen_read_geom_array_mapped_polyloop(
-           mesh, blen_data, "uv", np.single,
+           mesh, blen_data, "vector", np.single,
            fbx_layer_data, fbx_layer_index,
            fbx_layer_mapping, fbx_layer_ref,
            2, 2, layer_id,
@@ -1307,25 +1315,29 @@ def blen_read_geom_layer_smooth(fbx_obj, mesh):
            print("warning skipping sharp edges data, no valid edges...")
            return False

-       blen_data = mesh.edges
+       blen_data = MESH_ATTRIBUTE_SHARP_EDGE.ensure(mesh.attributes).data
+       fbx_item_size = 1
+       assert(fbx_item_size == MESH_ATTRIBUTE_SHARP_EDGE.item_size)
        blen_read_geom_array_mapped_edge(
-           mesh, blen_data, "use_edge_sharp", bool,
+           mesh, blen_data, MESH_ATTRIBUTE_SHARP_EDGE.foreach_attribute, MESH_ATTRIBUTE_SHARP_EDGE.dtype,
            fbx_layer_data, None,
            fbx_layer_mapping, fbx_layer_ref,
-           1, 1, layer_id,
-           xform=np.logical_not,
+           1, fbx_item_size, layer_id,
+           xform=np.logical_not,  # in FBX, 0 (False) is sharp, but in Blender True is sharp.
        )
        # We only set sharp edges here, not face smoothing itself...
        mesh.use_auto_smooth = True
        return False
    elif fbx_layer_mapping == b'ByPolygon':
-       blen_data = mesh.polygons
+       blen_data = MESH_ATTRIBUTE_SHARP_FACE.ensure(mesh.attributes).data
+       fbx_item_size = 1
+       assert(fbx_item_size == MESH_ATTRIBUTE_SHARP_FACE.item_size)
        return blen_read_geom_array_mapped_polygon(
-           mesh, blen_data, "use_smooth", bool,
+           mesh, blen_data, MESH_ATTRIBUTE_SHARP_FACE.foreach_attribute, MESH_ATTRIBUTE_SHARP_FACE.dtype,
            fbx_layer_data, None,
            fbx_layer_mapping, fbx_layer_ref,
-           1, 1, layer_id,
-           xform=lambda s: (s != 0),  # smoothgroup bitflags, treat as booleans for now
+           1, fbx_item_size, layer_id,
+           xform=lambda s: (s == 0),  # smoothgroup bitflags, treat as booleans for now
        )
    else:
        print("warning layer %r mapping type unsupported: %r" % (fbx_layer.id, fbx_layer_mapping))
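Note: the inverted `xform` callbacks above exist because FBX stores smoothing (non-zero means smooth) while Blender's "sharp_edge"/"sharp_face" attributes store sharpness (True means sharp). A plain-NumPy sketch with hypothetical FBX values:

```python
import numpy as np

fbx_smoothing = np.array([1, 0, 1, 32], dtype=np.int32)   # hypothetical FBX smoothing values
blender_sharp_edge = np.logical_not(fbx_smoothing)        # ByEdge: 0 -> sharp (True)
blender_sharp_face = (fbx_smoothing == 0)                 # ByPolygon: 0 -> sharp face
print(blender_sharp_edge)  # [False  True False False]
print(blender_sharp_face)  # [False  True False False]
```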
@@ -1360,9 +1372,9 @@ def blen_read_geom_layer_edge_crease(fbx_obj, mesh):
            print("warning skipping edge crease data, no valid edges...")
            return False

-       blen_data = mesh.edges
+       blen_data = mesh.edge_creases_ensure().data
        return blen_read_geom_array_mapped_edge(
-           mesh, blen_data, "crease", np.single,
+           mesh, blen_data, "value", np.single,
            fbx_layer_data, None,
            fbx_layer_mapping, fbx_layer_ref,
            1, 1, layer_id,
@@ -1412,8 +1424,7 @@ def blen_read_geom_layer_normal(fbx_obj, mesh, xform=None):
        mesh.loops.foreach_set("normal", loop_normals.ravel())
    elif blen_data_type == "Vertices":
        # We have to copy vnors to lnors! Far from elegant, but simple.
-       loop_vertex_indices = np.empty(len(mesh.loops), dtype=np.uintc)
-       mesh.loops.foreach_get("vertex_index", loop_vertex_indices)
+       loop_vertex_indices = MESH_ATTRIBUTE_CORNER_VERT.to_ndarray(mesh.attributes)
        mesh.loops.foreach_set("normal", bdata[loop_vertex_indices].ravel())
    return True

@@ -1440,8 +1451,6 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):
    fbx_polys = elem_prop_first(elem_find_first(fbx_obj, b'PolygonVertexIndex'))
    fbx_edges = elem_prop_first(elem_find_first(fbx_obj, b'Edges'))

-   bl_vcos_dtype = np.single
-
    # The dtypes when empty don't matter, but are set to what the fbx arrays are expected to be.
    fbx_verts = parray_as_ndarray(fbx_verts) if fbx_verts else np.empty(0, dtype=data_types.ARRAY_FLOAT64)
    fbx_polys = parray_as_ndarray(fbx_polys) if fbx_polys else np.empty(0, dtype=data_types.ARRAY_INT32)
@@ -1458,18 +1467,19 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):
    tot_edges = len(fbx_edges)

    mesh = bpy.data.meshes.new(name=elem_name_utf8)
+   attributes = mesh.attributes

    if tot_verts:
        if geom_mat_co is not None:
-           fbx_verts = vcos_transformed(fbx_verts, geom_mat_co, bl_vcos_dtype)
+           fbx_verts = vcos_transformed(fbx_verts, geom_mat_co, MESH_ATTRIBUTE_POSITION.dtype)
        else:
-           fbx_verts = fbx_verts.astype(bl_vcos_dtype, copy=False)
+           fbx_verts = fbx_verts.astype(MESH_ATTRIBUTE_POSITION.dtype, copy=False)

        mesh.vertices.add(tot_verts)
-       mesh.vertices.foreach_set("co", fbx_verts.ravel())
+       MESH_ATTRIBUTE_POSITION.foreach_set(attributes, fbx_verts.ravel())

    if tot_loops:
-       bl_loop_start_dtype = bl_loop_vertex_index_dtype = np.uintc
+       bl_loop_start_dtype = np.uintc

        mesh.loops.add(tot_loops)
        # The end of each polygon is specified by an inverted index.
@ -1480,7 +1490,8 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):
|
|||||||
# Un-invert the loop ends.
|
# Un-invert the loop ends.
|
||||||
fbx_polys[fbx_loop_end_idx] ^= -1
|
fbx_polys[fbx_loop_end_idx] ^= -1
|
||||||
# Set loop vertex indices, casting to the Blender C type first for performance.
|
# Set loop vertex indices, casting to the Blender C type first for performance.
|
||||||
mesh.loops.foreach_set("vertex_index", astype_view_signedness(fbx_polys, bl_loop_vertex_index_dtype))
|
MESH_ATTRIBUTE_CORNER_VERT.foreach_set(
|
||||||
|
attributes, astype_view_signedness(fbx_polys, MESH_ATTRIBUTE_CORNER_VERT.dtype))
|
||||||
|
|
||||||
poly_loop_starts = np.empty(tot_polys, dtype=bl_loop_start_dtype)
|
poly_loop_starts = np.empty(tot_polys, dtype=bl_loop_start_dtype)
|
||||||
# The first loop is always a loop start.
|
# The first loop is always a loop start.
|
||||||
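The hunks above swap the per-loop `foreach_get("vertex_index", ...)` / `foreach_set("vertex_index", ...)` calls for reads and writes through the generic attribute API; the `MESH_ATTRIBUTE_*` helpers are small wrappers defined in the add-on's `fbx_utils` module. A minimal sketch of the same pattern against the plain Blender API, assuming Blender 3.6+ (where the corner-to-vertex map is exposed as the built-in ".corner_vert" integer attribute) and a hypothetical object name:

    import bpy
    import numpy as np

    mesh = bpy.data.objects["Cube"].data  # hypothetical object name

    # Bulk-read the corner -> vertex map, the rough equivalent of
    # MESH_ATTRIBUTE_CORNER_VERT.to_ndarray(mesh.attributes).
    corner_verts = np.empty(len(mesh.loops), dtype=np.intc)
    mesh.attributes[".corner_vert"].data.foreach_get("value", corner_verts)

    # Bulk-read vertex positions from the built-in "position" attribute and
    # gather them per corner with NumPy fancy indexing.
    positions = np.empty(len(mesh.vertices) * 3, dtype=np.single)
    mesh.attributes["position"].data.foreach_get("vector", positions)
    per_corner_positions = positions.reshape(-1, 3)[corner_verts]

Writing takes the same route, e.g. `mesh.attributes[".corner_vert"].data.foreach_set("value", corner_verts)` after `mesh.loops.add(...)`, which is what the importer's `foreach_set` wrapper boils down to.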
@@ -1497,7 +1508,6 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):

 if tot_edges:
     # edges in fact index the polygons (NOT the vertices)
-    bl_edge_vertex_indices_dtype = np.uintc

     # The first vertex index of each edge is the vertex index of the corresponding loop in fbx_polys.
     edges_a = fbx_polys[fbx_edges]

@@ -1521,12 +1531,12 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):
     # Stack edges_a and edges_b as individual columns like np.column_stack((edges_a, edges_b)).
     # np.concatenate is used because np.column_stack doesn't allow specifying the dtype of the returned array.
     edges_conv = np.concatenate((edges_a.reshape(-1, 1), edges_b.reshape(-1, 1)),
-                                axis=1, dtype=bl_edge_vertex_indices_dtype, casting='unsafe')
+                                axis=1, dtype=MESH_ATTRIBUTE_EDGE_VERTS.dtype, casting='unsafe')

     # Add the edges and set their vertex indices.
     mesh.edges.add(len(edges_conv))
     # ravel() because edges_conv must be flat and C-contiguous when passed to foreach_set.
-    mesh.edges.foreach_set("vertices", edges_conv.ravel())
+    MESH_ATTRIBUTE_EDGE_VERTS.foreach_set(attributes, edges_conv.ravel())
 elif tot_edges:
     print("ERROR: No polygons, but edges exist. Ignoring the edges!")
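The edge hunk keeps the existing `np.concatenate` trick but takes the target dtype from the edge-verts attribute instead of a hard-coded `np.uintc`. A small, self-contained NumPy illustration of why `np.concatenate` is used here (NumPy 1.20+ is assumed for the `dtype`/`casting` arguments):

    import numpy as np

    edges_a = np.array([0, 1, 2, 3], dtype=np.int64)
    edges_b = np.array([1, 2, 3, 0], dtype=np.int64)

    # np.column_stack cannot be told the output dtype, so it would allocate an
    # int64 array that then needs a second cast; np.concatenate builds the
    # (N, 2) vertex pairs directly in the dtype foreach_set expects.
    edge_verts = np.concatenate((edges_a.reshape(-1, 1), edges_b.reshape(-1, 1)),
                                axis=1, dtype=np.intc, casting='unsafe')

    print(edge_verts.dtype, edge_verts.shape)  # int32 (4, 2)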
@@ -1554,7 +1564,9 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):
     mesh.loops.foreach_get("normal", clnors)

     if not ok_smooth:
-        mesh.polygons.foreach_set("use_smooth", np.full(len(mesh.polygons), True, dtype=bool))
+        sharp_face = MESH_ATTRIBUTE_SHARP_FACE.get(attributes)
+        if sharp_face:
+            attributes.remove(sharp_face)
         ok_smooth = True

     # Iterating clnors into a nested tuple first is faster than passing clnors.reshape(-1, 3) directly into

@@ -1566,7 +1578,9 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):
     mesh.free_normals_split()

     if not ok_smooth:
-        mesh.polygons.foreach_set("use_smooth", np.full(len(mesh.polygons), True, dtype=bool))
+        sharp_face = MESH_ATTRIBUTE_SHARP_FACE.get(attributes)
+        if sharp_face:
+            attributes.remove(sharp_face)

 if settings.use_custom_props:
     blen_read_custom_properties(fbx_obj, mesh, settings)
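Both smoothing hunks replace the old `use_smooth` polygon writes with removal of the boolean "sharp_face" face attribute: when no face is marked sharp, every face renders smooth. A sketch of the same operation without the importer's helper, assuming Blender 3.6+ and a hypothetical object name:

    import bpy

    mesh = bpy.data.objects["Cube"].data  # hypothetical object name

    # Dropping the "sharp_face" attribute marks every face as smooth-shaded,
    # which is what MESH_ATTRIBUTE_SHARP_FACE.get() + attributes.remove() do above.
    sharp_face = mesh.attributes.get("sharp_face")
    if sharp_face is not None:
        mesh.attributes.remove(sharp_face)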
@@ -1579,9 +1593,7 @@ def blen_read_shapes(fbx_tmpl, fbx_data, objects, me, scene):
     # No shape key data. Nothing to do.
     return

-bl_vcos_dtype = np.single
-me_vcos = np.empty(len(me.vertices) * 3, dtype=bl_vcos_dtype)
-me.vertices.foreach_get("co", me_vcos)
+me_vcos = MESH_ATTRIBUTE_POSITION.to_ndarray(me.attributes)
 me_vcos_vector_view = me_vcos.reshape(-1, 3)

 objects = list({node.bl_obj for node in objects})
@@ -1775,10 +1787,12 @@ def blen_read_texture_image(fbx_tmpl, fbx_obj, basedir, settings):
     return image


-def blen_read_camera(fbx_tmpl, fbx_obj, global_scale):
+def blen_read_camera(fbx_tmpl, fbx_obj, settings):
     # meters to inches
     M2I = 0.0393700787

+    global_scale = settings.global_scale
+
     elem_name_utf8 = elem_name_ensure_class(fbx_obj, b'NodeAttribute')

     fbx_props = (elem_find_first(fbx_obj, b'Properties70'),

@@ -1806,10 +1820,13 @@ def blen_read_camera(fbx_tmpl, fbx_obj, global_scale):
     camera.clip_start = elem_props_get_number(fbx_props, b'NearPlane', 0.01) * global_scale
     camera.clip_end = elem_props_get_number(fbx_props, b'FarPlane', 100.0) * global_scale

+    if settings.use_custom_props:
+        blen_read_custom_properties(fbx_obj, camera, settings)
+
     return camera


-def blen_read_light(fbx_tmpl, fbx_obj, global_scale):
+def blen_read_light(fbx_tmpl, fbx_obj, settings):
     import math
     elem_name_utf8 = elem_name_ensure_class(fbx_obj, b'NodeAttribute')

@@ -1839,13 +1856,16 @@ def blen_read_light(fbx_tmpl, fbx_obj, global_scale):
     # TODO, cycles nodes???
     lamp.color = elem_props_get_color_rgb(fbx_props, b'Color', (1.0, 1.0, 1.0))
     lamp.energy = elem_props_get_number(fbx_props, b'Intensity', 100.0) / 100.0
-    lamp.distance = elem_props_get_number(fbx_props, b'DecayStart', 25.0) * global_scale
+    lamp.distance = elem_props_get_number(fbx_props, b'DecayStart', 25.0) * settings.global_scale
     lamp.use_shadow = elem_props_get_bool(fbx_props, b'CastShadow', True)
     if hasattr(lamp, "cycles"):
         lamp.cycles.cast_shadow = lamp.use_shadow
     # Keeping this for now, but this is not used nor exposed anymore afaik...
     lamp.shadow_color = elem_props_get_color_rgb(fbx_props, b'ShadowColor', (0.0, 0.0, 0.0))

+    if settings.use_custom_props:
+        blen_read_custom_properties(fbx_obj, lamp, settings)
+
     return lamp

@@ -1859,7 +1879,7 @@ class FbxImportHelperNode:
     __slots__ = (
         '_parent', 'anim_compensation_matrix', 'is_global_animation', 'armature_setup', 'armature', 'bind_matrix',
         'bl_bone', 'bl_data', 'bl_obj', 'bone_child_matrix', 'children', 'clusters',
-        'fbx_elem', 'fbx_name', 'fbx_transform_data', 'fbx_type',
+        'fbx_elem', 'fbx_data_elem', 'fbx_name', 'fbx_transform_data', 'fbx_type',
        'is_armature', 'has_bone_children', 'is_bone', 'is_root', 'is_leaf',
        'matrix', 'matrix_as_parent', 'matrix_geom', 'meshes', 'post_matrix', 'pre_matrix')

@@ -1867,6 +1887,7 @@ class FbxImportHelperNode:
     self.fbx_name = elem_name_ensure_class(fbx_elem, b'Model') if fbx_elem else 'Unknown'
     self.fbx_type = fbx_elem.props[2] if fbx_elem else None
     self.fbx_elem = fbx_elem
+    self.fbx_data_elem = None  # FBX elem of a connected NodeAttribute/Geometry for helpers whose bl_data does not exist or is yet to be created.
     self.bl_obj = None
     self.bl_data = bl_data
     self.bl_bone = None  # Name of bone if this is a bone (this may be different to fbx_name if there was a name conflict in Blender!)

@@ -2197,7 +2218,7 @@ class FbxImportHelperNode:
         for child in self.children:
             child.collect_armature_meshes()

-    def build_skeleton(self, arm, parent_matrix, parent_bone_size=1, force_connect_children=False):
+    def build_skeleton(self, arm, parent_matrix, settings, parent_bone_size=1):
         def child_connect(par_bone, child_bone, child_head, connect_ctx):
             # child_bone or child_head may be None.
             force_connect_children, connected = connect_ctx

@@ -2244,6 +2265,9 @@ class FbxImportHelperNode:
         self.bl_obj = arm.bl_obj
         self.bl_data = arm.bl_data
         self.bl_bone = bone.name  # Could be different from the FBX name!
+        # Read EditBone custom props the NodeAttribute
+        if settings.use_custom_props and self.fbx_data_elem:
+            blen_read_custom_properties(self.fbx_data_elem, bone, settings)

         # get average distance to children
         bone_size = 0.0

@@ -2272,6 +2296,7 @@ class FbxImportHelperNode:
         # while Blender attaches to the tail.
         self.bone_child_matrix = Matrix.Translation(-bone_tail)

+        force_connect_children = settings.force_connect_children
         connect_ctx = [force_connect_children, ...]
         for child in self.children:
             if child.is_leaf and force_connect_children:

@@ -2280,8 +2305,7 @@ class FbxImportHelperNode:
                 child_head = (bone_matrix @ child.get_bind_matrix().normalized()).translation
                 child_connect(bone, None, child_head, connect_ctx)
             elif child.is_bone and not child.ignore:
-                child_bone = child.build_skeleton(arm, bone_matrix, bone_size,
-                                                  force_connect_children=force_connect_children)
+                child_bone = child.build_skeleton(arm, bone_matrix, settings, bone_size)
                 # Connection to parent.
                 child_connect(bone, child_bone, None, connect_ctx)

@@ -2376,15 +2400,18 @@ class FbxImportHelperNode:

         return obj

-    def set_pose_matrix(self, arm):
+    def set_pose_matrix_and_custom_props(self, arm, settings):
         pose_bone = arm.bl_obj.pose.bones[self.bl_bone]
         pose_bone.matrix_basis = self.get_bind_matrix().inverted_safe() @ self.get_matrix()

+        if settings.use_custom_props:
+            blen_read_custom_properties(self.fbx_elem, pose_bone, settings)
+
         for child in self.children:
             if child.ignore:
                 continue
             if child.is_bone:
-                child.set_pose_matrix(arm)
+                child.set_pose_matrix_and_custom_props(arm, settings)

     def merge_weights(self, combined_weights, fbx_cluster):
         indices = elem_prop_first(elem_find_first(fbx_cluster, b'Indexes', default=None), default=())

@@ -2480,18 +2507,18 @@ class FbxImportHelperNode:
             if child.ignore:
                 continue
             if child.is_bone:
-                child.build_skeleton(self, Matrix(), force_connect_children=settings.force_connect_children)
+                child.build_skeleton(self, Matrix(), settings)

         bpy.ops.object.mode_set(mode='OBJECT')

         arm.hide_viewport = is_hidden

-        # Set pose matrix
+        # Set pose matrix and PoseBone custom properties
         for child in self.children:
             if child.ignore:
                 continue
             if child.is_bone:
-                child.set_pose_matrix(self)
+                child.set_pose_matrix_and_custom_props(self, settings)

         # Add bone children:
         for child in self.children:

@@ -2886,7 +2913,7 @@ def load(operator, context, filepath="",
                 continue
             if fbx_obj.props[-1] == b'Camera':
                 assert(blen_data is None)
-                fbx_item[1] = blen_read_camera(fbx_tmpl, fbx_obj, global_scale)
+                fbx_item[1] = blen_read_camera(fbx_tmpl, fbx_obj, settings)
         _(); del _

     # ----

@@ -2900,7 +2927,7 @@ def load(operator, context, filepath="",
                 continue
             if fbx_obj.props[-1] == b'Light':
                 assert(blen_data is None)
-                fbx_item[1] = blen_read_light(fbx_tmpl, fbx_obj, global_scale)
+                fbx_item[1] = blen_read_light(fbx_tmpl, fbx_obj, settings)
         _(); del _

     # ----

@@ -2969,6 +2996,9 @@ def load(operator, context, filepath="",
                 if fbx_sdata.id not in {b'Geometry', b'NodeAttribute'}:
                     continue
                 parent.bl_data = bl_data
+                if bl_data is None:
+                    # If there's no bl_data, add the fbx_sdata so that it can be read when creating the bl_data/bone
+                    parent.fbx_data_elem = fbx_sdata
             else:
                 # set parent
                 child.parent = parent

@@ -3495,19 +3525,18 @@ def load(operator, context, filepath="",
         if fbx_obj.props[-1] == b'Mesh':
             mesh = fbx_item[1]

-            if decal_offset != 0.0:
+            num_verts = len(mesh.vertices)
+            if decal_offset != 0.0 and num_verts > 0:
                 for material in mesh.materials:
                     if material in material_decals:
-                        num_verts = len(mesh.vertices)
-                        blen_cos_dtype = blen_norm_dtype = np.single
-                        vcos = np.empty(num_verts * 3, dtype=blen_cos_dtype)
+                        blen_norm_dtype = np.single
+                        vcos = MESH_ATTRIBUTE_POSITION.to_ndarray(mesh.attributes)
                         vnorm = np.empty(num_verts * 3, dtype=blen_norm_dtype)
-                        mesh.vertices.foreach_get("co", vcos)
-                        mesh.vertices.foreach_get("normal", vnorm)
+                        mesh.vertex_normals.foreach_get("vector", vnorm)

                         vcos += vnorm * decal_offset

-                        mesh.vertices.foreach_set("co", vcos)
+                        MESH_ATTRIBUTE_POSITION.foreach_set(mesh.attributes, vcos)
                         break

             for obj in (obj for obj in bpy.data.objects if obj.data == mesh):
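The decal-offset hunk above now reads positions through the attribute API and normals from `mesh.vertex_normals`, and skips the work entirely for empty meshes. A standalone sketch of that displacement, with the object name and offset value made up for the example:

    import bpy
    import numpy as np

    decal_offset = 0.001                        # assumed value
    mesh = bpy.data.objects["DecalPlane"].data  # hypothetical object name

    num_verts = len(mesh.vertices)
    if num_verts:
        vcos = np.empty(num_verts * 3, dtype=np.single)
        vnorm = np.empty(num_verts * 3, dtype=np.single)
        mesh.attributes["position"].data.foreach_get("vector", vcos)
        mesh.vertex_normals.foreach_get("vector", vnorm)

        # Push every vertex out along its normal to keep the decal off the surface.
        vcos += vnorm * decal_offset

        mesh.attributes["position"].data.foreach_set("vector", vcos)
        mesh.update()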
@@ -5,7 +5,8 @@
 bl_info = {
     'name': 'glTF 2.0 format',
     'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
-    "version": (4, 0, 3),
+    "version": (4, 0, 5),
+    "version": (3, 6, 27),
     'blender': (3, 5, 0),
     'location': 'File > Import-Export',
     'description': 'Import-Export as glTF 2.0',

@@ -248,7 +248,7 @@ def do_primitives(gltf, mesh_idx, skin_idx, mesh, ob):
                     (len(indices), DataType.num_elements(gltf.data.accessors[prim.attributes[attr]].type)),
                     dtype=ComponentType.to_numpy_dtype(gltf.data.accessors[prim.attributes[attr]].component_type)
                 )
-                attribute_data[idx] = np.concatenate((attribute_data[idx], attr_data))
+                attribute_data[idx] = np.concatenate((attribute_data[idx], attr_data[unique_indices]))

     # Accessors are cached in case they are shared between primitives; clear
     # the cache now that all prims are done.
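The glTF fix appends only the rows selected by `unique_indices` rather than the full per-corner accessor data. A small illustration of that slicing pattern — the variable names and data here are assumptions for the example, not the importer's actual structures:

    import numpy as np

    # Corner -> vertex indices as read from a primitive (with repeats), plus one
    # attribute row per corner.
    indices = np.array([0, 1, 2, 2, 1, 3])
    attr_data = np.array([[0.0], [0.1], [0.2], [0.2], [0.1], [0.3]])

    # np.unique gives, for each distinct vertex, the first corner that used it,
    # so attr_data[unique_indices] is one row per vertex instead of per corner.
    unique_values, unique_indices = np.unique(indices, return_index=True)
    per_vertex = attr_data[unique_indices]
    print(per_vertex.ravel())  # [0.  0.1 0.2 0.3]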
@@ -642,7 +642,8 @@ def set_poly_smoothing(gltf, pymesh, mesh, vert_normals, loop_vidxs):
     num_polys = len(mesh.polygons)

     if gltf.import_settings['import_shading'] == "FLAT":
-        # Polys are flat by default; don't have to do anything
+        # Polys are smooth by default, setting to flat
+        mesh.shade_flat()
         return

     if gltf.import_settings['import_shading'] == "SMOOTH":

@@ -3015,6 +3015,8 @@ def importShape_ProcessObject(
     if creaseAngle is not None:
         bpydata.auto_smooth_angle = creaseAngle
         bpydata.use_auto_smooth = True
+    else:
+        bpydata.polygons.foreach_set("use_smooth", [False] * len(bpydata.polygons))

     # Only ever 1 material per shape
     if bpymat:

@@ -563,11 +563,14 @@ class MUV_OT_UVInspection_PaintUVIsland(bpy.types.Operator):
             "name": "",
             "location": (0, 0, 0),
             "mouse": (0, 0),
+            "mouse_event": (0.0, 0.0),
             "size": 0,
             "pressure": 0,
             "pen_flip": False,
             "time": 0,
-            "is_start": False
+            "is_start": False,
+            "x_tilt": 0.0,
+            "y_tilt": 0.0,
         }])

         if compat.check_version(2, 80, 0) < 0:

@@ -2255,7 +2255,10 @@ class tissue_update_tessellate(Operator):
         use_bmesh = not (bool_shapekeys and fill_mode == 'PATCH' and component_mode != 'OBJECT')
         merge_components(new_ob, ob.tissue_tessellate, use_bmesh)

-        if bool_smooth: bpy.ops.object.shade_smooth()
+        if bool_smooth:
+            bpy.ops.object.shade_smooth()
+        else:
+            bpy.ops.object.shade_flat()

         for mesh in bpy.data.meshes:
             if not mesh.users: bpy.data.meshes.remove(mesh)

@@ -103,7 +103,7 @@ def files_to_clean_file_names_for_sockets(files, sockets):
         socket_tags = socket[1]
         all_tags.update(socket_tags)

-    while True:
+    while len(names_to_tag_lists) > 1:
         something_changed = False

         # Common prefixes / suffixes provide zero information about what file

@@ -255,6 +255,38 @@ class TestPutFileNamesInSockets(unittest.TestCase):
             },
         )

+    def test_single_file_good(self):
+        """Regression test for https://projects.blender.org/blender/blender-addons/issues/104573"""
+
+        files = [
+            MockFile("banana-color.webp"),
+        ]
+        sockets = sockets_fixture()
+        match_files_to_socket_names(files, sockets)
+
+        assert_sockets(
+            self,
+            sockets,
+            {
+                "Base Color": "banana-color.webp",
+            },
+        )
+
+    def test_single_file_bad(self):
+        """Regression test for https://projects.blender.org/blender/blender-addons/issues/104573"""
+
+        files = [
+            MockFile("README-banana.txt"),
+        ]
+        sockets = sockets_fixture()
+        match_files_to_socket_names(files, sockets)
+
+        assert_sockets(
+            self,
+            sockets,
+            {},
+        )
+
+
 if __name__ == "__main__":
     unittest.main(verbosity=2)

@@ -6,8 +6,8 @@ bl_info = {
     "name": "Collection Manager",
     "description": "Manage collections and their objects",
     "author": "Ryan Inch",
-    "version": (2, 24, 4),
-    "blender": (2, 80, 0),
+    "version": (2, 24, 8),
+    "blender": (3, 0, 0),
     "location": "View3D - Object Mode (Shortcut - M)",
     "warning": '',  # used for warning icon and text in addons panel
     "doc_url": "{BLENDER_MANUAL_URL}/addons/interface/collection_manager.html",

@@ -764,7 +764,7 @@ class CMSendReport(Operator):
             if length > max_len:
                 max_len = length

-        return wm.invoke_popup(self, width=(30 + (max_len*5.5)))
+        return wm.invoke_popup(self, width=int(30 + (max_len*5.5)))

     def execute(self, context):
         self.report({'INFO'}, self.message)

@@ -1509,7 +1509,7 @@ class CMRestoreDisabledObjectsOperator(Operator):

     def execute(self, context):
         for obj in bpy.data.objects:
-            if obj.hide_viewport:
+            if obj.name in context.view_layer.objects and obj.hide_viewport:
                 obj.hide_viewport = False
                 obj.select_set(True)

@@ -5,7 +5,6 @@
 import time
 from math import cos, sin, pi, floor
 import bpy
-import bgl
 import blf
 import gpu
 from gpu_extras.batch import batch_for_shader

@@ -91,15 +90,12 @@ def draw_rounded_rect(area, shader, color, tl=5, tr=5, bl=5, br=5, outline=False
     bl = round(bl * scale_factor())
     br = round(br * scale_factor())

-    bgl.glEnable(bgl.GL_BLEND)
+    gpu.state.blend_set('ALPHA')

     if outline:
         thickness = round(2 * scale_factor())
         thickness = max(thickness, 2)
-        bgl.glLineWidth(thickness)
-        bgl.glEnable(bgl.GL_LINE_SMOOTH)
-        bgl.glHint(bgl.GL_LINE_SMOOTH_HINT, bgl.GL_NICEST)
+        shader.uniform_float("lineWidth", thickness)

     draw_type = 'TRI_FAN' if not outline else 'LINE_STRIP'
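The deleted `bgl` calls map onto the `gpu` module: `bgl.glEnable(bgl.GL_BLEND)` becomes `gpu.state.blend_set('ALPHA')`, and smooth, width-controlled outlines come from the built-in 'POLYLINE_SMOOTH_COLOR' shader with its "lineWidth" uniform, as the following hunks show. A minimal draw-handler sketch using those calls (coordinates and color are arbitrary; this is an illustration, not the add-on's code, and must run inside Blender):

    import bpy
    import gpu
    from gpu_extras.batch import batch_for_shader

    line_shader = gpu.shader.from_builtin('POLYLINE_SMOOTH_COLOR')
    points = [(100, 100, 0), (300, 100, 0), (300, 200, 0), (100, 100, 0)]
    color = (1.0, 0.5, 0.0, 1.0)
    batch = batch_for_shader(line_shader, 'LINE_STRIP',
                             {"pos": points, "color": [color] * len(points)})

    def draw():
        gpu.state.blend_set('ALPHA')
        line_shader.bind()
        # The polyline shaders take the line width (and viewport size) as uniforms,
        # replacing the removed bgl.glLineWidth()/GL_LINE_SMOOTH state.
        line_shader.uniform_float("viewportSize", gpu.state.viewport_get()[2:])
        line_shader.uniform_float("lineWidth", 2.0)
        batch.draw(line_shader)
        gpu.state.blend_set('NONE')

    handle = bpy.types.SpaceView3D.draw_handler_add(draw, (), 'WINDOW', 'POST_PIXEL')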
@@ -115,10 +111,15 @@ def draw_rounded_rect(area, shader, color, tl=5, tr=5, bl=5, br=5, outline=False
         sine = tl * sin(side * 2 * pi / sides) + vert_y
         vertices.append((cosine,sine))

-    batch = batch_for_shader(shader, draw_type, {"pos": vertices})
-    shader.bind()
-    shader.uniform_float("color", color)
-    batch.draw(shader)
+    if not outline:
+        batch = batch_for_shader(shader, draw_type, {"pos": vertices})
+        shader.bind()
+        shader.uniform_float("color", color)
+        batch.draw(shader)
+    else:
+        batch = batch_for_shader(shader, draw_type, {"pos": [(v[0], v[1], 0) for v in vertices], "color": [color for v in vertices]})
+        shader.bind()
+        batch.draw(shader)

     # top right corner
     vert_x = area["vert"][0] + area["width"] - tr

@@ -132,10 +133,15 @@ def draw_rounded_rect(area, shader, color, tl=5, tr=5, bl=5, br=5, outline=False
         sine = tr * sin(side * 2 * pi / sides) + vert_y
         vertices.append((cosine,sine))

-    batch = batch_for_shader(shader, draw_type, {"pos": vertices})
-    shader.bind()
-    shader.uniform_float("color", color)
-    batch.draw(shader)
+    if not outline:
+        batch = batch_for_shader(shader, draw_type, {"pos": vertices})
+        shader.bind()
+        shader.uniform_float("color", color)
+        batch.draw(shader)
+    else:
+        batch = batch_for_shader(shader, draw_type, {"pos": [(v[0], v[1], 0) for v in vertices], "color": [color for v in vertices]})
+        shader.bind()
+        batch.draw(shader)

     # bottom left corner
     vert_x = area["vert"][0] + bl

@@ -149,10 +155,15 @@ def draw_rounded_rect(area, shader, color, tl=5, tr=5, bl=5, br=5, outline=False
         sine = bl * sin(side * 2 * pi / sides) + vert_y
         vertices.append((cosine,sine))

-    batch = batch_for_shader(shader, draw_type, {"pos": vertices})
-    shader.bind()
-    shader.uniform_float("color", color)
-    batch.draw(shader)
+    if not outline:
+        batch = batch_for_shader(shader, draw_type, {"pos": vertices})
+        shader.bind()
+        shader.uniform_float("color", color)
+        batch.draw(shader)
+    else:
+        batch = batch_for_shader(shader, draw_type, {"pos": [(v[0], v[1], 0) for v in vertices], "color": [color for v in vertices]})
+        shader.bind()
+        batch.draw(shader)

     # bottom right corner
     vert_x = area["vert"][0] + area["width"] - br

@@ -166,10 +177,15 @@ def draw_rounded_rect(area, shader, color, tl=5, tr=5, bl=5, br=5, outline=False
         sine = br * sin(side * 2 * pi / sides) + vert_y
         vertices.append((cosine,sine))

-    batch = batch_for_shader(shader, draw_type, {"pos": vertices})
-    shader.bind()
-    shader.uniform_float("color", color)
-    batch.draw(shader)
+    if not outline:
+        batch = batch_for_shader(shader, draw_type, {"pos": vertices})
+        shader.bind()
+        shader.uniform_float("color", color)
+        batch.draw(shader)
+    else:
+        batch = batch_for_shader(shader, draw_type, {"pos": [(v[0], v[1], 0) for v in vertices], "color": [color for v in vertices]})
+        shader.bind()
+        batch.draw(shader)

     if not outline:
         vertices = []

@@ -249,6 +265,7 @@ def draw_rounded_rect(area, shader, color, tl=5, tr=5, bl=5, br=5, outline=False
             ])

         batch = batch_for_shader(shader, 'TRIS', {"pos": vertices}, indices=indices)
+        shader.bind()

         shader.uniform_float("color", color)
         batch.draw(shader)

@@ -263,7 +280,8 @@ def draw_rounded_rect(area, shader, color, tl=5, tr=5, bl=5, br=5, outline=False
             (le_x, bl_vert[1] - (overlap if bl == 0 else 0))
         ]

-        batch = batch_for_shader(shader, 'LINE_STRIP', {"pos": vertices})
+        batch = batch_for_shader(shader, 'LINE_STRIP', {"pos": [(v[0], v[1], 0) for v in vertices], "color": [color for v in vertices]})
+        shader.bind()
         batch.draw(shader)

         # right edge

@@ -273,7 +291,8 @@ def draw_rounded_rect(area, shader, color, tl=5, tr=5, bl=5, br=5, outline=False
             (re_x, br_vert[1] - (overlap if br == 0 else 0))
         ]

-        batch = batch_for_shader(shader, 'LINE_STRIP', {"pos": vertices})
+        batch = batch_for_shader(shader, 'LINE_STRIP', {"pos": [(v[0], v[1], 0) for v in vertices], "color": [color for v in vertices]})
+        shader.bind()
         batch.draw(shader)

         # top edge

@@ -283,7 +302,8 @@ def draw_rounded_rect(area, shader, color, tl=5, tr=5, bl=5, br=5, outline=False
             (tr_vert[0] + (overlap if tr == 0 else 0), te_y)
         ]

-        batch = batch_for_shader(shader, 'LINE_STRIP', {"pos": vertices})
+        batch = batch_for_shader(shader, 'LINE_STRIP', {"pos": [(v[0], v[1], 0) for v in vertices], "color": [color for v in vertices]})
+        shader.bind()
         batch.draw(shader)

         # bottom edge

@@ -293,12 +313,12 @@ def draw_rounded_rect(area, shader, color, tl=5, tr=5, bl=5, br=5, outline=False
             (br_vert[0] + (overlap if br == 0 else 0), be_y)
         ]

-        batch = batch_for_shader(shader, 'LINE_STRIP', {"pos": vertices})
+        batch = batch_for_shader(shader, 'LINE_STRIP', {"pos": [(v[0], v[1], 0) for v in vertices], "color": [color for v in vertices]})
+        shader.bind()
         batch.draw(shader)

-        bgl.glDisable(bgl.GL_LINE_SMOOTH)

-    bgl.glDisable(bgl.GL_BLEND)
+    gpu.state.blend_set('NONE')

 def mouse_in_area(mouse_pos, area, buf = 0):
     x = mouse_pos[0]

@@ -377,6 +397,7 @@ class QCDMoveWidget(Operator):
     }

     last_type = ''
+    last_type_value = ''
     initialized = False
     moved = False

@@ -394,7 +415,7 @@ class QCDMoveWidget(Operator):
         if len(self.areas) == 1:
             return {'RUNNING_MODAL'}

-        if self.last_type == 'LEFTMOUSE' and event.value == 'PRESS' and event.type == 'MOUSEMOVE':
+        if self.last_type == 'LEFTMOUSE' and self.last_type_value == 'PRESS' and event.type == 'MOUSEMOVE':
             if mouse_in_area(self.mouse_pos, self.areas["Grab Bar"]):
                 x_offset = self.areas["Main Window"]["vert"][0] - self.mouse_pos[0]
                 x = event.mouse_region_x + x_offset

@@ -460,6 +481,7 @@ class QCDMoveWidget(Operator):

         if event.type != 'MOUSEMOVE' and event.type != 'INBETWEEN_MOUSEMOVE':
             self.last_type = event.type
+            self.last_type_value = event.value

         return {'RUNNING_MODAL'}
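The widget now caches `event.value` alongside `event.type`, because a later MOUSEMOVE event does not itself tell you whether the last real event was a press; the drag check above compares against the stored value instead. A stripped-down modal operator showing the same bookkeeping (the operator name and drag handling are illustrative only):

    import bpy

    class WM_OT_modal_drag_sketch(bpy.types.Operator):
        """Minimal sketch: remember the last non-mousemove event type and value."""
        bl_idname = "wm.modal_drag_sketch"
        bl_label = "Modal Drag Sketch"

        def invoke(self, context, event):
            self.last_type = ''
            self.last_type_value = ''
            context.window_manager.modal_handler_add(self)
            return {'RUNNING_MODAL'}

        def modal(self, context, event):
            if event.type in {'RIGHTMOUSE', 'ESC'}:
                return {'CANCELLED'}

            # A drag: the mouse moved while the last real event was an LMB press.
            if (self.last_type == 'LEFTMOUSE' and self.last_type_value == 'PRESS'
                    and event.type == 'MOUSEMOVE'):
                pass  # drag handling would go here

            if event.type not in {'MOUSEMOVE', 'INBETWEEN_MOUSEMOVE'}:
                self.last_type = event.type
                self.last_type_value = event.value

            return {'RUNNING_MODAL'}

    bpy.utils.register_class(WM_OT_modal_drag_sketch)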
@@ -741,7 +763,7 @@ def draw_callback_px(self, context):
     allocate_main_ui(self, context)

     shader = gpu.shader.from_builtin('UNIFORM_COLOR')
-    shader.bind()
+    line_shader = gpu.shader.from_builtin('POLYLINE_SMOOTH_COLOR')

     addon_prefs = context.preferences.addons[__package__].preferences

@@ -749,7 +771,7 @@ def draw_callback_px(self, context):
     main_window = self.areas["Main Window"]
     outline_color = addon_prefs.qcd_ogl_widget_menu_back_outline
     background_color = addon_prefs.qcd_ogl_widget_menu_back_inner
-    draw_rounded_rect(main_window, shader, outline_color[:] + (1,), outline=True)
+    draw_rounded_rect(main_window, line_shader, outline_color[:] + (1,), outline=True)
     draw_rounded_rect(main_window, shader, background_color)

     # draw window title

@@ -765,9 +787,6 @@ def draw_callback_px(self, context):
     blf.color(font_id, text_color[0], text_color[1], text_color[2], 1)
     blf.draw(font_id, text)

-    # refresh shader - not sure why this is needed
-    shader.bind()
-
     in_tooltip_area = False
     tooltip_slot_idx = None

@@ -833,7 +852,7 @@ def draw_callback_px(self, context):

             # draw button
             outline_color = addon_prefs.qcd_ogl_widget_tool_outline
-            draw_rounded_rect(button_area, shader, outline_color[:] + (1,), tl, tr, bl, br, outline=True)
+            draw_rounded_rect(button_area, line_shader, outline_color[:] + (1,), tl, tr, bl, br, outline=True)
             draw_rounded_rect(button_area, shader, button_color, tl, tr, bl, br)

             # ACTIVE OBJECT

@@ -841,14 +860,15 @@ def draw_callback_px(self, context):
                 active_object_indicator = self.areas[f"Button {slot_num} Active Object Indicator"]

                 vertices = get_circle_coords(active_object_indicator)
-                shader.uniform_float("color", icon_color[:] + (1,))
                 batch = batch_for_shader(shader, 'TRI_FAN', {"pos": vertices})
+                shader.bind()
+                shader.uniform_float("color", icon_color[:] + (1,))

-                bgl.glEnable(bgl.GL_BLEND)
+                gpu.state.blend_set('ALPHA')

                 batch.draw(shader)

-                bgl.glDisable(bgl.GL_BLEND)
+                gpu.state.blend_set('NONE')

             # SELECTED OBJECTS
             elif not set(selected_objects).isdisjoint(collection_objects):

@@ -856,18 +876,16 @@ def draw_callback_px(self, context):

                 alpha = addon_prefs.qcd_ogl_selected_icon_alpha
                 vertices = get_circle_coords(selected_object_indicator)
-                shader.uniform_float("color", icon_color[:] + (alpha,))
-                batch = batch_for_shader(shader, 'LINE_STRIP', {"pos": vertices})
+                line_shader.uniform_float("lineWidth", 2 * scale_factor())
+                color = icon_color[:] + (alpha,)
+                batch = batch_for_shader(line_shader, 'LINE_STRIP', {"pos": [(v[0], v[1], 0) for v in vertices], "color": [color for v in vertices]})
+                shader.bind()

-                bgl.glLineWidth(2 * scale_factor())
-                bgl.glEnable(bgl.GL_BLEND)
-                bgl.glEnable(bgl.GL_LINE_SMOOTH)
-                bgl.glHint(bgl.GL_LINE_SMOOTH_HINT, bgl.GL_NICEST)
+                gpu.state.blend_set('ALPHA')

-                batch.draw(shader)
+                batch.draw(line_shader)

-                bgl.glDisable(bgl.GL_LINE_SMOOTH)
-                bgl.glDisable(bgl.GL_BLEND)
+                gpu.state.blend_set('NONE')

             # OBJECTS
             elif collection_objects:

@@ -875,14 +893,15 @@ def draw_callback_px(self, context):

                 alpha = addon_prefs.qcd_ogl_objects_icon_alpha
                 vertices, indices = get_coords(object_indicator)
-                shader.uniform_float("color", icon_color[:] + (alpha,))
                 batch = batch_for_shader(shader, 'TRIS', {"pos": vertices}, indices=indices)
+                shader.bind()
+                shader.uniform_float("color", icon_color[:] + (alpha,))

-                bgl.glEnable(bgl.GL_BLEND)
+                gpu.state.blend_set('ALPHA')

                 batch.draw(shader)

-                bgl.glDisable(bgl.GL_BLEND)
+                gpu.state.blend_set('NONE')


         # X ICON

@@ -891,17 +910,15 @@ def draw_callback_px(self, context):
             X_icon_color = addon_prefs.qcd_ogl_widget_menu_back_text

             vertices, indices = get_x_coords(X_icon)
-            shader.uniform_float("color", X_icon_color[:] + (1,))
             batch = batch_for_shader(shader, 'TRIS', {"pos": vertices}, indices=indices)
+            shader.bind()
+            shader.uniform_float("color", X_icon_color[:] + (1,))

-            bgl.glEnable(bgl.GL_BLEND)
-            bgl.glEnable(bgl.GL_POLYGON_SMOOTH)
-            bgl.glHint(bgl.GL_POLYGON_SMOOTH_HINT, bgl.GL_NICEST)
+            gpu.state.blend_set('ALPHA')

             batch.draw(shader)

-            bgl.glDisable(bgl.GL_POLYGON_SMOOTH)
-            bgl.glDisable(bgl.GL_BLEND)
+            gpu.state.blend_set('NONE')

     if in_tooltip_area:
         if self.draw_tooltip:

@@ -912,7 +929,7 @@ def draw_callback_px(self, context):
                 " * Shift+LMB - Toggle objects\' slot."
             )

-            draw_tooltip(self, context, shader, f"{slot_string}{hotkey_string}")
+            draw_tooltip(self, context, shader, line_shader, f"{slot_string}{hotkey_string}")

             self.hover_time = None

@@ -921,7 +938,7 @@ def draw_callback_px(self, context):
             self.hover_time = time.time()


-def draw_tooltip(self, context, shader, message):
+def draw_tooltip(self, context, shader, line_shader, message):
     addon_prefs = context.preferences.addons[__package__].preferences

     font_id = 0

@@ -962,7 +979,7 @@ def draw_tooltip(self, context, shader, message):

     outline_color = addon_prefs.qcd_ogl_widget_tooltip_outline
     background_color = addon_prefs.qcd_ogl_widget_tooltip_inner
-    draw_rounded_rect(tooltip, shader, outline_color[:] + (1,), outline=True)
+    draw_rounded_rect(tooltip, line_shader, outline_color[:] + (1,), outline=True)
     draw_rounded_rect(tooltip, shader, background_color)

     line_pos = padding + line_height

@@ -717,8 +717,14 @@ class ViewQCDSlot(Operator):
             apply_to_children(qcd_laycol, exclude_all_children)

         if orig_active_object:
-            if orig_active_object.name in context.view_layer.objects:
-                context.view_layer.objects.active = orig_active_object
+            try:
+                if orig_active_object.name in context.view_layer.objects:
+                    context.view_layer.objects.active = orig_active_object
+            except RuntimeError:
+                # Blender appears to have a race condition here for versions 3.4+,
+                # so if the active object is no longer in the view layer when
+                # attempting to set it just do nothing.
+                pass

         # restore locked objects back to their original mode
         # needed because of exclude child updates