Pose Library: Update to use the asset shelf (when enabled) #104546

Merged
Julian Eisel merged 33 commits from asset-shelf into main 2023-08-04 15:00:21 +02:00
27 changed files with 3538 additions and 324 deletions
Showing only changes of commit d60dfd23e0 - Show all commits

View File

@ -17,6 +17,7 @@ bl_info = {
"category": "Animation", "category": "Animation",
"support": 'OFFICIAL', "support": 'OFFICIAL',
"doc_url": "{BLENDER_MANUAL_URL}/addons/animation/copy_global_transform.html", "doc_url": "{BLENDER_MANUAL_URL}/addons/animation/copy_global_transform.html",
"tracker_url": "https://projects.blender.org/blender/blender-addons/issues",
} }
import ast import ast

View File

@ -98,7 +98,7 @@ def sorted_nodes(bvh_nodes):
def read_bvh(context, file_path, rotate_mode='XYZ', global_scale=1.0): def read_bvh(context, file_path, rotate_mode='XYZ', global_scale=1.0):
# File loading stuff # File loading stuff
# Open the file for importing # Open the file for importing
file = open(file_path, 'rU') file = open(file_path, 'r')
# Separate into a list of lists, each line a list of words. # Separate into a list of lists, each line a list of words.
file_lines = file.readlines() file_lines = file.readlines()

View File

@ -345,13 +345,11 @@ def load_ply_mesh(filepath, ply_name):
if mesh_faces: if mesh_faces:
loops_vert_idx = [] loops_vert_idx = []
faces_loop_start = [] faces_loop_start = []
faces_loop_total = []
lidx = 0 lidx = 0
for f in mesh_faces: for f in mesh_faces:
nbr_vidx = len(f) nbr_vidx = len(f)
loops_vert_idx.extend(f) loops_vert_idx.extend(f)
faces_loop_start.append(lidx) faces_loop_start.append(lidx)
faces_loop_total.append(nbr_vidx)
lidx += nbr_vidx lidx += nbr_vidx
mesh.loops.add(len(loops_vert_idx)) mesh.loops.add(len(loops_vert_idx))
@ -359,7 +357,6 @@ def load_ply_mesh(filepath, ply_name):
mesh.loops.foreach_set("vertex_index", loops_vert_idx) mesh.loops.foreach_set("vertex_index", loops_vert_idx)
mesh.polygons.foreach_set("loop_start", faces_loop_start) mesh.polygons.foreach_set("loop_start", faces_loop_start)
mesh.polygons.foreach_set("loop_total", faces_loop_total)
if uvindices: if uvindices:
uv_layer = mesh.uv_layers.new() uv_layer = mesh.uv_layers.new()

View File

@ -3,7 +3,7 @@
bl_info = { bl_info = {
"name": "UV Layout", "name": "UV Layout",
"author": "Campbell Barton, Matt Ebb", "author": "Campbell Barton, Matt Ebb",
"version": (1, 1, 5), "version": (1, 1, 6),
"blender": (3, 0, 0), "blender": (3, 0, 0),
"location": "UV Editor > UV > Export UV Layout", "location": "UV Editor > UV > Export UV Layout",
"description": "Export the UV layout as a 2D graphic", "description": "Export the UV layout as a 2D graphic",

View File

@ -6,6 +6,12 @@ from mathutils import Vector, Matrix
from mathutils.geometry import tessellate_polygon from mathutils.geometry import tessellate_polygon
from gpu_extras.batch import batch_for_shader from gpu_extras.batch import batch_for_shader
# Use OIIO if available, else Blender for writing the image.
try:
import OpenImageIO as oiio
except ImportError:
oiio = None
def export(filepath, face_data, colors, width, height, opacity): def export(filepath, face_data, colors, width, height, opacity):
offscreen = gpu.types.GPUOffScreen(width, height) offscreen = gpu.types.GPUOffScreen(width, height)
@ -44,6 +50,12 @@ def get_normalize_uvs_matrix():
matrix.col[3][1] = -1 matrix.col[3][1] = -1
matrix[0][0] = 2 matrix[0][0] = 2
matrix[1][1] = 2 matrix[1][1] = 2
# OIIO writes arrays from the left-upper corner.
if oiio:
matrix.col[3][1] *= -1.0
matrix[1][1] *= -1.0
return matrix return matrix
@ -90,6 +102,14 @@ def draw_lines(face_data):
def save_pixels(filepath, pixel_data, width, height): def save_pixels(filepath, pixel_data, width, height):
if oiio:
spec = oiio.ImageSpec(width, height, 4, "uint8")
image = oiio.ImageOutput.create(filepath)
image.open(filepath, spec)
image.write_image(pixel_data)
image.close()
return
image = bpy.data.images.new("temp", width, height, alpha=True) image = bpy.data.images.new("temp", width, height, alpha=True)
image.filepath = filepath image.filepath = filepath
image.pixels = [v / 255 for v in pixel_data] image.pixels = [v / 255 for v in pixel_data]

164
io_scene_3ds/__init__.py Normal file
View File

@ -0,0 +1,164 @@
# SPDX-License-Identifier: GPL-2.0-or-later
from bpy_extras.io_utils import (
ImportHelper,
ExportHelper,
orientation_helper,
axis_conversion,
)
from bpy.props import (
BoolProperty,
EnumProperty,
FloatProperty,
StringProperty,
)
import bpy
bl_info = {
"name": "Autodesk 3DS format",
"author": "Bob Holcomb, Campbell Barton, Andreas Atteneder, Sebastian Schrand",
"version": (2, 3, 2),
"blender": (3, 6, 0),
"location": "File > Import",
"description": "3DS Import/Export meshes, UVs, materials, textures, "
"cameras & lamps",
"warning": "Images must be in file folder, "
"filenames are limited to DOS 8.3 format",
"doc_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Import-Export/Autodesk_3DS",
"category": "Import-Export",
}
if "bpy" in locals():
import importlib
if "import_3ds" in locals():
importlib.reload(import_3ds)
if "export_3ds" in locals():
importlib.reload(export_3ds)
@orientation_helper(axis_forward='Y', axis_up='Z')
class Import3DS(bpy.types.Operator, ImportHelper):
    """Import from 3DS file format (.3ds)"""
    bl_idname = "import_scene.autodesk_3ds"
    bl_label = 'Import 3DS'
    bl_options = {'UNDO'}

    filename_ext = ".3ds"
    filter_glob: StringProperty(default="*.3ds", options={'HIDDEN'})

    constrain_size: FloatProperty(
        name="Size Constraint",
        description="Scale the model by 10 until it reaches the "
                    "size constraint (0 to disable)",
        min=0.0, max=1000.0,
        soft_min=0.0, soft_max=1000.0,
        default=10.0,
    )
    use_image_search: BoolProperty(
        name="Image Search",
        description="Search subdirectories for any associated images "
                    "(Warning, may be slow)",
        default=True,
    )
    use_apply_transform: BoolProperty(
        name="Apply Transform",
        description="Workaround for object transformations "
                    "importing incorrectly",
        default=True,
    )
    # Use the directly-imported BoolProperty like the properties above
    # (these two previously used `bpy.props.BoolProperty`, which is the
    # same class but inconsistent with the rest of the file).
    read_keyframe: BoolProperty(
        name="Read Keyframe",
        description="Read the keyframe data",
        default=True,
    )
    use_world_matrix: BoolProperty(
        name="World Space",
        description="Transform to matrix world",
        default=False,
    )

    def execute(self, context):
        """Gather the operator settings, add the axis-conversion matrix and
        delegate the actual work to import_3ds.load()."""
        from . import import_3ds

        # `axis_forward`/`axis_up` are consumed here to build the matrix;
        # `filter_glob` is UI-only, so none of them are forwarded.
        keywords = self.as_keywords(ignore=("axis_forward",
                                            "axis_up",
                                            "filter_glob",
                                            ))

        global_matrix = axis_conversion(from_forward=self.axis_forward,
                                        from_up=self.axis_up,
                                        ).to_4x4()
        keywords["global_matrix"] = global_matrix

        return import_3ds.load(self, context, **keywords)
@orientation_helper(axis_forward='Y', axis_up='Z')
class Export3DS(bpy.types.Operator, ExportHelper):
    """Export to 3DS file format (.3ds)"""
    bl_idname = "export_scene.autodesk_3ds"
    bl_label = 'Export 3DS'

    filename_ext = ".3ds"
    filter_glob: StringProperty(
        default="*.3ds",
        options={'HIDDEN'},
    )

    use_selection: BoolProperty(
        name="Selection Only",
        description="Export selected objects only",
        default=False,
    )

    def execute(self, context):
        """Collect the operator settings and hand them to export_3ds.save()."""
        from . import export_3ds

        # Axis settings are folded into the matrix below; `filter_glob` and
        # `check_existing` are UI-only, so none of these reach the exporter.
        skipped = ("axis_forward", "axis_up", "filter_glob", "check_existing")
        keywords = self.as_keywords(ignore=skipped)

        # Convert the chosen forward/up axes into a 4x4 world transform.
        keywords["global_matrix"] = axis_conversion(
            to_forward=self.axis_forward,
            to_up=self.axis_up,
        ).to_4x4()

        return export_3ds.save(self, context, **keywords)
# Menu entries for the File > Import / File > Export top-bar menus.
def menu_func_export(self, context):
    """Add the 3DS export operator to the File > Export menu."""
    self.layout.operator(Export3DS.bl_idname, text="3D Studio (.3ds)")


def menu_func_import(self, context):
    """Add the 3DS import operator to the File > Import menu."""
    self.layout.operator(Import3DS.bl_idname, text="3D Studio (.3ds)")
def register():
    """Register both operators and hook them into the import/export menus."""
    for operator_cls in (Import3DS, Export3DS):
        bpy.utils.register_class(operator_cls)
    bpy.types.TOPBAR_MT_file_import.append(menu_func_import)
    bpy.types.TOPBAR_MT_file_export.append(menu_func_export)
def unregister():
    """Unregister the operators and remove their menu entries."""
    for operator_cls in (Import3DS, Export3DS):
        bpy.utils.unregister_class(operator_cls)
    bpy.types.TOPBAR_MT_file_import.remove(menu_func_import)
    bpy.types.TOPBAR_MT_file_export.remove(menu_func_export)
# NOTES:
# why add 1 extra vertex? and remove it when done? -
# "Answer - eekadoodle - would need to re-order UV's without this since face
# order isnt always what we give blender, BMesh will solve :D"
#
# disabled scaling to size, this requires exposing bb (easy) and understanding
# how it works (needs some time)

# Allow running the add-on directly from Blender's text editor.
if __name__ == "__main__":
    register()

1437
io_scene_3ds/export_3ds.py Normal file

File diff suppressed because it is too large Load Diff

1305
io_scene_3ds/import_3ds.py Normal file

File diff suppressed because it is too large Load Diff

View File

@ -3,7 +3,7 @@
bl_info = { bl_info = {
"name": "FBX format", "name": "FBX format",
"author": "Campbell Barton, Bastien Montagne, Jens Restemeier, @Mysteryem", "author": "Campbell Barton, Bastien Montagne, Jens Restemeier, @Mysteryem",
"version": (5, 1, 0), "version": (5, 3, 0),
"blender": (3, 6, 0), "blender": (3, 6, 0),
"location": "File > Import-Export", "location": "File > Import-Export",
"description": "FBX IO meshes, UVs, vertex colors, materials, textures, cameras, lamps and actions", "description": "FBX IO meshes, UVs, vertex colors, materials, textures, cameras, lamps and actions",

View File

@ -1431,7 +1431,7 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
me_fbxmaterials_idx = scene_data.mesh_material_indices.get(me) me_fbxmaterials_idx = scene_data.mesh_material_indices.get(me)
if me_fbxmaterials_idx is not None: if me_fbxmaterials_idx is not None:
# We cannot use me.materials here, as this array is filled with None in case materials are linked to object... # We cannot use me.materials here, as this array is filled with None in case materials are linked to object...
me_blmaterials = [mat_slot.material for mat_slot in me_obj.material_slots] me_blmaterials = me_obj.materials
if me_fbxmaterials_idx and me_blmaterials: if me_fbxmaterials_idx and me_blmaterials:
lay_ma = elem_data_single_int32(geom, b"LayerElementMaterial", 0) lay_ma = elem_data_single_int32(geom, b"LayerElementMaterial", 0)
elem_data_single_int32(lay_ma, b"Version", FBX_GEOMETRY_MATERIAL_VERSION) elem_data_single_int32(lay_ma, b"Version", FBX_GEOMETRY_MATERIAL_VERSION)
@ -2598,6 +2598,14 @@ def fbx_data_from_scene(scene, depsgraph, settings):
bmesh.ops.triangulate(bm, faces=bm.faces) bmesh.ops.triangulate(bm, faces=bm.faces)
bm.to_mesh(tmp_me) bm.to_mesh(tmp_me)
bm.free() bm.free()
# Usually the materials of the evaluated object will be the same, but modifiers, such as Geometry Nodes,
# can change the materials.
orig_mats = tuple(slot.material for slot in ob.material_slots)
eval_mats = tuple(slot.material.original if slot.material else None
for slot in ob_to_convert.material_slots)
if orig_mats != eval_mats:
# Override the default behaviour of getting materials from ob_obj.bdata.material_slots.
ob_obj.override_materials = eval_mats
data_meshes[ob_obj] = (get_blenderID_key(tmp_me), tmp_me, True) data_meshes[ob_obj] = (get_blenderID_key(tmp_me), tmp_me, True)
# Change armatures back. # Change armatures back.
for armature, pose_position in backup_pose_positions: for armature, pose_position in backup_pose_positions:
@ -2713,8 +2721,7 @@ def fbx_data_from_scene(scene, depsgraph, settings):
data_materials = {} data_materials = {}
for ob_obj in objects: for ob_obj in objects:
# If obj is not a valid object for materials, wrapper will just return an empty tuple... # If obj is not a valid object for materials, wrapper will just return an empty tuple...
for ma_s in ob_obj.material_slots: for ma in ob_obj.materials:
ma = ma_s.material
if ma is None: if ma is None:
continue # Empty slots! continue # Empty slots!
# Note theoretically, FBX supports any kind of materials, even GLSL shaders etc. # Note theoretically, FBX supports any kind of materials, even GLSL shaders etc.

View File

@ -244,6 +244,11 @@ def array_to_matrix4(arr):
return Matrix(tuple(zip(*[iter(arr)]*4))).transposed() return Matrix(tuple(zip(*[iter(arr)]*4))).transposed()
def parray_as_ndarray(arr):
"""Convert an array.array into an np.ndarray that shares the same memory"""
return np.frombuffer(arr, dtype=arr.typecode)
def similar_values(v1, v2, e=1e-6): def similar_values(v1, v2, e=1e-6):
"""Return True if v1 and v2 are nearly the same.""" """Return True if v1 and v2 are nearly the same."""
if v1 == v2: if v1 == v2:
@ -1169,7 +1174,7 @@ class ObjectWrapper(metaclass=MetaObjectWrapper):
we need to use a key to identify each. we need to use a key to identify each.
""" """
__slots__ = ( __slots__ = (
'name', 'key', 'bdata', 'parented_to_armature', 'name', 'key', 'bdata', 'parented_to_armature', 'override_materials',
'_tag', '_ref', '_dupli_matrix' '_tag', '_ref', '_dupli_matrix'
) )
@ -1224,6 +1229,7 @@ class ObjectWrapper(metaclass=MetaObjectWrapper):
self.bdata = bdata self.bdata = bdata
self._ref = armature self._ref = armature
self.parented_to_armature = False self.parented_to_armature = False
self.override_materials = None
def __eq__(self, other): def __eq__(self, other):
return isinstance(other, self.__class__) and self.key == other.key return isinstance(other, self.__class__) and self.key == other.key
@ -1438,11 +1444,14 @@ class ObjectWrapper(metaclass=MetaObjectWrapper):
return () return ()
bones = property(get_bones) bones = property(get_bones)
def get_material_slots(self): def get_materials(self):
override_materials = self.override_materials
if override_materials is not None:
return override_materials
if self._tag in {'OB', 'DP'}: if self._tag in {'OB', 'DP'}:
return self.bdata.material_slots return tuple(slot.material for slot in self.bdata.material_slots)
return () return ()
material_slots = property(get_material_slots) materials = property(get_materials)
def is_deformed_by_armature(self, arm_obj): def is_deformed_by_armature(self, arm_obj):
if not (self.is_object and self.type == 'MESH'): if not (self.is_object and self.type == 'MESH'):

View File

@ -18,6 +18,9 @@ import bpy
from bpy.app.translations import pgettext_tip as tip_ from bpy.app.translations import pgettext_tip as tip_
from mathutils import Matrix, Euler, Vector from mathutils import Matrix, Euler, Vector
# Also imported in .fbx_utils, so importing here is unlikely to further affect Blender startup time.
import numpy as np
# ----- # -----
# Utils # Utils
from . import parse_fbx, fbx_utils from . import parse_fbx, fbx_utils
@ -34,6 +37,10 @@ from .fbx_utils import (
similar_values, similar_values,
similar_values_iter, similar_values_iter,
FBXImportSettings, FBXImportSettings,
vcos_transformed,
nors_transformed,
parray_as_ndarray,
astype_view_signedness,
) )
# global singleton, assign on execution # global singleton, assign on execution
@ -454,8 +461,9 @@ def add_vgroup_to_objects(vg_indices, vg_weights, vg_name, objects):
vg = obj.vertex_groups.get(vg_name) vg = obj.vertex_groups.get(vg_name)
if vg is None: if vg is None:
vg = obj.vertex_groups.new(name=vg_name) vg = obj.vertex_groups.new(name=vg_name)
vg_add = vg.add
for i, w in zip(vg_indices, vg_weights): for i, w in zip(vg_indices, vg_weights):
vg.add((i,), w, 'REPLACE') vg_add((i,), w, 'REPLACE')
def blen_read_object_transform_preprocess(fbx_props, fbx_obj, rot_alt_mat, use_prepost_rot): def blen_read_object_transform_preprocess(fbx_props, fbx_obj, rot_alt_mat, use_prepost_rot):
@ -777,87 +785,258 @@ def blen_read_geom_layerinfo(fbx_layer):
) )
def blen_read_geom_array_setattr(generator, blen_data, blen_attr, fbx_data, stride, item_size, descr, xform): def blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size):
"""Generic fbx_layer to blen_data setter, generator is expected to yield tuples (ble_idx, fbx_idx).""" """Validate blen_data when it's not a bpy_prop_collection.
max_blen_idx = len(blen_data) - 1 Returns whether blen_data is a bpy_prop_collection"""
max_fbx_idx = len(fbx_data) - 1 blen_data_is_collection = isinstance(blen_data, bpy.types.bpy_prop_collection)
print_error = True if not blen_data_is_collection:
if item_size > 1:
assert(len(blen_data.shape) == 2)
assert(blen_data.shape[1] == item_size)
assert(blen_data.dtype == blen_dtype)
return blen_data_is_collection
def check_skip(blen_idx, fbx_idx):
nonlocal print_error
if fbx_idx < 0: # Negative values mean 'skip'.
return True
if blen_idx > max_blen_idx:
if print_error:
print("ERROR: too much data in this Blender layer, compared to elements in mesh, skipping!")
print_error = False
return True
if fbx_idx + item_size - 1 > max_fbx_idx:
if print_error:
print("ERROR: not enough data in this FBX layer, skipping!")
print_error = False
return True
return False
if xform is not None: def blen_read_geom_parse_fbx_data(fbx_data, stride, item_size):
if isinstance(blen_data, list): """Parse fbx_data as an array.array into a 2d np.ndarray that shares the same memory, where each row is a single
if item_size == 1: item"""
def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx): # Technically stride < item_size could be supported, but there's probably not a use case for it since it would
blen_data[blen_idx] = xform(fbx_data[fbx_idx]) # result in a view of the data with self-overlapping memory.
else: assert(stride >= item_size)
def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx): # View the array.array as an np.ndarray.
blen_data[blen_idx] = xform(fbx_data[fbx_idx:fbx_idx + item_size]) fbx_data_np = parray_as_ndarray(fbx_data)
else:
if item_size == 1: if stride == item_size:
def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx): if item_size > 1:
setattr(blen_data[blen_idx], blen_attr, xform(fbx_data[fbx_idx])) # Need to make sure fbx_data_np has a whole number of items to be able to view item_size elements per row.
else: items_remainder = len(fbx_data_np) % item_size
def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx): if items_remainder:
setattr(blen_data[blen_idx], blen_attr, xform(fbx_data[fbx_idx:fbx_idx + item_size])) print("ERROR: not a whole number of items in this FBX layer, skipping the partial item!")
fbx_data_np = fbx_data_np[:-items_remainder]
fbx_data_np = fbx_data_np.reshape(-1, item_size)
else: else:
if isinstance(blen_data, list): # Create a view of fbx_data_np that is only the first item_size elements of each stride. Note that the view will
if item_size == 1: # not be C-contiguous.
def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx): stride_remainder = len(fbx_data_np) % stride
blen_data[blen_idx] = fbx_data[fbx_idx] if stride_remainder:
if stride_remainder < item_size:
print("ERROR: not a whole number of items in this FBX layer, skipping the partial item!")
# Not enough in the remainder for a full item, so cut off the partial stride
fbx_data_np = fbx_data_np[:-stride_remainder]
# Reshape to one stride per row and then create a view that includes only the first item_size elements
# of each stride.
fbx_data_np = fbx_data_np.reshape(-1, stride)[:, :item_size]
else: else:
def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx): print("ERROR: not a whole number of strides in this FBX layer! There are a whole number of items, but"
blen_data[blen_idx] = fbx_data[fbx_idx:fbx_idx + item_size] " this could indicate an error!")
# There is not a whole number of strides, but there is a whole number of items.
# This is a pain to deal with because fbx_data_np.reshape(-1, stride) is not possible.
# A view of just the items can be created using stride_tricks.as_strided by specifying the shape and
# strides of the view manually.
# Extreme care must be taken when using stride_tricks.as_strided because improper usage can result in
# a view that gives access to memory outside the array.
from numpy.lib import stride_tricks
# fbx_data_np should always start off as flat and C-contiguous.
assert(fbx_data_np.strides == (fbx_data_np.itemsize,))
num_whole_strides = len(fbx_data_np) // stride
# Plus the one partial stride that is enough elements for a complete item.
num_items = num_whole_strides + 1
shape = (num_items, item_size)
# strides are the number of bytes to step to get to the next element, for each axis.
step_per_item = fbx_data_np.itemsize * stride
step_per_item_element = fbx_data_np.itemsize
strides = (step_per_item, step_per_item_element)
fbx_data_np = stride_tricks.as_strided(fbx_data_np, shape, strides)
else: else:
if item_size == 1: # There's a whole number of strides, so first reshape to one stride per row and then create a view that
def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx): # includes only the first item_size elements of each stride.
setattr(blen_data[blen_idx], blen_attr, fbx_data[fbx_idx]) fbx_data_np = fbx_data_np.reshape(-1, stride)[:, :item_size]
else:
def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
setattr(blen_data[blen_idx], blen_attr, fbx_data[fbx_idx:fbx_idx + item_size])
for blen_idx, fbx_idx in generator: return fbx_data_np
if check_skip(blen_idx, fbx_idx):
continue
_process(blen_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx)
# generic generators. def blen_read_geom_check_fbx_data_length(blen_data, fbx_data_np, is_indices=False):
def blen_read_geom_array_gen_allsame(data_len): """Check that there are the same number of items in blen_data and fbx_data_np.
return zip(*(range(data_len), (0,) * data_len))
Returns a tuple of two elements:
0: fbx_data_np or, if fbx_data_np contains more items than blen_data, a view of fbx_data_np with the excess
items removed
1: Whether the returned fbx_data_np contains enough items to completely fill blen_data"""
bl_num_items = len(blen_data)
fbx_num_items = len(fbx_data_np)
enough_data = fbx_num_items >= bl_num_items
if not enough_data:
if is_indices:
print("ERROR: not enough indices in this FBX layer, missing data will be left as default!")
else:
print("ERROR: not enough data in this FBX layer, missing data will be left as default!")
elif fbx_num_items > bl_num_items:
if is_indices:
print("ERROR: too many indices in this FBX layer, skipping excess!")
else:
print("ERROR: too much data in this FBX layer, skipping excess!")
fbx_data_np = fbx_data_np[:bl_num_items]
return fbx_data_np, enough_data
def blen_read_geom_array_gen_direct(fbx_data, stride): def blen_read_geom_xform(fbx_data_np, xform):
fbx_data_len = len(fbx_data) """xform is either None, or a function that takes fbx_data_np as its only positional argument and returns an
return zip(*(range(fbx_data_len // stride), range(0, fbx_data_len, stride))) np.ndarray with the same total number of elements as fbx_data_np.
It is acceptable for xform to return an array with a different dtype to fbx_data_np.
Returns xform(fbx_data_np) when xform is not None and ensures the result of xform(fbx_data_np) has the same shape as
fbx_data_np before returning it.
When xform is None, fbx_data_np is returned as is."""
if xform is not None:
item_size = fbx_data_np.shape[1]
fbx_total_data = fbx_data_np.size
fbx_data_np = xform(fbx_data_np)
# The amount of data should not be changed by xform
assert(fbx_data_np.size == fbx_total_data)
# Ensure fbx_data_np is still item_size elements per row
if len(fbx_data_np.shape) != 2 or fbx_data_np.shape[1] != item_size:
fbx_data_np = fbx_data_np.reshape(-1, item_size)
return fbx_data_np
def blen_read_geom_array_gen_indextodirect(fbx_layer_index, stride): def blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_data, stride, item_size, descr,
return ((bi, fi * stride) for bi, fi in enumerate(fbx_layer_index)) xform):
"""Generic fbx_layer to blen_data foreach setter for Direct layers.
blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
fbx_data must be an array.array."""
fbx_data_np = blen_read_geom_parse_fbx_data(fbx_data, stride, item_size)
fbx_data_np, enough_data = blen_read_geom_check_fbx_data_length(blen_data, fbx_data_np)
fbx_data_np = blen_read_geom_xform(fbx_data_np, xform)
blen_data_is_collection = blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size)
if blen_data_is_collection:
if not enough_data:
blen_total_data = len(blen_data) * item_size
buffer = np.empty(blen_total_data, dtype=blen_dtype)
# It's not clear what values should be used for the missing data, so read the current values into a buffer.
blen_data.foreach_get(blen_attr, buffer)
# Change the buffer shape to one item per row
buffer.shape = (-1, item_size)
# Copy the fbx data into the start of the buffer
buffer[:len(fbx_data_np)] = fbx_data_np
else:
# Convert the buffer to the Blender C type of blen_attr
buffer = astype_view_signedness(fbx_data_np, blen_dtype)
# Set blen_attr of blen_data. The buffer must be flat and C-contiguous, which ravel() ensures
blen_data.foreach_set(blen_attr, buffer.ravel())
else:
assert(blen_data.size % item_size == 0)
blen_data = blen_data.view()
blen_data.shape = (-1, item_size)
blen_data[:len(fbx_data_np)] = fbx_data_np
def blen_read_geom_array_gen_direct_looptovert(mesh, fbx_data, stride): def blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_data, fbx_layer_index, stride,
fbx_data_len = len(fbx_data) // stride item_size, descr, xform):
loops = mesh.loops """Generic fbx_layer to blen_data foreach setter for IndexToDirect layers.
for p in mesh.polygons: blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
for lidx in p.loop_indices: fbx_data must be an array.array or a 1d np.ndarray."""
vidx = loops[lidx].vertex_index fbx_data_np = blen_read_geom_parse_fbx_data(fbx_data, stride, item_size)
if vidx < fbx_data_len: fbx_data_np = blen_read_geom_xform(fbx_data_np, xform)
yield lidx, vidx * stride
# fbx_layer_index is allowed to be a 1d np.ndarray for use with blen_read_geom_array_foreach_set_looptovert.
if not isinstance(fbx_layer_index, np.ndarray):
fbx_layer_index = parray_as_ndarray(fbx_layer_index)
fbx_layer_index, enough_indices = blen_read_geom_check_fbx_data_length(blen_data, fbx_layer_index, is_indices=True)
blen_data_is_collection = blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size)
blen_data_items_len = len(blen_data)
blen_data_len = blen_data_items_len * item_size
fbx_num_items = len(fbx_data_np)
# Find all indices that are out of bounds of fbx_data_np.
min_index_inclusive = -fbx_num_items
max_index_inclusive = fbx_num_items - 1
valid_index_mask = np.equal(fbx_layer_index, fbx_layer_index.clip(min_index_inclusive, max_index_inclusive))
indices_invalid = not valid_index_mask.all()
fbx_data_items = fbx_data_np.reshape(-1, item_size)
if indices_invalid or not enough_indices:
if blen_data_is_collection:
buffer = np.empty(blen_data_len, dtype=blen_dtype)
buffer_item_view = buffer.view()
buffer_item_view.shape = (-1, item_size)
# Since we don't know what the default values should be for the missing data, read the current values into a
# buffer.
blen_data.foreach_get(blen_attr, buffer)
else:
buffer_item_view = blen_data
if not enough_indices:
# Reduce the length of the view to the same length as the number of indices.
buffer_item_view = buffer_item_view[:len(fbx_layer_index)]
# Copy the result of indexing fbx_data_items by each element in fbx_layer_index into the buffer.
if indices_invalid:
print("ERROR: indices in this FBX layer out of bounds of the FBX data, skipping invalid indices!")
buffer_item_view[valid_index_mask] = fbx_data_items[fbx_layer_index[valid_index_mask]]
else:
buffer_item_view[:] = fbx_data_items[fbx_layer_index]
if blen_data_is_collection:
blen_data.foreach_set(blen_attr, buffer.ravel())
else:
if blen_data_is_collection:
# Cast the buffer to the Blender C type of blen_attr
fbx_data_items = astype_view_signedness(fbx_data_items, blen_dtype)
buffer_items = fbx_data_items[fbx_layer_index]
blen_data.foreach_set(blen_attr, buffer_items.ravel())
else:
blen_data[:] = fbx_data_items[fbx_layer_index]
def blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_data, stride, item_size, descr,
xform):
"""Generic fbx_layer to blen_data foreach setter for AllSame layers.
blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
fbx_data must be an array.array."""
fbx_data_np = blen_read_geom_parse_fbx_data(fbx_data, stride, item_size)
fbx_data_np = blen_read_geom_xform(fbx_data_np, xform)
blen_data_is_collection = blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size)
fbx_items_len = len(fbx_data_np)
blen_items_len = len(blen_data)
if fbx_items_len < 1:
print("ERROR: not enough data in this FBX layer, skipping!")
return
if blen_data_is_collection:
# Create an array filled with the value from fbx_data_np
buffer = np.full((blen_items_len, item_size), fbx_data_np[0], dtype=blen_dtype)
blen_data.foreach_set(blen_attr, buffer.ravel())
else:
blen_data[:] = fbx_data_np[0]
def blen_read_geom_array_foreach_set_looptovert(mesh, blen_data, blen_attr, blen_dtype, fbx_data, stride, item_size,
descr, xform):
"""Generic fbx_layer to blen_data foreach setter for polyloop ByVertice layers.
blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
fbx_data must be an array.array"""
# The fbx_data is mapped to vertices. To expand fbx_data to polygon loops, get an array of the vertex index of each
# polygon loop that will then be used to index fbx_data
loop_vertex_indices = np.empty(len(mesh.loops), dtype=np.uintc)
mesh.loops.foreach_get("vertex_index", loop_vertex_indices)
blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_data, loop_vertex_indices, stride,
item_size, descr, xform)
# generic error printers. # generic error printers.
@ -872,7 +1051,7 @@ def blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet=False):
def blen_read_geom_array_mapped_vert( def blen_read_geom_array_mapped_vert(
mesh, blen_data, blen_attr, mesh, blen_data, blen_attr, blen_dtype,
fbx_layer_data, fbx_layer_index, fbx_layer_data, fbx_layer_index,
fbx_layer_mapping, fbx_layer_ref, fbx_layer_mapping, fbx_layer_ref,
stride, item_size, descr, stride, item_size, descr,
@ -881,15 +1060,15 @@ def blen_read_geom_array_mapped_vert(
if fbx_layer_mapping == b'ByVertice': if fbx_layer_mapping == b'ByVertice':
if fbx_layer_ref == b'Direct': if fbx_layer_ref == b'Direct':
assert(fbx_layer_index is None) assert(fbx_layer_index is None)
blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride), blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform) descr, xform)
return True return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet) blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
elif fbx_layer_mapping == b'AllSame': elif fbx_layer_mapping == b'AllSame':
if fbx_layer_ref == b'IndexToDirect': if fbx_layer_ref == b'IndexToDirect':
assert(fbx_layer_index is None) assert(fbx_layer_index is None)
blen_read_geom_array_setattr(blen_read_geom_array_gen_allsame(len(blen_data)), blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform) item_size, descr, xform)
return True return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet) blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
else: else:
@ -899,7 +1078,7 @@ def blen_read_geom_array_mapped_vert(
def blen_read_geom_array_mapped_edge( def blen_read_geom_array_mapped_edge(
mesh, blen_data, blen_attr, mesh, blen_data, blen_attr, blen_dtype,
fbx_layer_data, fbx_layer_index, fbx_layer_data, fbx_layer_index,
fbx_layer_mapping, fbx_layer_ref, fbx_layer_mapping, fbx_layer_ref,
stride, item_size, descr, stride, item_size, descr,
@ -907,15 +1086,15 @@ def blen_read_geom_array_mapped_edge(
): ):
if fbx_layer_mapping == b'ByEdge': if fbx_layer_mapping == b'ByEdge':
if fbx_layer_ref == b'Direct': if fbx_layer_ref == b'Direct':
blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride), blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform) descr, xform)
return True return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet) blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
elif fbx_layer_mapping == b'AllSame': elif fbx_layer_mapping == b'AllSame':
if fbx_layer_ref == b'IndexToDirect': if fbx_layer_ref == b'IndexToDirect':
assert(fbx_layer_index is None) assert(fbx_layer_index is None)
blen_read_geom_array_setattr(blen_read_geom_array_gen_allsame(len(blen_data)), blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform) item_size, descr, xform)
return True return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet) blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
else: else:
@ -925,7 +1104,7 @@ def blen_read_geom_array_mapped_edge(
def blen_read_geom_array_mapped_polygon( def blen_read_geom_array_mapped_polygon(
mesh, blen_data, blen_attr, mesh, blen_data, blen_attr, blen_dtype,
fbx_layer_data, fbx_layer_index, fbx_layer_data, fbx_layer_index,
fbx_layer_mapping, fbx_layer_ref, fbx_layer_mapping, fbx_layer_ref,
stride, item_size, descr, stride, item_size, descr,
@ -937,22 +1116,22 @@ def blen_read_geom_array_mapped_polygon(
# We fallback to 'Direct' mapping in this case. # We fallback to 'Direct' mapping in this case.
#~ assert(fbx_layer_index is not None) #~ assert(fbx_layer_index is not None)
if fbx_layer_index is None: if fbx_layer_index is None:
blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride), blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform) item_size, descr, xform)
else: else:
blen_read_geom_array_setattr(blen_read_geom_array_gen_indextodirect(fbx_layer_index, stride), blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_layer_data,
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform) fbx_layer_index, stride, item_size, descr, xform)
return True return True
elif fbx_layer_ref == b'Direct': elif fbx_layer_ref == b'Direct':
blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride), blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform) descr, xform)
return True return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet) blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
elif fbx_layer_mapping == b'AllSame': elif fbx_layer_mapping == b'AllSame':
if fbx_layer_ref == b'IndexToDirect': if fbx_layer_ref == b'IndexToDirect':
assert(fbx_layer_index is None) assert(fbx_layer_index is None)
blen_read_geom_array_setattr(blen_read_geom_array_gen_allsame(len(blen_data)), blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform) item_size, descr, xform)
return True return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet) blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
else: else:
@ -962,7 +1141,7 @@ def blen_read_geom_array_mapped_polygon(
def blen_read_geom_array_mapped_polyloop( def blen_read_geom_array_mapped_polyloop(
mesh, blen_data, blen_attr, mesh, blen_data, blen_attr, blen_dtype,
fbx_layer_data, fbx_layer_index, fbx_layer_data, fbx_layer_index,
fbx_layer_mapping, fbx_layer_ref, fbx_layer_mapping, fbx_layer_ref,
stride, item_size, descr, stride, item_size, descr,
@ -974,29 +1153,29 @@ def blen_read_geom_array_mapped_polyloop(
# We fallback to 'Direct' mapping in this case. # We fallback to 'Direct' mapping in this case.
#~ assert(fbx_layer_index is not None) #~ assert(fbx_layer_index is not None)
if fbx_layer_index is None: if fbx_layer_index is None:
blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride), blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform) item_size, descr, xform)
else: else:
blen_read_geom_array_setattr(blen_read_geom_array_gen_indextodirect(fbx_layer_index, stride), blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_layer_data,
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform) fbx_layer_index, stride, item_size, descr, xform)
return True return True
elif fbx_layer_ref == b'Direct': elif fbx_layer_ref == b'Direct':
blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride), blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform) descr, xform)
return True return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet) blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
elif fbx_layer_mapping == b'ByVertice': elif fbx_layer_mapping == b'ByVertice':
if fbx_layer_ref == b'Direct': if fbx_layer_ref == b'Direct':
assert(fbx_layer_index is None) assert(fbx_layer_index is None)
blen_read_geom_array_setattr(blen_read_geom_array_gen_direct_looptovert(mesh, fbx_layer_data, stride), blen_read_geom_array_foreach_set_looptovert(mesh, blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform) item_size, descr, xform)
return True return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet) blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
elif fbx_layer_mapping == b'AllSame': elif fbx_layer_mapping == b'AllSame':
if fbx_layer_ref == b'IndexToDirect': if fbx_layer_ref == b'IndexToDirect':
assert(fbx_layer_index is None) assert(fbx_layer_index is None)
blen_read_geom_array_setattr(blen_read_geom_array_gen_allsame(len(blen_data)), blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform) item_size, descr, xform)
return True return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet) blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
else: else:
@ -1021,7 +1200,7 @@ def blen_read_geom_layer_material(fbx_obj, mesh):
blen_data = mesh.polygons blen_data = mesh.polygons
blen_read_geom_array_mapped_polygon( blen_read_geom_array_mapped_polygon(
mesh, blen_data, "material_index", mesh, blen_data, "material_index", np.uintc,
fbx_layer_data, None, fbx_layer_data, None,
fbx_layer_mapping, fbx_layer_ref, fbx_layer_mapping, fbx_layer_ref,
1, 1, layer_id, 1, 1, layer_id,
@ -1055,7 +1234,7 @@ def blen_read_geom_layer_uv(fbx_obj, mesh):
continue continue
blen_read_geom_array_mapped_polyloop( blen_read_geom_array_mapped_polyloop(
mesh, blen_data, "uv", mesh, blen_data, "uv", np.single,
fbx_layer_data, fbx_layer_index, fbx_layer_data, fbx_layer_index,
fbx_layer_mapping, fbx_layer_ref, fbx_layer_mapping, fbx_layer_ref,
2, 2, layer_id, 2, 2, layer_id,
@ -1095,7 +1274,7 @@ def blen_read_geom_layer_color(fbx_obj, mesh, colors_type):
continue continue
blen_read_geom_array_mapped_polyloop( blen_read_geom_array_mapped_polyloop(
mesh, blen_data, color_prop_name, mesh, blen_data, color_prop_name, np.single,
fbx_layer_data, fbx_layer_index, fbx_layer_data, fbx_layer_index,
fbx_layer_mapping, fbx_layer_ref, fbx_layer_mapping, fbx_layer_ref,
4, 4, layer_id, 4, 4, layer_id,
@ -1129,11 +1308,11 @@ def blen_read_geom_layer_smooth(fbx_obj, mesh):
blen_data = mesh.edges blen_data = mesh.edges
blen_read_geom_array_mapped_edge( blen_read_geom_array_mapped_edge(
mesh, blen_data, "use_edge_sharp", mesh, blen_data, "use_edge_sharp", bool,
fbx_layer_data, None, fbx_layer_data, None,
fbx_layer_mapping, fbx_layer_ref, fbx_layer_mapping, fbx_layer_ref,
1, 1, layer_id, 1, 1, layer_id,
xform=lambda s: not s, xform=np.logical_not,
) )
# We only set sharp edges here, not face smoothing itself... # We only set sharp edges here, not face smoothing itself...
mesh.use_auto_smooth = True mesh.use_auto_smooth = True
@ -1141,7 +1320,7 @@ def blen_read_geom_layer_smooth(fbx_obj, mesh):
elif fbx_layer_mapping == b'ByPolygon': elif fbx_layer_mapping == b'ByPolygon':
blen_data = mesh.polygons blen_data = mesh.polygons
return blen_read_geom_array_mapped_polygon( return blen_read_geom_array_mapped_polygon(
mesh, blen_data, "use_smooth", mesh, blen_data, "use_smooth", bool,
fbx_layer_data, None, fbx_layer_data, None,
fbx_layer_mapping, fbx_layer_ref, fbx_layer_mapping, fbx_layer_ref,
1, 1, layer_id, 1, 1, layer_id,
@ -1152,8 +1331,6 @@ def blen_read_geom_layer_smooth(fbx_obj, mesh):
return False return False
def blen_read_geom_layer_edge_crease(fbx_obj, mesh): def blen_read_geom_layer_edge_crease(fbx_obj, mesh):
from math import sqrt
fbx_layer = elem_find_first(fbx_obj, b'LayerElementEdgeCrease') fbx_layer = elem_find_first(fbx_obj, b'LayerElementEdgeCrease')
if fbx_layer is None: if fbx_layer is None:
@ -1184,13 +1361,13 @@ def blen_read_geom_layer_edge_crease(fbx_obj, mesh):
blen_data = mesh.edges blen_data = mesh.edges
return blen_read_geom_array_mapped_edge( return blen_read_geom_array_mapped_edge(
mesh, blen_data, "crease", mesh, blen_data, "crease", np.single,
fbx_layer_data, None, fbx_layer_data, None,
fbx_layer_mapping, fbx_layer_ref, fbx_layer_mapping, fbx_layer_ref,
1, 1, layer_id, 1, 1, layer_id,
# Blender squares those values before sending them to OpenSubdiv, when other software don't, # Blender squares those values before sending them to OpenSubdiv, when other software don't,
# so we need to compensate that to get similar results through FBX... # so we need to compensate that to get similar results through FBX...
xform=sqrt, xform=np.sqrt,
) )
else: else:
print("warning layer %r mapping type unsupported: %r" % (fbx_layer.id, fbx_layer_mapping)) print("warning layer %r mapping type unsupported: %r" % (fbx_layer.id, fbx_layer_mapping))
@ -1215,22 +1392,28 @@ def blen_read_geom_layer_normal(fbx_obj, mesh, xform=None):
print("warning %r %r missing data" % (layer_id, fbx_layer_name)) print("warning %r %r missing data" % (layer_id, fbx_layer_name))
return False return False
# try loops, then vertices. # Normals are temporarily set here so that they can be retrieved again after a call to Mesh.validate().
bl_norm_dtype = np.single
item_size = 3
# try loops, then polygons, then vertices.
tries = ((mesh.loops, "Loops", False, blen_read_geom_array_mapped_polyloop), tries = ((mesh.loops, "Loops", False, blen_read_geom_array_mapped_polyloop),
(mesh.polygons, "Polygons", True, blen_read_geom_array_mapped_polygon), (mesh.polygons, "Polygons", True, blen_read_geom_array_mapped_polygon),
(mesh.vertices, "Vertices", True, blen_read_geom_array_mapped_vert)) (mesh.vertices, "Vertices", True, blen_read_geom_array_mapped_vert))
for blen_data, blen_data_type, is_fake, func in tries: for blen_data, blen_data_type, is_fake, func in tries:
bdata = [None] * len(blen_data) if is_fake else blen_data bdata = np.zeros((len(blen_data), item_size), dtype=bl_norm_dtype) if is_fake else blen_data
if func(mesh, bdata, "normal", if func(mesh, bdata, "normal", bl_norm_dtype,
fbx_layer_data, fbx_layer_index, fbx_layer_mapping, fbx_layer_ref, 3, 3, layer_id, xform, True): fbx_layer_data, fbx_layer_index, fbx_layer_mapping, fbx_layer_ref, 3, item_size, layer_id, xform, True):
if blen_data_type == "Polygons": if blen_data_type == "Polygons":
for pidx, p in enumerate(mesh.polygons): # To expand to per-loop normals, repeat each per-polygon normal by the number of loops of each polygon.
for lidx in range(p.loop_start, p.loop_start + p.loop_total): poly_loop_totals = np.empty(len(mesh.polygons), dtype=np.uintc)
mesh.loops[lidx].normal[:] = bdata[pidx] mesh.polygons.foreach_get("loop_total", poly_loop_totals)
loop_normals = np.repeat(bdata, poly_loop_totals, axis=0)
mesh.loops.foreach_set("normal", loop_normals.ravel())
elif blen_data_type == "Vertices": elif blen_data_type == "Vertices":
# We have to copy vnors to lnors! Far from elegant, but simple. # We have to copy vnors to lnors! Far from elegant, but simple.
for l in mesh.loops: loop_vertex_indices = np.empty(len(mesh.loops), dtype=np.uintc)
l.normal[:] = bdata[l.vertex_index] mesh.loops.foreach_get("vertex_index", loop_vertex_indices)
mesh.loops.foreach_set("normal", bdata[loop_vertex_indices].ravel())
return True return True
blen_read_geom_array_error_mapping("normal", fbx_layer_mapping) blen_read_geom_array_error_mapping("normal", fbx_layer_mapping)
@ -1239,9 +1422,6 @@ def blen_read_geom_layer_normal(fbx_obj, mesh, xform=None):
def blen_read_geom(fbx_tmpl, fbx_obj, settings): def blen_read_geom(fbx_tmpl, fbx_obj, settings):
from itertools import chain
import array
# Vertices are in object space, but we are post-multiplying all transforms with the inverse of the # Vertices are in object space, but we are post-multiplying all transforms with the inverse of the
# global matrix, so we need to apply the global matrix to the vertices to get the correct result. # global matrix, so we need to apply the global matrix to the vertices to get the correct result.
geom_mat_co = settings.global_matrix if settings.bake_space_transform else None geom_mat_co = settings.global_matrix if settings.bake_space_transform else None
@ -1259,73 +1439,95 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):
fbx_polys = elem_prop_first(elem_find_first(fbx_obj, b'PolygonVertexIndex')) fbx_polys = elem_prop_first(elem_find_first(fbx_obj, b'PolygonVertexIndex'))
fbx_edges = elem_prop_first(elem_find_first(fbx_obj, b'Edges')) fbx_edges = elem_prop_first(elem_find_first(fbx_obj, b'Edges'))
if geom_mat_co is not None: bl_vcos_dtype = np.single
def _vcos_transformed_gen(raw_cos, m=None):
# Note: we could most likely get much better performances with numpy, but will leave this as TODO for now.
return chain(*(m @ Vector(v) for v in zip(*(iter(raw_cos),) * 3)))
fbx_verts = array.array(fbx_verts.typecode, _vcos_transformed_gen(fbx_verts, geom_mat_co))
if fbx_verts is None: # The dtypes when empty don't matter, but are set to what the fbx arrays are expected to be.
fbx_verts = () fbx_verts = parray_as_ndarray(fbx_verts) if fbx_verts else np.empty(0, dtype=data_types.ARRAY_FLOAT64)
if fbx_polys is None: fbx_polys = parray_as_ndarray(fbx_polys) if fbx_polys else np.empty(0, dtype=data_types.ARRAY_INT32)
fbx_polys = () fbx_edges = parray_as_ndarray(fbx_edges) if fbx_edges else np.empty(0, dtype=data_types.ARRAY_INT32)
# Each vert is a 3d vector so is made of 3 components.
tot_verts = len(fbx_verts) // 3
if tot_verts * 3 != len(fbx_verts):
print("ERROR: Not a whole number of vertices. Ignoring the partial vertex!")
# Remove any remainder.
fbx_verts = fbx_verts[:tot_verts * 3]
tot_loops = len(fbx_polys)
tot_edges = len(fbx_edges)
mesh = bpy.data.meshes.new(name=elem_name_utf8) mesh = bpy.data.meshes.new(name=elem_name_utf8)
mesh.vertices.add(len(fbx_verts) // 3)
mesh.vertices.foreach_set("co", fbx_verts)
if fbx_polys: if tot_verts:
mesh.loops.add(len(fbx_polys)) if geom_mat_co is not None:
poly_loop_starts = [] fbx_verts = vcos_transformed(fbx_verts, geom_mat_co, bl_vcos_dtype)
poly_loop_totals = [] else:
poly_loop_prev = 0 fbx_verts = fbx_verts.astype(bl_vcos_dtype, copy=False)
for i, l in enumerate(mesh.loops):
index = fbx_polys[i]
if index < 0:
poly_loop_starts.append(poly_loop_prev)
poly_loop_totals.append((i - poly_loop_prev) + 1)
poly_loop_prev = i + 1
index ^= -1
l.vertex_index = index
mesh.polygons.add(len(poly_loop_starts)) mesh.vertices.add(tot_verts)
mesh.vertices.foreach_set("co", fbx_verts.ravel())
if tot_loops:
bl_loop_start_dtype = bl_loop_vertex_index_dtype = np.uintc
mesh.loops.add(tot_loops)
# The end of each polygon is specified by an inverted index.
fbx_loop_end_idx = np.flatnonzero(fbx_polys < 0)
tot_polys = len(fbx_loop_end_idx)
# Un-invert the loop ends.
fbx_polys[fbx_loop_end_idx] ^= -1
# Set loop vertex indices, casting to the Blender C type first for performance.
mesh.loops.foreach_set("vertex_index", astype_view_signedness(fbx_polys, bl_loop_vertex_index_dtype))
poly_loop_starts = np.empty(tot_polys, dtype=bl_loop_start_dtype)
# The first loop is always a loop start.
poly_loop_starts[0] = 0
# Ignoring the last loop end, the indices after every loop end are the remaining loop starts.
poly_loop_starts[1:] = fbx_loop_end_idx[:-1] + 1
mesh.polygons.add(tot_polys)
mesh.polygons.foreach_set("loop_start", poly_loop_starts) mesh.polygons.foreach_set("loop_start", poly_loop_starts)
mesh.polygons.foreach_set("loop_total", poly_loop_totals)
blen_read_geom_layer_material(fbx_obj, mesh) blen_read_geom_layer_material(fbx_obj, mesh)
blen_read_geom_layer_uv(fbx_obj, mesh) blen_read_geom_layer_uv(fbx_obj, mesh)
blen_read_geom_layer_color(fbx_obj, mesh, settings.colors_type) blen_read_geom_layer_color(fbx_obj, mesh, settings.colors_type)
if fbx_edges: if tot_edges:
# edges in fact index the polygons (NOT the vertices) # edges in fact index the polygons (NOT the vertices)
import array bl_edge_vertex_indices_dtype = np.uintc
tot_edges = len(fbx_edges)
edges_conv = array.array('i', [0]) * (tot_edges * 2)
edge_index = 0 # The first vertex index of each edge is the vertex index of the corresponding loop in fbx_polys.
for i in fbx_edges: edges_a = fbx_polys[fbx_edges]
e_a = fbx_polys[i]
if e_a >= 0:
e_b = fbx_polys[i + 1]
if e_b < 0:
e_b ^= -1
else:
# Last index of polygon, wrap back to the start.
# ideally we wouldn't have to search back, # The second vertex index of each edge is the vertex index of the next loop in the same polygon. The
# but it should only be 2-3 iterations. # complexity here is that if the first vertex index was the last loop of that polygon in fbx_polys, the next
j = i - 1 # loop in the polygon is the first loop of that polygon, which is not the next loop in fbx_polys.
while j >= 0 and fbx_polys[j] >= 0:
j -= 1
e_a ^= -1
e_b = fbx_polys[j + 1]
edges_conv[edge_index] = e_a # Copy fbx_polys, but rolled backwards by 1 so that indexing the result by [fbx_edges] will get the next
edges_conv[edge_index + 1] = e_b # loop of the same polygon unless the first vertex index was the last loop of the polygon.
edge_index += 2 fbx_polys_next = np.roll(fbx_polys, -1)
# Get the first loop of each polygon and set them into fbx_polys_next at the same indices as the last loop
# of each polygon in fbx_polys.
fbx_polys_next[fbx_loop_end_idx] = fbx_polys[poly_loop_starts]
mesh.edges.add(tot_edges) # Indexing fbx_polys_next by fbx_edges now gets the vertex index of the next loop in fbx_polys.
mesh.edges.foreach_set("vertices", edges_conv) edges_b = fbx_polys_next[fbx_edges]
# edges_a and edges_b need to be combined so that the first vertex index of each edge is immediately
# followed by the second vertex index of that same edge.
# Stack edges_a and edges_b as individual columns like np.column_stack((edges_a, edges_b)).
# np.concatenate is used because np.column_stack doesn't allow specifying the dtype of the returned array.
edges_conv = np.concatenate((edges_a.reshape(-1, 1), edges_b.reshape(-1, 1)),
axis=1, dtype=bl_edge_vertex_indices_dtype, casting='unsafe')
# Add the edges and set their vertex indices.
mesh.edges.add(len(edges_conv))
# ravel() because edges_conv must be flat and C-contiguous when passed to foreach_set.
mesh.edges.foreach_set("vertices", edges_conv.ravel())
elif tot_edges:
print("ERROR: No polygons, but edges exist. Ignoring the edges!")
# must be after edge, face loading. # must be after edge, face loading.
ok_smooth = blen_read_geom_layer_smooth(fbx_obj, mesh) ok_smooth = blen_read_geom_layer_smooth(fbx_obj, mesh)
@ -1340,21 +1542,23 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):
if geom_mat_no is None: if geom_mat_no is None:
ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh) ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh)
else: else:
def nortrans(v): ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh,
return geom_mat_no @ Vector(v) lambda v_array: nors_transformed(v_array, geom_mat_no))
ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh, nortrans)
mesh.validate(clean_customdata=False) # *Very* important to not remove lnors here! mesh.validate(clean_customdata=False) # *Very* important to not remove lnors here!
if ok_normals: if ok_normals:
clnors = array.array('f', [0.0] * (len(mesh.loops) * 3)) bl_nors_dtype = np.single
clnors = np.empty(len(mesh.loops) * 3, dtype=bl_nors_dtype)
mesh.loops.foreach_get("normal", clnors) mesh.loops.foreach_get("normal", clnors)
if not ok_smooth: if not ok_smooth:
mesh.polygons.foreach_set("use_smooth", [True] * len(mesh.polygons)) mesh.polygons.foreach_set("use_smooth", np.full(len(mesh.polygons), True, dtype=bool))
ok_smooth = True ok_smooth = True
mesh.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3))) # Iterating clnors into a nested tuple first is faster than passing clnors.reshape(-1, 3) directly into
# normals_split_custom_set. We use clnors.data since it is a memoryview, which is faster to iterate than clnors.
mesh.normals_split_custom_set(tuple(zip(*(iter(clnors.data),) * 3)))
mesh.use_auto_smooth = True mesh.use_auto_smooth = True
else: else:
mesh.calc_normals() mesh.calc_normals()
@ -1363,7 +1567,7 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):
mesh.free_normals_split() mesh.free_normals_split()
if not ok_smooth: if not ok_smooth:
mesh.polygons.foreach_set("use_smooth", [True] * len(mesh.polygons)) mesh.polygons.foreach_set("use_smooth", np.full(len(mesh.polygons), True, dtype=bool))
if settings.use_custom_props: if settings.use_custom_props:
blen_read_custom_properties(fbx_obj, mesh, settings) blen_read_custom_properties(fbx_obj, mesh, settings)
@ -1371,46 +1575,78 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):
return mesh return mesh
def blen_read_shape(fbx_tmpl, fbx_sdata, fbx_bcdata, meshes, scene): def blen_read_shapes(fbx_tmpl, fbx_data, objects, me, scene):
elem_name_utf8 = elem_name_ensure_class(fbx_sdata, b'Geometry') if not fbx_data:
indices = elem_prop_first(elem_find_first(fbx_sdata, b'Indexes'), default=()) # No shape key data. Nothing to do.
dvcos = tuple(co for co in zip(*[iter(elem_prop_first(elem_find_first(fbx_sdata, b'Vertices'), default=()))] * 3)) return
# We completely ignore normals here!
weight = elem_prop_first(elem_find_first(fbx_bcdata, b'DeformPercent'), default=100.0) / 100.0
vgweights = tuple(vgw / 100.0 for vgw in elem_prop_first(elem_find_first(fbx_bcdata, b'FullWeights'), default=()))
# Special case, in case all weights are the same, FullWeight can have only one element - *sigh!* bl_vcos_dtype = np.single
nbr_indices = len(indices) me_vcos = np.empty(len(me.vertices) * 3, dtype=bl_vcos_dtype)
if len(vgweights) == 1 and nbr_indices > 1: me.vertices.foreach_get("co", me_vcos)
vgweights = (vgweights[0],) * nbr_indices me_vcos_vector_view = me_vcos.reshape(-1, 3)
assert(len(vgweights) == nbr_indices == len(dvcos)) objects = list({node.bl_obj for node in objects})
create_vg = bool(set(vgweights) - {1.0}) assert(objects)
keyblocks = [] bc_uuid_to_keyblocks = {}
for bc_uuid, fbx_sdata, fbx_bcdata in fbx_data:
elem_name_utf8 = elem_name_ensure_class(fbx_sdata, b'Geometry')
indices = elem_prop_first(elem_find_first(fbx_sdata, b'Indexes'))
dvcos = elem_prop_first(elem_find_first(fbx_sdata, b'Vertices'))
for me, objects in meshes: indices = parray_as_ndarray(indices) if indices else np.empty(0, dtype=data_types.ARRAY_INT32)
vcos = tuple((idx, me.vertices[idx].co + Vector(dvco)) for idx, dvco in zip(indices, dvcos)) dvcos = parray_as_ndarray(dvcos) if dvcos else np.empty(0, dtype=data_types.ARRAY_FLOAT64)
objects = list({node.bl_obj for node in objects})
assert(objects)
# If there's not a whole number of vectors, trim off the remainder.
# 3 components per vector.
remainder = len(dvcos) % 3
if remainder:
dvcos = dvcos[:-remainder]
dvcos = dvcos.reshape(-1, 3)
# We completely ignore normals here!
weight = elem_prop_first(elem_find_first(fbx_bcdata, b'DeformPercent'), default=100.0) / 100.0
vgweights = elem_prop_first(elem_find_first(fbx_bcdata, b'FullWeights'))
vgweights = parray_as_ndarray(vgweights) if vgweights else np.empty(0, dtype=data_types.ARRAY_FLOAT64)
# Not doing the division in-place in-case it's possible for FBX shape keys to be used by more than one mesh.
vgweights = vgweights / 100.0
create_vg = (vgweights != 1.0).any()
# Special case, in case all weights are the same, FullWeight can have only one element - *sigh!*
nbr_indices = len(indices)
if len(vgweights) == 1 and nbr_indices > 1:
vgweights = np.full_like(indices, vgweights[0], dtype=vgweights.dtype)
assert(len(vgweights) == nbr_indices == len(dvcos))
# To add shape keys to the mesh, an Object using the mesh is needed.
if me.shape_keys is None: if me.shape_keys is None:
objects[0].shape_key_add(name="Basis", from_mix=False) objects[0].shape_key_add(name="Basis", from_mix=False)
kb = objects[0].shape_key_add(name=elem_name_utf8, from_mix=False) kb = objects[0].shape_key_add(name=elem_name_utf8, from_mix=False)
me.shape_keys.use_relative = True # Should already be set as such. me.shape_keys.use_relative = True # Should already be set as such.
for idx, co in vcos: # Only need to set the shape key co if there are any non-zero dvcos.
kb.data[idx].co[:] = co if dvcos.any():
shape_cos = me_vcos_vector_view.copy()
shape_cos[indices] += dvcos
kb.data.foreach_set("co", shape_cos.ravel())
kb.value = weight kb.value = weight
# Add vgroup if necessary. # Add vgroup if necessary.
if create_vg: if create_vg:
vgoups = add_vgroup_to_objects(indices, vgweights, kb.name, objects) # VertexGroup.add only allows sequences of int indices, but iterating the indices array directly would
# produce numpy scalars of types such as np.int32. The underlying memoryview of the indices array, however,
# does produce standard Python ints when iterated, so pass indices.data to add_vgroup_to_objects instead of
# indices.
# memoryviews tend to be faster to iterate than numpy arrays anyway, so vgweights.data is passed too.
add_vgroup_to_objects(indices.data, vgweights.data, kb.name, objects)
kb.vertex_group = kb.name kb.vertex_group = kb.name
keyblocks.append(kb) bc_uuid_to_keyblocks.setdefault(bc_uuid, []).append(kb)
return bc_uuid_to_keyblocks
return keyblocks
# -------- # --------
@ -2861,6 +3097,7 @@ def load(operator, context, filepath="",
def _(): def _():
fbx_tmpl = fbx_template_get((b'Geometry', b'KFbxShape')) fbx_tmpl = fbx_template_get((b'Geometry', b'KFbxShape'))
mesh_to_shapes = {}
for s_uuid, s_item in fbx_table_nodes.items(): for s_uuid, s_item in fbx_table_nodes.items():
fbx_sdata, bl_sdata = s_item = fbx_table_nodes.get(s_uuid, (None, None)) fbx_sdata, bl_sdata = s_item = fbx_table_nodes.get(s_uuid, (None, None))
if fbx_sdata is None or fbx_sdata.id != b'Geometry' or fbx_sdata.props[2] != b'Shape': if fbx_sdata is None or fbx_sdata.id != b'Geometry' or fbx_sdata.props[2] != b'Shape':
@ -2873,8 +3110,6 @@ def load(operator, context, filepath="",
fbx_bcdata, _bl_bcdata = fbx_table_nodes.get(bc_uuid, (None, None)) fbx_bcdata, _bl_bcdata = fbx_table_nodes.get(bc_uuid, (None, None))
if fbx_bcdata is None or fbx_bcdata.id != b'Deformer' or fbx_bcdata.props[2] != b'BlendShapeChannel': if fbx_bcdata is None or fbx_bcdata.id != b'Deformer' or fbx_bcdata.props[2] != b'BlendShapeChannel':
continue continue
meshes = []
objects = []
for bs_uuid, bs_ctype in fbx_connection_map.get(bc_uuid, ()): for bs_uuid, bs_ctype in fbx_connection_map.get(bc_uuid, ()):
if bs_ctype.props[0] != b'OO': if bs_ctype.props[0] != b'OO':
continue continue
@ -2889,20 +3124,29 @@ def load(operator, context, filepath="",
continue continue
# Blenmeshes are assumed already created at that time! # Blenmeshes are assumed already created at that time!
assert(isinstance(bl_mdata, bpy.types.Mesh)) assert(isinstance(bl_mdata, bpy.types.Mesh))
# And we have to find all objects using this mesh! # Group shapes by mesh so that each mesh only needs to be processed once for all of its shape
objects = [] # keys.
for o_uuid, o_ctype in fbx_connection_map.get(m_uuid, ()): if bl_mdata not in mesh_to_shapes:
if o_ctype.props[0] != b'OO': # And we have to find all objects using this mesh!
continue objects = []
node = fbx_helper_nodes[o_uuid] for o_uuid, o_ctype in fbx_connection_map.get(m_uuid, ()):
if node: if o_ctype.props[0] != b'OO':
objects.append(node) continue
meshes.append((bl_mdata, objects)) node = fbx_helper_nodes[o_uuid]
if node:
objects.append(node)
shapes_list = []
mesh_to_shapes[bl_mdata] = (objects, shapes_list)
else:
shapes_list = mesh_to_shapes[bl_mdata][1]
shapes_list.append((bc_uuid, fbx_sdata, fbx_bcdata))
# BlendShape deformers are only here to connect BlendShapeChannels to meshes, nothing else to do. # BlendShape deformers are only here to connect BlendShapeChannels to meshes, nothing else to do.
# Iterate through each mesh and create its shape keys
for bl_mdata, (objects, shapes) in mesh_to_shapes.items():
for bc_uuid, keyblocks in blen_read_shapes(fbx_tmpl, shapes, objects, bl_mdata, scene).items():
# keyblocks is a list of tuples (mesh, keyblock) matching that shape/blendshapechannel, for animation. # keyblocks is a list of tuples (mesh, keyblock) matching that shape/blendshapechannel, for animation.
keyblocks = blen_read_shape(fbx_tmpl, fbx_sdata, fbx_bcdata, meshes, scene) blend_shape_channels.setdefault(bc_uuid, []).extend(keyblocks)
blend_shape_channels[bc_uuid] = keyblocks
_(); del _ _(); del _
if settings.use_subsurf: if settings.use_subsurf:
@ -3224,8 +3468,16 @@ def load(operator, context, filepath="",
if decal_offset != 0.0: if decal_offset != 0.0:
for material in mesh.materials: for material in mesh.materials:
if material in material_decals: if material in material_decals:
for v in mesh.vertices: num_verts = len(mesh.vertices)
v.co += v.normal * decal_offset blen_cos_dtype = blen_norm_dtype = np.single
vcos = np.empty(num_verts * 3, dtype=blen_cos_dtype)
vnorm = np.empty(num_verts * 3, dtype=blen_norm_dtype)
mesh.vertices.foreach_get("co", vcos)
mesh.vertices.foreach_get("normal", vnorm)
vcos += vnorm * decal_offset
mesh.vertices.foreach_set("co", vcos)
break break
for obj in (obj for obj in bpy.data.objects if obj.data == mesh): for obj in (obj for obj in bpy.data.objects if obj.data == mesh):

View File

@ -4,7 +4,7 @@
bl_info = { bl_info = {
'name': 'glTF 2.0 format', 'name': 'glTF 2.0 format',
'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors', 'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
"version": (3, 6, 6), "version": (3, 6, 14),
'blender': (3, 5, 0), 'blender': (3, 5, 0),
'location': 'File > Import-Export', 'location': 'File > Import-Export',
'description': 'Import-Export as glTF 2.0', 'description': 'Import-Export as glTF 2.0',
@ -106,7 +106,7 @@ def on_export_format_changed(self, context):
class ConvertGLTF2_Base: class ConvertGLTF2_Base:
"""Base class containing options that should be exposed during both import and export.""" """Base class containing options that should be exposed during both import and export."""
convert_lighting_mode: EnumProperty( export_import_convert_lighting_mode: EnumProperty(
name='Lighting Mode', name='Lighting Mode',
items=( items=(
('SPEC', 'Standard', 'Physically-based glTF lighting units (cd, lx, nt)'), ('SPEC', 'Standard', 'Physically-based glTF lighting units (cd, lx, nt)'),
@ -786,7 +786,7 @@ class ExportGLTF2_Base(ConvertGLTF2_Base):
export_settings['gltf_morph_anim'] = False export_settings['gltf_morph_anim'] = False
export_settings['gltf_lights'] = self.export_lights export_settings['gltf_lights'] = self.export_lights
export_settings['gltf_lighting_mode'] = self.convert_lighting_mode export_settings['gltf_lighting_mode'] = self.export_import_convert_lighting_mode
export_settings['gltf_binary'] = bytearray() export_settings['gltf_binary'] = bytearray()
export_settings['gltf_binaryfilename'] = ( export_settings['gltf_binaryfilename'] = (
@ -1043,7 +1043,7 @@ class GLTF_PT_export_data_lighting(bpy.types.Panel):
sfile = context.space_data sfile = context.space_data
operator = sfile.active_operator operator = sfile.active_operator
layout.prop(operator, 'convert_lighting_mode') layout.prop(operator, 'export_import_convert_lighting_mode')
class GLTF_PT_export_data_shapekeys(bpy.types.Panel): class GLTF_PT_export_data_shapekeys(bpy.types.Panel):
bl_space_type = 'FILE_BROWSER' bl_space_type = 'FILE_BROWSER'
@ -1489,7 +1489,7 @@ class ImportGLTF2(Operator, ConvertGLTF2_Base, ImportHelper):
layout.prop(self, 'import_shading') layout.prop(self, 'import_shading')
layout.prop(self, 'guess_original_bind_pose') layout.prop(self, 'guess_original_bind_pose')
layout.prop(self, 'bone_heuristic') layout.prop(self, 'bone_heuristic')
layout.prop(self, 'convert_lighting_mode') layout.prop(self, 'export_import_convert_lighting_mode')
def invoke(self, context, event): def invoke(self, context, event):
import sys import sys

View File

@ -14,7 +14,6 @@ def get_mesh_cache_key(blender_mesh,
blender_object, blender_object,
vertex_groups, vertex_groups,
modifiers, modifiers,
skip_filter,
materials, materials,
original_mesh, original_mesh,
export_settings): export_settings):
@ -34,21 +33,19 @@ def get_mesh_cache_key(blender_mesh,
return ( return (
(id(mesh_to_id_cache),), (id(mesh_to_id_cache),),
(modifiers,), (modifiers,),
(skip_filter,), #TODO to check if still needed
mats mats
) )
@cached_by_key(key=get_mesh_cache_key) @cached_by_key(key=get_mesh_cache_key)
def gather_mesh(blender_mesh: bpy.types.Mesh, def gather_mesh(blender_mesh: bpy.types.Mesh,
uuid_for_skined_data, uuid_for_skined_data,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
skip_filter: bool,
materials: Tuple[bpy.types.Material], materials: Tuple[bpy.types.Material],
original_mesh: bpy.types.Mesh, original_mesh: bpy.types.Mesh,
export_settings export_settings
) -> Optional[gltf2_io.Mesh]: ) -> Optional[gltf2_io.Mesh]:
if not skip_filter and not __filter_mesh(blender_mesh, vertex_groups, modifiers, export_settings): if not __filter_mesh(blender_mesh, vertex_groups, modifiers, export_settings):
return None return None
mesh = gltf2_io.Mesh( mesh = gltf2_io.Mesh(
@ -75,25 +72,21 @@ def gather_mesh(blender_mesh: bpy.types.Mesh,
blender_object, blender_object,
vertex_groups, vertex_groups,
modifiers, modifiers,
skip_filter,
materials) materials)
return mesh return mesh
def __filter_mesh(blender_mesh: bpy.types.Mesh, def __filter_mesh(blender_mesh: bpy.types.Mesh,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
export_settings export_settings
) -> bool: ) -> bool:
if blender_mesh.users == 0:
return False
return True return True
def __gather_extensions(blender_mesh: bpy.types.Mesh, def __gather_extensions(blender_mesh: bpy.types.Mesh,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
export_settings export_settings
) -> Any: ) -> Any:
@ -101,7 +94,7 @@ def __gather_extensions(blender_mesh: bpy.types.Mesh,
def __gather_extras(blender_mesh: bpy.types.Mesh, def __gather_extras(blender_mesh: bpy.types.Mesh,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
export_settings export_settings
) -> Optional[Dict[Any, Any]]: ) -> Optional[Dict[Any, Any]]:
@ -128,7 +121,7 @@ def __gather_extras(blender_mesh: bpy.types.Mesh,
def __gather_name(blender_mesh: bpy.types.Mesh, def __gather_name(blender_mesh: bpy.types.Mesh,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
export_settings export_settings
) -> str: ) -> str:
@ -137,7 +130,7 @@ def __gather_name(blender_mesh: bpy.types.Mesh,
def __gather_primitives(blender_mesh: bpy.types.Mesh, def __gather_primitives(blender_mesh: bpy.types.Mesh,
uuid_for_skined_data, uuid_for_skined_data,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
materials: Tuple[bpy.types.Material], materials: Tuple[bpy.types.Material],
export_settings export_settings
@ -151,7 +144,7 @@ def __gather_primitives(blender_mesh: bpy.types.Mesh,
def __gather_weights(blender_mesh: bpy.types.Mesh, def __gather_weights(blender_mesh: bpy.types.Mesh,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
export_settings export_settings
) -> Optional[List[float]]: ) -> Optional[List[float]]:

View File

@ -182,11 +182,7 @@ def __gather_mesh(vnode, blender_object, export_settings):
# Be sure that object is valid (no NaN for example) # Be sure that object is valid (no NaN for example)
blender_object.data.validate() blender_object.data.validate()
# If not using vertex group, they are irrelevant for caching --> ensure that they do not trigger a cache miss
vertex_groups = blender_object.vertex_groups
modifiers = blender_object.modifiers modifiers = blender_object.modifiers
if len(vertex_groups) == 0:
vertex_groups = None
if len(modifiers) == 0: if len(modifiers) == 0:
modifiers = None modifiers = None
@ -194,7 +190,9 @@ def __gather_mesh(vnode, blender_object, export_settings):
if export_settings['gltf_apply']: if export_settings['gltf_apply']:
if modifiers is None: # If no modifier, use original mesh, it will instance all shared mesh in a single glTF mesh if modifiers is None: # If no modifier, use original mesh, it will instance all shared mesh in a single glTF mesh
blender_mesh = blender_object.data blender_mesh = blender_object.data
skip_filter = False # Keep materials from object, as no modifiers are applied, so no risk that
# modifiers changed them
materials = tuple(ms.material for ms in blender_object.material_slots)
else: else:
armature_modifiers = {} armature_modifiers = {}
if export_settings['gltf_skins']: if export_settings['gltf_skins']:
@ -209,26 +207,28 @@ def __gather_mesh(vnode, blender_object, export_settings):
blender_mesh = blender_mesh_owner.to_mesh(preserve_all_data_layers=True, depsgraph=depsgraph) blender_mesh = blender_mesh_owner.to_mesh(preserve_all_data_layers=True, depsgraph=depsgraph)
for prop in blender_object.data.keys(): for prop in blender_object.data.keys():
blender_mesh[prop] = blender_object.data[prop] blender_mesh[prop] = blender_object.data[prop]
skip_filter = True
if export_settings['gltf_skins']: if export_settings['gltf_skins']:
# restore Armature modifiers # restore Armature modifiers
for idx, show_viewport in armature_modifiers.items(): for idx, show_viewport in armature_modifiers.items():
blender_object.modifiers[idx].show_viewport = show_viewport blender_object.modifiers[idx].show_viewport = show_viewport
# Keep materials from the newly created tmp mesh
materials = tuple(mat for mat in blender_mesh.materials)
if len(materials) == 1 and materials[0] is None:
materials = tuple(ms.material for ms in blender_object.material_slots)
else: else:
blender_mesh = blender_object.data blender_mesh = blender_object.data
skip_filter = False
# If no skin are exported, no need to have vertex group, this will create a cache miss # If no skin are exported, no need to have vertex group, this will create a cache miss
if not export_settings['gltf_skins']: if not export_settings['gltf_skins']:
vertex_groups = None
modifiers = None modifiers = None
else: else:
# Check if there is an armature modidier # Check if there is an armature modidier
if len([mod for mod in blender_object.modifiers if mod.type == "ARMATURE"]) == 0: if len([mod for mod in blender_object.modifiers if mod.type == "ARMATURE"]) == 0:
vertex_groups = None # Not needed if no armature, avoid a cache miss
modifiers = None modifiers = None
# Keep materials from object, as no modifiers are applied, so no risk that
materials = tuple(ms.material for ms in blender_object.material_slots) # modifiers changed them
materials = tuple(ms.material for ms in blender_object.material_slots)
# retrieve armature # retrieve armature
# Because mesh data will be transforms to skeleton space, # Because mesh data will be transforms to skeleton space,
@ -241,9 +241,8 @@ def __gather_mesh(vnode, blender_object, export_settings):
result = gltf2_blender_gather_mesh.gather_mesh(blender_mesh, result = gltf2_blender_gather_mesh.gather_mesh(blender_mesh,
uuid_for_skined_data, uuid_for_skined_data,
vertex_groups, blender_object.vertex_groups,
modifiers, modifiers,
skip_filter,
materials, materials,
None, None,
export_settings) export_settings)
@ -279,17 +278,14 @@ def __gather_mesh_from_nonmesh(blender_object, export_settings):
needs_to_mesh_clear = True needs_to_mesh_clear = True
skip_filter = True
materials = tuple([ms.material for ms in blender_object.material_slots if ms.material is not None]) materials = tuple([ms.material for ms in blender_object.material_slots if ms.material is not None])
vertex_groups = None
modifiers = None modifiers = None
blender_object_for_skined_data = None blender_object_for_skined_data = None
result = gltf2_blender_gather_mesh.gather_mesh(blender_mesh, result = gltf2_blender_gather_mesh.gather_mesh(blender_mesh,
blender_object_for_skined_data, blender_object_for_skined_data,
vertex_groups, blender_object.vertex_groups,
modifiers, modifiers,
skip_filter,
materials, materials,
blender_object.data, blender_object.data,
export_settings) export_settings)
@ -361,8 +357,7 @@ def gather_skin(vnode, export_settings):
return None return None
# no skin needed when the modifier is linked without having a vertex group # no skin needed when the modifier is linked without having a vertex group
vertex_groups = blender_object.vertex_groups if len(blender_object.vertex_groups) == 0:
if len(vertex_groups) == 0:
return None return None
# check if any vertices in the mesh are part of a vertex group # check if any vertices in the mesh are part of a vertex group

View File

@ -15,9 +15,9 @@ from .material import gltf2_blender_gather_materials
from .material.extensions import gltf2_blender_gather_materials_variants from .material.extensions import gltf2_blender_gather_materials_variants
@cached @cached
def get_primitive_cache_key( def gather_primitive_cache_key(
blender_mesh, blender_mesh,
blender_object, uuid_for_skined_data,
vertex_groups, vertex_groups,
modifiers, modifiers,
materials, materials,
@ -36,11 +36,11 @@ def get_primitive_cache_key(
) )
@cached_by_key(key=get_primitive_cache_key) @cached_by_key(key=gather_primitive_cache_key)
def gather_primitives( def gather_primitives(
blender_mesh: bpy.types.Mesh, blender_mesh: bpy.types.Mesh,
uuid_for_skined_data, uuid_for_skined_data,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
materials: Tuple[bpy.types.Material], materials: Tuple[bpy.types.Material],
export_settings export_settings
@ -92,11 +92,33 @@ def gather_primitives(
return primitives return primitives
@cached @cached
def get_primitive_cache_key(
blender_mesh,
uuid_for_skined_data,
vertex_groups,
modifiers,
export_settings):
# Use id of mesh
# Do not use bpy.types that can be unhashable
# Do not use mesh name, that can be not unique (when linked)
# Do not use materials here
# TODO check what is really needed for modifiers
return (
(id(blender_mesh),),
(modifiers,)
)
@cached_by_key(key=get_primitive_cache_key)
def __gather_cache_primitives( def __gather_cache_primitives(
blender_mesh: bpy.types.Mesh, blender_mesh: bpy.types.Mesh,
uuid_for_skined_data, uuid_for_skined_data,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
export_settings export_settings
) -> List[dict]: ) -> List[dict]:

View File

@ -85,7 +85,7 @@ class PrimitiveCreator:
# Check if we have to export skin # Check if we have to export skin
self.armature = None self.armature = None
self.skin = None self.skin = None
if self.blender_vertex_groups and self.export_settings['gltf_skins']: if self.export_settings['gltf_skins']:
if self.modifiers is not None: if self.modifiers is not None:
modifiers_dict = {m.type: m for m in self.modifiers} modifiers_dict = {m.type: m for m in self.modifiers}
if "ARMATURE" in modifiers_dict: if "ARMATURE" in modifiers_dict:
@ -197,15 +197,6 @@ class PrimitiveCreator:
attr['skip_getting_to_dots'] = True attr['skip_getting_to_dots'] = True
self.blender_attributes.append(attr) self.blender_attributes.append(attr)
# Manage uvs TEX_COORD_x
for tex_coord_i in range(self.tex_coord_max):
attr = {}
attr['blender_data_type'] = 'FLOAT2'
attr['blender_domain'] = 'CORNER'
attr['gltf_attribute_name'] = 'TEXCOORD_' + str(tex_coord_i)
attr['get'] = self.get_function()
self.blender_attributes.append(attr)
# Manage NORMALS # Manage NORMALS
if self.use_normals: if self.use_normals:
attr = {} attr = {}
@ -216,6 +207,15 @@ class PrimitiveCreator:
attr['get'] = self.get_function() attr['get'] = self.get_function()
self.blender_attributes.append(attr) self.blender_attributes.append(attr)
# Manage uvs TEX_COORD_x
for tex_coord_i in range(self.tex_coord_max):
attr = {}
attr['blender_data_type'] = 'FLOAT2'
attr['blender_domain'] = 'CORNER'
attr['gltf_attribute_name'] = 'TEXCOORD_' + str(tex_coord_i)
attr['get'] = self.get_function()
self.blender_attributes.append(attr)
# Manage TANGENT # Manage TANGENT
if self.use_tangents: if self.use_tangents:
attr = {} attr = {}
@ -269,6 +269,13 @@ class PrimitiveCreator:
attr['len'] = gltf2_blender_conversion.get_data_length(attr['blender_data_type']) attr['len'] = gltf2_blender_conversion.get_data_length(attr['blender_data_type'])
attr['type'] = gltf2_blender_conversion.get_numpy_type(attr['blender_data_type']) attr['type'] = gltf2_blender_conversion.get_numpy_type(attr['blender_data_type'])
# Now we have all attribtues, we can change order if we want
# Note that the glTF specification doesn't say anything about order
# Attributes are defined only by name
# But if user want it in a particular order, he can use this hook to perform it
export_user_extensions('gather_attributes_change', self.export_settings, self.blender_attributes)
def create_dots_data_structure(self): def create_dots_data_structure(self):
# Now that we get all attributes that are going to be exported, create numpy array that will store them # Now that we get all attributes that are going to be exported, create numpy array that will store them
dot_fields = [('vertex_index', np.uint32)] dot_fields = [('vertex_index', np.uint32)]
@ -698,6 +705,8 @@ class PrimitiveCreator:
self.normals = self.normals.reshape(len(self.blender_mesh.loops), 3) self.normals = self.normals.reshape(len(self.blender_mesh.loops), 3)
self.normals = np.round(self.normals, NORMALS_ROUNDING_DIGIT) self.normals = np.round(self.normals, NORMALS_ROUNDING_DIGIT)
# Force normalization of normals in case some normals are not (why ?)
PrimitiveCreator.normalize_vecs(self.normals)
self.morph_normals = [] self.morph_normals = []
for key_block in key_blocks: for key_block in key_blocks:

View File

@ -2,12 +2,11 @@
# Copyright 2018-2021 The glTF-Blender-IO authors. # Copyright 2018-2021 The glTF-Blender-IO authors.
import re import re
import os import os
import urllib.parse
from typing import List from typing import List
from ... import get_version_string from ... import get_version_string
from ...io.com import gltf2_io, gltf2_io_extensions from ...io.com import gltf2_io, gltf2_io_extensions
from ...io.com.gltf2_io_path import path_to_uri from ...io.com.gltf2_io_path import path_to_uri, uri_to_path
from ...io.exp import gltf2_io_binary_data, gltf2_io_buffer, gltf2_io_image_data from ...io.exp import gltf2_io_binary_data, gltf2_io_buffer, gltf2_io_image_data
from ...io.exp.gltf2_io_user_extensions import export_user_extensions from ...io.exp.gltf2_io_user_extensions import export_user_extensions
@ -110,7 +109,7 @@ class GlTF2Exporter:
if is_glb: if is_glb:
uri = None uri = None
elif output_path and buffer_name: elif output_path and buffer_name:
with open(output_path + buffer_name, 'wb') as f: with open(output_path + uri_to_path(buffer_name), 'wb') as f:
f.write(self.__buffer.to_bytes()) f.write(self.__buffer.to_bytes())
uri = buffer_name uri = buffer_name
else: else:

View File

@ -179,6 +179,8 @@ class BlenderGlTF():
# Try to use name from extras.targetNames # Try to use name from extras.targetNames
try: try:
shapekey_name = str(mesh.extras['targetNames'][sk]) shapekey_name = str(mesh.extras['targetNames'][sk])
if shapekey_name == "": # Issue when shapekey name is empty
shapekey_name = None
except Exception: except Exception:
pass pass

View File

@ -46,27 +46,27 @@ class BlenderLight():
sun = bpy.data.lights.new(name=pylight['name'], type="SUN") sun = bpy.data.lights.new(name=pylight['name'], type="SUN")
if 'intensity' in pylight.keys(): if 'intensity' in pylight.keys():
if gltf.import_settings['convert_lighting_mode'] == 'SPEC': if gltf.import_settings['export_import_convert_lighting_mode'] == 'SPEC':
sun.energy = pylight['intensity'] / PBR_WATTS_TO_LUMENS sun.energy = pylight['intensity'] / PBR_WATTS_TO_LUMENS
elif gltf.import_settings['convert_lighting_mode'] == 'COMPAT': elif gltf.import_settings['export_import_convert_lighting_mode'] == 'COMPAT':
sun.energy = pylight['intensity'] sun.energy = pylight['intensity']
elif gltf.import_settings['convert_lighting_mode'] == 'RAW': elif gltf.import_settings['export_import_convert_lighting_mode'] == 'RAW':
sun.energy = pylight['intensity'] sun.energy = pylight['intensity']
else: else:
raise ValueError(gltf.import_settings['convert_lighting_mode']) raise ValueError(gltf.import_settings['export_import_convert_lighting_mode'])
return sun return sun
@staticmethod @staticmethod
def _calc_energy_pointlike(gltf, pylight): def _calc_energy_pointlike(gltf, pylight):
if gltf.import_settings['convert_lighting_mode'] == 'SPEC': if gltf.import_settings['export_import_convert_lighting_mode'] == 'SPEC':
return pylight['intensity'] / PBR_WATTS_TO_LUMENS * 4 * pi return pylight['intensity'] / PBR_WATTS_TO_LUMENS * 4 * pi
elif gltf.import_settings['convert_lighting_mode'] == 'COMPAT': elif gltf.import_settings['export_import_convert_lighting_mode'] == 'COMPAT':
return pylight['intensity'] * 4 * pi return pylight['intensity'] * 4 * pi
elif gltf.import_settings['convert_lighting_mode'] == 'RAW': elif gltf.import_settings['export_import_convert_lighting_mode'] == 'RAW':
return pylight['intensity'] return pylight['intensity']
else: else:
raise ValueError(gltf.import_settings['convert_lighting_mode']) raise ValueError(gltf.import_settings['export_import_convert_lighting_mode'])
@staticmethod @staticmethod
def create_point(gltf, light_id): def create_point(gltf, light_id):

View File

@ -596,7 +596,22 @@ def skin_into_bind_pose(gltf, skin_idx, vert_joints, vert_weights, locs, vert_no
for i in range(4): for i in range(4):
skinning_mats += ws[:, i].reshape(len(ws), 1, 1) * joint_mats[js[:, i]] skinning_mats += ws[:, i].reshape(len(ws), 1, 1) * joint_mats[js[:, i]]
weight_sums += ws[:, i] weight_sums += ws[:, i]
# Normalize weights to one; necessary for old files / quantized weights
# Some invalid files have 0 weight sum.
# To avoid to have this vertices at 0.0 / 0.0 / 0.0
# We set all weight ( aka 1.0 ) to the first bone
zeros_indices = np.where(weight_sums == 0)[0]
if zeros_indices.shape[0] > 0:
print_console('ERROR', 'File is invalid: Some vertices are not assigned to bone(s) ')
vert_weights[0][:, 0][zeros_indices] = 1.0 # Assign to first bone with all weight
# Reprocess IBM for these vertices
skinning_mats[zeros_indices] = np.zeros((4, 4), dtype=np.float32)
for js, ws in zip(vert_joints, vert_weights):
for i in range(4):
skinning_mats[zeros_indices] += ws[:, i][zeros_indices].reshape(len(ws[zeros_indices]), 1, 1) * joint_mats[js[:, i][zeros_indices]]
weight_sums[zeros_indices] += ws[:, i][zeros_indices]
skinning_mats /= weight_sums.reshape(num_verts, 1, 1) skinning_mats /= weight_sums.reshape(num_verts, 1, 1)
skinning_mats_3x3 = skinning_mats[:, :3, :3] skinning_mats_3x3 = skinning_mats[:, :3, :3]

View File

@ -690,11 +690,9 @@ def create_mesh(new_objects,
nbr_vidx = len(face_vert_loc_indices) nbr_vidx = len(face_vert_loc_indices)
faces_loop_start.append(lidx) faces_loop_start.append(lidx)
lidx += nbr_vidx lidx += nbr_vidx
faces_loop_total = tuple(len(face_vert_loc_indices) for (face_vert_loc_indices, _, _, _, _, _, _) in faces)
me.loops.foreach_set("vertex_index", loops_vert_idx) me.loops.foreach_set("vertex_index", loops_vert_idx)
me.polygons.foreach_set("loop_start", faces_loop_start) me.polygons.foreach_set("loop_start", faces_loop_start)
me.polygons.foreach_set("loop_total", faces_loop_total)
faces_ma_index = tuple(material_mapping[context_material] for (_, _, _, context_material, _, _, _) in faces) faces_ma_index = tuple(material_mapping[context_material] for (_, _, _, context_material, _, _, _) in faces)
me.polygons.foreach_set("material_index", faces_ma_index) me.polygons.foreach_set("material_index", faces_ma_index)

View File

@ -1256,7 +1256,7 @@ def gzipOpen(path):
if data is None: if data is None:
try: try:
filehandle = open(path, 'rU', encoding='utf-8', errors='surrogateescape') filehandle = open(path, 'r', encoding='utf-8', errors='surrogateescape')
data = filehandle.read() data = filehandle.read()
filehandle.close() filehandle.close()
except: except:
@ -1720,7 +1720,6 @@ def importMesh_IndexedTriangleSet(geom, ancestry):
bpymesh.loops.add(num_polys * 3) bpymesh.loops.add(num_polys * 3)
bpymesh.polygons.add(num_polys) bpymesh.polygons.add(num_polys)
bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3)) bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)
bpymesh.polygons.foreach_set("vertices", index) bpymesh.polygons.foreach_set("vertices", index)
return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry) return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
@ -1742,7 +1741,6 @@ def importMesh_IndexedTriangleStripSet(geom, ancestry):
bpymesh.loops.add(num_polys * 3) bpymesh.loops.add(num_polys * 3)
bpymesh.polygons.add(num_polys) bpymesh.polygons.add(num_polys)
bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3)) bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)
def triangles(): def triangles():
i = 0 i = 0
@ -1778,7 +1776,6 @@ def importMesh_IndexedTriangleFanSet(geom, ancestry):
bpymesh.loops.add(num_polys * 3) bpymesh.loops.add(num_polys * 3)
bpymesh.polygons.add(num_polys) bpymesh.polygons.add(num_polys)
bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3)) bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)
def triangles(): def triangles():
i = 0 i = 0
@ -1808,7 +1805,6 @@ def importMesh_TriangleSet(geom, ancestry):
bpymesh.loops.add(num_polys * 3) bpymesh.loops.add(num_polys * 3)
bpymesh.polygons.add(num_polys) bpymesh.polygons.add(num_polys)
bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3)) bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)
if ccw: if ccw:
fv = [i for i in range(n)] fv = [i for i in range(n)]
@ -1830,7 +1826,6 @@ def importMesh_TriangleStripSet(geom, ancestry):
bpymesh.loops.add(num_polys * 3) bpymesh.loops.add(num_polys * 3)
bpymesh.polygons.add(num_polys) bpymesh.polygons.add(num_polys)
bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3)) bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)
def triangles(): def triangles():
b = 0 b = 0
@ -1856,7 +1851,6 @@ def importMesh_TriangleFanSet(geom, ancestry):
bpymesh.loops.add(num_polys * 3) bpymesh.loops.add(num_polys * 3)
bpymesh.polygons.add(num_polys) bpymesh.polygons.add(num_polys)
bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3)) bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)
def triangles(): def triangles():
b = 0 b = 0
@ -2067,7 +2061,6 @@ def importMesh_ElevationGrid(geom, ancestry):
bpymesh.loops.add(num_polys * 4) bpymesh.loops.add(num_polys * 4)
bpymesh.polygons.add(num_polys) bpymesh.polygons.add(num_polys)
bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 4, 4)) bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 4, 4))
bpymesh.polygons.foreach_set("loop_total", (4,) * num_polys)
# If the ccw is off, we flip the 2nd and the 4th vertices of each face. # If the ccw is off, we flip the 2nd and the 4th vertices of each face.
# For quad tessfaces, it was important that the final vertex index was not 0 # For quad tessfaces, it was important that the final vertex index was not 0
# (Blender treated it as a triangle then). # (Blender treated it as a triangle then).
@ -2481,7 +2474,6 @@ def importMesh_Sphere(geom, ancestry):
tuple(range(0, ns * 3, 3)) + tuple(range(0, ns * 3, 3)) +
tuple(range(ns * 3, num_loop - ns * 3, 4)) + tuple(range(ns * 3, num_loop - ns * 3, 4)) +
tuple(range(num_loop - ns * 3, num_loop, 3))) tuple(range(num_loop - ns * 3, num_loop, 3)))
bpymesh.polygons.foreach_set("loop_total", (3,) * ns + (4,) * num_quad + (3,) * ns)
vb = 2 + (nr - 2) * ns # First vertex index for the bottom cap vb = 2 + (nr - 2) * ns # First vertex index for the bottom cap
fb = (nr - 1) * ns # First face index for the bottom cap fb = (nr - 1) * ns # First face index for the bottom cap

View File

@ -3,8 +3,8 @@
bl_info = { bl_info = {
"name": "Node Wrangler", "name": "Node Wrangler",
"author": "Bartek Skorupa, Greg Zaal, Sebastian Koenig, Christian Brinkmann, Florian Meyer", "author": "Bartek Skorupa, Greg Zaal, Sebastian Koenig, Christian Brinkmann, Florian Meyer",
"version": (3, 44), "version": (3, 45),
"blender": (3, 4, 0), "blender": (3, 6, 0),
"location": "Node Editor Toolbar or Shift-W", "location": "Node Editor Toolbar or Shift-W",
"description": "Various tools to enhance and speed up node-based workflow", "description": "Various tools to enhance and speed up node-based workflow",
"warning": "", "warning": "",

View File

@ -1,6 +1,7 @@
# SPDX-License-Identifier: GPL-2.0-or-later # SPDX-License-Identifier: GPL-2.0-or-later
import bpy import bpy
from bpy_extras.node_utils import connect_sockets
from math import hypot from math import hypot
@ -29,48 +30,42 @@ def node_mid_pt(node, axis):
def autolink(node1, node2, links): def autolink(node1, node2, links):
link_made = False
available_inputs = [inp for inp in node2.inputs if inp.enabled] available_inputs = [inp for inp in node2.inputs if inp.enabled]
available_outputs = [outp for outp in node1.outputs if outp.enabled] available_outputs = [outp for outp in node1.outputs if outp.enabled]
for outp in available_outputs: for outp in available_outputs:
for inp in available_inputs: for inp in available_inputs:
if not inp.is_linked and inp.name == outp.name: if not inp.is_linked and inp.name == outp.name:
link_made = True connect_sockets(outp, inp)
links.new(outp, inp)
return True return True
for outp in available_outputs: for outp in available_outputs:
for inp in available_inputs: for inp in available_inputs:
if not inp.is_linked and inp.type == outp.type: if not inp.is_linked and inp.type == outp.type:
link_made = True connect_sockets(outp, inp)
links.new(outp, inp)
return True return True
# force some connection even if the type doesn't match # force some connection even if the type doesn't match
if available_outputs: if available_outputs:
for inp in available_inputs: for inp in available_inputs:
if not inp.is_linked: if not inp.is_linked:
link_made = True connect_sockets(available_outputs[0], inp)
links.new(available_outputs[0], inp)
return True return True
# even if no sockets are open, force one of matching type # even if no sockets are open, force one of matching type
for outp in available_outputs: for outp in available_outputs:
for inp in available_inputs: for inp in available_inputs:
if inp.type == outp.type: if inp.type == outp.type:
link_made = True connect_sockets(outp, inp)
links.new(outp, inp)
return True return True
# do something! # do something!
for outp in available_outputs: for outp in available_outputs:
for inp in available_inputs: for inp in available_inputs:
link_made = True connect_sockets(outp, inp)
links.new(outp, inp)
return True return True
print("Could not make a link from " + node1.name + " to " + node2.name) print("Could not make a link from " + node1.name + " to " + node2.name)
return link_made return False
def abs_node_location(node): def abs_node_location(node):

View File

@ -5,7 +5,7 @@
import bpy import bpy
from .shading import write_object_material_interior from .shading import write_object_material_interior
def export_meta(file, metas, tab_write, DEF_MAT_NAME): def export_meta(file, metas, material_names_dictionary, tab_write, DEF_MAT_NAME):
"""write all POV blob primitives and Blender Metas to exported file """ """write all POV blob primitives and Blender Metas to exported file """
# TODO - blenders 'motherball' naming is not supported. # TODO - blenders 'motherball' naming is not supported.
@ -221,7 +221,8 @@ def export_meta(file, metas, tab_write, DEF_MAT_NAME):
write_object_material_interior(file, one_material, mob, tab_write) write_object_material_interior(file, one_material, mob, tab_write)
# write_object_material_interior(file, one_material, elems[1]) # write_object_material_interior(file, one_material, elems[1])
tab_write(file, "radiosity{importance %3g}\n" % mob.pov.importance_value) tab_write(file, "radiosity{importance %3g}\n" % mob.pov.importance_value)
tab_write(file, "}\n\n") # End of Metaball block
tab_write(file, "}\n\n") # End of Metaball block
''' '''

View File

@ -554,6 +554,7 @@ def write_pov(filename, scene=None, info_callback=None):
model_meta_topology.export_meta(file, model_meta_topology.export_meta(file,
[m for m in sel if m.type == 'META'], [m for m in sel if m.type == 'META'],
material_names_dictionary,
tab_write, tab_write,
DEF_MAT_NAME,) DEF_MAT_NAME,)