FBX Import: Speed up geometry layers with foreach_set and numpy #104487

Merged
Bastien Montagne merged 1 commit from Mysteryem/blender-addons:fbx_import_geom_layers_foreach_pr into main 2023-04-03 14:54:56 +02:00
Showing only changes of commit 9859e253b5


@@ -785,87 +785,258 @@ def blen_read_geom_layerinfo(fbx_layer):
)
def blen_read_geom_array_setattr(generator, blen_data, blen_attr, fbx_data, stride, item_size, descr, xform):
"""Generic fbx_layer to blen_data setter, generator is expected to yield tuples (ble_idx, fbx_idx)."""
max_blen_idx = len(blen_data) - 1
max_fbx_idx = len(fbx_data) - 1
print_error = True
def blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size):
"""Validate blen_data when it's not a bpy_prop_collection.
Returns whether blen_data is a bpy_prop_collection"""
blen_data_is_collection = isinstance(blen_data, bpy.types.bpy_prop_collection)
if not blen_data_is_collection:
if item_size > 1:
assert(len(blen_data.shape) == 2)
assert(blen_data.shape[1] == item_size)
assert(blen_data.dtype == blen_dtype)
return blen_data_is_collection
def check_skip(blen_idx, fbx_idx):
nonlocal print_error
if fbx_idx < 0: # Negative values mean 'skip'.
return True
if blen_idx > max_blen_idx:
if print_error:
print("ERROR: too much data in this Blender layer, compared to elements in mesh, skipping!")
print_error = False
return True
if fbx_idx + item_size - 1 > max_fbx_idx:
if print_error:
print("ERROR: not enough data in this FBX layer, skipping!")
print_error = False
return True
return False
def blen_read_geom_parse_fbx_data(fbx_data, stride, item_size):
"""Parse fbx_data as an array.array into a 2d np.ndarray that shares the same memory, where each row is a single
item"""
# Technically stride < item_size could be supported, but there's probably not a use case for it since it would
# result in a view of the data with self-overlapping memory.
assert(stride >= item_size)
# View the array.array as an np.ndarray.
fbx_data_np = parray_as_ndarray(fbx_data)
if stride == item_size:
if item_size > 1:
# Need to make sure fbx_data_np has a whole number of items to be able to view item_size elements per row.
items_remainder = len(fbx_data_np) % item_size
if items_remainder:
print("ERROR: not a whole number of items in this FBX layer, skipping the partial item!")
fbx_data_np = fbx_data_np[:-items_remainder]
fbx_data_np = fbx_data_np.reshape(-1, item_size)
else:
# Create a view of fbx_data_np that is only the first item_size elements of each stride. Note that the view will
# not be C-contiguous.
stride_remainder = len(fbx_data_np) % stride
if stride_remainder:
if stride_remainder < item_size:
print("ERROR: not a whole number of items in this FBX layer, skipping the partial item!")
# Not enough in the remainder for a full item, so cut off the partial stride
fbx_data_np = fbx_data_np[:-stride_remainder]
# Reshape to one stride per row and then create a view that includes only the first item_size elements
# of each stride.
fbx_data_np = fbx_data_np.reshape(-1, stride)[:, :item_size]
else:
print("ERROR: not a whole number of strides in this FBX layer! There are a whole number of items, but"
" this could indicate an error!")
# There is not a whole number of strides, but there is a whole number of items.
# This is a pain to deal with because fbx_data_np.reshape(-1, stride) is not possible.
# A view of just the items can be created using stride_tricks.as_strided by specifying the shape and
# strides of the view manually.
# Extreme care must be taken when using stride_tricks.as_strided because improper usage can result in
# a view that gives access to memory outside the array.
from numpy.lib import stride_tricks
# fbx_data_np should always start off as flat and C-contiguous.
assert(fbx_data_np.strides == (fbx_data_np.itemsize,))
num_whole_strides = len(fbx_data_np) // stride
# Plus the one partial stride that is enough elements for a complete item.
num_items = num_whole_strides + 1
shape = (num_items, item_size)
# strides are the number of bytes to step to get to the next element, for each axis.
step_per_item = fbx_data_np.itemsize * stride
step_per_item_element = fbx_data_np.itemsize
strides = (step_per_item, step_per_item_element)
fbx_data_np = stride_tricks.as_strided(fbx_data_np, shape, strides)
else:
# There's a whole number of strides, so first reshape to one stride per row and then create a view that
# includes only the first item_size elements of each stride.
fbx_data_np = fbx_data_np.reshape(-1, stride)[:, :item_size]
return fbx_data_np
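Note: the key idea above is viewing the flat FBX array as rows of item_size elements without copying, even when stride is larger than item_size. A minimal standalone sketch of the same stride handling, using np.frombuffer as a stand-in for the add-on's parray_as_ndarray helper and skipping the partial-stride special case:

```python
import array
import numpy as np
from numpy.lib import stride_tricks

def parse_flat_data(fbx_data, stride, item_size):
    """View a flat array.array as a (num_items, item_size) ndarray without copying."""
    assert stride >= item_size
    data = np.frombuffer(fbx_data, dtype=fbx_data.typecode)
    if stride == item_size:
        # Drop any trailing partial item, then reshape to one item per row.
        remainder = len(data) % item_size
        if remainder:
            data = data[:-remainder]
        return data.reshape(-1, item_size)
    # stride > item_size: view only the first item_size elements of each stride.
    shape = (len(data) // stride, item_size)
    strides = (data.itemsize * stride, data.itemsize)
    return stride_tricks.as_strided(data, shape, strides)

# Example: (x, y, z, w) tuples where only (x, y, z) is wanted.
raw = array.array('d', [1, 2, 3, 1,  4, 5, 6, 1])
print(parse_flat_data(raw, stride=4, item_size=3))
# [[1. 2. 3.]
#  [4. 5. 6.]]
```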
def blen_read_geom_check_fbx_data_length(blen_data, fbx_data_np, is_indices=False):
"""Check that there are the same number of items in blen_data and fbx_data_np.
Returns a tuple of two elements:
0: fbx_data_np or, if fbx_data_np contains more items than blen_data, a view of fbx_data_np with the excess
items removed
1: Whether the returned fbx_data_np contains enough items to completely fill blen_data"""
bl_num_items = len(blen_data)
fbx_num_items = len(fbx_data_np)
enough_data = fbx_num_items >= bl_num_items
if not enough_data:
if is_indices:
print("ERROR: not enough indices in this FBX layer, missing data will be left as default!")
else:
print("ERROR: not enough data in this FBX layer, missing data will be left as default!")
elif fbx_num_items > bl_num_items:
if is_indices:
print("ERROR: too many indices in this FBX layer, skipping excess!")
else:
print("ERROR: too much data in this FBX layer, skipping excess!")
fbx_data_np = fbx_data_np[:bl_num_items]
return fbx_data_np, enough_data
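Note: trimming the excess relies on basic slicing returning a view, so cutting off extra items costs nothing; only the "not enough data" case needs extra handling later. A tiny illustration with made-up names:

```python
import numpy as np

blen_len = 4                              # e.g. number of mesh elements
fbx_items = np.arange(12).reshape(6, 2)   # 6 FBX items of size 2
enough_data = len(fbx_items) >= blen_len
fbx_items = fbx_items[:blen_len]          # a view, no copy is made
print(enough_data, fbx_items.shape)       # True (4, 2)
```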
def blen_read_geom_xform(fbx_data_np, xform):
"""xform is either None, or a function that takes fbx_data_np as its only positional argument and returns an
np.ndarray with the same total number of elements as fbx_data_np.
It is acceptable for xform to return an array with a different dtype to fbx_data_np.
Returns xform(fbx_data_np) when xform is not None and ensures the result of xform(fbx_data_np) has the same shape as
fbx_data_np before returning it.
When xform is None, fbx_data_np is returned as is."""
if xform is not None:
if isinstance(blen_data, list):
if item_size == 1:
def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
blen_data[blen_idx] = xform(fbx_data[fbx_idx])
item_size = fbx_data_np.shape[1]
fbx_total_data = fbx_data_np.size
fbx_data_np = xform(fbx_data_np)
# The amount of data should not be changed by xform
assert(fbx_data_np.size == fbx_total_data)
# Ensure fbx_data_np is still item_size elements per row
if len(fbx_data_np.shape) != 2 or fbx_data_np.shape[1] != item_size:
fbx_data_np = fbx_data_np.reshape(-1, item_size)
return fbx_data_np
def blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_data, stride, item_size, descr,
xform):
"""Generic fbx_layer to blen_data foreach setter for Direct layers.
blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
fbx_data must be an array.array."""
fbx_data_np = blen_read_geom_parse_fbx_data(fbx_data, stride, item_size)
fbx_data_np, enough_data = blen_read_geom_check_fbx_data_length(blen_data, fbx_data_np)
fbx_data_np = blen_read_geom_xform(fbx_data_np, xform)
blen_data_is_collection = blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size)
if blen_data_is_collection:
if not enough_data:
blen_total_data = len(blen_data) * item_size
buffer = np.empty(blen_total_data, dtype=blen_dtype)
# It's not clear what values should be used for the missing data, so read the current values into a buffer.
blen_data.foreach_get(blen_attr, buffer)
# Change the buffer shape to one item per row
buffer.shape = (-1, item_size)
# Copy the fbx data into the start of the buffer
buffer[:len(fbx_data_np)] = fbx_data_np
else:
def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
blen_data[blen_idx] = xform(fbx_data[fbx_idx:fbx_idx + item_size])
# Convert the buffer to the Blender C type of blen_attr
buffer = astype_view_signedness(fbx_data_np, blen_dtype)
# Set blen_attr of blen_data. The buffer must be flat and C-contiguous, which ravel() ensures
blen_data.foreach_set(blen_attr, buffer.ravel())
else:
if item_size == 1:
def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
setattr(blen_data[blen_idx], blen_attr, xform(fbx_data[fbx_idx]))
assert(blen_data.size % item_size == 0)
blen_data = blen_data.view()
blen_data.shape = (-1, item_size)
blen_data[:len(fbx_data_np)] = fbx_data_np
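Note: the speed-up comes from bpy_prop_collection.foreach_get/foreach_set, which move a whole attribute between Blender and a flat buffer in one call instead of a Python loop. A minimal sketch of that pattern, assuming it runs inside Blender on an existing mesh named "Cube":

```python
import bpy
import numpy as np

mesh = bpy.data.meshes["Cube"]

# Read every vertex coordinate in one call into a flat float32 buffer...
co = np.empty(len(mesh.vertices) * 3, dtype=np.single)
mesh.vertices.foreach_get("co", co)

# ...work on it as an (N, 3) array...
co.shape = (-1, 3)
co[:, 2] += 1.0  # move all vertices up by one unit

# ...and write it back in one call; the buffer must be flat and C-contiguous.
mesh.vertices.foreach_set("co", co.ravel())
mesh.update()
```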
def blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_data, fbx_layer_index, stride,
item_size, descr, xform):
"""Generic fbx_layer to blen_data foreach setter for IndexToDirect layers.
blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
fbx_data must be an array.array or a 1d np.ndarray."""
fbx_data_np = blen_read_geom_parse_fbx_data(fbx_data, stride, item_size)
fbx_data_np = blen_read_geom_xform(fbx_data_np, xform)
# fbx_layer_index is allowed to be a 1d np.ndarray for use with blen_read_geom_array_foreach_set_looptovert.
if not isinstance(fbx_layer_index, np.ndarray):
fbx_layer_index = parray_as_ndarray(fbx_layer_index)
fbx_layer_index, enough_indices = blen_read_geom_check_fbx_data_length(blen_data, fbx_layer_index, is_indices=True)
blen_data_is_collection = blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size)
blen_data_items_len = len(blen_data)
blen_data_len = blen_data_items_len * item_size
fbx_num_items = len(fbx_data_np)
# Find all indices that are out of bounds of fbx_data_np.
min_index_inclusive = -fbx_num_items
max_index_inclusive = fbx_num_items - 1
valid_index_mask = np.equal(fbx_layer_index, fbx_layer_index.clip(min_index_inclusive, max_index_inclusive))
indices_invalid = not valid_index_mask.all()
fbx_data_items = fbx_data_np.reshape(-1, item_size)
if indices_invalid or not enough_indices:
if blen_data_is_collection:
buffer = np.empty(blen_data_len, dtype=blen_dtype)
buffer_item_view = buffer.view()
buffer_item_view.shape = (-1, item_size)
# Since we don't know what the default values should be for the missing data, read the current values into a
# buffer.
blen_data.foreach_get(blen_attr, buffer)
else:
def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
setattr(blen_data[blen_idx], blen_attr, xform(fbx_data[fbx_idx:fbx_idx + item_size]))
buffer_item_view = blen_data
if not enough_indices:
# Reduce the length of the view to the same length as the number of indices.
buffer_item_view = buffer_item_view[:len(fbx_layer_index)]
# Copy the result of indexing fbx_data_items by each element in fbx_layer_index into the buffer.
if indices_invalid:
print("ERROR: indices in this FBX layer out of bounds of the FBX data, skipping invalid indices!")
buffer_item_view[valid_index_mask] = fbx_data_items[fbx_layer_index[valid_index_mask]]
else:
if isinstance(blen_data, list):
if item_size == 1:
def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
blen_data[blen_idx] = fbx_data[fbx_idx]
buffer_item_view[:] = fbx_data_items[fbx_layer_index]
if blen_data_is_collection:
blen_data.foreach_set(blen_attr, buffer.ravel())
else:
def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
blen_data[blen_idx] = fbx_data[fbx_idx:fbx_idx + item_size]
if blen_data_is_collection:
# Cast the buffer to the Blender C type of blen_attr
fbx_data_items = astype_view_signedness(fbx_data_items, blen_dtype)
buffer_items = fbx_data_items[fbx_layer_index]
blen_data.foreach_set(blen_attr, buffer_items.ravel())
else:
if item_size == 1:
def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
setattr(blen_data[blen_idx], blen_attr, fbx_data[fbx_idx])
blen_data[:] = fbx_data_items[fbx_layer_index]
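Note: out-of-bounds layer indices are detected above by comparing each index against its clipped value, and only the valid positions are written. The same masking in plain NumPy, with made-up values:

```python
import numpy as np

fbx_items = np.array([[0.0, 0.0], [0.25, 0.5], [1.0, 1.0]])  # 3 items of size 2
indices = np.array([0, 2, 7, 1, -9])                          # 7 and -9 are out of bounds

# An index is valid if clipping it to the allowed range leaves it unchanged.
valid_mask = indices == indices.clip(-len(fbx_items), len(fbx_items) - 1)

# Gather into the destination only where the index is valid; invalid rows keep
# whatever default value they already hold.
dest = np.zeros((len(indices), 2))
dest[valid_mask] = fbx_items[indices[valid_mask]]
print(dest)
```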
def blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_data, stride, item_size, descr,
xform):
"""Generic fbx_layer to blen_data foreach setter for AllSame layers.
blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
fbx_data must be an array.array."""
fbx_data_np = blen_read_geom_parse_fbx_data(fbx_data, stride, item_size)
fbx_data_np = blen_read_geom_xform(fbx_data_np, xform)
blen_data_is_collection = blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size)
fbx_items_len = len(fbx_data_np)
blen_items_len = len(blen_data)
if fbx_items_len < 1:
print("ERROR: not enough data in this FBX layer, skipping!")
return
if blen_data_is_collection:
# Create an array filled with the value from fbx_data_np
buffer = np.full((blen_items_len, item_size), fbx_data_np[0], dtype=blen_dtype)
blen_data.foreach_set(blen_attr, buffer.ravel())
else:
def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
setattr(blen_data[blen_idx], blen_attr, fbx_data[fbx_idx:fbx_idx + item_size])
for blen_idx, fbx_idx in generator:
if check_skip(blen_idx, fbx_idx):
continue
_process(blen_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx)
blen_data[:] = fbx_data_np[0]
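Note: for AllSame layers the single FBX item only needs to be broadcast across every element; np.full accepts an array-like fill value and does that in one step. A short illustration:

```python
import numpy as np

first_item = np.array([0.8, 0.8, 0.8, 1.0])             # e.g. one RGBA colour
buffer = np.full((5, 4), first_item, dtype=np.single)   # one row per element
print(buffer.shape)  # (5, 4) - ready for blen_data.foreach_set(attr, buffer.ravel())
```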
# generic generators.
def blen_read_geom_array_gen_allsame(data_len):
return zip(*(range(data_len), (0,) * data_len))
def blen_read_geom_array_gen_direct(fbx_data, stride):
fbx_data_len = len(fbx_data)
return zip(*(range(fbx_data_len // stride), range(0, fbx_data_len, stride)))
def blen_read_geom_array_gen_indextodirect(fbx_layer_index, stride):
return ((bi, fi * stride) for bi, fi in enumerate(fbx_layer_index))
def blen_read_geom_array_gen_direct_looptovert(mesh, fbx_data, stride):
fbx_data_len = len(fbx_data) // stride
loops = mesh.loops
for p in mesh.polygons:
for lidx in p.loop_indices:
vidx = loops[lidx].vertex_index
if vidx < fbx_data_len:
yield lidx, vidx * stride
def blen_read_geom_array_foreach_set_looptovert(mesh, blen_data, blen_attr, blen_dtype, fbx_data, stride, item_size,
descr, xform):
"""Generic fbx_layer to blen_data foreach setter for polyloop ByVertice layers.
blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
fbx_data must be an array.array"""
# The fbx_data is mapped to vertices. To expand fbx_data to polygon loops, get an array of the vertex index of each
# polygon loop that will then be used to index fbx_data
loop_vertex_indices = np.empty(len(mesh.loops), dtype=np.uintc)
mesh.loops.foreach_get("vertex_index", loop_vertex_indices)
blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_data, loop_vertex_indices, stride,
item_size, descr, xform)
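Note: expanding per-vertex FBX data to per-loop data is a fancy-indexing gather through each loop's vertex_index. A sketch, assuming it runs inside Blender on a mesh named "Cube" with dummy per-vertex values:

```python
import bpy
import numpy as np

mesh = bpy.data.meshes["Cube"]
per_vertex = np.random.rand(len(mesh.vertices), 3).astype(np.single)  # dummy data

loop_vertex_indices = np.empty(len(mesh.loops), dtype=np.uintc)
mesh.loops.foreach_get("vertex_index", loop_vertex_indices)

# Indexing by the loop->vertex map turns (num_verts, 3) into (num_loops, 3).
per_loop = per_vertex[loop_vertex_indices]
print(per_loop.shape)
```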
# generic error printers.
@@ -880,7 +1051,7 @@ def blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet=False):
def blen_read_geom_array_mapped_vert(
mesh, blen_data, blen_attr,
mesh, blen_data, blen_attr, blen_dtype,
fbx_layer_data, fbx_layer_index,
fbx_layer_mapping, fbx_layer_ref,
stride, item_size, descr,
@@ -889,15 +1060,15 @@ def blen_read_geom_array_mapped_vert(
if fbx_layer_mapping == b'ByVertice':
if fbx_layer_ref == b'Direct':
assert(fbx_layer_index is None)
blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride),
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
descr, xform)
return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
elif fbx_layer_mapping == b'AllSame':
if fbx_layer_ref == b'IndexToDirect':
assert(fbx_layer_index is None)
blen_read_geom_array_setattr(blen_read_geom_array_gen_allsame(len(blen_data)),
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
item_size, descr, xform)
return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
else:
@@ -907,7 +1078,7 @@ def blen_read_geom_array_mapped_vert(
def blen_read_geom_array_mapped_edge(
mesh, blen_data, blen_attr,
mesh, blen_data, blen_attr, blen_dtype,
fbx_layer_data, fbx_layer_index,
fbx_layer_mapping, fbx_layer_ref,
stride, item_size, descr,
@@ -915,15 +1086,15 @@ def blen_read_geom_array_mapped_edge(
):
if fbx_layer_mapping == b'ByEdge':
if fbx_layer_ref == b'Direct':
blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride),
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
descr, xform)
return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
elif fbx_layer_mapping == b'AllSame':
if fbx_layer_ref == b'IndexToDirect':
assert(fbx_layer_index is None)
blen_read_geom_array_setattr(blen_read_geom_array_gen_allsame(len(blen_data)),
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
item_size, descr, xform)
return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
else:
@@ -933,7 +1104,7 @@ def blen_read_geom_array_mapped_edge(
def blen_read_geom_array_mapped_polygon(
mesh, blen_data, blen_attr,
mesh, blen_data, blen_attr, blen_dtype,
fbx_layer_data, fbx_layer_index,
fbx_layer_mapping, fbx_layer_ref,
stride, item_size, descr,
@@ -945,22 +1116,22 @@ def blen_read_geom_array_mapped_polygon(
# We fallback to 'Direct' mapping in this case.
#~ assert(fbx_layer_index is not None)
if fbx_layer_index is None:
blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride),
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
item_size, descr, xform)
else:
blen_read_geom_array_setattr(blen_read_geom_array_gen_indextodirect(fbx_layer_index, stride),
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_layer_data,
fbx_layer_index, stride, item_size, descr, xform)
return True
elif fbx_layer_ref == b'Direct':
blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride),
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
descr, xform)
return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
elif fbx_layer_mapping == b'AllSame':
if fbx_layer_ref == b'IndexToDirect':
assert(fbx_layer_index is None)
blen_read_geom_array_setattr(blen_read_geom_array_gen_allsame(len(blen_data)),
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
item_size, descr, xform)
return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
else:
@@ -970,7 +1141,7 @@ def blen_read_geom_array_mapped_polygon(
def blen_read_geom_array_mapped_polyloop(
mesh, blen_data, blen_attr,
mesh, blen_data, blen_attr, blen_dtype,
fbx_layer_data, fbx_layer_index,
fbx_layer_mapping, fbx_layer_ref,
stride, item_size, descr,
@@ -982,29 +1153,29 @@ def blen_read_geom_array_mapped_polyloop(
# We fallback to 'Direct' mapping in this case.
#~ assert(fbx_layer_index is not None)
if fbx_layer_index is None:
blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride),
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
item_size, descr, xform)
else:
blen_read_geom_array_setattr(blen_read_geom_array_gen_indextodirect(fbx_layer_index, stride),
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_layer_data,
fbx_layer_index, stride, item_size, descr, xform)
return True
elif fbx_layer_ref == b'Direct':
blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride),
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
descr, xform)
return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
elif fbx_layer_mapping == b'ByVertice':
if fbx_layer_ref == b'Direct':
assert(fbx_layer_index is None)
blen_read_geom_array_setattr(blen_read_geom_array_gen_direct_looptovert(mesh, fbx_layer_data, stride),
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
blen_read_geom_array_foreach_set_looptovert(mesh, blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
item_size, descr, xform)
return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
elif fbx_layer_mapping == b'AllSame':
if fbx_layer_ref == b'IndexToDirect':
assert(fbx_layer_index is None)
blen_read_geom_array_setattr(blen_read_geom_array_gen_allsame(len(blen_data)),
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
item_size, descr, xform)
return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
else:
@@ -1029,7 +1200,7 @@ def blen_read_geom_layer_material(fbx_obj, mesh):
blen_data = mesh.polygons
blen_read_geom_array_mapped_polygon(
mesh, blen_data, "material_index",
mesh, blen_data, "material_index", np.uintc,
fbx_layer_data, None,
fbx_layer_mapping, fbx_layer_ref,
1, 1, layer_id,
@@ -1063,7 +1234,7 @@ def blen_read_geom_layer_uv(fbx_obj, mesh):
continue
blen_read_geom_array_mapped_polyloop(
mesh, blen_data, "uv",
mesh, blen_data, "uv", np.single,
fbx_layer_data, fbx_layer_index,
fbx_layer_mapping, fbx_layer_ref,
2, 2, layer_id,
@@ -1103,7 +1274,7 @@ def blen_read_geom_layer_color(fbx_obj, mesh, colors_type):
continue
blen_read_geom_array_mapped_polyloop(
mesh, blen_data, color_prop_name,
mesh, blen_data, color_prop_name, np.single,
fbx_layer_data, fbx_layer_index,
fbx_layer_mapping, fbx_layer_ref,
4, 4, layer_id,
@@ -1137,11 +1308,11 @@ def blen_read_geom_layer_smooth(fbx_obj, mesh):
blen_data = mesh.edges
blen_read_geom_array_mapped_edge(
mesh, blen_data, "use_edge_sharp",
mesh, blen_data, "use_edge_sharp", bool,
fbx_layer_data, None,
fbx_layer_mapping, fbx_layer_ref,
1, 1, layer_id,
xform=lambda s: not s,
xform=np.logical_not,
)
# We only set sharp edges here, not face smoothing itself...
mesh.use_auto_smooth = True
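Note: the xform now receives the whole layer as an array, so the scalar lambda s: not s is replaced by the ufunc np.logical_not. A quick check of the difference:

```python
import numpy as np

smoothing = np.array([1, 1, 0, 1])
print(np.logical_not(smoothing))  # [False False  True  True] -> use_edge_sharp values
# (lambda s: not s)(smoothing) would raise "truth value of an array is ambiguous".
```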
@@ -1149,7 +1320,7 @@ def blen_read_geom_layer_smooth(fbx_obj, mesh):
elif fbx_layer_mapping == b'ByPolygon':
blen_data = mesh.polygons
return blen_read_geom_array_mapped_polygon(
mesh, blen_data, "use_smooth",
mesh, blen_data, "use_smooth", bool,
fbx_layer_data, None,
fbx_layer_mapping, fbx_layer_ref,
1, 1, layer_id,
@@ -1160,8 +1331,6 @@ def blen_read_geom_layer_smooth(fbx_obj, mesh):
return False
def blen_read_geom_layer_edge_crease(fbx_obj, mesh):
from math import sqrt
fbx_layer = elem_find_first(fbx_obj, b'LayerElementEdgeCrease')
if fbx_layer is None:
@@ -1192,13 +1361,13 @@ def blen_read_geom_layer_edge_crease(fbx_obj, mesh):
blen_data = mesh.edges
return blen_read_geom_array_mapped_edge(
mesh, blen_data, "crease",
mesh, blen_data, "crease", np.single,
fbx_layer_data, None,
fbx_layer_mapping, fbx_layer_ref,
1, 1, layer_id,
# Blender squares those values before sending them to OpenSubdiv, when other software don't,
# so we need to compensate that to get similar results through FBX...
xform=sqrt,
xform=np.sqrt,
)
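Note: since Blender squares crease values before passing them to OpenSubdiv, storing sqrt(c) makes the squared result match the FBX value again; np.sqrt applies this to the whole layer at once. A quick numeric check:

```python
import numpy as np

fbx_crease = np.array([0.0, 0.25, 1.0])
stored = np.sqrt(fbx_crease)   # what gets written into the "crease" attribute
print(stored ** 2)             # [0.   0.25 1.  ] - matches the FBX values again
```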
else:
print("warning layer %r mapping type unsupported: %r" % (fbx_layer.id, fbx_layer_mapping))
@@ -1223,22 +1392,28 @@ def blen_read_geom_layer_normal(fbx_obj, mesh, xform=None):
print("warning %r %r missing data" % (layer_id, fbx_layer_name))
return False
# try loops, then vertices.
# Normals are temporarily set here so that they can be retrieved again after a call to Mesh.validate().
bl_norm_dtype = np.single
item_size = 3
# try loops, then polygons, then vertices.
tries = ((mesh.loops, "Loops", False, blen_read_geom_array_mapped_polyloop),
(mesh.polygons, "Polygons", True, blen_read_geom_array_mapped_polygon),
(mesh.vertices, "Vertices", True, blen_read_geom_array_mapped_vert))
for blen_data, blen_data_type, is_fake, func in tries:
bdata = [None] * len(blen_data) if is_fake else blen_data
if func(mesh, bdata, "normal",
fbx_layer_data, fbx_layer_index, fbx_layer_mapping, fbx_layer_ref, 3, 3, layer_id, xform, True):
bdata = np.zeros((len(blen_data), item_size), dtype=bl_norm_dtype) if is_fake else blen_data
if func(mesh, bdata, "normal", bl_norm_dtype,
fbx_layer_data, fbx_layer_index, fbx_layer_mapping, fbx_layer_ref, 3, item_size, layer_id, xform, True):
if blen_data_type == "Polygons":
for pidx, p in enumerate(mesh.polygons):
for lidx in range(p.loop_start, p.loop_start + p.loop_total):
mesh.loops[lidx].normal[:] = bdata[pidx]
# To expand to per-loop normals, repeat each per-polygon normal by the number of loops of each polygon.
poly_loop_totals = np.empty(len(mesh.polygons), dtype=np.uintc)
mesh.polygons.foreach_get("loop_total", poly_loop_totals)
loop_normals = np.repeat(bdata, poly_loop_totals, axis=0)
mesh.loops.foreach_set("normal", loop_normals.ravel())
elif blen_data_type == "Vertices":
# We have to copy vnors to lnors! Far from elegant, but simple.
for l in mesh.loops:
l.normal[:] = bdata[l.vertex_index]
loop_vertex_indices = np.empty(len(mesh.loops), dtype=np.uintc)
mesh.loops.foreach_get("vertex_index", loop_vertex_indices)
mesh.loops.foreach_set("normal", bdata[loop_vertex_indices].ravel())
return True
blen_read_geom_array_error_mapping("normal", fbx_layer_mapping)
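Note: the per-polygon case above uses np.repeat so that each polygon's normal appears once per loop of that polygon, replacing the nested Python loops. A standalone sketch, assuming Blender and a mesh named "Cube" with dummy per-polygon normals:

```python
import bpy
import numpy as np

mesh = bpy.data.meshes["Cube"]
per_poly = np.random.rand(len(mesh.polygons), 3).astype(np.single)  # dummy normals

loop_totals = np.empty(len(mesh.polygons), dtype=np.uintc)
mesh.polygons.foreach_get("loop_total", loop_totals)

# Repeat each polygon's row by that polygon's loop count -> one row per loop.
per_loop = np.repeat(per_poly, loop_totals, axis=0)
assert len(per_loop) == len(mesh.loops)
```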
@@ -1348,9 +1523,8 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):
if geom_mat_no is None:
ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh)
else:
def nortrans(v):
return geom_mat_no @ Vector(v)
ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh, nortrans)
ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh,
lambda v_array: nors_transformed(v_array, geom_mat_no))
mesh.validate(clean_customdata=False) # *Very* important to not remove lnors here!
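Note: the per-vector geom_mat_no @ Vector(v) callback is replaced by a whole-array transform via the add-on's nors_transformed helper. A rough plain-NumPy equivalent of transforming every normal at once (the 3x3 matrix here is made up, and any renormalisation the helper may perform is omitted):

```python
import numpy as np

geom_mat_no = np.array([[0.0, -1.0, 0.0],
                        [1.0,  0.0, 0.0],
                        [0.0,  0.0, 1.0]])   # e.g. a 90-degree rotation about Z
normals = np.array([[1.0, 0.0, 0.0],
                    [0.0, 0.0, 1.0]])

# Applying (mat @ v) to every row v is the same as one matrix product: v_array @ mat.T
transformed = normals @ geom_mat_no.T
print(transformed)
```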