FBX IO: Speed up export by multithreading array compression #105018

Merged
Thomas Barlow merged 6 commits from Mysteryem/blender-addons:fbx_multithread_array_compression_pr into main 2024-01-12 21:39:20 +01:00
4 changed files with 92 additions and 29 deletions

io_scene_fbx/__init__.py

@@ -5,7 +5,7 @@
 bl_info = {
     "name": "FBX format",
     "author": "Campbell Barton, Bastien Montagne, Jens Restemeier, @Mysteryem",
-    "version": (5, 11, 3),
+    "version": (5, 11, 4),
     "blender": (4, 1, 0),
     "location": "File > Import-Export",
     "description": "FBX IO meshes, UVs, vertex colors, materials, textures, cameras, lamps and actions",

io_scene_fbx/encode_bin.py

@@ -4,10 +4,13 @@
 try:
     from . import data_types
+    from .fbx_utils_threading import MultiThreadedTaskConsumer
 except:
     import data_types
+    from fbx_utils_threading import MultiThreadedTaskConsumer
 
 from struct import pack
+from contextlib import contextmanager
 import array
 import numpy as np
 import zlib
@@ -51,6 +54,57 @@ class FBXElem:
         self._end_offset = -1
         self._props_length = -1
 
+    @classmethod
+    @contextmanager
+    def enable_multithreading_cm(cls):
+        """Temporarily enable multithreaded array compression.
+
+        The context manager handles starting up and shutting down the threads.
+
+        Only exits once all the threads are done (either all tasks were completed or an error occurred and the threads
+        were stopped prematurely).
+
+        Writing to a file is temporarily disabled as a safeguard."""
+        # __enter__()
+        orig_func = cls._add_compressed_array_helper
+        orig_write = cls._write
+
+        def insert_compressed_array(props, insert_at, data, length):
+            # zlib.compress releases the GIL, so can be multithreaded.
+            data = zlib.compress(data, 1)
+            comp_len = len(data)
+
+            encoding = 1
+            data = pack('<3I', length, encoding, comp_len) + data
+            props[insert_at] = data
+
+        with MultiThreadedTaskConsumer.new_cpu_bound_cm(insert_compressed_array) as wrapped_func:
+            try:
+                def _add_compressed_array_helper_multi(self, data, length):
+                    # Append a dummy value that will be replaced with the compressed array data later.
+                    self.props.append(...)
+                    # The index to insert the compressed array into.
+                    insert_at = len(self.props) - 1
+                    # Schedule the array to be compressed on a separate thread and then inserted into the hierarchy at
+                    # `insert_at`.
+                    wrapped_func(self.props, insert_at, data, length)
+
+                # As an extra safeguard, temporarily replace the `_write` function to raise an error if called.
+                def temp_write(*_args, **_kwargs):
+                    raise RuntimeError("Writing is not allowed until multithreaded array compression has been disabled")
+
+                cls._add_compressed_array_helper = _add_compressed_array_helper_multi
+                cls._write = temp_write
+
+                # Return control back to the caller of __enter__().
+                yield
+            finally:
+                # __exit__()
+                # Restore the original functions.
+                cls._add_compressed_array_helper = orig_func
+                cls._write = orig_write
+            # Exiting the MultiThreadedTaskConsumer context manager will wait for all scheduled tasks to complete.
+
     def add_bool(self, data):
         assert(isinstance(data, bool))
         data = pack('?', data)
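
`MultiThreadedTaskConsumer.new_cpu_bound_cm` comes from `fbx_utils_threading`, which is not part of this diff. As a rough mental model only, a minimal stand-in could look like the sketch below, built on `concurrent.futures`; the names `cpu_bound_task_consumer` and `schedule` are invented for illustration and the real class likely differs in detail.

```python
# Hypothetical stand-in for MultiThreadedTaskConsumer.new_cpu_bound_cm, for illustration only.
from concurrent.futures import ThreadPoolExecutor
from contextlib import contextmanager
import os


@contextmanager
def cpu_bound_task_consumer(func, max_workers=None):
    """Yield a callable that schedules func(*args) on a pool of worker threads.

    Exiting the context manager waits for every scheduled call to finish.
    """
    max_workers = max_workers or os.cpu_count() or 1
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = []

        def schedule(*args):
            futures.append(executor.submit(func, *args))

        try:
            yield schedule
        finally:
            # ThreadPoolExecutor already waits for pending tasks on shutdown; this
            # loop re-raises any exception a worker thread hit.
            for fut in futures:
                fut.result()
```

Whatever the real implementation looks like, the property `enable_multithreading_cm` relies on is the same: leaving the `with MultiThreadedTaskConsumer.new_cpu_bound_cm(...)` block only happens once every queued `insert_compressed_array` call has run, so every `...` placeholder has been replaced.
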
@@ -130,21 +184,26 @@ class FBXElem:
         self.props_type.append(data_types.STRING)
         self.props.append(data)
 
+    def _add_compressed_array_helper(self, data, length):
+        """Note: This function may be swapped out by enable_multithreading_cm with an equivalent that supports
+        multithreading."""
+        data = zlib.compress(data, 1)
+        comp_len = len(data)
+        encoding = 1
+        data = pack('<3I', length, encoding, comp_len) + data
+        self.props.append(data)
+
     def _add_array_helper(self, data, prop_type, length):
+        self.props_type.append(prop_type)
+
         # mimic behavior of fbxconverter (also common sense)
         # we could make this configurable.
         encoding = 0 if len(data) <= 128 else 1
         if encoding == 0:
-            pass
+            data = pack('<3I', length, encoding, len(data)) + data
+            self.props.append(data)
         elif encoding == 1:
-            data = zlib.compress(data, 1)
-
-        comp_len = len(data)
-
-        data = pack('<3I', length, encoding, comp_len) + data
-
-        self.props_type.append(prop_type)
-        self.props.append(data)
+            self._add_compressed_array_helper(data, length)
 
     def _add_parray_helper(self, data, array_type, prop_type):
         assert (isinstance(data, array.array))
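
Taken together, the two hunks above split array handling into "reserve a slot now, fill it with the packed, compressed bytes later". A self-contained sketch of that pattern (hypothetical example data, with a plain `ThreadPoolExecutor` standing in for the add-on's task consumer; zlib releasing the GIL during compression is what lets the threads actually run in parallel):

```python
from concurrent.futures import ThreadPoolExecutor
from struct import pack
import zlib

import numpy as np


def compress_into(props, insert_at, data, length):
    # zlib.compress drops the GIL while it works, so several of these calls can
    # run concurrently on different cores.
    compressed = zlib.compress(data, 1)
    props[insert_at] = pack('<3I', length, 1, len(compressed)) + compressed


arrays = [np.arange(1_000_000, dtype=np.float64) for _ in range(8)]  # made-up data
props = []
with ThreadPoolExecutor() as executor:
    for a in arrays:
        props.append(...)  # reserve the slot, like _add_compressed_array_helper_multi does
        executor.submit(compress_into, props, len(props) - 1, a.tobytes(), len(a))
# The executor has been shut down here, so every placeholder has been filled in.
assert not any(p is ... for p in props)
```
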

io_scene_fbx/export_fbx_bin.py

@@ -3495,31 +3495,35 @@ def save_single(operator, scene, depsgraph, filepath="",
     # Generate some data about exported scene...
     scene_data = fbx_data_from_scene(scene, depsgraph, settings)
 
-    root = elem_empty(None, b"")  # Root element has no id, as it is not saved per se!
+    # Enable multithreaded array compression in FBXElem and wait until all threads are done before exiting the context
+    # manager.
+    with encode_bin.FBXElem.enable_multithreading_cm():
+        # Writing elements into an FBX hierarchy can now begin.
+        root = elem_empty(None, b"")  # Root element has no id, as it is not saved per se!
 
-    # Mostly FBXHeaderExtension and GlobalSettings.
-    fbx_header_elements(root, scene_data)
+        # Mostly FBXHeaderExtension and GlobalSettings.
+        fbx_header_elements(root, scene_data)
 
-    # Documents and References are pretty much void currently.
-    fbx_documents_elements(root, scene_data)
-    fbx_references_elements(root, scene_data)
+        # Documents and References are pretty much void currently.
+        fbx_documents_elements(root, scene_data)
+        fbx_references_elements(root, scene_data)
 
-    # Templates definitions.
-    fbx_definitions_elements(root, scene_data)
+        # Templates definitions.
+        fbx_definitions_elements(root, scene_data)
 
-    # Actual data.
-    fbx_objects_elements(root, scene_data)
+        # Actual data.
+        fbx_objects_elements(root, scene_data)
 
-    # How data are inter-connected.
-    fbx_connections_elements(root, scene_data)
+        # How data are inter-connected.
+        fbx_connections_elements(root, scene_data)
 
-    # Animation.
-    fbx_takes_elements(root, scene_data)
+        # Animation.
+        fbx_takes_elements(root, scene_data)
 
-    # Cleanup!
-    fbx_scene_data_cleanup(scene_data)
+        # Cleanup!
+        fbx_scene_data_cleanup(scene_data)
 
-    # And we are down, we can write the whole thing!
+    # And we are done, all multithreaded tasks are complete, and we can write the whole thing to file!
     encode_bin.write(filepath, root, FBX_VERSION)
 
     # Clear cached ObjectWrappers!
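
For callers, the contract the hunk above follows boils down to: build the whole element hierarchy inside the context manager, and only write after it has exited. A condensed, hypothetical sketch (the direct `FBXElem(b"")` constructor call and the literal version `7400` are illustrative; the exporter goes through `elem_empty()` and `FBX_VERSION`):

```python
from io_scene_fbx import encode_bin  # assumes the add-on package is importable

with encode_bin.FBXElem.enable_multithreading_cm():
    root = encode_bin.FBXElem(b"")
    # ... build child elements; large arrays get queued for compression on worker threads ...
    # encode_bin.write("scene.fbx", root, 7400)  # would raise RuntimeError while compression is enabled
# All compression tasks have completed once the context manager exits.
encode_bin.write("scene.fbx", root, 7400)
```
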

io_scene_fbx/json2fbx.py

@@ -133,10 +133,10 @@ def json2fbx(fn):
     fn_fbx = "%s.fbx" % os.path.splitext(fn)[0]
     print("Writing: %r " % fn_fbx, end="")
 
     json_root = []
     with open(fn) as f_json:
         json_root = json.load(f_json)
-
-    fbx_root, fbx_version = parse_json(json_root)
+    with encode_bin.FBXElem.enable_multithreading_cm():
+        fbx_root, fbx_version = parse_json(json_root)
     print("(Version %d) ..." % fbx_version)
     encode_bin.write(fn_fbx, fbx_root, fbx_version)