FBX IO: Speed up export by multithreading array compression #105018

Merged
Thomas Barlow merged 6 commits from Mysteryem/blender-addons:fbx_multithread_array_compression_pr into main 2024-01-12 21:39:20 +01:00
4 changed files with 92 additions and 29 deletions

View File

@ -5,7 +5,7 @@
bl_info = { bl_info = {
"name": "FBX format", "name": "FBX format",
"author": "Campbell Barton, Bastien Montagne, Jens Restemeier, @Mysteryem", "author": "Campbell Barton, Bastien Montagne, Jens Restemeier, @Mysteryem",
"version": (5, 11, 3), "version": (5, 11, 4),
"blender": (4, 1, 0), "blender": (4, 1, 0),
"location": "File > Import-Export", "location": "File > Import-Export",
"description": "FBX IO meshes, UVs, vertex colors, materials, textures, cameras, lamps and actions", "description": "FBX IO meshes, UVs, vertex colors, materials, textures, cameras, lamps and actions",

View File

@ -4,10 +4,13 @@
try: try:
from . import data_types from . import data_types
from .fbx_utils_threading import MultiThreadedTaskConsumer
except: except:
import data_types import data_types
from fbx_utils_threading import MultiThreadedTaskConsumer
from struct import pack from struct import pack
from contextlib import contextmanager
import array import array
import numpy as np import numpy as np
import zlib import zlib
@ -51,6 +54,57 @@ class FBXElem:
self._end_offset = -1 self._end_offset = -1
self._props_length = -1 self._props_length = -1
@classmethod
@contextmanager
def enable_multithreading_cm(cls):
    """Temporarily enable multithreaded array compression.

    The context manager handles starting up and shutting down the threads.

    Only exits once all the threads are done (either all tasks were completed or an error occurred and the threads
    were stopped prematurely).

    Writing to a file is temporarily disabled as a safeguard."""
    # __enter__()
    # Keep references to the original class attributes so they can be restored in the `finally` block below.
    orig_func = cls._add_compressed_array_helper
    orig_write = cls._write

    def insert_compressed_array(props, insert_at, data, length):
        # Worker task: compress `data` and overwrite the placeholder at `props[insert_at]` with the finished
        # property bytes (header of element count, encoding flag and compressed byte length, then the payload).
        # zlib.compress releases the GIL, so can be multithreaded.
        data = zlib.compress(data, 1)
        comp_len = len(data)
        encoding = 1
        data = pack('<3I', length, encoding, comp_len) + data
        props[insert_at] = data

    with MultiThreadedTaskConsumer.new_cpu_bound_cm(insert_compressed_array) as wrapped_func:
        try:
            def _add_compressed_array_helper_multi(self, data, length):
                # Append a dummy value that will be replaced with the compressed array data later.
                self.props.append(...)
                # The index to insert the compressed array into.
                insert_at = len(self.props) - 1
                # Schedule the array to be compressed on a separate thread and then inserted into the hierarchy at
                # `insert_at`.
                wrapped_func(self.props, insert_at, data, length)

            # As an extra safeguard, temporarily replace the `_write` function to raise an error if called.
            def temp_write(*_args, **_kwargs):
                raise RuntimeError("Writing is not allowed until multithreaded array compression has been disabled")

            # Patch at class level so every FBXElem instance routes array compression to the worker threads.
            cls._add_compressed_array_helper = _add_compressed_array_helper_multi
            cls._write = temp_write

            # Return control back to the caller of __enter__().
            yield
        finally:
            # __exit__()
            # Restore the original functions.
            cls._add_compressed_array_helper = orig_func
            cls._write = orig_write
        # Exiting the MultiThreadedTaskConsumer context manager will wait for all scheduled tasks to complete.
def add_bool(self, data): def add_bool(self, data):
assert(isinstance(data, bool)) assert(isinstance(data, bool))
data = pack('?', data) data = pack('?', data)
@ -130,21 +184,26 @@ class FBXElem:
self.props_type.append(data_types.STRING) self.props_type.append(data_types.STRING)
self.props.append(data) self.props.append(data)
def _add_compressed_array_helper(self, data, length):
"""Note: This function may be swapped out by enable_multithreading_cm with an equivalent that supports
multithreading."""
data = zlib.compress(data, 1)
comp_len = len(data)
encoding = 1
data = pack('<3I', length, encoding, comp_len) + data
self.props.append(data)
def _add_array_helper(self, data, prop_type, length): def _add_array_helper(self, data, prop_type, length):
self.props_type.append(prop_type)
# mimic behavior of fbxconverter (also common sense) # mimic behavior of fbxconverter (also common sense)
# we could make this configurable. # we could make this configurable.
encoding = 0 if len(data) <= 128 else 1 encoding = 0 if len(data) <= 128 else 1
if encoding == 0: if encoding == 0:
pass data = pack('<3I', length, encoding, len(data)) + data
elif encoding == 1:
data = zlib.compress(data, 1)
comp_len = len(data)
data = pack('<3I', length, encoding, comp_len) + data
self.props_type.append(prop_type)
self.props.append(data) self.props.append(data)
elif encoding == 1:
self._add_compressed_array_helper(data, length)
def _add_parray_helper(self, data, array_type, prop_type): def _add_parray_helper(self, data, array_type, prop_type):
assert (isinstance(data, array.array)) assert (isinstance(data, array.array))

View File

@ -3495,6 +3495,10 @@ def save_single(operator, scene, depsgraph, filepath="",
# Generate some data about exported scene... # Generate some data about exported scene...
scene_data = fbx_data_from_scene(scene, depsgraph, settings) scene_data = fbx_data_from_scene(scene, depsgraph, settings)
# Enable multithreaded array compression in FBXElem and wait until all threads are done before exiting the context
# manager.
with encode_bin.FBXElem.enable_multithreading_cm():
# Writing elements into an FBX hierarchy can now begin.
root = elem_empty(None, b"") # Root element has no id, as it is not saved per se! root = elem_empty(None, b"") # Root element has no id, as it is not saved per se!
# Mostly FBXHeaderExtension and GlobalSettings. # Mostly FBXHeaderExtension and GlobalSettings.
@ -3519,7 +3523,7 @@ def save_single(operator, scene, depsgraph, filepath="",
# Cleanup! # Cleanup!
fbx_scene_data_cleanup(scene_data) fbx_scene_data_cleanup(scene_data)
# And we are down, we can write the whole thing! # And we are done, all multithreaded tasks are complete, and we can write the whole thing to file!
encode_bin.write(filepath, root, FBX_VERSION) encode_bin.write(filepath, root, FBX_VERSION)
# Clear cached ObjectWrappers! # Clear cached ObjectWrappers!

View File

@ -133,9 +133,9 @@ def json2fbx(fn):
fn_fbx = "%s.fbx" % os.path.splitext(fn)[0] fn_fbx = "%s.fbx" % os.path.splitext(fn)[0]
print("Writing: %r " % fn_fbx, end="") print("Writing: %r " % fn_fbx, end="")
json_root = []
with open(fn) as f_json: with open(fn) as f_json:
json_root = json.load(f_json) json_root = json.load(f_json)
with encode_bin.FBXElem.enable_multithreading_cm():
fbx_root, fbx_version = parse_json(json_root) fbx_root, fbx_version = parse_json(json_root)
print("(Version %d) ..." % fbx_version) print("(Version %d) ..." % fbx_version)
encode_bin.write(fn_fbx, fbx_root, fbx_version) encode_bin.write(fn_fbx, fbx_root, fbx_version)