Asset Pipeline: Improve Performance #235

Merged
2 changed files with 14 additions and 2 deletions
Showing only changes of commit fd0e3f1b79

File 1 of 2

@@ -20,7 +20,7 @@
 import logging
 from typing import List, Tuple
-from . import prefs
+from . import prefs, constants
 def get_logger(name="asset_pipeline"):
@@ -59,6 +59,8 @@ PROFILE_KEYS = {
     "TOTAL": "Total time to sync this direction",
 }
+TD_KEYS = [type for type in constants.TRANSFER_DATA_TYPES]
+INFO_KEYS = ["TOTAL"]  # Profile Keys to print in the logger's info mode
 _profiler_instance = None
@@ -98,6 +100,13 @@ class Profiler:
             return
         for key, value in profiles.items():
             seconds = self.get_non_scientific_number(value)
+            # Special case for transfer data keys
+            if key in TD_KEYS:
+                name = constants.TRANSFER_DATA_TYPES[key][0]
+                self._logger.debug(
+                    f"{direction} TD: {name.upper()} - {seconds} seconds to transfer {name} data for all objects"
+                )
+                continue
             msg = f"{direction} {key} - {seconds} seconds {PROFILE_KEYS[key]}"
             if key in INFO_KEYS:
                 self._logger.info(msg)
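
For readers skimming the diff, here is how the new profiler pieces fit together as a self-contained sketch. Only get_profiler(), Profiler.add(), the TD_KEYS/INFO_KEYS globals, and the logging loop appear in this commit; the shape of constants.TRANSFER_DATA_TYPES, the method name log_profiles, and the rounding stand-in for get_non_scientific_number() are assumptions for illustration, not the real module.

import logging as py_logging

TRANSFER_DATA_TYPES = {
    "MODIFIER": ("modifier",),      # assumed shape: key -> (name, ...)
    "CONSTRAINT": ("constraint",),  # assumed entries, for illustration only
}
TD_KEYS = [key for key in TRANSFER_DATA_TYPES]
PROFILE_KEYS = {"TOTAL": "Total time to sync this direction"}
INFO_KEYS = ["TOTAL"]  # Profile Keys to print in the logger's info mode

class Profiler:
    def __init__(self):
        self._profiles = {}
        self._logger = py_logging.getLogger("asset_pipeline")

    def add(self, seconds: float, key: str) -> None:
        # Accumulate so repeated calls for the same key sum up.
        self._profiles[key] = self._profiles.get(key, 0.0) + seconds

    def log_profiles(self, direction: str) -> None:
        for key, value in self._profiles.items():
            seconds = round(value, 4)  # stand-in for get_non_scientific_number()
            # Special case for transfer data keys
            if key in TD_KEYS:
                name = TRANSFER_DATA_TYPES[key][0]
                self._logger.debug(
                    f"{direction} TD: {name.upper()} - {seconds} seconds to transfer {name} data for all objects"
                )
                continue
            msg = f"{direction} {key} - {seconds} seconds {PROFILE_KEYS[key]}"
            if key in INFO_KEYS:
                self._logger.info(msg)

_profiler_instance = None

def get_profiler() -> "Profiler":
    # Lazily created module-level singleton, matching _profiler_instance above.
    global _profiler_instance
    if _profiler_instance is None:
        _profiler_instance = Profiler()
    return _profiler_instance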

File 2 of 2

@@ -1,5 +1,5 @@
 import bpy
+import time
 from .transfer_functions import (
     attributes,
     constraints,
@@ -200,6 +200,7 @@ def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None:
         transfer_data_map: Map generated by class AssetTransferMapping
     """
     # Create/isolate tmp collection to reduce depsgraph update time
+    profiler = logging.get_profiler()
     td_col = bpy.data.collections.new("ISO_COL_TEMP")
     with isolate_collection(td_col):
         # Loop over objects in Transfer data map
@@ -208,7 +209,9 @@ def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None:
             td_types = transfer_data_map[source_obj]["td_types"]
             with link_objs_to_collection(set([target_obj, source_obj]), td_col):
                 for td_type_key, td_dicts in td_types.items():
+                    start_time = time.time()
                     apply_transfer_data_items(
                         context, source_obj, target_obj, td_type_key, td_dicts
                     )
+                    profiler.add(time.time() - start_time, td_type_key)
     bpy.data.collections.remove(td_col)
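
And a standalone illustration of the timing pattern this hunk adds, using the sketch profiler above in place of the real logging module. apply_transfer_data_items is replaced by a stub, and the "PUSH" direction string is an assumption; the commit only shows that a direction label is passed through to the logger.

import time

def apply_transfer_data_items_stub(td_type_key):
    time.sleep(0.01)  # stand-in for the real per-type transfer work

profiler = get_profiler()
total_start = time.time()
for td_type_key in ["MODIFIER", "CONSTRAINT", "MODIFIER"]:
    start_time = time.time()
    apply_transfer_data_items_stub(td_type_key)
    # Each call adds onto the same key, so the logged figure is the
    # total time spent on that transfer data type across all objects.
    profiler.add(time.time() - start_time, td_type_key)
profiler.add(time.time() - total_start, "TOTAL")
profiler.log_profiles("PUSH")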