Asset Pipeline: Improve Performance #235

Merged
4 changed files with 84 additions and 9 deletions
Showing only changes of commit 524601f9ff - Show all commits

View File

@ -45,3 +45,67 @@ def get_logger(name="asset_pipeline"):
# add ch to logger
logger.addHandler(ch)
return logger
# Human-readable description for each profiling stage, keyed by stage name.
# Profiler.log_profiles() appends these to the "<seconds> seconds ..." message.
PROFILE_KEYS = {
    "IMPORT": "To import Collection & add suffixes",
    "MAPPING": "To create Asset Mapping",
    "TRANSFER_DATA": "To apply all Transferable Data",
    "OBJECTS": "To remap all Objects",  # was "Obejcts" — typo in logged text
    "INDEXES": "To restore Active Indexes on all Objects",
    "COLLECTIONS": "To remap all Collections",
    "SHARED_IDS": "To remap all Shared IDs",
    "MERGE": "To complete entire merge process",
}
INFO_KEYS = ["MERGE"]  # Profile Keys to print in the logger's info mode
# Module-level singleton storage; populated lazily by get_profiler().
_profiler_instance = None


def get_profiler():
    """Return the shared Profiler singleton, creating it on first access."""
    global _profiler_instance
    if _profiler_instance is None:
        _profiler_instance = Profiler()
    return _profiler_instance
class Profiler:
    """Accumulates per-stage elapsed times for pull and push merges.

    Stage keys must be entries of ``PROFILE_KEYS``. Timings are routed into
    either ``push_profiles`` or ``pull_profiles`` depending on the direction
    selected via :meth:`set_push` (pull is the default).
    """

    def __init__(self) -> None:
        self.pull_profiles: dict = {}
        self.push_profiles: dict = {}
        # Default to "pull" direction so add() never hits an unset attribute.
        # (Previously _is_push was only created by reset()/set_push(), so
        # calling add() first raised AttributeError.)
        self._is_push: bool = False
        self._logger = get_logger()

    def add(self, elapsed_time: int, key: str):
        """Accumulate *elapsed_time* (seconds) under *key* for the current direction."""
        profiles = self.push_profiles if self._is_push else self.pull_profiles
        if key not in profiles:
            profiles[key] = elapsed_time
        else:
            profiles[key] += elapsed_time

    def log_all(self):
        """Log every collected timing for both merge directions."""
        self.log_profiles("PULL", self.pull_profiles)
        self.log_profiles("PUSH", self.push_profiles)

    def log_profiles(self, direction: str, profiles: dict):
        """Log one direction's timings; INFO for keys in INFO_KEYS, else DEBUG."""
        if not profiles:
            return
        for key, value in profiles.items():
            msg = f"{direction} {key} - {value} seconds {PROFILE_KEYS[key]}"
            if key in INFO_KEYS:
                self._logger.info(msg)
            else:
                self._logger.debug(msg)

    def reset(self):
        """Clear all collected timings and return to the default pull direction."""
        self.pull_profiles = {}
        # Also clear push timings — previously only pull_profiles was reset,
        # so push times leaked into (and inflated) subsequent sync runs.
        self.push_profiles = {}
        self._is_push = False
        self._logger = get_logger()

    def set_push(self, is_push=True):
        """Route subsequent add() calls to the push (or pull) profile table."""
        self._is_push = is_push

View File

@ -178,6 +178,7 @@ def merge_task_layer(
"""
logger = logging.get_logger()
profiles = logging.get_profiler()
start_time = time.time()
local_col = context.scene.asset_pipeline.asset_collection
@ -191,6 +192,7 @@ def merge_task_layer(
appended_col = import_data_from_lib(external_file, "collections", col_base_name)
merge_add_suffix_to_hierarchy(appended_col, external_suffix)
imported_time = time.time()
profiles.add((imported_time - start_time), "IMPORT")
local_col = bpy.data.collections[f"{col_base_name}.{local_suffix}"]
external_col = bpy.data.collections[f"{col_base_name}.{external_suffix}"]
@ -211,6 +213,7 @@ def merge_task_layer(
error_msg += f"Ownership conflict found for {type_name}: '{conflict_obj.name}'\n"
return error_msg
mapped_time = time.time()
profiles.add((mapped_time - imported_time), "MAPPING")
# Remove all Transferable Data from target objects
for source_obj in map.object_map:
@ -219,12 +222,14 @@ def merge_task_layer(
apply_transfer_data(context, map.transfer_data_map)
apply_td_time = time.time()
profiles.add((apply_td_time - mapped_time), "TRANSFER_DATA")
for source_obj in map.object_map:
target_obj = map.object_map[source_obj]
remap_user(source_obj, target_obj)
transfer_data_clean(target_obj)
obj_remap_time = time.time()
profiles.add((obj_remap_time - apply_td_time), "OBJECTS")
# Restore Active UV Layer and Active Color Attributes
for _, index_map_item in map.index_map.items():
@ -234,6 +239,7 @@ def merge_task_layer(
target_obj, index_map_item.get('active_color_attribute_name')
)
index_time = time.time()
profiles.add((index_time - obj_remap_time), "INDEXES")
for col in map.collection_map:
remap_user(col, map.collection_map[col])
@ -244,21 +250,16 @@ def merge_task_layer(
for col in map.external_col_to_remove:
local_col.children.unlink(col)
col_remap_time = time.time()
profiles.add((col_remap_time - index_time), "COLLECTIONS")
for id in map.shared_id_map:
remap_user(id, map.shared_id_map[id])
shared_id_remap_time = time.time()
profiles.add((shared_id_remap_time - col_remap_time), "SHARED_IDS")
bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=False, do_recursive=True)
merge_remove_suffix_from_hierarchy(local_col)
logger.debug(f"{imported_time - start_time} sec: To import Collection & add suffixes")
logger.debug(f"{shared_id_remap_time - col_remap_time} sec: To remap all Shared Ids")
logger.debug(f"{mapped_time - imported_time} sec: To create Asset Mapping")
logger.debug(f"{apply_td_time - mapped_time} sec: To apply all Transferable Data")
logger.debug(f"{obj_remap_time - apply_td_time} sec: To remap all Objects")
logger.debug(f"{index_time - obj_remap_time} sec: To restore Active Indexes on all Objects")
logger.debug(f"{col_remap_time - index_time} sec: To remap all Collections")
logger.info(f"{time.time() - start_time} sec: To complete entire merge process")
profiles.add((time.time() - start_time), "MERGE")
def import_data_from_lib(
libpath: Path,

View File

@ -5,7 +5,7 @@ from pathlib import Path
from typing import Set
import os
from . import constants, config, opscore
from . import constants, config, opscore, logging
from .asset_catalog import get_asset_catalog_items, get_asset_id
from .config import verify_task_layer_json_data
from .hooks import Hooks, get_production_hook_dir, get_asset_hook_dir
@ -309,6 +309,8 @@ class ASSETPIPE_OT_sync_pull(bpy.types.Operator):
opscore.sync_draw(self, context)
def execute(self, context: bpy.types.Context):
profiler = logging.get_profiler()
profiler.reset()
asset_col = context.scene.asset_pipeline.asset_collection
if self.save:
save_images()
@ -324,6 +326,7 @@ class ASSETPIPE_OT_sync_pull(bpy.types.Operator):
hooks_instance.execute_hooks(merge_mode="pull", merge_status='post', asset_col=asset_col)
self.report({'INFO'}, "Asset Pull Complete")
profiler.log_all()
return {'FINISHED'}
@ -370,12 +373,15 @@ class ASSETPIPE_OT_sync_push(bpy.types.Operator):
opscore.sync_draw(self, context)
def execute(self, context: bpy.types.Context):
profiler = logging.get_profiler()
profiler.reset()
asset_col = context.scene.asset_pipeline.asset_collection
hooks_instance = Hooks()
hooks_instance.load_hooks(context)
save_images()
bpy.ops.wm.save_mainfile()
# Separate if statement so hook can execute before updating ownership/prep sync
if self.pull:
hooks_instance.execute_hooks(merge_mode="pull", merge_status='pre', asset_col=asset_col)
# Find current task Layer
@ -387,6 +393,8 @@ class ASSETPIPE_OT_sync_push(bpy.types.Operator):
hooks_instance.execute_hooks(
merge_mode="pull", merge_status='post', asset_col=asset_col
)
profiler.set_push()
hooks_instance.execute_hooks(merge_mode="push", merge_status='pre', asset_col=asset_col)
bpy.ops.wm.save_mainfile(filepath=self._current_file.__str__())
@ -395,6 +403,7 @@ class ASSETPIPE_OT_sync_push(bpy.types.Operator):
self.report({'INFO'}, "Asset Sync Complete")
else:
self.report({'INFO'}, "Asset Force Push Complete")
profiler.log_all()
return {'FINISHED'}

View File

@ -20,6 +20,7 @@ from .merge.task_layer import draw_task_layer_selection
from .asset_catalog import get_asset_id
from . import prefs
def sync_poll(cls, context):
if any([img.is_dirty for img in bpy.data.images]):
cls.poll_message_set("Please save unsaved Images")