Asset Pipeline: Improve Performance #235

Merged
Showing only changes of commit 0c6489cba3

@@ -21,6 +21,7 @@ from .transfer_data.transfer_functions.transfer_function_util.active_indexes imp
 from pathlib import Path
 from typing import Dict
 from .. import constants, logging
+import time


 def ownership_transfer_data_cleanup(
@@ -175,6 +176,10 @@ def merge_task_layer(
         local_tls: (list[str]): list of task layers that are local to the current file
         external_file (Path): external file to pull data into the current file from
     """
+    logger = logging.get_logger()
+    start_time = time.time()
     local_col = context.scene.asset_pipeline.asset_collection
     if not local_col:
         return "Unable to find Asset Collection"
@@ -185,6 +190,10 @@
     appended_col = import_data_from_lib(external_file, "collections", col_base_name)
     merge_add_suffix_to_hierarchy(appended_col, external_suffix)
+    imported_time = time.time()
+    logger.debug(
+        f"{imported_time - start_time} sec: Imported Collection from Publish & Add Suffixes"
+    )

     local_col = bpy.data.collections[f"{col_base_name}.{local_suffix}"]
     external_col = bpy.data.collections[f"{col_base_name}.{external_suffix}"]
@@ -204,6 +213,8 @@
             type_name = get_id_type_name(type(conflict_obj))
            error_msg += f"Ownership conflict found for {type_name}: '{conflict_obj.name}'\n"
         return error_msg
+    mapped_time = time.time()
+    logger.debug(f"{mapped_time - imported_time} sec: To Create Asset Mapping")

     # Remove all Transferable Data from target objects
     for source_obj in map.object_map:
@@ -211,11 +222,15 @@
         target_obj.transfer_data_ownership.clear()

     apply_transfer_data(context, map.transfer_data_map)
+    apply_td_time = time.time()
+    logger.debug(f"{apply_td_time - mapped_time} sec: To Apply all Transferable Data")

     for source_obj in map.object_map:
         target_obj = map.object_map[source_obj]
         remap_user(source_obj, target_obj)
         transfer_data_clean(target_obj)
+    obj_remap_time = time.time()
+    logger.debug(f"{obj_remap_time - apply_td_time} sec: To remap all objects")

     # Restore Active UV Layer and Active Color Attributes
     for _, index_map_item in map.index_map.items():
@@ -224,6 +239,8 @@
         transfer_active_color_attribute_index(
             target_obj, index_map_item.get('active_color_attribute_name')
         )
+    index_time = time.time()
+    logger.debug(f"{index_time - obj_remap_time} sec: To restore active indexes on all objects")

     for col in map.collection_map:
         remap_user(col, map.collection_map[col])
@@ -233,13 +250,17 @@
     for col in map.external_col_to_remove:
         local_col.children.unlink(col)
+    col_remap_time = time.time()
+    logger.debug(f"{col_remap_time - index_time} sec: To remap all collections")

     for id in map.shared_id_map:
         remap_user(id, map.shared_id_map[id])
+    shared_id_remap_time = time.time()
+    logger.debug(f"{shared_id_remap_time - col_remap_time} sec: To remap all shared ids")

     bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=False, do_recursive=True)
     merge_remove_suffix_from_hierarchy(local_col)
+    logger.info(f"{time.time() - start_time} sec: To complete entire merge process")


 def import_data_from_lib(
     libpath: Path,
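
Note (not part of this commit): every checkpoint above follows the same pattern, a time.time() sample followed by a logger.debug() of the delta since the previous sample. A minimal sketch of the same idea as a reusable context manager, using only the Python standard library; the timed_step helper and its label strings are hypothetical, and in the addon the logger would come from its own logging.get_logger() wrapper instead of logging.getLogger:

import logging
import time
from contextlib import contextmanager

logger = logging.getLogger(__name__)

@contextmanager
def timed_step(label: str):
    # Log elapsed seconds in the same "<delta> sec: <description>" format
    # used by the debug messages added in this commit.
    start = time.time()
    try:
        yield
    finally:
        logger.debug(f"{time.time() - start} sec: {label}")

# Hypothetical usage inside merge_task_layer:
#     with timed_step("To Apply all Transferable Data"):
#         apply_transfer_data(context, map.transfer_data_map)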