From d9665dadd4139dd74a1b7d497b8a19cfc665bc92 Mon Sep 17 00:00:00 2001 From: Nick Alberelli Date: Mon, 15 Apr 2024 12:30:42 -0400 Subject: [PATCH 01/15] Group Transfer Data Items by Object/Type --- .../asset_pipeline/merge/asset_mapping.py | 42 +++++++++++-------- .../merge/transfer_data/transfer_core.py | 36 ++++++++++++++-- .../merge/transfer_data/transfer_util.py | 6 +++ .../addons/asset_pipeline/props.py | 1 + 4 files changed, 64 insertions(+), 21 deletions(-) diff --git a/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py b/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py index abe955fb..ce39817d 100644 --- a/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py +++ b/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py @@ -169,7 +169,7 @@ class AssetTransferMapping: return coll_map - def _transfer_data_get_map_content(self, obj, target_obj, transfer_data_item): + def _create_temp_transfer_data_item(self, source_obj, target_obj, transfer_data_item): temp_transfer_data = bpy.context.scene.asset_pipeline.temp_transfer_data temp_transfer_data_item_index = len(temp_transfer_data) temp_transfer_data_item = transfer_data_add_entry( @@ -178,17 +178,11 @@ class AssetTransferMapping: td_type_key=transfer_data_item.type, task_layer_name=transfer_data_item.owner, surrender=transfer_data_item.surrender, + target_obj=target_obj, + obj=source_obj, ) - map_item = { - 'transfer_data_item_index': temp_transfer_data_item_index, - 'source_obj': obj, - 'target_obj': target_obj, - } - # Names of each map item need to be unique - # below name avoids name conflicts between different types - name = transfer_data_item.name + '_' + transfer_data_item.type + obj.name - return name, map_item + return temp_transfer_data_item_index def _transfer_data_pair_not_local(self, td_1, td_2): # Returns true if neither owners are local to current file @@ -243,11 +237,17 @@ class AssetTransferMapping: """Adds item to Transfer Data Map""" if 
self._transfer_data_is_surrendered(transfer_data_item): return - - name, map_item = self._transfer_data_get_map_content( + td_type_key = transfer_data_item.type + temp_td_index = self._create_temp_transfer_data_item( source_obj, target_obj, transfer_data_item ) - self.transfer_data_map[name] = map_item + if not source_obj in self.transfer_data_map: + self.transfer_data_map[source_obj] = {td_type_key: [temp_td_index]} + + if not td_type_key in self.transfer_data_map[source_obj]: + self.transfer_data_map[source_obj][td_type_key] = [temp_td_index] + else: + self.transfer_data_map[source_obj][td_type_key].append(temp_td_index) def _transfer_data_map_item(self, source_obj, target_obj, transfer_data_item): """Verifies if Transfer Data Item is valid/can be mapped""" @@ -284,11 +284,17 @@ class AssetTransferMapping: # Generate a Map of Indexes that need to be set post merge # Stores active_uv & active_color_attribute index_map = {} - for _, item in self.transfer_data_map.items(): - temp_transfer_data = bpy.context.scene.asset_pipeline.temp_transfer_data - temp_transfer_data_item = temp_transfer_data[item.get('transfer_data_item_index')] - source_obj = item.get('source_obj') - target_obj = item.get('target_obj') + temp_transfer_data_items = [] + temp_transfer_data = bpy.context.scene.asset_pipeline.temp_transfer_data + + for source_obj in self.transfer_data_map: + for td_type_key, td_indexes in self.transfer_data_map[source_obj].items(): + for index in td_indexes: + temp_transfer_data_items.append(temp_transfer_data[index]) + + for temp_transfer_data_item in temp_transfer_data_items: + source_obj = temp_transfer_data_item.obj + target_obj = temp_transfer_data_item.target_obj if temp_transfer_data_item.type != constants.MATERIAL_SLOT_KEY: continue diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py index 70b0ee73..916638de 100644 --- 
a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py +++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py @@ -9,15 +9,19 @@ from .transfer_functions import ( vertex_groups, materials, ) - +from typing import List from ... import constants, logging - +from ...props import AssetTransferDataTemp +from bpy.types import PropertyGroup from .transfer_util import ( transfer_data_add_entry, check_transfer_data_entry, ) -def copy_transfer_data_ownership(transfer_data_item, target_obj: bpy.types.Object) -> None: + +def copy_transfer_data_ownership( + transfer_data_item: AssetTransferDataTemp, target_obj: bpy.types.Object +) -> None: """Copy Transferable Data item to object if non entry exists Args: @@ -114,6 +118,12 @@ def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None: transfer_data_item = temp_transfer_data[transfer_data.get('transfer_data_item_index')] target_obj = transfer_data.get('target_obj') source_obj = transfer_data.get('source_obj') + + +def apply_transfer_data_items(context, td_type_key: str, transfer_data_items: List[PropertyGroup]): + for transfer_data_item in transfer_data_items: + target_obj = transfer_data_item.target_obj + source_obj = transfer_data_item.obj if target_obj is None: logger.warning(f"Failed to Transfer Data for {transfer_data_item.id_data.name}") continue @@ -188,3 +198,23 @@ def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None: transfer_data_item=transfer_data_item, target_obj=target_obj, ) + transfer_data_item.copy_transfer_data_ownership() + + +def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None: + """Apply all Transferable Data from Transferable Data map onto objects. + Copies any Transferable Data owned by local layer onto objects owned by external layers. 
+ Applies Transferable Data from external layers onto objects owned by local layers + + Transfer_data_map is generated by class 'AssetTransferMapping' + + Args: + context (bpy.types.Context): context of .blend file + transfer_data_map: Map generated by class AssetTransferMapping + """ + + temp_transfer_data = context.scene.asset_pipeline.temp_transfer_data + for source_obj in transfer_data_map: + for td_type_key, td_indexes in transfer_data_map[source_obj].items(): + transfer_data_items = [temp_transfer_data[index] for index in td_indexes] + apply_transfer_data_items(context, td_type_key, transfer_data_items) diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_util.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_util.py index 8452ec2f..c408ab9a 100644 --- a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_util.py +++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_util.py @@ -30,6 +30,8 @@ def transfer_data_add_entry( td_type_key: str, task_layer_name: str, surrender: bool, + target_obj: bpy.types.Object = None, + obj: bpy.types.Object = None, ): """Add entry to Transferable Data ownership @@ -44,6 +46,10 @@ def transfer_data_add_entry( transfer_data_item.owner = task_layer_name transfer_data_item.type = td_type_key transfer_data_item.surrender = surrender + if target_obj: + transfer_data_item.target_obj = target_obj + if obj: + transfer_data_item.obj = obj return transfer_data_item diff --git a/scripts-blender/addons/asset_pipeline/props.py b/scripts-blender/addons/asset_pipeline/props.py index 5a112ebb..56d0f0e5 100644 --- a/scripts-blender/addons/asset_pipeline/props.py +++ b/scripts-blender/addons/asset_pipeline/props.py @@ -201,6 +201,7 @@ class AssetPipeline(bpy.types.PropertyGroup): description="Select Asset Library Catalog for the current Asset, this value will be updated each time you Push to an 'Active' Publish", ) # type: ignore + @bpy.app.handlers.persistent def 
set_asset_collection_name_post_file_load(_): # Version the PointerProperty to the StringProperty, and the left-over pointer. -- 2.30.2 From a5015a740a9e02c2d76679341612f02d7ad6cf74 Mon Sep 17 00:00:00 2001 From: Nick Alberelli Date: Mon, 15 Apr 2024 12:31:12 -0400 Subject: [PATCH 02/15] Transfer All Vertex Groups per Object at Once --- .../merge/transfer_data/transfer_core.py | 137 ++++++++---------- .../transfer_functions/vertex_groups.py | 80 +++++----- 2 files changed, 107 insertions(+), 110 deletions(-) diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py index 916638de..7a4364d6 100644 --- a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py +++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py @@ -121,85 +121,70 @@ def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None: def apply_transfer_data_items(context, td_type_key: str, transfer_data_items: List[PropertyGroup]): + # Get source/target from first item in list, because all items in list are same object/type + target_obj = transfer_data_items[0].target_obj + source_obj = transfer_data_items[0].obj + + if target_obj is None: + print(f"Failed to Transfer Data for {transfer_data_item.id_data.name}") + return + for transfer_data_item in transfer_data_items: - target_obj = transfer_data_item.target_obj - source_obj = transfer_data_item.obj - if target_obj is None: - logger.warning(f"Failed to Transfer Data for {transfer_data_item.id_data.name}") - continue - if transfer_data_item is None: - continue - if source_obj != target_obj: - if transfer_data_item.type == constants.VERTEX_GROUP_KEY: - logger.debug( - f"Transferring Vertex Group {transfer_data_item.name} from {source_obj.name} to {target_obj.name}." 
- ) - vertex_groups.transfer_vertex_group( - context=context, - vertex_group_name=transfer_data_item.name, - target_obj=target_obj, - source_obj=source_obj, - ) - if transfer_data_item.type == constants.MODIFIER_KEY: - logger.debug( - f"Transferring Modifier{transfer_data_item.name} from {source_obj.name} to {target_obj.name}." - ) - modifers.transfer_modifier( - modifier_name=transfer_data_item.name, - target_obj=target_obj, - source_obj=source_obj, - ) - if transfer_data_item.type == constants.CONSTRAINT_KEY: - logger.debug( - f"Transferring Constraint {transfer_data_item.name} from {source_obj.name} to {target_obj.name}." - ) - constraints.transfer_constraint( - constraint_name=transfer_data_item.name, - target_obj=target_obj, - source_obj=source_obj, - ) - if transfer_data_item.type == constants.MATERIAL_SLOT_KEY: - logger.debug(f"Transferring Materiald from {source_obj.name} to {target_obj.name}.") - materials.transfer_materials( - target_obj=target_obj, - source_obj=source_obj, - ) - if transfer_data_item.type == constants.SHAPE_KEY_KEY: - logger.debug( - f"Transferring Shape Key {transfer_data_item.name} from {source_obj.name} to {target_obj.name}." - ) - shape_keys.transfer_shape_key( - context=context, - target_obj=target_obj, - source_obj=source_obj, - shape_key_name=transfer_data_item.name, - ) - if transfer_data_item.type == constants.ATTRIBUTE_KEY: - logger.debug( - f"Transferring Attribute {transfer_data_item.name} from {source_obj.name} to {target_obj.name}." - ) - attributes.transfer_attribute( - target_obj=target_obj, - source_obj=source_obj, - attribute_name=transfer_data_item.name, - ) - if transfer_data_item.type == constants.PARENT_KEY: - logger.debug( - f"Transferring Parent Relationship from {source_obj.name} to {target_obj.name}." - ) - parent.transfer_parent( - target_obj=target_obj, - source_obj=source_obj, - ) - logger.debug( - f"Copying Ownership Data for {transfer_data_item.name} from {source_obj.name} to {target_obj.name}." 
- ) - copy_transfer_data_ownership( - transfer_data_item=transfer_data_item, - target_obj=target_obj, - ) transfer_data_item.copy_transfer_data_ownership() + if source_obj == target_obj: + return + + if td_type_key == constants.VERTEX_GROUP_KEY: + # Transfer All Vertex Groups in one go + print(f"Transferring all Vertex Groups from {source_obj.name} to {target_obj.name}") + vertex_groups.transfer_vertex_groups( + vertex_group_names=[item.name for item in transfer_data_items], + target_obj=target_obj, + source_obj=source_obj, + ) + if td_type_key == constants.MODIFIER_KEY: + for transfer_data_item in transfer_data_items: + modifers.transfer_modifier( + modifier_name=transfer_data_item.name, + target_obj=target_obj, + source_obj=source_obj, + ) + if td_type_key == constants.CONSTRAINT_KEY: + for transfer_data_item in transfer_data_items: + constraints.transfer_constraint( + constraint_name=transfer_data_item.name, + target_obj=target_obj, + source_obj=source_obj, + ) + if td_type_key == constants.MATERIAL_SLOT_KEY: + for transfer_data_item in transfer_data_items: + materials.transfer_materials( + target_obj=target_obj, + source_obj=source_obj, + ) + if td_type_key == constants.SHAPE_KEY_KEY: + for transfer_data_item in transfer_data_items: + shape_keys.transfer_shape_key( + context=context, + target_obj=target_obj, + source_obj=source_obj, + shape_key_name=transfer_data_item.name, + ) + if td_type_key == constants.ATTRIBUTE_KEY: + for transfer_data_item in transfer_data_items: + attributes.transfer_attribute( + target_obj=target_obj, + source_obj=source_obj, + attribute_name=transfer_data_item.name, + ) + if td_type_key == constants.PARENT_KEY: + for transfer_data_item in transfer_data_items: + parent.transfer_parent( + target_obj=target_obj, + source_obj=source_obj, + ) + def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None: """Apply all Transferable Data from Transferable Data map onto objects. 
diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/vertex_groups.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/vertex_groups.py index af86fb8b..04241b92 100644 --- a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/vertex_groups.py +++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/vertex_groups.py @@ -35,33 +35,26 @@ def init_vertex_groups(scene, obj): ) -def transfer_vertex_group( - context, - vertex_group_name: str, +def transfer_vertex_groups( + vertex_group_names: List[str], target_obj: bpy.types.Object, source_obj: bpy.types.Object, ): - logger = logging.get_logger() - if target_obj == source_obj: - return - - if not source_obj.vertex_groups.get(vertex_group_name): - logger.error(f"Vertex Group {vertex_group_name} not found in {source_obj.name}") - return + for vertex_group_name in vertex_group_names: + if not source_obj.vertex_groups.get(vertex_group_name): + print(f"ERROR Vertex Group {vertex_group_name} not found in {source_obj.name}") + return # If topology matches transfer directly, otherwise use vertex proximity if is_obdata_identical(source_obj, target_obj): - transfer_single_vgroup_by_topology( - source_obj, target_obj, vertex_group_name - ) + for vertex_group_name in vertex_group_names: + transfer_single_vgroup_by_topology(source_obj, target_obj, vertex_group_name) else: - precalc_and_transfer_single_group( - source_obj, target_obj, vertex_group_name, expand=2 - ) + precalc_and_transfer_multiple_groups(source_obj, target_obj, vertex_group_names, expand=2) + def transfer_single_vgroup_by_topology(source_obj, target_obj, vgroup_name): - """ Function to quickly transfer single vertex group between mesh objects in case of matching topology. - """ + """Function to quickly transfer single vertex group between mesh objects in case of matching topology.""" # Remove group from the target obj if it already exists. 
TODO: de-duplicate tgt_vg = target_obj.vertex_groups.get(vgroup_name) @@ -75,6 +68,33 @@ def transfer_single_vgroup_by_topology(source_obj, target_obj, vgroup_name): if vgroup_src.index in [g.group for g in v.groups]: vgroup_tgt.add([v.index], vgroup_src.weight(v.index), 'REPLACE') + +def precalc_and_transfer_multiple_groups(source_obj, target_obj, vgroup_names, expand=2): + """Convenience function to transfer a single group. For transferring multiple groups, + this is very inefficient and shouldn't be used. + + Instead, you should: + - build_kd_tree ONCE per source mesh. + - build_vert_influence_map and transfer_vertex_groups ONCE per object pair. + """ + + # Remove group from the target obj if it already exists. TODO: de-duplicate + vgroups = [source_obj.vertex_groups[name] for name in vgroup_names] + for vgroup_name in vgroup_names: + target_vgroup = target_obj.vertex_groups.get(vgroup_name) + if target_vgroup: + target_obj.vertex_groups.remove(target_vgroup) + + kd_tree = build_kdtree(source_obj.data) + vert_influence_map = build_vert_influence_map(source_obj, target_obj, kd_tree, expand) + transfer_multiple_vertex_groups( + source_obj, + target_obj, + vert_influence_map, + vgroups, + ) + + def precalc_and_transfer_single_group(source_obj, target_obj, vgroup_name, expand=2): """Convenience function to transfer a single group. For transferring multiple groups, this is very inefficient and shouldn't be used. 
@@ -90,10 +110,9 @@ def precalc_and_transfer_single_group(source_obj, target_obj, vgroup_name, expan target_obj.vertex_groups.remove(tgt_vg) kd_tree = build_kdtree(source_obj.data) - vert_influence_map = build_vert_influence_map( - source_obj, target_obj, kd_tree, expand - ) - transfer_vertex_groups( + vert_influence_map = build_vert_influence_map(source_obj, target_obj, kd_tree, expand) + + transfer_multiple_vertex_groups( source_obj, target_obj, vert_influence_map, @@ -110,9 +129,7 @@ def build_kdtree(mesh): def build_vert_influence_map(obj_from, obj_to, kd_tree, expand=2): - verts_of_edge = { - i: (e.vertices[0], e.vertices[1]) for i, e in enumerate(obj_from.data.edges) - } + verts_of_edge = {i: (e.vertices[0], e.vertices[1]) for i, e in enumerate(obj_from.data.edges)} edges_of_vert: Dict[int, List[int]] = {} for edge_idx, edge in enumerate(obj_from.data.edges): @@ -166,29 +183,24 @@ def get_source_vert_influences( parts_sum = sum(parts) influences = [ - (idx, 1 if dist == 0 else part / parts_sum) - for part, dist in zip(parts, distances) + (idx, 1 if dist == 0 else part / parts_sum) for part, dist in zip(parts, distances) ] return influences -def get_nearest_vert( - coords: Vector, kd_tree: kdtree.KDTree -) -> Tuple[Vector, int, float]: +def get_nearest_vert(coords: Vector, kd_tree: kdtree.KDTree) -> Tuple[Vector, int, float]: """Return coordinate, index, and distance of nearest vert to coords in kd_tree.""" return kd_tree.find(coords) -def other_vert_of_edge( - edge: int, vert: int, verts_of_edge: Dict[int, Tuple[int, int]] -) -> int: +def other_vert_of_edge(edge: int, vert: int, verts_of_edge: Dict[int, Tuple[int, int]]) -> int: verts = verts_of_edge[edge] assert vert in verts, f"Vert {vert} not part of edge {edge}." 
return verts[0] if vert == verts[1] else verts[1] -def transfer_vertex_groups(obj_from, obj_to, vert_influence_map, src_vgroups): +def transfer_multiple_vertex_groups(obj_from, obj_to, vert_influence_map, src_vgroups): """Transfer src_vgroups in obj_from to obj_to using a pre-calculated vert_influence_map.""" for src_vg in src_vgroups: -- 2.30.2 From f2247a8383c459670e7cd81881eae0162ec3b157 Mon Sep 17 00:00:00 2001 From: Nick Alberelli Date: Mon, 15 Apr 2024 12:31:36 -0400 Subject: [PATCH 03/15] Don't Use Temp Transfer Data During Merge --- .../asset_pipeline/merge/asset_mapping.py | 75 +++++++--------- .../merge/transfer_data/transfer_core.py | 88 +++++++------------ .../addons/asset_pipeline/props.py | 14 ++- 3 files changed, 78 insertions(+), 99 deletions(-) diff --git a/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py b/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py index ce39817d..db9b7d36 100644 --- a/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py +++ b/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py @@ -169,20 +169,13 @@ class AssetTransferMapping: return coll_map - def _create_temp_transfer_data_item(self, source_obj, target_obj, transfer_data_item): - temp_transfer_data = bpy.context.scene.asset_pipeline.temp_transfer_data - temp_transfer_data_item_index = len(temp_transfer_data) - temp_transfer_data_item = transfer_data_add_entry( - transfer_data=temp_transfer_data, - name=transfer_data_item.name, - td_type_key=transfer_data_item.type, - task_layer_name=transfer_data_item.owner, - surrender=transfer_data_item.surrender, - target_obj=target_obj, - obj=source_obj, - ) - - return temp_transfer_data_item_index + def _get_transfer_data_dict(self, source_obj, target_obj, transfer_data_item): + return { + 'name': transfer_data_item.name, + "owner": transfer_data_item.owner, + "surrender": transfer_data_item.surrender, + "target_obj": target_obj, + } def _transfer_data_pair_not_local(self, td_1, td_2): # 
Returns true if neither owners are local to current file @@ -238,16 +231,16 @@ class AssetTransferMapping: if self._transfer_data_is_surrendered(transfer_data_item): return td_type_key = transfer_data_item.type - temp_td_index = self._create_temp_transfer_data_item( + transfer_data_dict = self._get_transfer_data_dict( source_obj, target_obj, transfer_data_item ) if not source_obj in self.transfer_data_map: - self.transfer_data_map[source_obj] = {td_type_key: [temp_td_index]} + self.transfer_data_map[source_obj] = {td_type_key: [transfer_data_dict]} if not td_type_key in self.transfer_data_map[source_obj]: - self.transfer_data_map[source_obj][td_type_key] = [temp_td_index] + self.transfer_data_map[source_obj][td_type_key] = [transfer_data_dict] else: - self.transfer_data_map[source_obj][td_type_key].append(temp_td_index) + self.transfer_data_map[source_obj][td_type_key].append(transfer_data_dict) def _transfer_data_map_item(self, source_obj, target_obj, transfer_data_item): """Verifies if Transfer Data Item is valid/can be mapped""" @@ -268,12 +261,11 @@ class AssetTransferMapping: def _gen_transfer_data_map(self): # Generate Mapping for Transfer Data Items - temp_transfer_data = bpy.context.scene.asset_pipeline.temp_transfer_data - temp_transfer_data.clear() for objs in self.object_map.items(): - source_obj, target_obj = objs + _, target_obj = objs for obj in objs: # Must execute for both objs in map (so we map external and local TD) + # Must include maps even if obj==target_obj to preserve exisiting local TD entry for transfer_data_item in obj.transfer_data_ownership: if self._transfer_data_check_conflict(obj, transfer_data_item): continue @@ -284,32 +276,27 @@ class AssetTransferMapping: # Generate a Map of Indexes that need to be set post merge # Stores active_uv & active_color_attribute index_map = {} - temp_transfer_data_items = [] - temp_transfer_data = bpy.context.scene.asset_pipeline.temp_transfer_data for source_obj in self.transfer_data_map: - for 
td_type_key, td_indexes in self.transfer_data_map[source_obj].items(): - for index in td_indexes: - temp_transfer_data_items.append(temp_transfer_data[index]) + for td_type_key, td_items in self.transfer_data_map[source_obj].items(): + for td_item in td_items: + target_obj = td_item["target_obj"] + if td_type_key != constants.MATERIAL_SLOT_KEY: + continue + if source_obj.type != 'MESH': + continue - for temp_transfer_data_item in temp_transfer_data_items: - source_obj = temp_transfer_data_item.obj - target_obj = temp_transfer_data_item.target_obj - - if temp_transfer_data_item.type != constants.MATERIAL_SLOT_KEY: - continue - if source_obj.type != 'MESH': - continue - - active_uv_name = ( - source_obj.data.uv_layers.active.name if source_obj.data.uv_layers.active else '' - ) - active_color_attribute_name = source_obj.data.color_attributes.active_color_name - index_map[source_obj] = { - 'active_uv_name': active_uv_name, - 'active_color_attribute_name': active_color_attribute_name, - 'target_obj': target_obj, - } + active_uv_name = ( + source_obj.data.uv_layers.active.name + if source_obj.data.uv_layers.active + else '' + ) + active_color_attribute_name = source_obj.data.color_attributes.active_color_name + index_map[source_obj] = { + 'active_uv_name': active_uv_name, + 'active_color_attribute_name': active_color_attribute_name, + 'target_obj': target_obj, + } return index_map diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py index 7a4364d6..734ba5b7 100644 --- a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py +++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py @@ -10,37 +10,34 @@ from .transfer_functions import ( materials, ) from typing import List -from ... import constants, logging -from ...props import AssetTransferDataTemp -from bpy.types import PropertyGroup +from ... 
import constants from .transfer_util import ( transfer_data_add_entry, check_transfer_data_entry, ) -def copy_transfer_data_ownership( - transfer_data_item: AssetTransferDataTemp, target_obj: bpy.types.Object -) -> None: +# TODO use logging module here +def copy_transfer_data_ownership(td_type_key: str, transfer_data_dict: dict) -> None: """Copy Transferable Data item to object if non entry exists Args: transfer_data_item: Item of bpy.types.CollectionProperty from source object target_obj (bpy.types.Object): Object to add Transferable Data item to """ - transfer_data = target_obj.transfer_data_ownership + transfer_data = transfer_data_dict["target_obj"].transfer_data_ownership matches = check_transfer_data_entry( transfer_data, - transfer_data_item.name, - transfer_data_item.type, + transfer_data_dict["name"], + td_type_key, ) if len(matches) == 0: transfer_data_add_entry( transfer_data, - transfer_data_item.name, - transfer_data_item.type, - transfer_data_item.owner, - transfer_data_item.surrender, + transfer_data_dict["name"], + td_type_key, + transfer_data_dict["owner"], + transfer_data_dict["surrender"], ) @@ -100,38 +97,23 @@ def init_transfer_data( attributes.init_attributes(scene, obj) -def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None: - """Apply all Transferable Data from Transferable Data map onto objects. - Copies any Transferable Data owned by local layer onto objects owned by external layers. 
- Applies Transferable Data from external layers onto objects owned by local layers - - Transfer_data_map is generated by class 'AssetTransferMapping' - - Args: - context (bpy.types.Context): context of .blend file - transfer_data_map: Map generated by class AssetTransferMapping - """ - logger = logging.get_logger() - for name in transfer_data_map: - temp_transfer_data = context.scene.asset_pipeline.temp_transfer_data - transfer_data = transfer_data_map[name] - transfer_data_item = temp_transfer_data[transfer_data.get('transfer_data_item_index')] - target_obj = transfer_data.get('target_obj') - source_obj = transfer_data.get('source_obj') - - -def apply_transfer_data_items(context, td_type_key: str, transfer_data_items: List[PropertyGroup]): +def apply_transfer_data_items( + context, + source_obj: bpy.types.Object, + td_type_key: str, + transfer_data_dicts: List[dict], +): # Get source/target from first item in list, because all items in list are same object/type - target_obj = transfer_data_items[0].target_obj - source_obj = transfer_data_items[0].obj + target_obj = transfer_data_dicts[0]["target_obj"] if target_obj is None: - print(f"Failed to Transfer Data for {transfer_data_item.id_data.name}") + print(f"Failed to Transfer Data for {transfer_data_dict.id_data.name}") return - for transfer_data_item in transfer_data_items: - transfer_data_item.copy_transfer_data_ownership() + for transfer_data_dict in transfer_data_dicts: + copy_transfer_data_ownership(td_type_key, transfer_data_dict) + # if TD Source is Target, restore the ownership data but don't transfer anything if source_obj == target_obj: return @@ -139,47 +121,47 @@ def apply_transfer_data_items(context, td_type_key: str, transfer_data_items: Li # Transfer All Vertex Groups in one go print(f"Transferring all Vertex Groups from {source_obj.name} to {target_obj.name}") vertex_groups.transfer_vertex_groups( - vertex_group_names=[item.name for item in transfer_data_items], + vertex_group_names=[item["name"] 
for item in transfer_data_dicts], target_obj=target_obj, source_obj=source_obj, ) if td_type_key == constants.MODIFIER_KEY: - for transfer_data_item in transfer_data_items: + for transfer_data_dict in transfer_data_dicts: modifers.transfer_modifier( - modifier_name=transfer_data_item.name, + modifier_name=transfer_data_dict["name"], target_obj=target_obj, source_obj=source_obj, ) if td_type_key == constants.CONSTRAINT_KEY: - for transfer_data_item in transfer_data_items: + for transfer_data_dict in transfer_data_dicts: constraints.transfer_constraint( - constraint_name=transfer_data_item.name, + constraint_name=transfer_data_dict["name"], target_obj=target_obj, source_obj=source_obj, ) if td_type_key == constants.MATERIAL_SLOT_KEY: - for transfer_data_item in transfer_data_items: + for transfer_data_dict in transfer_data_dicts: materials.transfer_materials( target_obj=target_obj, source_obj=source_obj, ) if td_type_key == constants.SHAPE_KEY_KEY: - for transfer_data_item in transfer_data_items: + for transfer_data_dict in transfer_data_dicts: shape_keys.transfer_shape_key( context=context, target_obj=target_obj, source_obj=source_obj, - shape_key_name=transfer_data_item.name, + shape_key_name=transfer_data_dict["name"], ) if td_type_key == constants.ATTRIBUTE_KEY: - for transfer_data_item in transfer_data_items: + for transfer_data_dict in transfer_data_dicts: attributes.transfer_attribute( target_obj=target_obj, source_obj=source_obj, - attribute_name=transfer_data_item.name, + attribute_name=transfer_data_dict["name"], ) if td_type_key == constants.PARENT_KEY: - for transfer_data_item in transfer_data_items: + for transfer_data_dict in transfer_data_dicts: parent.transfer_parent( target_obj=target_obj, source_obj=source_obj, @@ -198,8 +180,6 @@ def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None: transfer_data_map: Map generated by class AssetTransferMapping """ - temp_transfer_data = context.scene.asset_pipeline.temp_transfer_data for 
source_obj in transfer_data_map: - for td_type_key, td_indexes in transfer_data_map[source_obj].items(): - transfer_data_items = [temp_transfer_data[index] for index in td_indexes] - apply_transfer_data_items(context, td_type_key, transfer_data_items) + for td_type_key, td_dicts in transfer_data_map[source_obj].items(): + apply_transfer_data_items(context, source_obj, td_type_key, td_dicts) diff --git a/scripts-blender/addons/asset_pipeline/props.py b/scripts-blender/addons/asset_pipeline/props.py index 56d0f0e5..a6f460f7 100644 --- a/scripts-blender/addons/asset_pipeline/props.py +++ b/scripts-blender/addons/asset_pipeline/props.py @@ -57,7 +57,19 @@ class AssetTransferDataTemp(bpy.types.PropertyGroup): items=constants.TRANSFER_DATA_TYPES_ENUM_ITEMS, ) surrender: bpy.props.BoolProperty(name="Surrender Ownership", default=False) - obj_name: bpy.props.StringProperty(name="Object Name") + obj: bpy.props.PointerProperty(type=bpy.types.Object) + target_obj: bpy.props.PointerProperty(type=bpy.types.Object) + + def check_transfer_data_entry(self) -> set: + """ + Verifies if Transferable Data entry exists + """ + existing_items = [ + transfer_data_item.name + for transfer_data_item in self.target_obj.transfer_data_ownership + if transfer_data_item.type == self.type + ] + return set([self.name]).intersection(set(existing_items)) class TaskLayerSettings(bpy.types.PropertyGroup): -- 2.30.2 From b832586442be703501c8b0f7b00d3af2e280f407 Mon Sep 17 00:00:00 2001 From: Nick Alberelli Date: Mon, 15 Apr 2024 12:31:36 -0400 Subject: [PATCH 04/15] Store Target Object once in each Transfer Data Dict --- .../asset_pipeline/merge/asset_mapping.py | 49 ++++++++++--------- 1 file changed, 27 insertions(+), 22 deletions(-) diff --git a/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py b/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py index db9b7d36..33ff7f24 100644 --- a/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py +++ 
b/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py @@ -235,12 +235,17 @@ class AssetTransferMapping: source_obj, target_obj, transfer_data_item ) if not source_obj in self.transfer_data_map: - self.transfer_data_map[source_obj] = {td_type_key: [transfer_data_dict]} + self.transfer_data_map[source_obj] = { + "target_obj": target_obj, + "td_types": {td_type_key: [transfer_data_dict]}, + } + return - if not td_type_key in self.transfer_data_map[source_obj]: - self.transfer_data_map[source_obj][td_type_key] = [transfer_data_dict] + if not td_type_key in self.transfer_data_map[source_obj]["td_types"]: + self.transfer_data_map[source_obj]["td_types"][td_type_key] = [transfer_data_dict] + return else: - self.transfer_data_map[source_obj][td_type_key].append(transfer_data_dict) + self.transfer_data_map[source_obj]["td_types"][td_type_key].append(transfer_data_dict) def _transfer_data_map_item(self, source_obj, target_obj, transfer_data_item): """Verifies if Transfer Data Item is valid/can be mapped""" @@ -278,25 +283,25 @@ class AssetTransferMapping: index_map = {} for source_obj in self.transfer_data_map: - for td_type_key, td_items in self.transfer_data_map[source_obj].items(): - for td_item in td_items: - target_obj = td_item["target_obj"] - if td_type_key != constants.MATERIAL_SLOT_KEY: - continue - if source_obj.type != 'MESH': - continue + target_obj = self.transfer_data_map[source_obj]["target_obj"] + td_types = self.transfer_data_map[source_obj]["td_types"] + for td_type_key, _ in td_types.items(): + if td_type_key != constants.MATERIAL_SLOT_KEY: + continue + if source_obj.type != 'MESH': + continue - active_uv_name = ( - source_obj.data.uv_layers.active.name - if source_obj.data.uv_layers.active - else '' - ) - active_color_attribute_name = source_obj.data.color_attributes.active_color_name - index_map[source_obj] = { - 'active_uv_name': active_uv_name, - 'active_color_attribute_name': active_color_attribute_name, - 'target_obj': target_obj, - } + 
active_uv_name = ( + source_obj.data.uv_layers.active.name + if source_obj.data.uv_layers.active + else '' + ) + active_color_attribute_name = source_obj.data.color_attributes.active_color_name + index_map[source_obj] = { + 'active_uv_name': active_uv_name, + 'active_color_attribute_name': active_color_attribute_name, + 'target_obj': target_obj, + } return index_map -- 2.30.2 From b6d4fabb13889dfb5bcfb54443980394c9b7cbf6 Mon Sep 17 00:00:00 2001 From: Nick Alberelli Date: Mon, 15 Apr 2024 12:31:36 -0400 Subject: [PATCH 05/15] Isolate Objects to Collection during Transfer Data Transfer --- .../asset_pipeline/merge/asset_mapping.py | 8 ++--- .../merge/transfer_data/transfer_core.py | 22 ++++++++---- .../merge/transfer_data/transfer_util.py | 35 ++++++++++++++++++- 3 files changed, 53 insertions(+), 12 deletions(-) diff --git a/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py b/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py index 33ff7f24..aaa8bfcd 100644 --- a/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py +++ b/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py @@ -169,12 +169,11 @@ class AssetTransferMapping: return coll_map - def _get_transfer_data_dict(self, source_obj, target_obj, transfer_data_item): + def _get_transfer_data_dict(self, transfer_data_item): return { 'name': transfer_data_item.name, "owner": transfer_data_item.owner, "surrender": transfer_data_item.surrender, - "target_obj": target_obj, } def _transfer_data_pair_not_local(self, td_1, td_2): @@ -231,9 +230,8 @@ class AssetTransferMapping: if self._transfer_data_is_surrendered(transfer_data_item): return td_type_key = transfer_data_item.type - transfer_data_dict = self._get_transfer_data_dict( - source_obj, target_obj, transfer_data_item - ) + transfer_data_dict = self._get_transfer_data_dict(transfer_data_item) + if not source_obj in self.transfer_data_map: self.transfer_data_map[source_obj] = { "target_obj": target_obj, diff --git 
a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py index 734ba5b7..613d9ce6 100644 --- a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py +++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py @@ -14,6 +14,8 @@ from ... import constants from .transfer_util import ( transfer_data_add_entry, check_transfer_data_entry, + link_objs_to_collection, + isolate_collection, ) @@ -100,12 +102,11 @@ def init_transfer_data( def apply_transfer_data_items( context, source_obj: bpy.types.Object, + target_obj: bpy.types.Object, td_type_key: str, transfer_data_dicts: List[dict], ): # Get source/target from first item in list, because all items in list are same object/type - target_obj = transfer_data_dicts[0]["target_obj"] - if target_obj is None: print(f"Failed to Transfer Data for {transfer_data_dict.id_data.name}") return @@ -179,7 +180,16 @@ def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None: context (bpy.types.Context): context of .blend file transfer_data_map: Map generated by class AssetTransferMapping """ - - for source_obj in transfer_data_map: - for td_type_key, td_dicts in transfer_data_map[source_obj].items(): - apply_transfer_data_items(context, source_obj, td_type_key, td_dicts) + # Create/isolate tmp collection to reduce depsgraph update time + td_col = bpy.data.collections.new("ISO_COL_TEMP") + with isolate_collection(td_col): + # Loop over objects in Transfer data map + for source_obj in transfer_data_map: + target_obj = transfer_data_map[source_obj]["target_obj"] + td_types = transfer_data_map[source_obj]["td_types"] + with link_objs_to_collection(set([target_obj, source_obj]), td_col): + for td_type_key, td_dicts in td_types.items(): + apply_transfer_data_items( + context, source_obj, target_obj, td_type_key, td_dicts + ) + bpy.data.collections.remove(td_col) diff --git 
a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_util.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_util.py index c408ab9a..01703ed6 100644 --- a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_util.py +++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_util.py @@ -1,7 +1,7 @@ import bpy from ..naming import merge_get_basename from ..task_layer import get_transfer_data_owner - +import contextlib def check_transfer_data_entry( transfer_data: bpy.types.CollectionProperty, key: str, td_type_key: str @@ -131,3 +131,36 @@ def transfer_data_item_init( obj_name=obj.name, surrender=auto_surrender, ) + + +@contextlib.contextmanager +def isolate_collection(iso_col: bpy.types.Collection): + col_exclude = {} + view_layer_col = bpy.context.view_layer.layer_collection + view_layer_col.collection.children.link(iso_col) + for col in view_layer_col.children: + col_exclude[col.name] = col.exclude + + try: + # Exclude all collections that are not iso collection + for col in view_layer_col.children: + view_layer_col.exclude = col.name != iso_col.name + yield + + finally: + for col in view_layer_col.children: + view_layer_col.exclude = col_exclude[col.name] + view_layer_col.collection.children.unlink(iso_col) + + +@contextlib.contextmanager +def link_objs_to_collection(objs: set, col: bpy.types.Collection): + ... 
+ try: + for obj in objs: + col.objects.link(obj) + yield + + finally: + for obj in objs: + col.objects.unlink(obj) -- 2.30.2 From 5a475a77313b62df99a57ff971f6a93249569966 Mon Sep 17 00:00:00 2001 From: Nick Alberelli Date: Mon, 15 Apr 2024 12:31:36 -0400 Subject: [PATCH 06/15] Fix bug in Transfer Data Transfer --- .../asset_pipeline/merge/transfer_data/transfer_core.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py index 613d9ce6..f069c128 100644 --- a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py +++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py @@ -20,14 +20,16 @@ from .transfer_util import ( # TODO use logging module here -def copy_transfer_data_ownership(td_type_key: str, transfer_data_dict: dict) -> None: +def copy_transfer_data_ownership( + td_type_key: str, target_obj: bpy.types.Object, transfer_data_dict: dict +) -> None: """Copy Transferable Data item to object if non entry exists Args: transfer_data_item: Item of bpy.types.CollectionProperty from source object target_obj (bpy.types.Object): Object to add Transferable Data item to """ - transfer_data = transfer_data_dict["target_obj"].transfer_data_ownership + transfer_data = target_obj.transfer_data_ownership matches = check_transfer_data_entry( transfer_data, transfer_data_dict["name"], @@ -112,7 +114,7 @@ def apply_transfer_data_items( return for transfer_data_dict in transfer_data_dicts: - copy_transfer_data_ownership(td_type_key, transfer_data_dict) + copy_transfer_data_ownership(td_type_key, target_obj, transfer_data_dict) # if TD Source is Target, restore the ownership data but don't transfer anything if source_obj == target_obj: -- 2.30.2 From e235a9dc5ebd3f24791d9a946a62ffc826887801 Mon Sep 17 00:00:00 2001 From: Nick Alberelli Date: Mon, 15 Apr 2024 12:45:45
-0400 Subject: [PATCH 07/15] Use Logging for Prints in Transfer Core / Vertex Group --- .../merge/transfer_data/transfer_core.py | 23 ++++++++++++++++--- .../transfer_functions/vertex_groups.py | 3 ++- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py index f069c128..83ea7deb 100644 --- a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py +++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py @@ -10,7 +10,7 @@ from .transfer_functions import ( materials, ) from typing import List -from ... import constants +from ... import constants, logging from .transfer_util import ( transfer_data_add_entry, check_transfer_data_entry, @@ -108,9 +108,10 @@ def apply_transfer_data_items( td_type_key: str, transfer_data_dicts: List[dict], ): + logger = logging.get_logger() # Get source/target from first item in list, because all items in list are same object/type if target_obj is None: - print(f"Failed to Transfer Data for {transfer_data_dict.id_data.name}") + logger.warning(f"Failed to Transfer {td_type_key.title()} from {source_obj.name}") return for transfer_data_dict in transfer_data_dicts: @@ -122,7 +123,7 @@ def apply_transfer_data_items( if td_type_key == constants.VERTEX_GROUP_KEY: # Transfer All Vertex Groups in one go - print(f"Transferring all Vertex Groups from {source_obj.name} to {target_obj.name}") + logger.debug(f"Transferring All Vertex Groups from {source_obj.name} to {target_obj.name}.") vertex_groups.transfer_vertex_groups( vertex_group_names=[item["name"] for item in transfer_data_dicts], target_obj=target_obj, @@ -130,6 +131,9 @@ def apply_transfer_data_items( ) if td_type_key == constants.MODIFIER_KEY: for transfer_data_dict in transfer_data_dicts: + logger.debug( + f"Transferring Modifier {transfer_data_dict['name']} from {source_obj.name} to 
{target_obj.name}." + ) modifers.transfer_modifier( modifier_name=transfer_data_dict["name"], target_obj=target_obj, @@ -137,12 +141,16 @@ def apply_transfer_data_items( ) if td_type_key == constants.CONSTRAINT_KEY: for transfer_data_dict in transfer_data_dicts: + logger.debug( + f"Transferring Constraint {transfer_data_dict['name']} from {source_obj.name} to {target_obj.name}." + ) constraints.transfer_constraint( constraint_name=transfer_data_dict["name"], target_obj=target_obj, source_obj=source_obj, ) if td_type_key == constants.MATERIAL_SLOT_KEY: + logger.debug(f"Transferring Materials from {source_obj.name} to {target_obj.name}.") for transfer_data_dict in transfer_data_dicts: materials.transfer_materials( target_obj=target_obj, @@ -150,6 +158,9 @@ def apply_transfer_data_items( ) if td_type_key == constants.SHAPE_KEY_KEY: for transfer_data_dict in transfer_data_dicts: + logger.debug( + f"Transferring Shape Key {transfer_data_dict['name']} from {source_obj.name} to {target_obj.name}." + ) shape_keys.transfer_shape_key( context=context, target_obj=target_obj, @@ -158,6 +169,9 @@ def apply_transfer_data_items( ) if td_type_key == constants.ATTRIBUTE_KEY: for transfer_data_dict in transfer_data_dicts: + logger.debug( + f"Transferring Attribute {transfer_data_dict['name']} from {source_obj.name} to {target_obj.name}." + ) attributes.transfer_attribute( target_obj=target_obj, source_obj=source_obj, @@ -165,6 +179,9 @@ def apply_transfer_data_items( ) if td_type_key == constants.PARENT_KEY: for transfer_data_dict in transfer_data_dicts: + logger.debug( + f"Transferring Parent Relationship from {source_obj.name} to {target_obj.name}." 
+ ) parent.transfer_parent( target_obj=target_obj, source_obj=source_obj, diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/vertex_groups.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/vertex_groups.py index 04241b92..a82c267a 100644 --- a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/vertex_groups.py +++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/vertex_groups.py @@ -40,9 +40,10 @@ def transfer_vertex_groups( target_obj: bpy.types.Object, source_obj: bpy.types.Object, ): + logger = logging.get_logger() for vertex_group_name in vertex_group_names: if not source_obj.vertex_groups.get(vertex_group_name): - print(f"ERROR Vertex Group {vertex_group_name} not found in {source_obj.name}") + logger.error(f"Vertex Group {vertex_group_name} not found in {source_obj.name}") return # If topology matches transfer directly, otherwise use vertex proximity -- 2.30.2 From e26b9c4c9c1180344b8c13d698089f106a09ea83 Mon Sep 17 00:00:00 2001 From: Nick Alberelli Date: Mon, 15 Apr 2024 13:15:13 -0400 Subject: [PATCH 08/15] Fix Bug in `AssetTransferDataTemp` --- scripts-blender/addons/asset_pipeline/props.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/scripts-blender/addons/asset_pipeline/props.py b/scripts-blender/addons/asset_pipeline/props.py index a6f460f7..85936a88 100644 --- a/scripts-blender/addons/asset_pipeline/props.py +++ b/scripts-blender/addons/asset_pipeline/props.py @@ -57,8 +57,7 @@ class AssetTransferDataTemp(bpy.types.PropertyGroup): items=constants.TRANSFER_DATA_TYPES_ENUM_ITEMS, ) surrender: bpy.props.BoolProperty(name="Surrender Ownership", default=False) - obj: bpy.props.PointerProperty(type=bpy.types.Object) - target_obj: bpy.props.PointerProperty(type=bpy.types.Object) + obj_name: bpy.props.StringProperty(name="Object Name", default="") def check_transfer_data_entry(self) -> set: """ @@ -125,12 
+124,12 @@ class AssetPipeline(bpy.types.PropertyGroup): temp_transfer_data: bpy.props.CollectionProperty(type=AssetTransferDataTemp) - def add_temp_transfer_data(self, name, owner, type, obj_name, surrender): + def add_temp_transfer_data(self, name, owner, type_key, obj_name, surrender): new_transfer_data = self.temp_transfer_data transfer_data_item = new_transfer_data.add() transfer_data_item.name = name transfer_data_item.owner = owner - transfer_data_item.type = type + transfer_data_item.type = type_key transfer_data_item.obj_name = obj_name transfer_data_item.surrender = surrender -- 2.30.2 From 9e2cf865e41245a84f26ec25ba2b9817f4392f0a Mon Sep 17 00:00:00 2001 From: Nick Alberelli Date: Mon, 15 Apr 2024 15:46:26 -0400 Subject: [PATCH 09/15] Fix bug in collection isolation --- .../asset_pipeline/merge/transfer_data/transfer_util.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_util.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_util.py index 01703ed6..8cc1f976 100644 --- a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_util.py +++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_util.py @@ -144,12 +144,12 @@ def isolate_collection(iso_col: bpy.types.Collection): try: # Exclude all collections that are not iso collection for col in view_layer_col.children: - view_layer_col.exclude = col.name != iso_col.name + col.exclude = col.name != iso_col.name yield finally: for col in view_layer_col.children: - view_layer_col.exclude = col_exclude[col.name] + col.exclude = col_exclude[col.name] view_layer_col.collection.children.unlink(iso_col) -- 2.30.2 From 0c6489cba3fd31a636c8ee18884ac5fe5b81c07a Mon Sep 17 00:00:00 2001 From: Nick Alberelli Date: Tue, 16 Apr 2024 10:43:32 -0400 Subject: [PATCH 10/15] Add time profiling to merge process --- .../addons/asset_pipeline/merge/core.py | 23 ++++++++++++++++++- 1 file 
changed, 22 insertions(+), 1 deletion(-) diff --git a/scripts-blender/addons/asset_pipeline/merge/core.py b/scripts-blender/addons/asset_pipeline/merge/core.py index 99ec2f3b..70d53168 100644 --- a/scripts-blender/addons/asset_pipeline/merge/core.py +++ b/scripts-blender/addons/asset_pipeline/merge/core.py @@ -21,6 +21,7 @@ from .transfer_data.transfer_functions.transfer_function_util.active_indexes imp from pathlib import Path from typing import Dict from .. import constants, logging +import time def ownership_transfer_data_cleanup( @@ -175,6 +176,10 @@ def merge_task_layer( local_tls: (list[str]): list of task layers that are local to the current file external_file (Path): external file to pull data into the current file from """ + + logger = logging.get_logger() + + start_time = time.time() local_col = context.scene.asset_pipeline.asset_collection if not local_col: return "Unable to find Asset Collection" @@ -185,6 +190,10 @@ def merge_task_layer( appended_col = import_data_from_lib(external_file, "collections", col_base_name) merge_add_suffix_to_hierarchy(appended_col, external_suffix) + imported_time = time.time() + logger.debug( + f"{imported_time - start_time} sec: Imported Collection from Publish & Add Suffixes" + ) local_col = bpy.data.collections[f"{col_base_name}.{local_suffix}"] external_col = bpy.data.collections[f"{col_base_name}.{external_suffix}"] @@ -204,6 +213,8 @@ def merge_task_layer( type_name = get_id_type_name(type(conflict_obj)) error_msg += f"Ownership conflict found for {type_name}: '{conflict_obj.name}'\n" return error_msg + mapped_time = time.time() + logger.debug(f"{mapped_time - imported_time} sec: To Create Asset Mapping") # Remove all Transferable Data from target objects for source_obj in map.object_map: @@ -211,11 +222,15 @@ def merge_task_layer( target_obj.transfer_data_ownership.clear() apply_transfer_data(context, map.transfer_data_map) + apply_td_time = time.time() + logger.debug(f"{apply_td_time - mapped_time} sec: To Apply 
all Transferable Data") for source_obj in map.object_map: target_obj = map.object_map[source_obj] remap_user(source_obj, target_obj) transfer_data_clean(target_obj) + obj_remap_time = time.time() + logger.debug(f"{obj_remap_time - apply_td_time} sec: To remap all objects") # Restore Active UV Layer and Active Color Attributes for _, index_map_item in map.index_map.items(): @@ -224,6 +239,8 @@ def merge_task_layer( transfer_active_color_attribute_index( target_obj, index_map_item.get('active_color_attribute_name') ) + index_time = time.time() + logger.debug(f"{index_time - obj_remap_time} sec: To restore active indexes on all objects") for col in map.collection_map: remap_user(col, map.collection_map[col]) @@ -233,13 +250,17 @@ def merge_task_layer( for col in map.external_col_to_remove: local_col.children.unlink(col) + col_remap_time = time.time() + logger.debug(f"{col_remap_time - index_time} sec: To remap all collections") for id in map.shared_id_map: remap_user(id, map.shared_id_map[id]) + shared_id_remap_time = time.time() + logger.debug(f"{shared_id_remap_time - col_remap_time} sec: To remap all shared ids") bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=False, do_recursive=True) merge_remove_suffix_from_hierarchy(local_col) - + logger.info(f"{time.time() - start_time} sec: To complete entire merge process") def import_data_from_lib( libpath: Path, -- 2.30.2 From 4bd9cc2131c5122835066f465a2afc3ff1fd74bc Mon Sep 17 00:00:00 2001 From: Nick Alberelli Date: Tue, 16 Apr 2024 11:00:28 -0400 Subject: [PATCH 11/15] Move all Timing prints to End of Merge Process --- .../addons/asset_pipeline/merge/core.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/scripts-blender/addons/asset_pipeline/merge/core.py b/scripts-blender/addons/asset_pipeline/merge/core.py index 70d53168..37580fd3 100644 --- a/scripts-blender/addons/asset_pipeline/merge/core.py +++ b/scripts-blender/addons/asset_pipeline/merge/core.py @@ -191,9 
+191,6 @@ def merge_task_layer( appended_col = import_data_from_lib(external_file, "collections", col_base_name) merge_add_suffix_to_hierarchy(appended_col, external_suffix) imported_time = time.time() - logger.debug( - f"{imported_time - start_time} sec: Imported Collection from Publish & Add Suffixes" - ) local_col = bpy.data.collections[f"{col_base_name}.{local_suffix}"] external_col = bpy.data.collections[f"{col_base_name}.{external_suffix}"] @@ -214,7 +211,6 @@ def merge_task_layer( error_msg += f"Ownership conflict found for {type_name}: '{conflict_obj.name}'\n" return error_msg mapped_time = time.time() - logger.debug(f"{mapped_time - imported_time} sec: To Create Asset Mapping") # Remove all Transferable Data from target objects for source_obj in map.object_map: @@ -223,14 +219,12 @@ def merge_task_layer( apply_transfer_data(context, map.transfer_data_map) apply_td_time = time.time() - logger.debug(f"{apply_td_time - mapped_time} sec: To Apply all Transferable Data") for source_obj in map.object_map: target_obj = map.object_map[source_obj] remap_user(source_obj, target_obj) transfer_data_clean(target_obj) obj_remap_time = time.time() - logger.debug(f"{obj_remap_time - apply_td_time} sec: To remap all objects") # Restore Active UV Layer and Active Color Attributes for _, index_map_item in map.index_map.items(): @@ -240,7 +234,6 @@ def merge_task_layer( target_obj, index_map_item.get('active_color_attribute_name') ) index_time = time.time() - logger.debug(f"{index_time - obj_remap_time} sec: To restore active indexes on all objects") for col in map.collection_map: remap_user(col, map.collection_map[col]) @@ -251,15 +244,20 @@ def merge_task_layer( for col in map.external_col_to_remove: local_col.children.unlink(col) col_remap_time = time.time() - logger.debug(f"{col_remap_time - index_time} sec: To remap all collections") for id in map.shared_id_map: remap_user(id, map.shared_id_map[id]) shared_id_remap_time = time.time() - 
logger.debug(f"{shared_id_remap_time - col_remap_time} sec: To remap all shared ids") bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=False, do_recursive=True) merge_remove_suffix_from_hierarchy(local_col) + logger.debug(f"{imported_time - start_time} sec: To import Collection & add suffixes") + logger.debug(f"{shared_id_remap_time - col_remap_time} sec: To remap all Shared Ids") + logger.debug(f"{mapped_time - imported_time} sec: To create Asset Mapping") + logger.debug(f"{apply_td_time - mapped_time} sec: To apply all Transferable Data") + logger.debug(f"{obj_remap_time - apply_td_time} sec: To remap all Objects") + logger.debug(f"{index_time - obj_remap_time} sec: To restore Active Indexes on all Objects") + logger.debug(f"{col_remap_time - index_time} sec: To remap all Collections") logger.info(f"{time.time() - start_time} sec: To complete entire merge process") def import_data_from_lib( -- 2.30.2 From 524601f9ff5c2969b56f4f058d660ddbfae9a506 Mon Sep 17 00:00:00 2001 From: Nick Alberelli Date: Wed, 17 Apr 2024 12:06:31 -0400 Subject: [PATCH 12/15] Add Profiler --- .../addons/asset_pipeline/logging.py | 64 +++++++++++++++++++ .../addons/asset_pipeline/merge/core.py | 17 ++--- scripts-blender/addons/asset_pipeline/ops.py | 11 +++- .../addons/asset_pipeline/opscore.py | 1 + 4 files changed, 84 insertions(+), 9 deletions(-) diff --git a/scripts-blender/addons/asset_pipeline/logging.py b/scripts-blender/addons/asset_pipeline/logging.py index d0faf3f4..38d209d4 100644 --- a/scripts-blender/addons/asset_pipeline/logging.py +++ b/scripts-blender/addons/asset_pipeline/logging.py @@ -45,3 +45,67 @@ def get_logger(name="asset_pipeline"): # add ch to logger logger.addHandler(ch) return logger + + +PROFILE_KEYS = { + "IMPORT": "To import Collection & add suffixes", + "MAPPING": "To create Asset Mapping", + "TRANSFER_DATA": "To apply all Transferable Data", + "OBJECTS": "To remap all Obejcts", + "INDEXES": "To restore Active Indexes on all Objects", + 
"COLLECTIONS": "To remap all Collections", + "SHARED_IDS": "To remap all Shared IDs", + "MERGE": "To complete entire merge process", +} + +INFO_KEYS = ["MERGE"] # Profile Keys to print in the logger's info mode + +_profiler_instance = None + + +def get_profiler(): + global _profiler_instance + if not _profiler_instance: + _profiler_instance = Profiler() + return _profiler_instance + + +class Profiler: + + def __init__(self) -> None: + self.pull_profiles = {} + self.push_profiles = {} + self._logger = get_logger() + + def add(self, elapsed_time: int, key: str): + if self._is_push: + profiles = self.push_profiles + else: # is pull + profiles = self.pull_profiles + + if key not in profiles: + profiles[key] = elapsed_time + else: + profiles[key] += elapsed_time + + def log_all(self): + self.log_profiles("PULL", self.pull_profiles) + self.log_profiles("PUSH", self.push_profiles) + + def log_profiles(self, direction: str, profiles: dict): + if profiles == {}: + return + for key, value in profiles.items(): + msg = f"{direction} {key} - {value} seconds {PROFILE_KEYS[key]}" + if key in INFO_KEYS: + self._logger.info(msg) + else: + self._logger.debug(msg) + + def reset(self): + self.pull_profiles = {} + self._is_push = False + self._logger = get_logger() + + def set_push(self, is_push=True): + self._is_push = is_push diff --git a/scripts-blender/addons/asset_pipeline/merge/core.py b/scripts-blender/addons/asset_pipeline/merge/core.py index 37580fd3..6c6affd8 100644 --- a/scripts-blender/addons/asset_pipeline/merge/core.py +++ b/scripts-blender/addons/asset_pipeline/merge/core.py @@ -178,6 +178,7 @@ def merge_task_layer( """ logger = logging.get_logger() + profiles = logging.get_profiler() start_time = time.time() local_col = context.scene.asset_pipeline.asset_collection @@ -191,6 +192,7 @@ def merge_task_layer( appended_col = import_data_from_lib(external_file, "collections", col_base_name) merge_add_suffix_to_hierarchy(appended_col, external_suffix) imported_time = 
time.time() + profiles.add((imported_time - start_time), "IMPORT") local_col = bpy.data.collections[f"{col_base_name}.{local_suffix}"] external_col = bpy.data.collections[f"{col_base_name}.{external_suffix}"] @@ -211,6 +213,7 @@ def merge_task_layer( error_msg += f"Ownership conflict found for {type_name}: '{conflict_obj.name}'\n" return error_msg mapped_time = time.time() + profiles.add((mapped_time - imported_time), "MAPPING") # Remove all Transferable Data from target objects for source_obj in map.object_map: @@ -219,12 +222,14 @@ def merge_task_layer( apply_transfer_data(context, map.transfer_data_map) apply_td_time = time.time() + profiles.add((apply_td_time - mapped_time), "TRANSFER_DATA") for source_obj in map.object_map: target_obj = map.object_map[source_obj] remap_user(source_obj, target_obj) transfer_data_clean(target_obj) obj_remap_time = time.time() + profiles.add((obj_remap_time - apply_td_time), "OBJECTS") # Restore Active UV Layer and Active Color Attributes for _, index_map_item in map.index_map.items(): @@ -234,6 +239,7 @@ def merge_task_layer( target_obj, index_map_item.get('active_color_attribute_name') ) index_time = time.time() + profiles.add((index_time - obj_remap_time), "INDEXES") for col in map.collection_map: remap_user(col, map.collection_map[col]) @@ -244,21 +250,16 @@ def merge_task_layer( for col in map.external_col_to_remove: local_col.children.unlink(col) col_remap_time = time.time() + profiles.add((col_remap_time - index_time), "COLLECTIONS") for id in map.shared_id_map: remap_user(id, map.shared_id_map[id]) shared_id_remap_time = time.time() + profiles.add((shared_id_remap_time - col_remap_time), "SHARED_IDS") bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=False, do_recursive=True) merge_remove_suffix_from_hierarchy(local_col) - logger.debug(f"{imported_time - start_time} sec: To import Collection & add suffixes") - logger.debug(f"{shared_id_remap_time - col_remap_time} sec: To remap all Shared Ids") - 
logger.debug(f"{mapped_time - imported_time} sec: To create Asset Mapping") - logger.debug(f"{apply_td_time - mapped_time} sec: To apply all Transferable Data") - logger.debug(f"{obj_remap_time - apply_td_time} sec: To remap all Objects") - logger.debug(f"{index_time - obj_remap_time} sec: To restore Active Indexes on all Objects") - logger.debug(f"{col_remap_time - index_time} sec: To remap all Collections") - logger.info(f"{time.time() - start_time} sec: To complete entire merge process") + profiles.add((time.time() - start_time), "MERGE") def import_data_from_lib( libpath: Path, diff --git a/scripts-blender/addons/asset_pipeline/ops.py b/scripts-blender/addons/asset_pipeline/ops.py index ae790491..af1cb297 100644 --- a/scripts-blender/addons/asset_pipeline/ops.py +++ b/scripts-blender/addons/asset_pipeline/ops.py @@ -5,7 +5,7 @@ from pathlib import Path from typing import Set import os -from . import constants, config, opscore +from . import constants, config, opscore, logging from .asset_catalog import get_asset_catalog_items, get_asset_id from .config import verify_task_layer_json_data from .hooks import Hooks, get_production_hook_dir, get_asset_hook_dir @@ -309,6 +309,8 @@ class ASSETPIPE_OT_sync_pull(bpy.types.Operator): opscore.sync_draw(self, context) def execute(self, context: bpy.types.Context): + profiler = logging.get_profiler() + profiler.reset() asset_col = context.scene.asset_pipeline.asset_collection if self.save: save_images() @@ -324,6 +326,7 @@ class ASSETPIPE_OT_sync_pull(bpy.types.Operator): hooks_instance.execute_hooks(merge_mode="pull", merge_status='post', asset_col=asset_col) self.report({'INFO'}, "Asset Pull Complete") + profiler.log_all() return {'FINISHED'} @@ -370,12 +373,15 @@ class ASSETPIPE_OT_sync_push(bpy.types.Operator): opscore.sync_draw(self, context) def execute(self, context: bpy.types.Context): + profiler = logging.get_profiler() + profiler.reset() asset_col = context.scene.asset_pipeline.asset_collection hooks_instance = 
Hooks() hooks_instance.load_hooks(context) save_images() bpy.ops.wm.save_mainfile() + # Separate if statement so hook can execute before updating ownership/prep sync if self.pull: hooks_instance.execute_hooks(merge_mode="pull", merge_status='pre', asset_col=asset_col) # Find current task Layer @@ -387,6 +393,8 @@ class ASSETPIPE_OT_sync_push(bpy.types.Operator): hooks_instance.execute_hooks( merge_mode="pull", merge_status='post', asset_col=asset_col ) + + profiler.set_push() hooks_instance.execute_hooks(merge_mode="push", merge_status='pre', asset_col=asset_col) bpy.ops.wm.save_mainfile(filepath=self._current_file.__str__()) @@ -395,6 +403,7 @@ class ASSETPIPE_OT_sync_push(bpy.types.Operator): self.report({'INFO'}, "Asset Sync Complete") else: self.report({'INFO'}, "Asset Force Push Complete") + profiler.log_all() return {'FINISHED'} diff --git a/scripts-blender/addons/asset_pipeline/opscore.py b/scripts-blender/addons/asset_pipeline/opscore.py index d8486252..037a7616 100644 --- a/scripts-blender/addons/asset_pipeline/opscore.py +++ b/scripts-blender/addons/asset_pipeline/opscore.py @@ -20,6 +20,7 @@ from .merge.task_layer import draw_task_layer_selection from .asset_catalog import get_asset_id from .
import prefs + def sync_poll(cls, context): if any([img.is_dirty for img in bpy.data.images]): cls.poll_message_set("Please save unsaved Images") -- 2.30.2 From 064a8fe392d96a552c6c4c21ad734838f416f7fb Mon Sep 17 00:00:00 2001 From: Nick Alberelli Date: Wed, 17 Apr 2024 13:40:15 -0400 Subject: [PATCH 13/15] Only show 3 significant Digits, in non-scientific notation --- scripts-blender/addons/asset_pipeline/logging.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/scripts-blender/addons/asset_pipeline/logging.py b/scripts-blender/addons/asset_pipeline/logging.py index 38d209d4..a617a787 100644 --- a/scripts-blender/addons/asset_pipeline/logging.py +++ b/scripts-blender/addons/asset_pipeline/logging.py @@ -96,12 +96,24 @@ class Profiler: if profiles == {}: return for key, value in profiles.items(): - msg = f"{direction} {key} - {value} seconds {PROFILE_KEYS[key]}" + seconds = self.get_non_scientific_number(value) + msg = f"{direction} {key} - {seconds} seconds {PROFILE_KEYS[key]}" if key in INFO_KEYS: self._logger.info(msg) else: self._logger.debug(msg) + def get_non_scientific_number(self, x: float): + float_str = f'{x:.64f}'.rstrip('0') + + significant_digits = 0 + for index, c in enumerate(float_str): + if significant_digits == 3: + return float_str[:index:] + + if c != "0" and c != ".": + significant_digits += 1 + def reset(self): self.pull_profiles = {} self._is_push = False -- 2.30.2 From 38652eb458d4e33a08f4ba2e662e80dc7f51bbd4 Mon Sep 17 00:00:00 2001 From: Nick Alberelli Date: Wed, 17 Apr 2024 13:43:07 -0400 Subject: [PATCH 14/15] Profile total time to execute Push/Pull operations --- scripts-blender/addons/asset_pipeline/logging.py | 3 ++- scripts-blender/addons/asset_pipeline/opscore.py | 7 +++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/scripts-blender/addons/asset_pipeline/logging.py b/scripts-blender/addons/asset_pipeline/logging.py index a617a787..1a9f0ab6 100644 --- 
a/scripts-blender/addons/asset_pipeline/logging.py +++ b/scripts-blender/addons/asset_pipeline/logging.py @@ -56,9 +56,10 @@ PROFILE_KEYS = { "COLLECTIONS": "To remap all Collections", "SHARED_IDS": "To remap all Shared IDs", "MERGE": "To complete entire merge process", + "TOTAL": "Total time to sync this direction", } -INFO_KEYS = ["MERGE"] # Profile Keys to print in the logger's info mode +INFO_KEYS = ["TOTAL"] # Profile Keys to print in the logger's info mode _profiler_instance = None diff --git a/scripts-blender/addons/asset_pipeline/opscore.py b/scripts-blender/addons/asset_pipeline/opscore.py index 037a7616..74fdfac5 100644 --- a/scripts-blender/addons/asset_pipeline/opscore.py +++ b/scripts-blender/addons/asset_pipeline/opscore.py @@ -1,4 +1,5 @@ import bpy +import time from pathlib import Path from .merge.publish import ( find_sync_target, @@ -135,6 +136,8 @@ def update_temp_file_paths(self, context, temp_file_path): def sync_execute_pull(self, context): + start_time = time.time() + profiler = logging.get_profiler() logger = logging.get_logger() logger.info("Pulling Asset") temp_file_path = create_temp_file_backup(self, context) @@ -161,9 +164,12 @@ def sync_execute_pull(self, context): context.scene.asset_pipeline.sync_error = True self.report({'ERROR'}, error_msg) return {'CANCELLED'} + profiler.add(time.time() - start_time, "TOTAL") def sync_execute_push(self, context): + start_time = time.time() + profiler = logging.get_profiler() logger = logging.get_logger() logger.info("Pushing Asset") _catalog_id = None @@ -204,3 +210,4 @@ def sync_execute_push(self, context): bpy.ops.wm.save_as_mainfile(filepath=file_path) bpy.ops.wm.open_mainfile(filepath=self._current_file.__str__()) + profiler.add(time.time() - start_time, "TOTAL") -- 2.30.2 From fd0e3f1b79c5d5c7cba0684832b0105e66323b1c Mon Sep 17 00:00:00 2001 From: Nick Alberelli Date: Wed, 17 Apr 2024 14:03:21 -0400 Subject: [PATCH 15/15] Add Transfer Data to TD Keys --- 
scripts-blender/addons/asset_pipeline/logging.py | 11 ++++++++++- .../merge/transfer_data/transfer_core.py | 5 ++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/scripts-blender/addons/asset_pipeline/logging.py b/scripts-blender/addons/asset_pipeline/logging.py index 1a9f0ab6..1056202f 100644 --- a/scripts-blender/addons/asset_pipeline/logging.py +++ b/scripts-blender/addons/asset_pipeline/logging.py @@ -20,7 +20,7 @@ import logging from typing import List, Tuple -from . import prefs +from . import prefs, constants def get_logger(name="asset_pipeline"): @@ -59,6 +59,8 @@ PROFILE_KEYS = { "TOTAL": "Total time to sync this direction", } +TD_KEYS = [type for type in constants.TRANSFER_DATA_TYPES] + INFO_KEYS = ["TOTAL"] # Profile Keys to print in the logger's info mode _profiler_instance = None @@ -98,6 +100,13 @@ class Profiler: return for key, value in profiles.items(): seconds = self.get_non_scientific_number(value) + # Special case for transfer data keys + if key in TD_KEYS: + name = constants.TRANSFER_DATA_TYPES[key][0] + self._logger.debug( + f"{direction} TD: {name.upper()} - {seconds} seconds to transfer {name} data for all objects" + ) + continue msg = f"{direction} {key} - {seconds} seconds {PROFILE_KEYS[key]}" if key in INFO_KEYS: self._logger.info(msg) diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py index 83ea7deb..3867f0c4 100644 --- a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py +++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py @@ -1,5 +1,5 @@ import bpy - +import time from .transfer_functions import ( attributes, constraints, @@ -200,6 +200,7 @@ def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None: transfer_data_map: Map generated by class AssetTransferMapping """ # Create/isolate tmp collection to reduce depsgraph update time + 
profiler = logging.get_profiler() td_col = bpy.data.collections.new("ISO_COL_TEMP") with isolate_collection(td_col): # Loop over objects in Transfer data map @@ -208,7 +209,9 @@ def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None: td_types = transfer_data_map[source_obj]["td_types"] with link_objs_to_collection(set([target_obj, source_obj]), td_col): for td_type_key, td_dicts in td_types.items(): + start_time = time.time() apply_transfer_data_items( context, source_obj, target_obj, td_type_key, td_dicts ) + profiler.add(time.time() - start_time, td_type_key) bpy.data.collections.remove(td_col) -- 2.30.2