Asset Pipeline: Improve Performance #235

Merged
3 changed files with 78 additions and 99 deletions
Showing only changes of commit f2247a8383 - Show all commits

View File

@ -169,20 +169,13 @@ class AssetTransferMapping:
return coll_map return coll_map
def _create_temp_transfer_data_item(self, source_obj, target_obj, transfer_data_item): def _get_transfer_data_dict(self, source_obj, target_obj, transfer_data_item):
temp_transfer_data = bpy.context.scene.asset_pipeline.temp_transfer_data return {
temp_transfer_data_item_index = len(temp_transfer_data) 'name': transfer_data_item.name,
temp_transfer_data_item = transfer_data_add_entry( "owner": transfer_data_item.owner,
transfer_data=temp_transfer_data, "surrender": transfer_data_item.surrender,
name=transfer_data_item.name, "target_obj": target_obj,
td_type_key=transfer_data_item.type, }
task_layer_name=transfer_data_item.owner,
surrender=transfer_data_item.surrender,
target_obj=target_obj,
obj=source_obj,
)
return temp_transfer_data_item_index
def _transfer_data_pair_not_local(self, td_1, td_2): def _transfer_data_pair_not_local(self, td_1, td_2):
# Returns true if neither owner is local to current file # Returns true if neither owner is local to current file
@ -238,16 +231,16 @@ class AssetTransferMapping:
if self._transfer_data_is_surrendered(transfer_data_item): if self._transfer_data_is_surrendered(transfer_data_item):
return return
td_type_key = transfer_data_item.type td_type_key = transfer_data_item.type
temp_td_index = self._create_temp_transfer_data_item( transfer_data_dict = self._get_transfer_data_dict(
source_obj, target_obj, transfer_data_item source_obj, target_obj, transfer_data_item
) )
if not source_obj in self.transfer_data_map: if not source_obj in self.transfer_data_map:
self.transfer_data_map[source_obj] = {td_type_key: [temp_td_index]} self.transfer_data_map[source_obj] = {td_type_key: [transfer_data_dict]}
if not td_type_key in self.transfer_data_map[source_obj]: if not td_type_key in self.transfer_data_map[source_obj]:
self.transfer_data_map[source_obj][td_type_key] = [temp_td_index] self.transfer_data_map[source_obj][td_type_key] = [transfer_data_dict]
else: else:
self.transfer_data_map[source_obj][td_type_key].append(temp_td_index) self.transfer_data_map[source_obj][td_type_key].append(transfer_data_dict)
def _transfer_data_map_item(self, source_obj, target_obj, transfer_data_item): def _transfer_data_map_item(self, source_obj, target_obj, transfer_data_item):
"""Verifies if Transfer Data Item is valid/can be mapped""" """Verifies if Transfer Data Item is valid/can be mapped"""
@ -268,12 +261,11 @@ class AssetTransferMapping:
def _gen_transfer_data_map(self): def _gen_transfer_data_map(self):
# Generate Mapping for Transfer Data Items # Generate Mapping for Transfer Data Items
temp_transfer_data = bpy.context.scene.asset_pipeline.temp_transfer_data
temp_transfer_data.clear()
for objs in self.object_map.items(): for objs in self.object_map.items():
source_obj, target_obj = objs _, target_obj = objs
for obj in objs: for obj in objs:
# Must execute for both objs in map (so we map external and local TD) # Must execute for both objs in map (so we map external and local TD)
# Must include maps even if obj==target_obj to preserve existing local TD entry
for transfer_data_item in obj.transfer_data_ownership: for transfer_data_item in obj.transfer_data_ownership:
if self._transfer_data_check_conflict(obj, transfer_data_item): if self._transfer_data_check_conflict(obj, transfer_data_item):
continue continue
@ -284,25 +276,20 @@ class AssetTransferMapping:
# Generate a Map of Indexes that need to be set post merge # Generate a Map of Indexes that need to be set post merge
# Stores active_uv & active_color_attribute # Stores active_uv & active_color_attribute
index_map = {} index_map = {}
temp_transfer_data_items = []
temp_transfer_data = bpy.context.scene.asset_pipeline.temp_transfer_data
for source_obj in self.transfer_data_map: for source_obj in self.transfer_data_map:
for td_type_key, td_indexes in self.transfer_data_map[source_obj].items(): for td_type_key, td_items in self.transfer_data_map[source_obj].items():
for index in td_indexes: for td_item in td_items:
temp_transfer_data_items.append(temp_transfer_data[index]) target_obj = td_item["target_obj"]
if td_type_key != constants.MATERIAL_SLOT_KEY:
for temp_transfer_data_item in temp_transfer_data_items:
source_obj = temp_transfer_data_item.obj
target_obj = temp_transfer_data_item.target_obj
if temp_transfer_data_item.type != constants.MATERIAL_SLOT_KEY:
continue continue
if source_obj.type != 'MESH': if source_obj.type != 'MESH':
continue continue
active_uv_name = ( active_uv_name = (
source_obj.data.uv_layers.active.name if source_obj.data.uv_layers.active else '' source_obj.data.uv_layers.active.name
if source_obj.data.uv_layers.active
else ''
) )
active_color_attribute_name = source_obj.data.color_attributes.active_color_name active_color_attribute_name = source_obj.data.color_attributes.active_color_name
index_map[source_obj] = { index_map[source_obj] = {

View File

@ -10,37 +10,34 @@ from .transfer_functions import (
materials, materials,
) )
from typing import List from typing import List
from ... import constants, logging from ... import constants
from ...props import AssetTransferDataTemp
from bpy.types import PropertyGroup
from .transfer_util import ( from .transfer_util import (
transfer_data_add_entry, transfer_data_add_entry,
check_transfer_data_entry, check_transfer_data_entry,
) )
def copy_transfer_data_ownership( # TODO use logging module here
transfer_data_item: AssetTransferDataTemp, target_obj: bpy.types.Object def copy_transfer_data_ownership(td_type_key: str, transfer_data_dict: dict) -> None:
) -> None:
"""Copy Transferable Data item to object if non entry exists """Copy Transferable Data item to object if non entry exists
Args: Args:
transfer_data_item: Item of bpy.types.CollectionProperty from source object transfer_data_item: Item of bpy.types.CollectionProperty from source object
target_obj (bpy.types.Object): Object to add Transferable Data item to target_obj (bpy.types.Object): Object to add Transferable Data item to
""" """
transfer_data = target_obj.transfer_data_ownership transfer_data = transfer_data_dict["target_obj"].transfer_data_ownership
matches = check_transfer_data_entry( matches = check_transfer_data_entry(
transfer_data, transfer_data,
transfer_data_item.name, transfer_data_dict["name"],
transfer_data_item.type, td_type_key,
) )
if len(matches) == 0: if len(matches) == 0:
transfer_data_add_entry( transfer_data_add_entry(
transfer_data, transfer_data,
transfer_data_item.name, transfer_data_dict["name"],
transfer_data_item.type, td_type_key,
transfer_data_item.owner, transfer_data_dict["owner"],
transfer_data_item.surrender, transfer_data_dict["surrender"],
) )
@ -100,38 +97,23 @@ def init_transfer_data(
attributes.init_attributes(scene, obj) attributes.init_attributes(scene, obj)
def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None: def apply_transfer_data_items(
"""Apply all Transferable Data from Transferable Data map onto objects. context,
Copies any Transferable Data owned by local layer onto objects owned by external layers. source_obj: bpy.types.Object,
Applies Transferable Data from external layers onto objects owned by local layers td_type_key: str,
transfer_data_dicts: List[dict],
Transfer_data_map is generated by class 'AssetTransferMapping' ):
Args:
context (bpy.types.Context): context of .blend file
transfer_data_map: Map generated by class AssetTransferMapping
"""
logger = logging.get_logger()
for name in transfer_data_map:
temp_transfer_data = context.scene.asset_pipeline.temp_transfer_data
transfer_data = transfer_data_map[name]
transfer_data_item = temp_transfer_data[transfer_data.get('transfer_data_item_index')]
target_obj = transfer_data.get('target_obj')
source_obj = transfer_data.get('source_obj')
def apply_transfer_data_items(context, td_type_key: str, transfer_data_items: List[PropertyGroup]):
# Get source/target from first item in list, because all items in list are same object/type # Get source/target from first item in list, because all items in list are same object/type
target_obj = transfer_data_items[0].target_obj target_obj = transfer_data_dicts[0]["target_obj"]
source_obj = transfer_data_items[0].obj
if target_obj is None: if target_obj is None:
print(f"Failed to Transfer Data for {transfer_data_item.id_data.name}") print(f"Failed to Transfer Data for {transfer_data_dict.id_data.name}")
return return
for transfer_data_item in transfer_data_items: for transfer_data_dict in transfer_data_dicts:
transfer_data_item.copy_transfer_data_ownership() copy_transfer_data_ownership(td_type_key, transfer_data_dict)
# if TD Source is Target, restore the ownership data but don't transfer anything
if source_obj == target_obj: if source_obj == target_obj:
return return
@ -139,47 +121,47 @@ def apply_transfer_data_items(context, td_type_key: str, transfer_data_items: Li
# Transfer All Vertex Groups in one go # Transfer All Vertex Groups in one go
print(f"Transferring all Vertex Groups from {source_obj.name} to {target_obj.name}") print(f"Transferring all Vertex Groups from {source_obj.name} to {target_obj.name}")
vertex_groups.transfer_vertex_groups( vertex_groups.transfer_vertex_groups(
vertex_group_names=[item.name for item in transfer_data_items], vertex_group_names=[item["name"] for item in transfer_data_dicts],
target_obj=target_obj, target_obj=target_obj,
source_obj=source_obj, source_obj=source_obj,
) )
if td_type_key == constants.MODIFIER_KEY: if td_type_key == constants.MODIFIER_KEY:
for transfer_data_item in transfer_data_items: for transfer_data_dict in transfer_data_dicts:
modifers.transfer_modifier( modifers.transfer_modifier(
modifier_name=transfer_data_item.name, modifier_name=transfer_data_dict["name"],
target_obj=target_obj, target_obj=target_obj,
source_obj=source_obj, source_obj=source_obj,
) )
if td_type_key == constants.CONSTRAINT_KEY: if td_type_key == constants.CONSTRAINT_KEY:
for transfer_data_item in transfer_data_items: for transfer_data_dict in transfer_data_dicts:
constraints.transfer_constraint( constraints.transfer_constraint(
constraint_name=transfer_data_item.name, constraint_name=transfer_data_dict["name"],
target_obj=target_obj, target_obj=target_obj,
source_obj=source_obj, source_obj=source_obj,
) )
if td_type_key == constants.MATERIAL_SLOT_KEY: if td_type_key == constants.MATERIAL_SLOT_KEY:
for transfer_data_item in transfer_data_items: for transfer_data_dict in transfer_data_dicts:
materials.transfer_materials( materials.transfer_materials(
target_obj=target_obj, target_obj=target_obj,
source_obj=source_obj, source_obj=source_obj,
) )
if td_type_key == constants.SHAPE_KEY_KEY: if td_type_key == constants.SHAPE_KEY_KEY:
for transfer_data_item in transfer_data_items: for transfer_data_dict in transfer_data_dicts:
shape_keys.transfer_shape_key( shape_keys.transfer_shape_key(
context=context, context=context,
target_obj=target_obj, target_obj=target_obj,
source_obj=source_obj, source_obj=source_obj,
shape_key_name=transfer_data_item.name, shape_key_name=transfer_data_dict["name"],
) )
if td_type_key == constants.ATTRIBUTE_KEY: if td_type_key == constants.ATTRIBUTE_KEY:
for transfer_data_item in transfer_data_items: for transfer_data_dict in transfer_data_dicts:
attributes.transfer_attribute( attributes.transfer_attribute(
target_obj=target_obj, target_obj=target_obj,
source_obj=source_obj, source_obj=source_obj,
attribute_name=transfer_data_item.name, attribute_name=transfer_data_dict["name"],
) )
if td_type_key == constants.PARENT_KEY: if td_type_key == constants.PARENT_KEY:
for transfer_data_item in transfer_data_items: for transfer_data_dict in transfer_data_dicts:
parent.transfer_parent( parent.transfer_parent(
target_obj=target_obj, target_obj=target_obj,
source_obj=source_obj, source_obj=source_obj,
@ -198,8 +180,6 @@ def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None:
transfer_data_map: Map generated by class AssetTransferMapping transfer_data_map: Map generated by class AssetTransferMapping
""" """
temp_transfer_data = context.scene.asset_pipeline.temp_transfer_data
for source_obj in transfer_data_map: for source_obj in transfer_data_map:
for td_type_key, td_indexes in transfer_data_map[source_obj].items(): for td_type_key, td_dicts in transfer_data_map[source_obj].items():
transfer_data_items = [temp_transfer_data[index] for index in td_indexes] apply_transfer_data_items(context, source_obj, td_type_key, td_dicts)
apply_transfer_data_items(context, td_type_key, transfer_data_items)

View File

@ -57,7 +57,19 @@ class AssetTransferDataTemp(bpy.types.PropertyGroup):
items=constants.TRANSFER_DATA_TYPES_ENUM_ITEMS, items=constants.TRANSFER_DATA_TYPES_ENUM_ITEMS,
) )
surrender: bpy.props.BoolProperty(name="Surrender Ownership", default=False) surrender: bpy.props.BoolProperty(name="Surrender Ownership", default=False)
obj_name: bpy.props.StringProperty(name="Object Name") obj: bpy.props.PointerProperty(type=bpy.types.Object)
target_obj: bpy.props.PointerProperty(type=bpy.types.Object)
def check_transfer_data_entry(self) -> set:
"""
Verifies if Transferable Data entry exists
"""
existing_items = [
transfer_data_item.name
for transfer_data_item in self.target_obj.transfer_data_ownership
if transfer_data_item.type == self.type
]
return set([self.name]).intersection(set(existing_items))
class TaskLayerSettings(bpy.types.PropertyGroup): class TaskLayerSettings(bpy.types.PropertyGroup):