Asset Pipeline: Improve Performance #235

Merged
3 changed files with 53 additions and 12 deletions
Showing only changes of commit b6d4fabb13


@@ -169,12 +169,11 @@ class AssetTransferMapping:
         return coll_map

-    def _get_transfer_data_dict(self, source_obj, target_obj, transfer_data_item):
+    def _get_transfer_data_dict(self, transfer_data_item):
         return {
             "name": transfer_data_item.name,
             "owner": transfer_data_item.owner,
             "surrender": transfer_data_item.surrender,
-            "target_obj": target_obj,
         }

     def _transfer_data_pair_not_local(self, td_1, td_2):
@@ -231,9 +230,8 @@ class AssetTransferMapping:
         if self._transfer_data_is_surrendered(transfer_data_item):
             return
         td_type_key = transfer_data_item.type
-        transfer_data_dict = self._get_transfer_data_dict(
-            source_obj, target_obj, transfer_data_item
-        )
+        transfer_data_dict = self._get_transfer_data_dict(transfer_data_item)
         if not source_obj in self.transfer_data_map:
             self.transfer_data_map[source_obj] = {
                 "target_obj": target_obj,


@@ -14,6 +14,8 @@ from ... import constants
 from .transfer_util import (
     transfer_data_add_entry,
     check_transfer_data_entry,
+    link_objs_to_collection,
+    isolate_collection,
 )
@@ -100,12 +102,11 @@ def init_transfer_data(
 def apply_transfer_data_items(
     context,
     source_obj: bpy.types.Object,
+    target_obj: bpy.types.Object,
     td_type_key: str,
     transfer_data_dicts: List[dict],
 ):
-    # Get source/target from first item in list, because all items in list are same object/type
-    target_obj = transfer_data_dicts[0]["target_obj"]
     if target_obj is None:
         print(f"Failed to Transfer Data for {transfer_data_dict.id_data.name}")
         return
@@ -179,7 +180,16 @@ def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None:
         context (bpy.types.Context): context of .blend file
         transfer_data_map: Map generated by class AssetTransferMapping
     """
-    for source_obj in transfer_data_map:
-        for td_type_key, td_dicts in transfer_data_map[source_obj].items():
-            apply_transfer_data_items(context, source_obj, td_type_key, td_dicts)
+    # Create/isolate tmp collection to reduce depsgraph update time
+    td_col = bpy.data.collections.new("ISO_COL_TEMP")
+    with isolate_collection(td_col):
+        # Loop over objects in transfer data map
+        for source_obj in transfer_data_map:
+            target_obj = transfer_data_map[source_obj]["target_obj"]
+            td_types = transfer_data_map[source_obj]["td_types"]
+            with link_objs_to_collection(set([target_obj, source_obj]), td_col):
+                for td_type_key, td_dicts in td_types.items():
+                    apply_transfer_data_items(
+                        context, source_obj, target_obj, td_type_key, td_dicts
+                    )
+    bpy.data.collections.remove(td_col)
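The performance gain comes from the view layer containing only the temporary collection with the two objects being processed, so each depsgraph update re-evaluates far less data. A rough way to check the effect, assuming a populated transfer_data_map is in scope (hypothetical benchmark, not part of this commit):

    import time
    import bpy

    # Time one full pass of apply_transfer_data() to compare
    # before/after this commit.
    start = time.perf_counter()
    apply_transfer_data(bpy.context, transfer_data_map)
    bpy.context.view_layer.update()  # flush any pending depsgraph update
    print(f"apply_transfer_data took {time.perf_counter() - start:.2f}s")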


@@ -1,7 +1,7 @@
 import bpy
 from ..naming import merge_get_basename
 from ..task_layer import get_transfer_data_owner
+import contextlib


 def check_transfer_data_entry(
     transfer_data: bpy.types.CollectionProperty, key: str, td_type_key: str
@@ -131,3 +131,36 @@ def transfer_data_item_init(
         obj_name=obj.name,
         surrender=auto_surrender,
     )
+
+
+@contextlib.contextmanager
+def isolate_collection(iso_col: bpy.types.Collection):
+    # Remember each top-level collection's exclude state before isolating
+    col_exclude = {}
+    view_layer_col = bpy.context.view_layer.layer_collection
+    view_layer_col.collection.children.link(iso_col)
+    for col in view_layer_col.children:
+        col_exclude[col.name] = col.exclude
+    try:
+        # Exclude all collections that are not iso collection
+        for col in view_layer_col.children:
+            col.exclude = col.name != iso_col.name
+        yield
+    finally:
+        # Restore the saved exclude states and unlink the iso collection
+        for col in view_layer_col.children:
+            col.exclude = col_exclude[col.name]
+        view_layer_col.collection.children.unlink(iso_col)
+
+
+@contextlib.contextmanager
+def link_objs_to_collection(objs: set, col: bpy.types.Collection):
+    ...
+    try:
+        for obj in objs:
+            col.objects.link(obj)
+        yield
+    finally:
+        for obj in objs:
+            col.objects.unlink(obj)
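Outside apply_transfer_data(), the two new helpers compose the same way; a minimal standalone sketch, with hypothetical object names and a placeholder for the real transfer work:

    import bpy

    src = bpy.data.objects["source"]  # hypothetical object names
    dst = bpy.data.objects["target"]

    tmp_col = bpy.data.collections.new("ISO_COL_TEMP")
    with isolate_collection(tmp_col):  # only tmp_col stays enabled in the view layer
        with link_objs_to_collection({src, dst}, tmp_col):
            pass  # stand-in for the actual per-object transfer work
    bpy.data.collections.remove(tmp_col)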