Asset Pipeline: Improve Performance #235

Merged
9 changed files with 385 additions and 175 deletions

View File

@@ -20,7 +20,7 @@
import logging
from typing import List, Tuple
from . import prefs
from . import prefs, constants
def get_logger(name="asset_pipeline"):
@@ -45,3 +45,89 @@ def get_logger(name="asset_pipeline"):
# add ch to logger
logger.addHandler(ch)
return logger
PROFILE_KEYS = {
"IMPORT": "To import Collection & add suffixes",
"MAPPING": "To create Asset Mapping",
"TRANSFER_DATA": "To apply all Transferable Data",
"OBJECTS": "To remap all Obejcts",
"INDEXES": "To restore Active Indexes on all Objects",
"COLLECTIONS": "To remap all Collections",
"SHARED_IDS": "To remap all Shared IDs",
"MERGE": "To complete entire merge process",
"TOTAL": "Total time to sync this direction",
}
TD_KEYS = list(constants.TRANSFER_DATA_TYPES)
INFO_KEYS = ["TOTAL"] # Profile Keys to print in the logger's info mode
_profiler_instance = None
def get_profiler():
global _profiler_instance
if not _profiler_instance:
_profiler_instance = Profiler()
return _profiler_instance
class Profiler:
def __init__(self) -> None:
self.pull_profiles = {}
self.push_profiles = {}
self._is_push = False  # direction flag; reset()/set_push() toggle it
self._logger = get_logger()
def add(self, elapsed_time: float, key: str):
if self._is_push:
profiles = self.push_profiles
else: # is pull
profiles = self.pull_profiles
if key not in profiles:
profiles[key] = elapsed_time
else:
profiles[key] += elapsed_time
def log_all(self):
self.log_profiles("PULL", self.pull_profiles)
self.log_profiles("PUSH", self.push_profiles)
def log_profiles(self, direction: str, profiles: dict):
if not profiles:
return
for key, value in profiles.items():
seconds = self.get_non_scientific_number(value)
# Special case for transfer data keys
if key in TD_KEYS:
name = constants.TRANSFER_DATA_TYPES[key][0]
self._logger.debug(
f"{direction} TD: {name.upper()} - {seconds} seconds to transfer {name} data for all objects"
)
continue
msg = f"{direction} {key} - {seconds} seconds {PROFILE_KEYS[key]}"
if key in INFO_KEYS:
self._logger.info(msg)
else:
self._logger.debug(msg)
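# Formats a float as a plain decimal string truncated to 3 significant digits,
# e.g. 2.0437214 -> '2.043' and 0.0001987 -> '0.000198' (truncated, not rounded).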
def get_non_scientific_number(self, x: float):
float_str = f'{x:.64f}'.rstrip('0')
significant_digits = 0
for index, c in enumerate(float_str):
if significant_digits == 3:
return float_str[:index]
if c != "0" and c != ".":
significant_digits += 1
return float_str  # fewer than 3 significant digits; return as-is
def reset(self):
self.pull_profiles = {}
self.push_profiles = {}  # clear both directions so a new sync starts fresh
self._is_push = False
self._logger = get_logger()
def set_push(self, is_push=True):
self._is_push = is_push
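A minimal usage sketch of the profiler API above (my example; the import path is hypothetical, and the key names come from PROFILE_KEYS):

import time
from asset_pipeline import logging  # hypothetical package path for this module

profiler = logging.get_profiler()   # module-level singleton
profiler.reset()                    # clear accumulated data, default to pull
t0 = time.time()
...                                 # e.g. import the external collection
profiler.add(time.time() - t0, "IMPORT")
profiler.set_push()                 # later add() calls accumulate under push
profiler.log_all()                  # INFO for INFO_KEYS ("TOTAL"), DEBUG otherwise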

View File

@@ -169,26 +169,12 @@ class AssetTransferMapping:
return coll_map
def _transfer_data_get_map_content(self, obj, target_obj, transfer_data_item):
temp_transfer_data = bpy.context.scene.asset_pipeline.temp_transfer_data
temp_transfer_data_item_index = len(temp_transfer_data)
temp_transfer_data_item = transfer_data_add_entry(
transfer_data=temp_transfer_data,
name=transfer_data_item.name,
td_type_key=transfer_data_item.type,
task_layer_name=transfer_data_item.owner,
surrender=transfer_data_item.surrender,
)
map_item = {
'transfer_data_item_index': temp_transfer_data_item_index,
'source_obj': obj,
'target_obj': target_obj,
def _get_transfer_data_dict(self, transfer_data_item):
return {
'name': transfer_data_item.name,
"owner": transfer_data_item.owner,
"surrender": transfer_data_item.surrender,
}
# Names of each map item need to be unique
# below name avoids name conflicts between different types
name = transfer_data_item.name + '_' + transfer_data_item.type + obj.name
return name, map_item
def _transfer_data_pair_not_local(self, td_1, td_2):
# Returns True if neither owner is local to the current file
@@ -243,11 +229,21 @@ class AssetTransferMapping:
"""Adds item to Transfer Data Map"""
if self._transfer_data_is_surrendered(transfer_data_item):
return
td_type_key = transfer_data_item.type
transfer_data_dict = self._get_transfer_data_dict(transfer_data_item)
name, map_item = self._transfer_data_get_map_content(
source_obj, target_obj, transfer_data_item
)
self.transfer_data_map[name] = map_item
if source_obj not in self.transfer_data_map:
self.transfer_data_map[source_obj] = {
"target_obj": target_obj,
"td_types": {td_type_key: [transfer_data_dict]},
}
return
td_types = self.transfer_data_map[source_obj]["td_types"]
if td_type_key not in td_types:
td_types[td_type_key] = [transfer_data_dict]
else:
td_types[td_type_key].append(transfer_data_dict)
def _transfer_data_map_item(self, source_obj, target_obj, transfer_data_item):
"""Verifies if Transfer Data Item is valid/can be mapped"""
@@ -268,12 +264,11 @@ class AssetTransferMapping:
def _gen_transfer_data_map(self):
# Generate Mapping for Transfer Data Items
temp_transfer_data = bpy.context.scene.asset_pipeline.temp_transfer_data
temp_transfer_data.clear()
for objs in self.object_map.items():
source_obj, target_obj = objs
_, target_obj = objs
for obj in objs:
# Must execute for both objs in map (so we map external and local TD)
# Must include maps even if obj==target_obj to preserve existing local TD entry
for transfer_data_item in obj.transfer_data_ownership:
if self._transfer_data_check_conflict(obj, transfer_data_item):
continue
@@ -284,26 +279,27 @@
# Generate a Map of Indexes that need to be set post merge
# Stores active_uv & active_color_attribute
index_map = {}
for _, item in self.transfer_data_map.items():
temp_transfer_data = bpy.context.scene.asset_pipeline.temp_transfer_data
temp_transfer_data_item = temp_transfer_data[item.get('transfer_data_item_index')]
source_obj = item.get('source_obj')
target_obj = item.get('target_obj')
if temp_transfer_data_item.type != constants.MATERIAL_SLOT_KEY:
continue
if source_obj.type != 'MESH':
continue
for source_obj in self.transfer_data_map:
target_obj = self.transfer_data_map[source_obj]["target_obj"]
td_types = self.transfer_data_map[source_obj]["td_types"]
for td_type_key, _ in td_types.items():
if td_type_key != constants.MATERIAL_SLOT_KEY:
continue
if source_obj.type != 'MESH':
continue
active_uv_name = (
source_obj.data.uv_layers.active.name if source_obj.data.uv_layers.active else ''
)
active_color_attribute_name = source_obj.data.color_attributes.active_color_name
index_map[source_obj] = {
'active_uv_name': active_uv_name,
'active_color_attribute_name': active_color_attribute_name,
'target_obj': target_obj,
}
active_uv_name = (
source_obj.data.uv_layers.active.name
if source_obj.data.uv_layers.active
else ''
)
active_color_attribute_name = source_obj.data.color_attributes.active_color_name
index_map[source_obj] = {
'active_uv_name': active_uv_name,
'active_color_attribute_name': active_color_attribute_name,
'target_obj': target_obj,
}
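# index_map shape (sketch):
#   {source_obj: {'active_uv_name': str,
#                 'active_color_attribute_name': str,
#                 'target_obj': Object}}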
return index_map

View File

@@ -21,6 +21,7 @@ from .transfer_data.transfer_functions.transfer_function_util.active_indexes imp
from pathlib import Path
from typing import Dict
from .. import constants, logging
import time
def ownership_transfer_data_cleanup(
@@ -175,6 +176,11 @@ def merge_task_layer(
local_tls: (list[str]): list of task layers that are local to the current file
external_file (Path): external file to pull data into the current file from
"""
logger = logging.get_logger()
profiles = logging.get_profiler()
start_time = time.time()
local_col = context.scene.asset_pipeline.asset_collection
if not local_col:
return "Unable to find Asset Collection"
@@ -185,6 +191,8 @@
appended_col = import_data_from_lib(external_file, "collections", col_base_name)
merge_add_suffix_to_hierarchy(appended_col, external_suffix)
imported_time = time.time()
profiles.add((imported_time - start_time), "IMPORT")
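# Each entry records the delta to the previous checkpoint; the per-phase
# values roughly sum to the overall "MERGE" time added at the end.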
local_col = bpy.data.collections[f"{col_base_name}.{local_suffix}"]
external_col = bpy.data.collections[f"{col_base_name}.{external_suffix}"]
@@ -204,6 +212,8 @@
type_name = get_id_type_name(type(conflict_obj))
error_msg += f"Ownership conflict found for {type_name}: '{conflict_obj.name}'\n"
return error_msg
mapped_time = time.time()
profiles.add((mapped_time - imported_time), "MAPPING")
# Remove all Transferable Data from target objects
for source_obj in map.object_map:
@@ -211,11 +221,15 @@
target_obj.transfer_data_ownership.clear()
apply_transfer_data(context, map.transfer_data_map)
apply_td_time = time.time()
profiles.add((apply_td_time - mapped_time), "TRANSFER_DATA")
for source_obj in map.object_map:
target_obj = map.object_map[source_obj]
remap_user(source_obj, target_obj)
transfer_data_clean(target_obj)
obj_remap_time = time.time()
profiles.add((obj_remap_time - apply_td_time), "OBJECTS")
# Restore Active UV Layer and Active Color Attributes
for _, index_map_item in map.index_map.items():
@@ -224,6 +238,8 @@
transfer_active_color_attribute_index(
target_obj, index_map_item.get('active_color_attribute_name')
)
index_time = time.time()
profiles.add((index_time - obj_remap_time), "INDEXES")
for col in map.collection_map:
remap_user(col, map.collection_map[col])
@@ -233,13 +249,17 @@
for col in map.external_col_to_remove:
local_col.children.unlink(col)
col_remap_time = time.time()
profiles.add((col_remap_time - index_time), "COLLECTIONS")
for id in map.shared_id_map:
remap_user(id, map.shared_id_map[id])
shared_id_remap_time = time.time()
profiles.add((shared_id_remap_time - col_remap_time), "SHARED_IDS")
bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=False, do_recursive=True)
merge_remove_suffix_from_hierarchy(local_col)
profiles.add((time.time() - start_time), "MERGE")
def import_data_from_lib(
libpath: Path,

View File

@@ -1,5 +1,5 @@
import bpy
import time
from .transfer_functions import (
attributes,
constraints,
@@ -9,15 +9,20 @@ from .transfer_functions import (
vertex_groups,
materials,
)
from typing import List
from ... import constants, logging
from .transfer_util import (
transfer_data_add_entry,
check_transfer_data_entry,
link_objs_to_collection,
isolate_collection,
)
def copy_transfer_data_ownership(transfer_data_item, target_obj: bpy.types.Object) -> None:
# TODO use logging module here
def copy_transfer_data_ownership(
td_type_key: str, target_obj: bpy.types.Object, transfer_data_dict: dict
) -> None:
"""Copy Transferable Data item to object if non entry exists
Args:
@@ -27,16 +32,16 @@ def copy_transfer_data_ownership(transfer_data_item, target_obj: bpy.types.Objec
transfer_data = target_obj.transfer_data_ownership
matches = check_transfer_data_entry(
transfer_data,
transfer_data_item.name,
transfer_data_item.type,
transfer_data_dict["name"],
td_type_key,
)
if len(matches) == 0:
transfer_data_add_entry(
transfer_data,
transfer_data_item.name,
transfer_data_item.type,
transfer_data_item.owner,
transfer_data_item.surrender,
transfer_data_dict["name"],
td_type_key,
transfer_data_dict["owner"],
transfer_data_dict["surrender"],
)
@@ -96,6 +101,93 @@ def init_transfer_data(
attributes.init_attributes(scene, obj)
def apply_transfer_data_items(
context,
source_obj: bpy.types.Object,
target_obj: bpy.types.Object,
td_type_key: str,
transfer_data_dicts: List[dict],
):
logger = logging.get_logger()
# All dicts in the list share the same source/target objects and Transferable Data type
if target_obj is None:
logger.warning(f"Failed to Transfer {td_type_key.title()} from {source_obj.name}")
return
for transfer_data_dict in transfer_data_dicts:
copy_transfer_data_ownership(td_type_key, target_obj, transfer_data_dict)
# if TD Source is Target, restore the ownership data but don't transfer anything
if source_obj == target_obj:
return
if td_type_key == constants.VERTEX_GROUP_KEY:
# Transfer All Vertex Groups in one go
logger.debug(f"Transferring All Vertex Groups from {source_obj.name} to {target_obj.name}.")
vertex_groups.transfer_vertex_groups(
vertex_group_names=[item["name"] for item in transfer_data_dicts],
target_obj=target_obj,
source_obj=source_obj,
)
if td_type_key == constants.MODIFIER_KEY:
for transfer_data_dict in transfer_data_dicts:
logger.debug(
f"Transferring Modifier {transfer_data_dict['name']} from {source_obj.name} to {target_obj.name}."
)
modifers.transfer_modifier(
modifier_name=transfer_data_dict["name"],
target_obj=target_obj,
source_obj=source_obj,
)
if td_type_key == constants.CONSTRAINT_KEY:
for transfer_data_dict in transfer_data_dicts:
logger.debug(
f"Transferring Constraint {transfer_data_dict['name']} from {source_obj.name} to {target_obj.name}."
)
constraints.transfer_constraint(
constraint_name=transfer_data_dict["name"],
target_obj=target_obj,
source_obj=source_obj,
)
if td_type_key == constants.MATERIAL_SLOT_KEY:
logger.debug(f"Transferring Materials from {source_obj.name} to {target_obj.name}.")
# transfer_materials() copies every slot at once; one call per object pair
materials.transfer_materials(
target_obj=target_obj,
source_obj=source_obj,
)
if td_type_key == constants.SHAPE_KEY_KEY:
for transfer_data_dict in transfer_data_dicts:
logger.debug(
f"Transferring Shape Key {transfer_data_dict['name']} from {source_obj.name} to {target_obj.name}."
)
shape_keys.transfer_shape_key(
context=context,
target_obj=target_obj,
source_obj=source_obj,
shape_key_name=transfer_data_dict["name"],
)
if td_type_key == constants.ATTRIBUTE_KEY:
for transfer_data_dict in transfer_data_dicts:
logger.debug(
f"Transferring Attribute {transfer_data_dict['name']} from {source_obj.name} to {target_obj.name}."
)
attributes.transfer_attribute(
target_obj=target_obj,
source_obj=source_obj,
attribute_name=transfer_data_dict["name"],
)
if td_type_key == constants.PARENT_KEY:
for transfer_data_dict in transfer_data_dicts:
logger.debug(
f"Transferring Parent Relationship from {source_obj.name} to {target_obj.name}."
)
parent.transfer_parent(
target_obj=target_obj,
source_obj=source_obj,
)
def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None:
"""Apply all Transferable Data from Transferable Data map onto objects.
Copies any Transferable Data owned by local layer onto objects owned by external layers.
@@ -107,84 +199,19 @@ def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None:
context (bpy.types.Context): context of .blend file
transfer_data_map: Map generated by class AssetTransferMapping
"""
logger = logging.get_logger()
for name in transfer_data_map:
temp_transfer_data = context.scene.asset_pipeline.temp_transfer_data
transfer_data = transfer_data_map[name]
transfer_data_item = temp_transfer_data[transfer_data.get('transfer_data_item_index')]
target_obj = transfer_data.get('target_obj')
source_obj = transfer_data.get('source_obj')
if target_obj is None:
logger.warning(f"Failed to Transfer Data for {transfer_data_item.id_data.name}")
continue
if transfer_data_item is None:
continue
if source_obj != target_obj:
if transfer_data_item.type == constants.VERTEX_GROUP_KEY:
logger.debug(
f"Transferring Vertex Group {transfer_data_item.name} from {source_obj.name} to {target_obj.name}."
)
vertex_groups.transfer_vertex_group(
context=context,
vertex_group_name=transfer_data_item.name,
target_obj=target_obj,
source_obj=source_obj,
)
if transfer_data_item.type == constants.MODIFIER_KEY:
logger.debug(
f"Transferring Modifier{transfer_data_item.name} from {source_obj.name} to {target_obj.name}."
)
modifers.transfer_modifier(
modifier_name=transfer_data_item.name,
target_obj=target_obj,
source_obj=source_obj,
)
if transfer_data_item.type == constants.CONSTRAINT_KEY:
logger.debug(
f"Transferring Constraint {transfer_data_item.name} from {source_obj.name} to {target_obj.name}."
)
constraints.transfer_constraint(
constraint_name=transfer_data_item.name,
target_obj=target_obj,
source_obj=source_obj,
)
if transfer_data_item.type == constants.MATERIAL_SLOT_KEY:
logger.debug(f"Transferring Materiald from {source_obj.name} to {target_obj.name}.")
materials.transfer_materials(
target_obj=target_obj,
source_obj=source_obj,
)
if transfer_data_item.type == constants.SHAPE_KEY_KEY:
logger.debug(
f"Transferring Shape Key {transfer_data_item.name} from {source_obj.name} to {target_obj.name}."
)
shape_keys.transfer_shape_key(
context=context,
target_obj=target_obj,
source_obj=source_obj,
shape_key_name=transfer_data_item.name,
)
if transfer_data_item.type == constants.ATTRIBUTE_KEY:
logger.debug(
f"Transferring Attribute {transfer_data_item.name} from {source_obj.name} to {target_obj.name}."
)
attributes.transfer_attribute(
target_obj=target_obj,
source_obj=source_obj,
attribute_name=transfer_data_item.name,
)
if transfer_data_item.type == constants.PARENT_KEY:
logger.debug(
f"Transferring Parent Relationship from {source_obj.name} to {target_obj.name}."
)
parent.transfer_parent(
target_obj=target_obj,
source_obj=source_obj,
)
logger.debug(
f"Copying Ownership Data for {transfer_data_item.name} from {source_obj.name} to {target_obj.name}."
)
copy_transfer_data_ownership(
transfer_data_item=transfer_data_item,
target_obj=target_obj,
)
# Create/isolate tmp collection to reduce depsgraph update time
profiler = logging.get_profiler()
td_col = bpy.data.collections.new("ISO_COL_TEMP")
with isolate_collection(td_col):
# Loop over objects in Transfer data map
for source_obj in transfer_data_map:
target_obj = transfer_data_map[source_obj]["target_obj"]
td_types = transfer_data_map[source_obj]["td_types"]
with link_objs_to_collection(set([target_obj, source_obj]), td_col):
for td_type_key, td_dicts in td_types.items():
start_time = time.time()
apply_transfer_data_items(
context, source_obj, target_obj, td_type_key, td_dicts
)
profiler.add(time.time() - start_time, td_type_key)
bpy.data.collections.remove(td_col)

View File

@@ -35,33 +35,27 @@ def init_vertex_groups(scene, obj):
)
def transfer_vertex_group(
context,
vertex_group_name: str,
def transfer_vertex_groups(
vertex_group_names: List[str],
target_obj: bpy.types.Object,
source_obj: bpy.types.Object,
):
logger = logging.get_logger()
if target_obj == source_obj:
return
if not source_obj.vertex_groups.get(vertex_group_name):
logger.error(f"Vertex Group {vertex_group_name} not found in {source_obj.name}")
return
for vertex_group_name in vertex_group_names:
if not source_obj.vertex_groups.get(vertex_group_name):
logger.error(f"Vertex Group {vertex_group_name} not found in {source_obj.name}")
return
# If topology matches transfer directly, otherwise use vertex proximity
if is_obdata_identical(source_obj, target_obj):
transfer_single_vgroup_by_topology(
source_obj, target_obj, vertex_group_name
)
for vertex_group_name in vertex_group_names:
transfer_single_vgroup_by_topology(source_obj, target_obj, vertex_group_name)
else:
precalc_and_transfer_single_group(
source_obj, target_obj, vertex_group_name, expand=2
)
precalc_and_transfer_multiple_groups(source_obj, target_obj, vertex_group_names, expand=2)
def transfer_single_vgroup_by_topology(source_obj, target_obj, vgroup_name):
""" Function to quickly transfer single vertex group between mesh objects in case of matching topology.
"""
"""Function to quickly transfer single vertex group between mesh objects in case of matching topology."""
# Remove group from the target obj if it already exists. TODO: de-duplicate
tgt_vg = target_obj.vertex_groups.get(vgroup_name)
@@ -75,6 +69,33 @@ def transfer_single_vgroup_by_topology(source_obj, target_obj, vgroup_name):
if vgroup_src.index in [g.group for g in v.groups]:
vgroup_tgt.add([v.index], vgroup_src.weight(v.index), 'REPLACE')
def precalc_and_transfer_multiple_groups(source_obj, target_obj, vgroup_names, expand=2):
"""Convenience function to transfer a single group. For transferring multiple groups,
this is very inefficient and shouldn't be used.
Instead, you should:
- build_kd_tree ONCE per source mesh.
- build_vert_influence_map and transfer_vertex_groups ONCE per object pair.
"""
# Remove group from the target obj if it already exists. TODO: de-duplicate
vgroups = [source_obj.vertex_groups[name] for name in vgroup_names]
for vgroup_name in vgroup_names:
target_vgroup = target_obj.vertex_groups.get(vgroup_name)
if target_vgroup:
target_obj.vertex_groups.remove(target_vgroup)
kd_tree = build_kdtree(source_obj.data)
vert_influence_map = build_vert_influence_map(source_obj, target_obj, kd_tree, expand)
transfer_multiple_vertex_groups(
source_obj,
target_obj,
vert_influence_map,
vgroups,
)
def precalc_and_transfer_single_group(source_obj, target_obj, vgroup_name, expand=2):
"""Convenience function to transfer a single group. For transferring multiple groups,
this is very inefficient and shouldn't be used.
@@ -90,10 +111,9 @@ def precalc_and_transfer_single_group(source_obj, target_obj, vgroup_name, expan
target_obj.vertex_groups.remove(tgt_vg)
kd_tree = build_kdtree(source_obj.data)
vert_influence_map = build_vert_influence_map(
source_obj, target_obj, kd_tree, expand
)
transfer_vertex_groups(
vert_influence_map = build_vert_influence_map(source_obj, target_obj, kd_tree, expand)
transfer_multiple_vertex_groups(
source_obj,
target_obj,
vert_influence_map,
@@ -110,9 +130,7 @@ def build_kdtree(mesh):
def build_vert_influence_map(obj_from, obj_to, kd_tree, expand=2):
verts_of_edge = {
i: (e.vertices[0], e.vertices[1]) for i, e in enumerate(obj_from.data.edges)
}
verts_of_edge = {i: (e.vertices[0], e.vertices[1]) for i, e in enumerate(obj_from.data.edges)}
edges_of_vert: Dict[int, List[int]] = {}
for edge_idx, edge in enumerate(obj_from.data.edges):
@@ -166,29 +184,24 @@ def get_source_vert_influences(
parts_sum = sum(parts)
influences = [
(idx, 1 if dist == 0 else part / parts_sum)
for part, dist in zip(parts, distances)
(idx, 1 if dist == 0 else part / parts_sum) for part, dist in zip(parts, distances)
]
return influences
def get_nearest_vert(
coords: Vector, kd_tree: kdtree.KDTree
) -> Tuple[Vector, int, float]:
def get_nearest_vert(coords: Vector, kd_tree: kdtree.KDTree) -> Tuple[Vector, int, float]:
"""Return coordinate, index, and distance of nearest vert to coords in kd_tree."""
return kd_tree.find(coords)
def other_vert_of_edge(
edge: int, vert: int, verts_of_edge: Dict[int, Tuple[int, int]]
) -> int:
def other_vert_of_edge(edge: int, vert: int, verts_of_edge: Dict[int, Tuple[int, int]]) -> int:
verts = verts_of_edge[edge]
assert vert in verts, f"Vert {vert} not part of edge {edge}."
return verts[0] if vert == verts[1] else verts[1]
def transfer_vertex_groups(obj_from, obj_to, vert_influence_map, src_vgroups):
def transfer_multiple_vertex_groups(obj_from, obj_to, vert_influence_map, src_vgroups):
"""Transfer src_vgroups in obj_from to obj_to using a pre-calculated vert_influence_map."""
for src_vg in src_vgroups:

View File

@@ -1,7 +1,7 @@
import bpy
from ..naming import merge_get_basename
from ..task_layer import get_transfer_data_owner
import contextlib
def check_transfer_data_entry(
transfer_data: bpy.types.CollectionProperty, key: str, td_type_key: str
@@ -30,6 +30,8 @@ def transfer_data_add_entry(
td_type_key: str,
task_layer_name: str,
surrender: bool,
target_obj: bpy.types.Object = None,
obj: bpy.types.Object = None,
):
"""Add entry to Transferable Data ownership
@@ -44,6 +46,10 @@
transfer_data_item.owner = task_layer_name
transfer_data_item.type = td_type_key
transfer_data_item.surrender = surrender
if target_obj:
transfer_data_item.target_obj = target_obj
if obj:
transfer_data_item.obj = obj
return transfer_data_item
@@ -125,3 +131,36 @@ def transfer_data_item_init(
obj_name=obj.name,
surrender=auto_surrender,
)
@contextlib.contextmanager
def isolate_collection(iso_col: bpy.types.Collection):
col_exclude = {}
view_layer_col = bpy.context.view_layer.layer_collection
view_layer_col.collection.children.link(iso_col)
for col in view_layer_col.children:
col_exclude[col.name] = col.exclude
try:
# Exclude all collections that are not iso collection
for col in view_layer_col.children:
col.exclude = col.name != iso_col.name
yield
finally:
for col in view_layer_col.children:
col.exclude = col_exclude[col.name]
view_layer_col.collection.children.unlink(iso_col)
@contextlib.contextmanager
def link_objs_to_collection(objs: set, col: bpy.types.Collection):
...
try:
for obj in objs:
col.objects.link(obj)
yield
finally:
for obj in objs:
col.objects.unlink(obj)
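A sketch of how the two context managers above nest (mirroring their use in apply_transfer_data; source_obj/target_obj stand for any mapped object pair):

import bpy

td_col = bpy.data.collections.new("ISO_COL_TEMP")
with isolate_collection(td_col):
    # all other view-layer collections are excluded -> minimal depsgraph
    with link_objs_to_collection({source_obj, target_obj}, td_col):
        ...  # run the transfer functions here
bpy.data.collections.remove(td_col)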

View File

@@ -5,7 +5,7 @@ from pathlib import Path
from typing import Set
import os
from . import constants, config, opscore
from . import constants, config, opscore, logging
from .asset_catalog import get_asset_catalog_items, get_asset_id
from .config import verify_task_layer_json_data
from .hooks import Hooks, get_production_hook_dir, get_asset_hook_dir
@@ -309,6 +309,8 @@ class ASSETPIPE_OT_sync_pull(bpy.types.Operator):
opscore.sync_draw(self, context)
def execute(self, context: bpy.types.Context):
profiler = logging.get_profiler()
profiler.reset()
asset_col = context.scene.asset_pipeline.asset_collection
if self.save:
save_images()
@@ -324,6 +326,7 @@
hooks_instance.execute_hooks(merge_mode="pull", merge_status='post', asset_col=asset_col)
self.report({'INFO'}, "Asset Pull Complete")
profiler.log_all()
return {'FINISHED'}
@@ -370,12 +373,15 @@
opscore.sync_draw(self, context)
def execute(self, context: bpy.types.Context):
profiler = logging.get_profiler()
profiler.reset()
asset_col = context.scene.asset_pipeline.asset_collection
hooks_instance = Hooks()
hooks_instance.load_hooks(context)
save_images()
bpy.ops.wm.save_mainfile()
# Separate if statement so hook can execute before updating ownership/prep sync
if self.pull:
hooks_instance.execute_hooks(merge_mode="pull", merge_status='pre', asset_col=asset_col)
# Find current task Layer
@@ -387,6 +393,8 @@
hooks_instance.execute_hooks(
merge_mode="pull", merge_status='post', asset_col=asset_col
)
profiler.set_push()
hooks_instance.execute_hooks(merge_mode="push", merge_status='pre', asset_col=asset_col)
bpy.ops.wm.save_mainfile(filepath=self._current_file.__str__())
@@ -395,6 +403,7 @@
self.report({'INFO'}, "Asset Sync Complete")
else:
self.report({'INFO'}, "Asset Force Push Complete")
profiler.log_all()
return {'FINISHED'}

View File

@@ -1,4 +1,5 @@
import bpy
import time
from pathlib import Path
from .merge.publish import (
find_sync_target,
@@ -20,6 +21,7 @@ from .merge.task_layer import draw_task_layer_selection
from .asset_catalog import get_asset_id
from . import prefs
def sync_poll(cls, context):
if any([img.is_dirty for img in bpy.data.images]):
cls.poll_message_set("Please save unsaved Images")
@@ -134,6 +136,8 @@ def update_temp_file_paths(self, context, temp_file_path):
def sync_execute_pull(self, context):
start_time = time.time()
profiler = logging.get_profiler()
logger = logging.get_logger()
logger.info("Pulling Asset")
temp_file_path = create_temp_file_backup(self, context)
@@ -160,9 +164,12 @@ def sync_execute_pull(self, context):
context.scene.asset_pipeline.sync_error = True
self.report({'ERROR'}, error_msg)
return {'CANCELLED'}
profiler.add(time.time() - start_time, "TOTAL")
def sync_execute_push(self, context):
start_time = time.time()
profiler = logging.get_profiler()
logger = logging.get_logger()
logger.info("Pushing Asset")
_catalog_id = None
@@ -203,3 +210,4 @@ def sync_execute_push(self, context):
bpy.ops.wm.save_as_mainfile(filepath=file_path)
bpy.ops.wm.open_mainfile(filepath=self._current_file.__str__())
profiler.add(time.time() - start_time, "TOTAL")

View File

@@ -57,7 +57,18 @@ class AssetTransferDataTemp(bpy.types.PropertyGroup):
items=constants.TRANSFER_DATA_TYPES_ENUM_ITEMS,
)
surrender: bpy.props.BoolProperty(name="Surrender Ownership", default=False)
obj_name: bpy.props.StringProperty(name="Object Name")
obj_name: bpy.props.StringProperty(name="Object Name", default="")
def check_transfer_data_entry(self) -> set:
"""
Verifies if Transferable Data entry exists
"""
existing_items = [
transfer_data_item.name
for transfer_data_item in self.target_obj.transfer_data_ownership
if transfer_data_item.type == self.type
]
return set([self.name]).intersection(set(existing_items))
class TaskLayerSettings(bpy.types.PropertyGroup):
@@ -113,12 +124,12 @@ class AssetPipeline(bpy.types.PropertyGroup):
temp_transfer_data: bpy.props.CollectionProperty(type=AssetTransferDataTemp)
def add_temp_transfer_data(self, name, owner, type, obj_name, surrender):
def add_temp_transfer_data(self, name, owner, type_key, obj_name, surrender):
new_transfer_data = self.temp_transfer_data
transfer_data_item = new_transfer_data.add()
transfer_data_item.name = name
transfer_data_item.owner = owner
transfer_data_item.type = type
transfer_data_item.type = type_key
transfer_data_item.obj_name = obj_name
transfer_data_item.surrender = surrender
@@ -201,6 +212,7 @@ class AssetPipeline(bpy.types.PropertyGroup):
description="Select Asset Library Catalog for the current Asset, this value will be updated each time you Push to an 'Active' Publish",
) # type: ignore
@bpy.app.handlers.persistent
def set_asset_collection_name_post_file_load(_):
# Version the PointerProperty to the StringProperty, and the left-over pointer.