Asset Pipeline v2 #145
129
scripts-blender/addons/asset_pipeline_2/asset_mapping.py
Normal file
129
scripts-blender/addons/asset_pipeline_2/asset_mapping.py
Normal file
@ -0,0 +1,129 @@
|
||||
import bpy
|
||||
from typing import List, Dict, Union, Any, Set, Optional, Tuple
|
||||
|
||||
from . import util
|
||||
|
||||
|
||||
def get_opposite_suffix(suffix: str) -> Optional[str]:
    """Return the counterpart ownership suffix for *suffix*.

    A name ending in "EXTERNAL" maps to "LOCAL" and vice versa.  Any
    other value returns None so callers can detect an unmapped name
    instead of relying on the previous implicit fall-through.
    """
    # TODO FIX HACK that is used until I have transfer mapping
    # TODO Creating a map would be easier than doing this on the fly
    if suffix.endswith("EXTERNAL"):
        return "LOCAL"
    if suffix.endswith("LOCAL"):
        return "EXTERNAL"
    # Unknown suffix: make the previously implicit None return explicit.
    return None
|
||||
|
||||
|
||||
def rreplace(s: str, occurrence=1) -> str:
    """Swap the ownership suffix at the end of *s* for its opposite.

    The suffix is taken to be the text after the final "." in *s*;
    its last *occurrence* appearances are replaced with the opposite
    suffix (e.g. "Cube.LOCAL" -> "Cube.EXTERNAL").
    """
    current = s.split(".")[-1]
    opposite = get_opposite_suffix(current)
    pieces = s.rsplit(current, occurrence)
    return opposite.join(pieces)
|
||||
|
||||
|
||||
class AssetTransferMapping:
    """
    The AssetTransferMapping class represents a mapping between a source and a target.
    It contains an object mapping which connects each source object with a target
    object as well as a collection mapping.
    The mapping process relies heavily on suffixes, which is why we use
    MergeCollections as input that store a suffix.

    Instances of this class will be passed to the TaskLayer data transfer function
    so Users can easily write their merge instructions.
    """

    def __init__(
        self,
        local_coll: bpy.types.Collection,
        external_coll: bpy.types.Collection,
        local_tls: Set[str],
    ):
        self._local_col = local_coll
        self._external_col = external_coll
        # Names of the task layers owned by the local file.
        self._local_tls = local_tls

        # Local objects that have no external match and are not locally owned;
        # the merge step is expected to unlink these.
        self.local_obj_to_remove: Set[bpy.types.Object] = set()
        # External objects with no local counterpart that should be linked in.
        self.external_obj_to_add: Set[bpy.types.Object] = set()
        self._no_match_source_objs: Set[bpy.types.Object] = set()
        # NOTE(review): _no_match_target_objs is never populated anywhere in
        # this class — confirm whether it is still needed.
        self._no_match_target_objs: Set[bpy.types.Object] = set()

        self._no_match_source_colls: Set[bpy.types.Collection] = set()
        self._no_match_target_colls: Set[bpy.types.Collection] = set()

        # Build object_map and collection_map eagerly at construction time.
        self.generate_mapping()

    def generate_mapping(self) -> None:
        """Populate ``object_map`` and ``collection_map`` from the two collections."""
        # NOTE(review): external code appears to read ``_object_map`` — the
        # public attribute here is ``object_map``; verify callers.
        self.object_map = self._gen_object_map()
        self.collection_map = self._gen_collection_map()

    def _get_external_object(
        self, local_obj: bpy.types.Object
    ) -> Optional[bpy.types.Object]:
        """Find the external object whose name is *local_obj*'s name with the
        opposite suffix; record the object as unmatched and return None on failure."""
        external_obj_name = rreplace(
            local_obj.name,
        )
        external_obj = self._external_col.all_objects.get(external_obj_name)
        if not external_obj:
            print(f"Failed to find match obj {external_obj_name} for {local_obj.name}")
            self._no_match_source_objs.add(local_obj)
            return
        return external_obj

    def _gen_object_map(self) -> Dict[bpy.types.Object, bpy.types.Object]:
        """
        Tries to link all objects in source collection to an object in
        target collection. Uses suffixes to match them up.

        The map direction depends on ownership: for locally owned objects the
        external object is the key (external gets remapped to local); otherwise
        the local object is the key (local gets remapped to external).
        """
        object_map: Dict[bpy.types.Object, bpy.types.Object] = {}
        for local_obj in self._local_col.all_objects:
            # IF ITEM IS OWNED BY LOCAL TASK LAYERS
            if local_obj.asset_id_owner in self._local_tls:
                external_obj = self._get_external_object(local_obj)
                if external_obj:
                    object_map[external_obj] = local_obj

            # IF ITEM IS NOT OWNED BY LOCAL TASK LAYERS
            else:
                external_obj = self._get_external_object(local_obj)
                if external_obj:
                    object_map[local_obj] = external_obj
                else:
                    # REMOVE OBJ NOT OWNED BY LOCAL TASK LAYER THAT HAS NO MATCH
                    self.local_obj_to_remove.add(local_obj)

        # Find new objects to add to local_col
        for external_obj in self._external_col.all_objects:
            obj = self._local_col.all_objects.get(rreplace(external_obj.name))
            if not obj and external_obj.asset_id_owner not in self._local_tls:
                self.external_obj_to_add.add(external_obj)
        return object_map

    def _gen_collection_map(self) -> Dict[bpy.types.Collection, bpy.types.Collection]:
        """
        Tries to link all source collections to a target collection.
        Uses suffixes to match them up.
        """
        coll_map: Dict[bpy.types.Collection, bpy.types.Collection] = {}

        # Link top most parents.
        coll_map[self._local_col] = self._external_col

        # Link up all children.
        for s_coll in util.traverse_collection_tree(self._local_col):
            # assert source_obj.name.endswith(self._source_merge_coll.suffix)

            # Replace source object suffix with target suffix to get target object.
            # NOTE(review): lookup goes through bpy.data.collections (global),
            # not the external hierarchy — a same-named collection outside
            # self._external_col would also match; confirm this is intended.
            external_col = rreplace(s_coll.name)
            t_coll = bpy.data.collections.get(external_col)
            if t_coll:
                coll_map[s_coll] = t_coll
            else:
                print(
                    f"Failed to find match collection {s_coll.name} for {external_col}"
                )
                self._no_match_source_colls.add(s_coll)

        # External collections that nothing in the local tree mapped onto.
        all_tgt_colls = set(self._external_col.children_recursive)
        all_tgt_colls.add(self._external_col)
        match_target_colls = set([coll for coll in coll_map.values()])
        self._no_match_target_colls = all_tgt_colls - match_target_colls

        return coll_map
|
@ -3,6 +3,8 @@ import bpy
|
||||
from . import asset_suffix, datablocks, transferable_data
|
||||
from pathlib import Path
|
||||
|
||||
from .asset_mapping import AssetTransferMapping
|
||||
|
||||
|
||||
# TODO refactor merge functions into a class based on AssetBuilder class of Asset Pipeline 1
|
||||
|
||||
@ -27,7 +29,6 @@ def merge_task_layer(
|
||||
local_col = bpy.data.collections[col_base_name]
|
||||
local_suffix = "LOCAL"
|
||||
external_suffix = "EXTERNAL"
|
||||
target_suffix = "TARGET"
|
||||
asset_suffix.add_suffix_to_hierarchy(local_col, local_suffix)
|
||||
|
||||
import_data_from_lib(target_file, "collections", col_base_name)
|
||||
@ -36,43 +37,41 @@ def merge_task_layer(
|
||||
|
||||
local_col = bpy.data.collections[f"{col_base_name}.{local_suffix}"]
|
||||
external_col = bpy.data.collections[f"{col_base_name}.{external_suffix}"]
|
||||
target_col = bpy.data.collections.new(f"{col_base_name}.{target_suffix}")
|
||||
|
||||
# TODO Make sure we preserve the collection hirearchies instead of having one flat col
|
||||
# Link Target as new Active Collection
|
||||
context.scene.collection.children.link(target_col)
|
||||
context.scene.collection.children.unlink(local_col)
|
||||
mapping_task_target = AssetTransferMapping(local_col, external_col, local_tls)
|
||||
|
||||
# Find Obj owned by other Current Task Layer
|
||||
external_transfer_objs = []
|
||||
# Find Transfer Data
|
||||
transfer_data = []
|
||||
for obj in external_col.objects:
|
||||
if obj.asset_id_owner not in local_tls:
|
||||
external_transfer_objs.append(obj)
|
||||
# Find Transfer-Data in other Task Layers
|
||||
for item in obj.transfer_data_ownership:
|
||||
if item.owner not in local_tls:
|
||||
transfer_data.append(item)
|
||||
update_task_layer_objects(target_col, external_transfer_objs)
|
||||
|
||||
local_objs = []
|
||||
for obj in local_col.objects:
|
||||
if obj.asset_id_owner in local_tls:
|
||||
local_objs.append(obj)
|
||||
# Find Transfer-Data in other Task Layers
|
||||
for item in obj.transfer_data_ownership:
|
||||
if item.owner in local_tls:
|
||||
transfer_data.append(item)
|
||||
update_task_layer_objects(target_col, local_objs)
|
||||
|
||||
transferable_data.apply_transfer_data(context, transfer_data, target_col)
|
||||
transferable_data.apply_transfer_data(context, transfer_data, local_col)
|
||||
transferable_data.apply_transfer_data(context, transfer_data, external_col)
|
||||
|
||||
datablocks.remap_datablocks_outside_scene(context.scene)
|
||||
for old_obj in mapping_task_target.local_obj_to_remove:
|
||||
# TODO Support collection hirearchies
|
||||
local_col.objects.unlink(old_obj)
|
||||
|
||||
for new_obj in mapping_task_target.external_obj_to_add:
|
||||
# TODO Support collection hirearchies
|
||||
local_col.objects.link(new_obj)
|
||||
|
||||
for source_obj in mapping_task_target._object_map:
|
||||
target_obj = mapping_task_target._object_map[source_obj]
|
||||
datablocks.remap_user(source_obj, target_obj)
|
||||
|
||||
bpy.ops.outliner.orphans_purge(
|
||||
do_local_ids=True, do_linked_ids=False, do_recursive=True
|
||||
)
|
||||
asset_suffix.remove_suffix_from_hierarchy(target_col)
|
||||
asset_suffix.remove_suffix_from_hierarchy(local_col)
|
||||
|
||||
|
||||
def find_published_file_version(file):
|
||||
|
@ -56,6 +56,7 @@ def remap_get_data_blocks(scene: bpy.types.Scene):
|
||||
|
||||
def remap_user(source_datablock: bpy.types.ID, target_datablock: bpy.types.ID) -> None:
    """Remap all users of *source_datablock* to *target_datablock* and append
    a marker to the remapped datablock's name so it can be identified (and
    purged) later."""
    print(f"REMAPPING {source_datablock.name} to {target_datablock.name}")
    source_datablock.user_remap(target_datablock)
    source_datablock.name += "_Users_Remapped"
|
||||
|
||||
|
@ -91,3 +91,11 @@ def get_storage_of_id(datablock: bpy.types.ID) -> 'bpy_prop_collection':
|
||||
|
||||
fundamental_type = get_fundamental_id_type(datablock)
|
||||
return getattr(bpy.data, ID_CLASS_TO_STORAGE_NAME[fundamental_type])
|
||||
|
||||
|
||||
def traverse_collection_tree(
    collection: bpy.types.Collection,
) -> Generator[bpy.types.Collection, None, None]:
    """Yield *collection* and every descendant collection, depth-first,
    parents before their children."""
    stack = [collection]
    while stack:
        current = stack.pop()
        yield current
        # Push children in reverse so the first child is visited next,
        # matching the recursive pre-order traversal.
        stack.extend(list(current.children)[::-1])
|
||||
|
Loading…
Reference in New Issue
Block a user