Asset Pipeline v2 #145

Closed
Nick Alberelli wants to merge 431 commits from (deleted):feature/asset-pipeline-v2 into main

7 changed files with 83 additions and 76 deletions
Showing only changes of commit 87d4d37184

View File

@@ -1,8 +1,10 @@
 import bpy
 from typing import Dict, Set
-from . import asset_suffix, constants, util, core
-from .transfer_data import transfer_core
+from . import core # TODO DEBUG WHY THIS DOESN'T WORK
+from .asset_suffix import get_target_name, get_basename, get_name_with_asset_prefix
+from .util import get_storage_of_id
+from .transfer_data.transfer_core import transfer_data_add_entry
+from . import constants
 class AssetTransferMapping:
@@ -47,7 +49,7 @@ class AssetTransferMapping:
         self.other_id_map = self._gen_other_id_map()
     def _get_external_object(self, local_obj):
-        external_obj_name = asset_suffix.get_target_name(
+        external_obj_name = get_target_name(
             local_obj.name,
         )
         external_obj = self._external_col.all_objects.get(external_obj_name)
@@ -89,7 +91,7 @@ class AssetTransferMapping:
         # Find new objects to add to local_col
         for external_obj in self._external_col.all_objects:
             local_col_objs = self._local_col.all_objects
-            obj = local_col_objs.get(asset_suffix.get_target_name(external_obj.name))
+            obj = local_col_objs.get(get_target_name(external_obj.name))
             if not obj and external_obj.asset_id_owner not in self._local_tls:
                 self.external_obj_to_add.add(external_obj)
         return object_map
@@ -102,20 +104,15 @@ class AssetTransferMapping:
         coll_map: Dict[bpy.types.Collection, bpy.types.Collection] = {}
         local_tl_names = [
-            core.get_name_with_asset_prefix(tl_ui_name)
+            get_name_with_asset_prefix(tl_ui_name)
             for tl_key, tl_ui_name in constants.TASK_LAYER_TYPES.items()
             if tl_key in self._local_tls
         ]
         for local_task_layer_col in self._local_col.children:
-            if (
-                asset_suffix.get_basename(local_task_layer_col.name)
-                not in local_tl_names
-            ):
+            if get_basename(local_task_layer_col.name) not in local_tl_names:
                 # Replace source object suffix with target suffix to get target object.
-                external_col_name = asset_suffix.get_target_name(
-                    local_task_layer_col.name
-                )
+                external_col_name = get_target_name(local_task_layer_col.name)
                 external_col = bpy.data.collections.get(external_col_name)
                 if external_col:
                     coll_map[local_task_layer_col] = external_col
@@ -135,7 +132,7 @@ class AssetTransferMapping:
     def _get_transfer_data_map_item(self, obj, target_obj, transfer_info):
         temp_transfer_data = bpy.context.scene.asset_pipeline.temp_transfer_data
         temp_info_index = len(temp_transfer_data)
-        temp_info = transfer_core.transfer_data_add_entry(
+        temp_info = transfer_data_add_entry(
             transfer_data=temp_transfer_data,
             name=transfer_info.name,
             td_type=transfer_info.type,
@@ -153,7 +150,7 @@ class AssetTransferMapping:
         return name, map_item
     def _check_transfer_data_conflict(self, obj, transfer_info):
-        other_obj = bpy.data.objects.get(asset_suffix.get_target_name(obj.name))
+        other_obj = bpy.data.objects.get(get_target_name(obj.name))
         check_transfer_info = None
         if not other_obj:
             return
@@ -210,8 +207,8 @@ class AssetTransferMapping:
     def _gen_other_id_map(self):
         other_id_map: Dict[bpy.types.ID, bpy.types.ID] = {}
         for local_id in core.get_other_ids(self._local_col):
-            external_id_name = asset_suffix.get_target_name(local_id.name)
-            id_storage = util.get_storage_of_id(local_id)
+            external_id_name = get_target_name(local_id.name)
+            id_storage = get_storage_of_id(local_id)
             external_id = id_storage.get(external_id_name)
             # TODO Check for conflicts
             if (
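
Note on the `# TODO DEBUG WHY THIS DOESN'T WORK` import at the top of this file: keeping `from . import core` at module level while core.py imports AssetTransferMapping from this module creates a circular import. One common workaround, sketched below, is to defer the import into the method that needs it; the method and helper names are taken from this diff, but the body past the truncated `if (` is guessed and purely illustrative.

# Sketch only: a deferred import would break the asset_mapping <-> core cycle.
def _gen_other_id_map(self):
    from . import core  # imported lazily so this module loads before core

    other_id_map: Dict[bpy.types.ID, bpy.types.ID] = {}
    for local_id in core.get_other_ids(self._local_col):
        external_id_name = get_target_name(local_id.name)
        id_storage = get_storage_of_id(local_id)
        external_id = id_storage.get(external_id_name)
        if external_id:  # conflict handling omitted (see the TODO above)
            other_id_map[local_id] = external_id
    return other_id_map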

View File

@@ -20,8 +20,8 @@
 import bpy
 from bpy_extras.id_map_utils import get_id_reference_map, get_all_referenced_ids
-from . import constants
+from .util import get_storage_of_id
+from . import constants
 DELIMITER = "."
@@ -64,7 +64,7 @@ def get_basename(name: str) -> str:
         constants.EXTERNAL_SUFFIX
     ):
         return DELIMITER.join(name.split(DELIMITER)[:-1])
-    return name
+    return name
 def remove_suffix_from_hierarchy(collection: bpy.types.Collection) -> None:
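
For context on the names this commit now imports directly: get_basename is shown in the hunk above, but get_target_name is not touched by this commit, so the sketch below only guesses at its behavior from how it is called elsewhere in the diff (mapping a datablock name between the local and external suffixes). The suffix values are assumptions, not read from constants.py.

# Guessed sketch, not the real asset_suffix.py; suffix strings are assumed.
LOCAL_SUFFIX = "LOCAL"        # assumed value of constants.LOCAL_SUFFIX
EXTERNAL_SUFFIX = "EXTERNAL"  # assumed value of constants.EXTERNAL_SUFFIX

def get_target_name(name: str) -> str:
    # Map "Rig.LOCAL" -> "Rig.EXTERNAL" and vice versa; leave other names alone.
    if name.endswith(LOCAL_SUFFIX):
        return get_basename(name) + DELIMITER + EXTERNAL_SUFFIX
    if name.endswith(EXTERNAL_SUFFIX):
        return get_basename(name) + DELIMITER + LOCAL_SUFFIX
    return name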

View File

@@ -1,15 +1,24 @@
 import bpy
-from .transfer_data import transfer_core
-from .asset_mapping import AssetTransferMapping
+from .transfer_data.transfer_core import (
+    init_transfer_data,
+    transfer_data_is_missing,
+    transfer_data_add_entry,
+    apply_transfer_data,
+    transfer_data_clean,
+)
-from . import asset_suffix
+from .asset_suffix import (
+    add_suffix_to_hierarchy,
+    remove_suffix_from_hierarchy,
+    get_name_with_asset_prefix,
+)
-from bpy_extras.id_map_utils import get_id_reference_map, get_all_referenced_ids
 from pathlib import Path
 from typing import Dict
+from .asset_mapping import AssetTransferMapping
-from . import constants, util
 from rigify.utils.misc import copy_attributes
+from bpy_extras.id_map_utils import get_id_reference_map, get_all_referenced_ids
+from . import constants
 def ownership_transfer_data_cleanup(
@@ -25,7 +34,7 @@ def ownership_transfer_data_cleanup(
     to_remove = []
     for transfer_info in transfer_data:
         if transfer_info.owner == task_layer_name:
-            if transfer_core.transfer_data_is_missing(transfer_info):
+            if transfer_data_is_missing(transfer_info):
                 to_remove.append(transfer_info.name)
     for name in to_remove:
@@ -64,7 +73,7 @@ def ownership_get(
         if obj.asset_id_owner == "NONE":
             continue
         ownership_transfer_data_cleanup(obj, task_layer_key)
-        transfer_core.init_transfer_data(scene, obj)
+        init_transfer_data(scene, obj)
 def ownership_set(temp_transfer_data: bpy.types.CollectionProperty) -> None:
@@ -77,7 +86,7 @@ def ownership_set(temp_transfer_data: bpy.types.CollectionProperty) -> None:
     """
     for transfer_info in temp_transfer_data:
         transfer_data = transfer_info.obj.transfer_data_ownership
-        transfer_core.transfer_data_add_entry(
+        transfer_data_add_entry(
             transfer_data, transfer_info.name, transfer_info.type, transfer_info.owner
         )
@@ -109,15 +118,6 @@ def get_task_layer_col_name(task_layer_key):
     return get_name_with_asset_prefix(task_layer_name)
-def get_name_with_asset_prefix(name: str):
-    # TODO Docstring and return types
-    asset_pipe = bpy.context.scene.asset_pipeline
-    if name.startswith(asset_pipe.prefix + "."):
-        return name
-    prefix = asset_pipe.prefix + "." if asset_pipe.prefix != "" else ""
-    return prefix + name
 def remap_user(source_datablock: bpy.data, target_datablock: bpy.data) -> None:
     """Remap datablock and append name to datablock that has been remapped
@@ -156,10 +156,10 @@ def merge_task_layer(
     col_base_name = local_col.name
     local_suffix = constants.LOCAL_SUFFIX
     external_suffix = constants.EXTERNAL_SUFFIX
-    asset_suffix.add_suffix_to_hierarchy(local_col, local_suffix)
+    add_suffix_to_hierarchy(local_col, local_suffix)
     appended_col = import_data_from_lib(external_file, "collections", col_base_name)
-    asset_suffix.add_suffix_to_hierarchy(appended_col, external_suffix)
+    add_suffix_to_hierarchy(appended_col, external_suffix)
     local_col = bpy.data.collections[f"{col_base_name}.{local_suffix}"]
     external_col = bpy.data.collections[f"{col_base_name}.{external_suffix}"]
@@ -183,12 +183,12 @@ def merge_task_layer(
         target_obj = map.object_map[source_obj]
         target_obj.transfer_data_ownership.clear()
-    transfer_core.apply_transfer_data(context, map.transfer_data_map)
+    apply_transfer_data(context, map.transfer_data_map)
     for source_obj in map.object_map:
         target_obj = map.object_map[source_obj]
         remap_user(source_obj, target_obj)
-        transfer_core.transfer_data_clean(target_obj)
+        transfer_data_clean(target_obj)
     for col in map.collection_map:
         remap_user(col, map.collection_map[col])
@@ -199,7 +199,7 @@ def merge_task_layer(
     bpy.ops.outliner.orphans_purge(
         do_local_ids=True, do_linked_ids=False, do_recursive=True
     )
-    asset_suffix.remove_suffix_from_hierarchy(local_col)
+    remove_suffix_from_hierarchy(local_col)
 def find_file_version(published_file: Path) -> int:
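
The get_name_with_asset_prefix helper removed from this file is now imported from asset_suffix (see the new import block at the top of this file). Its behavior is fully visible in the deleted lines above: prepend the scene's asset prefix unless the name already carries it. A hypothetical usage, with a made-up prefix value:

# Illustrative only; "CH_rex" is an invented value for scene.asset_pipeline.prefix.
bpy.context.scene.asset_pipeline.prefix = "CH_rex"
get_name_with_asset_prefix("modeling")        # -> "CH_rex.modeling"
get_name_with_asset_prefix("CH_rex.rigging")  # already prefixed, returned unchanged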

View File

@@ -1,10 +1,20 @@
 import bpy
-from . import core
-from pathlib import Path
-from . import constants
-from .transfer_data import transfer_ui
 import os
+from pathlib import Path
+from .core import (
+    get_task_layer_col_name,
+    ownership_get,
+    ownership_set,
+    get_invalid_objects,
+    init_other_ids,
+    find_sync_target,
+    merge_task_layer,
+    find_all_published,
+    get_next_published_file,
+)
+from .transfer_data.transfer_ui import draw_transfer_data
+from . import constants
 class ASSETPIPE_OT_create_new_asset(bpy.types.Operator):
@@ -68,7 +78,7 @@ class ASSETPIPE_OT_create_new_asset(bpy.types.Operator):
         for task_layer_key in constants.TASK_LAYER_TYPES.keys():
             if task_layer_key == "NONE":
                 continue
-            col_name = core.get_task_layer_col_name(task_layer_key)
+            col_name = get_task_layer_col_name(task_layer_key)
             bpy.data.collections.new(col_name)
             asset_col.children.link(bpy.data.collections.get(col_name))
@@ -141,11 +151,11 @@ class ASSETPIPE_OT_sync_with_publish(bpy.types.Operator):
             self.report({'ERROR'}, "Current File Name doesn't contain valid task layer")
             return {'CANCELLED'}
-        core.ownership_get(local_col, context.scene)
+        ownership_get(local_col, context.scene)
         # TODO Remove Invalid Objs Explicitly, some will be auto removed but not all
-        self._invalid_objs = core.get_invalid_objects(local_col, context.scene)
-        self._other_ids = core.init_other_ids(context.scene)
+        self._invalid_objs = get_invalid_objects(local_col, context.scene)
+        self._other_ids = init_other_ids(context.scene)
         # Default behaviour is to pull before pushing
         if self.push:
@@ -193,12 +203,12 @@ class ASSETPIPE_OT_sync_with_publish(bpy.types.Operator):
             ]
             box = layout.box()
             box.label(text=obj.name, icon="OBJECT_DATA")
-            transfer_ui.draw_transfer_data(obj_ownership, box)
+            draw_transfer_data(obj_ownership, box)
     def execute(self, context: bpy.types.Context):
         # Find current task Layer
         temp_transfer_data = context.scene.asset_pipeline.temp_transfer_data
-        core.ownership_set(temp_transfer_data)
+        ownership_set(temp_transfer_data)
         current_file = Path(bpy.data.filepath)
         temp_dir = Path(bpy.app.tempdir).parent
         task_layer_key = context.scene.asset_pipeline.task_layer_name
@@ -206,7 +216,7 @@ class ASSETPIPE_OT_sync_with_publish(bpy.types.Operator):
             self.report({'ERROR'}, "Current File Name doesn't contain valid task layer")
             return {'CANCELLED'}
-        sync_target = core.find_sync_target(current_file)
+        sync_target = find_sync_target(current_file)
         if not sync_target.exists():
             self.report({'ERROR'}, "Sync Target could not be determined")
             return {'CANCELLED'}
@@ -216,7 +226,7 @@ class ASSETPIPE_OT_sync_with_publish(bpy.types.Operator):
             current_file.name.replace(".blend", "") + "_Asset_Pipe_Backup.blend"
         )
         bpy.ops.wm.save_as_mainfile(filepath=temp_file.__str__(), copy=True)
-        error_msg = core.merge_task_layer(
+        error_msg = merge_task_layer(
             context,
             local_tls=[task_layer_key],
             external_file=sync_target,
@@ -234,9 +244,7 @@ class ASSETPIPE_OT_sync_with_publish(bpy.types.Operator):
         if not self.push:
             return {'FINISHED'}
-        push_targets = core.find_all_published(
-            current_file, constants.ACTIVE_PUBLISH_KEY
-        )
+        push_targets = find_all_published(current_file, constants.ACTIVE_PUBLISH_KEY)
         if sync_target not in push_targets:
             push_targets.append(sync_target)
@@ -254,7 +262,7 @@ class ASSETPIPE_OT_sync_with_publish(bpy.types.Operator):
                 if task_layer != task_layer_key
             ]
-            error_msg = core.merge_task_layer(
+            error_msg = merge_task_layer(
                 context,
                 local_tls=local_tls,
                 external_file=current_file,
@@ -294,7 +302,7 @@ class ASSETPIPE_OT_publish_new_version(bpy.types.Operator):
            )
            return {'CANCELLED'}
        current_file = Path(bpy.data.filepath)
-        new_file_path = core.get_next_published_file(current_file, self.publish_types)
+        new_file_path = get_next_published_file(current_file, self.publish_types)
        bpy.ops.wm.save_as_mainfile(filepath=new_file_path.__str__(), copy=True)
        return {'FINISHED'}
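
Reading the two merge_task_layer calls in this operator: the pull step keeps only the current file's task layer local and merges everything else in from the sync target, while the push step inverts that list for each published target. A condensed sketch of that asymmetry, with variable names taken from the diff and the surrounding file handling omitted; it is not runnable on its own.

# Pull: this file owns only its own task layer; the rest comes from the sync target.
merge_task_layer(context, local_tls=[task_layer_key], external_file=sync_target)

# Push: a published file keeps every other task layer and receives ours from this file.
local_tls = [tl for tl in constants.TASK_LAYER_TYPES if tl != task_layer_key]
merge_task_layer(context, local_tls=local_tls, external_file=current_file)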

View File

@@ -1,7 +1,9 @@
 import bpy
 from bpy import context
-from . import transfer_core
-from .. import asset_suffix, constants, util, core
+from ..asset_suffix import get_basename
+from ..core import find_drivers, copy_driver
+from . import transfer_core # TODO FIX
+from .. import constants
 import mathutils
 import bmesh
 import numpy as np
@@ -226,11 +228,11 @@ def transfer_modifier(modifier_name, target_obj, source_obj):
     )
     if source_obj.animation_data is None:
         return
-    fcurves = core.find_drivers(
+    fcurves = find_drivers(
         source_obj.animation_data.drivers, 'modifiers', modifier_name
     )
     for fcurve in fcurves:
-        core.copy_driver(from_fcurve=fcurve, target=target_obj)
+        copy_driver(from_fcurve=fcurve, target=target_obj)
 # CONSTRAINTS
@@ -292,17 +294,17 @@ def transfer_constraint(constraint_name, target_obj, source_obj):
     if source_obj.animation_data is None:
         return
-    fcurves = core.find_drivers(
+    fcurves = find_drivers(
         source_obj.animation_data.drivers, 'constraints', constraint_name
     )
     for fcurve in fcurves:
-        core.copy_driver(from_fcurve=fcurve, target=target_obj)
+        copy_driver(from_fcurve=fcurve, target=target_obj)
 # MATERIAL SLOT
 def material_slots_clean(obj):
-    # Material slots cannot use generic transfer_info_clean() function
+    # Material slots cannot use generic transfer_core.transfer_info_clean() function
     context = bpy.context
     transfer_data_list = transfer_core.get_transfer_data_as_names(
         obj.transfer_data_ownership, constants.MATERIAL_SLOT_KEY
@@ -418,7 +420,7 @@ def shape_keys_clean(obj):
         obj.transfer_data_ownership, constants.SHAPE_KEY_KEY
     )
     for shape_key in obj.data.shape_keys.key_blocks:
-        if not asset_suffix.get_basename(shape_key.name) in transfer_data_list:
+        if not get_basename(shape_key.name) in transfer_data_list:
            obj.shape_key_remove(shape_key)
@@ -525,13 +527,13 @@ def transfer_shape_key(
            ]
            val = mathutils.Vector(sum(np.array(vals_weighted)))
            sk_target.data[i].co = vert.co + val
-    fcurves = core.find_drivers(
+    fcurves = find_drivers(
         source_obj.data.shape_keys.animation_data.drivers,
         'key_blocks',
         shape_key_name,
     )
     for fcurve in fcurves:
-        core.copy_driver(from_fcurve=fcurve, target=target_obj.data.shape_keys)
+        copy_driver(from_fcurve=fcurve, target=target_obj.data.shape_keys)
 # ATTRIBUTE
@@ -554,7 +556,7 @@ def attribute_clean(obj):
        obj.transfer_data_ownership, constants.ATTRIBUTE_KEY
    )
    for item in attributes:
-        if not asset_suffix.get_basename(item.name) in transfer_data_list:
+        if not get_basename(item.name) in transfer_data_list:
            print(f"Cleaning attribute {item.name}")
            obj.data.attributes.remove(item)
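
find_drivers and copy_driver are now imported from ..core, but their implementations are not part of this commit. Judging from the call sites above (a drivers collection, a property-collection name such as 'modifiers', 'constraints' or 'key_blocks', and an entry name), find_drivers presumably filters driver F-Curves by data_path. A guessed sketch for illustration, not the actual core.py code:

def find_drivers(drivers, property_type: str, entry_name: str):
    # e.g. keep drivers whose data_path starts with 'modifiers["Subdivision"]'
    return [
        fcurve
        for fcurve in drivers
        if fcurve.data_path.startswith(f'{property_type}["{entry_name}"]')
    ]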

View File

@@ -1,5 +1,5 @@
-from .. import constants, core
 import bpy
+from .. import constants
 def draw_transfer_data_type(

View File

@@ -1,8 +1,8 @@
 import bpy
-from . import core, constants
-from .transfer_data import transfer_ui
 from pathlib import Path
+from .transfer_data.transfer_ui import draw_transfer_data
+from . import constants
 class ASSETPIPE_sync(bpy.types.Panel):
@@ -67,7 +67,7 @@ class ASSETPIPE_ownership_inspector(bpy.types.Panel):
         layout = layout.box()
         owner_tl_ui_name = constants.TASK_LAYER_TYPES[obj.asset_id_owner]
         layout.label(text=f"{obj.name}: '{owner_tl_ui_name}'", icon="OBJECT_DATA")
-        transfer_ui.draw_transfer_data(transfer_data, layout)
+        draw_transfer_data(transfer_data, layout)
 classes = (