Asset Pipeline v2 #145

Closed
Nick Alberelli wants to merge 431 commits from (deleted):feature/asset-pipeline-v2 into main

2 changed files with 83 additions and 67 deletions
Showing only changes of commit 7b3f6a17e3

File 1 of 2

@@ -1,6 +1,6 @@
 import bpy
-from . import transfer_functions
+from . import transfer_functions, asset_suffix
 from pathlib import Path
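
Note: the newly imported asset_suffix module is used throughout this commit to map between suffixed datablock names (e.g. "CH-chr_test.TASK", "CH-chr_test.PUBLISH") and their base names. Its implementation is not part of this diff; a minimal sketch of the behavior get_asset_basename appears to rely on, assuming the suffix is the last dot-separated token:

    # Sketch only: assumed behavior of asset_suffix.get_asset_basename,
    # whose real implementation is not shown in this diff.
    def get_asset_basename(name: str) -> str:
        # "CH-chr_test.TASK" -> "CH-chr_test"; unsuffixed names pass through.
        return name.rsplit(".", 1)[0] if "." in name else name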
@@ -8,29 +8,30 @@ def update_transfer_data_ownership(transfer_data_item, target_obj: bpy.types.Object):
     transfer_data_ownership = target_obj.transfer_data_ownership
     transfer_items_names = [item.name for item in transfer_data_ownership]
     if transfer_data_item.name not in transfer_items_names:
+        id_name = asset_suffix.get_asset_basename(transfer_data_item.id.name)
         new_item = transfer_data_ownership.add()
         new_item.name = transfer_data_item.name
         new_item.owner = transfer_data_item.owner
         new_item.type = transfer_data_item.type
-        new_item.id = transfer_data_item.id
+        new_item.id = bpy.data.objects[
+            id_name
+        ]  # TODO replace this pointer with a string instead


 def apply_transfer_data(
     context: bpy.types.Context,
     transfer_data_list,
     target_col: bpy.types.Collection,
-    source_task_layer: str,
 ):
     for item in transfer_data_list:
         for target_obj in target_col.objects:
-            if target_obj.name.split(".")[0] == item.id.name.split(".")[0]:
+            if target_obj.name == asset_suffix.get_asset_basename(item.id.name):
                 print(f"{target_obj.name}: READY TO TRANSFER BABY!")
                 transfer_functions.transfer_vertex_group(
                     context=context,
                     vertex_group_name=item.name,
                     target_obj=target_obj,
-                    source_obj=bpy.data.objects[
-                        f"{item.id.name.split('.')[0]}.{source_task_layer}"
-                    ],
+                    source_obj=item.id,
                 )
                 update_transfer_data_ownership(
                     transfer_data_item=item,
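
Note: apply_transfer_data no longer rebuilds the source object name from a task-layer suffix; the source object is read straight from the ownership item's id pointer, and target matching compares suffix-stripped base names. Using the sketch above, the new matching rule looks like this (object names hypothetical):

    # The appended publish object carries a suffix; the local object does not.
    published_name = "CH-chr_test.PUBLISH"
    local_name = "CH-chr_test"
    assert local_name == get_asset_basename(published_name)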
@@ -41,33 +42,22 @@ def apply_transfer_data(
 def update_task_layer_objects(
     target_col: bpy.types.Collection,
     transfer_objs: list[bpy.types.Object],
-    target_tl: str,
 ):
-    # TODO CHECK WHY MAKE DUPLICATES ON PULL
-    # Delete existing root OBJ
-    for target_obj in target_col.objects:
-        obj_root_name = target_obj.name.split('.')[0]
-        for push_obj in transfer_objs:
-            if f"{obj_root_name}.{target_tl}" in push_obj.name:
-                bpy.data.objects.remove(target_obj)
     # Link new obj to collection
     for transfer_obj in transfer_objs:
-        obj_root_name = transfer_obj.name.split('.')[0]
-        new_obj = transfer_obj.copy()
-        new_obj.data = transfer_obj.data.copy()
-        new_obj.name = f"{obj_root_name}.{target_tl}"
-        target_col.objects.link(new_obj)
+        obj_root_name = asset_suffix.get_asset_basename(transfer_obj.name)
+        transfer_obj.name = f"{obj_root_name}"
+        target_col.objects.link(transfer_obj)


 def push_task_layer(
     context: bpy.types.Collection,
     source_col: bpy.types.Collection,
-    target_col: bpy.types.Collection,
+    current_col: bpy.types.Collection,
     source_tl: str,
-    target_tl: str,
 ):
     # TODO REFACTOR based on new PULL FUNCTION
     # NOTE PUSHING MAY BE AS SIMPLE AS OPENING THE PUBLISH FILE AND PULLING THE CURRENT TASK LAYER IN
     # Find Obj owned by Current Task Layer
     transfer_objs = []
     transfer_data_list = []
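
Note: update_task_layer_objects no longer deletes and re-copies objects under a new ".{target_tl}" name (the path the removed "CHECK WHY MAKE DUPLICATES ON PULL" TODO pointed at); it renames each transferred object in place to its base name and links it into the target collection, which is why the target_tl parameter could be dropped. Also note that push_task_layer's context parameter is still annotated bpy.types.Collection, while pull_task_layer below corrects this to bpy.types.Context. A minimal usage sketch under the same assumptions (names hypothetical):

    # Link an appended publish object into a fresh target collection;
    # the suffix is stripped as part of the link step.
    target_col = bpy.data.collections.new("CH-chr_test")
    update_task_layer_objects(
        target_col=target_col,
        transfer_objs=[bpy.data.objects["CH-chr_test.PUBLISH"]],
    )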
@@ -79,34 +69,78 @@ def push_task_layer(
         if item.owner == source_tl:
             transfer_data_list.append(item)
-    update_task_layer_objects(target_col, transfer_objs, target_tl)
+    update_task_layer_objects(
+        current_col,
+        transfer_objs,
+    )
     # Move transferrable data onto obj owned by others
-    apply_transfer_data(context, transfer_data_list, target_col, source_tl)
+    apply_transfer_data(context, transfer_data_list, current_col)


 def pull_task_layer(
-    context: bpy.types.Collection,
-    source_col: bpy.types.Collection,
-    target_col: bpy.types.Collection,
-    source_tl: str,
-    target_tl: str,
+    context: bpy.types.Context,
+    current_task_col: bpy.types.Collection,
+    col_base_name: str,
+    current_tl: str,
 ):
+    current_suffix = "TASK"
+    source_suffix = "PUBLISH"
+    asset_suffix.add_suffix_to_hierarchy(current_task_col, current_suffix)
+    current_file = Path(bpy.data.filepath)
+    pub_file = find_published_file(
+        current_file
+    )  # TODO if this function is used in PULL then this needs to be a variable set by the operator instead
+    import_data_from_lib(pub_file, "collections", col_base_name)
+    appended_col = bpy.data.collections[col_base_name]  # find appended data
+    asset_suffix.add_suffix_to_hierarchy(appended_col, source_suffix)
+    current_col = bpy.data.collections[f"{col_base_name}.{current_suffix}"]
+    source_col = bpy.data.collections[f"{col_base_name}.{source_suffix}"]
+    target_col = bpy.data.collections.new(col_base_name)
+    # Link Target as new Active Collection
+    context.scene.collection.children.link(target_col)
+    context.scene.collection.children.unlink(current_col)
     # Find Obj owned by other Current Task Layer
-    transfer_objs = []
-    transfer_data = []
+    source_transfer_objs = []
+    source_transfer_data = []
     for obj in source_col.objects:
-        if obj.asset_id_owner != target_tl:
-            transfer_objs.append(obj)
+        if obj.asset_id_owner != current_tl:
+            source_transfer_objs.append(obj)
         # Find Transfer-Data in other Task Layers
         for item in obj.transfer_data_ownership:
-            if item.owner != source_tl:
-                transfer_data.append(item)
+            if item.owner != current_tl:
+                source_transfer_data.append(item)
-    update_task_layer_objects(target_col, transfer_objs, target_tl)
+    update_task_layer_objects(target_col, source_transfer_objs)
+    current_transfer_objs = []
+    current_transfer_data = []
+    for obj in current_col.objects:
+        if obj.asset_id_owner == current_tl:
+            current_transfer_objs.append(obj)
+        # Find Transfer-Data in other Task Layers
+        for item in obj.transfer_data_ownership:
+            if item.owner == current_tl:
+                current_transfer_data.append(item)
+    update_task_layer_objects(target_col, current_transfer_objs)
     # TODO Move transferrable data onto obj owned by others
-    apply_transfer_data(context, transfer_data, target_col, source_tl)
+    apply_transfer_data(
+        context,
+        source_transfer_data,
+        target_col,
+    )
+    apply_transfer_data(
+        context,
+        current_transfer_data,
+        target_col,
+    )  # TODO test if only one list of transfer data is needed
+    bpy.ops.outliner.orphans_purge(
+        do_local_ids=True, do_linked_ids=False, do_recursive=True
+    )


 def find_published_file_version(file):
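
Note: the reworked pull flow is the heart of this commit. As a summary of the code above, pull_task_layer now does the following:

    # 1. Suffix every datablock under the current task collection with ".TASK".
    # 2. Locate the published file (find_published_file, defined elsewhere in
    #    this module) and append the asset collection from it via
    #    import_data_from_lib, then suffix that hierarchy with ".PUBLISH".
    # 3. Create a fresh, unsuffixed target collection and link it to the scene
    #    in place of the ".TASK" collection.
    # 4. Collect published objects NOT owned by the current task layer and
    #    current objects that ARE owned by it, stripping suffixes as they are
    #    linked into the target collection.
    # 5. Re-apply transferable data (currently vertex groups) for both lists.
    # 6. Purge orphaned datablocks left over from the merge.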

File 2 of 2

@@ -12,16 +12,15 @@ class ASSETPIPE_OT_update_ownership(bpy.types.Operator):
     def execute(self, context):
         obj = context.active_object
         ownership = context.active_object.transfer_data_ownership
-        task_layer_name = obj.name.split(".")[-1]
+        file_name = bpy.path.basename(bpy.context.blend_data.filepath)
+        task_layer_name = file_name.split(".")[-2]
         for vertex_group in obj.vertex_groups:
             if not vertex_group.name in [item.name for item in ownership]:
                 item = ownership.add()
                 item.name = vertex_group.name
                 item.owner = task_layer_name.upper()
                 item.type = "VERTEX_GROUP"  # TODO Make procedural
-                item.id = bpy.data.objects[
-                    f"{obj.name.split('.')[0]}.{obj.asset_id_owner}"
-                ]
+                item.id = bpy.data.objects[f"{obj.name.split('.')[-1]}"]
         return {'FINISHED'}
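
Note: ownership is now derived from the blend file's name rather than from a suffix on the object name. Assuming a naming convention along the lines of asset.task_layer.blend (the convention itself is not stated in this diff), the second-to-last dot-separated token is the task layer:

    # Hypothetical file name illustrating the assumed convention.
    file_name = "chr_test.model.blend"
    task_layer_name = file_name.split(".")[-2]  # "model"
    owner = task_layer_name.upper()             # "MODEL"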
@@ -50,33 +49,16 @@ class ASSETPIPE_OT_pull_test(bpy.types.Operator):
     bl_label = 'Pull from Publish'

     def execute(self, context):
-        task_col = bpy.data.collections["CH-chr_test"]  # TODO replace hard coded value
-        asset_suffix.add_suffix_to_hierarchy(task_col, "TASK")
-        current_file = Path(bpy.data.filepath)
-        pub_file = core.find_published_file(current_file)
-        col_name = "CH-chr_test"  # TODO replace hard coded value
-        core.import_data_from_lib(pub_file, "collections", col_name)
-        appended_col = bpy.data.collections["CH-chr_test"]  # TODO find appended data
-        asset_suffix.add_suffix_to_hierarchy(appended_col, "PUBLISH")
-        task_layer_col = bpy.data.collections["CH-chr_test.TASK"]
-        publish_col = bpy.data.collections["CH-chr_test.PUBLISH"]
-        # TODO fix pull function to work with multiple files
-        return {'FINISHED'}
-        # Find current task Layer
-        task_layer_col = context.collection
-        current_task_layer = task_layer_col.name.split('.')[-1]
-        # TODO move some of this logic into the core pull function
         col_base_name = "CH-chr_test"  # TODO replace hard coded value
         current_task_col = bpy.data.collections[col_base_name]
         core.pull_task_layer(
             context,
-            source_col=publish_col,
-            target_col=task_layer_col,
-            source_tl="PUB",
-            target_tl=current_task_layer,
+            current_task_col=current_task_col,
+            col_base_name=col_base_name,
+            current_tl="MODEL",
         )
         return {'FINISHED'}


 classes = (
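
Note: the test operator now hands core.pull_task_layer only the context, the collection's base name, and the current task layer; the core function rediscovers everything else (published file, suffixed collections) itself. Both remaining hard-coded values are flagged with TODOs; a hedged sketch of what the call might look like once they are resolved (both derivations are hypothetical, not part of this commit):

    col_base_name = context.collection.name  # instead of the literal "CH-chr_test"
    current_tl = bpy.path.basename(bpy.data.filepath).split(".")[-2].upper()  # instead of "MODEL"
    core.pull_task_layer(
        context,
        current_task_col=bpy.data.collections[col_base_name],
        col_base_name=col_base_name,
        current_tl=current_tl,
    )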