Asset Pipeline v2 #145

Closed
Nick Alberelli wants to merge 431 commits from (deleted):feature/asset-pipeline-v2 into main

10 changed files with 70 additions and 67 deletions
Showing only changes of commit 64bbed70ca

View File

@@ -15,6 +15,7 @@ from .naming import (
get_id_type_name,
)
from .task_layer import get_local_task_layers
from pathlib import Path
from typing import Dict
@@ -22,18 +23,18 @@ from .. import constants
def ownership_transfer_data_cleanup(
obj: bpy.types.Object, task_layer_name: str
obj: bpy.types.Object,
) -> None:
"""Remove Transfer Data ownership items if the corrisponding data is missing
Args:
obj (bpy.types.Object): Object that contains the transfer data
task_layer_name (str): Name of the current task layer that owns the data
"""
local_task_layer_keys = get_local_task_layers()
transfer_data = obj.transfer_data_ownership
to_remove = []
for transfer_data_item in transfer_data:
if transfer_data_item.owner == task_layer_name:
if transfer_data_item.owner in local_task_layer_keys:
if transfer_data_is_missing(transfer_data_item):
to_remove.append(transfer_data_item.name)
@@ -61,21 +62,22 @@ def ownership_get(
"""
asset_pipe = scene.asset_pipeline
asset_pipe.temp_transfer_data.clear()
task_layer_key = asset_pipe.task_layer_name
# TODO Figure out default in this case
default_task_layer = get_local_task_layers()[0]
task_layer_objs = get_task_layer_objects()
for obj in local_col.all_objects:
# Mark Asset ID Owner for objects in the current task layers collection
if obj.asset_id_owner == "NONE" and obj in task_layer_objs:
obj.asset_id_owner = task_layer_key
obj.asset_id_owner = default_task_layer
obj.name = get_name_with_asset_prefix(obj.name)
# Skip items that have no owner
if obj.asset_id_owner == "NONE":
continue
ownership_transfer_data_cleanup(obj, task_layer_key)
ownership_transfer_data_cleanup(obj)
init_transfer_data(scene, obj)
for col in asset_pipe.asset_collection.children:
if col.asset_id_owner == "NONE":
col.asset_id_owner = task_layer_key
col.asset_id_owner = default_task_layer
def ownership_set(temp_transfer_data: bpy.types.CollectionProperty) -> None:
@@ -111,14 +113,14 @@ def get_invalid_objects(
Returns:
list[bpy.types.Object]: List of Invalid Objects
"""
task_layer_key = scene.asset_pipeline.task_layer_name
local_task_layer_keys = get_local_task_layers()
task_layer_objs = get_task_layer_objects()
invalid_obj = []
for obj in scene.objects:
if obj.asset_id_owner == "NONE":
invalid_obj.append(obj)
if obj not in task_layer_objs and obj.asset_id_owner == task_layer_key:
if obj not in task_layer_objs and obj.asset_id_owner in local_task_layer_keys:
invalid_obj.append(obj)
return invalid_obj
@@ -271,10 +273,10 @@ def import_data_from_lib(
def get_task_layer_objects():
asset_pipe = bpy.context.scene.asset_pipeline
task_layer_key = asset_pipe.task_layer_name
local_task_layer_keys = get_local_task_layers()
local_col = asset_pipe.asset_collection
task_layer_objs = []
for col in local_col.children:
if col.asset_id_owner == task_layer_key:
if col.asset_id_owner in local_task_layer_keys:
task_layer_objs = task_layer_objs + list(col.all_objects)
return task_layer_objs
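
The core change in this file: ownership checks that used to compare against the single task_layer_name of the current file now test membership in the set of local task layers. A minimal standalone sketch of that check, with hypothetical task layer keys standing in for get_local_task_layers():

# Sketch only: "MODEL", "RIG" and "SHD" are hypothetical task layer keys.
local_task_layer_keys = ["MODEL", "RIG"]      # stand-in for get_local_task_layers()

def is_locally_owned(owner: str) -> bool:
    # was: owner == task_layer_name (a single key per file)
    return owner in local_task_layer_keys

print(is_locally_owned("RIG"))    # True  -> cleanup / collection logic applies
print(is_locally_owned("SHD"))    # False -> item belongs to another task layer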

View File

@@ -22,6 +22,7 @@ import bpy
from bpy_extras.id_map_utils import get_id_reference_map, get_all_referenced_ids
from .util import get_storage_of_id
from .. import constants
from .task_layer import get_default_task_layer
DELIMITER = "."
@@ -134,7 +135,7 @@ def get_name_with_asset_prefix(name: str) -> str:
return prefix + name
def get_name_with_task_layer_prefix(name: str) -> str:
def get_name_with_task_layer_prefix(name: str, td_type_key: str) -> str:
"""Returns a string with the prefix if it is not already set.
Users can specify a prefix to live on all objects during the
asset creation process. This prefix is stored in the scene.
@@ -145,29 +146,13 @@ def get_name_with_task_layer_prefix(name: str) -> str:
Returns:
str: Returns name with prefix
"""
asset_pipe = bpy.context.scene.asset_pipeline
prefix = asset_pipe.task_layer_name
prefix = get_default_task_layer(td_type_key)
for task_layer_key in constants.TASK_LAYER_TYPES.keys():
if name.startswith(task_layer_key + "."):
return name
return prefix + "." + name
def get_task_layer_col_name(task_layer_key) -> str:
"""Returns the name of a givem task layer colection via
the task layer key. Task Layer Collection names are a combination
of a prefix if any and the task_layer_name.
Args:
task_layer_key (_type_): Key of a given task layer
Returns:
str: Task Layer Collection name including prefix if exists
"""
task_layer_name = constants.TASK_LAYER_TYPES[task_layer_key]
return get_name_with_asset_prefix(task_layer_name)
def get_id_type_name(id_type: bpy.types) -> str:
"""Return the cosmetic name of a given ID type

View File

@@ -2,6 +2,7 @@ import bpy
from bpy_extras.id_map_utils import get_id_reference_map, get_all_referenced_ids
from .util import get_fundamental_id_type
from .. import constants
from .task_layer import get_local_task_layers
def get_shared_ids(collection: bpy.types.Collection) -> list[bpy.types.ID]:
@@ -32,12 +33,14 @@ def init_shared_ids(scene: bpy.types.Scene) -> list[bpy.types.ID]:
Returns:
list[bpy.types.ID]: A list of new 'shared_ids' owned by the file's task layer
"""
# TODO Figure out what the default is in this case
task_layer_key = get_local_task_layers()[0]
shared_ids = []
asset_pipe = scene.asset_pipeline
local_col = asset_pipe.asset_collection
for id in get_shared_ids(local_col):
if id.asset_id_owner == 'NONE':
id.asset_id_owner = asset_pipe.task_layer_name
id.asset_id_owner = task_layer_key
shared_ids.append(id)
return shared_ids

View File

@@ -0,0 +1,10 @@
import bpy
def get_local_task_layers():
local_task_layer_strings = bpy.context.scene.asset_pipeline.local_task_layers
return local_task_layer_strings.split(",")
def get_default_task_layer(td_type: str):
return get_local_task_layers()[0]
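
A standalone sketch of the two new helpers, with the scene's Local Task Layers StringProperty replaced by a plain string so the behaviour is visible outside Blender (the "MODEL,RIG" value is hypothetical):

# Sketch only: mirrors the module above without bpy.
def local_task_layers(prop_value="MODEL,RIG"):     # stand-in for the scene StringProperty
    return prop_value.split(",")

def default_task_layer(td_type):
    # td_type is currently unused; the first local layer wins (see the TODO comments above)
    return local_task_layers()[0]

print(local_task_layers())             # ['MODEL', 'RIG']
print(default_task_layer("MODIFIER"))  # 'MODEL'
# Caveat: with the property's default of "", split(",") returns [""], so the
# helpers assume local_task_layers has been filled in before sync runs.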

View File

@@ -9,6 +9,8 @@ from .transfer_util import (
transfer_data_item_init,
check_transfer_data_entry,
)
from ..task_layer import get_default_task_layer
from ... import constants
import mathutils
import bmesh
@@ -100,9 +102,9 @@ def modifier_is_missing(transfer_data_item):
def init_modifiers(scene, obj):
td_type_key = constants.MODIFIER_KEY
transfer_data = obj.transfer_data_ownership
task_layer_key = scene.asset_pipeline.task_layer_name
task_layer_key = get_default_task_layer(td_type_key)
for mod in obj.modifiers:
mod.name = get_name_with_task_layer_prefix(mod.name)
mod.name = get_name_with_task_layer_prefix(mod.name, td_type_key)
# Only add new ownership transfer_data_item if the modifier doesn't already have an owner
matches = check_transfer_data_entry(transfer_data, mod.name, td_type_key)
if len(matches) == 0:
@@ -196,9 +198,9 @@ def constraint_is_missing(transfer_data_item):
def init_constraints(scene, obj):
td_type_key = constants.CONSTRAINT_KEY
transfer_data = obj.transfer_data_ownership
task_layer_key = scene.asset_pipeline.task_layer_name
task_layer_key = get_default_task_layer(td_type_key)
for const in obj.constraints:
const.name = get_name_with_task_layer_prefix(const.name)
const.name = get_name_with_task_layer_prefix(const.name, td_type_key)
# Only add new ownership transfer_data_item if the constraint doesn't already have an owner
matches = check_transfer_data_entry(transfer_data, const.name, td_type_key)
if len(matches) == 0:
@@ -286,8 +288,8 @@ def material_slots_is_missing(transfer_data_item):
def init_material_slots(scene, obj):
task_layer_key = scene.asset_pipeline.task_layer_name
td_type_key = constants.MATERIAL_SLOT_KEY
task_layer_key = get_default_task_layer(td_type_key)
name = constants.MATERIAL_TRANSFER_DATA_ITEM_NAME
transfer_data = obj.transfer_data_ownership
@@ -553,8 +555,8 @@ def init_attributes(scene, obj):
if obj.type != "MESH":
return
transfer_data = obj.transfer_data_ownership
task_layer_key = scene.asset_pipeline.task_layer_name
td_type_key = constants.ATTRIBUTE_KEY
task_layer_key = get_default_task_layer(td_type_key)
for atttribute in attributes_get_editable(obj.data.attributes):
# Only add new ownership transfer_data_item if the attribute doesn't already have an owner
matches = check_transfer_data_entry(transfer_data, atttribute.name, td_type_key)
@@ -620,8 +622,8 @@ def parent_is_missing(transfer_data_item):
def init_parent(scene, obj):
task_layer_key = scene.asset_pipeline.task_layer_name
td_type_key = constants.PARENT_KEY
task_layer_key = get_default_task_layer(td_type_key)
name = constants.PARENT_TRANSFER_DATA_ITEM_NAME
transfer_data = obj.transfer_data_ownership

View File

@@ -1,5 +1,6 @@
import bpy
from ..naming import get_basename
from ..task_layer import get_default_task_layer
def check_transfer_data_entry(
@@ -101,14 +102,14 @@ def transfer_data_item_init(
td_type_key (str): Key for the transfer data type
"""
transfer_data = obj.transfer_data_ownership
task_layer_key = scene.asset_pipeline.task_layer_name
default_task_layer = get_default_task_layer(td_type_key)
for item in data_list:
# Only add new ownership transfer_data_item if the item doesn't already have an owner
matches = check_transfer_data_entry(transfer_data, item.name, td_type_key)
if len(matches) == 0:
scene.asset_pipeline.add_temp_transfer_data(
name=item.name,
owner=task_layer_key,
owner=default_task_layer,
type=td_type_key,
obj=obj,
)

View File

@@ -5,7 +5,6 @@ from pathlib import Path
from .merge.publish import (
get_next_published_file,
)
from .merge.naming import get_task_layer_col_name
from .merge.transfer_data.transfer_ui import draw_transfer_data
from .images import save_images
@@ -79,14 +78,14 @@ class ASSETPIPE_OT_create_new_asset(bpy.types.Operator):
asset_pipe.prefix = self._prefix
# Create the collections for each task layer.
for task_layer_key in constants.TASK_LAYER_TYPES.keys():
if task_layer_key == "NONE":
continue
col_name = get_task_layer_col_name(task_layer_key)
bpy.data.collections.new(col_name)
new_col = bpy.data.collections.get(col_name)
asset_col.children.link(new_col)
new_col.asset_id_owner = task_layer_key
# for task_layer_key in constants.TASK_LAYER_TYPES.keys():
# if task_layer_key == "NONE":
# continue
# col_name = get_task_layer_col_name(task_layer_key)
# bpy.data.collections.new(col_name)
# new_col = bpy.data.collections.get(col_name)
# asset_col.children.link(new_col)
# new_col.asset_id_owner = task_layer_key
starting_file = ""
# Create the file for each task layer.

View File

@@ -51,10 +51,7 @@ class AssetPipeline(bpy.types.PropertyGroup):
temp_transfer_data: bpy.props.CollectionProperty(type=AssetTransferDataTemp)
# TODO Rename to Current_Task_Layer
task_layer_name: bpy.props.EnumProperty(
name="Task Layer Name", items=constants.TASK_LAYER_TYPES_ENUM_ITEMS
)
local_task_layers: bpy.props.StringProperty(name="Local Task Layers", default="")
def add_temp_transfer_data(self, name, owner, type, obj):
new_transfer_data = self.temp_transfer_data

View File

@@ -14,6 +14,7 @@ from .merge.core import (
from .merge.transfer_data.transfer_ui import draw_transfer_data
from .merge.shared_ids import get_shared_id_icon
from . import constants
from .merge.task_layer import get_local_task_layers
def sync_poll(cls, context):
@@ -35,10 +36,11 @@ def sync_invoke(self, context):
if not local_col:
self.report({'ERROR'}, "Top level collection could not be found")
return {'CANCELLED'}
task_layer_key = context.scene.asset_pipeline.task_layer_name
if task_layer_key == "NONE":
self.report({'ERROR'}, "Current File Name doesn't contain valid task layer")
return {'CANCELLED'}
# TODO Check if file contains a valid task layer
# task_layer_key = context.scene.asset_pipeline.task_layer_name
# if task_layer_key == "NONE":
# self.report({'ERROR'}, "Current File Name doesn't contain valid task layer")
# return {'CANCELLED'}
ownership_get(local_col, context.scene)
@@ -94,10 +96,11 @@ def sync_execute_update_ownership(self, context):
def sync_execute_prepare_sync(self, context):
self._current_file = Path(bpy.data.filepath)
self._temp_dir = Path(bpy.app.tempdir).parent
self._task_layer_key = context.scene.asset_pipeline.task_layer_name
if self._task_layer_key == "NONE":
self.report({'ERROR'}, "Current File Name doesn't contain valid task layer")
return {'CANCELLED'}
self._task_layer_keys = get_local_task_layers()
# TODO Check if file contains a valid task layer
# if self._task_layer_key == "NONE":
# self.report({'ERROR'}, "Current File Name doesn't contain valid task layer")
# return {'CANCELLED'}
self._sync_target = find_sync_target(self._current_file)
if not self._sync_target.exists():
@@ -115,7 +118,7 @@ def sync_execute_pull(self, context):
bpy.ops.wm.save_as_mainfile(filepath=temp_file.__str__(), copy=True)
error_msg = merge_task_layer(
context,
local_tls=[self._task_layer_key],
local_tls=self._task_layer_keys,
external_file=self._sync_target,
)
@@ -142,7 +145,7 @@ def sync_execute_push(self, context):
local_tls = [
task_layer
for task_layer in constants.TASK_LAYER_TYPES.keys()
if task_layer != self._task_layer_key
if task_layer not in self._task_layer_keys
]
error_msg = merge_task_layer(
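
On push, the layers handed to merge_task_layer() are now the complement of the local task layer set, while the pull path passes the local set itself. A small sketch of that selection, with hypothetical keys and labels standing in for constants.TASK_LAYER_TYPES:

# Sketch only: keys/labels are hypothetical stand-ins for constants.TASK_LAYER_TYPES.
TASK_LAYER_TYPES = {"MODEL": "Modeling", "RIG": "Rigging", "SHD": "Shading"}
local_task_layer_keys = ["MODEL"]               # stand-in for get_local_task_layers()

pull_local_tls = local_task_layer_keys
push_local_tls = [tl for tl in TASK_LAYER_TYPES.keys() if tl not in local_task_layer_keys]

print(pull_local_tls)   # ['MODEL']
print(push_local_tls)   # ['RIG', 'SHD']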

View File

@@ -36,11 +36,12 @@ class ASSETPIPE_sync(bpy.types.Panel):
)
layout.operator("assetpipe.publish_new_version", icon="PLUS")
if asset_pipe.is_asset_pipeline_file and asset_pipe.task_layer_name == "NONE":
asset_pipe = context.scene.asset_pipeline
box = layout.box()
box.label(text="Published File Settings")
box.prop(asset_pipe, "is_depreciated")
# TODO Find new way to determine if we are in a published file more explicitly
# if asset_pipe.is_asset_pipeline_file and asset_pipe.task_layer_name == "NONE":
# asset_pipe = context.scene.asset_pipeline
# box = layout.box()
# box.label(text="Published File Settings")
# box.prop(asset_pipe, "is_depreciated")
class ASSETPIPE_ownership_inspector(bpy.types.Panel):