Asset Pipeline v2 #145

Closed
Nick Alberelli wants to merge 431 commits from (deleted):feature/asset-pipeline-v2 into main

When changing the target branch, be careful to rebase the branch in your fork to match. See documentation.
7 changed files with 217 additions and 56 deletions
Showing only changes of commit 129fbcc0a9 - Show all commits

View File

@ -135,7 +135,7 @@ class AssetTransferMapping:
temp_info = transfer_data_add_entry(
transfer_data=temp_transfer_data,
name=transfer_info.name,
td_type=transfer_info.type,
td_type_key=transfer_info.type,
task_layer_name=transfer_info.owner,
)

View File

@ -95,7 +95,17 @@ def get_invalid_objects(
local_col: bpy.types.Collection,
scene: bpy.types.Scene,
) -> list[bpy.types.Object]:
# TODO Add Docstring
"""Returns a list of objects not used in the merge processing,
which are considered invalid. The objects will be excluded from
the merge process.
Args:
local_col (bpy.types.Collection): The top level asset collection that is local to the file
scene (bpy.types.Scene): Scene that contains the file's asset
Returns:
list[bpy.types.Object]: List of Invalid Objects
"""
task_layer_key = scene.asset_pipeline.task_layer_name
task_layer_col_name = get_task_layer_col_name(task_layer_key)
task_layer_col = local_col.children.get(task_layer_col_name)

View File

@ -2,7 +2,21 @@ import bpy
from rigify.utils.misc import copy_attributes
def copy_driver(from_fcurve, target, data_path=None, index=None):
def copy_driver(
from_fcurve: bpy.types.FCurve, target: bpy.types.ID, data_path=None, index=None
) -> bpy.types.FCurve:
"""Copy an existing FCurve containing a driver to a new ID, by creating a copy
of the existing driver on the target ID.
Args:
from_fcurve (bpy.types.FCurve): FCurve containing a driver
target (bpy.types.ID): ID that can have drivers added to it
data_path (_type_, optional): Data Path of existing driver. Defaults to None.
index (_type_, optional): array index of the driven property. Defaults to None.
Returns:
bpy.types.FCurve: Fcurve containing copy of driver on target ID
"""
if not data_path:
data_path = from_fcurve.data_path
@ -36,7 +50,19 @@ def copy_driver(from_fcurve, target, data_path=None, index=None):
return new_fc
def find_drivers(drivers, target_type, target_name):
def find_drivers(
drivers: list[bpy.types.FCurve], target_type: str, target_name: str
) -> list[bpy.types.FCurve]:
"""Returns a list of FCurves whose driver data paths match the given target type and name.
Args:
drivers (list[bpy.types.FCurve]): List or Collection Property containing Fcurves with drivers
target_type (str): Name of data type found in driver data path, e.g. "modifiers"
target_name (str): Name of data found in driver path, e.g. modifier's name
Returns:
list[bpy.types.FCurve]: List of FCurves containing drivers that match type & name
"""
found_drivers = []
for driver in drivers:
if f'{target_type}["{target_name}"]' in driver.data_path:

View File

@ -115,8 +115,17 @@ def add_suffix_to_hierarchy(collection: bpy.types.Collection, suffix_base: str)
pass
def get_name_with_asset_prefix(name: str):
# TODO Docstring and return types
def get_name_with_asset_prefix(name: str) -> str:
"""Returns a string with the prefix if it is not already set.
Users can specify a prefix to live on all objects during the
asset creation process. This prefix is stored in the scene.
Args:
name (str): Name to add prefix to
Returns:
str: Returns name with prefix
"""
asset_pipe = bpy.context.scene.asset_pipeline
if name.startswith(asset_pipe.prefix + "."):
return name
@ -124,7 +133,16 @@ def get_name_with_asset_prefix(name: str):
return prefix + name
def get_task_layer_col_name(task_layer_key):
# TODO Docstring and return types
def get_task_layer_col_name(task_layer_key) -> str:
"""Returns the name of a given task layer collection via
the task layer key. Task Layer Collection names are a combination
of a prefix if any and the task_layer_name.
Args:
task_layer_key (_type_): Key of a given task layer
Returns:
str: Task Layer Collection name including prefix if exists
"""
task_layer_name = constants.TASK_LAYER_TYPES[task_layer_key]
return get_name_with_asset_prefix(task_layer_name)

View File

@ -1,8 +1,18 @@
import bpy
from bpy_extras.id_map_utils import get_id_reference_map, get_all_referenced_ids
# TODO find better name for 'other_ids'
def get_other_ids(collection): # TODO find better name
def get_other_ids(collection: bpy.types.Collection) -> list[bpy.types.ID]:
"""Returns a list of any ID that is not covered by the merge process
Args:
collection (bpy.types.Collection): Collection that contains data that references 'other_ids'
Returns:
list[bpy.types.ID]: List of 'other_ids'
"""
ref_map = get_id_reference_map()
all_ids_of_coll = get_all_referenced_ids(collection, ref_map)
return [
@ -12,7 +22,16 @@ def get_other_ids(collection): # TODO find better name
]
def init_other_ids(scene):
def init_other_ids(scene: bpy.types.Scene) -> list[bpy.types.ID]:
"""Initializes any ID not covered by the transfer process as an 'other_id'
and marks all 'other_ids' without an owner to the current task layer
Args:
scene (bpy.types.Scene): Scene that contains the file's asset
Returns:
list[bpy.types.ID]: A list of new 'other_ids' owned by the file's task layer
"""
other_ids = []
asset_pipe = scene.asset_pipeline
local_col = asset_pipe.asset_collection

View File

@ -20,17 +20,26 @@ import numpy as np
# VERTEX GROUPS
def vertex_groups_clean(obj):
transfer_info_clean(obj, obj.vertex_groups, constants.VERTEX_GROUP_KEY)
transfer_info_clean(
obj=obj, data_list=obj.vertex_groups, td_type_key=constants.VERTEX_GROUP_KEY
)
def vertex_group_is_missing(transfer_info):
return transfer_info_is_missing(
transfer_info, constants.VERTEX_GROUP_KEY, transfer_info.id_data.vertex_groups
transfer_info=transfer_info,
td_type_key=constants.VERTEX_GROUP_KEY,
data_list=transfer_info.id_data.vertex_groups,
)
def init_vertex_groups(scene, obj):
transfer_info_init(scene, obj, obj.vertex_groups, constants.VERTEX_GROUP_KEY)
transfer_info_init(
scene=scene,
obj=obj,
data_list=obj.vertex_groups,
td_type_key=constants.VERTEX_GROUP_KEY,
)
def transfer_vertex_group(
@ -77,19 +86,30 @@ def transfer_vertex_group(
def vertex_colors_clean(obj):
if not obj.type == "MESH":
return
transfer_info_clean(obj, obj.data.vertex_colors, constants.VERTEX_COLOR_KEY)
transfer_info_clean(
obj=obj,
data_list=obj.data.vertex_colors,
td_type_key=constants.VERTEX_COLOR_KEY,
)
def vertex_color_is_missing(transfer_info):
return transfer_info_is_missing(
transfer_info, constants.VERTEX_COLOR_KEY, transfer_info.id_data.vertex_colors
transfer_info=transfer_info,
td_type_key=constants.VERTEX_COLOR_KEY,
data_list=transfer_info.id_data.vertex_colors,
)
def init_vertex_colors(scene, obj):
if not obj.type == "MESH":
return
transfer_info_init(scene, obj, obj.data.vertex_colors, constants.VERTEX_COLOR_KEY)
transfer_info_init(
scene=scene,
obj=obj,
data_list=obj.data.vertex_colors,
td_type_key=constants.VERTEX_COLOR_KEY,
)
def transfer_vertex_color(
@ -118,19 +138,28 @@ def transfer_vertex_color(
def uv_layer_clean(obj):
if not obj.type == "MESH":
return
transfer_info_clean(obj, obj.data.uv_layers, constants.UV_LAYERS_KEY)
transfer_info_clean(
obj=obj, data_list=obj.data.uv_layers, td_type_key=constants.UV_LAYERS_KEY
)
def uv_layer_is_missing(transfer_info):
return transfer_info_is_missing(
transfer_info, constants.UV_LAYERS_KEY, transfer_info.id_data.data.uv_layers
transfer_info=transfer_info,
td_type_key=constants.UV_LAYERS_KEY,
data_list=transfer_info.id_data.data.uv_layers,
)
def init_uv_layers(scene, obj):
if not obj.type == "MESH":
return
transfer_info_init(scene, obj, obj.data.uv_layers, constants.UV_LAYERS_KEY)
transfer_info_init(
scene=scene,
obj=obj,
data_list=obj.data.uv_layers,
td_type_key=constants.UV_LAYERS_KEY,
)
def transfer_uv_layer(source_obj, target_obj, uv_name):
@ -157,17 +186,26 @@ def transfer_uv_layer(source_obj, target_obj, uv_name):
# MODIFIERS
def modifiers_clean(obj):
transfer_info_clean(obj, obj.modifiers, constants.MODIFIER_KEY)
transfer_info_clean(
obj=obj, data_list=obj.modifiers, td_type_key=constants.MODIFIER_KEY
)
def modifier_is_missing(transfer_info):
return transfer_info_is_missing(
transfer_info, constants.MODIFIER_KEY, transfer_info.id_data.modifiers
transfer_info=transfer_info,
td_type_key=constants.MODIFIER_KEY,
data_list=transfer_info.id_data.modifiers,
)
def init_modifiers(scene, obj):
transfer_info_init(scene, obj, obj.modifiers, constants.MODIFIER_KEY)
transfer_info_init(
scene=scene,
obj=obj,
data_list=obj.modifiers,
td_type_key=constants.MODIFIER_KEY,
)
def transfer_modifier(modifier_name, target_obj, source_obj):
@ -233,17 +271,26 @@ def transfer_modifier(modifier_name, target_obj, source_obj):
# CONSTRAINTS
def constraints_clean(obj):
transfer_info_clean(obj, obj.constraints, constants.CONSTRAINT_KEY)
transfer_info_clean(
obj=obj, data_list=obj.constraints, td_type_key=constants.CONSTRAINT_KEY
)
def constraint_is_missing(transfer_info):
return transfer_info_is_missing(
transfer_info, constants.CONSTRAINT_KEY, transfer_info.id_data.constraints
transfer_info=transfer_info,
td_type_key=constants.CONSTRAINT_KEY,
data_list=transfer_info.id_data.constraints,
)
def init_constraints(scene, obj):
transfer_info_init(scene, obj, obj.constraints, constants.CONSTRAINT_KEY)
transfer_info_init(
scene=scene,
obj=obj,
data_list=obj.constraints,
td_type_key=constants.CONSTRAINT_KEY,
)
def transfer_constraint(constraint_name, target_obj, source_obj):
@ -322,20 +369,20 @@ def material_slots_is_missing(transfer_info):
def init_material_slots(scene, obj):
task_layer_key = scene.asset_pipeline.task_layer_name
type_key = constants.MATERIAL_SLOT_KEY
td_type_key = constants.MATERIAL_SLOT_KEY
name = constants.MATERIAL_TRANSFER_INFO_NAME
transfer_data = obj.transfer_data_ownership
# Only Execute if Material Slots exist on object
if len(obj.material_slots) == 0:
return
matches = check_transfer_data_entry(transfer_data, name, type_key)
matches = check_transfer_data_entry(transfer_data, name, td_type_key)
# Only add new ownership transfer_info if the material slot doesn't have an owner
if len(matches) == 0:
scene.asset_pipeline.add_temp_transfer_data(
name=name,
owner=task_layer_key,
type=type_key,
type=td_type_key,
obj=obj,
)
@ -427,9 +474,9 @@ def shape_key_is_missing(transfer_info):
if not obj.data.shape_keys:
return True
return transfer_info_is_missing(
transfer_info,
constants.SHAPE_KEY_KEY,
obj.data.shape_keys.key_blocks,
transfer_info=transfer_info,
td_type_key=constants.SHAPE_KEY_KEY,
data_list=obj.data.shape_keys.key_blocks,
)
@ -448,7 +495,10 @@ def init_shape_keys(scene, obj):
)
transfer_info_init(
scene, obj, obj.data.shape_keys.key_blocks, constants.SHAPE_KEY_KEY
scene=scene,
obj=obj,
data_list=obj.data.shape_keys.key_blocks,
td_type_key=constants.SHAPE_KEY_KEY,
)
@ -573,15 +623,15 @@ def init_attributes(scene, obj):
return
transfer_data = obj.transfer_data_ownership
task_layer_key = scene.asset_pipeline.task_layer_name
type_key = constants.ATTRIBUTE_KEY
td_type_key = constants.ATTRIBUTE_KEY
for atttribute in attributes_get_editable(obj.data.attributes):
# Only add new ownership transfer_info if the attribute doesn't have an owner
matches = check_transfer_data_entry(transfer_data, atttribute.name, type_key)
matches = check_transfer_data_entry(transfer_data, atttribute.name, td_type_key)
if len(matches) == 0:
scene.asset_pipeline.add_temp_transfer_data(
name=atttribute.name,
owner=task_layer_key,
type=type_key,
type=td_type_key,
obj=obj,
)
@ -638,20 +688,20 @@ def parent_is_missing(transfer_info):
def init_parent(scene, obj):
task_layer_key = scene.asset_pipeline.task_layer_name
type_key = constants.PARENT_KEY
td_type_key = constants.PARENT_KEY
name = constants.PARENT_TRANSFER_INFO_NAME
transfer_data = obj.transfer_data_ownership
# Only execute if the object has a parent
if obj.parent == None:
return
matches = check_transfer_data_entry(transfer_data, name, type_key)
matches = check_transfer_data_entry(transfer_data, name, td_type_key)
# Only add new ownership transfer_info if the parent doesn't have an owner
if len(matches) == 0:
scene.asset_pipeline.add_temp_transfer_data(
name=name,
owner=task_layer_key,
type=type_key,
type=td_type_key,
obj=obj,
)

View File

@ -3,14 +3,14 @@ from ..naming import get_basename
def check_transfer_data_entry(
transfer_data: bpy.types.CollectionProperty, key: str, td_type: str
transfer_data: bpy.types.CollectionProperty, key: str, td_type_key: str
) -> set:
"""Verifies if transfer data entry exists
Args:
ownership (bpy.types.CollectionProperty): Transfer Data of an object
key (str): Name of item that is being verified
td_type (str): Type of transfer data
td_type_key (str): Type of transfer data
Returns:
set: Returns set of matches where name is found in ownership
@ -18,7 +18,7 @@ def check_transfer_data_entry(
existing_items = [
transfer_info.name
for transfer_info in transfer_data
if transfer_info.type == td_type
if transfer_info.type == td_type_key
]
return set([key]).intersection(set(existing_items))
@ -26,7 +26,7 @@ def check_transfer_data_entry(
def transfer_data_add_entry(
transfer_data: bpy.types.CollectionProperty,
name: str,
td_type: str,
td_type_key: str,
task_layer_name: str,
):
"""Add entry to transfer data ownership
@ -34,48 +34,86 @@ def transfer_data_add_entry(
Args:
ownership (bpy.types.CollectionProperty): Transfer Data of an object
name (str): Name of new transfer data item
td_type (str): Type of transfer data
td_type_key (str): Type of transfer data
task_layer_name (str): Name of current task layer
"""
transfer_info = transfer_data.add()
transfer_info.name = name
transfer_info.owner = task_layer_name.upper()
transfer_info.type = td_type
transfer_info.type = td_type_key
return transfer_info
def get_transfer_data_as_names(transfer_data, td_type):
def get_transfer_data_as_names(transfer_data, td_type_key):
return [
transfer_info.name
for transfer_info in transfer_data
if transfer_info.type == td_type
if transfer_info.type == td_type_key
]
def transfer_info_clean(obj, list, td_type):
# TODO Test if Clean and Missing are redundant functions
def transfer_info_clean(
obj: bpy.types.Object, data_list: bpy.types.CollectionProperty, td_type_key: str
):
"""Remove transfer data entries if the corresponding data doesn't exist
Args:
obj (bpy.types.Object): Object containing transfer data
data_list (bpy.types.CollectionProperty): Collection Property containing a type of possible transfer data e.g. obj.modifiers
td_type_key (str): Key for the transfer data type
"""
transfer_data_list = get_transfer_data_as_names(
obj.transfer_data_ownership, td_type
obj.transfer_data_ownership, td_type_key
)
for item in list:
for item in data_list:
if not get_basename(item.name) in transfer_data_list:
list.remove(item)
data_list.remove(item)
def transfer_info_is_missing(transfer_info, type_key, list):
if transfer_info.type == type_key and not list.get(transfer_info["name"]):
def transfer_info_is_missing(
transfer_info, data_list: bpy.types.CollectionProperty, td_type_key: str
) -> bool:
"""Returns true if a transfer_data_item does not exist
Args:
transfer_info (_type_): Item of Transfer Data
data_list (bpy.types.CollectionProperty): Collection Property containing a type of possible transfer data e.g. obj.modifiers
td_type_key (str): Key for the transfer data type
Returns:
bool: Returns True if transfer_info is missing
"""
if transfer_info.type == td_type_key and not data_list.get(transfer_info["name"]):
return True
def transfer_info_init(scene, obj, list, type_key):
"""Initialize transfer data to a temporary collection property, used
to draw a display of new transfer data to the user before merge process.
"""
def transfer_info_init(
scene: bpy.types.Scene,
obj: bpy.types.Object,
data_list: bpy.types.CollectionProperty,
td_type_key: str,
):
"""Initialize transfer data as temporary transfer data on the scene, used to display new transfer data to the user before the merge process.
Args:
scene (bpy.types.Scene): Scene that contains the file's asset
obj (bpy.types.Object): Object containing possible transfer data
data_list (bpy.types.CollectionProperty): Collection Property containing a type of possible transfer data e.g. obj.modifiers
td_type_key (str): Key for the transfer data type
"""
transfer_data = obj.transfer_data_ownership
task_layer_key = scene.asset_pipeline.task_layer_name
for item in list:
for item in data_list:
# Only add new ownership transfer_info if the item doesn't have an owner
matches = check_transfer_data_entry(transfer_data, item.name, type_key)
matches = check_transfer_data_entry(transfer_data, item.name, td_type_key)
if len(matches) == 0:
scene.asset_pipeline.add_temp_transfer_data(
name=item.name,
owner=task_layer_key,
type=type_key,
type=td_type_key,
obj=obj,
)