Asset Pipeline v2 #145
@ -1,77 +0,0 @@
|
||||
import bpy
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def import_data_from_lib(
    libpath: Path,
    data_category: str,
    data_name: str,
    link: bool = False,
) -> bpy.data:
    """Appends/Links data from an external file into the current file.

    Args:
        libpath (Path): path to .blend file that contains library
        data_category (str): bpy.data collection name, like "objects" or "collections"
        data_name (str): name of datablock to link/append
        link (bool, optional): Set to link library otherwise append. Defaults to False.

    Returns:
        bpy.data: returns whichever data_category/type that was linked/appended
    """
    noun = "Linked" if link else "Appended"

    with bpy.data.libraries.load(libpath.as_posix(), relative=True, link=link) as (
        data_from,
        data_to,
    ):
        # getattr() replaces the previous eval() calls: same attribute lookup,
        # but no arbitrary code execution if data_category/data_name ever
        # contain quotes or other Python syntax.
        if data_name not in getattr(data_from, data_category):
            print(
                f"Failed to import {data_category} {data_name} from {libpath.as_posix()}. Doesn't exist in file.",
            )

        # Check if datablock with same name already exists in blend file.
        if data_name in getattr(bpy.data, data_category):
            print(
                f"{data_name} already in bpy.data.{data_category} of this blendfile.",
            )

        # Append data block.
        getattr(data_to, data_category).append(data_name)
        print(f"{noun}:{data_name} from library: {libpath.as_posix()}")

    if link:
        # Linked datablocks are keyed by (name, library filepath).
        return getattr(bpy.data, data_category)[
            data_name, bpy.path.relpath(libpath.as_posix())
        ]

    return getattr(bpy.data, data_category)[data_name]
|
||||
|
||||
|
||||
## EXECUTION
# Test script: pull the "Modeling" task layer of the sky asset from the shared
# resources .blend into this file and re-home its objects under the asset
# collection managed by the asset_pipeline add-on.
task_layer_name = "Modeling"
# The resources folder sits three directories above the current .blend file.
# NOTE(review): assumes bpy.data.filepath is non-empty (file saved) — confirm.
external_file = (
    Path(bpy.data.filepath)
    .parent.parent.parent.joinpath("resources")
    .joinpath("sky_for_asset_test.blend")
)
appended_col = import_data_from_lib(
    external_file, "collections", f"sky.{task_layer_name.lower()}"
)
asset_collection = bpy.context.scene.asset_pipeline.asset_collection
bpy.context.scene.collection.children.link(appended_col)

# Create a fresh collection named after the task layer and parent it to the
# pipeline's asset collection.
task_layer_col = bpy.data.collections.new(task_layer_name)
asset_collection.children.link(task_layer_col)

# Move the imported objects into the new task-layer collection...
for obj in appended_col.objects:
    task_layer_col.objects.link(obj)

# ...then discard the imported container collection itself.
bpy.data.collections.remove(appended_col)
|
@ -1,174 +0,0 @@
|
||||
import bpy
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def import_data_from_lib(
    libpath: Path,
    data_category: str,
    data_name: str,
    link: bool = False,
) -> bpy.data:
    """Appends/Links data from an external file into the current file.

    Args:
        libpath (Path): path to .blend file that contains library
        data_category (str): bpy.data collection name, like "objects" or "collections"
        data_name (str): name of datablock to link/append
        link (bool, optional): Set to link library otherwise append. Defaults to False.

    Returns:
        bpy.data: returns whichever data_category/type that was linked/appended
    """
    noun = "Linked" if link else "Appended"

    with bpy.data.libraries.load(libpath.as_posix(), relative=True, link=link) as (
        data_from,
        data_to,
    ):
        # getattr() replaces the previous eval() calls: same attribute lookup,
        # but no arbitrary code execution if data_category/data_name ever
        # contain quotes or other Python syntax.
        if data_name not in getattr(data_from, data_category):
            print(
                f"Failed to import {data_category} {data_name} from {libpath.as_posix()}. Doesn't exist in file.",
            )

        # Check if datablock with same name already exists in blend file.
        if data_name in getattr(bpy.data, data_category):
            print(
                f"{data_name} already in bpy.data.{data_category} of this blendfile.",
            )

        # Append data block.
        getattr(data_to, data_category).append(data_name)
        print(f"{noun}:{data_name} from library: {libpath.as_posix()}")

    if link:
        # Linked datablocks are keyed by (name, library filepath).
        return getattr(bpy.data, data_category)[
            data_name, bpy.path.relpath(libpath.as_posix())
        ]

    return getattr(bpy.data, data_category)[data_name]
|
||||
|
||||
|
||||
def transfer_constraint(constraint_name, target_obj, source_obj):
    """Copy the named constraint from source_obj onto target_obj.

    Any existing constraint of the same name on target_obj is removed first;
    a new constraint of the same type is then created, moved to the matching
    position in the stack, and all writable RNA properties are copied over.

    Args:
        constraint_name: name of the constraint on source_obj to transfer
        target_obj: object that receives the constraint
        source_obj: object that owns the original constraint
    """
    context = bpy.context
    # remove old and sync existing modifiers
    old_mod = target_obj.constraints.get(constraint_name)
    if old_mod:
        target_obj.constraints.remove(old_mod)

    # transfer new modifiers
    for i, constraint in enumerate(source_obj.constraints):
        if constraint.name == constraint_name:
            constraint_new = target_obj.constraints.new(constraint.type)
            constraint_new.name = constraint.name
            # sort new modifier at correct index (default to beginning of the stack)
            # Place it right after the constraint that precedes it on the
            # source object, if that one also exists on the target.
            idx = 0
            if i > 0:
                name_prev = source_obj.constraints[i - 1].name
                for target_mod_i, target_constraint in enumerate(
                    target_obj.constraints
                ):
                    if target_constraint.name == name_prev:
                        idx = target_mod_i + 1

            if idx != i:
                # NOTE(review): override_obj_visability is not defined in this
                # file — presumably supplied by a sibling module; verify import.
                with override_obj_visability(obj=target_obj):
                    with context.temp_override(object=target_obj):
                        bpy.ops.constraint.move_to_index(
                            constraint=constraint_new.name, index=idx
                        )
            constraint_target = target_obj.constraints.get(constraint.name)
            # Copy every writable RNA property from the source constraint.
            props = [
                p.identifier for p in constraint.bl_rna.properties if not p.is_readonly
            ]
            for prop in props:
                value = getattr(constraint, prop)
                setattr(constraint_target, prop, value)

            # HACK to cover edge case of armature constraints
            # (their `targets` entries must be re-created one by one).
            if constraint.type == "ARMATURE":
                for target_item in constraint.targets:
                    new_target = constraint_new.targets.new()
                    new_target.target = target_item.target
                    new_target.subtarget = target_item.subtarget
|
||||
|
||||
|
||||
def transfer_vertex_groups(context, target_obj, source_obj):
    """Copy vertex-group weights from source_obj onto target_obj.

    Runs Blender's data-transfer operator with a temporary context override so
    source_obj acts as the active object and both objects are editable.

    Args:
        context: current bpy context (must support temp_override)
        target_obj: object receiving the vertex-group weights
        source_obj: object providing the vertex-group weights
    """
    override = {
        "object": source_obj,
        "selected_editable_objects": [target_obj, source_obj],
    }
    transfer_settings = {
        "data_type": "VGROUP_WEIGHTS",
        "use_create": True,
        "vert_mapping": 'POLYINTERP_NEAREST',
        "layers_select_src": "ALL",
        "layers_select_dst": "NAME",
        "mix_mode": "REPLACE",
    }
    with context.temp_override(**override):
        bpy.ops.object.data_transfer(**transfer_settings)
|
||||
|
||||
|
||||
## EXECUTION
# Test script: pull the "Rigging" task layer of the sky asset from the shared
# resources .blend and merge it into this file's asset collection — parenting
# the modeling objects to the imported rig and transferring vertex weights and
# constraints from the ".rigging" duplicates.
task_layer_name = "Rigging"
task_layer_col = bpy.data.collections.new(task_layer_name)
asset_collection = bpy.context.scene.asset_pipeline.asset_collection
asset_collection.children.link(task_layer_col)
# The resources folder sits three directories above the current .blend file.
external_file = (
    Path(bpy.data.filepath)
    .parent.parent.parent.joinpath("resources")
    .joinpath("sky_for_asset_test.blend")
)
appended_col = import_data_from_lib(
    external_file, "collections", f"sky.{task_layer_name.lower()}"
)
bpy.context.scene.collection.children.link(appended_col)

rig = None


# Link Armature into Scene
# NOTE(review): assumes a single armature in the imported collection; if there
# are several, the last one found wins — confirm.
for obj in appended_col.objects:
    if obj.type == "ARMATURE":
        task_layer_col.objects.link(obj)
        rig = obj


for obj in bpy.data.collections["Modeling"].objects:
    # Each modeling object has a ".rigging" counterpart in the imported data.
    source_obj = bpy.data.objects[f"{obj.name}.rigging"]

    ## Set Parent
    obj.parent = rig
    obj.matrix_parent_inverse = source_obj.parent.matrix_world.inverted()

    ## Transfer Vertex Groups
    transfer_vertex_groups(bpy.context, obj, source_obj)

    ## Copy Constraints
    for constraint in source_obj.constraints:
        transfer_constraint(constraint.name, obj, source_obj)


# Hook the main body mesh up to the rig with a new Armature modifier.
main_body_obj = bpy.data.objects["GEO-Body"]
mod = main_body_obj.modifiers.new("Armature", type="ARMATURE")
mod.object = rig


# Re-home any child collections, then strip the ".rigging" suffix so names
# line up with the modeling layer.
for col in appended_col.children:
    task_layer_col.children.link(col)

for obj in task_layer_col.all_objects:
    if obj.name.endswith(".rigging"):
        obj.name = obj.name.replace(".rigging", "")


## REMOVE EVERYTHING ELSE
# Delete leftover ".rigging" duplicates and the imported container collection.
for obj in bpy.data.objects:
    if obj.name.endswith(".rigging"):
        bpy.data.objects.remove(obj)

bpy.data.collections.remove(appended_col)
|
@ -1,125 +0,0 @@
|
||||
import bpy
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def import_data_from_lib(
    libpath: Path,
    data_category: str,
    data_name: str,
    link: bool = False,
) -> bpy.data:
    """Appends/Links data from an external file into the current file.

    Args:
        libpath (Path): path to .blend file that contains library
        data_category (str): bpy.data collection name, like "objects" or "collections"
        data_name (str): name of datablock to link/append
        link (bool, optional): Set to link library otherwise append. Defaults to False.

    Returns:
        bpy.data: returns whichever data_category/type that was linked/appended
    """
    noun = "Linked" if link else "Appended"

    with bpy.data.libraries.load(libpath.as_posix(), relative=True, link=link) as (
        data_from,
        data_to,
    ):
        # getattr() replaces the previous eval() calls: same attribute lookup,
        # but no arbitrary code execution if data_category/data_name ever
        # contain quotes or other Python syntax.
        if data_name not in getattr(data_from, data_category):
            print(
                f"Failed to import {data_category} {data_name} from {libpath.as_posix()}. Doesn't exist in file.",
            )

        # Check if datablock with same name already exists in blend file.
        if data_name in getattr(bpy.data, data_category):
            print(
                f"{data_name} already in bpy.data.{data_category} of this blendfile.",
            )

        # Append data block.
        getattr(data_to, data_category).append(data_name)
        print(f"{noun}:{data_name} from library: {libpath.as_posix()}")

    if link:
        # Linked datablocks are keyed by (name, library filepath).
        return getattr(bpy.data, data_category)[
            data_name, bpy.path.relpath(libpath.as_posix())
        ]

    return getattr(bpy.data, data_category)[data_name]
|
||||
|
||||
|
||||
def transfer_material_slots(target_obj: bpy.types.Object, source_obj):
    """Mirror source_obj's material slots onto target_obj.

    Clears every material slot on the target mesh, then appends each of the
    source's materials in order and copies the slot link mode.

    Args:
        target_obj (bpy.types.Object): object whose mesh receives the materials
        source_obj: object providing the material slots
    """
    # Start from a clean slate on the target.
    target_obj.data.materials.clear()

    # Re-create each slot in source order, preserving the link setting.
    for idx, slot in enumerate(source_obj.material_slots):
        target_obj.data.materials.append(slot.material)
        target_obj.material_slots[idx].link = slot.link
|
||||
|
||||
|
||||
def transfer_attribute(
    attribute_name: str,
    target_obj: bpy.types.Object,
    source_obj: bpy.types.Object,
):
    """Copy a mesh attribute layer from source_obj onto target_obj.

    An existing attribute of the same name on the target is removed and
    re-created with the source's data type and domain, then every per-element
    value is copied over index by index.

    NOTE(review): assumes both meshes have matching element counts on the
    attribute's domain — confirm with callers.

    Args:
        attribute_name (str): name of the attribute layer to transfer
        target_obj (bpy.types.Object): object whose mesh receives the attribute
        source_obj (bpy.types.Object): object whose mesh provides the attribute
    """
    source_attributes = source_obj.data.attributes
    target_attributes = target_obj.data.attributes
    source_attribute = source_attributes.get(attribute_name)

    # Drop any stale attribute of the same name so type/domain can't mismatch.
    target_attribute = target_attributes.get(attribute_name)
    if target_attribute:
        target_attributes.remove(target_attribute)

    target_attribute = target_attributes.new(
        name=attribute_name,
        type=source_attribute.data_type,
        domain=source_attribute.domain,
    )
    # print(f"Transfering Attribute {attribute_name}")
    for source_data_item in source_attribute.data.items():
        index = source_data_item[0]
        source_data = source_data_item[1]
        # Copy only the value fields specific to this attribute's data class,
        # excluding the generic RNA properties shared by all Attribute types.
        keys = set(source_data.bl_rna.properties.keys()) - set(
            bpy.types.Attribute.bl_rna.properties.keys()
        )
        for key in list(keys):
            target_data = target_attribute.data[index]
            setattr(target_data, key, getattr(source_data, key))
|
||||
|
||||
|
||||
## EXECUTION
# Test script: pull the "Shading" task layer of the sky asset from the shared
# resources .blend, then copy material slots and the material_index attribute
# from the imported ".shading" duplicates onto the modeling objects.
task_layer_name = "Shading"
# The resources folder sits three directories above the current .blend file.
external_file = (
    Path(bpy.data.filepath)
    .parent.parent.parent.joinpath("resources")
    .joinpath("sky_for_asset_test.blend")
)
appended_col = import_data_from_lib(
    external_file, "collections", f"sky.{task_layer_name.lower()}"
)
bpy.context.scene.collection.children.link(appended_col)

source_body_obj = bpy.data.objects["GEO-Body.shading"]
target_body_obj = bpy.data.objects["GEO-Body"]


for obj in bpy.data.collections["Modeling"].objects:
    # Each modeling object has a ".shading" counterpart in the imported data.
    source_obj = bpy.data.objects[f"{obj.name}.shading"]
    transfer_material_slots(obj, source_obj)

# Per-face material assignment lives in the "material_index" attribute.
transfer_attribute(
    attribute_name="material_index",
    target_obj=target_body_obj,
    source_obj=source_body_obj,
)


# Discard the imported container collection.
bpy.data.collections.remove(appended_col)
|
@ -1,3 +0,0 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:221d07c3dc474f9c349b7ce0eaaa9892167b78c373f8418e9d863815b5bb6246
|
||||
size 2054192
|
Loading…
Reference in New Issue
Block a user