Asset Pipeline v2 #145

Closed
Nick Alberelli wants to merge 431 commits from (deleted):feature/asset-pipeline-v2 into main

When changing the target branch, be careful to rebase the branch in your fork to match. See documentation.
10 changed files with 1211 additions and 1160 deletions
Showing only changes of commit 8d58308c7d - Show all commits

View File

@ -1,6 +1,14 @@
import bpy
from . import transfer_functions
from .transfer_functions import (
attributes,
constraints,
modifers,
parent,
shape_keys,
vertex_groups,
materials,
)
from ... import constants
@ -36,13 +44,12 @@ def copy_transfer_data_ownership(
def transfer_data_clean(obj):
transfer_functions.vertex_groups_clean(obj)
transfer_functions.modifiers_clean(obj)
transfer_functions.constraints_clean(obj)
# transfer_functions.material_slots_clean(obj)
transfer_functions.shape_keys_clean(obj)
transfer_functions.attribute_clean(obj)
transfer_functions.parent_clean(obj)
vertex_groups.vertex_groups_clean(obj)
modifers.modifiers_clean(obj)
constraints.constraints_clean(obj)
shape_keys.shape_keys_clean(obj)
attributes.attribute_clean(obj)
parent.parent_clean(obj)
def transfer_data_is_missing(transfer_data_item) -> bool:
@ -55,13 +62,12 @@ def transfer_data_is_missing(transfer_data_item) -> bool:
bool: bool if item is missing
"""
return bool(
transfer_functions.vertex_group_is_missing(transfer_data_item)
or transfer_functions.modifier_is_missing(transfer_data_item)
# or transfer_functions.material_slots_is_missing(transfer_data_item)
or transfer_functions.constraint_is_missing(transfer_data_item)
or transfer_functions.shape_key_is_missing(transfer_data_item)
or transfer_functions.attribute_is_missing(transfer_data_item)
or transfer_functions.parent_is_missing(transfer_data_item)
vertex_groups.vertex_group_is_missing(transfer_data_item)
or modifers.modifier_is_missing(transfer_data_item)
or constraints.constraint_is_missing(transfer_data_item)
or shape_keys.shape_key_is_missing(transfer_data_item)
or attributes.attribute_is_missing(transfer_data_item)
or parent.parent_is_missing(transfer_data_item)
)
@ -76,13 +82,13 @@ def init_transfer_data(
task_layer_name (str): Name of task layer
temp_transfer_data: Item of class ASSET_TRANSFER_DATA_TEMP
"""
transfer_functions.init_vertex_groups(scene, obj)
transfer_functions.init_material_slots(scene, obj)
transfer_functions.init_modifiers(scene, obj)
transfer_functions.init_constraints(scene, obj)
transfer_functions.init_shape_keys(scene, obj)
transfer_functions.init_attributes(scene, obj)
transfer_functions.init_parent(scene, obj)
vertex_groups.init_vertex_groups(scene, obj)
materials.init_materials(scene, obj)
modifers.init_modifiers(scene, obj)
constraints.init_constraints(scene, obj)
shape_keys.init_shape_keys(scene, obj)
attributes.init_attributes(scene, obj)
parent.init_parent(scene, obj)
def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None:
@ -113,7 +119,7 @@ def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None:
if source_obj != target_obj:
if transfer_data_item.type == constants.VERTEX_GROUP_KEY:
print(f"Transfering Data {constants.VERTEX_GROUP_KEY}: {name}")
transfer_functions.transfer_vertex_group(
vertex_groups.transfer_vertex_group(
context=context,
vertex_group_name=transfer_data_item.name,
target_obj=target_obj,
@ -121,38 +127,38 @@ def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None:
)
if transfer_data_item.type == constants.MODIFIER_KEY:
print(f"Transfering Data {constants.MODIFIER_KEY}: {name}")
transfer_functions.transfer_modifier(
modifers.transfer_modifier(
modifier_name=transfer_data_item.name,
target_obj=target_obj,
source_obj=source_obj,
)
if transfer_data_item.type == constants.CONSTRAINT_KEY:
transfer_functions.transfer_constraint(
constraints.transfer_constraint(
constraint_name=transfer_data_item.name,
target_obj=target_obj,
source_obj=source_obj,
)
if transfer_data_item.type == constants.MATERIAL_SLOT_KEY:
print(f"Transfering Data {constants.MATERIAL_SLOT_KEY}: {name}")
transfer_functions.transfer_material_slots(
materials.transfer_materials(
target_obj=target_obj,
source_obj=source_obj,
)
if transfer_data_item.type == constants.SHAPE_KEY_KEY:
transfer_functions.transfer_shape_key(
shape_keys.transfer_shape_key(
context=context,
target_obj=target_obj,
source_obj=source_obj,
shape_key_name=transfer_data_item.name,
)
if transfer_data_item.type == constants.ATTRIBUTE_KEY:
transfer_functions.transfer_attribute(
attributes.transfer_attribute(
target_obj=target_obj,
source_obj=source_obj,
attribute_name=transfer_data_item.name,
)
if transfer_data_item.type == constants.PARENT_KEY:
transfer_functions.transfer_parent(
parent.transfer_parent(
target_obj=target_obj,
source_obj=source_obj,
)

View File

@ -0,0 +1,247 @@
import bpy
import mathutils
import bmesh
import numpy as np
from .proximity_core import (
tris_per_face,
closest_face_to_point,
closest_tri_on_face,
is_obdata_identical,
transfer_corner_data,
)
from ..transfer_util import check_transfer_data_entry
from ...naming import get_basename
from ...task_layer import get_transfer_data_owner
from .... import constants
def attributes_get_editable(attributes):
    """Return the subset of `attributes` that users may own and transfer.

    Internal and required attributes are excluded, as is 'material_index',
    which travels with material transfer instead.
    """
    editable = []
    for attr in attributes:
        if attr.is_internal or attr.is_required:
            continue
        # Material Index is part of material transfer and should be skipped
        if attr.name == 'material_index':
            continue
        editable.append(attr)
    return editable
def attribute_clean(obj):
    """Remove editable attributes on `obj` that have no ownership entry.

    Attributes without a matching transfer-data entry are considered removed
    by their owning task layer and are deleted from the local mesh too.
    """
    if obj.type != "MESH":
        return
    attributes = attributes_get_editable(obj.data.attributes)
    attributes_to_remove = []
    for attribute in attributes:
        matches = check_transfer_data_entry(
            obj.transfer_data_ownership,
            get_basename(attribute.name),
            constants.ATTRIBUTE_KEY,
        )
        if len(matches) == 0:
            attributes_to_remove.append(attribute.name)
    # Remove by name, in reverse, so removals cannot invalidate the
    # attribute references still pending deletion.
    for attribute_name_to_remove in reversed(attributes_to_remove):
        attribute_to_remove = obj.data.attributes.get(attribute_name_to_remove)
        # BUG FIX: previously logged the stale loop variable `attribute.name`
        # (the last attribute iterated above) instead of the one being removed.
        print(f"Cleaning attribute {attribute_name_to_remove}")
        obj.data.attributes.remove(attribute_to_remove)
def attribute_is_missing(transfer_data_item):
    """Return True when an ATTRIBUTE ownership entry refers to an attribute
    that is no longer present (and editable) on its mesh object.

    Returns None (falsy) for non-mesh objects or non-attribute entries.
    """
    obj = transfer_data_item.id_data
    if obj.type != "MESH":
        return
    if transfer_data_item.type != constants.ATTRIBUTE_KEY:
        return
    editable_names = [
        attr.name for attr in attributes_get_editable(obj.data.attributes)
    ]
    if transfer_data_item["name"] not in editable_names:
        return True
def init_attributes(scene, obj):
    """Ensure every editable attribute on `obj` has a transfer-data owner."""
    asset_pipe = scene.asset_pipeline
    if obj.type != "MESH":
        return
    transfer_data = obj.transfer_data_ownership
    td_type_key = constants.ATTRIBUTE_KEY
    for attribute in attributes_get_editable(obj.data.attributes):
        # Skip attributes that already have an ownership entry.
        if check_transfer_data_entry(transfer_data, attribute.name, td_type_key):
            continue
        task_layer_owner, auto_surrender = get_transfer_data_owner(
            asset_pipe, td_type_key, attribute.name
        )
        asset_pipe.add_temp_transfer_data(
            name=attribute.name,
            owner=task_layer_owner,
            type=td_type_key,
            obj=obj,
            surrender=auto_surrender,
        )
def transfer_attribute(
    attribute_name: str,
    target_obj: bpy.types.Object,
    source_obj: bpy.types.Object,
):
    """Copy the named generic attribute from source_obj onto target_obj.

    Any existing target attribute of the same name is replaced. When the two
    meshes' topology differs, values are resampled by proximity via
    proximity_transfer_single_attribute(); otherwise data is copied per index.
    """
    source_attributes = source_obj.data.attributes
    target_attributes = target_obj.data.attributes
    source_attribute = source_attributes.get(attribute_name)
    target_attribute = target_attributes.get(attribute_name)

    # Recreate the target attribute so its type/domain match the source.
    if target_attribute:
        target_attributes.remove(target_attribute)
    target_attribute = target_attributes.new(
        name=attribute_name,
        type=source_attribute.data_type,
        domain=source_attribute.domain,
    )

    if not is_obdata_identical(source_obj, target_obj):
        # Topology differs: fall back to proximity-based interpolation.
        proximity_transfer_single_attribute(
            source_obj, target_obj, source_attribute, target_attribute
        )
        return

    # Topology matches: copy each data element's value properties directly.
    for source_data_item in source_attribute.data.items():
        index = source_data_item[0]
        source_data = source_data_item[1]
        # Properties specific to this attribute's data type (e.g. 'value',
        # 'color', 'vector'), i.e. everything not on the base Attribute RNA.
        keys = set(source_data.bl_rna.properties.keys()) - set(
            bpy.types.Attribute.bl_rna.properties.keys()
        )
        for key in list(keys):
            target_data = target_attribute.data[index]
            setattr(target_data, key, getattr(source_data, key))
def proximity_transfer_single_attribute(
    source_obj: bpy.types.Object,
    target_obj: bpy.types.Object,
    source_attribute: bpy.types.Attribute,
    target_attribute: bpy.types.Attribute,
):
    """Resample a generic attribute across meshes with differing topology.

    Per-domain behavior, as implemented below:
    - POINT: each target vertex samples the closest source triangle, blending
      the three corner values with barycentric weights.
    - FACE: each target polygon copies the value of the closest source face.
    - CORNER: delegated to transfer_corner_data() (seam-aware interpolation).
    - EDGE and STRING data are not supported yet and are skipped with a
      printed message.

    Assumes `target_attribute` was already created on the target mesh with a
    matching data_type/domain (see transfer_attribute()).
    """
    # Commented-out code below is kept from an earlier standalone version of
    # this helper; attribute creation now happens in transfer_attribute().
    # src_dat = source_obj.data
    # tgt_dat = target_obj.data
    # if type(src_dat) is not type(tgt_dat) or not (src_dat or tgt_dat):
    #     return False
    # if type(tgt_dat) is not bpy.types.Mesh:  # TODO: support more types
    #     return False

    # If target attribute already exists, remove it.
    # tgt_attr = tgt_dat.attributes.get(source_attribute.name)
    # if tgt_attr is not None:
    #     try:
    #         tgt_dat.attributes.remove(tgt_attr)
    #     except RuntimeError:
    #         # Built-ins like "position" cannot be removed, and should be skipped.
    #         return

    # Create target attribute.
    # target_attribute = tgt_dat.attributes.new(
    #     source_attribute.name, source_attribute.data_type, source_attribute.domain
    # )

    # Name of the value property on each attribute data element, per data type.
    data_sfx = {
        'INT8': 'value',
        'INT': 'value',
        'FLOAT': 'value',
        'FLOAT2': 'vector',
        'BOOLEAN': 'value',
        'STRING': 'value',
        'BYTE_COLOR': 'color',
        'FLOAT_COLOR': 'color',
        'FLOAT_VECTOR': 'vector',
    }
    data_sfx = data_sfx[source_attribute.data_type]

    # if topo_match:
    #     # TODO: optimize using foreach_get/set rather than loop
    #     for i in range(len(source_attribute.data)):
    #         setattr(tgt_attr.data[i], data_sfx, getattr(source_attribute.data[i], data_sfx))
    #     return

    # proximity fallback
    if source_attribute.data_type == 'STRING':
        # TODO: add NEAREST transfer fallback for attributes without interpolation
        print(
            f'Proximity based transfer for generic attributes of type STRING not supported yet. Skipping attribute {source_attribute.name} on {target_obj}.'
        )
        return

    domain = source_attribute.domain
    if (
        domain == 'POINT'
    ):  # TODO: deduplicate interpolated point domain proximity transfer
        # NOTE(review): bm_source is never freed here — consider bm_source.free().
        bm_source = bmesh.new()
        bm_source.from_mesh(source_obj.data)
        bm_source.faces.ensure_lookup_table()
        bvh_tree = mathutils.bvhtree.BVHTree.FromBMesh(bm_source)
        tris_dict = tris_per_face(bm_source)
        for i, vert in enumerate(target_obj.data.vertices):
            p = vert.co
            face = closest_face_to_point(bm_source, p, bvh_tree)
            (tri, point) = closest_tri_on_face(tris_dict, face, p)
            if not tri:
                continue
            # Barycentric weights of `point` within the closest triangle.
            weights = mathutils.interpolate.poly_3d_calc(
                [tri[i].vert.co for i in range(3)], point
            )
            if data_sfx in ['color']:
                # Colors are sequences; convert to arrays so the weighted sum
                # operates component-wise.
                vals_weighted = [
                    weights[i]
                    * (
                        np.array(
                            getattr(source_attribute.data[tri[i].vert.index], data_sfx)
                        )
                    )
                    for i in range(3)
                ]
            else:
                vals_weighted = [
                    weights[i]
                    * (getattr(source_attribute.data[tri[i].vert.index], data_sfx))
                    for i in range(3)
                ]
            setattr(target_attribute.data[i], data_sfx, sum(np.array(vals_weighted)))
        return
    elif domain == 'EDGE':
        # TODO support proximity fallback for generic edge attributes
        print(
            f'Proximity based transfer of generic edge attributes not supported yet. Skipping attribute {source_attribute.name} on {target_obj}.'
        )
        return
    elif domain == 'FACE':
        # Copy each target polygon's value from the closest source face.
        bm_source = bmesh.new()
        bm_source.from_mesh(source_obj.data)
        bm_source.faces.ensure_lookup_table()
        bvh_tree = mathutils.bvhtree.BVHTree.FromBMesh(bm_source)
        for i, face in enumerate(target_obj.data.polygons):
            p_target = face.center
            closest_face = closest_face_to_point(bm_source, p_target, bvh_tree)
            setattr(
                target_attribute.data[i],
                data_sfx,
                getattr(source_attribute.data[closest_face.index], data_sfx),
            )
        return
    elif domain == 'CORNER':
        # Face-corner data needs seam-aware interpolation.
        transfer_corner_data(
            source_obj,
            target_obj,
            source_attribute.data,
            target_attribute.data,
            data_suffix=data_sfx,
        )
        return

View File

@ -0,0 +1,96 @@
import bpy
from ..transfer_util import (
transfer_data_clean,
transfer_data_item_is_missing,
check_transfer_data_entry,
)
from ...naming import task_layer_prefix_name_get
from ...drivers import find_drivers, copy_driver
from ...visibility import override_obj_visability
from ...task_layer import get_transfer_data_owner
from .... import constants
def constraints_clean(obj):
    """Remove constraints on `obj` that no longer have an ownership entry."""
    key = constants.CONSTRAINT_KEY
    transfer_data_clean(obj=obj, data_list=obj.constraints, td_type_key=key)
def constraint_is_missing(transfer_data_item):
    """Return whether this CONSTRAINT entry's constraint is gone from its object."""
    owner_obj = transfer_data_item.id_data
    return transfer_data_item_is_missing(
        transfer_data_item=transfer_data_item,
        td_type_key=constants.CONSTRAINT_KEY,
        data_list=owner_obj.constraints,
    )
def init_constraints(scene, obj):
    """Prefix constraint names with their task-layer owner and ensure each
    constraint on `obj` has a transfer-data ownership entry."""
    td_type_key = constants.CONSTRAINT_KEY
    transfer_data = obj.transfer_data_ownership
    asset_pipe = scene.asset_pipeline
    task_layer_owner, auto_surrender = get_transfer_data_owner(
        asset_pipe,
        td_type_key,
    )
    for const in obj.constraints:
        const.name = task_layer_prefix_name_get(const.name, task_layer_owner)
        # Only add a new ownership entry if the constraint doesn't have an owner yet.
        matches = check_transfer_data_entry(transfer_data, const.name, td_type_key)
        if len(matches) == 0:
            asset_pipe.add_temp_transfer_data(
                name=const.name,
                owner=task_layer_owner,
                type=td_type_key,
                obj=obj,
                surrender=auto_surrender,
            )
def transfer_constraint(constraint_name, target_obj, source_obj):
    """Recreate the named constraint from source_obj on target_obj.

    The old constraint on the target (if any) is removed, a new one is created
    at the matching stack position, all writable properties are copied, and
    any drivers on the constraint are duplicated onto the target object.
    """
    context = bpy.context
    # remove the old copy of this constraint from the target, if present
    old_mod = target_obj.constraints.get(constraint_name)
    if old_mod:
        target_obj.constraints.remove(old_mod)
    # recreate the constraint from the source object
    for i, constraint in enumerate(source_obj.constraints):
        if constraint.name == constraint_name:
            constraint_new = target_obj.constraints.new(constraint.type)
            constraint_new.name = constraint.name
            # sort new constraint at correct index (default to beginning of the stack)
            idx = 0
            if i > 0:
                # place it right after the constraint that precedes it on the source
                name_prev = source_obj.constraints[i - 1].name
                for target_mod_i, target_constraint in enumerate(
                    target_obj.constraints
                ):
                    if target_constraint.name == name_prev:
                        idx = target_mod_i + 1
            if idx != i:
                # the move operator needs the object visible and active
                with override_obj_visability(obj=target_obj, scene=context.scene):
                    with context.temp_override(object=target_obj):
                        bpy.ops.constraint.move_to_index(
                            constraint=constraint_new.name, index=idx
                        )
            constraint_target = target_obj.constraints.get(constraint.name)
            # copy every writable RNA property across
            props = [
                p.identifier for p in constraint.bl_rna.properties if not p.is_readonly
            ]
            for prop in props:
                value = getattr(constraint, prop)
                setattr(constraint_target, prop, value)
            # HACK to cover edge case of armature constraints: their target
            # list is a collection, not a plain property, so copy it manually.
            if constraint.type == "ARMATURE":
                for target_item in constraint.targets:
                    new_target = constraint_new.targets.new()
                    new_target.target = target_item.target
                    new_target.subtarget = target_item.subtarget
    # carry over any drivers pointing at this constraint
    fcurves = find_drivers(source_obj, 'constraints', constraint_name)
    for fcurve in fcurves:
        copy_driver(from_fcurve=fcurve, target=target_obj)

View File

@ -0,0 +1,86 @@
import bpy
from .attributes import transfer_attribute
from ..transfer_util import check_transfer_data_entry
from ...task_layer import get_transfer_data_owner
from .... import constants
def materials_clean(obj):
    """Clear all materials when no material ownership entry remains on `obj`.

    Material slots cannot use the generic transfer_data_clean() helper: they
    are tracked through a single named transfer-data entry for the object.
    """
    matches = check_transfer_data_entry(
        obj.transfer_data_ownership,
        constants.MATERIAL_TRANSFER_DATA_ITEM_NAME,
        constants.MATERIAL_SLOT_KEY,
    )
    if matches:
        # An owner still exists; keep the materials.
        return
    if obj.data and hasattr(obj.data, 'materials'):
        obj.data.materials.clear()
def materials_is_missing(transfer_data_item):
    """Return True if a material entry exists but its object has no slots."""
    is_material_entry = transfer_data_item.type == constants.MATERIAL_SLOT_KEY
    has_no_slots = len(transfer_data_item.id_data.material_slots) == 0
    if is_material_entry and has_no_slots:
        return True
def init_materials(scene, obj):
    """Ensure `obj` has a single MATERIAL_SLOT transfer-data ownership entry.

    Does nothing for object types that cannot carry material slots.
    """
    asset_pipe = scene.asset_pipeline
    td_type_key = constants.MATERIAL_SLOT_KEY
    name = constants.MATERIAL_TRANSFER_DATA_ITEM_NAME
    transfer_data = obj.transfer_data_ownership
    # Object types that can have material slots.
    material_objects = [
        'CURVE',
        'GPENCIL',
        'META',
        'MESH',
        'SURFACE',
        'FONT',
        'VOLUME',
    ]
    # Only execute if material slots can exist on this object
    if obj.type not in material_objects:
        return
    matches = check_transfer_data_entry(transfer_data, name, td_type_key)
    # Only add a new ownership entry if the materials don't have an owner yet
    if len(matches) == 0:
        task_layer_owner, auto_surrender = get_transfer_data_owner(
            asset_pipe,
            td_type_key,
        )
        asset_pipe.add_temp_transfer_data(
            name=name,
            owner=task_layer_owner,
            type=td_type_key,
            obj=obj,
            surrender=auto_surrender,
        )
def transfer_materials(target_obj: bpy.types.Object, source_obj: bpy.types.Object):
    """Replace target_obj's materials with source_obj's.

    Copies slot links and the active slot index; for curves also copies
    per-spline material indices, and for meshes re-transfers the material
    index attribute when present.
    """
    # Delete all material slots of target object.
    target_obj.data.materials.clear()
    # Transfer material slots
    for idx in range(len(source_obj.material_slots)):
        target_obj.data.materials.append(source_obj.material_slots[idx].material)
        target_obj.material_slots[idx].link = source_obj.material_slots[idx].link
    # Transfer active material slot index
    target_obj.active_material_index = source_obj.active_material_index
    # Transfer material slot assignments for curve
    if target_obj.type == "CURVE":
        for spl_to, spl_from in zip(target_obj.data.splines, source_obj.data.splines):
            spl_to.material_index = spl_from.material_index
    # Per-face material assignment lives in a mesh attribute; transfer it too.
    if source_obj.data.attributes.get(constants.MATERIAL_ATTRIBUTE_NAME):
        transfer_attribute(constants.MATERIAL_ATTRIBUTE_NAME, target_obj, source_obj)

View File

@ -0,0 +1,112 @@
import bpy
from ..transfer_util import (
transfer_data_clean,
transfer_data_item_is_missing,
check_transfer_data_entry,
)
from ...naming import task_layer_prefix_name_get
from ...drivers import find_drivers, copy_driver
from ...visibility import override_obj_visability
from ...task_layer import get_transfer_data_owner
from .... import constants
def modifiers_clean(obj):
    """Remove modifiers on `obj` that no longer have an ownership entry."""
    key = constants.MODIFIER_KEY
    transfer_data_clean(obj=obj, data_list=obj.modifiers, td_type_key=key)
def modifier_is_missing(transfer_data_item):
    """Return whether this MODIFIER entry's modifier is gone from its object."""
    owner_obj = transfer_data_item.id_data
    return transfer_data_item_is_missing(
        transfer_data_item=transfer_data_item,
        td_type_key=constants.MODIFIER_KEY,
        data_list=owner_obj.modifiers,
    )
def init_modifiers(scene, obj):
    """Prefix modifier names with their task-layer owner and ensure each
    modifier on `obj` has a transfer-data ownership entry."""
    asset_pipe = scene.asset_pipeline
    td_type_key = constants.MODIFIER_KEY
    transfer_data = obj.transfer_data_ownership
    task_layer_owner, auto_surrender = get_transfer_data_owner(
        asset_pipe,
        td_type_key,
    )
    for mod in obj.modifiers:
        mod.name = task_layer_prefix_name_get(mod.name, task_layer_owner)
        # Only add a new ownership entry if the modifier doesn't have an owner yet.
        matches = check_transfer_data_entry(transfer_data, mod.name, td_type_key)
        if len(matches) == 0:
            asset_pipe.add_temp_transfer_data(
                name=mod.name,
                owner=task_layer_owner,
                type=td_type_key,
                obj=obj,
                surrender=auto_surrender,
            )
def transfer_modifier(modifier_name, target_obj, source_obj):
    """Recreate the named modifier from source_obj on target_obj.

    The old modifier on the target (if any) is removed, a new one is created
    at the matching stack position, all writable properties are copied, bound
    deform modifiers are re-bound, and drivers are duplicated onto the target.
    """
    # remove old and sync existing modifiers
    context = bpy.context
    scene = context.scene
    old_mod = target_obj.modifiers.get(modifier_name)
    if old_mod:
        target_obj.modifiers.remove(old_mod)
    # transfer new modifiers
    for i, mod in enumerate(source_obj.modifiers):
        if mod.name == modifier_name:
            mod_new = target_obj.modifiers.new(mod.name, mod.type)
            # sort new modifier at correct index (default to beginning of the stack)
            idx = 0
            if i > 0:
                # place it right after the modifier that precedes it on the source
                name_prev = source_obj.modifiers[i - 1].name
                for target_mod_i, target_mod in enumerate(target_obj.modifiers):
                    if target_mod.name == name_prev:
                        idx = target_mod_i + 1
            # the move operator needs the object visible and active
            with override_obj_visability(obj=target_obj, scene=scene):
                with context.temp_override(object=target_obj):
                    bpy.ops.object.modifier_move_to_index(
                        modifier=mod_new.name, index=idx
                    )
            mod_target = target_obj.modifiers.get(mod.name)
            # copy every writable RNA property across
            props = [p.identifier for p in mod.bl_rna.properties if not p.is_readonly]
            for prop in props:
                value = getattr(mod, prop)
                setattr(mod_target, prop, value)

    # rebind modifiers (corr. smooth, surf. deform, mesh deform)
    # NOTE(review): each bind operator is called twice (unbind then rebind) —
    # presumably because the operator toggles bind state; confirm.
    for mod in target_obj.modifiers:
        if mod.type == 'SURFACE_DEFORM':
            if not mod.is_bound:
                continue
            for i in range(2):
                with override_obj_visability(obj=target_obj, scene=scene):
                    with context.temp_override(
                        object=target_obj, active_object=target_obj
                    ):
                        bpy.ops.object.surfacedeform_bind(modifier=mod.name)
        elif mod.type == 'MESH_DEFORM':
            if not mod.is_bound:
                continue
            for i in range(2):
                with override_obj_visability(obj=target_obj, scene=scene):
                    with context.temp_override(
                        object=target_obj, active_object=target_obj
                    ):
                        bpy.ops.object.meshdeform_bind(modifier=mod.name)
        elif mod.type == 'CORRECTIVE_SMOOTH':
            if not mod.is_bind:
                continue
            for i in range(2):
                with override_obj_visability(obj=target_obj, scene=scene):
                    with context.temp_override(
                        object=target_obj, active_object=target_obj
                    ):
                        bpy.ops.object.correctivesmooth_bind(modifier=mod.name)
    # carry over any drivers pointing at this modifier
    fcurves = find_drivers(source_obj, 'modifiers', modifier_name)
    for fcurve in fcurves:
        copy_driver(from_fcurve=fcurve, target=target_obj)

View File

@ -0,0 +1,59 @@
import bpy
from ..transfer_util import check_transfer_data_entry
from ...task_layer import get_transfer_data_owner
from ...naming import get_basename
from .... import constants
def parent_clean(obj):
    """Clear the object's parent when no PARENT ownership entry remains."""
    matches = check_transfer_data_entry(
        obj.transfer_data_ownership,
        get_basename(constants.PARENT_TRANSFER_DATA_ITEM_NAME),
        constants.PARENT_KEY,
    )
    if matches:
        # Ownership entry still present; keep the relationship.
        return
    obj.parent = None
    print("Cleaning Parent Relationship")
def parent_is_missing(transfer_data_item):
    """Return True if a PARENT entry exists but its object has no parent.

    Returns None (falsy) otherwise, matching the other *_is_missing checks.
    """
    if (
        transfer_data_item.type == constants.PARENT_KEY
        # Idiom fix: identity comparison with None (was `== None`).
        and transfer_data_item.id_data.parent is None
    ):
        return True
def init_parent(scene, obj):
    """Ensure a parented object has a PARENT transfer-data ownership entry.

    Raises:
        Exception: if the parent object lives outside the asset collection.
    """
    asset_pipe = scene.asset_pipeline
    td_type_key = constants.PARENT_KEY
    name = constants.PARENT_TRANSFER_DATA_ITEM_NAME
    transfer_data = obj.transfer_data_ownership
    # Nothing to track when the object has no parent.
    # (Idiom fix: `is None` instead of `== None`.)
    if obj.parent is None:
        return
    if obj.parent not in list(asset_pipe.asset_collection.all_objects):
        raise Exception("Object parent cannot be outside of asset collection")
    matches = check_transfer_data_entry(transfer_data, name, td_type_key)
    # Only add a new ownership entry if the parent relationship has no owner yet.
    if len(matches) == 0:
        task_layer_owner, auto_surrender = get_transfer_data_owner(
            asset_pipe,
            td_type_key,
        )
        asset_pipe.add_temp_transfer_data(
            name=name,
            owner=task_layer_owner,
            type=td_type_key,
            obj=obj,
            surrender=auto_surrender,
        )
def transfer_parent(target_obj, source_obj):
    """Copy the parent relationship from source_obj onto target_obj.

    NOTE(review): matrix_parent_inverse is rebuilt from the parent's current
    world matrix rather than copied from source_obj.matrix_parent_inverse —
    presumably intentional (re-snapping the inverse); confirm against callers.
    """
    target_obj.parent = source_obj.parent
    target_obj.matrix_parent_inverse = source_obj.parent.matrix_world.inverted()

View File

@ -0,0 +1,231 @@
import bpy
import mathutils
import bmesh
import numpy as np
def closest_face_to_point(bm_source, p_target, bvh_tree=None):
    """Return the face of `bm_source` nearest to point `p_target`.

    A prebuilt BVH tree may be supplied to avoid rebuilding one per query.
    """
    if not bvh_tree:
        bvh_tree = mathutils.bvhtree.BVHTree.FromBMesh(bm_source)
    nearest = bvh_tree.find_nearest(p_target)
    face_index = nearest[2]
    return bm_source.faces[face_index]
def tris_per_face(bm_source):
    """Group the BMesh's loop triangles by the face they belong to.

    Returns a dict mapping every face of `bm_source` to a list of its loop
    triangles (possibly empty). Each loop belongs to exactly one face, so a
    loop->face lookup buckets all triangles in a single O(n) pass instead of
    the previous O(faces * tris) scan-and-pop. Per-face order matches the
    original implementation (reverse of calc_loop_triangles() order).
    """
    tris_source = bm_source.calc_loop_triangles()
    face_of_loop = {}
    tris_dict = {}
    for face in bm_source.faces:
        tris_dict[face] = []
        for loop in face.loops:
            face_of_loop[loop] = face
    for tri in reversed(tris_source):
        face = face_of_loop.get(tri[0])
        if face is not None:
            tris_dict[face].append(tri)
    return tris_dict
def closest_tri_on_face(tris_dict, face, p):
    """Return (triangle, point): the loop-triangle of `face` whose surface is
    closest to point `p`, and the closest point on that triangle."""
    candidates = []
    for tri in tris_dict[face]:
        corners = [tri[idx].vert.co for idx in range(3)]
        surface_point = mathutils.geometry.closest_point_on_tri(p, *corners)
        candidates.append((tri, surface_point))
    distances = [(surface_point - p).length for _, surface_point in candidates]
    nearest = int(np.argmin(np.array(distances)))
    return candidates[nearest]
def closest_edge_on_face_to_line(face, p1, p2, skip_edges=None):
    """Return the edge of `face` closest to the line segment p1-p2, or None.

    An edge qualifies only when the closest-approach points lie within both
    the edge segment and the p1-p2 segment. Edges in `skip_edges` are ignored.
    """
    for edge in face.edges:
        if skip_edges and edge in skip_edges:
            continue
        res = mathutils.geometry.intersect_line_line(
            p1, p2, *[edge.verts[i].co for i in range(2)]
        )
        if not res:
            continue
        (p_traversal, p_edge) = res
        # Hoisted: these direction vectors were each computed twice before.
        edge_vec = edge.verts[1].co - edge.verts[0].co
        line_vec = p2 - p1
        # Parametric position of each closest point along its segment.
        frac_1 = edge_vec.dot(p_edge - edge.verts[0].co) / edge_vec.length ** 2.0
        frac_2 = line_vec.dot(p_traversal - p1) / line_vec.length ** 2.0
        # Both closest points must fall inside their respective segments.
        if 0 <= frac_1 <= 1 and 0 <= frac_2 <= 1:
            return edge
    return None
def edge_data_split(edge, data_layer, data_suffix: str):
    """Return True if the corner data around `edge` disagrees, i.e. the edge
    is a data seam (e.g. a UV seam) in `data_layer`.

    For each vertex of the edge, the loops of the edge's adjacent faces are
    compared; more than one distinct value at a vertex means a split.
    """
    # The set of loops belonging to the faces touching this edge is the same
    # for every vert/loop visited below — the original rebuilt it inside the
    # innermost loop; compute it once up front instead.
    loops_edge_vert = {loop for f in edge.link_faces for loop in f.loops}
    for vert in edge.verts:
        vals = []
        for loop in vert.link_loops:
            if loop not in loops_edge_vert:
                continue
            dat = data_layer[loop.index]
            element = list(getattr(dat, data_suffix))
            # Collect the first value, then any value differing from it.
            if not vals or vals[0] != element:
                vals.append(element)
        if len(vals) > 1:
            return True
    return False
def interpolate_data_from_face(
    bm_source, tris_dict, face, p, data_layer_source, data_suffix=''
):
    """Returns interpolated value of a data layer within a face closest to a point.

    Finds the triangle of `face` closest to `p`, then blends the three corner
    values with barycentric weights. Returns None when no triangle is found.
    With an empty `data_suffix` the data elements are used directly; otherwise
    the named property (e.g. 'vector', 'color') is read from each element.
    """
    (tri, point) = closest_tri_on_face(tris_dict, face, p)
    if not tri:
        return None
    # Barycentric weights of `point` within the triangle.
    weights = mathutils.interpolate.poly_3d_calc(
        [tri[i].vert.co for i in range(3)], point
    )
    if not data_suffix:
        cols_weighted = [
            weights[i] * np.array(data_layer_source[tri[i].index]) for i in range(3)
        ]
        col = sum(np.array(cols_weighted))
    else:
        cols_weighted = [
            weights[i] * np.array(getattr(data_layer_source[tri[i].index], data_suffix))
            for i in range(3)
        ]
        col = sum(np.array(cols_weighted))
    return col
def transfer_corner_data(
    obj_source, obj_target, data_layer_source, data_layer_target, data_suffix=''
):
    """
    Transfers interpolated face corner data from data layer of a source object to data layer of a
    target object, while approximately preserving data seams (e.g. necessary for UV Maps).
    The transfer is face interpolated per target corner within the source face that is closest
    to the target corner point and does not have any data seams on the way back to the
    source face that is closest to the target face's center.
    """
    bm_source = bmesh.new()
    bm_source.from_mesh(obj_source.data)
    bm_source.faces.ensure_lookup_table()
    bm_target = bmesh.new()
    bm_target.from_mesh(obj_target.data)
    bm_target.faces.ensure_lookup_table()
    bvh_tree = mathutils.bvhtree.BVHTree.FromBMesh(bm_source)
    tris_dict = tris_per_face(bm_source)
    for face_target in bm_target.faces:
        face_target_center = face_target.calc_center_median()
        # Anchor face: the source face closest to the target face's center.
        face_source = closest_face_to_point(bm_source, face_target_center, bvh_tree)
        for corner_target in face_target.loops:
            # find nearest face on target compared to face that loop belongs to
            p = corner_target.vert.co
            face_source_closest = closest_face_to_point(bm_source, p, bvh_tree)
            enclosed = face_source_closest is face_source
            face_source_int = face_source
            if not enclosed:
                # traverse faces between point and face center, stopping at
                # data seams so values are not blended across them
                traversed_faces = set()
                traversed_edges = set()
                while face_source_int is not face_source_closest:
                    traversed_faces.add(face_source_int)
                    edge = closest_edge_on_face_to_line(
                        face_source_int,
                        face_target_center,
                        p,
                        skip_edges=traversed_edges,
                    )
                    if edge == None:
                        break
                    if len(edge.link_faces) != 2:
                        break
                    traversed_edges.add(edge)
                    split = edge_data_split(edge, data_layer_source, data_suffix)
                    if split:
                        break
                    # set new source face to other face belonging to edge
                    face_source_int = (
                        edge.link_faces[1]
                        if edge.link_faces[1] is not face_source_int
                        else edge.link_faces[0]
                    )
                    # avoid looping behaviour
                    if face_source_int in traversed_faces:
                        face_source_int = face_source
                        break
            # interpolate data from selected face
            col = interpolate_data_from_face(
                bm_source, tris_dict, face_source_int, p, data_layer_source, data_suffix
            )
            if col is None:
                continue
            if not data_suffix:
                data_layer_target.data[corner_target.index] = col
            else:
                setattr(data_layer_target[corner_target.index], data_suffix, list(col))
    return
def is_mesh_identical(mesh_a, mesh_b) -> bool:
    """Cheap topology comparison between two meshes.

    Checks element counts and per-edge vertex indices only — vertex positions
    and face data are deliberately ignored (efficiency over exactness).
    """
    counts_match = (
        len(mesh_a.vertices) == len(mesh_b.vertices)
        and len(mesh_a.edges) == len(mesh_b.edges)
        and len(mesh_a.polygons) == len(mesh_b.polygons)
    )
    if not counts_match:
        return False
    # Same counts: also require every edge to reference the same vertex indices.
    return all(
        v1 == v2
        for e1, e2 in zip(mesh_a.edges, mesh_b.edges)
        for v1, v2 in zip(e1.vertices, e2.vertices)
    )
def is_curve_identical(curve_a: bpy.types.Curve, curve_b: bpy.types.Curve) -> bool:
    """Cheap topology comparison: same spline count and per-spline point count."""
    if len(curve_a.splines) != len(curve_b.splines):
        return False
    return all(
        len(spline1.points) == len(spline2.points)
        for spline1, spline2 in zip(curve_a.splines, curve_b.splines)
    )
def is_obdata_identical(
    a: "bpy.types.Object | bpy.types.Mesh", b: "bpy.types.Object | bpy.types.Mesh"
) -> bool:
    """Checks if two objects have matching topology (efficiency over exactness)"""
    # Accept Objects or datablocks; unwrap Objects to their data first.
    if type(a) == bpy.types.Object:
        a = a.data
    if type(b) == bpy.types.Object:
        b = b.data
    if type(a) != type(b):
        return False
    if type(a) == bpy.types.Mesh:
        return is_mesh_identical(a, b)
    elif type(a) == bpy.types.Curve:
        return is_curve_identical(a, b)
    else:
        # TODO: Support geometry types other than mesh or curve.
        # NOTE(review): returns None (falsy) here, not False, despite -> bool.
        return

View File

@ -0,0 +1,152 @@
import bpy
import mathutils
import bmesh
import numpy as np
from .proximity_core import tris_per_face, closest_face_to_point, closest_tri_on_face
from ..transfer_util import (
transfer_data_item_is_missing,
transfer_data_item_init,
check_transfer_data_entry,
)
from ...naming import get_basename
from ...drivers import find_drivers, copy_driver
from .... import constants
def shape_key_set_active(obj, shape_key_name):
    """Set obj.active_shape_key_index to the key block named `shape_key_name`."""
    key_blocks = obj.data.shape_keys.key_blocks
    for index, key in enumerate(key_blocks):
        if key.name != shape_key_name:
            continue
        obj.active_shape_key_index = index
def shape_keys_clean(obj):
    """Remove shape keys on `obj` that have no ownership entry.

    Keys without a matching transfer-data entry were removed by their owning
    task layer and are deleted locally as well.
    """
    if obj.type != "MESH" or obj.data.shape_keys is None:
        return
    # BUG FIX: collect first, then remove. The original removed key blocks
    # while iterating obj.data.shape_keys.key_blocks, which mutates the
    # collection mid-iteration and can skip entries.
    names_to_remove = []
    for shape_key in obj.data.shape_keys.key_blocks:
        matches = check_transfer_data_entry(
            obj.transfer_data_ownership,
            get_basename(shape_key.name),
            constants.SHAPE_KEY_KEY,
        )
        if len(matches) == 0:
            names_to_remove.append(shape_key.name)
    for name in names_to_remove:
        # Removing the last key deletes the shape_keys datablock entirely.
        if obj.data.shape_keys is None:
            break
        # Re-fetch by name: prior removals shift the collection.
        shape_key = obj.data.shape_keys.key_blocks.get(name)
        if shape_key:
            obj.shape_key_remove(shape_key)
def shape_key_is_missing(transfer_data_item):
    """Return True if this SHAPE_KEY ownership entry refers to a key block
    that no longer exists on its object.

    Returns None (falsy) for non-shape-key entries and non-mesh objects.
    """
    if not transfer_data_item.type == constants.SHAPE_KEY_KEY:
        return
    obj = transfer_data_item.id_data
    if obj.type != 'MESH':
        return
    # No shape key datablock at all: every tracked key is missing.
    if not obj.data.shape_keys:
        return True
    return transfer_data_item_is_missing(
        transfer_data_item=transfer_data_item,
        td_type_key=constants.SHAPE_KEY_KEY,
        data_list=obj.data.shape_keys.key_blocks,
    )
def init_shape_keys(scene, obj):
    """Ensure every shape key block on `obj` has an ownership entry.

    Raises:
        Exception: if a key block is ordered before its relative (base) key,
            an ordering the transfer logic cannot reproduce on the target.
    """
    if obj.type != "MESH" or obj.data.shape_keys is None:
        return

    # Check that the order is legal.
    # Key Blocks must be ordered after the key they are Relative To.
    for i, kb in enumerate(obj.data.shape_keys.key_blocks):
        if kb.relative_key:
            base_shape_idx = obj.data.shape_keys.key_blocks.find(kb.relative_key.name)
            if base_shape_idx > i:
                raise Exception(
                    f'Shape Key "{kb.name}" must be ordered after its base shape "{kb.relative_key.name}" on object "{obj.name}".'
                )

    transfer_data_item_init(
        scene=scene,
        obj=obj,
        data_list=obj.data.shape_keys.key_blocks,
        td_type_key=constants.SHAPE_KEY_KEY,
    )
def transfer_shape_key(
    context: bpy.types.Context,
    shape_key_name: str,
    target_obj: bpy.types.Object,
    source_obj: bpy.types.Object,
):
    """Transfer one shape key from source_obj to target_obj.

    Per-vertex offsets are interpolated by proximity (closest source triangle,
    barycentric weights), so meshes with differing topology are supported.
    Key settings (range, value, mute, vertex group, relative key) and drivers
    on the key block are copied as well.
    """
    if not source_obj.data.shape_keys:
        return
    sk_source = source_obj.data.shape_keys.key_blocks.get(shape_key_name)
    assert sk_source
    sk_target = None
    if not target_obj.data.shape_keys:
        # First key added to an object becomes its Basis key.
        sk_target = target_obj.shape_key_add()
    if not sk_target:
        sk_target = target_obj.data.shape_keys.key_blocks.get(shape_key_name)
    if not sk_target:
        sk_target = target_obj.shape_key_add()
    sk_target.name = sk_source.name
    sk_target.vertex_group = sk_source.vertex_group
    if sk_source.relative_key != sk_source:
        relative_key = None
        if target_obj.data.shape_keys:
            relative_key = target_obj.data.shape_keys.key_blocks.get(
                sk_source.relative_key.name
            )
        if relative_key:
            sk_target.relative_key = relative_key
        else:
            # If the base shape of one of our shapes was removed by another task layer,
            # the result will probably be pretty bad, but it's not a catastrophic failure.
            # Proceed with a warning.
            print(
                f'Warning: Base shape "{sk_source.relative_key.name}" of Key "{sk_source.name}" was removed from "{target_obj.name}"'
            )
    sk_target.slider_min = sk_source.slider_min
    sk_target.slider_max = sk_source.slider_max
    sk_target.value = sk_source.value
    sk_target.mute = sk_source.mute

    # Interpolate each target vertex's offset from the closest source triangle.
    bm_source = bmesh.new()
    bm_source.from_mesh(source_obj.data)
    bm_source.faces.ensure_lookup_table()
    bvh_tree = mathutils.bvhtree.BVHTree.FromBMesh(bm_source)
    tris_dict = tris_per_face(bm_source)
    for i, vert in enumerate(target_obj.data.vertices):
        p = vert.co
        face = closest_face_to_point(bm_source, p, bvh_tree)
        (tri, point) = closest_tri_on_face(tris_dict, face, p)
        if not tri:
            continue
        # Barycentric weights of `point` within the triangle.
        weights = mathutils.interpolate.poly_3d_calc(
            [tri[i].vert.co for i in range(3)], point
        )
        # Weighted sum of the source key's per-vertex offsets (key co - base co).
        vals_weighted = [
            weights[i]
            * (
                sk_source.data[tri[i].vert.index].co
                - source_obj.data.vertices[tri[i].vert.index].co
            )
            for i in range(3)
        ]
        val = mathutils.Vector(sum(np.array(vals_weighted)))
        sk_target.data[i].co = vert.co + val

    # NOTE(review): shape_keys was verified non-None at the top, so this guard
    # looks redundant — confirm nothing above can delete the datablock.
    if source_obj.data.shape_keys is None:
        return
    fcurves = find_drivers(
        source_obj.data.shape_keys,
        'key_blocks',
        shape_key_name,
    )
    for fcurve in fcurves:
        copy_driver(from_fcurve=fcurve, target=target_obj.data.shape_keys)

View File

@ -0,0 +1,193 @@
import bpy
from mathutils import Vector, kdtree
from typing import Dict, Tuple, List
from ..transfer_util import (
transfer_data_clean,
transfer_data_item_is_missing,
transfer_data_item_init,
)
from .... import constants
def vertex_groups_clean(obj):
    """Remove vertex groups on obj that are not backed by a transfer data entry."""
    transfer_data_clean(
        td_type_key=constants.VERTEX_GROUP_KEY,
        data_list=obj.vertex_groups,
        obj=obj,
    )
def vertex_group_is_missing(transfer_data_item):
    """Return True if the vertex group referenced by transfer_data_item is absent on its owning object."""
    owner = transfer_data_item.id_data
    return transfer_data_item_is_missing(
        data_list=owner.vertex_groups,
        td_type_key=constants.VERTEX_GROUP_KEY,
        transfer_data_item=transfer_data_item,
    )
def init_vertex_groups(scene, obj):
    """Create transfer data entries for every vertex group currently on obj."""
    transfer_data_item_init(
        td_type_key=constants.VERTEX_GROUP_KEY,
        data_list=obj.vertex_groups,
        obj=obj,
        scene=scene,
    )
def transfer_vertex_group(
    context,
    vertex_group_name: str,
    target_obj: bpy.types.Object,
    source_obj: bpy.types.Object,
):
    """Transfer a single named vertex group from source_obj onto target_obj.

    Silently does nothing when source and target are the same object; prints
    an error and returns when the group is missing on the source.
    """
    if source_obj == target_obj:
        return

    group = source_obj.vertex_groups.get(vertex_group_name)
    if not group:
        print(f"ERROR Vertex Group {vertex_group_name} not found in {source_obj.name}")
        return

    precalc_and_transfer_single_group(
        source_obj, target_obj, vertex_group_name, expand=2
    )
def precalc_and_transfer_single_group(source_obj, target_obj, vgroup_name, expand=2):
    """Convenience function to transfer a single group. For transferring multiple groups,
    this is very inefficient and shouldn't be used.

    Instead, you should:
    - build_kd_tree ONCE per source mesh.
    - build_vert_influence_map and transfer_vertex_groups ONCE per object pair.
    """
    # A pre-existing group of the same name on the target would be stale;
    # drop it before transferring.
    existing = target_obj.vertex_groups.get(vgroup_name)
    if existing:
        target_obj.vertex_groups.remove(existing)

    tree = build_kdtree(source_obj.data)
    influence_map = build_vert_influence_map(source_obj, target_obj, tree, expand)
    groups_to_transfer = [source_obj.vertex_groups[vgroup_name]]
    transfer_vertex_groups(source_obj, target_obj, influence_map, groups_to_transfer)
def build_kdtree(mesh):
    """Build and balance a KDTree over all vertex coordinates of mesh."""
    tree = kdtree.KDTree(len(mesh.vertices))
    for index, vertex in enumerate(mesh.vertices):
        tree.insert(vertex.co, index)
    tree.balance()
    return tree
def build_vert_influence_map(obj_from, obj_to, kd_tree, expand=2):
    """Pre-calculate, for every vertex of obj_to, which source vertices of
    obj_from influence it and by how much.

    The result can be re-used to transfer any number of vertex groups between
    the same object pair without re-doing the proximity search.

    :param obj_from: Source mesh object whose edges/vertices are searched.
    :param obj_to: Target mesh object whose vertices receive influences.
    :param kd_tree: KDTree built from obj_from's vertices (see build_kdtree).
    :param expand: Number of edge-expansion steps around the nearest vertex.
    :return: Dict mapping target vertex index -> list of (source index, influence).
    """
    # Build both edge lookup tables in a single pass over the source edges
    # (previously the edge list was iterated twice).
    verts_of_edge: Dict[int, Tuple[int, int]] = {}
    edges_of_vert: Dict[int, List[int]] = {}
    for edge_idx, edge in enumerate(obj_from.data.edges):
        verts_of_edge[edge_idx] = (edge.vertices[0], edge.vertices[1])
        for vert_idx in edge.vertices:
            edges_of_vert.setdefault(vert_idx, []).append(edge_idx)

    # A mapping from target vertex index to a list of source vertex indices and
    # their influence. This can be pre-calculated once per object pair, to
    # minimize re-calculations of subsequent transferring of individual vertex
    # groups. (FIX: this is a dict; it was previously annotated as an invalid
    # List[int, ...] type.)
    vert_influence_map: Dict[int, List[Tuple[int, float]]] = {}
    for i, dest_vert in enumerate(obj_to.data.vertices):
        vert_influence_map[i] = get_source_vert_influences(
            dest_vert, obj_from, kd_tree, expand, edges_of_vert, verts_of_edge
        )
    return vert_influence_map
def get_source_vert_influences(
    target_vert, obj_from, kd_tree, expand=2, edges_of_vert={}, verts_of_edge={}
) -> List[Tuple[int, float]]:
    """Find the source vertices that should influence target_vert, with weights.

    The nearest source vertex is found via the KDTree, then the selection is
    grown `expand` times along the source mesh's edges. Influence is inversely
    proportional to distance and normalized so all influences sum to 1.0.

    NOTE: the mutable dict defaults are never mutated here, but callers must
    pass both lookup tables explicitly — with the empty defaults the lookup
    below would raise KeyError.

    :return: List of (source vertex index, influence) tuples.
    """
    _coord, idx, dist = get_nearest_vert(target_vert.co, kd_tree)
    source_vert_indices = [idx]

    if dist == 0:
        # If the vertex position is a perfect match, just use that one vertex with max influence.
        return [(idx, 1)]

    # Grow the selection along connected edges, `expand` times.
    for _ in range(expand):
        new_indices = []
        for vert_idx in source_vert_indices:
            for edge in edges_of_vert[vert_idx]:
                vert_other = other_vert_of_edge(edge, vert_idx, verts_of_edge)
                if vert_other not in source_vert_indices:
                    new_indices.append(vert_other)
        source_vert_indices.extend(new_indices)

    distances: List[Tuple[int, float]] = []
    distance_total = 0
    for src_vert_idx in source_vert_indices:
        distance = (target_vert.co - obj_from.data.vertices[src_vert_idx].co).length
        distance_total += distance
        distances.append((src_vert_idx, distance))

    # Calculate influences such that the total of all influences adds up to 1.0,
    # and the influence is inversely correlated with the distance.
    # (Every distance here is > 0, since the dist == 0 case returned early.)
    parts = [1 / (dist / distance_total) for _idx, dist in distances]
    parts_sum = sum(parts)

    # BUGFIX: unpack each (index, distance) pair from `distances`. Previously
    # the loop was `for part, dist in zip(parts, distances)`, which compared
    # the whole tuple against 0 (always False) and re-used the enclosing
    # nearest-vertex `idx` for EVERY entry, so all influences pointed at the
    # same source vertex.
    influences = [
        (src_idx, 1 if src_dist == 0 else part / parts_sum)
        for part, (src_idx, src_dist) in zip(parts, distances)
    ]
    return influences
def get_nearest_vert(
    coords: Vector, kd_tree: kdtree.KDTree
) -> Tuple[Vector, int, float]:
    """Return coordinate, index, and distance of nearest vert to coords in kd_tree."""
    nearest = kd_tree.find(coords)
    return nearest
def other_vert_of_edge(
    edge: int, vert: int, verts_of_edge: Dict[int, Tuple[int, int]]
) -> int:
    """Given an edge index and one of its two vertex indices, return the opposite vertex index."""
    vert_a, vert_b = verts_of_edge[edge]
    assert vert in (vert_a, vert_b), f"Vert {vert} not part of edge {edge}."
    return vert_a if vert == vert_b else vert_b
def transfer_vertex_groups(obj_from, obj_to, vert_influence_map, src_vgroups):
    """Transfer src_vgroups in obj_from to obj_to using a pre-calculated vert_influence_map.

    :param obj_from: Source object providing the vertex group weights.
    :param obj_to: Target object receiving the groups/weights.
    :param vert_influence_map: target vertex index -> [(source index, influence), ...]
        as produced by build_vert_influence_map.
    :param src_vgroups: List of vertex groups of obj_from to transfer.
    """
    # Ensure all target groups exist up front.
    # FIX: use `is None` instead of `== None`; also drop the dead `target_vg`
    # binding the old code created here but never used.
    for src_vg in src_vgroups:
        if obj_to.vertex_groups.get(src_vg.name) is None:
            obj_to.vertex_groups.new(name=src_vg.name)

    for i, dest_vert in enumerate(obj_to.data.vertices):
        source_verts = vert_influence_map[i]

        # Vertex Group Name : Weight
        vgroup_weights = {}

        for src_vert_idx, influence in source_verts:
            # vertex.groups only lists groups this vertex belongs to, so
            # vg.weight() below cannot raise.
            for group in obj_from.data.vertices[src_vert_idx].groups:
                vg = obj_from.vertex_groups[group.group]
                if vg not in src_vgroups:
                    continue
                vgroup_weights[vg.name] = (
                    vgroup_weights.get(vg.name, 0)
                    + vg.weight(src_vert_idx) * influence
                )

        # Assign final weights of this vertex in the vertex groups.
        for vg_name, weight in vgroup_weights.items():
            obj_to.vertex_groups.get(vg_name).add([dest_vert.index], weight, 'REPLACE')