Node Wrangler: add back exposure compensation for Preview Node #105136

Closed
Damien Picard wants to merge 4 commits from pioverfour/blender-addons:dp_nw_exposure_compensation into main

64 changed files with 1352 additions and 1261 deletions
Showing only changes of commit 83f1bc2302


@@ -9,12 +9,12 @@ body:
 attributes:
 value: |
 ### Instructions
-First time reporting? See [tips](https://wiki.blender.org/wiki/Process/Bug_Reports).
+First time reporting? See [tips](https://developer.blender.org/docs/handbook/bug_reports/making_good_bug_reports/).
 * Use **Help > Report a Bug** in Blender to fill system information and exact Blender version.
 * Test [daily builds](https://builder.blender.org/) to verify if the issue is already fixed.
 * Test [previous versions](https://download.blender.org/release/) to find an older working version.
-* For feature requests, feedback, questions or build issues, see [communication channels](https://wiki.blender.org/wiki/Communication/Contact#User_Feedback_and_Requests).
+* For feature requests, feedback, questions or build issues, see [communication channels](https://developer.blender.org/docs/handbook/communication/user_feedback/).
 * If there are multiple bugs, make multiple bug reports.
 - type: textarea


@@ -6,9 +6,9 @@ body:
 value: |
 ### Instructions
-* [Contributing a new add-on](https://wiki.blender.org/wiki/Process/Addons)
-* [Contributing code](https://wiki.blender.org/index.php/Dev:Doc/Process/Contributing_Code)
-* [Effective code review](https://wiki.blender.org/index.php/Dev:Doc/Tools/Code_Review)
+* [Contributing a new add-on](https://developer.blender.org/docs/handbook/addons/)
+* [Contributing code](https://developer.blender.org/docs/handbook/contributing/)
+* [Effective code review](https://developer.blender.org/docs/handbook/contributing/pull_requests/)
 By submitting code here, you agree that the code is (compatible with) GNU GPL v2 or later.


@@ -1,4 +1,4 @@
 This repository is only used as a mirror. Blender development happens on projects.blender.org.
 To get started with contributing code, please see:
-https://wiki.blender.org/wiki/Process/Contributing_Code
+https://developer.blender.org/docs/handbook/contributing/

.github/stale.yml

@@ -18,4 +18,4 @@ closeComment: >
 used as a mirror. Blender development happens on projects.blender.org.
 To get started contributing code, please read:
-https://wiki.blender.org/wiki/Process/Contributing_Code
+https://developer.blender.org/docs/handbook/contributing/


@@ -32,10 +32,10 @@ if flag is False:
 # Import modules
 # ----------------------------------------------
 if "bpy" in locals():
-import imp
-imp.reload(import_ase)
-imp.reload(import_krita)
+import importlib
+importlib.reload(import_ase)
+importlib.reload(import_krita)
 else:
 import import_ase
 import import_krita
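For context, swapping the removed imp module for importlib here follows the usual Blender add-on reload idiom, so re-enabling the add-on picks up edited submodules. A minimal, generic sketch of that idiom (the submodule name is a placeholder, not part of this patch):

```python
# Illustrative reload idiom for add-on submodules (placeholder names).
if "bpy" in locals():
    # The add-on was already loaded once: reload submodules so that
    # re-enabling it picks up any edits.
    import importlib
    importlib.reload(my_submodule)  # placeholder submodule name
else:
    from . import my_submodule      # first load

import bpy
```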


@@ -5,7 +5,7 @@
 bl_info = {
 "name": "FBX format",
 "author": "Campbell Barton, Bastien Montagne, Jens Restemeier, @Mysteryem",
-"version": (5, 11, 5),
+"version": (5, 11, 6),
 "blender": (4, 1, 0),
 "location": "File > Import-Export",
 "description": "FBX IO meshes, UVs, vertex colors, materials, textures, cameras, lamps and actions",


@@ -2753,7 +2753,7 @@ def fbx_data_from_scene(scene, depsgraph, settings):
 _cos = MESH_ATTRIBUTE_POSITION.to_ndarray(me.attributes)
 else:
 _cos = np.empty(len(me.vertices) * 3, dtype=co_bl_dtype)
-shape_key.data.foreach_get("co", _cos)
+shape_key.points.foreach_get("co", _cos)
 return vcos_transformed(_cos, geom_mat_co, co_fbx_dtype)
 for shape in me.shape_keys.key_blocks[1:]:
@@ -3108,9 +3108,9 @@ def fbx_header_elements(root, scene_data, time=None):
 app_name = "Blender (stable FBX IO)"
 app_ver = bpy.app.version_string
-import addon_utils
-import sys
-addon_ver = addon_utils.module_bl_info(sys.modules[__package__])['version']
+from . import bl_info
+addon_ver = bl_info["version"]
+del bl_info
 # ##### Start of FBXHeaderExtension element.
 header_ext = elem_empty(root, b"FBXHeaderExtension")


@@ -2002,7 +2002,7 @@ def blen_read_shapes(fbx_tmpl, fbx_data, objects, me, scene):
 if dvcos.any():
 shape_cos = me_vcos_vector_view.copy()
 shape_cos[indices] += dvcos
-kb.data.foreach_set("co", shape_cos.ravel())
+kb.points.foreach_set("co", shape_cos.ravel())
 shape_key_values_in_range &= expand_shape_key_range(kb, weight)
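Both shape-key hunks above (shape_key.data → shape_key.points, kb.data → kb.points) use the same bulk foreach_get/foreach_set pattern. A small, hedged sketch of that pattern, assuming a Blender build where ShapeKey exposes .points (the object and key index are placeholders):

```python
import bpy
import numpy as np

obj = bpy.data.objects["Cube"]            # placeholder object name
kb = obj.data.shape_keys.key_blocks[1]    # first non-Basis key, for illustration

cos = np.empty(len(kb.points) * 3, dtype=np.float32)
kb.points.foreach_get("co", cos)          # bulk read, no Python loop
cos = cos.reshape(-1, 3)
cos[:, 2] += 0.1                          # nudge every point up a little
kb.points.foreach_set("co", cos.ravel())  # bulk write back
obj.data.update()
```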


@@ -5,7 +5,7 @@
 bl_info = {
 'name': 'glTF 2.0 format',
 'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
-"version": (4, 1, 38),
+"version": (4, 1, 57),
 'blender': (4, 1, 0),
 'location': 'File > Import-Export',
 'description': 'Import-Export as glTF 2.0',


@@ -139,6 +139,13 @@ def get_attribute_type(component_type, data_type):
 else:
 pass
+def get_attribute(attributes, name, data_type, domain):
+attribute = attributes.get(name)
+if attribute is not None and attribute.data_type == data_type and attribute.domain == domain:
+return attribute
+else:
+return None
 def get_gltf_interpolation(interpolation):
 return {
 "BEZIER": "CUBICSPLINE",


@@ -0,0 +1,69 @@
# SPDX-FileCopyrightText: 2018-2024 The glTF-Blender-IO authors
#
# SPDX-License-Identifier: Apache-2.0
import numpy as np
def fast_structured_np_unique(arr, *args, **kwargs):
"""
np.unique optimized for structured arrays when a sorted result is not required.
np.unique works through sorting, but sorting a structured array requires as many sorts as there are fields in the
structured dtype.
By viewing the array as a single non-structured dtype that sorts according to its bytes, unique elements can be
found with a single sort. Since the values are viewed as a different type to their original, this means that the
returned array of unique values may not be sorted according to their original type.
Float field caveats:
All elements of -0.0 in the input array will be replaced with 0.0 to ensure that both values are collapsed into one.
NaN values can have lots of different byte representations (e.g. signalling/quiet and custom payloads). Only the
duplicates of each unique byte representation will be collapsed into one.
Nested structured dtypes are not supported.
The behaviour of structured dtypes with overlapping fields is undefined.
"""
structured_dtype = arr.dtype
fields = structured_dtype.fields
if fields is None:
raise RuntimeError('%s is not a structured dtype' % structured_dtype)
for field_name, (field_dtype, *_offset_and_optional_title) in fields.items():
if field_dtype.subdtype is not None:
raise RuntimeError('Nested structured types are not supported in %s' % structured_dtype)
if field_dtype.kind == 'f':
# Replace all -0.0 in the array with 0.0 because -0.0 and 0.0 have different byte representations.
arr[field_name][arr[field_name] == -0.0] = 0.0
elif field_dtype.kind not in "iuUSV":
# Signed integer, unsigned integer, unicode string, byte string (bytes) and raw bytes (void) can be left
# as they are. Everything else is unsupported.
raise RuntimeError('Unsupported structured field type %s for field %s' % (field_dtype, field_name))
structured_itemsize = structured_dtype.itemsize
# Integer types sort the fastest, but are only available for specific itemsizes.
uint_dtypes_by_itemsize = {1: np.uint8, 2: np.uint16, 4: np.uint32, 8: np.uint64}
# Signed/unsigned makes no noticeable speed difference, but using unsigned will result in ordering according to
# individual bytes like the other, non-integer types.
if structured_itemsize in uint_dtypes_by_itemsize:
entire_structure_dtype = uint_dtypes_by_itemsize[structured_itemsize]
else:
# Construct a flexible size dtype with matching itemsize to the entire structured dtype.
# Should always be 4 because each character in a unicode string is UCS4.
str_itemsize = np.dtype((np.str_, 1)).itemsize
if structured_itemsize % str_itemsize == 0:
# Unicode strings seem to be slightly faster to sort than bytes.
entire_structure_dtype = np.dtype((np.str_, structured_itemsize // str_itemsize))
else:
# Bytes seem to be slightly faster to sort than raw bytes (np.void).
entire_structure_dtype = np.dtype((np.bytes_, structured_itemsize))
result = np.unique(arr.view(entire_structure_dtype), *args, **kwargs)
unique = result[0] if isinstance(result, tuple) else result
# View in the original dtype.
unique = unique.view(arr.dtype)
if isinstance(result, tuple):
return (unique,) + result[1:]
else:
return unique
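To make the helper's intent concrete, a small self-contained usage sketch (the dtype and values are invented for illustration, and fast_structured_np_unique is assumed to be imported from the module added above); as the docstring notes, the unique rows come back in byte order, not sorted by their original field types:

```python
import numpy as np

# A structured "dot" record, similar in spirit to the exporter's vertex dots.
dot_dtype = np.dtype([('vertex_index', np.uint32),
                      ('uv_x', np.float32),
                      ('uv_y', np.float32)])
dots = np.array([(0, 0.25, 0.5),
                 (1, 0.75, 0.5),
                 (0, 0.25, 0.5)],   # duplicate of the first row
                dtype=dot_dtype)

unique_dots, inverse = fast_structured_np_unique(dots, return_inverse=True)
print(len(unique_dots))  # 2 unique rows
print(inverse)           # index of each original row in unique_dots
```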


@@ -135,7 +135,7 @@ def __convert_keyframes(
 transform = matrix_parent_inverse
 values = []
-fps = bpy.context.scene.render.fps
+fps = (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
 for keyframe in keyframes:
 # Transform the data and build gltf control points
 value = gltf2_blender_math.transform(keyframe.value, target_datapath, transform, need_rotation_correction)


@@ -9,9 +9,9 @@ from ...com import gltf2_blender_math
 class Keyframe:
 def __init__(self, channels: typing.Tuple[bpy.types.FCurve], frame: float, bake_channel: typing.Union[str, None]):
-self.seconds = frame / bpy.context.scene.render.fps
+self.seconds = frame / (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
 self.frame = frame
-self.fps = bpy.context.scene.render.fps
+self.fps = (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
 self.__length_morph = 0
 # Note: channels has some None items only for SK if some SK are not animated
 if bake_channel is None:


@@ -94,7 +94,7 @@ def __convert_keyframes(armature_uuid, bone_name, channel, keyframes, action_nam
 if armature_uuid in export_settings['slide'].keys() and action_name in export_settings['slide'][armature_uuid].keys():
 for k in keyframes:
 k.frame += -export_settings['slide'][armature_uuid][action_name]
-k.seconds = k.frame / bpy.context.scene.render.fps
+k.seconds = k.frame / (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
 times = [k.seconds for k in keyframes]
 input = gather_accessor(
@@ -145,7 +145,7 @@ def __convert_keyframes(armature_uuid, bone_name, channel, keyframes, action_nam
 transform = correction_matrix_local
 values = []
-fps = bpy.context.scene.render.fps
+fps = (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
 for keyframe in keyframes:
 # Transform the data and build gltf control points
 value = gltf2_blender_math.transform(keyframe.value, target_datapath, transform, False)
@@ -206,6 +206,8 @@ def __gather_interpolation(node_channel_is_animated, node_channel_interpolation,
 elif len(keyframes) == 1:
 if node_channel_is_animated is False:
 return "STEP"
+elif node_channel_interpolation == "CUBICSPLINE":
+return "LINEAR" # We can't have a single keyframe with CUBICSPLINE
 else:
 return node_channel_interpolation
 else:


@@ -78,7 +78,7 @@ def __convert_keyframes(obj_uuid: str, channel: str, keyframes, action_name: str
 if obj_uuid in export_settings['slide'].keys() and action_name in export_settings['slide'][obj_uuid].keys():
 for k in keyframes:
 k.frame += -export_settings['slide'][obj_uuid][action_name]
-k.seconds = k.frame / bpy.context.scene.render.fps
+k.seconds = k.frame / (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
 times = [k.seconds for k in keyframes]
 input = gather_accessor(
@@ -100,7 +100,7 @@ def __convert_keyframes(obj_uuid: str, channel: str, keyframes, action_name: str
 values = []
-fps = bpy.context.scene.render.fps
+fps = (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
 for keyframe in keyframes:
 # Transform the data and build gltf control points
@@ -148,6 +148,8 @@ def __gather_interpolation(
 elif len(keyframes) == 1:
 if node_channel_is_animated is False:
 return "STEP"
+elif node_channel_interpolation == "CUBICSPLINE":
+return "LINEAR" # We can't have a single keyframe with CUBICSPLINE
 else:
 return node_channel_interpolation
 else:


@@ -66,7 +66,7 @@ def __convert_keyframes(obj_uuid, keyframes, action_name: str, export_settings):
 if obj_uuid in export_settings['slide'].keys() and action_name in export_settings['slide'][obj_uuid].keys():
 for k in keyframes:
 k.frame += -export_settings['slide'][obj_uuid][action_name]
-k.seconds = k.frame / bpy.context.scene.render.fps
+k.seconds = k.frame / (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
 times = [k.seconds for k in keyframes]
 input = gather_accessor(


@@ -29,13 +29,27 @@ def gather_node(vnode, export_settings):
 if skin is not None:
 vnode.skin = skin
+# Hook to check if we should export mesh or not (force it to None)
+class GltfHookNodeMesh:
+def __init__(self):
+self.export_mesh = True
+gltf_hook_node_mesh = GltfHookNodeMesh()
+export_user_extensions('gather_node_mesh_hook', export_settings, gltf_hook_node_mesh, blender_object)
+if gltf_hook_node_mesh.export_mesh is True:
+mesh = __gather_mesh(vnode, blender_object, export_settings)
+else:
+mesh = None
 node = gltf2_io.Node(
 camera=__gather_camera(vnode, export_settings),
 children=__gather_children(vnode, export_settings),
 extensions=__gather_extensions(vnode, export_settings),
 extras=__gather_extras(blender_object, export_settings),
 matrix=__gather_matrix(blender_object, export_settings),
-mesh=__gather_mesh(vnode, blender_object, export_settings),
+mesh=mesh,
 name=__gather_name(blender_object, export_settings),
 rotation=None,
 scale=None,
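For add-on authors: the new gather_node_mesh_hook is called with the GltfHookNodeMesh holder before the node's mesh is gathered, so an exporter user extension can force the mesh to None. A minimal, hypothetical extension sketch (the class body and custom property are illustrative, not part of the patch):

```python
class glTF2ExportUserExtension:
    # Hypothetical: skip mesh export for objects tagged with a custom property.
    # The hook receives the holder, the Blender object, then export_settings.
    def gather_node_mesh_hook(self, gltf_hook_node_mesh, blender_object, export_settings):
        if blender_object.get("skip_mesh_export"):
            gltf_hook_node_mesh.export_mesh = False
```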


@@ -11,6 +11,7 @@ from ...io.com.gltf2_io_constants import ROUNDING_DIGIT
 from ...io.exp.gltf2_io_user_extensions import export_user_extensions
 from ...io.com import gltf2_io_constants
 from ..com import gltf2_blender_conversion
+from ..com.gltf2_blender_utils import fast_structured_np_unique
 from .material.gltf2_blender_gather_materials import get_base_material, get_material_from_idx, get_active_uvmap_index, get_new_material_texture_shared
 from .material.gltf2_blender_gather_texture_info import gather_udim_texture_info
 from . import gltf2_blender_gather_skins
@@ -334,8 +335,10 @@ class PrimitiveCreator:
 def populate_dots_data(self):
-vidxs = np.empty(len(self.blender_mesh.loops))
-self.blender_mesh.loops.foreach_get('vertex_index', vidxs)
+corner_vertex_indices = gltf2_blender_conversion.get_attribute(self.blender_mesh.attributes, '.corner_vert', 'INT', 'CORNER')
+if corner_vertex_indices:
+vidxs = np.empty(len(self.blender_mesh.loops), dtype=np.intc)
+corner_vertex_indices.data.foreach_get('value', vidxs)
 self.dots['vertex_index'] = vidxs
 del vidxs
@@ -412,11 +415,19 @@ class PrimitiveCreator:
 # Now we need to get data and populate
 for attr in self.uvmap_attribute_list:
 if attr + str(0) not in self.dots.dtype.names: # In case user exports custom attributes, we may have it already
-# Vector in custom Attributes are Vector3, but keeping only the first two data
+# Vector in custom Attributes are Vector2 or Vector3 (but keeping only the first two data)
+if self.blender_mesh.attributes[attr].data_type == "FLOAT_VECTOR":
 data = np.empty(len(self.blender_mesh.loops) * 3, gltf2_blender_conversion.get_numpy_type('FLOAT2'))
 self.blender_mesh.attributes[attr].data.foreach_get('vector', data)
 data = data.reshape(-1, 3)
 data = data[:,:2]
+elif self.blender_mesh.attributes[attr].data_type == "FLOAT2":
+data = np.empty(len(self.blender_mesh.loops) * 2, gltf2_blender_conversion.get_numpy_type('FLOAT2'))
+self.blender_mesh.attributes[attr].data.foreach_get('vector', data)
+data = data.reshape(-1, 2)
+else:
+print_console('WARNING', 'We are not managing this case yet (UVMap as custom attribute for unknown type)')
+continue
 # Blender UV space -> glTF UV space
 # u,v -> u,1-v
 data[:, 1] *= -1
@@ -621,7 +632,7 @@ class PrimitiveCreator:
 def primitive_creation_shared(self):
 primitives = []
-self.dots, shared_dot_indices = np.unique(self.dots, return_inverse=True)
+self.dots, shared_dot_indices = fast_structured_np_unique(self.dots, return_inverse=True)
 self.blender_idxs = self.dots['vertex_index']
@@ -696,7 +707,7 @@ class PrimitiveCreator:
 # Extract just dots used by this primitive, deduplicate them, and
 # calculate indices into this deduplicated list.
 self.prim_dots = self.dots[dot_indices]
-self.prim_dots, indices = np.unique(self.prim_dots, return_inverse=True)
+self.prim_dots, indices = fast_structured_np_unique(self.prim_dots, return_inverse=True)
 if len(self.prim_dots) == 0:
 continue
@@ -770,7 +781,7 @@ class PrimitiveCreator:
 if self.blender_idxs_edges.shape[0] > 0:
 # Export one glTF vert per unique Blender vert in a loose edge
 self.blender_idxs = self.blender_idxs_edges
-dots_edges, indices = np.unique(self.dots_edges, return_inverse=True)
+dots_edges, indices = fast_structured_np_unique(self.dots_edges, return_inverse=True)
 self.blender_idxs = np.unique(self.blender_idxs_edges)
 self.attributes_edges_points = {}
@@ -871,14 +882,21 @@ class PrimitiveCreator:
 def __get_positions(self):
 self.locs = np.empty(len(self.blender_mesh.vertices) * 3, dtype=np.float32)
-source = self.key_blocks[0].relative_key.data if self.key_blocks else self.blender_mesh.vertices
-source.foreach_get('co', self.locs)
+if self.key_blocks:
+source = self.key_blocks[0].relative_key.points
+foreach_attribute = 'co'
+else:
+position_attribute = gltf2_blender_conversion.get_attribute(self.blender_mesh.attributes, 'position', 'FLOAT_VECTOR', 'POINT')
+source = position_attribute.data if position_attribute else None
+foreach_attribute = 'vector'
+if source:
+source.foreach_get(foreach_attribute, self.locs)
 self.locs = self.locs.reshape(len(self.blender_mesh.vertices), 3)
 self.morph_locs = []
 for key_block in self.key_blocks:
 vs = np.empty(len(self.blender_mesh.vertices) * 3, dtype=np.float32)
-key_block.data.foreach_get('co', vs)
+key_block.points.foreach_get('co', vs)
 vs = vs.reshape(len(self.blender_mesh.vertices), 3)
 self.morph_locs.append(vs)
@@ -1116,7 +1134,7 @@ class PrimitiveCreator:
 def __get_uvs_attribute(self, blender_uv_idx, attr):
 layer = self.blender_mesh.uv_layers[blender_uv_idx]
 uvs = np.empty(len(self.blender_mesh.loops) * 2, dtype=np.float32)
-layer.data.foreach_get('uv', uvs)
+layer.uv.foreach_get('vector', uvs)
 uvs = uvs.reshape(len(self.blender_mesh.loops), 2)
 # Blender UV space -> glTF UV space
@@ -1136,7 +1154,7 @@ class PrimitiveCreator:
 self.normals = np.array(self.normals, dtype=np.float32)
 else:
 self.normals = np.empty(len(self.blender_mesh.loops) * 3, dtype=np.float32)
-self.blender_mesh.loops.foreach_get('normal', self.normals)
+self.blender_mesh.corner_normals.foreach_get('vector', self.normals)
 self.normals = self.normals.reshape(len(self.blender_mesh.loops), 3)


@@ -568,6 +568,10 @@ class VExportTree:
 hasattr(self.nodes[n.armature], "need_neutral_bone")]: #all skin meshes objects where neutral bone is needed
 # Only for meshes, as curve can't have skin data (no weights pain available)
+# Be sure to add it to really exported meshes
+if n.node.skin is None:
+print("WARNING: {} has no skin, skipping adding neutral bone data on it.".format(n.blender_object.name))
+continue
 if n.armature not in added_armatures:


@@ -37,8 +37,11 @@ def get_material_cache_key(blender_material, export_settings):
 # Use id of material
 # Do not use bpy.types that can be unhashable
 # Do not use material name, that can be not unique (when linked)
+# We use here the id of original material as for apply modifier, the material has a new id
+# So, when no modifier applied => original is the same id
+# And when modifier applied => new one is different id, but original is still the same
 return (
-(id(blender_material),),
+(id(blender_material.original),),
 )
 @cached_by_key(key=get_material_cache_key)


@@ -144,8 +144,10 @@ def __gather_metallic_roughness_texture(blender_material, orm_texture, export_se
 # Using directlty the Blender socket object
 if not hasMetal and not hasRough:
 metallic_roughness = get_socket_from_gltf_material_node(blender_material, "MetallicRoughness")
-if metallic_roughness is None or not has_image_node_from_socket(metallic_roughness, export_settings):
+if metallic_roughness.socket is None or not has_image_node_from_socket(metallic_roughness, export_settings):
 return None, {}, {}, None
+else:
+texture_input = (metallic_roughness, metallic_roughness)
 elif not hasMetal:
 texture_input = (roughness_socket,)
 elif not hasRough:
@@ -154,6 +156,7 @@ def __gather_metallic_roughness_texture(blender_material, orm_texture, export_se
 texture_input = (metallic_socket, roughness_socket)
 tex, uvmap_info, udim_info, factor = gather_texture_info(
 texture_input[0],
 orm_texture or texture_input,
 export_settings,


@@ -177,7 +177,7 @@ def get_socket_from_gltf_material_node(blender_material: bpy.types.Material, nam
 if blender_material.node_tree and blender_material.use_nodes:
 nodes = get_material_nodes(blender_material.node_tree, [blender_material], bpy.types.ShaderNodeGroup)
 # Some weird node groups with missing datablock can have no node_tree, so checking n.node_tree (See #1797)
-nodes = [n for n in nodes if n[0].node_tree is not None and ( n[0].node_tree.name.lower().startswith(get_gltf_old_group_node_name()) or n[0].node_tree.name.lower() in gltf_node_group_names)]
+nodes = [n for n in nodes if n[0].node_tree is not None and any([[n[0].node_tree.name.lower().startswith(g) for g in gltf_node_group_names]])]
 inputs = sum([[(input, node[1]) for input in node[0].inputs if input.name == name] for node in nodes], [])
 if inputs:
 return NodeSocket(inputs[0][0], inputs[0][1])
@@ -461,6 +461,12 @@ def get_vertex_color_info(color_socket, alpha_socket, export_settings):
 attribute_color_type = "active"
 elif use_vc is True and use_active is None and attribute_color is not None:
 attribute_color_type = "name"
+elif node.node.type in ["ATTRIBUTE", "VERTEX_COLOR"]:
+use_vc, attribute_color, use_active = get_attribute_name(NodeSocket(node.node.outputs[0], node.group_path), export_settings)
+if use_vc is True and use_active is True:
+attribute_color_type = "active"
+elif use_vc is True and use_active is None and attribute_color is not None:
+attribute_color_type = "name"
 if alpha_socket is not None and alpha_socket.socket is not None:
 node = previous_node(alpha_socket)
@@ -473,6 +479,12 @@ def get_vertex_color_info(color_socket, alpha_socket, export_settings):
 attribute_alpha_type = "active"
 elif use_vc is True and use_active is None and attribute_alpha is not None:
 attribute_alpha_type = "name"
+elif node.node.type in ["ATTRIBUTE", "VERTEX_COLOR"]:
+use_vc, attribute_color, use_active = get_attribute_name(NodeSocket(node.node.outputs[0], node.group_path), export_settings)
+if use_vc is True and use_active is True:
+attribute_color_type = "active"
+elif use_vc is True and use_active is None and attribute_color is not None:
+attribute_color_type = "name"
 return {"color": attribute_color, "alpha": attribute_alpha, "color_type": attribute_color_type, "alpha_type": attribute_alpha_type}


@@ -1,157 +0,0 @@
# SPDX-FileCopyrightText: 2018-2021 The glTF-Blender-IO authors
#
# SPDX-License-Identifier: Apache-2.0
from ...io.com.gltf2_io import TextureInfo, MaterialNormalTextureInfoClass
from .gltf2_blender_texture import texture
# [Texture] => [Separate R] => [Clearcoat Factor] =>
def clearcoat(mh, location, clearcoat_socket):
x, y = location
try:
ext = mh.pymat.extensions['KHR_materials_clearcoat']
except Exception:
return
clearcoat_factor = ext.get('clearcoatFactor', 0)
tex_info = ext.get('clearcoatTexture')
if tex_info is not None:
tex_info = TextureInfo.from_dict(tex_info)
if clearcoat_socket is None:
return
if tex_info is None:
clearcoat_socket.default_value = clearcoat_factor
return
# Mix clearcoat factor
if clearcoat_factor != 1:
node = mh.node_tree.nodes.new('ShaderNodeMath')
node.label = 'Clearcoat Factor'
node.location = x - 140, y
node.operation = 'MULTIPLY'
# Outputs
mh.node_tree.links.new(clearcoat_socket, node.outputs[0])
# Inputs
clearcoat_socket = node.inputs[0]
node.inputs[1].default_value = clearcoat_factor
x -= 200
# Separate RGB
node = mh.node_tree.nodes.new('ShaderNodeSeparateColor')
node.location = x - 150, y - 75
# Outputs
mh.node_tree.links.new(clearcoat_socket, node.outputs['Red'])
# Inputs
clearcoat_socket = node.inputs[0]
x -= 200
texture(
mh,
tex_info=tex_info,
label='CLEARCOAT',
location=(x, y),
is_data=True,
color_socket=clearcoat_socket,
)
# [Texture] => [Separate G] => [Roughness Factor] =>
def clearcoat_roughness(mh, location, roughness_socket):
x, y = location
try:
ext = mh.pymat.extensions['KHR_materials_clearcoat']
except Exception:
return
roughness_factor = ext.get('clearcoatRoughnessFactor', 0)
tex_info = ext.get('clearcoatRoughnessTexture')
if tex_info is not None:
tex_info = TextureInfo.from_dict(tex_info)
if roughness_socket is None:
return
if tex_info is None:
roughness_socket.default_value = roughness_factor
return
# Mix roughness factor
if roughness_factor != 1:
node = mh.node_tree.nodes.new('ShaderNodeMath')
node.label = 'Clearcoat Roughness Factor'
node.location = x - 140, y
node.operation = 'MULTIPLY'
# Outputs
mh.node_tree.links.new(roughness_socket, node.outputs[0])
# Inputs
roughness_socket = node.inputs[0]
node.inputs[1].default_value = roughness_factor
x -= 200
# Separate RGB (roughness is in G)
node = mh.node_tree.nodes.new('ShaderNodeSeparateColor')
node.location = x - 150, y - 75
# Outputs
mh.node_tree.links.new(roughness_socket, node.outputs['Green'])
# Inputs
color_socket = node.inputs[0]
x -= 200
texture(
mh,
tex_info=tex_info,
label='CLEARCOAT ROUGHNESS',
location=(x, y),
is_data=True,
color_socket=color_socket,
)
# [Texture] => [Normal Map] =>
def clearcoat_normal(mh, location, normal_socket):
x,y = location
try:
ext = mh.pymat.extensions['KHR_materials_clearcoat']
except Exception:
return
tex_info = ext.get('clearcoatNormalTexture')
if tex_info is not None:
tex_info = MaterialNormalTextureInfoClass.from_dict(tex_info)
if tex_info is None:
return
# Normal map
node = mh.node_tree.nodes.new('ShaderNodeNormalMap')
node.location = x - 150, y - 40
# Set UVMap
uv_idx = tex_info.tex_coord or 0
try:
uv_idx = tex_info.extensions['KHR_texture_transform']['texCoord']
except Exception:
pass
node.uv_map = 'UVMap' if uv_idx == 0 else 'UVMap.%03d' % uv_idx
# Set strength
scale = tex_info.scale
scale = scale if scale is not None else 1
node.inputs['Strength'].default_value = scale
# Outputs
mh.node_tree.links.new(normal_socket, node.outputs['Normal'])
# Inputs
color_socket = node.inputs['Color']
x -= 200
texture(
mh,
tex_info=tex_info,
label='CLEARCOAT NORMAL',
location=(x, y),
is_data=True,
color_socket=color_socket,
)


@@ -1,13 +0,0 @@
# SPDX-FileCopyrightText: 2018-2021 The glTF-Blender-IO authors
#
# SPDX-License-Identifier: Apache-2.0
from ...io.com.gltf2_io_constants import GLTF_IOR
def ior(mh, ior_socket):
try:
ext = mh.pymat.extensions['KHR_materials_ior']
except Exception:
return
ior = ext.get('ior', GLTF_IOR)
ior_socket.default_value = ior


@@ -2,159 +2,202 @@
 #
 # SPDX-License-Identifier: Apache-2.0
+import bpy
 from ...io.com.gltf2_io import TextureInfo
 from .gltf2_blender_pbrMetallicRoughness import \
-base_color, emission, normal, occlusion, make_output_nodes, make_settings_node
-from .gltf2_blender_texture import texture
+base_color, emission, normal, occlusion, make_settings_node
+from .gltf2_blender_material_utils import color_factor_and_texture
+from .gltf2_blender_texture import texture, get_source
+from .gltf2_blender_image import BlenderImage
+import numpy as np
 def pbr_specular_glossiness(mh):
 """Creates node tree for pbrSpecularGlossiness materials."""
-# This does option #1 from
-# https://github.com/KhronosGroup/glTF-Blender-IO/issues/303
-# Sum a Glossy and Diffuse Shader
-glossy_node = mh.node_tree.nodes.new('ShaderNodeBsdfGlossy')
-diffuse_node = mh.node_tree.nodes.new('ShaderNodeBsdfDiffuse')
-add_node = mh.node_tree.nodes.new('ShaderNodeAddShader')
-glossy_node.location = 10, 220
-diffuse_node.location = 10, 0
-add_node.location = 230, 100
-mh.node_tree.links.new(add_node.inputs[0], glossy_node.outputs[0])
-mh.node_tree.links.new(add_node.inputs[1], diffuse_node.outputs[0])
-emission_socket, alpha_socket, _ = make_output_nodes(
-mh,
-location=(370, 250),
-additional_location=None, #No additional location needed for SpecGloss
-shader_socket=add_node.outputs[0],
-make_emission_socket=mh.needs_emissive(),
-make_alpha_socket=not mh.is_opaque(),
-make_volume_socket=None # No possible to have KHR_materials_volume with specular/glossiness
-)
-if emission_socket:
-emission(
-mh,
-location=(-200, 860),
-color_socket=emission_socket,
-strength_socket=emission_socket.node.inputs['Strength']
-)
+ext = mh.get_ext('KHR_materials_pbrSpecularGlossiness', {})
+pbr_node = mh.nodes.new('ShaderNodeBsdfPrincipled')
+out_node = mh.nodes.new('ShaderNodeOutputMaterial')
+pbr_node.location = 10, 300
+out_node.location = 300, 300
+mh.links.new(pbr_node.outputs[0], out_node.inputs[0])
+locs = calc_locations(mh, ext)
 base_color(
 mh,
 is_diffuse=True,
-location=(-200, 380),
-color_socket=diffuse_node.inputs['Color'],
-alpha_socket=alpha_socket,
+location=locs['diffuse'],
+color_socket=pbr_node.inputs['Base Color'],
+alpha_socket=pbr_node.inputs['Alpha'] if not mh.is_opaque() else None,
 )
-specular_glossiness(
+emission(
 mh,
-location=(-200, -100),
-specular_socket=glossy_node.inputs['Color'],
-roughness_socket=glossy_node.inputs['Roughness'],
-)
-copy_socket(
-mh,
-copy_from=glossy_node.inputs['Roughness'],
-copy_to=diffuse_node.inputs['Roughness'],
+location=locs['emission'],
+color_socket=pbr_node.inputs['Emission Color'],
+strength_socket=pbr_node.inputs['Emission Strength'],
 )
 normal(
 mh,
-location=(-200, -580),
-normal_socket=glossy_node.inputs['Normal'],
-)
-copy_socket(
-mh,
-copy_from=glossy_node.inputs['Normal'],
-copy_to=diffuse_node.inputs['Normal'],
+location=locs['normal'],
+normal_socket=pbr_node.inputs['Normal'],
 )
 if mh.pymat.occlusion_texture is not None:
 if mh.settings_node is None:
 mh.settings_node = make_settings_node(mh)
-mh.settings_node.location = (610, -1060)
+mh.settings_node.location = 10, 425
+mh.settings_node.width = 240
 occlusion(
 mh,
-location=(510, -970),
+location=locs['occlusion'],
 occlusion_socket=mh.settings_node.inputs['Occlusion'],
 )
+# The F0 color is the specular tint modulated by
+# ((1-IOR)/(1+IOR))^2. Setting IOR=1000 makes this factor
+# approximately 1.
+pbr_node.inputs['IOR'].default_value = 1000
-# [Texture] => [Spec/Gloss Factor] => [Gloss to Rough] =>
-def specular_glossiness(mh, location, specular_socket, roughness_socket):
-x, y = location
-spec_factor = mh.pymat.extensions \
-['KHR_materials_pbrSpecularGlossiness'] \
-.get('specularFactor', [1, 1, 1])
-gloss_factor = mh.pymat.extensions \
-['KHR_materials_pbrSpecularGlossiness'] \
-.get('glossinessFactor', 1)
-spec_gloss_texture = mh.pymat.extensions \
-['KHR_materials_pbrSpecularGlossiness'] \
-.get('specularGlossinessTexture', None)
-if spec_gloss_texture is not None:
-spec_gloss_texture = TextureInfo.from_dict(spec_gloss_texture)
-if spec_gloss_texture is None:
-specular_socket.default_value = spec_factor + [1]
-roughness_socket.default_value = 1 - gloss_factor
-return
-# (1 - x) converts glossiness to roughness
-node = mh.node_tree.nodes.new('ShaderNodeInvert')
-node.label = 'Invert (Gloss to Rough)'
-node.location = x - 140, y - 75
-# Outputs
-mh.node_tree.links.new(roughness_socket, node.outputs[0])
-# Inputs
-node.inputs['Fac'].default_value = 1
-glossiness_socket = node.inputs['Color']
-x -= 250
-# Mix in spec/gloss factor
-if spec_factor != [1, 1, 1] or gloss_factor != 1:
-if spec_factor != [1, 1, 1]:
-node = mh.node_tree.nodes.new('ShaderNodeMix')
-node.data_type = 'RGBA'
-node.label = 'Specular Factor'
-node.location = x - 140, y
-node.blend_type = 'MULTIPLY'
-# Outputs
-mh.node_tree.links.new(specular_socket, node.outputs[2])
-# Inputs
-node.inputs['Factor'].default_value = 1.0
-specular_socket = node.inputs[6]
-node.inputs[7].default_value = spec_factor + [1]
-if gloss_factor != 1:
-node = mh.node_tree.nodes.new('ShaderNodeMath')
-node.label = 'Glossiness Factor'
-node.location = x - 140, y - 200
-node.operation = 'MULTIPLY'
-# Outputs
-mh.node_tree.links.new(glossiness_socket, node.outputs[0])
-# Inputs
-glossiness_socket = node.inputs[0]
-node.inputs[1].default_value = gloss_factor
-x -= 200
-texture(
-mh,
-tex_info=spec_gloss_texture,
-label='SPECULAR GLOSSINESS',
-location=(x, y),
-color_socket=specular_socket,
-alpha_socket=glossiness_socket,
-)
+# Specular
+color_factor_and_texture(
+mh,
+location=locs['specular'],
+label='Specular Color',
+socket=pbr_node.inputs['Specular Tint'],
+factor=ext.get('specularFactor', [1, 1, 1]),
+tex_info=ext.get('specularGlossinessTexture'),
+)
+# Glossiness
+glossiness(
+mh,
+ext,
+location=locs['glossiness'],
+roughness_socket=pbr_node.inputs['Roughness'],
+)
-def copy_socket(mh, copy_from, copy_to):
-"""Copy the links/default value from one socket to another."""
-copy_to.default_value = copy_from.default_value
-for link in copy_from.links:
-mh.node_tree.links.new(copy_to, link.from_socket)
+def glossiness(mh, ext, location, roughness_socket):
+# Glossiness = glossinessFactor * specularGlossinessTexture.alpha
+# Roughness = 1 - Glossiness
+factor = ext.get('glossinessFactor', 1)
+tex_info = ext.get('specularGlossinessTexture')
if tex_info is not None:
tex_info = TextureInfo.from_dict(tex_info)
# Simple case: no texture
if tex_info is None or factor == 0:
roughness_socket.default_value = 1 - factor
return
# Bake an image with the roughness. The reason we don't do
# 1-X with a node is that won't export.
roughness_img = make_roughness_image(mh, factor, tex_info)
if roughness_img is None:
return
texture(
mh,
tex_info,
location=location,
label='ROUGHNESS',
color_socket=None,
alpha_socket=roughness_socket,
is_data=False,
forced_image=roughness_img,
)
def make_roughness_image(mh, glossiness_factor, tex_info):
"""
Bakes the roughness (1-glossiness) into an image. The
roughness is in the alpha channel.
"""
pytexture = mh.gltf.data.textures[tex_info.index]
source = get_source(mh, pytexture)
if source is None:
return None
pyimg = mh.gltf.data.images[source]
BlenderImage.create(mh.gltf, source)
# See if cached roughness texture already exists
if hasattr(pyimg, 'blender_roughness_image_name'):
return bpy.data.images[pyimg.blender_roughness_image_name]
orig_image = bpy.data.images[pyimg.blender_image_name]
# TODO: check for placeholder image and bail
# Make a copy of the specularGlossiness texture
# Avoids interfering if it's used elsewhere
image = orig_image.copy()
w, h = image.size
pixels = np.empty(w * h * 4, dtype=np.float32)
image.pixels.foreach_get(pixels)
pixels = pixels.reshape((w, h, 4))
# Glossiness = GlossinessFactor * Texture.alpha
# Roughness = 1 - Glossiness
if glossiness_factor != 1:
pixels[:, :, 3] *= glossiness_factor
pixels[:, :, 3] *= -1
pixels[:, :, 3] += 1
pixels = pixels.reshape(w * h * 4)
image.pixels.foreach_set(pixels)
image.pack()
# Cache for reuse
pyimg.blender_roughness_image_name = image.name
return image
def calc_locations(mh, ext):
"""Calculate locations to place each bit of the node graph at."""
# Lay the blocks out top-to-bottom, aligned on the right
x = -200
y = 0
height = 460 # height of each block
locs = {}
locs['occlusion'] = (x, y)
if mh.pymat.occlusion_texture is not None:
y -= height
locs['diffuse'] = (x, y)
if 'diffuseTexture' in ext or mh.vertex_color:
y -= height
locs['glossiness'] = (x, y)
gloss_factor = ext.get('glossinessFactor', 1)
if 'specularGlossinessTexture' in ext and gloss_factor != 0:
y -= height
locs['normal'] = (x, y)
if mh.pymat.normal_texture is not None:
y -= height
locs['specular'] = (x, y)
if 'specularGlossinessTexture' in ext:
y -= height
locs['emission'] = (x, y)
if mh.pymat.emissive_texture is not None:
y -= height
# Center things
total_height = -y
y_offset = total_height / 2 - 20
for key in locs:
x, y = locs[key]
locs[key] = (x, y + y_offset)
return locs
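The IOR trick in the new code (pbr_node.inputs['IOR'].default_value = 1000) leans on the Fresnel relation quoted in the comment; written out:

F0 = ((1 - IOR) / (1 + IOR))^2, so with IOR = 1000: F0 = (-999 / 1001)^2 ≈ 0.996,

which leaves the Specular Tint input almost unmodulated, as the spec/gloss conversion expects.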


@@ -1,88 +0,0 @@
# SPDX-FileCopyrightText: 2018-2022 The glTF-Blender-IO authors
#
# SPDX-License-Identifier: Apache-2.0
from ...io.com.gltf2_io import TextureInfo
from .gltf2_blender_texture import texture
def sheen( mh,
location_sheenTint,
location_sheenRoughness,
sheen_socket,
sheenTint_socket,
sheenRoughness_socket
):
x_sheenTint, y_sheenTint = location_sheenTint
x_sheenRoughness, y_sheenRoughness = location_sheenRoughness
try:
ext = mh.pymat.extensions['KHR_materials_sheen']
except Exception:
return
sheen_socket.default_value = 1.0
sheenTintFactor = ext.get('sheenColorFactor', [0.0, 0.0, 0.0])
tex_info_color = ext.get('sheenColorTexture')
if tex_info_color is not None:
tex_info_color = TextureInfo.from_dict(tex_info_color)
sheenRoughnessFactor = ext.get('sheenRoughnessFactor', 0.0)
tex_info_roughness = ext.get('sheenRoughnessTexture')
if tex_info_roughness is not None:
tex_info_roughness = TextureInfo.from_dict(tex_info_roughness)
if tex_info_color is None:
sheenTintFactor.extend([1.0])
sheenTint_socket.default_value = sheenTintFactor
else:
# Mix sheenTint factor
sheenTintFactor = sheenTintFactor + [1.0]
if sheenTintFactor != [1.0, 1.0, 1.0, 1.0]:
node = mh.node_tree.nodes.new('ShaderNodeMix')
node.label = 'sheenTint Factor'
node.data_type = 'RGBA'
node.location = x_sheenTint - 140, y_sheenTint
node.blend_type = 'MULTIPLY'
# Outputs
mh.node_tree.links.new(sheenTint_socket, node.outputs[2])
# Inputs
node.inputs['Factor'].default_value = 1.0
sheenTint_socket = node.inputs[6]
node.inputs[7].default_value = sheenTintFactor
x_sheenTint -= 200
texture(
mh,
tex_info=tex_info_color,
label='SHEEN COLOR',
location=(x_sheenTint, y_sheenTint),
color_socket=sheenTint_socket
)
if tex_info_roughness is None:
sheenRoughness_socket.default_value = sheenRoughnessFactor
else:
# Mix sheenRoughness factor
if sheenRoughnessFactor != 1.0:
node = mh.node_tree.nodes.new('ShaderNodeMath')
node.label = 'shennRoughness Factor'
node.location = x_sheenRoughness - 140, y_sheenRoughness
node.operation = 'MULTIPLY'
# Outputs
mh.node_tree.links.new(sheenRoughness_socket, node.outputs[0])
# Inputs
sheenRoughness_socket = node.inputs[0]
node.inputs[1].default_value = sheenRoughnessFactor
x_sheenRoughness -= 200
texture(
mh,
tex_info=tex_info_roughness,
label='SHEEN ROUGHNESS',
location=(x_sheenRoughness, y_sheenRoughness),
is_data=True,
color_socket=None,
alpha_socket=sheenRoughness_socket
)
return


@@ -1,94 +0,0 @@
# SPDX-FileCopyrightText: 2018-2021 The glTF-Blender-IO authors
#
# SPDX-License-Identifier: Apache-2.0
import bpy
from ...io.com.gltf2_io import TextureInfo
from .gltf2_blender_texture import texture
from ...io.com.gltf2_io_constants import GLTF_IOR
from ..exp.material.extensions.gltf2_blender_image import TmpImageGuard, make_temp_image_copy #TODO move to com
def specular(mh, location_specular,
location_specular_tint,
specular_socket,
specular_tint_socket):
if specular_socket is None:
return
if specular_tint_socket is None:
return
try:
ext = mh.pymat.extensions['KHR_materials_specular']
except Exception:
return
# First check if we need a texture or not -> retrieve all info needed
specular_factor = ext.get('specularFactor', 1.0)
tex_specular_info = ext.get('specularTexture')
if tex_specular_info is not None:
tex_specular_info = TextureInfo.from_dict(tex_specular_info)
specular_tint_factor = ext.get('specularColorFactor', [1.0, 1.0, 1.0])[:3]
tex_specular_tint_info = ext.get('specularColorTexture')
if tex_specular_tint_info is not None:
tex_specular_tint_info = TextureInfo.from_dict(tex_specular_tint_info)
x_specular, y_specular = location_specular
x_specularcolor, y_specularcolor = location_specular_tint
if tex_specular_info is None:
specular_socket.default_value = specular_factor / 2.0
else:
# Mix specular factor
if specular_factor != 1.0:
node = mh.node_tree.nodes.new('ShaderNodeMath')
node.label = 'Specular Factor'
node.location = x_specular - 140, y_specular
node.operation = 'MULTIPLY'
# Outputs
mh.node_tree.links.new(specular_socket, node.outputs[0])
# Inputs
specular_socket = node.inputs[0]
node.inputs[1].default_value = specular_factor / 2.0
x_specular -= 200
texture(
mh,
tex_info=tex_specular_info,
label='SPECULAR',
location=(x_specular, y_specular),
is_data=True,
color_socket=None,
alpha_socket=specular_socket
)
if tex_specular_tint_info is None:
specular_tint_factor = list(specular_tint_factor)
specular_tint_factor.extend([1.0])
specular_tint_socket.default_value = specular_tint_factor
else:
specular_tint_factor = list(specular_tint_factor) + [1.0]
if specular_tint_factor != [1.0, 1.0, 1.0, 1.0]:
# Mix specularColorFactor
node = mh.node_tree.nodes.new('ShaderNodeMix')
node.label = 'SpecularColor Factor'
node.data_type = 'RGBA'
node.location = x_specularcolor - 140, y_specularcolor
node.blend_type = 'MULTIPLY'
# Outputs
mh.node_tree.links.new(specular_tint_socket, node.outputs[2])
# Inputs
node.inputs['Factor'].default_value = 1.0
specular_tint_socket = node.inputs[6]
node.inputs[7].default_value = specular_tint_factor
x_specularcolor -= 200
texture(
mh,
tex_info=tex_specular_tint_info,
label='SPECULAR COLOR',
location=(x_specularcolor, y_specularcolor),
color_socket=specular_tint_socket,
)


@@ -1,68 +0,0 @@
# SPDX-FileCopyrightText: 2018-2022 The glTF-Blender-IO authors
#
# SPDX-License-Identifier: Apache-2.0
from ...io.com.gltf2_io import TextureInfo
from .gltf2_blender_texture import texture
# [Texture] => [Separate R] => [Transmission Factor] =>
def transmission(mh, location, transmission_socket):
x, y = location
try:
ext = mh.pymat.extensions['KHR_materials_transmission']
except Exception:
return
transmission_factor = ext.get('transmissionFactor', 0)
# Default value is 0, so no transmission
if transmission_factor == 0:
return
# Activate screen refraction (for Eevee)
mh.mat.use_screen_refraction = True
tex_info = ext.get('transmissionTexture')
if tex_info is not None:
tex_info = TextureInfo.from_dict(tex_info)
if transmission_socket is None:
return
if tex_info is None:
transmission_socket.default_value = transmission_factor
return
# Mix transmission factor
if transmission_factor != 1:
node = mh.node_tree.nodes.new('ShaderNodeMath')
node.label = 'Transmission Factor'
node.location = x - 140, y
node.operation = 'MULTIPLY'
# Outputs
mh.node_tree.links.new(transmission_socket, node.outputs[0])
# Inputs
transmission_socket = node.inputs[0]
node.inputs[1].default_value = transmission_factor
x -= 200
# Separate RGB
node = mh.node_tree.nodes.new('ShaderNodeSeparateColor')
node.location = x - 150, y - 75
# Outputs
mh.node_tree.links.new(transmission_socket, node.outputs['Red'])
# Inputs
transmission_socket = node.inputs[0]
x -= 200
texture(
mh,
tex_info=tex_info,
label='TRANSMISSION',
location=(x, y),
is_data=True,
color_socket=transmission_socket,
)


@@ -8,32 +8,45 @@ from .gltf2_blender_pbrMetallicRoughness import base_color, make_output_nodes
 def unlit(mh):
 """Creates node tree for unlit materials."""
 # Emission node for the base color
-emission_node = mh.node_tree.nodes.new('ShaderNodeEmission')
+emission_node = mh.nodes.new('ShaderNodeEmission')
 emission_node.location = 10, 126
-# Lightpath trick: makes Emission visible only to camera rays.
+# Create a "Lightpath trick": makes Emission visible only to
+# camera rays, so it won't "glow" in Cycles.
+#
 # [Is Camera Ray] => [Mix] =>
 # [Transparent] => [ ]
 # [Emission] => [ ]
-lightpath_node = mh.node_tree.nodes.new('ShaderNodeLightPath')
-transparent_node = mh.node_tree.nodes.new('ShaderNodeBsdfTransparent')
-mix_node = mh.node_tree.nodes.new('ShaderNodeMixShader')
+lightpath_node = mh.nodes.new('ShaderNodeLightPath')
+transparent_node = mh.nodes.new('ShaderNodeBsdfTransparent')
+mix_node = mh.nodes.new('ShaderNodeMixShader')
 lightpath_node.location = 10, 600
 transparent_node.location = 10, 240
 mix_node.location = 260, 320
-mh.node_tree.links.new(mix_node.inputs['Fac'], lightpath_node.outputs['Is Camera Ray'])
-mh.node_tree.links.new(mix_node.inputs[1], transparent_node.outputs[0])
-mh.node_tree.links.new(mix_node.inputs[2], emission_node.outputs[0])
+mh.links.new(mix_node.inputs['Fac'], lightpath_node.outputs['Is Camera Ray'])
+mh.links.new(mix_node.inputs[1], transparent_node.outputs[0])
+mh.links.new(mix_node.inputs[2], emission_node.outputs[0])
-_emission_socket, alpha_socket, _ = make_output_nodes(
-mh,
-location=(420, 280) if mh.is_opaque() else (150, 130),
-additional_location=None, #No additional location needed for Unlit
-shader_socket=mix_node.outputs[0],
-make_emission_socket=False,
-make_alpha_socket=not mh.is_opaque(),
-make_volume_socket=None # Not possible to have KHR_materials_volume with unlit
-)
+# Material output
+alpha_socket = None
+out_node = mh.nodes.new('ShaderNodeOutputMaterial')
+if mh.is_opaque():
+out_node.location = 490, 290
+mh.links.new(out_node.inputs[0], mix_node.outputs[0])
+else:
+# Create a "Mix with Transparent" setup so there's a
+# place to put Alpha.
+#
+# Alpha => [Mix] => [Output]
+# [Transparent] => [ ]
+# Color => [ ]
+mix2_node = mh.nodes.new('ShaderNodeMixShader')
+alpha_socket = mix2_node.inputs['Fac']
+mix2_node.location = 490, -50
+out_node.location = 700, -70
+mh.links.new(mix2_node.inputs[1], transparent_node.outputs[0])
+mh.links.new(mix2_node.inputs[2], mix_node.outputs[0])
+mh.links.new(out_node.inputs[0], mix2_node.outputs[0])
 base_color(
 mh,


@@ -1,83 +0,0 @@
# SPDX-FileCopyrightText: 2018-2021 The glTF-Blender-IO authors
#
# SPDX-License-Identifier: Apache-2.0
from ...io.com.gltf2_io import TextureInfo
from .gltf2_blender_texture import texture
def volume(mh, location, volume_socket, thickness_socket):
# implementation based on https://github.com/KhronosGroup/glTF-Blender-IO/issues/1454#issuecomment-928319444
try:
ext = mh.pymat.extensions['KHR_materials_volume']
except Exception:
return
# Attenuation Color
attenuationColor = \
mh.pymat.extensions['KHR_materials_volume'] \
.get('attenuationColor')
# glTF is color3, Blender adds alpha
if attenuationColor is None:
attenuationColor = [1.0, 1.0, 1.0, 1.0]
else:
attenuationColor.extend([1.0])
volume_socket.node.inputs[0].default_value = attenuationColor
# Attenuation Distance / Density
attenuationDistance = mh.pymat.extensions['KHR_materials_volume'].get('attenuationDistance')
if attenuationDistance is None:
density = 0
else:
density = 1.0 / attenuationDistance
volume_socket.node.inputs[1].default_value = density
# thicknessFactor / thicknessTexture
x, y = location
try:
ext = mh.pymat.extensions['KHR_materials_volume']
except Exception:
return
thickness_factor = ext.get('thicknessFactor', 0)
tex_info = ext.get('thicknessTexture')
if tex_info is not None:
tex_info = TextureInfo.from_dict(tex_info)
if thickness_socket is None:
return
if tex_info is None:
thickness_socket.default_value = thickness_factor
return
# Mix thickness factor
if thickness_factor != 1:
node = mh.node_tree.nodes.new('ShaderNodeMath')
node.label = 'Thickness Factor'
node.location = x - 140, y
node.operation = 'MULTIPLY'
# Outputs
mh.node_tree.links.new(thickness_socket, node.outputs[0])
# Inputs
thickness_socket = node.inputs[0]
node.inputs[1].default_value = thickness_factor
x -= 200
# Separate RGB
node = mh.node_tree.nodes.new('ShaderNodeSeparateColor')
node.location = x - 150, y - 75
# Outputs
mh.node_tree.links.new(thickness_socket, node.outputs['Green'])
# Inputs
thickness_socket = node.inputs[0]
x -= 200
texture(
mh,
tex_info=tex_info,
label='THICKNESS',
location=(x, y),
is_data=True,
color_socket=thickness_socket,
)


@@ -129,7 +129,7 @@ class BlenderNodeAnim():
 if values[i].dot(values[i-1]) < 0:
 values[i] = -values[i]
-fps = bpy.context.scene.render.fps
+fps = (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
 coords = [0] * (2 * len(keys))
 coords[::2] = (key[0] * fps for key in keys)


@@ -28,7 +28,7 @@ class BlenderWeightAnim():
        node = gltf.data.nodes[node_idx]
        obj = vnode.blender_object

-       fps = bpy.context.scene.render.fps
+       fps = (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)

        animation = gltf.data.animations[anim_idx]
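Both hunks fold the scene's frame-rate base into the seconds-to-frame conversion. A standalone sketch of the expression used above (the keyframe time value is illustrative, not importer data):

import bpy

scene = bpy.context.scene
fps = scene.render.fps * scene.render.fps_base  # the expression both hunks now use
time_s = 0.5                                    # a glTF keyframe time, in seconds
frame = time_s * fps                            # frame coordinate written to the F-Curve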

View File

@ -0,0 +1,191 @@
# SPDX-FileCopyrightText: 2018-2021 The glTF-Blender-IO authors
#
# SPDX-License-Identifier: Apache-2.0
from ...io.com.gltf2_io import (
TextureInfo,
MaterialNormalTextureInfoClass,
MaterialPBRMetallicRoughness,
)
from .gltf2_blender_texture import texture
class MaterialHelper:
"""Helper class. Stores material stuff to be passed around everywhere."""
def __init__(self, gltf, pymat, mat, vertex_color):
self.gltf = gltf
self.pymat = pymat
self.mat = mat
self.node_tree = mat.node_tree
self.nodes = mat.node_tree.nodes
self.links = mat.node_tree.links
self.vertex_color = vertex_color
if pymat.pbr_metallic_roughness is None:
pymat.pbr_metallic_roughness = \
MaterialPBRMetallicRoughness.from_dict({})
self.settings_node = None
def is_opaque(self):
alpha_mode = self.pymat.alpha_mode
return alpha_mode is None or alpha_mode == 'OPAQUE'
def needs_emissive(self):
return (
self.pymat.emissive_texture is not None or
(self.pymat.emissive_factor or [0, 0, 0]) != [0, 0, 0]
)
def get_ext(self, ext_name, default=None):
if not self.pymat.extensions:
return default
return self.pymat.extensions.get(ext_name, default)
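get_ext replaces the repeated try/except extension lookups still visible in the old code further down in this commit; a minimal sketch of the two patterns side by side (mh as defined above, extension names taken from the calls later in this file):

# Old pattern being replaced:
try:
    volume_ext = mh.pymat.extensions['KHR_materials_volume']
except Exception:
    volume_ext = {}

# New pattern:
volume_ext = mh.get_ext('KHR_materials_volume', {})
thickness_factor = volume_ext.get('thicknessFactor', 0)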
# Creates nodes for multiplying a texture channel and scalar factor.
# [Texture] => [Sep RGB] => [Mul Factor] => socket
def scalar_factor_and_texture(
mh: MaterialHelper,
location,
label,
socket, # socket to connect to
factor, # scalar factor
tex_info, # texture
channel, # texture channel to use (0-4)
):
if isinstance(tex_info, dict):
tex_info = TextureInfo.from_dict(tex_info)
x, y = location
if socket is None:
return
if tex_info is None:
socket.default_value = factor
return
if factor != 1.0:
node = mh.nodes.new('ShaderNodeMath')
node.label = f'{label} Factor'
node.location = x - 140, y
node.operation = 'MULTIPLY'
# Outputs
mh.links.new(socket, node.outputs[0])
# Inputs
socket = node.inputs[0]
node.inputs[1].default_value = factor
x -= 200
if channel != 4:
# Separate RGB
node = mh.nodes.new('ShaderNodeSeparateColor')
node.location = x - 150, y - 75
# Outputs
mh.links.new(socket, node.outputs[channel])
# Inputs
socket = node.inputs[0]
x -= 200
texture(
mh,
tex_info=tex_info,
label=label.upper(),
location=(x, y),
is_data=channel < 4,
color_socket=socket if channel != 4 else None,
alpha_socket=socket if channel == 4 else None,
)
# Creates nodes for multiplying a texture color and color factor.
# [Texture] => [Mix Factor] => socket
def color_factor_and_texture(
mh: MaterialHelper,
location,
label,
socket, # socket to connect to
factor, # color factor
tex_info, # texture
):
if isinstance(tex_info, dict):
tex_info = TextureInfo.from_dict(tex_info)
x, y = location
if socket is None:
return
if tex_info is None:
socket.default_value = [*factor, 1]
return
if factor != [1, 1, 1]:
node = mh.nodes.new('ShaderNodeMix')
node.data_type = 'RGBA'
node.label = f'{label} Factor'
node.location = x - 140, y
node.blend_type = 'MULTIPLY'
# Outputs
mh.links.new(socket, node.outputs[2])
# Inputs
node.inputs['Factor'].default_value = 1
socket = node.inputs[6]
node.inputs[7].default_value = [*factor, 1]
x -= 200
texture(
mh,
tex_info=tex_info,
label=label.upper(),
location=(x, y),
is_data=False,
color_socket=socket,
)
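Note the RGBA padding convention used here and elsewhere in the importer: glTF color factors are RGB, while Blender color sockets expect RGBA. A quick illustration:

factor = [0.8, 0.2, 0.2]      # glTF color3 factor
socket_value = [*factor, 1]   # padded with alpha for the Blender socket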
# [Texture] => [Normal Map] => socket
def normal_map(
mh: MaterialHelper,
location,
label,
socket,
tex_info,
):
if isinstance(tex_info, dict):
tex_info = MaterialNormalTextureInfoClass.from_dict(tex_info)
if not tex_info:
return
x,y = location
# Normal map
node = mh.nodes.new('ShaderNodeNormalMap')
node.location = x - 150, y - 40
# Set UVMap
uv_idx = tex_info.tex_coord or 0
try:
uv_idx = tex_info.extensions['KHR_texture_transform']['texCoord']
except Exception:
pass
node.uv_map = 'UVMap' if uv_idx == 0 else 'UVMap.%03d' % uv_idx
# Set strength
scale = tex_info.scale
scale = scale if scale is not None else 1
node.inputs['Strength'].default_value = scale
# Outputs
mh.links.new(socket, node.outputs['Normal'])
x -= 200
texture(
mh,
tex_info=tex_info,
label=label.upper(),
location=(x, y),
is_data=True,
color_socket=node.inputs['Color'],
)

View File

@ -11,6 +11,7 @@ from ...io.imp.gltf2_io_binary import BinaryData
from ...io.com.gltf2_io_constants import DataType, ComponentType from ...io.com.gltf2_io_constants import DataType, ComponentType
from ...blender.com.gltf2_blender_conversion import get_attribute_type from ...blender.com.gltf2_blender_conversion import get_attribute_type
from ..com.gltf2_blender_extras import set_extras from ..com.gltf2_blender_extras import set_extras
from ..com.gltf2_blender_utils import fast_structured_np_unique
from .gltf2_blender_material import BlenderMaterial from .gltf2_blender_material import BlenderMaterial
from .gltf2_io_draco_compression_extension import decode_primitive from .gltf2_io_draco_compression_extension import decode_primitive
@@ -296,21 +297,22 @@ def do_primitives(gltf, mesh_idx, skin_idx, mesh, ob):
    # Start creating things
    mesh.vertices.add(len(vert_locs))
-   mesh.vertices.foreach_set('co', squish(vert_locs))
+   position_attribute = attribute_ensure(mesh.attributes, 'position', 'FLOAT_VECTOR', 'POINT')
+   position_attribute.data.foreach_set('vector', squish(vert_locs, np.float32))

    mesh.loops.add(len(loop_vidxs))
-   mesh.loops.foreach_set('vertex_index', loop_vidxs)
+   corner_vert_attribute = attribute_ensure(mesh.attributes, '.corner_vert', 'INT', 'CORNER')
+   corner_vert_attribute.data.foreach_set('value', squish(loop_vidxs, np.intc))

    mesh.edges.add(len(edge_vidxs) // 2)
-   mesh.edges.foreach_set('vertices', edge_vidxs)
+   edge_verts_attribute = attribute_ensure(mesh.attributes, '.edge_verts', 'INT32_2D', 'EDGE')
+   edge_verts_attribute.data.foreach_set('value', squish(edge_vidxs, np.intc))

    mesh.polygons.add(num_faces)
    # All polys are tris
    loop_starts = np.arange(0, 3 * num_faces, step=3)
-   loop_totals = np.full(num_faces, 3)
    mesh.polygons.foreach_set('loop_start', loop_starts)
-   mesh.polygons.foreach_set('loop_total', loop_totals)

    for uv_i in range(num_uvs):
        name = 'UVMap' if uv_i == 0 else 'UVMap.%03d' % uv_i
@@ -320,18 +322,13 @@ def do_primitives(gltf, mesh_idx, skin_idx, mesh, ob):
            print("WARNING: UV map is ignored because the maximum number of UV layers has been reached.")
            break
-       layer.data.foreach_set('uv', squish(loop_uvs[uv_i]))
+       layer.uv.foreach_set('vector', squish(loop_uvs[uv_i], np.float32))

    for col_i in range(num_cols):
-       name = 'Col' if col_i == 0 else 'Col.%03d' % col_i
-       layer = mesh.vertex_colors.new(name=name)
-       if layer is None:
-           print("WARNING: Vertex colors are ignored because the maximum number of vertex color layers has been "
-                 "reached.")
-           break
-       mesh.color_attributes[layer.name].data.foreach_set('color', squish(loop_cols[col_i]))
+       name = 'Color' if col_i == 0 else 'Color.%03d' % col_i
+       layer = mesh.color_attributes.new(name, 'BYTE_COLOR', 'CORNER')
+       layer.data.foreach_set('color', squish(loop_cols[col_i], np.float32))

    # Make sure the first Vertex Color Attribute is the rendered one
    if num_cols > 0:

@@ -370,7 +367,7 @@ def do_primitives(gltf, mesh_idx, skin_idx, mesh, ob):
            ob.shape_key_add(name=sk_name)
            key_block = mesh.shape_keys.key_blocks[sk_name]
-           key_block.data.foreach_set('co', squish(sk_vert_locs[sk_i]))
+           key_block.points.foreach_set('co', squish(sk_vert_locs[sk_i], np.float32))

            sk_i += 1
@@ -385,7 +382,8 @@ def do_primitives(gltf, mesh_idx, skin_idx, mesh, ob):
            and 'mappings' in prim.extensions['KHR_materials_variants'].keys()

    if has_materials:
-       material_indices = np.empty(num_faces, dtype=np.uint32)
+       bl_material_index_dtype = np.intc
+       material_indices = np.empty(num_faces, dtype=bl_material_index_dtype)
        empty_material_slot_index = None
        f = 0

@@ -448,7 +446,8 @@ def do_primitives(gltf, mesh_idx, skin_idx, mesh, ob):
                    vari = variant_primitive.variants.add()
                    vari.variant.variant_idx = variant

-       mesh.polygons.foreach_set('material_index', material_indices)
+       material_index_attribute = attribute_ensure(mesh.attributes, 'material_index', 'INT', 'FACE')
+       material_index_attribute.data.foreach_set('value', material_indices)

    # Custom Attributes
    for idx, attr in enumerate(attributes):
@@ -534,27 +533,54 @@ def points_edges_tris(mode, indices):
        # 0---2---4
        #  \ / \ /
        #   1---3
-       # TODO: numpyify
-       def alternate(i, xs):
-           even = i % 2 == 0
-           return xs if even else (xs[0], xs[2], xs[1])
-       tris = np.array([
-           alternate(i, (indices[i], indices[i + 1], indices[i + 2]))
-           for i in range(0, len(indices) - 2)
-       ])
-       tris = squish(tris)
+       # in: 01234
+       # out: 012132234
+       # out (viewed as triplets): 012, 132, 234
+       tris = np.empty((len(indices) - 2) * 3, dtype=np.uint32)
+       # 012__
+       first_indices = indices[:-2]
+       # _123_
+       second_indices = indices[1:-1]
+       # __234
+       third_indices = indices[2:]
+       # Each triplet starts with the first index
+       # 0__, 1__, 2__ <- 012__
+       tris[0::3] = first_indices
+       # Even triplets end with the next two indices in order
+       # _1_, ___, _3_ <- _1_3_ <- _123_
+       # 01_, 1__, 23_
+       tris[1::6] = second_indices[0::2]
+       # __2, ___, __4 <- __2_4 <- __234
+       # 012, 1__, 234
+       tris[2::6] = third_indices[0::2]
+       # Odd triplets end with the next two indices in reverse order
+       # ___, _3_, ___ <- ___3_ <- __234
+       # 012, 13_, 234
+       tris[4::6] = third_indices[1::2]
+       # ___, __2, ___ <- __2__ <- _123_
+       # 012, 132, 234
+       tris[5::6] = second_indices[1::2]
    elif mode == 6:
        # TRIANGLE FAN
        #   3---2
        #  / \ / \
        # 4---0---1
-       # TODO: numpyify
-       tris = np.array([
-           (indices[0], indices[i], indices[i + 1])
-           for i in range(1, len(indices) - 1)
-       ])
-       tris = squish(tris)
+       # in: 01234
+       # out: 012023034
+       # out (viewed as triplets): 012, 023, 034
+       # Start filled with the first index
+       # 000, 000, 000
+       tris = np.full((len(indices) - 2) * 3, indices[0], dtype=np.uint32)
+       # _1_, _2_, _3_ <- _123_
+       tris[1::3] = indices[1:-1]
+       # __2, __3, __4 <- __234
+       tris[2::3] = indices[2:]
    else:
        raise Exception('primitive mode unimplemented: %d' % mode)
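As a self-contained sanity check of the two vectorized triangulations above (plain NumPy, outside the importer):

import numpy as np

indices = np.array([0, 1, 2, 3, 4], dtype=np.uint32)

# TRIANGLE STRIP
tris = np.empty((len(indices) - 2) * 3, dtype=np.uint32)
tris[0::3] = indices[:-2]
tris[1::6] = indices[1:-1][0::2]
tris[2::6] = indices[2:][0::2]
tris[4::6] = indices[2:][1::2]
tris[5::6] = indices[1:-1][1::2]
print(tris.reshape(-1, 3))  # [[0 1 2] [1 3 2] [2 3 4]]

# TRIANGLE FAN
fan = np.full((len(indices) - 2) * 3, indices[0], dtype=np.uint32)
fan[1::3] = indices[1:-1]
fan[2::3] = indices[2:]
print(fan.reshape(-1, 3))   # [[0 1 2] [0 2 3] [0 3 4]]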
@@ -562,9 +588,10 @@ def points_edges_tris(mode, indices):
    return points, edges, tris


-def squish(array):
-    """Squish nD array into 1D array (required by foreach_set)."""
-    return array.reshape(array.size)
+def squish(array, dtype=None):
+    """Squish nD array into a C-contiguous (required for faster access with the buffer protocol in foreach_set) 1D array
+    (required by foreach_set). Optionally converting the array to a different dtype."""
+    return np.ascontiguousarray(array, dtype=dtype).reshape(array.size)


def colors_rgb_to_rgba(rgb):
@@ -646,6 +673,15 @@ def normalize_vecs(vectors):
    norms = np.linalg.norm(vectors, axis=1, keepdims=True)
    np.divide(vectors, norms, out=vectors, where=norms != 0)
def attribute_ensure(attributes, name, data_type, domain):
attribute = attributes.get(name)
if attribute is None:
return attributes.new(name, data_type, domain)
if attribute.domain == domain and attribute.data_type == data_type:
return attribute
# There is an existing attribute, but it has the wrong domain or data_type.
attributes.remove(attribute)
return attributes.new(name, data_type, domain)
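A minimal usage sketch of the new helper, assuming it runs inside Blender with a mesh as the active object (the attribute name mirrors the calls elsewhere in this commit):

import bpy
import numpy as np

mesh = bpy.context.object.data
# Fetch the face attribute, recreating it if one of the same name exists with a
# different type or domain, then fill it with a single bulk call.
sharp = attribute_ensure(mesh.attributes, 'sharp_face', 'BOOLEAN', 'FACE')
sharp.data.foreach_set('value', np.zeros(len(mesh.polygons), dtype=bool))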
def set_poly_smoothing(gltf, pymesh, mesh, vert_normals, loop_vidxs):
    num_polys = len(mesh.polygons)

@@ -656,14 +692,15 @@ def set_poly_smoothing(gltf, pymesh, mesh, vert_normals, loop_vidxs):
        return

    if gltf.import_settings['import_shading'] == "SMOOTH":
-       poly_smooths = np.full(num_polys, True)
+       poly_sharps = np.full(num_polys, False)
        f = 0
        for prim in pymesh.primitives:
            if 'NORMAL' not in prim.attributes:
                # Primitives with no NORMALs should use flat shading
-               poly_smooths[f:f + prim.num_faces].fill(False)
+               poly_sharps[f:f + prim.num_faces].fill(True)
            f += prim.num_faces
-       mesh.polygons.foreach_set('use_smooth', poly_smooths)
+       sharp_face_attribute = attribute_ensure(mesh.attributes, 'sharp_face', 'BOOLEAN', 'FACE')
+       sharp_face_attribute.data.foreach_set('value', poly_sharps)
        return

    assert gltf.import_settings['import_shading'] == "NORMALS"

@@ -671,17 +708,17 @@ def set_poly_smoothing(gltf, pymesh, mesh, vert_normals, loop_vidxs):
    # Try to guess which polys should be flat based on the fact that all the
    # loop normals for a flat poly are = the poly's normal.
-   poly_smooths = np.empty(num_polys, dtype=bool)
+   poly_sharps = np.empty(num_polys, dtype=bool)
    poly_normals = np.empty(num_polys * 3, dtype=np.float32)
-   mesh.polygons.foreach_get('normal', poly_normals)
+   mesh.polygon_normals.foreach_get('vector', poly_normals)
    poly_normals = poly_normals.reshape(num_polys, 3)

    f = 0
    for prim in pymesh.primitives:
        if 'NORMAL' not in prim.attributes:
            # Primitives with no NORMALs should use flat shading
-           poly_smooths[f:f + prim.num_faces].fill(False)
+           poly_sharps[f:f + prim.num_faces].fill(True)
            f += prim.num_faces
            continue

@@ -704,11 +741,12 @@ def set_poly_smoothing(gltf, pymesh, mesh, vert_normals, loop_vidxs):
        dot_prods = np.sum(vert_ns * poly_ns, axis=1)
        np.logical_or(smooth, dot_prods <= 0.9999999, out=smooth)
-       poly_smooths[f:f + prim.num_faces] = smooth
+       np.logical_not(smooth, out=poly_sharps[f:f + prim.num_faces])
        f += prim.num_faces

-   mesh.polygons.foreach_set('use_smooth', poly_smooths)
+   sharp_face_attribute = attribute_ensure(mesh.attributes, 'sharp_face', 'BOOLEAN', 'FACE')
+   sharp_face_attribute.data.foreach_set('value', poly_sharps)
def merge_duplicate_verts(vert_locs, vert_normals, vert_joints, vert_weights, sk_vert_locs, loop_vidxs, edge_vidxs, attribute_data):

@@ -766,7 +804,7 @@ def merge_duplicate_verts(vert_locs, vert_normals, vert_joints, vert_weights, sk
        dots['sk%dy' % i] = locs[:, 1]
        dots['sk%dz' % i] = locs[:, 2]

-   unique_dots, unique_ind, inv_indices = np.unique(dots, return_index=True, return_inverse=True)
+   unique_dots, unique_ind, inv_indices = fast_structured_np_unique(dots, return_index=True, return_inverse=True)

    loop_vidxs = inv_indices[loop_vidxs]
    edge_vidxs = inv_indices[edge_vidxs]

View File

@@ -229,8 +229,9 @@ class BlenderNode():
            if gltf.import_settings['bone_heuristic'] == "BLENDER":
                pose_bone.custom_shape = bpy.data.objects[gltf.bone_shape]
-               armature_max_dim = max([blender_arma.dimensions[0] / blender_arma.scale[0], blender_arma.dimensions[1] / blender_arma.scale[1], blender_arma.dimensions[2] / blender_arma.scale[2]])
-               pose_bone.custom_shape_scale_xyz = Vector([armature_max_dim * 0.2] * 3)
+               armature_min_dim = min([blender_arma.dimensions[0] / blender_arma.scale[0], blender_arma.dimensions[1] / blender_arma.scale[1], blender_arma.dimensions[2] / blender_arma.scale[2]])
+               pose_bone.custom_shape_scale_xyz = Vector([armature_min_dim * 0.05] * 3)
+               pose_bone.use_custom_shape_bone_size = False

    @staticmethod
    def create_mesh_object(gltf, vnode):

View File

@@ -8,37 +8,9 @@ from ...io.com.gltf2_io_constants import GLTF_IOR
from ...io.com.gltf2_io import TextureInfo, MaterialPBRMetallicRoughness
from ..com.gltf2_blender_material_helpers import get_gltf_node_name, create_settings_group
from .gltf2_blender_texture import texture
-from .gltf2_blender_KHR_materials_clearcoat import \
-    clearcoat, clearcoat_roughness, clearcoat_normal
-from .gltf2_blender_KHR_materials_transmission import transmission
-from .gltf2_blender_KHR_materials_ior import ior
-from .gltf2_blender_KHR_materials_volume import volume
-from .gltf2_blender_KHR_materials_specular import specular
-from .gltf2_blender_KHR_materials_sheen import sheen
from .gltf2_blender_KHR_materials_anisotropy import anisotropy
+from .gltf2_blender_material_utils import \
+    MaterialHelper, scalar_factor_and_texture, color_factor_and_texture, normal_map
-
-class MaterialHelper:
-    """Helper class. Stores material stuff to be passed around everywhere."""
-    def __init__(self, gltf, pymat, mat, vertex_color):
-        self.gltf = gltf
-        self.pymat = pymat
-        self.mat = mat
-        self.node_tree = mat.node_tree
-        self.vertex_color = vertex_color
-        if pymat.pbr_metallic_roughness is None:
-            pymat.pbr_metallic_roughness = \
-                MaterialPBRMetallicRoughness.from_dict({})
-        self.settings_node = None
-
-    def is_opaque(self):
-        alpha_mode = self.pymat.alpha_mode
-        return alpha_mode is None or alpha_mode == 'OPAQUE'
-
-    def needs_emissive(self):
-        return (
-            self.pymat.emissive_texture is not None or
-            (self.pymat.emissive_factor or [0, 0, 0]) != [0, 0, 0]
-        )
def pbr_metallic_roughness(mh: MaterialHelper):

@@ -47,10 +19,6 @@ def pbr_metallic_roughness(mh: MaterialHelper):
    pbr_node.location = 10, 300
    additional_location = 40, -370  # For occlusion and/or volume / original PBR extensions

-   # Set IOR to 1.5, this is the default in glTF
-   # This value may be overridden later if IOR extension is set on file
-   pbr_node.inputs['IOR'].default_value = GLTF_IOR

    if mh.pymat.occlusion_texture is not None:
        if mh.settings_node is None:
            mh.settings_node = make_settings_node(mh)

@@ -119,29 +87,9 @@ def pbr_metallic_roughness(mh: MaterialHelper):
        occlusion_socket=mh.settings_node.inputs['Occlusion'],
    )
-   clearcoat(
-       mh,
-       location=locs['clearcoat'],
-       clearcoat_socket=pbr_node.inputs['Coat Weight'],
-   )
-   clearcoat_roughness(
-       mh,
-       location=locs['clearcoat_roughness'],
-       roughness_socket=pbr_node.inputs['Coat Roughness'],
-   )
-   clearcoat_normal(
-       mh,
-       location=locs['clearcoat_normal'],
-       normal_socket=pbr_node.inputs['Coat Normal'],
-   )
-   transmission(
-       mh,
-       location=locs['transmission'],
-       transmission_socket=pbr_node.inputs['Transmission Weight']
-   )
+   clearcoat(mh, locs, pbr_node)
+   transmission(mh, locs, pbr_node)

    if need_volume_node:
        volume(

@@ -151,13 +99,7 @@ def pbr_metallic_roughness(mh: MaterialHelper):
            thickness_socket=mh.settings_node.inputs[1] if mh.settings_node else None
        )

-   specular(
-       mh,
-       location_specular=locs['specularTexture'],
-       location_specular_tint=locs['specularColorTexture'],
-       specular_socket=pbr_node.inputs['Specular IOR Level'],
-       specular_tint_socket=pbr_node.inputs['Specular Tint']
-   )
+   specular(mh, locs, pbr_node)

    anisotropy(
        mh,

@@ -167,18 +109,135 @@ def pbr_metallic_roughness(mh: MaterialHelper):
        anisotropy_tangent_socket=pbr_node.inputs['Tangent']
    )

-   sheen(
-       mh,
-       location_sheenTint=locs['sheenColorTexture'],
-       location_sheenRoughness=locs['sheenRoughnessTexture'],
-       sheen_socket=pbr_node.inputs['Sheen Weight'],
-       sheenTint_socket=pbr_node.inputs['Sheen Tint'],
-       sheenRoughness_socket=pbr_node.inputs['Sheen Roughness']
-   )
-   ior(
-       mh,
-       ior_socket=pbr_node.inputs['IOR']
-   )
+   sheen(mh, locs, pbr_node)
+
+   # IOR
+   ior_ext = mh.get_ext('KHR_materials_ior', {})
+   ior = ior_ext.get('ior', GLTF_IOR)
+   pbr_node.inputs['IOR'].default_value = ior

def clearcoat(mh, locs, pbr_node):
ext = mh.get_ext('KHR_materials_clearcoat', {})
scalar_factor_and_texture(
mh,
location=locs['clearcoat'],
label='Clearcoat',
socket=pbr_node.inputs['Coat Weight'],
factor=ext.get('clearcoatFactor', 0),
tex_info=ext.get('clearcoatTexture'),
channel=0, # Red
)
scalar_factor_and_texture(
mh,
location=locs['clearcoat_roughness'],
label='Clearcoat Roughness',
socket=pbr_node.inputs['Coat Roughness'],
factor=ext.get('clearcoatRoughnessFactor', 0),
tex_info=ext.get('clearcoatRoughnessTexture'),
channel=1, # Green
)
normal_map(
mh,
location=locs['clearcoat_normal'],
label='Clearcoat Normal',
socket=pbr_node.inputs['Coat Normal'],
tex_info=ext.get('clearcoatNormalTexture'),
)
def transmission(mh, locs, pbr_node):
ext = mh.get_ext('KHR_materials_transmission', {})
factor = ext.get('transmissionFactor', 0)
if factor > 0:
# Activate screen refraction (for Eevee)
mh.mat.use_screen_refraction = True
scalar_factor_and_texture(
mh,
location=locs['transmission'],
label='Transmission',
socket=pbr_node.inputs['Transmission Weight'],
factor=factor,
tex_info=ext.get('transmissionTexture'),
channel=0, # Red
)
def volume(mh, location, volume_socket, thickness_socket):
# Based on https://github.com/KhronosGroup/glTF-Blender-IO/issues/1454#issuecomment-928319444
ext = mh.get_ext('KHR_materials_volume', {})
color = ext.get('attenuationColor', [1, 1, 1])
volume_socket.node.inputs[0].default_value = [*color, 1]
distance = ext.get('attenuationDistance', float('inf'))
density = 1 / distance
volume_socket.node.inputs[1].default_value = density
scalar_factor_and_texture(
mh,
location=location,
label='Thickness',
socket=thickness_socket,
factor=ext.get('thicknessFactor', 0),
tex_info=ext.get('thicknessTexture'),
channel=1, # Green
)
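attenuationDistance defaults to +Infinity in the glTF extension, so its reciprocal yields zero density when the property is omitted; a quick check of the mapping used above:

density = 1 / float('inf')   # extension omits attenuationDistance -> 0.0 (no absorption)
density = 1 / 0.25           # attenuationDistance of 0.25 m -> 4.0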
def specular(mh, locs, pbr_node):
ext = mh.get_ext('KHR_materials_specular', {})
# blender.IORLevel = 0.5 * gltf.specular
scalar_factor_and_texture(
mh,
location=locs['specularTexture'],
label='Specular',
socket=pbr_node.inputs['Specular IOR Level'],
factor=0.5 * ext.get('specularFactor', 1),
tex_info=ext.get('specularTexture'),
channel=4, # Alpha
)
color_factor_and_texture(
mh,
location=locs['specularColorTexture'],
label='Specular Color',
socket=pbr_node.inputs['Specular Tint'],
factor=ext.get('specularColorFactor', [1, 1, 1]),
tex_info=ext.get('specularColorTexture'),
)
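The 0.5 factor maps glTF specular onto Blender's Specular IOR Level, whose default of 0.5 corresponds to glTF's default specularFactor of 1.0; a quick check:

ior_level = 0.5 * 1.0   # glTF default specularFactor -> Blender's default of 0.5
ior_level = 0.5 * 0.0   # specular fully disabled -> 0.0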
def sheen(mh, locs, pbr_node):
ext = mh.get_ext('KHR_materials_sheen')
if ext is None:
return
pbr_node.inputs['Sheen Weight'].default_value = 1
color_factor_and_texture(
mh,
location=locs['sheenColorTexture'],
label='Sheen Color',
socket=pbr_node.inputs['Sheen Tint'],
factor=ext.get('sheenColorFactor', [0, 0, 0]),
tex_info=ext.get('sheenColorTexture'),
)
scalar_factor_and_texture(
mh,
location=locs['sheenRoughnessTexture'],
label='Sheen Roughness',
socket=pbr_node.inputs['Sheen Roughness'],
factor=ext.get('sheenRoughnessFactor', 0),
tex_info=ext.get('sheenRoughnessTexture'),
channel=4, # Alpha
) )
@@ -190,35 +249,12 @@ def calc_locations(mh):
    height = 460  # height of each block
    locs = {}

-   try:
-       clearcoat_ext = mh.pymat.extensions['KHR_materials_clearcoat']
-   except Exception:
-       clearcoat_ext = {}
-   try:
-       transmission_ext = mh.pymat.exntesions['KHR_materials_transmission']
-   except:
-       transmission_ext = {}
-   try:
-       volume_ext = mh.pymat.extensions['KHR_materials_volume']
-   except Exception:
-       volume_ext = {}
-   try:
-       specular_ext = mh.pymat.extensions['KHR_materials_specular']
-   except:
-       specular_ext = {}
-   try:
-       anisotropy_ext = mh.pymat.extensions['KHR_materials_anisotropy']
-   except:
-       anisotropy_ext = {}
-   try:
-       sheen_ext = mh.pymat.extensions['KHR_materials_sheen']
-   except:
-       sheen_ext = {}
+   clearcoat_ext = mh.get_ext('KHR_materials_clearcoat', {})
+   transmission_ext = mh.get_ext('KHR_materials_transmission', {})
+   volume_ext = mh.get_ext('KHR_materials_volume', {})
+   specular_ext = mh.get_ext('KHR_materials_specular', {})
+   anisotropy_ext = mh.get_ext('KHR_materials_anisotropy', {})
+   sheen_ext = mh.get_ext('KHR_materials_sheen', {})

    locs['base_color'] = (x, y)
    if mh.pymat.pbr_metallic_roughness.base_color_texture is not None or mh.vertex_color:
@ -283,60 +319,24 @@ def calc_locations(mh):
# [Texture] => [Emissive Factor] => # [Texture] => [Emissive Factor] =>
def emission(mh: MaterialHelper, location, color_socket, strength_socket): def emission(mh: MaterialHelper, location, color_socket, strength_socket):
x, y = location factor = mh.pymat.emissive_factor or [0, 0, 0]
emissive_factor = mh.pymat.emissive_factor or [0, 0, 0] ext = mh.get_ext('KHR_materials_emissive_strength', {})
strength = ext.get('emissiveStrength', 1)
strength = 1 if factor[0] == factor[1] == factor[2]:
try: # Fold greyscale factor into strength
# Get strength from KHR_materials_emissive_strength if exists strength *= factor[0]
strength = mh.pymat.extensions['KHR_materials_emissive_strength']['emissiveStrength'] factor = [1, 1, 1]
except Exception:
pass
if color_socket is None: color_factor_and_texture(
return
if mh.pymat.emissive_texture is None:
if emissive_factor == [0, 0, 0]:
# Keep as close as possible to the default Blender value when there is no emission
color_socket.default_value = [1,1,1,1]
strength_socket.default_value = 0
return
color_socket.default_value = emissive_factor + [1]
strength_socket.default_value = strength
return
# Put grayscale emissive factors into the Emission Strength
e0, e1, e2 = emissive_factor
if strength_socket and e0 == e1 == e2:
strength_socket.default_value = e0 * strength
# Otherwise, use a multiply node for it
else:
if emissive_factor != [1, 1, 1]:
node = mh.node_tree.nodes.new('ShaderNodeMix')
node.label = 'Emissive Factor'
node.data_type = 'RGBA'
node.location = x - 140, y
node.blend_type = 'MULTIPLY'
# Outputs
mh.node_tree.links.new(color_socket, node.outputs[2])
# Inputs
node.inputs['Factor'].default_value = 1.0
color_socket = node.inputs[6]
node.inputs[7].default_value = emissive_factor + [1]
x -= 200
strength_socket.default_value = strength
texture(
mh, mh,
location,
label='Emissive',
socket=color_socket,
factor=factor,
tex_info=mh.pymat.emissive_texture, tex_info=mh.pymat.emissive_texture,
label='EMISSIVE',
location=(x, y),
color_socket=color_socket,
) )
strength_socket.default_value = strength
# [Texture] => [Mix Colors] => [Color Factor] => # [Texture] => [Mix Colors] => [Color Factor] =>
@@ -395,7 +395,7 @@ def base_color(
    if needs_alpha_factor:
        node = mh.node_tree.nodes.new('ShaderNodeMath')
        node.label = 'Alpha Factor'
-       node.location = x - 140, y - 200
+       node.location = x - 140, y - 230
        # Outputs
        mh.node_tree.links.new(alpha_socket, node.outputs[0])
        # Inputs

@@ -428,7 +428,7 @@ def base_color(
    if alpha_socket is not None:
        node = mh.node_tree.nodes.new('ShaderNodeMath')
        node.label = 'Mix Vertex Alpha'
-       node.location = x - 140, y - 200
+       node.location = x - 140, y - 230
        node.operation = 'MULTIPLY'
        # Outputs
        mh.node_tree.links.new(alpha_socket, node.outputs[0])
@@ -528,40 +528,12 @@ def metallic_roughness(mh: MaterialHelper, location, metallic_socket, roughness_

# [Texture] => [Normal Map] =>
def normal(mh: MaterialHelper, location, normal_socket):
-   x,y = location
-   tex_info = mh.pymat.normal_texture
-   if tex_info is None:
-       return
-   # Normal map
-   node = mh.node_tree.nodes.new('ShaderNodeNormalMap')
-   node.location = x - 150, y - 40
-   # Set UVMap
-   uv_idx = tex_info.tex_coord or 0
-   try:
-       uv_idx = tex_info.extensions['KHR_texture_transform']['texCoord']
-   except Exception:
-       pass
-   node.uv_map = 'UVMap' if uv_idx == 0 else 'UVMap.%03d' % uv_idx
-   # Set strength
-   scale = tex_info.scale
-   scale = scale if scale is not None else 1
-   node.inputs['Strength'].default_value = scale
-   # Outputs
-   mh.node_tree.links.new(normal_socket, node.outputs['Normal'])
-   # Inputs
-   color_socket = node.inputs['Color']
-   x -= 200
-   texture(
-       mh,
-       tex_info=tex_info,
-       label='NORMALMAP',
-       location=(x, y),
-       is_data=True,
-       color_socket=color_socket,
-   )
+   normal_map(
+       mh,
+       location=location,
+       label='Normal Map',
+       socket=normal_socket,
+       tex_info=mh.pymat.normal_texture,
+   )

View File

@@ -39,25 +39,7 @@ def texture(
    # Get image
    if forced_image is None:
-       if mh.gltf.import_settings['import_webp_texture'] is True:
-           # Get the WebP image if there is one
-           if pytexture.extensions \
-                   and 'EXT_texture_webp' in pytexture.extensions \
-                   and pytexture.extensions['EXT_texture_webp']['source'] is not None:
-               source = pytexture.extensions['EXT_texture_webp']['source']
-           elif pytexture.source is not None:
-               source = pytexture.source
-       else:
-           source = pytexture.source
-       if mh.gltf.import_settings['import_webp_texture'] is False and source is None:
-           # In case webp is not used as a fallback, use this as main texture
-           if pytexture.extensions \
-                   and 'EXT_texture_webp' in pytexture.extensions \
-                   and pytexture.extensions['EXT_texture_webp']['source'] is not None:
-               source = pytexture.extensions['EXT_texture_webp']['source']
+       source = get_source(mh, pytexture)
        if source is not None:
            BlenderImage.create(mh.gltf, source)
            pyimg = mh.gltf.data.images[source]
@ -188,6 +170,20 @@ def texture(
import_user_extensions('gather_import_texture_after_hook', mh.gltf, pytexture, mh.node_tree, mh, tex_info, location, label, color_socket, alpha_socket, is_data) import_user_extensions('gather_import_texture_after_hook', mh.gltf, pytexture, mh.node_tree, mh, tex_info, location, label, color_socket, alpha_socket, is_data)
def get_source(mh, pytexture):
src = pytexture.source
try:
webp_src = pytexture.extensions['EXT_texture_webp']['source']
except Exception:
webp_src = None
if mh.gltf.import_settings['import_webp_texture']:
return webp_src if webp_src is not None else src
else:
return src if src is not None else webp_src
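A small self-contained sketch of the selection rule get_source implements, using hypothetical stand-in texture objects rather than importer data:

from types import SimpleNamespace

def pick(pytexture, prefer_webp):
    src = pytexture.source
    try:
        webp_src = pytexture.extensions['EXT_texture_webp']['source']
    except Exception:
        webp_src = None
    if prefer_webp:
        return webp_src if webp_src is not None else src
    return src if src is not None else webp_src

tex_both = SimpleNamespace(source=3, extensions={'EXT_texture_webp': {'source': 7}})
tex_webp_only = SimpleNamespace(source=None, extensions={'EXT_texture_webp': {'source': 7}})
print(pick(tex_both, True), pick(tex_both, False))            # 7 3
print(pick(tex_webp_only, True), pick(tex_webp_only, False))  # 7 7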
def set_filtering(tex_img, pysampler): def set_filtering(tex_img, pysampler):
"""Set the filtering/interpolation on an Image Texture from the glTf sampler.""" """Set the filtering/interpolation on an Image Texture from the glTf sampler."""
minf = pysampler.min_filter minf = pysampler.min_filter

View File

@@ -8,7 +8,7 @@
bl_info = {
    "name": "Snap_Utilities_Line",
    "author": "Germano Cavalcante",
-   "version": (6, 7, 1),
+   "version": (6, 8, 0),
    "blender": (3, 2, 0),
    "location": "View3D > TOOLS > Line Tool",
    "description": "Extends Blender Snap controls",

View File

@ -8,6 +8,8 @@ import bmesh
from mathutils import Vector from mathutils import Vector
from mathutils.geometry import intersect_point_line from mathutils.geometry import intersect_point_line
from .snap_context_l.utils_projection import intersect_ray_ray_fac
from .common_utilities import snap_utilities from .common_utilities import snap_utilities
from .common_classes import ( from .common_classes import (
CharMap, CharMap,
@ -243,6 +245,7 @@ class SnapUtilitiesLine(SnapUtilities, bpy.types.Operator):
self.bool_update = True self.bool_update = True
self.vector_constrain = () self.vector_constrain = ()
self.len = 0 self.len = 0
self.curr_dir = Vector()
if not (self.bm and self.obj): if not (self.bm and self.obj):
self.obj = context.edit_object self.obj = context.edit_object
@ -262,8 +265,6 @@ class SnapUtilitiesLine(SnapUtilities, bpy.types.Operator):
if self.navigation_ops.run(context, event, self.prevloc if self.vector_constrain else self.location): if self.navigation_ops.run(context, event, self.prevloc if self.vector_constrain else self.location):
return {'RUNNING_MODAL'} return {'RUNNING_MODAL'}
context.area.tag_redraw()
if event.ctrl and event.type == 'Z' and event.value == 'PRESS': if event.ctrl and event.type == 'Z' and event.value == 'PRESS':
bpy.ops.ed.undo() bpy.ops.ed.undo()
if not self.wait_for_input: if not self.wait_for_input:
@ -287,9 +288,16 @@ class SnapUtilitiesLine(SnapUtilities, bpy.types.Operator):
is_making_lines = bool(self.list_verts_co) is_making_lines = bool(self.list_verts_co)
if (event.type == 'MOUSEMOVE' or self.bool_update) and self.charmap.length_entered_value == 0.0: if (event.type == 'MOUSEMOVE' or self.bool_update):
mval = Vector((event.mouse_region_x, event.mouse_region_y)) mval = Vector((event.mouse_region_x, event.mouse_region_y))
if self.charmap.length_entered_value != 0.0:
ray_dir, ray_orig = self.sctx.get_ray(mval)
loc = self.list_verts_co[-1]
fac = intersect_ray_ray_fac(loc, self.curr_dir, ray_orig, ray_dir)
if fac < 0.0:
self.curr_dir.negate()
self.location = loc - (self.location - loc)
else:
if self.rv3d.view_matrix != self.rotMat: if self.rv3d.view_matrix != self.rotMat:
self.rotMat = self.rv3d.view_matrix.copy() self.rotMat = self.rv3d.view_matrix.copy()
self.bool_update = True self.bool_update = True
@ -308,23 +316,22 @@ class SnapUtilitiesLine(SnapUtilities, bpy.types.Operator):
self.snap_to_grid() self.snap_to_grid()
if is_making_lines and self.preferences.auto_constrain: if is_making_lines:
loc = self.list_verts_co[-1] loc = self.list_verts_co[-1]
vec, type = self.constrain.update( self.curr_dir = self.location - loc
if self.preferences.auto_constrain:
vec, cons_type = self.constrain.update(
self.sctx.region, self.sctx.rv3d, mval, loc) self.sctx.region, self.sctx.rv3d, mval, loc)
self.vector_constrain = [loc, loc + vec, type] self.vector_constrain = [loc, loc + vec, cons_type]
if event.value == 'PRESS': elif event.value == 'PRESS':
if is_making_lines and self.charmap.modal_(context, event): if is_making_lines and self.charmap.modal_(context, event):
self.bool_update = self.charmap.length_entered_value == 0.0 self.bool_update = self.charmap.length_entered_value == 0.0
if not self.bool_update: if not self.bool_update:
text_value = self.charmap.length_entered_value text_value = self.charmap.length_entered_value
vector = (self.location - vector = self.curr_dir.normalized()
self.list_verts_co[-1]).normalized() self.location = self.list_verts_co[-1] + (vector * text_value)
self.location = self.list_verts_co[-1] + \
(vector * text_value)
del vector
elif self.constrain.modal(event, self._shift_contrain_callback): elif self.constrain.modal(event, self._shift_contrain_callback):
self.bool_update = True self.bool_update = True
@ -379,6 +386,8 @@ class SnapUtilitiesLine(SnapUtilities, bpy.types.Operator):
self.list_verts = [] self.list_verts = []
self.list_verts_co = [] self.list_verts_co = []
self.charmap.clear() self.charmap.clear()
else:
return {'RUNNING_MODAL'}
a = "" a = ""
if is_making_lines: if is_making_lines:
@ -387,11 +396,9 @@ class SnapUtilitiesLine(SnapUtilities, bpy.types.Operator):
context.area.header_text_set( context.area.header_text_set(
text="hit: %.3f %.3f %.3f %s" % (*self.location, a)) text="hit: %.3f %.3f %.3f %s" % (*self.location, a))
if True or is_making_lines: context.area.tag_redraw()
return {'RUNNING_MODAL'} return {'RUNNING_MODAL'}
return {'PASS_THROUGH'}
def draw_callback_px(self): def draw_callback_px(self):
if self.bm: if self.bm:
self.draw_cache.draw_elem(self.snap_obj, self.bm, self.geom) self.draw_cache.draw_elem(self.snap_obj, self.bm, self.geom)

View File

@@ -185,17 +185,21 @@ def intersect_boundbox_threshold(sctx, MVP, ray_origin_local, ray_direction_loca
    return (sctx.mval - p).length_squared < sctx._dist_px_sq


-def intersect_ray_segment_fac(v0, v1, ray_direction, ray_origin):
-    a = v1 - v0
-    t = v0 - ray_origin
-    n = a.cross(ray_direction)
+def intersect_ray_ray_fac(orig_a, dir_a, orig_b, dir_b):
+    t = orig_a - orig_b
+    n = dir_a.cross(dir_b)
    nlen = n.length_squared
    # if (nlen == 0.0f) the lines are parallel, has no nearest point, only distance squared.*/
    if nlen == 0.0:
        # Calculate the distance to the nearest point to origin then #
-       return a.dot(ray_direction) < 0
+       return intersect_point_line(orig_a, orig_b, orig_b + dir_b)
    else:
        c = n - t
-       cray = c.cross(ray_direction)
+       cray = c.cross(dir_b)
        return cray.dot(n) / nlen
+
+
+def intersect_ray_segment_fac(v0, v1, ray_direction, ray_origin):
+    dir_a = v1 - v0
+    return intersect_ray_ray_fac(v0, dir_a, ray_origin, ray_direction)

View File

@@ -5,7 +5,7 @@
bl_info = {
    "name": "Node Wrangler",
    "author": "Bartek Skorupa, Greg Zaal, Sebastian Koenig, Christian Brinkmann, Florian Meyer",
-   "version": (3, 52),
+   "version": (3, 53),
    "blender": (4, 0, 0),
    "location": "Node Editor Toolbar or Shift-W",
    "description": "Various tools to enhance and speed up node-based workflow",

View File

@ -10,7 +10,7 @@ from nodeitems_utils import node_categories_iter, NodeItemCustom
from . import operators from . import operators
from .utils.constants import blend_types, geo_combine_operations, operations from .utils.constants import blend_types, geo_combine_operations, operations
from .utils.nodes import get_nodes_links, nw_check, NWBase from .utils.nodes import get_nodes_links, NWBaseMenu
def drawlayout(context, layout, mode='non-panel'): def drawlayout(context, layout, mode='non-panel'):
@ -71,7 +71,7 @@ def drawlayout(context, layout, mode='non-panel'):
col.separator() col.separator()
class NodeWranglerPanel(Panel, NWBase): class NodeWranglerPanel(Panel, NWBaseMenu):
bl_idname = "NODE_PT_nw_node_wrangler" bl_idname = "NODE_PT_nw_node_wrangler"
bl_space_type = 'NODE_EDITOR' bl_space_type = 'NODE_EDITOR'
bl_label = "Node Wrangler" bl_label = "Node Wrangler"
@ -92,7 +92,7 @@ class NodeWranglerPanel(Panel, NWBase):
# #
# M E N U S # M E N U S
# #
class NodeWranglerMenu(Menu, NWBase): class NodeWranglerMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_node_wrangler_menu" bl_idname = "NODE_MT_nw_node_wrangler_menu"
bl_label = "Node Wrangler" bl_label = "Node Wrangler"
@ -101,7 +101,7 @@ class NodeWranglerMenu(Menu, NWBase):
drawlayout(context, self.layout) drawlayout(context, self.layout)
class NWMergeNodesMenu(Menu, NWBase): class NWMergeNodesMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_merge_nodes_menu" bl_idname = "NODE_MT_nw_merge_nodes_menu"
bl_label = "Merge Selected Nodes" bl_label = "Merge Selected Nodes"
@ -124,7 +124,7 @@ class NWMergeNodesMenu(Menu, NWBase):
props.merge_type = 'ALPHAOVER' props.merge_type = 'ALPHAOVER'
class NWMergeGeometryMenu(Menu, NWBase): class NWMergeGeometryMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_merge_geometry_menu" bl_idname = "NODE_MT_nw_merge_geometry_menu"
bl_label = "Merge Selected Nodes using Geometry Nodes" bl_label = "Merge Selected Nodes using Geometry Nodes"
@ -137,7 +137,7 @@ class NWMergeGeometryMenu(Menu, NWBase):
props.merge_type = 'GEOMETRY' props.merge_type = 'GEOMETRY'
class NWMergeShadersMenu(Menu, NWBase): class NWMergeShadersMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_merge_shaders_menu" bl_idname = "NODE_MT_nw_merge_shaders_menu"
bl_label = "Merge Selected Nodes using Shaders" bl_label = "Merge Selected Nodes using Shaders"
@ -150,7 +150,7 @@ class NWMergeShadersMenu(Menu, NWBase):
props.merge_type = 'SHADER' props.merge_type = 'SHADER'
class NWMergeMixMenu(Menu, NWBase): class NWMergeMixMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_merge_mix_menu" bl_idname = "NODE_MT_nw_merge_mix_menu"
bl_label = "Merge Selected Nodes using Mix" bl_label = "Merge Selected Nodes using Mix"
@ -162,7 +162,7 @@ class NWMergeMixMenu(Menu, NWBase):
props.merge_type = 'MIX' props.merge_type = 'MIX'
class NWConnectionListOutputs(Menu, NWBase): class NWConnectionListOutputs(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_connection_list_out" bl_idname = "NODE_MT_nw_connection_list_out"
bl_label = "From:" bl_label = "From:"
@ -180,7 +180,7 @@ class NWConnectionListOutputs(Menu, NWBase):
icon="RADIOBUT_OFF").from_socket = index icon="RADIOBUT_OFF").from_socket = index
class NWConnectionListInputs(Menu, NWBase): class NWConnectionListInputs(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_connection_list_in" bl_idname = "NODE_MT_nw_connection_list_in"
bl_label = "To:" bl_label = "To:"
@ -201,7 +201,7 @@ class NWConnectionListInputs(Menu, NWBase):
op.to_socket = index op.to_socket = index
class NWMergeMathMenu(Menu, NWBase): class NWMergeMathMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_merge_math_menu" bl_idname = "NODE_MT_nw_merge_math_menu"
bl_label = "Merge Selected Nodes using Math" bl_label = "Merge Selected Nodes using Math"
@ -213,7 +213,7 @@ class NWMergeMathMenu(Menu, NWBase):
props.merge_type = 'MATH' props.merge_type = 'MATH'
class NWBatchChangeNodesMenu(Menu, NWBase): class NWBatchChangeNodesMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_batch_change_nodes_menu" bl_idname = "NODE_MT_nw_batch_change_nodes_menu"
bl_label = "Batch Change Selected Nodes" bl_label = "Batch Change Selected Nodes"
@ -223,7 +223,7 @@ class NWBatchChangeNodesMenu(Menu, NWBase):
layout.menu(NWBatchChangeOperationMenu.bl_idname) layout.menu(NWBatchChangeOperationMenu.bl_idname)
class NWBatchChangeBlendTypeMenu(Menu, NWBase): class NWBatchChangeBlendTypeMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_batch_change_blend_type_menu" bl_idname = "NODE_MT_nw_batch_change_blend_type_menu"
bl_label = "Batch Change Blend Type" bl_label = "Batch Change Blend Type"
@ -235,7 +235,7 @@ class NWBatchChangeBlendTypeMenu(Menu, NWBase):
props.operation = 'CURRENT' props.operation = 'CURRENT'
class NWBatchChangeOperationMenu(Menu, NWBase): class NWBatchChangeOperationMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_batch_change_operation_menu" bl_idname = "NODE_MT_nw_batch_change_operation_menu"
bl_label = "Batch Change Math Operation" bl_label = "Batch Change Math Operation"
@ -247,7 +247,7 @@ class NWBatchChangeOperationMenu(Menu, NWBase):
props.operation = type props.operation = type
class NWCopyToSelectedMenu(Menu, NWBase): class NWCopyToSelectedMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_copy_node_properties_menu" bl_idname = "NODE_MT_nw_copy_node_properties_menu"
bl_label = "Copy to Selected" bl_label = "Copy to Selected"
@ -257,7 +257,7 @@ class NWCopyToSelectedMenu(Menu, NWBase):
layout.menu(NWCopyLabelMenu.bl_idname) layout.menu(NWCopyLabelMenu.bl_idname)
class NWCopyLabelMenu(Menu, NWBase): class NWCopyLabelMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_copy_label_menu" bl_idname = "NODE_MT_nw_copy_label_menu"
bl_label = "Copy Label" bl_label = "Copy Label"
@ -268,7 +268,7 @@ class NWCopyLabelMenu(Menu, NWBase):
layout.operator(operators.NWCopyLabel.bl_idname, text="from Linked Output's Name").option = 'FROM_SOCKET' layout.operator(operators.NWCopyLabel.bl_idname, text="from Linked Output's Name").option = 'FROM_SOCKET'
class NWAddReroutesMenu(Menu, NWBase): class NWAddReroutesMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_add_reroutes_menu" bl_idname = "NODE_MT_nw_add_reroutes_menu"
bl_label = "Add Reroutes" bl_label = "Add Reroutes"
bl_description = "Add Reroute Nodes to Selected Nodes' Outputs" bl_description = "Add Reroute Nodes to Selected Nodes' Outputs"
@ -280,7 +280,7 @@ class NWAddReroutesMenu(Menu, NWBase):
layout.operator(operators.NWAddReroutes.bl_idname, text="to Linked Outputs").option = 'LINKED' layout.operator(operators.NWAddReroutes.bl_idname, text="to Linked Outputs").option = 'LINKED'
class NWLinkActiveToSelectedMenu(Menu, NWBase): class NWLinkActiveToSelectedMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_link_active_to_selected_menu" bl_idname = "NODE_MT_nw_link_active_to_selected_menu"
bl_label = "Link Active to Selected" bl_label = "Link Active to Selected"
@ -291,7 +291,7 @@ class NWLinkActiveToSelectedMenu(Menu, NWBase):
layout.menu(NWLinkUseOutputsNamesMenu.bl_idname) layout.menu(NWLinkUseOutputsNamesMenu.bl_idname)
class NWLinkStandardMenu(Menu, NWBase): class NWLinkStandardMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_link_standard_menu" bl_idname = "NODE_MT_nw_link_standard_menu"
bl_label = "To All Selected" bl_label = "To All Selected"
@ -307,7 +307,7 @@ class NWLinkStandardMenu(Menu, NWBase):
props.use_outputs_names = False props.use_outputs_names = False
class NWLinkUseNodeNameMenu(Menu, NWBase): class NWLinkUseNodeNameMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_link_use_node_name_menu" bl_idname = "NODE_MT_nw_link_use_node_name_menu"
bl_label = "Use Node Name/Label" bl_label = "Use Node Name/Label"
@ -323,7 +323,7 @@ class NWLinkUseNodeNameMenu(Menu, NWBase):
props.use_outputs_names = False props.use_outputs_names = False
class NWLinkUseOutputsNamesMenu(Menu, NWBase): class NWLinkUseOutputsNamesMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_link_use_outputs_names_menu" bl_idname = "NODE_MT_nw_link_use_outputs_names_menu"
bl_label = "Use Outputs Names" bl_label = "Use Outputs Names"
@ -345,7 +345,11 @@ class NWAttributeMenu(bpy.types.Menu):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return nw_check(context) and context.space_data.tree_type == 'ShaderNodeTree' space = context.space_data
return (space.type == 'NODE_EDITOR'
and space.node_tree is not None
and space.node_tree.library is None
and space.tree_type == 'ShaderNodeTree')
def draw(self, context): def draw(self, context):
l = self.layout l = self.layout
@ -372,7 +376,7 @@ class NWAttributeMenu(bpy.types.Menu):
l.label(text="No attributes on objects with this material") l.label(text="No attributes on objects with this material")
class NWSwitchNodeTypeMenu(Menu, NWBase): class NWSwitchNodeTypeMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_switch_node_type_menu" bl_idname = "NODE_MT_nw_switch_node_type_menu"
bl_label = "Switch Type to..." bl_label = "Switch Type to..."
@ -411,8 +415,11 @@ def bgreset_menu_func(self, context):
def save_viewer_menu_func(self, context): def save_viewer_menu_func(self, context):
if (nw_check(context) space = context.space_data
and context.space_data.tree_type == 'CompositorNodeTree' if (space.type == 'NODE_EDITOR'
and space.node_tree is not None
and space.node_tree.library is None
and space.tree_type == 'CompositorNodeTree'
and context.scene.node_tree.nodes.active and context.scene.node_tree.nodes.active
and context.scene.node_tree.nodes.active.type == "VIEWER"): and context.scene.node_tree.nodes.active.type == "VIEWER"):
self.layout.operator(operators.NWSaveViewer.bl_idname, icon='FILE_IMAGE') self.layout.operator(operators.NWSaveViewer.bl_idname, icon='FILE_IMAGE')
@ -421,17 +428,21 @@ def save_viewer_menu_func(self, context):
def reset_nodes_button(self, context): def reset_nodes_button(self, context):
node_active = context.active_node node_active = context.active_node
node_selected = context.selected_nodes node_selected = context.selected_nodes
node_ignore = ["FRAME", "REROUTE", "GROUP"]
# Check if active node is in the selection and respective type # Check if active node is in the selection, ignore some node types
if (len(node_selected) == 1) and node_active and node_active.select and node_active.type not in node_ignore: if (len(node_selected) != 1
row = self.layout.row() or node_active is None
row.operator(operators.NWResetNodes.bl_idname, text="Reset Node", icon="FILE_REFRESH") or not node_active.select
self.layout.separator() or node_active.type in {"REROUTE", "GROUP"}):
return
elif (len(node_selected) == 1) and node_active and node_active.select and node_active.type == "FRAME":
row = self.layout.row() row = self.layout.row()
if node_active.type == "FRAME":
row.operator(operators.NWResetNodes.bl_idname, text="Reset Nodes in Frame", icon="FILE_REFRESH") row.operator(operators.NWResetNodes.bl_idname, text="Reset Nodes in Frame", icon="FILE_REFRESH")
else:
row.operator(operators.NWResetNodes.bl_idname, text="Reset Node", icon="FILE_REFRESH")
self.layout.separator() self.layout.separator()

View File

@ -29,7 +29,9 @@ from .utils.draw import draw_callback_nodeoutline
from .utils.paths import match_files_to_socket_names, split_into_components from .utils.paths import match_files_to_socket_names, split_into_components
from .utils.nodes import (node_mid_pt, autolink, node_at_pos, get_nodes_links, is_viewer_socket, is_viewer_link, from .utils.nodes import (node_mid_pt, autolink, node_at_pos, get_nodes_links, is_viewer_socket, is_viewer_link,
get_group_output_node, get_output_location, force_update, get_internal_socket, nw_check, get_group_output_node, get_output_location, force_update, get_internal_socket, nw_check,
nw_check_space_type, NWBase, get_first_enabled_output, is_visible_socket, viewer_socket_name) nw_check_not_empty, nw_check_selected, nw_check_active, nw_check_space_type,
nw_check_node_type, nw_check_visible_outputs, nw_check_viewer_node, NWBase,
get_first_enabled_output, is_visible_socket, viewer_socket_name)
class NWLazyMix(Operator, NWBase): class NWLazyMix(Operator, NWBase):
"""Add a Mix RGB/Shader node by interactively drawing lines between nodes""" """Add a Mix RGB/Shader node by interactively drawing lines between nodes"""
@ -37,6 +39,10 @@ class NWLazyMix(Operator, NWBase):
bl_label = "Mix Nodes" bl_label = "Mix Nodes"
bl_options = {'REGISTER', 'UNDO'} bl_options = {'REGISTER', 'UNDO'}
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_not_empty(cls, context)
def modal(self, context, event): def modal(self, context, event):
context.area.tag_redraw() context.area.tag_redraw()
nodes, links = get_nodes_links(context) nodes, links = get_nodes_links(context)
@ -115,6 +121,10 @@ class NWLazyConnect(Operator, NWBase):
bl_options = {'REGISTER', 'UNDO'} bl_options = {'REGISTER', 'UNDO'}
with_menu: BoolProperty() with_menu: BoolProperty()
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_not_empty(cls, context)
def modal(self, context, event): def modal(self, context, event):
context.area.tag_redraw() context.area.tag_redraw()
nodes, links = get_nodes_links(context) nodes, links = get_nodes_links(context)
@ -244,10 +254,10 @@ class NWDeleteUnused(Operator, NWBase):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
"""Disabled for custom nodes as we do not know which nodes are supported.""" """Disabled for custom nodes as we do not know which nodes are supported."""
return (nw_check(context) return (nw_check(cls, context)
and nw_check_space_type(cls, context, 'ShaderNodeTree', 'CompositorNodeTree', and nw_check_not_empty(cls, context)
'TextureNodeTree', 'GeometryNodeTree') and nw_check_space_type(cls, context, {'ShaderNodeTree', 'CompositorNodeTree',
and context.space_data.node_tree.nodes) 'TextureNodeTree', 'GeometryNodeTree'}))
def execute(self, context): def execute(self, context):
nodes, links = get_nodes_links(context) nodes, links = get_nodes_links(context)
@ -334,7 +344,7 @@ class NWSwapLinks(Operator, NWBase):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return nw_check(context) and context.selected_nodes and len(context.selected_nodes) <= 2 return nw_check(cls, context) and nw_check_selected(cls, context, max=2)
def execute(self, context): def execute(self, context):
nodes, links = get_nodes_links(context) nodes, links = get_nodes_links(context)
@ -448,8 +458,7 @@ class NWResetBG(Operator, NWBase):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return (nw_check(context) return nw_check(cls, context) and nw_check_space_type(cls, context, {'CompositorNodeTree'})
and nw_check_space_type(cls, context, 'CompositorNodeTree'))
def execute(self, context): def execute(self, context):
context.space_data.backdrop_zoom = 1 context.space_data.backdrop_zoom = 1
@ -468,8 +477,7 @@ class NWAddAttrNode(Operator, NWBase):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return (nw_check(context) return nw_check(cls, context) and nw_check_space_type(cls, context, {'ShaderNodeTree'})
and nw_check_space_type(cls, context, 'ShaderNodeTree'))
def execute(self, context): def execute(self, context):
bpy.ops.node.add_node('INVOKE_DEFAULT', use_transform=True, type="ShaderNodeAttribute") bpy.ops.node.add_node('INVOKE_DEFAULT', use_transform=True, type="ShaderNodeAttribute")
@ -496,10 +504,8 @@ class NWPreviewNode(Operator, NWBase):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
"""Already implemented natively for compositing nodes.""" """Already implemented natively for compositing nodes."""
return (nw_check(context) return (nw_check(cls, context)
and nw_check_space_type(cls, context, 'ShaderNodeTree', 'GeometryNodeTree') and nw_check_space_type(cls, context, {'ShaderNodeTree', 'GeometryNodeTree'}))
and (not context.active_node
or context.active_node.type not in {"OUTPUT_MATERIAL", "OUTPUT_WORLD"}))
@staticmethod @staticmethod
def get_output_sockets(node_tree): def get_output_sockets(node_tree):
@ -803,6 +809,11 @@ class NWPreviewNode(Operator, NWBase):
active, output_node, base_node_tree == active_tree, 'SHADER' active, output_node, base_node_tree == active_tree, 'SHADER'
) )
# Cancel if no socket was found. This can happen for group input
# nodes with only a virtual socket output.
if active_node_socket_index is None:
return {'CANCELLED'}
node_output = active.outputs[active_node_socket_index] node_output = active.outputs[active_node_socket_index]
if node_output.name == "Volume": if node_output.name == "Volume":
output_node_socket_index = 1 output_node_socket_index = 1
@ -895,11 +906,10 @@ class NWReloadImages(Operator):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return (nw_check(context) """Disabled for custom nodes."""
and nw_check_space_type(cls, context, 'ShaderNodeTree', 'CompositorNodeTree', return (nw_check(cls, context)
'TextureNodeTree', 'GeometryNodeTree') and nw_check_space_type(cls, context, {'ShaderNodeTree', 'CompositorNodeTree',
and context.active_node is not None 'TextureNodeTree', 'GeometryNodeTree'}))
and any(is_visible_socket(out) for out in context.active_node.outputs))
def execute(self, context): def execute(self, context):
nodes, links = get_nodes_links(context) nodes, links = get_nodes_links(context)
@ -1025,9 +1035,10 @@ class NWMergeNodes(Operator, NWBase):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return (nw_check(context) return (nw_check(cls, context)
and nw_check_space_type(cls, context, 'ShaderNodeTree', 'CompositorNodeTree', and nw_check_space_type(cls, context, {'ShaderNodeTree', 'CompositorNodeTree',
'TextureNodeTree', 'GeometryNodeTree')) 'TextureNodeTree', 'GeometryNodeTree'})
and nw_check_selected(cls, context))
def execute(self, context): def execute(self, context):
settings = context.preferences.addons[__package__].preferences settings = context.preferences.addons[__package__].preferences
@ -1348,9 +1359,10 @@ class NWBatchChangeNodes(Operator, NWBase):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return (nw_check(context) return (nw_check(cls, context)
and nw_check_space_type(cls, context, 'ShaderNodeTree', 'CompositorNodeTree', and nw_check_space_type(cls, context, {'ShaderNodeTree', 'CompositorNodeTree',
'TextureNodeTree', 'GeometryNodeTree')) 'TextureNodeTree', 'GeometryNodeTree'})
and nw_check_selected(cls, context))
def execute(self, context): def execute(self, context):
blend_type = self.blend_type blend_type = self.blend_type
@ -1404,6 +1416,10 @@ class NWChangeMixFactor(Operator, NWBase):
bl_description = "Change Factors of Mix Nodes and Mix Shader Nodes" bl_description = "Change Factors of Mix Nodes and Mix Shader Nodes"
bl_options = {'REGISTER', 'UNDO'} bl_options = {'REGISTER', 'UNDO'}
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_selected(cls, context)
# option: Change factor. # option: Change factor.
# If option is 1.0 or 0.0 - set to 1.0 or 0.0 # If option is 1.0 or 0.0 - set to 1.0 or 0.0
# Else - change factor by option value. # Else - change factor by option value.
@ -1437,24 +1453,15 @@ class NWCopySettings(Operator, NWBase):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return (nw_check(context) return (nw_check(cls, context)
and context.active_node is not None and nw_check_active(cls, context)
and context.active_node.type != 'FRAME') and nw_check_selected(cls, context, min=2)
and nw_check_node_type(cls, context, 'FRAME', invert=True))
def execute(self, context): def execute(self, context):
node_active = context.active_node node_active = context.active_node
node_selected = context.selected_nodes node_selected = context.selected_nodes
# Error handling
if not (len(node_selected) > 1):
self.report({'ERROR'}, "2 nodes must be selected at least")
return {'CANCELLED'}
# Check if active node is in the selection
selected_node_names = [n.name for n in node_selected] selected_node_names = [n.name for n in node_selected]
if node_active.name not in selected_node_names:
self.report({'ERROR'}, "No active node")
return {'CANCELLED'}
# Get nodes in selection by type # Get nodes in selection by type
valid_nodes = [n for n in node_selected if n.type == node_active.type] valid_nodes = [n for n in node_selected if n.type == node_active.type]
@ -1530,7 +1537,7 @@ class NWCopySettings(Operator, NWBase):
new_node.location = node_loc new_node.location = node_loc
for str_from, str_to in reconnections: for str_from, str_to in reconnections:
node_tree.connect_sockets(eval(str_from), eval(str_to)) connect_sockets(eval(str_from), eval(str_to))
success_names.append(new_node.name) success_names.append(new_node.name)
@ -1548,6 +1555,7 @@ class NWCopyLabel(Operator, NWBase):
bl_idname = "node.nw_copy_label" bl_idname = "node.nw_copy_label"
bl_label = "Copy Label" bl_label = "Copy Label"
bl_options = {'REGISTER', 'UNDO'} bl_options = {'REGISTER', 'UNDO'}
bl_description = "Copy label from active to selected nodes"
option: EnumProperty( option: EnumProperty(
name="option", name="option",
@ -1559,6 +1567,10 @@ class NWCopyLabel(Operator, NWBase):
) )
) )
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_selected(cls, context, min=2)
def execute(self, context): def execute(self, context):
nodes, links = get_nodes_links(context) nodes, links = get_nodes_links(context)
option = self.option option = self.option
@ -1592,9 +1604,14 @@ class NWClearLabel(Operator, NWBase):
bl_idname = "node.nw_clear_label" bl_idname = "node.nw_clear_label"
bl_label = "Clear Label" bl_label = "Clear Label"
bl_options = {'REGISTER', 'UNDO'} bl_options = {'REGISTER', 'UNDO'}
bl_description = "Clear labels on selected nodes"
option: BoolProperty() option: BoolProperty()
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_selected(cls, context)
def execute(self, context): def execute(self, context):
nodes, links = get_nodes_links(context) nodes, links = get_nodes_links(context)
for node in [n for n in nodes if n.select]: for node in [n for n in nodes if n.select]:
@ -1610,7 +1627,7 @@ class NWClearLabel(Operator, NWBase):
class NWModifyLabels(Operator, NWBase): class NWModifyLabels(Operator, NWBase):
"""Modify Labels of all selected nodes""" """Modify labels of all selected nodes"""
bl_idname = "node.nw_modify_labels" bl_idname = "node.nw_modify_labels"
bl_label = "Modify Labels" bl_label = "Modify Labels"
bl_options = {'REGISTER', 'UNDO'} bl_options = {'REGISTER', 'UNDO'}
@ -1628,6 +1645,10 @@ class NWModifyLabels(Operator, NWBase):
name="Replace with" name="Replace with"
) )
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_selected(cls, context)
def execute(self, context): def execute(self, context):
nodes, links = get_nodes_links(context) nodes, links = get_nodes_links(context)
for node in [n for n in nodes if n.select]: for node in [n for n in nodes if n.select]:
@ -1655,8 +1676,9 @@ class NWAddTextureSetup(Operator, NWBase):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return (nw_check(context) return (nw_check(cls, context)
and nw_check_space_type(cls, context, 'ShaderNodeTree')) and nw_check_space_type(cls, context, {'ShaderNodeTree'})
and nw_check_selected(cls, context))
def execute(self, context): def execute(self, context):
nodes, links = get_nodes_links(context) nodes, links = get_nodes_links(context)
@ -1758,23 +1780,22 @@ class NWAddPrincipledSetup(Operator, NWBase, ImportHelper):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return (nw_check(context) return (nw_check(cls, context)
and nw_check_space_type(cls, context, 'ShaderNodeTree')) and nw_check_active(cls, context)
and nw_check_space_type(cls, context, {'ShaderNodeTree'})
and nw_check_node_type(cls, context, 'BSDF_PRINCIPLED'))
def execute(self, context): def execute(self, context):
# Check if everything is ok # Check if everything is ok
if not self.directory: if not self.directory:
self.report({'INFO'}, 'No Folder Selected') self.report({'INFO'}, 'No folder selected')
return {'CANCELLED'} return {'CANCELLED'}
if not self.files[:]: if not self.files[:]:
self.report({'INFO'}, 'No Files Selected') self.report({'INFO'}, 'No files selected')
return {'CANCELLED'} return {'CANCELLED'}
nodes, links = get_nodes_links(context) nodes, links = get_nodes_links(context)
active_node = nodes.active active_node = nodes.active
if not (active_node and active_node.bl_idname == 'ShaderNodeBsdfPrincipled'):
self.report({'INFO'}, 'Select Principled BSDF')
return {'CANCELLED'}
# Filter textures names for texturetypes in filenames # Filter textures names for texturetypes in filenames
# [Socket Name, [abbreviations and keyword list], Filename placeholder] # [Socket Name, [abbreviations and keyword list], Filename placeholder]
@ -2032,85 +2053,75 @@ class NWAddReroutes(Operator, NWBase):
] ]
) )
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_selected(cls, context)
def execute(self, context): def execute(self, context):
tree_type = context.space_data.node_tree.type nodes, _links = get_nodes_links(context)
option = self.option post_select = [] # Nodes to be selected after execution.
nodes, links = get_nodes_links(context) y_offset = -22.0
# output valid when option is 'all' or when 'loose' output has no links
valid = False # Create reroutes and recreate links.
post_select = [] # nodes to be selected after execution
# create reroutes and recreate links
for node in [n for n in nodes if n.select]: for node in [n for n in nodes if n.select]:
if node.outputs: if not node.outputs:
x = node.location.x continue
x = node.location.x + node.width + 20.0
y = node.location.y y = node.location.y
width = node.width new_node_reroutes = []
# unhide 'REROUTE' nodes to avoid issues with location.y
# Unhide 'REROUTE' nodes to avoid issues with location.y
if node.type == 'REROUTE': if node.type == 'REROUTE':
node.hide = False node.hide = False
# Hack needed to calculate real width else:
if node.hide:
bpy.ops.node.select_all(action='DESELECT')
helper = nodes.new('NodeReroute')
helper.select = True
node.select = True
# resize node and helper to zero. Then check locations to calculate width
bpy.ops.transform.resize(value=(0.0, 0.0, 0.0))
width = 2.0 * (helper.location.x - node.location.x)
# restore node location
node.location = x, y
# delete helper
node.select = False
# only helper is selected now
bpy.ops.node.delete()
x = node.location.x + width + 20.0
if node.type != 'REROUTE':
y -= 35.0 y -= 35.0
y_offset = -22.0
loc = x, y reroutes_count = 0 # Will be used when aligning reroutes added to hidden nodes.
reroutes_count = 0 # will be used when aligning reroutes added to hidden nodes
for out_i, output in enumerate(node.outputs): for out_i, output in enumerate(node.outputs):
pass_used = False # initial value to be analyzed if 'R_LAYERS' if output.is_unavailable:
# if node != 'R_LAYERS' - "pass_used" not needed, so set it to True continue
if node.type != 'R_LAYERS': if node.type == 'R_LAYERS' and output.name != 'Alpha':
pass_used = True # If 'R_LAYERS' check if output is used in render pass.
else: # if 'R_LAYERS' check if output represent used render pass # If output is "Alpha", assume it's used. Not available in passes.
node_scene = node.scene node_scene = node.scene
node_layer = node.layer node_layer = node.layer
# If output - "Alpha" is analyzed - assume it's used. Not represented in passes.
if output.name == 'Alpha':
pass_used = True
else:
# check entries in global 'rl_outputs' variable
for rlo in rl_outputs: for rlo in rl_outputs:
# Check entries in global 'rl_outputs' variable.
if output.name in {rlo.output_name, rlo.exr_output_name}: if output.name in {rlo.output_name, rlo.exr_output_name}:
pass_used = getattr(node_scene.view_layers[node_layer], rlo.render_pass) if not getattr(node_scene.view_layers[node_layer], rlo.render_pass):
break continue
if pass_used: # Output is valid when option is 'all' or when 'loose' output has no links.
valid = ((option == 'ALL') or valid = ((self.option == 'ALL') or
(option == 'LOOSE' and not output.links) or (self.option == 'LOOSE' and not output.links) or
(option == 'LINKED' and output.links)) (self.option == 'LINKED' and output.links))
# Add reroutes only if valid, but offset location in all cases.
if valid: if valid:
# Add reroutes only if valid.
n = nodes.new('NodeReroute') n = nodes.new('NodeReroute')
nodes.active = n nodes.active = n
for link in output.links: for link in output.links:
connect_sockets(n.outputs[0], link.to_socket) connect_sockets(n.outputs[0], link.to_socket)
connect_sockets(output, n.inputs[0]) connect_sockets(output, n.inputs[0])
n.location = loc n.location = x, y
new_node_reroutes.append(n)
post_select.append(n) post_select.append(n)
if valid or not output.hide:
# Offset reroutes for all outputs, except hidden ones.
reroutes_count += 1 reroutes_count += 1
y += y_offset y += y_offset
loc = x, y
# disselect the node so that after execution of script only newly created nodes are selected # Nicer reroutes distribution along y when node.hide.
node.select = False
# nicer reroutes distribution along y when node.hide
if node.hide: if node.hide:
y_translate = reroutes_count * y_offset / 2.0 - y_offset - 35.0 y_translate = reroutes_count * y_offset / 2.0 - y_offset - 35.0
for reroute in [r for r in nodes if r.select]: for reroute in new_node_reroutes:
reroute.location.y -= y_translate reroute.location.y -= y_translate
for node in post_select:
node.select = True if post_select:
for node in nodes:
# Select only newly created nodes.
node.select = node in post_select
else:
# No new nodes were created.
return {'CANCELLED'}
return {'FINISHED'} return {'FINISHED'}
@ -2127,9 +2138,9 @@ class NWLinkActiveToSelected(Operator, NWBase):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return (nw_check(context) return (nw_check(cls, context)
and context.active_node is not None and nw_check_active(cls, context)
and context.active_node.select) and nw_check_selected(cls, context, min=2))
def execute(self, context): def execute(self, context):
nodes, links = get_nodes_links(context) nodes, links = get_nodes_links(context)
@ -2200,6 +2211,10 @@ class NWAlignNodes(Operator, NWBase):
bl_options = {'REGISTER', 'UNDO'} bl_options = {'REGISTER', 'UNDO'}
margin: IntProperty(name='Margin', default=50, description='The amount of space between nodes') margin: IntProperty(name='Margin', default=50, description='The amount of space between nodes')
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_not_empty(cls, context)
def execute(self, context): def execute(self, context):
nodes, links = get_nodes_links(context) nodes, links = get_nodes_links(context)
margin = self.margin margin = self.margin
@ -2278,6 +2293,10 @@ class NWSelectParentChildren(Operator, NWBase):
) )
) )
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_selected(cls, context)
def execute(self, context): def execute(self, context):
nodes, links = get_nodes_links(context) nodes, links = get_nodes_links(context)
option = self.option option = self.option
@ -2302,6 +2321,10 @@ class NWDetachOutputs(Operator, NWBase):
bl_label = "Detach Outputs" bl_label = "Detach Outputs"
bl_options = {'REGISTER', 'UNDO'} bl_options = {'REGISTER', 'UNDO'}
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_selected(cls, context)
def execute(self, context): def execute(self, context):
nodes, links = get_nodes_links(context) nodes, links = get_nodes_links(context)
selected = context.selected_nodes selected = context.selected_nodes
@ -2327,11 +2350,11 @@ class NWLinkToOutputNode(Operator):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
"""Disabled for custom nodes as we do not know which nodes are outputs.""" """Disabled for custom nodes as we do not know which nodes are outputs."""
return (nw_check(context) return (nw_check(cls, context)
and nw_check_space_type(cls, context, 'ShaderNodeTree', 'CompositorNodeTree', and nw_check_space_type(cls, context, {'ShaderNodeTree', 'CompositorNodeTree',
'TextureNodeTree', 'GeometryNodeTree') 'TextureNodeTree', 'GeometryNodeTree'})
and context.active_node is not None and nw_check_active(cls, context)
and any(is_visible_socket(out) for out in context.active_node.outputs)) and nw_check_visible_outputs(cls, context))
def execute(self, context): def execute(self, context):
nodes, links = get_nodes_links(context) nodes, links = get_nodes_links(context)
@ -2451,6 +2474,11 @@ class NWAddSequence(Operator, NWBase, ImportHelper):
default=True default=True
) )
@classmethod
def poll(cls, context):
return (nw_check(cls, context)
and nw_check_space_type(cls, context, {'ShaderNodeTree', 'CompositorNodeTree'}))
def draw(self, context): def draw(self, context):
layout = self.layout layout = self.layout
layout.alignment = 'LEFT' layout.alignment = 'LEFT'
@ -2563,6 +2591,11 @@ class NWAddMultipleImages(Operator, NWBase, ImportHelper):
options={'HIDDEN', 'SKIP_SAVE'} options={'HIDDEN', 'SKIP_SAVE'}
) )
@classmethod
def poll(cls, context):
return (nw_check(cls, context)
and nw_check_space_type(cls, context, {'ShaderNodeTree', 'CompositorNodeTree'}))
def execute(self, context): def execute(self, context):
nodes, links = get_nodes_links(context) nodes, links = get_nodes_links(context)
@ -2612,8 +2645,8 @@ class NWViewerFocus(bpy.types.Operator):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return (nw_check(context) return (nw_check(cls, context)
and nw_check_space_type(cls, context, 'CompositorNodeTree')) and nw_check_space_type(cls, context, {'CompositorNodeTree'}))
def execute(self, context): def execute(self, context):
return {'FINISHED'} return {'FINISHED'}
@ -2683,12 +2716,9 @@ class NWSaveViewer(bpy.types.Operator, ExportHelper):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return (nw_check(context) return (nw_check(cls, context)
and nw_check_space_type(cls, context, 'CompositorNodeTree') and nw_check_space_type(cls, context, {'CompositorNodeTree'})
and any(img.source == 'VIEWER' and nw_check_viewer_node(cls))
and img.render_slots == 0
for img in bpy.data.images)
and sum(bpy.data.images["Viewer Node"].size) > 0) # False if not connected or connected but no image
def execute(self, context): def execute(self, context):
fp = self.filepath fp = self.filepath
@ -2726,19 +2756,15 @@ class NWResetNodes(bpy.types.Operator):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
space = context.space_data return (nw_check(cls, context)
return space.type == 'NODE_EDITOR' and nw_check_selected(cls, context)
and nw_check_active(cls, context))
def execute(self, context): def execute(self, context):
node_active = context.active_node node_active = context.active_node
node_selected = context.selected_nodes node_selected = context.selected_nodes
node_ignore = ["FRAME", "REROUTE", "GROUP", "SIMULATION_INPUT", "SIMULATION_OUTPUT"] node_ignore = ["FRAME", "REROUTE", "GROUP", "SIMULATION_INPUT", "SIMULATION_OUTPUT"]
# Check if one node is selected at least
if not (len(node_selected) > 0):
self.report({'ERROR'}, "1 node must be selected at least")
return {'CANCELLED'}
active_node_name = node_active.name if node_active.select else None active_node_name = node_active.name if node_active.select else None
valid_nodes = [n for n in node_selected if n.type not in node_ignore] valid_nodes = [n for n in node_selected if n.type not in node_ignore]
View File
@ -4,7 +4,7 @@
import bpy import bpy
from bpy_extras.node_utils import connect_sockets from bpy_extras.node_utils import connect_sockets
from math import hypot from math import hypot, inf
def force_update(context): def force_update(context):
@ -200,23 +200,85 @@ def get_output_location(tree):
return loc_x, loc_y return loc_x, loc_y
def nw_check(context): def nw_check(cls, context):
space = context.space_data space = context.space_data
if space.type != 'NODE_EDITOR':
return (space.type == 'NODE_EDITOR' cls.poll_message_set("Current editor is not a node editor.")
and space.node_tree is not None return False
and space.node_tree.library is None) if space.node_tree is None:
cls.poll_message_set("No node tree was found in the current node editor.")
return False
if space.node_tree.library is not None:
cls.poll_message_set("Current node tree is linked from another .blend file.")
return False
return True
def nw_check_space_type(cls, context, *args): def nw_check_not_empty(cls, context):
if context.space_data.tree_type not in args: if not context.space_data.node_tree.nodes:
tree_types_str = ", ".join(t.split('NodeTree')[0].lower() for t in sorted(args)) cls.poll_message_set("Current node tree does not contain any nodes.")
return False
return True
def nw_check_active(cls, context):
if context.active_node is None or not context.active_node.select:
cls.poll_message_set("No active node.")
return False
return True
def nw_check_selected(cls, context, min=1, max=inf):
num_selected = len(context.selected_nodes)
if num_selected < min:
if min > 1:
cls.poll_message_set(f"At least {min} nodes must be selected.")
else:
cls.poll_message_set(f"At least {min} node must be selected.")
return False
if num_selected > max:
cls.poll_message_set(f"{num_selected} nodes are selected, but this operator can only work on {max}.")
return False
return True
def nw_check_space_type(cls, context, types):
if context.space_data.tree_type not in types:
tree_types_str = ", ".join(t.split('NodeTree')[0].lower() for t in sorted(types))
cls.poll_message_set("Current node tree type not supported.\n" cls.poll_message_set("Current node tree type not supported.\n"
"Should be one of " + tree_types_str + ".") "Should be one of " + tree_types_str + ".")
return False return False
return True return True
def nw_check_node_type(cls, context, type, invert=False):
if invert and context.active_node.type == type:
cls.poll_message_set(f"Active node should be not of type {type}.")
return False
elif not invert and context.active_node.type != type:
cls.poll_message_set(f"Active node should be of type {type}.")
return False
return True
def nw_check_visible_outputs(cls, context):
if not any(is_visible_socket(out) for out in context.active_node.outputs):
cls.poll_message_set("Current node has no visible outputs.")
return False
return True
def nw_check_viewer_node(cls):
for img in bpy.data.images:
# False if not connected or connected but no image
if (img.source == 'VIEWER'
and len(img.render_slots) == 0
and sum(img.size) > 0):
return True
cls.poll_message_set("Viewer image not found.")
return False
def get_first_enabled_output(node): def get_first_enabled_output(node):
for output in node.outputs: for output in node.outputs:
if output.enabled: if output.enabled:
@ -232,4 +294,13 @@ def is_visible_socket(socket):
class NWBase: class NWBase:
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return nw_check(context) return nw_check(cls, context)
class NWBaseMenu:
@classmethod
def poll(cls, context):
space = context.space_data
return (space.type == 'NODE_EDITOR'
and space.node_tree is not None
and space.node_tree.library is None)
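The helpers above are meant to be chained inside an operator's poll(), so that the first failing check also sets the tooltip for the disabled operator via cls.poll_message_set(). A minimal sketch of how an operator might compose them, assuming it sits in the same module as the helpers; the operator class itself is hypothetical, only the helper names come from this commit:

from bpy.types import Operator

class NWExamplePollOperator(Operator, NWBase):
    # Hypothetical operator, for illustration only.
    bl_idname = "node.nw_example_poll"
    bl_label = "Example Poll"

    @classmethod
    def poll(cls, context):
        # Each helper returns False after calling cls.poll_message_set(),
        # so the first failing check provides the reason shown in the tooltip.
        return (nw_check(cls, context)
                and nw_check_space_type(cls, context, {'ShaderNodeTree'})
                and nw_check_selected(cls, context, min=2))

    def execute(self, context):
        return {'FINISHED'}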
View File
@ -15,24 +15,30 @@ bl_info = {
"category": "Object" "category": "Object"
} }
if "bpy" in locals():
import importlib
importlib.reload(carver_utils)
importlib.reload(carver_profils)
importlib.reload(carver_draw)
importlib.reload(carver_operator)
import bpy import bpy
import imp
from bpy.props import ( from bpy.props import (
BoolProperty, BoolProperty,
StringProperty, StringProperty,
IntProperty IntProperty
) )
from bpy.types import (AddonPreferences, WorkSpaceTool) from bpy.types import (AddonPreferences, WorkSpaceTool)
from bpy.utils.toolsystem import ToolDef from bpy.utils.toolsystem import ToolDef
from . import carver_utils from . import (
imp.reload(carver_utils) carver_utils,
from . import carver_profils carver_profils,
imp.reload(carver_profils) carver_draw,
from . import carver_draw carver_operator,
imp.reload(carver_draw) )
from . import carver_operator
imp.reload(carver_operator)
# TODO : Create an icon for Carver MT # TODO : Create an icon for Carver MT
# Add an icon in the toolbar # Add an icon in the toolbar
View File
@ -504,14 +504,14 @@ class RigifyBoneCollectionReference(bpy.types.PropertyGroup):
arm = self.id_data.data arm = self.id_data.data
if new_coll := arm.collections.get(new_val): if new_coll := arm.collections_all.get(new_val):
self.set_collection(new_coll) self.set_collection(new_coll)
else: else:
self.find_collection(update=True) self.find_collection(update=True)
def _name_search(self, _context, _edit): def _name_search(self, _context, _edit):
arm = self.id_data.data arm = self.id_data.data
return [coll.name for coll in arm.collections] return [coll.name for coll in utils.misc.flatten_children(arm.collections)]
name: StringProperty( name: StringProperty(
name="Collection Name", description="Name of the referenced bone collection", name="Collection Name", description="Name of the referenced bone collection",
View File
@ -17,7 +17,8 @@ from .utils.naming import (ORG_PREFIX, MCH_PREFIX, DEF_PREFIX, ROOT_NAME, make_o
from .utils.widgets import WGT_PREFIX, WGT_GROUP_PREFIX from .utils.widgets import WGT_PREFIX, WGT_GROUP_PREFIX
from .utils.widgets_special import create_root_widget from .utils.widgets_special import create_root_widget
from .utils.mechanism import refresh_all_drivers from .utils.mechanism import refresh_all_drivers
from .utils.misc import select_object, ArmatureObject, verify_armature_obj, choose_next_uid from .utils.misc import select_object, ArmatureObject, verify_armature_obj, choose_next_uid, flatten_children,\
flatten_parents
from .utils.collections import (ensure_collection, list_layer_collections, from .utils.collections import (ensure_collection, list_layer_collections,
filter_layer_collections_by_object) filter_layer_collections_by_object)
from .utils.rig import get_rigify_type, get_rigify_target_rig,\ from .utils.rig import get_rigify_type, get_rigify_target_rig,\
@ -134,7 +135,7 @@ class Generator(base_generate.BaseGenerator):
def __save_rig_data(self, obj: ArmatureObject, obj_found: bool): def __save_rig_data(self, obj: ArmatureObject, obj_found: bool):
if obj_found: if obj_found:
self.saved_visible_layers = {coll.name: coll.is_visible for coll in obj.data.collections} self.saved_visible_layers = {coll.name: coll.is_visible for coll in obj.data.collections_all}
self.artifacts.generate_init_existing(obj) self.artifacts.generate_init_existing(obj)
@ -216,14 +217,14 @@ class Generator(base_generate.BaseGenerator):
self.widget_mirror_mesh[mid_name] = widget.data self.widget_mirror_mesh[mid_name] = widget.data
def ensure_root_bone_collection(self): def ensure_root_bone_collection(self):
collections = self.metarig.data.collections collections = self.metarig.data.collections_all
validate_collection_references(self.metarig) validate_collection_references(self.metarig)
coll = collections.get(ROOT_COLLECTION) coll = collections.get(ROOT_COLLECTION)
if not coll: if not coll:
coll = collections.new(ROOT_COLLECTION) coll = self.metarig.data.collections.new(ROOT_COLLECTION)
if coll.rigify_ui_row <= 0: if coll.rigify_ui_row <= 0:
coll.rigify_ui_row = 2 + choose_next_uid(collections, 'rigify_ui_row', min_value=1) coll.rigify_ui_row = 2 + choose_next_uid(collections, 'rigify_ui_row', min_value=1)
@ -240,7 +241,7 @@ class Generator(base_generate.BaseGenerator):
bpy.ops.object.mode_set(mode='OBJECT') bpy.ops.object.mode_set(mode='OBJECT')
# Remove all bone collections from the target armature. # Remove all bone collections from the target armature.
for coll in list(obj.data.collections): for coll in list(obj.data.collections_all):
obj.data.collections.remove(coll) obj.data.collections.remove(coll)
# Select and duplicate metarig # Select and duplicate metarig
@ -355,7 +356,7 @@ class Generator(base_generate.BaseGenerator):
pb.lock_scale = (True, True, True) pb.lock_scale = (True, True, True)
def ensure_bone_collection(self, name): def ensure_bone_collection(self, name):
coll = self.obj.data.collections.get(name) coll = self.obj.data.collections_all.get(name)
if not coll: if not coll:
coll = self.obj.data.collections.new(name) coll = self.obj.data.collections.new(name)
@ -435,10 +436,16 @@ class Generator(base_generate.BaseGenerator):
bone.custom_shape = obj_table[wgt_name] bone.custom_shape = obj_table[wgt_name]
def __compute_visible_layers(self): def __compute_visible_layers(self):
has_ui_buttons = set().union(*[
{p.name for p in flatten_parents(coll)}
for coll in self.obj.data.collections_all
if coll.rigify_ui_row > 0
])
# Hide all layers without UI buttons # Hide all layers without UI buttons
for coll in self.obj.data.collections: for coll in self.obj.data.collections_all:
user_visible = self.saved_visible_layers.get(coll.name, coll.is_visible) user_visible = self.saved_visible_layers.get(coll.name, coll.is_visible)
coll.is_visible = user_visible and coll.rigify_ui_row > 0 coll.is_visible = user_visible and coll.name in has_ui_buttons
def generate(self): def generate(self):
context = self.context context = self.context
@ -709,7 +716,7 @@ def create_selection_sets(obj: ArmatureObject, _metarig: ArmatureObject):
obj.selection_sets.clear() # noqa obj.selection_sets.clear() # noqa
for coll in obj.data.collections: for coll in obj.data.collections_all:
if not coll.rigify_sel_set: if not coll.rigify_sel_set:
continue continue
@ -725,7 +732,7 @@ def apply_bone_colors(obj, metarig, priorities: Optional[dict[str, dict[str, flo
collection_table: dict[str, tuple[int, 'RigifyColorSet']] = { collection_table: dict[str, tuple[int, 'RigifyColorSet']] = {
coll.name: (i, color_map[coll.rigify_color_set_id]) coll.name: (i, color_map[coll.rigify_color_set_id])
for i, coll in enumerate(obj.data.collections) for i, coll in enumerate(flatten_children(obj.data.collections))
if coll.rigify_color_set_id in color_map if coll.rigify_color_set_id in color_map
} }
View File
@ -48,7 +48,7 @@ def create(obj): # noqa
bone_collections = {} bone_collections = {}
for bcoll in list(arm.collections): for bcoll in list(arm.collections_all):
arm.collections.remove(bcoll) arm.collections.remove(bcoll)
def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0): def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0):
View File
@ -48,7 +48,7 @@ def create(obj): # noqa
bone_collections = {} bone_collections = {}
for bcoll in list(arm.collections): for bcoll in list(arm.collections_all):
arm.collections.remove(bcoll) arm.collections.remove(bcoll)
def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0): def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0):
View File
@ -48,7 +48,7 @@ def create(obj): # noqa
bone_collections = {} bone_collections = {}
for bcoll in list(arm.collections): for bcoll in list(arm.collections_all):
arm.collections.remove(bcoll) arm.collections.remove(bcoll)
def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0): def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0):
View File
@ -48,7 +48,7 @@ def create(obj): # noqa
bone_collections = {} bone_collections = {}
for bcoll in list(arm.collections): for bcoll in list(arm.collections_all):
arm.collections.remove(bcoll) arm.collections.remove(bcoll)
def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0): def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0):
View File
@ -48,7 +48,7 @@ def create(obj): # noqa
bone_collections = {} bone_collections = {}
for bcoll in list(arm.collections): for bcoll in list(arm.collections_all):
arm.collections.remove(bcoll) arm.collections.remove(bcoll)
def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0): def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0):
View File
@ -48,7 +48,7 @@ def create(obj): # noqa
bone_collections = {} bone_collections = {}
for bcoll in list(arm.collections): for bcoll in list(arm.collections_all):
arm.collections.remove(bcoll) arm.collections.remove(bcoll)
def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0): def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0):
View File
@ -48,7 +48,7 @@ def create(obj): # noqa
bone_collections = {} bone_collections = {}
for bcoll in list(arm.collections): for bcoll in list(arm.collections_all):
arm.collections.remove(bcoll) arm.collections.remove(bcoll)
def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0): def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0):
View File
@ -48,7 +48,7 @@ def create(obj): # noqa
bone_collections = {} bone_collections = {}
for bcoll in list(arm.collections): for bcoll in list(arm.collections_all):
arm.collections.remove(bcoll) arm.collections.remove(bcoll)
def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0): def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0):
View File
@ -96,6 +96,13 @@ def ternarySearch(f, left, right, absolutePrecision):
left = leftThird left = leftThird
else: else:
right = rightThird right = rightThird
def flatten_children(iterable):
"""Enumerate the iterator items as well as their children in the tree order."""
for item in iterable:
yield item
yield from flatten_children(item.children)
''' '''
UTILITIES_FUNC_COMMON_IK_FK = [''' UTILITIES_FUNC_COMMON_IK_FK = ['''
@ -902,7 +909,7 @@ class RigLayers(bpy.types.Panel):
def draw(self, context): def draw(self, context):
layout = self.layout layout = self.layout
row_table = collections.defaultdict(list) row_table = collections.defaultdict(list)
for coll in context.active_object.data.collections: for coll in flatten_children(context.active_object.data.collections):
row_id = coll.get('rigify_ui_row', 0) row_id = coll.get('rigify_ui_row', 0)
if row_id > 0: if row_id > 0:
row_table[row_id].append(coll) row_table[row_id].append(coll)
@ -913,7 +920,9 @@ class RigLayers(bpy.types.Panel):
if row_buttons: if row_buttons:
for coll in row_buttons: for coll in row_buttons:
title = coll.get('rigify_ui_title') or coll.name title = coll.get('rigify_ui_title') or coll.name
row.prop(coll, 'is_visible', toggle=True, text=title) row2 = row.row()
row2.active = coll.is_visible_ancestors
row2.prop(coll, 'is_visible', toggle=True, text=title)
else: else:
row.separator() row.separator()
''' '''
View File
@ -23,7 +23,7 @@ from .utils.widgets import write_widget
from .utils.naming import unique_name from .utils.naming import unique_name
from .utils.rig import upgrade_metarig_types, outdated_types, upgrade_metarig_layers, \ from .utils.rig import upgrade_metarig_types, outdated_types, upgrade_metarig_layers, \
is_valid_metarig, metarig_needs_upgrade is_valid_metarig, metarig_needs_upgrade
from .utils.misc import verify_armature_obj, ArmatureObject, IdPropSequence from .utils.misc import verify_armature_obj, ArmatureObject, IdPropSequence, flatten_children
from .rigs.utils import get_limb_generated_names from .rigs.utils import get_limb_generated_names
@ -243,6 +243,29 @@ class DATA_PT_rigify_samples(bpy.types.Panel):
# noinspection SpellCheckingInspection # noinspection SpellCheckingInspection
# noinspection PyPep8Naming # noinspection PyPep8Naming
class DATA_UL_rigify_bone_collections(UIList): class DATA_UL_rigify_bone_collections(UIList):
def filter_items(self, _context, data, propname):
assert propname == 'collections_all'
collections = data.collections_all
flags = []
# Filtering by name
if self.filter_name:
flags = bpy.types.UI_UL_list.filter_items_by_name(
self.filter_name, self.bitflag_filter_item, collections, "name")
if not flags:
flags = [self.bitflag_filter_item] * len(collections)
# Reorder by name.
if self.use_filter_sort_alpha:
indices = bpy.types.UI_UL_list.sort_items_by_name(collections, "name")
# Sort by tree order
else:
index_map = {c.name: i for i, c in enumerate(flatten_children(data.collections))}
indices = [index_map[c.name] for c in collections]
return flags, indices
def draw_item(self, _context, layout, armature, bcoll, _icon, _active_data, def draw_item(self, _context, layout, armature, bcoll, _icon, _active_data,
_active_prop_name, _index=0, _flt_flag=0): _active_prop_name, _index=0, _flt_flag=0):
active_bone = armature.edit_bones.active or armature.bones.active active_bone = armature.edit_bones.active or armature.bones.active
@ -290,9 +313,9 @@ class DATA_PT_rigify_collection_list(bpy.types.Panel):
row.template_list( row.template_list(
"DATA_UL_rigify_bone_collections", "DATA_UL_rigify_bone_collections",
"collections", "",
arm, arm,
"collections", "collections_all",
arm.collections, arm.collections,
"active_index", "active_index",
rows=(4 if active_coll else 1), rows=(4 if active_coll else 1),
@ -322,7 +345,7 @@ class DATA_PT_rigify_collection_list(bpy.types.Panel):
row.active = active_coll.rigify_ui_row > 0 # noqa row.active = active_coll.rigify_ui_row > 0 # noqa
row.prop(active_coll, "rigify_ui_title") row.prop(active_coll, "rigify_ui_title")
if ROOT_COLLECTION not in arm.collections: if ROOT_COLLECTION not in arm.collections_all:
layout.label(text=f"The '{ROOT_COLLECTION}' collection will be added upon generation", icon='INFO') layout.label(text=f"The '{ROOT_COLLECTION}' collection will be added upon generation", icon='INFO')
@ -337,11 +360,11 @@ class DATA_PT_rigify_collection_ui(bpy.types.Panel):
@classmethod @classmethod
def poll(cls, context): def poll(cls, context):
return is_valid_metarig(context) and len(verify_armature_obj(context.object).data.collections) return is_valid_metarig(context) and len(verify_armature_obj(context.object).data.collections_all)
@staticmethod @staticmethod
def draw_btn_block(arm: Armature, parent: UILayout, bcoll_id: int, loose=False): def draw_btn_block(arm: Armature, parent: UILayout, bcoll_id: int, loose=False):
bcoll = arm.collections[bcoll_id] bcoll = arm.collections_all[bcoll_id]
block = parent.row(align=True) block = parent.row(align=True)
if bcoll == arm.collections.active: if bcoll == arm.collections.active:
@ -364,8 +387,10 @@ class DATA_PT_rigify_collection_ui(bpy.types.Panel):
row_table = defaultdict(list) row_table = defaultdict(list)
has_buttons = False has_buttons = False
for i, bcoll in enumerate(arm.collections): index_map = {c.name: i for i, c in enumerate(arm.collections_all)}
row_table[bcoll.rigify_ui_row].append(i)
for bcoll in flatten_children(arm.collections):
row_table[bcoll.rigify_ui_row].append(index_map[bcoll.name])
if bcoll.rigify_ui_row > 0: if bcoll.rigify_ui_row > 0:
has_buttons = True has_buttons = True
@ -469,7 +494,7 @@ class DATA_OT_rigify_collection_set_ui_row(bpy.types.Operator):
obj = verify_armature_obj(context.object) obj = verify_armature_obj(context.object)
if self.select: if self.select:
obj.data.collections.active_index = self.index obj.data.collections.active_index = self.index
obj.data.collections[self.index].rigify_ui_row = self.row obj.data.collections_all[self.index].rigify_ui_row = self.row
return {'FINISHED'} return {'FINISHED'}
@ -495,7 +520,7 @@ class DATA_OT_rigify_collection_add_ui_row(bpy.types.Operator):
def execute(self, context): def execute(self, context):
obj = verify_armature_obj(context.object) obj = verify_armature_obj(context.object)
for coll in obj.data.collections: for coll in obj.data.collections_all:
if coll.rigify_ui_row >= self.row: if coll.rigify_ui_row >= self.row:
coll.rigify_ui_row += (1 if self.add else -1) coll.rigify_ui_row += (1 if self.add else -1)
return {'FINISHED'} return {'FINISHED'}
@ -716,7 +741,7 @@ class DATA_OT_rigify_color_set_remove(bpy.types.Operator):
rigify_colors.remove(self.idx) rigify_colors.remove(self.idx)
# set layers references to 0 # set layers references to 0
for coll in obj.data.collections: for coll in obj.data.collections_all:
idx = coll.rigify_color_set_id idx = coll.rigify_color_set_id
if idx == self.idx + 1: if idx == self.idx + 1:
@ -745,7 +770,7 @@ class DATA_OT_rigify_color_set_remove_all(bpy.types.Operator):
rigify_colors.remove(0) rigify_colors.remove(0)
# set layers references to 0 # set layers references to 0
for coll in obj.data.collections: for coll in obj.data.collections_all:
coll.rigify_color_set_id = 0 coll.rigify_color_set_id = 0
return {'FINISHED'} return {'FINISHED'}
@ -1025,7 +1050,7 @@ class Generate(bpy.types.Operator):
def execute(self, context): def execute(self, context):
metarig = verify_armature_obj(context.object) metarig = verify_armature_obj(context.object)
for bcoll in metarig.data.collections: for bcoll in metarig.data.collections_all:
if bcoll.rigify_ui_row > 0 and bcoll.name not in SPECIAL_COLLECTIONS: if bcoll.rigify_ui_row > 0 and bcoll.name not in SPECIAL_COLLECTIONS:
break break
else: else:
View File
@ -102,7 +102,7 @@ def ensure_collection_uid(bcoll: BoneCollection):
uid = zlib.adler32(bcoll.name.encode("utf-8")) & max_uid uid = zlib.adler32(bcoll.name.encode("utf-8")) & max_uid
# Ensure the uid is unique within the armature # Ensure the uid is unique within the armature
used_ids = set(coll.rigify_uid for coll in bcoll.id_data.collections) used_ids = set(coll.rigify_uid for coll in bcoll.id_data.collections_all)
while uid in used_ids: while uid in used_ids:
uid = random.randint(0, max_uid) uid = random.randint(0, max_uid)
@ -126,14 +126,14 @@ def resolve_collection_reference(obj: ArmatureObject, ref: Any, *,
arm = obj.data arm = obj.data
name = ref.get("name", "") name = ref.get("name", "")
name_coll = arm.collections.get(name) if name else None name_coll = arm.collections_all.get(name) if name else None
# First try an exact match of both name and uid # First try an exact match of both name and uid
if name_coll and name_coll.rigify_uid == uid: if name_coll and name_coll.rigify_uid == uid:
return name_coll return name_coll
# Then try searching by the uid # Then try searching by the uid
for coll in arm.collections: for coll in arm.collections_all:
if coll.rigify_uid == uid: if coll.rigify_uid == uid:
if update: if update:
ref["name"] = coll.name ref["name"] = coll.name
@ -194,7 +194,7 @@ def validate_collection_references(obj: ArmatureObject):
# Ensure uids are unique # Ensure uids are unique
known_uids = dict() known_uids = dict()
for bcoll in obj.data.collections: for bcoll in obj.data.collections_all:
uid = bcoll.rigify_uid uid = bcoll.rigify_uid
if uid < 0: if uid < 0:
continue continue
View File
@ -172,6 +172,20 @@ def find_index(sequence, item, default=None):
return default return default
def flatten_children(iterable: typing.Iterable):
"""Enumerate the iterator items as well as their children in the tree order."""
for item in iterable:
yield item
yield from flatten_children(item.children)
def flatten_parents(item):
"""Enumerate the item and all its parents."""
while item:
yield item
item = item.parent
############################################## ##############################################
# Lazy references # Lazy references
############################################## ##############################################
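flatten_children() walks an iterable of tree items depth-first, yielding parents before their children, while flatten_parents() walks from an item up to the root. A minimal, self-contained sketch of both helpers using a stand-in class instead of real bone collections; the Node class is only for illustration:

class Node:
    def __init__(self, name, parent=None):
        self.name = name
        self.parent = parent
        self.children = []
        if parent:
            parent.children.append(self)

root = Node("Root")
arm = Node("Arm", root)
hand = Node("Hand", arm)
leg = Node("Leg", root)

# Depth-first, parents before children: ['Root', 'Arm', 'Hand', 'Leg']
print([n.name for n in flatten_children([root])])
# From a leaf up to the root: ['Hand', 'Arm', 'Root']
print([n.name for n in flatten_parents(hand)])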
View File
@ -16,7 +16,7 @@ from bpy.types import bpy_prop_array, bpy_prop_collection # noqa
from idprop.types import IDPropertyArray from idprop.types import IDPropertyArray
from mathutils import Vector from mathutils import Vector
from .misc import ArmatureObject, wrap_list_to_lines, IdPropSequence, find_index from .misc import ArmatureObject, wrap_list_to_lines, IdPropSequence, find_index, flatten_children
if TYPE_CHECKING: if TYPE_CHECKING:
from ..base_rig import BaseRig from ..base_rig import BaseRig
@ -193,7 +193,7 @@ def upgrade_metarig_layers(metarig: ArmatureObject):
# Find layer collections # Find layer collections
coll_table = {} coll_table = {}
for coll in arm.collections: for coll in arm.collections_all:
if m := re.match(r'^Layer (\d+)', coll.name): if m := re.match(r'^Layer (\d+)', coll.name):
coll_table[int(m[1]) - 1] = coll coll_table[int(m[1]) - 1] = coll
@ -229,7 +229,7 @@ def upgrade_metarig_layers(metarig: ArmatureObject):
coll.name = new_name coll.name = new_name
if coll: if coll:
coll_idx = find_index(arm.collections, coll) coll_idx = find_index(arm.collections_all, coll)
arm.collections.move(coll_idx, cur_idx) arm.collections.move(coll_idx, cur_idx)
cur_idx += 1 cur_idx += 1
@ -249,7 +249,7 @@ def upgrade_metarig_layers(metarig: ArmatureObject):
root_bcoll = coll_table.get(28) root_bcoll = coll_table.get(28)
used_rows = set() used_rows = set()
for bcoll in arm.collections: for bcoll in arm.collections_all:
if bcoll != root_bcoll and bcoll.rigify_ui_row > 0: if bcoll != root_bcoll and bcoll.rigify_ui_row > 0:
used_rows.add(bcoll.rigify_ui_row) used_rows.add(bcoll.rigify_ui_row)
@ -258,7 +258,7 @@ def upgrade_metarig_layers(metarig: ArmatureObject):
if i in used_rows: if i in used_rows:
row_map[i] = len(row_map) + 1 row_map[i] = len(row_map) + 1
for bcoll in arm.collections: for bcoll in arm.collections_all:
if bcoll == root_bcoll: if bcoll == root_bcoll:
bcoll.rigify_ui_row = len(row_map) + 3 bcoll.rigify_ui_row = len(row_map) + 3
elif bcoll.rigify_ui_row > 0: elif bcoll.rigify_ui_row > 0:
@ -535,13 +535,13 @@ def write_metarig(obj: ArmatureObject, layers=False, func_name="create",
code.append('\n bone_collections = {}') code.append('\n bone_collections = {}')
code.append('\n for bcoll in list(arm.collections):' code.append('\n for bcoll in list(arm.collections_all):'
'\n arm.collections.remove(bcoll)\n') '\n arm.collections.remove(bcoll)\n')
args = ', '.join(f'{k}={repr(v)}' for k, v in collection_attrs.items()) args = ', '.join(f'{k}={repr(v)}' for k, v in collection_attrs.items())
code.append(f" def add_bone_collection(name, *, {args}):") code.append(f" def add_bone_collection(name, *, parent=None, {args}):")
code.append(f" new_bcoll = arm.collections.new(name)") code.append(f" new_bcoll = arm.collections.new(name, parent=bone_collections.get(parent))")
for k, _v in collection_attrs.items(): for k, _v in collection_attrs.items():
code.append(f" new_bcoll.rigify_{k} = {k}") code.append(f" new_bcoll.rigify_{k} = {k}")
code.append(" bone_collections[name] = new_bcoll") code.append(" bone_collections[name] = new_bcoll")
@ -559,8 +559,10 @@ def write_metarig(obj: ArmatureObject, layers=False, func_name="create",
ref_list.add().set_collection(bone_collections[name]) ref_list.add().set_collection(bone_collections[name])
""") """)
for i, bcoll in enumerate(arm.collections): for bcoll in flatten_children(arm.collections):
args = [repr(bcoll.name)] args = [repr(bcoll.name)]
if bcoll.parent:
args.append(f"parent={bcoll.parent.name!r}")
for k, v in collection_attrs.items(): for k, v in collection_attrs.items():
value = getattr(bcoll, "rigify_" + k) value = getattr(bcoll, "rigify_" + k)
if value != v: if value != v:
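The hunks above consistently switch lookups and loops from arm.collections to arm.collections_all: with the hierarchical bone collections introduced in Blender 4.1, collections only lists the root-level entries, while collections_all also includes nested children, and new() accepts a parent argument. A hedged sketch of the difference, assuming Blender 4.1+ and an armature datablock named "Armature":

import bpy

arm = bpy.data.armatures["Armature"]
torso = arm.collections.new("Torso")
fingers = arm.collections.new("Fingers", parent=torso)

print([c.name for c in arm.collections])      # root level only, e.g. ['Torso']
print([c.name for c in arm.collections_all])  # includes children, e.g. ['Torso', 'Fingers']
print(arm.collections_all.get("Fingers") is fingers)  # True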
View File
@ -131,7 +131,7 @@ def move_sun(context):
azimuth, elevation = get_sun_coordinates( azimuth, elevation = get_sun_coordinates(
local_time, sun_props.latitude, local_time, sun_props.latitude,
sun_props.longitude, zone, sun_props.longitude, zone,
sun_props.month, sun_props.day) sun_props.month, sun_props.day, sun_props.year)
obj.location = get_sun_vector(azimuth, elevation) * sun_props.sun_distance obj.location = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
local_time -= time_increment local_time -= time_increment
obj.rotation_euler = ((elevation - pi/2, 0, -azimuth)) obj.rotation_euler = ((elevation - pi/2, 0, -azimuth))
View File
@ -12,7 +12,7 @@
translations_tuple = ( translations_tuple = (
(("*", ""), (("*", ""),
((), ()), ((), ()),
("fr_FR", "Project-Id-Version: Sun Position 3.3.3 (0)\n", ("fr_FR", "Project-Id-Version: Sun Position 3.5.4 (0)\n",
(False, (False,
("Blender's translation file (po format).", ("Blender's translation file (po format).",
"Copyright (C) 2022 The Blender Foundation.", "Copyright (C) 2022 The Blender Foundation.",
@ -31,12 +31,6 @@ translations_tuple = (
("fr_FR", "Afficher les infos dazimut et de hauteur du Soleil", ("fr_FR", "Afficher les infos dazimut et de hauteur du Soleil",
(False, ())), (False, ())),
), ),
(("*", "Daylight Savings"),
(("bpy.types.SunPosProperties.use_daylight_savings",),
()),
("fr_FR", "Heure dété",
(False, ())),
),
(("*", "Display overlays in the viewport: the direction of the north, analemmas and the Sun surface"), (("*", "Display overlays in the viewport: the direction of the north, analemmas and the Sun surface"),
(("bpy.types.SunPosAddonPreferences.show_overlays",), (("bpy.types.SunPosAddonPreferences.show_overlays",),
()), ()),
@ -82,10 +76,10 @@ translations_tuple = (
("fr_FR", "Options de position du Soleil", ("fr_FR", "Options de position du Soleil",
(False, ())), (False, ())),
), ),
(("*", "Sun Position Presets"), (("Operator", "Add Sun Position preset"),
(("bpy.types.SUNPOS_PT_Presets",), (("bpy.types.WORLD_OT_sunpos_add_preset",),
()), ()),
("fr_FR", "Préréglages de position du Soleil", ("fr_FR", "Ajouter un préréglage de position du Soleil",
(False, ())), (False, ())),
), ),
(("Operator", "Pick Sun in Viewport"), (("Operator", "Pick Sun in Viewport"),
@ -100,6 +94,18 @@ translations_tuple = (
("fr_FR", "Sélectionner la position du Soleil dans nimporte quelle vue 3D, puis la synchroniser avec lenvironnement", ("fr_FR", "Sélectionner la position du Soleil dans nimporte quelle vue 3D, puis la synchroniser avec lenvironnement",
(False, ())), (False, ())),
), ),
(("*", "Add a new preset for Sun Position settings"),
(("bpy.types.WORLD_OT_sunpos_add_preset",),
()),
("fr_FR", "Ajouter un nouveau préréglage pour les réglages de position du Soleil",
(False, ())),
),
(("*", "Sun Position Presets"),
(("bpy.types.SUNPOS_PT_Presets",),
()),
("fr_FR", "Préréglages de position du Soleil",
(False, ())),
),
(("*", "UTC Zone"), (("*", "UTC Zone"),
(("bpy.types.SunPosProperties.UTC_zone",), (("bpy.types.SunPosProperties.UTC_zone",),
()), ()),
@ -392,6 +398,12 @@ translations_tuple = (
("fr_FR", "Utiliser une seule valeur pour le jour de lannée", ("fr_FR", "Utiliser une seule valeur pour le jour de lannée",
(False, ())), (False, ())),
), ),
(("*", "Daylight Savings"),
(("bpy.types.SunPosProperties.use_daylight_savings",),
()),
("fr_FR", "Heure dété",
(False, ())),
),
(("*", "Daylight savings time adds 1 hour to standard time"), (("*", "Daylight savings time adds 1 hour to standard time"),
(("bpy.types.SunPosProperties.use_daylight_savings",), (("bpy.types.SunPosProperties.use_daylight_savings",),
()), ()),
@ -416,43 +428,55 @@ translations_tuple = (
("fr_FR", "Année", ("fr_FR", "Année",
(False, ())), (False, ())),
), ),
(("*", "Unknown projection"), (("*", "Please select a Sun object"),
(("scripts/addons/sun_position/hdr.py:181",), (("scripts/addons/sun_position/hdr.py:82",),
()), ()),
("fr_FR", "Projection inconnue", ("fr_FR", "Veuillez sélectionner un objet Soleil",
(False, ())),
),
(("*", "Enter/LMB: confirm, Esc/RMB: cancel, MMB: pan, mouse wheel: zoom, Ctrl + mouse wheel: set exposure"),
(("scripts/addons/sun_position/hdr.py:252",),
()),
("fr_FR", "Entrée/ClicG : Confirmer, Échap/ClicD : Annuler, ClicM : défiler, "
"molette : zoom, Ctrl + molette : exposition",
(False, ())),
),
(("*", "Could not find 3D View"),
(("scripts/addons/sun_position/hdr.py:263",),
()),
("fr_FR", "Impossible de trouver la vue 3D",
(False, ())), (False, ())),
), ),
(("*", "Please select an Environment Texture node"), (("*", "Please select an Environment Texture node"),
(("scripts/addons/sun_position/hdr.py:269",), (("scripts/addons/sun_position/hdr.py:85",
"scripts/addons/sun_position/hdr.py:253"),
()), ()),
("fr_FR", "Veuillez utiliser un nœud de texture denvironnement", ("fr_FR", "Veuillez utiliser un nœud de texture denvironnement",
(False, ())), (False, ())),
), ),
(("*", "Show options and info:"), (("*", "Please select a valid Environment Texture node"),
(("scripts/addons/sun_position/properties.py:297",), (("scripts/addons/sun_position/hdr.py:91",),
()), ()),
("fr_FR", "Afficher les options et infos :", ("fr_FR", "Veuillez utiliser un nœud de texture denvironnement valide",
(False, ())),
),
(("*", "Could not find 3D View"),
(("scripts/addons/sun_position/hdr.py:247",),
()),
("fr_FR", "Impossible de trouver la vue 3D",
(False, ())),
),
(("*", "Enter/LMB: confirm, Esc/RMB: cancel, MMB: pan, mouse wheel: zoom, Ctrl + mouse wheel: set exposure"),
(("scripts/addons/sun_position/hdr.py:264",),
()),
("fr_FR", "Entrée/ClicG : Confirmer, Échap/ClicD : Annuler, ClicM : défiler, molette : zoom, Ctrl + molette : exposition",
(False, ())),
),
(("*", "Unknown projection"),
(("scripts/addons/sun_position/hdr.py:165",),
()),
("fr_FR", "Projection inconnue",
(False, ())), (False, ())),
), ),
(("*", "ERROR: Could not parse coordinates"), (("*", "ERROR: Could not parse coordinates"),
(("scripts/addons/sun_position/sun_calc.py:54",), (("scripts/addons/sun_position/properties.py:36",),
()), ()),
("fr_FR", "ERREUR : Impossible danalyser les coordonnées", ("fr_FR", "ERREUR : Impossible danalyser les coordonnées",
(False, ())), (False, ())),
), ),
(("*", "Show options and info:"),
(("scripts/addons/sun_position/properties.py:317",),
()),
("fr_FR", "Afficher les options et infos :",
(False, ())),
),
(("Hour", "Time"), (("Hour", "Time"),
(("scripts/addons/sun_position/ui_sun.py:224",), (("scripts/addons/sun_position/ui_sun.py:224",),
()), ()),
@ -460,13 +484,13 @@ translations_tuple = (
(False, ())), (False, ())),
), ),
(("*", "Time Local:"), (("*", "Time Local:"),
(("scripts/addons/sun_position/ui_sun.py:242",), (("scripts/addons/sun_position/ui_sun.py:241",),
()), ()),
("fr_FR", "Heure locale :", ("fr_FR", "Heure locale :",
(False, ())), (False, ())),
), ),
(("*", "UTC:"), (("*", "UTC:"),
(("scripts/addons/sun_position/ui_sun.py:243",), (("scripts/addons/sun_position/ui_sun.py:242",),
()), ()),
("fr_FR", "UTC :", ("fr_FR", "UTC :",
(False, ())), (False, ())),
View File
@ -23,7 +23,7 @@ class SUNPOS_PT_Presets(PresetPanel, bpy.types.Panel):
class SUNPOS_OT_AddPreset(AddPresetBase, Operator): class SUNPOS_OT_AddPreset(AddPresetBase, Operator):
'''Add Sun Position preset''' '''Add a new preset for Sun Position settings'''
bl_idname = "world.sunpos_add_preset" bl_idname = "world.sunpos_add_preset"
bl_label = "Add Sun Position preset" bl_label = "Add Sun Position preset"
preset_menu = "SUNPOS_PT_Presets" preset_menu = "SUNPOS_PT_Presets"
View File
@ -11,7 +11,7 @@ bl_info = {
"description": "Allows managing UI translations directly from Blender " "description": "Allows managing UI translations directly from Blender "
"(update main .po files, update scripts' translations, etc.)", "(update main .po files, update scripts' translations, etc.)",
"warning": "Still in development, not all features are fully implemented yet!", "warning": "Still in development, not all features are fully implemented yet!",
"doc_url": "http://wiki.blender.org/index.php/Dev:Doc/How_to/Translate_Blender", "doc_url": "https://developer.blender.org/docs/handbook/translating/translator_guide/",
"support": 'OFFICIAL', "support": 'OFFICIAL',
"category": "System", "category": "System",
} }