Node Wrangler: add back exposure compensation for Preview Node #105136

Closed
Damien Picard wants to merge 4 commits from pioverfour/blender-addons:dp_nw_exposure_compensation into main

When changing the target branch, be careful to rebase the branch in your fork to match. See documentation.
64 changed files with 1352 additions and 1261 deletions
Showing only changes of commit 83f1bc2302 - Show all commits

View File

@ -9,12 +9,12 @@ body:
attributes:
value: |
### Instructions
First time reporting? See [tips](https://wiki.blender.org/wiki/Process/Bug_Reports).
First time reporting? See [tips](https://developer.blender.org/docs/handbook/bug_reports/making_good_bug_reports/).
* Use **Help > Report a Bug** in Blender to fill system information and exact Blender version.
* Test [daily builds](https://builder.blender.org/) to verify if the issue is already fixed.
* Test [previous versions](https://download.blender.org/release/) to find an older working version.
* For feature requests, feedback, questions or build issues, see [communication channels](https://wiki.blender.org/wiki/Communication/Contact#User_Feedback_and_Requests).
* For feature requests, feedback, questions or build issues, see [communication channels](https://developer.blender.org/docs/handbook/communication/user_feedback/).
* If there are multiple bugs, make multiple bug reports.
- type: textarea

View File

@ -6,9 +6,9 @@ body:
value: |
### Instructions
* [Contributing a new add-on](https://wiki.blender.org/wiki/Process/Addons)
* [Contributing code](https://wiki.blender.org/index.php/Dev:Doc/Process/Contributing_Code)
* [Effective code review](https://wiki.blender.org/index.php/Dev:Doc/Tools/Code_Review)
* [Contributing a new add-on](https://developer.blender.org/docs/handbook/addons/)
* [Contributing code](https://developer.blender.org/docs/handbook/contributing/)
* [Effective code review](https://developer.blender.org/docs/handbook/contributing/pull_requests/)
By submitting code here, you agree that the code is (compatible with) GNU GPL v2 or later.

View File

@ -1,4 +1,4 @@
This repository is only used as a mirror. Blender development happens on projects.blender.org.
To get started with contributing code, please see:
https://wiki.blender.org/wiki/Process/Contributing_Code
https://developer.blender.org/docs/handbook/contributing/

2
.github/stale.yml vendored
View File

@ -18,4 +18,4 @@ closeComment: >
used as a mirror. Blender development happens on projects.blender.org.
To get started contributing code, please read:
https://wiki.blender.org/wiki/Process/Contributing_Code
https://developer.blender.org/docs/handbook/contributing/

View File

@ -32,10 +32,10 @@ if flag is False:
# Import modules
# ----------------------------------------------
if "bpy" in locals():
import imp
import importlib
imp.reload(import_ase)
imp.reload(import_krita)
importlib.reload(import_ase)
importlib.reload(import_krita)
else:
import import_ase
import import_krita

View File

@ -5,7 +5,7 @@
bl_info = {
"name": "FBX format",
"author": "Campbell Barton, Bastien Montagne, Jens Restemeier, @Mysteryem",
"version": (5, 11, 5),
"version": (5, 11, 6),
"blender": (4, 1, 0),
"location": "File > Import-Export",
"description": "FBX IO meshes, UVs, vertex colors, materials, textures, cameras, lamps and actions",

View File

@ -2753,7 +2753,7 @@ def fbx_data_from_scene(scene, depsgraph, settings):
_cos = MESH_ATTRIBUTE_POSITION.to_ndarray(me.attributes)
else:
_cos = np.empty(len(me.vertices) * 3, dtype=co_bl_dtype)
shape_key.data.foreach_get("co", _cos)
shape_key.points.foreach_get("co", _cos)
return vcos_transformed(_cos, geom_mat_co, co_fbx_dtype)
for shape in me.shape_keys.key_blocks[1:]:
@ -3108,9 +3108,9 @@ def fbx_header_elements(root, scene_data, time=None):
app_name = "Blender (stable FBX IO)"
app_ver = bpy.app.version_string
import addon_utils
import sys
addon_ver = addon_utils.module_bl_info(sys.modules[__package__])['version']
from . import bl_info
addon_ver = bl_info["version"]
del bl_info
# ##### Start of FBXHeaderExtension element.
header_ext = elem_empty(root, b"FBXHeaderExtension")

View File

@ -2002,7 +2002,7 @@ def blen_read_shapes(fbx_tmpl, fbx_data, objects, me, scene):
if dvcos.any():
shape_cos = me_vcos_vector_view.copy()
shape_cos[indices] += dvcos
kb.data.foreach_set("co", shape_cos.ravel())
kb.points.foreach_set("co", shape_cos.ravel())
shape_key_values_in_range &= expand_shape_key_range(kb, weight)

View File

@ -5,7 +5,7 @@
bl_info = {
'name': 'glTF 2.0 format',
'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
"version": (4, 1, 38),
"version": (4, 1, 57),
'blender': (4, 1, 0),
'location': 'File > Import-Export',
'description': 'Import-Export as glTF 2.0',

View File

@ -139,6 +139,13 @@ def get_attribute_type(component_type, data_type):
else:
pass
def get_attribute(attributes, name, data_type, domain):
    """Look up a mesh attribute by name, validating its type and domain.

    Returns the attribute only when it exists and its data_type and domain
    both match the expected values; otherwise returns None.
    """
    found = attributes.get(name)
    if found is None:
        return None
    if found.data_type != data_type or found.domain != domain:
        # Name exists but with the wrong layout; treat it as absent.
        return None
    return found
def get_gltf_interpolation(interpolation):
return {
"BEZIER": "CUBICSPLINE",

View File

@ -0,0 +1,69 @@
# SPDX-FileCopyrightText: 2018-2024 The glTF-Blender-IO authors
#
# SPDX-License-Identifier: Apache-2.0
import numpy as np
def fast_structured_np_unique(arr, *args, **kwargs):
    """
    np.unique optimized for structured arrays when a sorted result is not required.

    np.unique sorts its input; sorting a structured array costs one sort per field.
    Viewing the array as a single flat dtype that compares by raw bytes lets a
    single sort find the unique elements. Because comparison happens on a
    different dtype than the original, the returned unique values are not
    necessarily sorted according to the original field types.

    Float field caveats:
    Every -0.0 in the input is rewritten to 0.0 (in place) so the two byte
    patterns collapse into one value. NaNs with distinct byte representations
    (signalling/quiet, custom payloads) are only deduplicated per representation.

    Nested structured dtypes are not supported; overlapping fields are undefined.
    """
    structured_dtype = arr.dtype
    fields = structured_dtype.fields
    if fields is None:
        raise RuntimeError('%s is not a structured dtype' % structured_dtype)

    # Validate each field and normalize float zeros before byte-wise comparison.
    for field_name, (field_dtype, *_rest) in fields.items():
        if field_dtype.subdtype is not None:
            raise RuntimeError('Nested structured types are not supported in %s' % structured_dtype)
        kind = field_dtype.kind
        if kind == 'f':
            # -0.0 and 0.0 differ in their bytes; force a single representation.
            column = arr[field_name]
            column[column == -0.0] = 0.0
        elif kind not in "iuUSV":
            # Only signed/unsigned ints, unicode, bytes and raw void are safe
            # to compare byte-wise as-is.
            raise RuntimeError('Unsupported structured field type %s for field %s' % (field_dtype, field_name))

    itemsize = structured_dtype.itemsize

    # Unsigned integers sort fastest and, unlike signed ones, order by raw
    # bytes just like the string fallbacks below — but they only exist for
    # specific itemsizes.
    uint_by_itemsize = {1: np.uint8, 2: np.uint16, 4: np.uint32, 8: np.uint64}
    flat_dtype = uint_by_itemsize.get(itemsize)
    if flat_dtype is None:
        # Fall back to a flexible dtype whose itemsize matches exactly.
        # One UCS4 character is 4 bytes.
        ucs4_size = np.dtype((np.str_, 1)).itemsize
        if itemsize % ucs4_size == 0:
            # Unicode sorts marginally faster than bytes.
            flat_dtype = np.dtype((np.str_, itemsize // ucs4_size))
        else:
            # Bytes sort marginally faster than raw np.void.
            flat_dtype = np.dtype((np.bytes_, itemsize))

    result = np.unique(arr.view(flat_dtype), *args, **kwargs)
    if isinstance(result, tuple):
        # Optional outputs (indices/inverse/counts) pass through untouched.
        return (result[0].view(arr.dtype),) + result[1:]
    return result.view(arr.dtype)

View File

@ -135,7 +135,7 @@ def __convert_keyframes(
transform = matrix_parent_inverse
values = []
fps = bpy.context.scene.render.fps
fps = (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
for keyframe in keyframes:
# Transform the data and build gltf control points
value = gltf2_blender_math.transform(keyframe.value, target_datapath, transform, need_rotation_correction)

View File

@ -9,9 +9,9 @@ from ...com import gltf2_blender_math
class Keyframe:
def __init__(self, channels: typing.Tuple[bpy.types.FCurve], frame: float, bake_channel: typing.Union[str, None]):
self.seconds = frame / bpy.context.scene.render.fps
self.seconds = frame / (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
self.frame = frame
self.fps = bpy.context.scene.render.fps
self.fps = (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
self.__length_morph = 0
# Note: channels has some None items only for SK if some SK are not animated
if bake_channel is None:

View File

@ -94,7 +94,7 @@ def __convert_keyframes(armature_uuid, bone_name, channel, keyframes, action_nam
if armature_uuid in export_settings['slide'].keys() and action_name in export_settings['slide'][armature_uuid].keys():
for k in keyframes:
k.frame += -export_settings['slide'][armature_uuid][action_name]
k.seconds = k.frame / bpy.context.scene.render.fps
k.seconds = k.frame / (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
times = [k.seconds for k in keyframes]
input = gather_accessor(
@ -145,7 +145,7 @@ def __convert_keyframes(armature_uuid, bone_name, channel, keyframes, action_nam
transform = correction_matrix_local
values = []
fps = bpy.context.scene.render.fps
fps = (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
for keyframe in keyframes:
# Transform the data and build gltf control points
value = gltf2_blender_math.transform(keyframe.value, target_datapath, transform, False)
@ -206,6 +206,8 @@ def __gather_interpolation(node_channel_is_animated, node_channel_interpolation,
elif len(keyframes) == 1:
if node_channel_is_animated is False:
return "STEP"
elif node_channel_interpolation == "CUBICSPLINE":
return "LINEAR" # We can't have a single keyframe with CUBICSPLINE
else:
return node_channel_interpolation
else:

View File

@ -78,7 +78,7 @@ def __convert_keyframes(obj_uuid: str, channel: str, keyframes, action_name: str
if obj_uuid in export_settings['slide'].keys() and action_name in export_settings['slide'][obj_uuid].keys():
for k in keyframes:
k.frame += -export_settings['slide'][obj_uuid][action_name]
k.seconds = k.frame / bpy.context.scene.render.fps
k.seconds = k.frame / (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
times = [k.seconds for k in keyframes]
input = gather_accessor(
@ -100,7 +100,7 @@ def __convert_keyframes(obj_uuid: str, channel: str, keyframes, action_name: str
values = []
fps = bpy.context.scene.render.fps
fps = (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
for keyframe in keyframes:
# Transform the data and build gltf control points
@ -148,6 +148,8 @@ def __gather_interpolation(
elif len(keyframes) == 1:
if node_channel_is_animated is False:
return "STEP"
elif node_channel_interpolation == "CUBICSPLINE":
return "LINEAR" # We can't have a single keyframe with CUBICSPLINE
else:
return node_channel_interpolation
else:

View File

@ -66,7 +66,7 @@ def __convert_keyframes(obj_uuid, keyframes, action_name: str, export_settings):
if obj_uuid in export_settings['slide'].keys() and action_name in export_settings['slide'][obj_uuid].keys():
for k in keyframes:
k.frame += -export_settings['slide'][obj_uuid][action_name]
k.seconds = k.frame / bpy.context.scene.render.fps
k.seconds = k.frame / (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
times = [k.seconds for k in keyframes]
input = gather_accessor(

View File

@ -29,13 +29,27 @@ def gather_node(vnode, export_settings):
if skin is not None:
vnode.skin = skin
# Hook to check if we should export mesh or not (force it to None)
class GltfHookNodeMesh:
def __init__(self):
self.export_mesh = True
gltf_hook_node_mesh = GltfHookNodeMesh()
export_user_extensions('gather_node_mesh_hook', export_settings, gltf_hook_node_mesh, blender_object)
if gltf_hook_node_mesh.export_mesh is True:
mesh = __gather_mesh(vnode, blender_object, export_settings)
else:
mesh = None
node = gltf2_io.Node(
camera=__gather_camera(vnode, export_settings),
children=__gather_children(vnode, export_settings),
extensions=__gather_extensions(vnode, export_settings),
extras=__gather_extras(blender_object, export_settings),
matrix=__gather_matrix(blender_object, export_settings),
mesh=__gather_mesh(vnode, blender_object, export_settings),
mesh=mesh,
name=__gather_name(blender_object, export_settings),
rotation=None,
scale=None,

View File

@ -11,6 +11,7 @@ from ...io.com.gltf2_io_constants import ROUNDING_DIGIT
from ...io.exp.gltf2_io_user_extensions import export_user_extensions
from ...io.com import gltf2_io_constants
from ..com import gltf2_blender_conversion
from ..com.gltf2_blender_utils import fast_structured_np_unique
from .material.gltf2_blender_gather_materials import get_base_material, get_material_from_idx, get_active_uvmap_index, get_new_material_texture_shared
from .material.gltf2_blender_gather_texture_info import gather_udim_texture_info
from . import gltf2_blender_gather_skins
@ -334,10 +335,12 @@ class PrimitiveCreator:
def populate_dots_data(self):
vidxs = np.empty(len(self.blender_mesh.loops))
self.blender_mesh.loops.foreach_get('vertex_index', vidxs)
self.dots['vertex_index'] = vidxs
del vidxs
corner_vertex_indices = gltf2_blender_conversion.get_attribute(self.blender_mesh.attributes, '.corner_vert', 'INT', 'CORNER')
if corner_vertex_indices:
vidxs = np.empty(len(self.blender_mesh.loops), dtype=np.intc)
corner_vertex_indices.data.foreach_get('value', vidxs)
self.dots['vertex_index'] = vidxs
del vidxs
for attr in self.blender_attributes:
if 'skip_getting_to_dots' in attr:
@ -412,11 +415,19 @@ class PrimitiveCreator:
# Now we need to get data and populate
for attr in self.uvmap_attribute_list:
if attr + str(0) not in self.dots.dtype.names: # In case user exports custom attributes, we may have it already
# Vector in custom Attributes are Vector3, but keeping only the first two data
data = np.empty(len(self.blender_mesh.loops) * 3, gltf2_blender_conversion.get_numpy_type('FLOAT2'))
self.blender_mesh.attributes[attr].data.foreach_get('vector', data)
data = data.reshape(-1, 3)
data = data[:,:2]
# Vector in custom Attributes are Vector2 or Vector3 (but keeping only the first two data)
if self.blender_mesh.attributes[attr].data_type == "FLOAT_VECTOR":
data = np.empty(len(self.blender_mesh.loops) * 3, gltf2_blender_conversion.get_numpy_type('FLOAT2'))
self.blender_mesh.attributes[attr].data.foreach_get('vector', data)
data = data.reshape(-1, 3)
data = data[:,:2]
elif self.blender_mesh.attributes[attr].data_type == "FLOAT2":
data = np.empty(len(self.blender_mesh.loops) * 2, gltf2_blender_conversion.get_numpy_type('FLOAT2'))
self.blender_mesh.attributes[attr].data.foreach_get('vector', data)
data = data.reshape(-1, 2)
else:
print_console('WARNING', 'We are not managing this case yet (UVMap as custom attribute for unknown type)')
continue
# Blender UV space -> glTF UV space
# u,v -> u,1-v
data[:, 1] *= -1
@ -621,7 +632,7 @@ class PrimitiveCreator:
def primitive_creation_shared(self):
primitives = []
self.dots, shared_dot_indices = np.unique(self.dots, return_inverse=True)
self.dots, shared_dot_indices = fast_structured_np_unique(self.dots, return_inverse=True)
self.blender_idxs = self.dots['vertex_index']
@ -696,7 +707,7 @@ class PrimitiveCreator:
# Extract just dots used by this primitive, deduplicate them, and
# calculate indices into this deduplicated list.
self.prim_dots = self.dots[dot_indices]
self.prim_dots, indices = np.unique(self.prim_dots, return_inverse=True)
self.prim_dots, indices = fast_structured_np_unique(self.prim_dots, return_inverse=True)
if len(self.prim_dots) == 0:
continue
@ -770,7 +781,7 @@ class PrimitiveCreator:
if self.blender_idxs_edges.shape[0] > 0:
# Export one glTF vert per unique Blender vert in a loose edge
self.blender_idxs = self.blender_idxs_edges
dots_edges, indices = np.unique(self.dots_edges, return_inverse=True)
dots_edges, indices = fast_structured_np_unique(self.dots_edges, return_inverse=True)
self.blender_idxs = np.unique(self.blender_idxs_edges)
self.attributes_edges_points = {}
@ -871,14 +882,21 @@ class PrimitiveCreator:
def __get_positions(self):
self.locs = np.empty(len(self.blender_mesh.vertices) * 3, dtype=np.float32)
source = self.key_blocks[0].relative_key.data if self.key_blocks else self.blender_mesh.vertices
source.foreach_get('co', self.locs)
if self.key_blocks:
source = self.key_blocks[0].relative_key.points
foreach_attribute = 'co'
else:
position_attribute = gltf2_blender_conversion.get_attribute(self.blender_mesh.attributes, 'position', 'FLOAT_VECTOR', 'POINT')
source = position_attribute.data if position_attribute else None
foreach_attribute = 'vector'
if source:
source.foreach_get(foreach_attribute, self.locs)
self.locs = self.locs.reshape(len(self.blender_mesh.vertices), 3)
self.morph_locs = []
for key_block in self.key_blocks:
vs = np.empty(len(self.blender_mesh.vertices) * 3, dtype=np.float32)
key_block.data.foreach_get('co', vs)
key_block.points.foreach_get('co', vs)
vs = vs.reshape(len(self.blender_mesh.vertices), 3)
self.morph_locs.append(vs)
@ -1116,7 +1134,7 @@ class PrimitiveCreator:
def __get_uvs_attribute(self, blender_uv_idx, attr):
layer = self.blender_mesh.uv_layers[blender_uv_idx]
uvs = np.empty(len(self.blender_mesh.loops) * 2, dtype=np.float32)
layer.data.foreach_get('uv', uvs)
layer.uv.foreach_get('vector', uvs)
uvs = uvs.reshape(len(self.blender_mesh.loops), 2)
# Blender UV space -> glTF UV space
@ -1136,7 +1154,7 @@ class PrimitiveCreator:
self.normals = np.array(self.normals, dtype=np.float32)
else:
self.normals = np.empty(len(self.blender_mesh.loops) * 3, dtype=np.float32)
self.blender_mesh.loops.foreach_get('normal', self.normals)
self.blender_mesh.corner_normals.foreach_get('vector', self.normals)
self.normals = self.normals.reshape(len(self.blender_mesh.loops), 3)

View File

@ -568,6 +568,10 @@ class VExportTree:
hasattr(self.nodes[n.armature], "need_neutral_bone")]: #all skin meshes objects where neutral bone is needed
# Only for meshes, as curve can't have skin data (no weight paint available)
# Be sure to add it to really exported meshes
if n.node.skin is None:
print("WARNING: {} has no skin, skipping adding neutral bone data on it.".format(n.blender_object.name))
continue
if n.armature not in added_armatures:

View File

@ -37,8 +37,11 @@ def get_material_cache_key(blender_material, export_settings):
# Use id of material
# Do not use bpy.types that can be unhashable
# Do not use material name, that can be not unique (when linked)
# We use here the id of original material as for apply modifier, the material has a new id
# So, when no modifier applied => original is the same id
# And when modifier applied => new one is different id, but original is still the same
return (
(id(blender_material),),
(id(blender_material.original),),
)
@cached_by_key(key=get_material_cache_key)

View File

@ -144,8 +144,10 @@ def __gather_metallic_roughness_texture(blender_material, orm_texture, export_se
# Using directly the Blender socket object
if not hasMetal and not hasRough:
metallic_roughness = get_socket_from_gltf_material_node(blender_material, "MetallicRoughness")
if metallic_roughness is None or not has_image_node_from_socket(metallic_roughness, export_settings):
if metallic_roughness.socket is None or not has_image_node_from_socket(metallic_roughness, export_settings):
return None, {}, {}, None
else:
texture_input = (metallic_roughness, metallic_roughness)
elif not hasMetal:
texture_input = (roughness_socket,)
elif not hasRough:
@ -154,6 +156,7 @@ def __gather_metallic_roughness_texture(blender_material, orm_texture, export_se
texture_input = (metallic_socket, roughness_socket)
tex, uvmap_info, udim_info, factor = gather_texture_info(
texture_input[0],
orm_texture or texture_input,
export_settings,

View File

@ -177,7 +177,7 @@ def get_socket_from_gltf_material_node(blender_material: bpy.types.Material, nam
if blender_material.node_tree and blender_material.use_nodes:
nodes = get_material_nodes(blender_material.node_tree, [blender_material], bpy.types.ShaderNodeGroup)
# Some weird node groups with missing datablock can have no node_tree, so checking n.node_tree (See #1797)
nodes = [n for n in nodes if n[0].node_tree is not None and ( n[0].node_tree.name.lower().startswith(get_gltf_old_group_node_name()) or n[0].node_tree.name.lower() in gltf_node_group_names)]
nodes = [n for n in nodes if n[0].node_tree is not None and any([[n[0].node_tree.name.lower().startswith(g) for g in gltf_node_group_names]])]
inputs = sum([[(input, node[1]) for input in node[0].inputs if input.name == name] for node in nodes], [])
if inputs:
return NodeSocket(inputs[0][0], inputs[0][1])
@ -461,6 +461,12 @@ def get_vertex_color_info(color_socket, alpha_socket, export_settings):
attribute_color_type = "active"
elif use_vc is True and use_active is None and attribute_color is not None:
attribute_color_type = "name"
elif node.node.type in ["ATTRIBUTE", "VERTEX_COLOR"]:
use_vc, attribute_color, use_active = get_attribute_name(NodeSocket(node.node.outputs[0], node.group_path), export_settings)
if use_vc is True and use_active is True:
attribute_color_type = "active"
elif use_vc is True and use_active is None and attribute_color is not None:
attribute_color_type = "name"
if alpha_socket is not None and alpha_socket.socket is not None:
node = previous_node(alpha_socket)
@ -473,6 +479,12 @@ def get_vertex_color_info(color_socket, alpha_socket, export_settings):
attribute_alpha_type = "active"
elif use_vc is True and use_active is None and attribute_alpha is not None:
attribute_alpha_type = "name"
elif node.node.type in ["ATTRIBUTE", "VERTEX_COLOR"]:
use_vc, attribute_color, use_active = get_attribute_name(NodeSocket(node.node.outputs[0], node.group_path), export_settings)
if use_vc is True and use_active is True:
attribute_color_type = "active"
elif use_vc is True and use_active is None and attribute_color is not None:
attribute_color_type = "name"
return {"color": attribute_color, "alpha": attribute_alpha, "color_type": attribute_color_type, "alpha_type": attribute_alpha_type}

View File

@ -1,157 +0,0 @@
# SPDX-FileCopyrightText: 2018-2021 The glTF-Blender-IO authors
#
# SPDX-License-Identifier: Apache-2.0
from ...io.com.gltf2_io import TextureInfo, MaterialNormalTextureInfoClass
from .gltf2_blender_texture import texture
# [Texture] => [Separate R] => [Clearcoat Factor] =>
def clearcoat(mh, location, clearcoat_socket):
    """Wire the KHR_materials_clearcoat factor/texture into clearcoat_socket.

    Builds, right to left: [Texture] => [Separate R] => [Clearcoat Factor] =>.
    No-op when the material does not carry the extension.
    """
    x, y = location
    try:
        ext = mh.pymat.extensions['KHR_materials_clearcoat']
    except Exception:
        # Extension absent (or material data malformed): nothing to do.
        return
    clearcoat_factor = ext.get('clearcoatFactor', 0)
    tex_info = ext.get('clearcoatTexture')
    if tex_info is not None:
        tex_info = TextureInfo.from_dict(tex_info)

    if clearcoat_socket is None:
        return
    if tex_info is None:
        # Constant clearcoat: set the socket value directly, no nodes needed.
        clearcoat_socket.default_value = clearcoat_factor
        return

    nodes = mh.node_tree.nodes
    links = mh.node_tree.links

    # Multiply the texture channel by the factor; a factor of 1 is a no-op,
    # so the math node is skipped in that case.
    if clearcoat_factor != 1:
        math_node = nodes.new('ShaderNodeMath')
        math_node.label = 'Clearcoat Factor'
        math_node.location = x - 140, y
        math_node.operation = 'MULTIPLY'
        links.new(clearcoat_socket, math_node.outputs[0])
        clearcoat_socket = math_node.inputs[0]
        math_node.inputs[1].default_value = clearcoat_factor
        x -= 200

    # The clearcoat amount is stored in the texture's red channel.
    sep_node = nodes.new('ShaderNodeSeparateColor')
    sep_node.location = x - 150, y - 75
    links.new(clearcoat_socket, sep_node.outputs['Red'])
    clearcoat_socket = sep_node.inputs[0]
    x -= 200

    texture(
        mh,
        tex_info=tex_info,
        label='CLEARCOAT',
        location=(x, y),
        is_data=True,
        color_socket=clearcoat_socket,
    )
# [Texture] => [Separate G] => [Roughness Factor] =>
def clearcoat_roughness(mh, location, roughness_socket):
    """Wire the KHR_materials_clearcoat roughness into roughness_socket.

    Builds, right to left: [Texture] => [Separate G] => [Roughness Factor] =>.
    No-op when the material does not carry the extension.
    """
    x, y = location
    try:
        ext = mh.pymat.extensions['KHR_materials_clearcoat']
    except Exception:
        # Extension absent (or material data malformed): nothing to do.
        return
    roughness_factor = ext.get('clearcoatRoughnessFactor', 0)
    tex_info = ext.get('clearcoatRoughnessTexture')
    if tex_info is not None:
        tex_info = TextureInfo.from_dict(tex_info)

    if roughness_socket is None:
        return
    if tex_info is None:
        # Constant roughness: set the socket value directly, no nodes needed.
        roughness_socket.default_value = roughness_factor
        return

    nodes = mh.node_tree.nodes
    links = mh.node_tree.links

    # Multiply the texture channel by the factor; skipped for the no-op
    # factor of 1.
    if roughness_factor != 1:
        math_node = nodes.new('ShaderNodeMath')
        math_node.label = 'Clearcoat Roughness Factor'
        math_node.location = x - 140, y
        math_node.operation = 'MULTIPLY'
        links.new(roughness_socket, math_node.outputs[0])
        roughness_socket = math_node.inputs[0]
        math_node.inputs[1].default_value = roughness_factor
        x -= 200

    # The clearcoat roughness is stored in the texture's green channel.
    sep_node = nodes.new('ShaderNodeSeparateColor')
    sep_node.location = x - 150, y - 75
    links.new(roughness_socket, sep_node.outputs['Green'])
    color_socket = sep_node.inputs[0]
    x -= 200

    texture(
        mh,
        tex_info=tex_info,
        label='CLEARCOAT ROUGHNESS',
        location=(x, y),
        is_data=True,
        color_socket=color_socket,
    )
# [Texture] => [Normal Map] =>
def clearcoat_normal(mh, location, normal_socket):
    """Wire the KHR_materials_clearcoat normal texture into normal_socket.

    Builds, right to left: [Texture] => [Normal Map] =>.
    No-op when the extension or its normal texture is absent.
    """
    x, y = location
    try:
        ext = mh.pymat.extensions['KHR_materials_clearcoat']
    except Exception:
        # Extension absent (or material data malformed): nothing to do.
        return
    tex_info = ext.get('clearcoatNormalTexture')
    if tex_info is not None:
        tex_info = MaterialNormalTextureInfoClass.from_dict(tex_info)
    if tex_info is None:
        return

    nm_node = mh.node_tree.nodes.new('ShaderNodeNormalMap')
    nm_node.location = x - 150, y - 40

    # Choose the UV map: KHR_texture_transform's texCoord, when present,
    # overrides the texture info's tex_coord.
    uv_idx = tex_info.tex_coord or 0
    try:
        uv_idx = tex_info.extensions['KHR_texture_transform']['texCoord']
    except Exception:
        pass
    nm_node.uv_map = 'UVMap' if uv_idx == 0 else 'UVMap.%03d' % uv_idx

    # Normal-map strength comes from the texture info's scale (default 1).
    scale = tex_info.scale
    nm_node.inputs['Strength'].default_value = scale if scale is not None else 1

    mh.node_tree.links.new(normal_socket, nm_node.outputs['Normal'])
    color_socket = nm_node.inputs['Color']
    x -= 200

    texture(
        mh,
        tex_info=tex_info,
        label='CLEARCOAT NORMAL',
        location=(x, y),
        is_data=True,
        color_socket=color_socket,
    )

View File

@ -1,13 +0,0 @@
# SPDX-FileCopyrightText: 2018-2021 The glTF-Blender-IO authors
#
# SPDX-License-Identifier: Apache-2.0
from ...io.com.gltf2_io_constants import GLTF_IOR
def ior(mh, ior_socket):
try:
ext = mh.pymat.extensions['KHR_materials_ior']
except Exception:
return
ior = ext.get('ior', GLTF_IOR)
ior_socket.default_value = ior

View File

@ -2,159 +2,202 @@
#
# SPDX-License-Identifier: Apache-2.0
import bpy
from ...io.com.gltf2_io import TextureInfo
from .gltf2_blender_pbrMetallicRoughness import \
base_color, emission, normal, occlusion, make_output_nodes, make_settings_node
from .gltf2_blender_texture import texture
base_color, emission, normal, occlusion, make_settings_node
from .gltf2_blender_material_utils import color_factor_and_texture
from .gltf2_blender_texture import texture, get_source
from .gltf2_blender_image import BlenderImage
import numpy as np
def pbr_specular_glossiness(mh):
"""Creates node tree for pbrSpecularGlossiness materials."""
# This does option #1 from
# https://github.com/KhronosGroup/glTF-Blender-IO/issues/303
ext = mh.get_ext('KHR_materials_pbrSpecularGlossiness', {})
# Sum a Glossy and Diffuse Shader
glossy_node = mh.node_tree.nodes.new('ShaderNodeBsdfGlossy')
diffuse_node = mh.node_tree.nodes.new('ShaderNodeBsdfDiffuse')
add_node = mh.node_tree.nodes.new('ShaderNodeAddShader')
glossy_node.location = 10, 220
diffuse_node.location = 10, 0
add_node.location = 230, 100
mh.node_tree.links.new(add_node.inputs[0], glossy_node.outputs[0])
mh.node_tree.links.new(add_node.inputs[1], diffuse_node.outputs[0])
pbr_node = mh.nodes.new('ShaderNodeBsdfPrincipled')
out_node = mh.nodes.new('ShaderNodeOutputMaterial')
pbr_node.location = 10, 300
out_node.location = 300, 300
mh.links.new(pbr_node.outputs[0], out_node.inputs[0])
emission_socket, alpha_socket, _ = make_output_nodes(
mh,
location=(370, 250),
additional_location=None, #No additional location needed for SpecGloss
shader_socket=add_node.outputs[0],
make_emission_socket=mh.needs_emissive(),
make_alpha_socket=not mh.is_opaque(),
make_volume_socket=None # No possible to have KHR_materials_volume with specular/glossiness
)
if emission_socket:
emission(
mh,
location=(-200, 860),
color_socket=emission_socket,
strength_socket=emission_socket.node.inputs['Strength']
)
locs = calc_locations(mh, ext)
base_color(
mh,
is_diffuse=True,
location=(-200, 380),
color_socket=diffuse_node.inputs['Color'],
alpha_socket=alpha_socket,
location=locs['diffuse'],
color_socket=pbr_node.inputs['Base Color'],
alpha_socket=pbr_node.inputs['Alpha'] if not mh.is_opaque() else None,
)
specular_glossiness(
emission(
mh,
location=(-200, -100),
specular_socket=glossy_node.inputs['Color'],
roughness_socket=glossy_node.inputs['Roughness'],
)
copy_socket(
mh,
copy_from=glossy_node.inputs['Roughness'],
copy_to=diffuse_node.inputs['Roughness'],
location=locs['emission'],
color_socket=pbr_node.inputs['Emission Color'],
strength_socket=pbr_node.inputs['Emission Strength'],
)
normal(
mh,
location=(-200, -580),
normal_socket=glossy_node.inputs['Normal'],
)
copy_socket(
mh,
copy_from=glossy_node.inputs['Normal'],
copy_to=diffuse_node.inputs['Normal'],
location=locs['normal'],
normal_socket=pbr_node.inputs['Normal'],
)
if mh.pymat.occlusion_texture is not None:
if mh.settings_node is None:
mh.settings_node = make_settings_node(mh)
mh.settings_node.location = (610, -1060)
mh.settings_node.location = 10, 425
mh.settings_node.width = 240
occlusion(
mh,
location=(510, -970),
location=locs['occlusion'],
occlusion_socket=mh.settings_node.inputs['Occlusion'],
)
# The F0 color is the specular tint modulated by
# ((1-IOR)/(1+IOR))^2. Setting IOR=1000 makes this factor
# approximately 1.
pbr_node.inputs['IOR'].default_value = 1000
# [Texture] => [Spec/Gloss Factor] => [Gloss to Rough] =>
def specular_glossiness(mh, location, specular_socket, roughness_socket):
x, y = location
spec_factor = mh.pymat.extensions \
['KHR_materials_pbrSpecularGlossiness'] \
.get('specularFactor', [1, 1, 1])
gloss_factor = mh.pymat.extensions \
['KHR_materials_pbrSpecularGlossiness'] \
.get('glossinessFactor', 1)
spec_gloss_texture = mh.pymat.extensions \
['KHR_materials_pbrSpecularGlossiness'] \
.get('specularGlossinessTexture', None)
if spec_gloss_texture is not None:
spec_gloss_texture = TextureInfo.from_dict(spec_gloss_texture)
if spec_gloss_texture is None:
specular_socket.default_value = spec_factor + [1]
roughness_socket.default_value = 1 - gloss_factor
return
# (1 - x) converts glossiness to roughness
node = mh.node_tree.nodes.new('ShaderNodeInvert')
node.label = 'Invert (Gloss to Rough)'
node.location = x - 140, y - 75
# Outputs
mh.node_tree.links.new(roughness_socket, node.outputs[0])
# Inputs
node.inputs['Fac'].default_value = 1
glossiness_socket = node.inputs['Color']
x -= 250
# Mix in spec/gloss factor
if spec_factor != [1, 1, 1] or gloss_factor != 1:
if spec_factor != [1, 1, 1]:
node = mh.node_tree.nodes.new('ShaderNodeMix')
node.data_type = 'RGBA'
node.label = 'Specular Factor'
node.location = x - 140, y
node.blend_type = 'MULTIPLY'
# Outputs
mh.node_tree.links.new(specular_socket, node.outputs[2])
# Inputs
node.inputs['Factor'].default_value = 1.0
specular_socket = node.inputs[6]
node.inputs[7].default_value = spec_factor + [1]
if gloss_factor != 1:
node = mh.node_tree.nodes.new('ShaderNodeMath')
node.label = 'Glossiness Factor'
node.location = x - 140, y - 200
node.operation = 'MULTIPLY'
# Outputs
mh.node_tree.links.new(glossiness_socket, node.outputs[0])
# Inputs
glossiness_socket = node.inputs[0]
node.inputs[1].default_value = gloss_factor
x -= 200
texture(
# Specular
color_factor_and_texture(
mh,
tex_info=spec_gloss_texture,
label='SPECULAR GLOSSINESS',
location=(x, y),
color_socket=specular_socket,
alpha_socket=glossiness_socket,
location=locs['specular'],
label='Specular Color',
socket=pbr_node.inputs['Specular Tint'],
factor=ext.get('specularFactor', [1, 1, 1]),
tex_info=ext.get('specularGlossinessTexture'),
)
# Glossiness
glossiness(
mh,
ext,
location=locs['glossiness'],
roughness_socket=pbr_node.inputs['Roughness'],
)
def copy_socket(mh, copy_from, copy_to):
    """Duplicate a socket's default value and incoming links onto another socket."""
    # Carry the constant value over first; links (if any) take precedence anyway.
    copy_to.default_value = copy_from.default_value
    # Re-create every incoming connection on the destination socket.
    for incoming in copy_from.links:
        mh.node_tree.links.new(copy_to, incoming.from_socket)
def glossiness(mh, ext, location, roughness_socket):
    """Drive the Roughness socket from KHR spec/gloss data.

    Glossiness = glossinessFactor * specularGlossinessTexture.alpha
    Roughness  = 1 - Glossiness
    """
    gloss_factor = ext.get('glossinessFactor', 1)
    spec_gloss_tex = ext.get('specularGlossinessTexture')
    if spec_gloss_tex is not None:
        spec_gloss_tex = TextureInfo.from_dict(spec_gloss_tex)

    # No texture (or a zero factor) means roughness is a plain constant.
    if spec_gloss_tex is None or gloss_factor == 0:
        roughness_socket.default_value = 1 - gloss_factor
        return

    # Bake 1-glossiness into an image rather than using an Invert node,
    # because a node-based inversion would not survive export.
    baked = make_roughness_image(mh, gloss_factor, spec_gloss_tex)
    if baked is None:
        return

    texture(
        mh,
        spec_gloss_tex,
        location=location,
        label='ROUGHNESS',
        color_socket=None,
        alpha_socket=roughness_socket,
        is_data=False,
        forced_image=baked,
    )
def make_roughness_image(mh, glossiness_factor, tex_info):
    """
    Bake roughness (1 - glossiness) into the alpha channel of a new,
    packed copy of the spec/gloss image. Returns None when the texture
    has no usable source image.
    """
    pytexture = mh.gltf.data.textures[tex_info.index]
    source = get_source(mh, pytexture)
    if source is None:
        return None
    pyimg = mh.gltf.data.images[source]

    BlenderImage.create(mh.gltf, source)

    # Reuse a previously-baked roughness image for this glTF image.
    cached_name = getattr(pyimg, 'blender_roughness_image_name', None)
    if cached_name is not None:
        return bpy.data.images[cached_name]

    # TODO: check for placeholder image and bail

    # Work on a copy so other users of the spec/gloss texture are unaffected.
    image = bpy.data.images[pyimg.blender_image_name].copy()

    width, height = image.size
    buf = np.empty(width * height * 4, dtype=np.float32)
    image.pixels.foreach_get(buf)
    rgba = buf.reshape(-1, 4)

    # Glossiness = glossinessFactor * Texture.alpha
    # Roughness  = 1 - Glossiness
    alpha = rgba[:, 3]
    if glossiness_factor != 1:
        alpha *= glossiness_factor
    np.subtract(1, alpha, out=alpha)

    image.pixels.foreach_set(rgba.reshape(-1))
    image.pack()

    # Cache the baked image name for reuse.
    pyimg.blender_roughness_image_name = image.name
    return image
def calc_locations(mh, ext):
    """Calculate locations to place each bit of the node graph at."""
    # Stack the blocks vertically, right-aligned at x = -200, then
    # shift everything so the stack is roughly vertically centered.
    col_x = -200
    block_height = 460

    cursor = 0
    locs = {}

    def place(key, occupied):
        # Record the current slot; advance only when the block is present.
        nonlocal cursor
        locs[key] = (col_x, cursor)
        if occupied:
            cursor -= block_height

    place('occlusion', mh.pymat.occlusion_texture is not None)
    place('diffuse', 'diffuseTexture' in ext or mh.vertex_color)
    place(
        'glossiness',
        'specularGlossinessTexture' in ext and ext.get('glossinessFactor', 1) != 0,
    )
    place('normal', mh.pymat.normal_texture is not None)
    place('specular', 'specularGlossinessTexture' in ext)
    place('emission', mh.pymat.emissive_texture is not None)

    # Center things
    y_offset = -cursor / 2 - 20
    return {key: (x, y + y_offset) for key, (x, y) in locs.items()}

View File

@ -1,88 +0,0 @@
# SPDX-FileCopyrightText: 2018-2022 The glTF-Blender-IO authors
#
# SPDX-License-Identifier: Apache-2.0
from ...io.com.gltf2_io import TextureInfo
from .gltf2_blender_texture import texture
def sheen( mh,
          location_sheenTint,
          location_sheenRoughness,
          sheen_socket,
          sheenTint_socket,
          sheenRoughness_socket
          ):
    """Import KHR_materials_sheen into the Principled sheen sockets.

    Constant factors are written directly to the sockets; when textures
    are present, factor-multiply nodes are inserted in front of them.

    Fixes vs. previous version:
    * no longer mutates the ``sheenColorFactor`` list owned by the glTF
      extension dict (the old ``.extend([1.0])`` corrupted the source data);
    * corrects the misspelled node label 'shennRoughness Factor'.
    """
    x_sheenTint, y_sheenTint = location_sheenTint
    x_sheenRoughness, y_sheenRoughness = location_sheenRoughness

    try:
        ext = mh.pymat.extensions['KHR_materials_sheen']
    except Exception:
        return

    sheen_socket.default_value = 1.0

    sheenTintFactor = ext.get('sheenColorFactor', [0.0, 0.0, 0.0])
    tex_info_color = ext.get('sheenColorTexture')
    if tex_info_color is not None:
        tex_info_color = TextureInfo.from_dict(tex_info_color)

    sheenRoughnessFactor = ext.get('sheenRoughnessFactor', 0.0)
    tex_info_roughness = ext.get('sheenRoughnessTexture')
    if tex_info_roughness is not None:
        tex_info_roughness = TextureInfo.from_dict(tex_info_roughness)

    if tex_info_color is None:
        # glTF color3 -> Blender RGBA; build a new list, don't mutate ext's.
        sheenTint_socket.default_value = sheenTintFactor + [1.0]
    else:
        # Mix sheenTint factor
        sheenTintFactor = sheenTintFactor + [1.0]
        if sheenTintFactor != [1.0, 1.0, 1.0, 1.0]:
            node = mh.node_tree.nodes.new('ShaderNodeMix')
            node.label = 'sheenTint Factor'
            node.data_type = 'RGBA'
            node.location = x_sheenTint - 140, y_sheenTint
            node.blend_type = 'MULTIPLY'
            # Outputs
            mh.node_tree.links.new(sheenTint_socket, node.outputs[2])
            # Inputs
            node.inputs['Factor'].default_value = 1.0
            sheenTint_socket = node.inputs[6]
            node.inputs[7].default_value = sheenTintFactor
            x_sheenTint -= 200

        texture(
            mh,
            tex_info=tex_info_color,
            label='SHEEN COLOR',
            location=(x_sheenTint, y_sheenTint),
            color_socket=sheenTint_socket
        )

    if tex_info_roughness is None:
        sheenRoughness_socket.default_value = sheenRoughnessFactor
    else:
        # Mix sheenRoughness factor
        if sheenRoughnessFactor != 1.0:
            node = mh.node_tree.nodes.new('ShaderNodeMath')
            node.label = 'sheenRoughness Factor'  # typo fix (was 'shennRoughness')
            node.location = x_sheenRoughness - 140, y_sheenRoughness
            node.operation = 'MULTIPLY'
            # Outputs
            mh.node_tree.links.new(sheenRoughness_socket, node.outputs[0])
            # Inputs
            sheenRoughness_socket = node.inputs[0]
            node.inputs[1].default_value = sheenRoughnessFactor
            x_sheenRoughness -= 200

        texture(
            mh,
            tex_info=tex_info_roughness,
            label='SHEEN ROUGHNESS',
            location=(x_sheenRoughness, y_sheenRoughness),
            is_data=True,
            color_socket=None,
            alpha_socket=sheenRoughness_socket
        )
    return

View File

@ -1,94 +0,0 @@
# SPDX-FileCopyrightText: 2018-2021 The glTF-Blender-IO authors
#
# SPDX-License-Identifier: Apache-2.0
import bpy
from ...io.com.gltf2_io import TextureInfo
from .gltf2_blender_texture import texture
from ...io.com.gltf2_io_constants import GLTF_IOR
from ..exp.material.extensions.gltf2_blender_image import TmpImageGuard, make_temp_image_copy #TODO move to com
def specular(mh, location_specular,
             location_specular_tint,
             specular_socket,
             specular_tint_socket):
    """Import KHR_materials_specular into the given Principled sockets."""
    if specular_socket is None or specular_tint_socket is None:
        return

    try:
        ext = mh.pymat.extensions['KHR_materials_specular']
    except Exception:
        return

    # Gather all factors and textures up front.
    strength_factor = ext.get('specularFactor', 1.0)
    strength_tex = ext.get('specularTexture')
    if strength_tex is not None:
        strength_tex = TextureInfo.from_dict(strength_tex)

    tint_factor = ext.get('specularColorFactor', [1.0, 1.0, 1.0])[:3]
    tint_tex = ext.get('specularColorTexture')
    if tint_tex is not None:
        tint_tex = TextureInfo.from_dict(tint_tex)

    x_strength, y_strength = location_specular
    x_tint, y_tint = location_specular_tint

    if strength_tex is None:
        # Factor is halved (glTF specular 1.0 corresponds to Blender's 0.5).
        specular_socket.default_value = strength_factor / 2.0
    else:
        if strength_factor != 1.0:
            # Multiply the texture's alpha by the (halved) scalar factor.
            math_node = mh.node_tree.nodes.new('ShaderNodeMath')
            math_node.label = 'Specular Factor'
            math_node.location = x_strength - 140, y_strength
            math_node.operation = 'MULTIPLY'
            mh.node_tree.links.new(specular_socket, math_node.outputs[0])
            specular_socket = math_node.inputs[0]
            math_node.inputs[1].default_value = strength_factor / 2.0
            x_strength -= 200

        texture(
            mh,
            tex_info=strength_tex,
            label='SPECULAR',
            location=(x_strength, y_strength),
            is_data=True,
            color_socket=None,
            alpha_socket=specular_socket
        )

    if tint_tex is None:
        specular_tint_socket.default_value = list(tint_factor) + [1.0]
    else:
        rgba_factor = list(tint_factor) + [1.0]
        if rgba_factor != [1.0, 1.0, 1.0, 1.0]:
            # Multiply the texture color by the color factor.
            mix_node = mh.node_tree.nodes.new('ShaderNodeMix')
            mix_node.label = 'SpecularColor Factor'
            mix_node.data_type = 'RGBA'
            mix_node.location = x_tint - 140, y_tint
            mix_node.blend_type = 'MULTIPLY'
            mh.node_tree.links.new(specular_tint_socket, mix_node.outputs[2])
            mix_node.inputs['Factor'].default_value = 1.0
            specular_tint_socket = mix_node.inputs[6]
            mix_node.inputs[7].default_value = rgba_factor
            x_tint -= 200

        texture(
            mh,
            tex_info=tint_tex,
            label='SPECULAR COLOR',
            location=(x_tint, y_tint),
            color_socket=specular_tint_socket,
        )

View File

@ -1,68 +0,0 @@
# SPDX-FileCopyrightText: 2018-2022 The glTF-Blender-IO authors
#
# SPDX-License-Identifier: Apache-2.0
from ...io.com.gltf2_io import TextureInfo
from .gltf2_blender_texture import texture
# [Texture] => [Separate R] => [Transmission Factor] =>
def transmission(mh, location, transmission_socket):
    """Import KHR_materials_transmission into the transmission socket."""
    x, y = location
    try:
        ext = mh.pymat.extensions['KHR_materials_transmission']
    except Exception:
        return

    factor = ext.get('transmissionFactor', 0)
    if factor == 0:
        # Default value is 0: no transmission at all.
        return

    # Activate screen refraction (for Eevee)
    mh.mat.use_screen_refraction = True

    tex_info = ext.get('transmissionTexture')
    if tex_info is not None:
        tex_info = TextureInfo.from_dict(tex_info)

    if transmission_socket is None:
        return

    if tex_info is None:
        transmission_socket.default_value = factor
        return

    if factor != 1:
        # Multiply the texture channel by the scalar factor.
        factor_node = mh.node_tree.nodes.new('ShaderNodeMath')
        factor_node.label = 'Transmission Factor'
        factor_node.location = x - 140, y
        factor_node.operation = 'MULTIPLY'
        mh.node_tree.links.new(transmission_socket, factor_node.outputs[0])
        transmission_socket = factor_node.inputs[0]
        factor_node.inputs[1].default_value = factor
        x -= 200

    # Transmission is read from the red channel of the texture.
    sep_node = mh.node_tree.nodes.new('ShaderNodeSeparateColor')
    sep_node.location = x - 150, y - 75
    mh.node_tree.links.new(transmission_socket, sep_node.outputs['Red'])
    transmission_socket = sep_node.inputs[0]
    x -= 200

    texture(
        mh,
        tex_info=tex_info,
        label='TRANSMISSION',
        location=(x, y),
        is_data=True,
        color_socket=transmission_socket,
    )

View File

@ -8,32 +8,45 @@ from .gltf2_blender_pbrMetallicRoughness import base_color, make_output_nodes
def unlit(mh):
"""Creates node tree for unlit materials."""
# Emission node for the base color
emission_node = mh.node_tree.nodes.new('ShaderNodeEmission')
emission_node = mh.nodes.new('ShaderNodeEmission')
emission_node.location = 10, 126
# Lightpath trick: makes Emission visible only to camera rays.
# Create a "Lightpath trick": makes Emission visible only to
# camera rays, so it won't "glow" in Cycles.
#
# [Is Camera Ray] => [Mix] =>
# [Transparent] => [ ]
# [Emission] => [ ]
lightpath_node = mh.node_tree.nodes.new('ShaderNodeLightPath')
transparent_node = mh.node_tree.nodes.new('ShaderNodeBsdfTransparent')
mix_node = mh.node_tree.nodes.new('ShaderNodeMixShader')
lightpath_node = mh.nodes.new('ShaderNodeLightPath')
transparent_node = mh.nodes.new('ShaderNodeBsdfTransparent')
mix_node = mh.nodes.new('ShaderNodeMixShader')
lightpath_node.location = 10, 600
transparent_node.location = 10, 240
mix_node.location = 260, 320
mh.node_tree.links.new(mix_node.inputs['Fac'], lightpath_node.outputs['Is Camera Ray'])
mh.node_tree.links.new(mix_node.inputs[1], transparent_node.outputs[0])
mh.node_tree.links.new(mix_node.inputs[2], emission_node.outputs[0])
mh.links.new(mix_node.inputs['Fac'], lightpath_node.outputs['Is Camera Ray'])
mh.links.new(mix_node.inputs[1], transparent_node.outputs[0])
mh.links.new(mix_node.inputs[2], emission_node.outputs[0])
_emission_socket, alpha_socket, _ = make_output_nodes(
mh,
location=(420, 280) if mh.is_opaque() else (150, 130),
additional_location=None, #No additional location needed for Unlit
shader_socket=mix_node.outputs[0],
make_emission_socket=False,
make_alpha_socket=not mh.is_opaque(),
make_volume_socket=None # Not possible to have KHR_materials_volume with unlit
)
# Material output
alpha_socket = None
out_node = mh.nodes.new('ShaderNodeOutputMaterial')
if mh.is_opaque():
out_node.location = 490, 290
mh.links.new(out_node.inputs[0], mix_node.outputs[0])
else:
# Create a "Mix with Transparent" setup so there's a
# place to put Alpha.
#
# Alpha => [Mix] => [Output]
# [Transparent] => [ ]
# Color => [ ]
mix2_node = mh.nodes.new('ShaderNodeMixShader')
alpha_socket = mix2_node.inputs['Fac']
mix2_node.location = 490, -50
out_node.location = 700, -70
mh.links.new(mix2_node.inputs[1], transparent_node.outputs[0])
mh.links.new(mix2_node.inputs[2], mix_node.outputs[0])
mh.links.new(out_node.inputs[0], mix2_node.outputs[0])
base_color(
mh,

View File

@ -1,83 +0,0 @@
# SPDX-FileCopyrightText: 2018-2021 The glTF-Blender-IO authors
#
# SPDX-License-Identifier: Apache-2.0
from ...io.com.gltf2_io import TextureInfo
from .gltf2_blender_texture import texture
def volume(mh, location, volume_socket, thickness_socket):
    """Import KHR_materials_volume.

    Implementation based on
    https://github.com/KhronosGroup/glTF-Blender-IO/issues/1454#issuecomment-928319444

    Fixes vs. previous version: the extension dict is looked up once (it
    was fetched via try/except twice), and the glTF-owned attenuationColor
    list is no longer mutated by ``.extend([1.0])``.
    """
    try:
        ext = mh.pymat.extensions['KHR_materials_volume']
    except Exception:
        return

    # Attenuation Color: glTF is color3, Blender wants RGBA.
    # Build a new list instead of mutating the glTF data.
    attenuation_color = ext.get('attenuationColor')
    if attenuation_color is None:
        attenuation_color = [1.0, 1.0, 1.0, 1.0]
    else:
        attenuation_color = list(attenuation_color) + [1.0]
    volume_socket.node.inputs[0].default_value = attenuation_color

    # Attenuation Distance -> Density (reciprocal); unset means density 0.
    attenuation_distance = ext.get('attenuationDistance')
    density = 0 if attenuation_distance is None else 1.0 / attenuation_distance
    volume_socket.node.inputs[1].default_value = density

    # thicknessFactor / thicknessTexture
    x, y = location
    thickness_factor = ext.get('thicknessFactor', 0)
    tex_info = ext.get('thicknessTexture')
    if tex_info is not None:
        tex_info = TextureInfo.from_dict(tex_info)

    if thickness_socket is None:
        return

    if tex_info is None:
        thickness_socket.default_value = thickness_factor
        return

    # Mix thickness factor
    if thickness_factor != 1:
        node = mh.node_tree.nodes.new('ShaderNodeMath')
        node.label = 'Thickness Factor'
        node.location = x - 140, y
        node.operation = 'MULTIPLY'
        # Outputs
        mh.node_tree.links.new(thickness_socket, node.outputs[0])
        # Inputs
        thickness_socket = node.inputs[0]
        node.inputs[1].default_value = thickness_factor
        x -= 200

    # Thickness is read from the green channel of the texture.
    node = mh.node_tree.nodes.new('ShaderNodeSeparateColor')
    node.location = x - 150, y - 75
    # Outputs
    mh.node_tree.links.new(thickness_socket, node.outputs['Green'])
    # Inputs
    thickness_socket = node.inputs[0]
    x -= 200

    texture(
        mh,
        tex_info=tex_info,
        label='THICKNESS',
        location=(x, y),
        is_data=True,
        color_socket=thickness_socket,
    )

View File

@ -129,7 +129,7 @@ class BlenderNodeAnim():
if values[i].dot(values[i-1]) < 0:
values[i] = -values[i]
fps = bpy.context.scene.render.fps
fps = (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
coords = [0] * (2 * len(keys))
coords[::2] = (key[0] * fps for key in keys)

View File

@ -28,7 +28,7 @@ class BlenderWeightAnim():
node = gltf.data.nodes[node_idx]
obj = vnode.blender_object
fps = bpy.context.scene.render.fps
fps = (bpy.context.scene.render.fps * bpy.context.scene.render.fps_base)
animation = gltf.data.animations[anim_idx]

View File

@ -0,0 +1,191 @@
# SPDX-FileCopyrightText: 2018-2021 The glTF-Blender-IO authors
#
# SPDX-License-Identifier: Apache-2.0
from ...io.com.gltf2_io import (
TextureInfo,
MaterialNormalTextureInfoClass,
MaterialPBRMetallicRoughness,
)
from .gltf2_blender_texture import texture
class MaterialHelper:
    """Bundles per-material state passed around the material importers."""

    def __init__(self, gltf, pymat, mat, vertex_color):
        self.gltf = gltf
        self.pymat = pymat
        self.mat = mat
        self.node_tree = mat.node_tree
        self.nodes = mat.node_tree.nodes
        self.links = mat.node_tree.links
        self.vertex_color = vertex_color
        # Guarantee pbr_metallic_roughness exists so callers can read it freely.
        if pymat.pbr_metallic_roughness is None:
            pymat.pbr_metallic_roughness = \
                MaterialPBRMetallicRoughness.from_dict({})
        self.settings_node = None

    def is_opaque(self):
        """True unless the material uses a non-OPAQUE alpha mode."""
        return self.pymat.alpha_mode in (None, 'OPAQUE')

    def needs_emissive(self):
        """True when there is an emissive texture or a nonzero emissive factor."""
        if self.pymat.emissive_texture is not None:
            return True
        return (self.pymat.emissive_factor or [0, 0, 0]) != [0, 0, 0]

    def get_ext(self, ext_name, default=None):
        """Fetch a material extension dict, or *default* when absent."""
        exts = self.pymat.extensions
        return exts.get(ext_name, default) if exts else default
# Creates nodes for multiplying a texture channel and scalar factor.
# [Texture] => [Sep RGB] => [Mul Factor] => socket
def scalar_factor_and_texture(
    mh: MaterialHelper,
    location,
    label,
    socket,      # socket to connect to
    factor,      # scalar factor
    tex_info,    # texture
    channel,     # texture channel to use (0-4; 4 means the alpha channel)
):
    """Wire *socket* to factor * texture[channel], or to the bare factor."""
    if isinstance(tex_info, dict):
        tex_info = TextureInfo.from_dict(tex_info)

    x, y = location

    if socket is None:
        return

    # No texture: the factor alone drives the socket.
    if tex_info is None:
        socket.default_value = factor
        return

    if factor != 1.0:
        # Math node multiplying the channel value by the scalar factor.
        mul_node = mh.nodes.new('ShaderNodeMath')
        mul_node.label = f'{label} Factor'
        mul_node.location = x - 140, y
        mul_node.operation = 'MULTIPLY'
        mh.links.new(socket, mul_node.outputs[0])
        socket = mul_node.inputs[0]
        mul_node.inputs[1].default_value = factor
        x -= 200

    if channel != 4:
        # Pull out one color channel (alpha has its own texture socket).
        sep_node = mh.nodes.new('ShaderNodeSeparateColor')
        sep_node.location = x - 150, y - 75
        mh.links.new(socket, sep_node.outputs[channel])
        socket = sep_node.inputs[0]
        x -= 200

    use_alpha = channel == 4
    texture(
        mh,
        tex_info=tex_info,
        label=label.upper(),
        location=(x, y),
        is_data=channel < 4,
        color_socket=None if use_alpha else socket,
        alpha_socket=socket if use_alpha else None,
    )
# Creates nodes for multiplying a texture color and color factor.
# [Texture] => [Mix Factor] => socket
def color_factor_and_texture(
    mh: MaterialHelper,
    location,
    label,
    socket,    # socket to connect to
    factor,    # color factor
    tex_info,  # texture
):
    """Wire *socket* to factor * texture color, or to the constant factor."""
    if isinstance(tex_info, dict):
        tex_info = TextureInfo.from_dict(tex_info)

    x, y = location

    if socket is None:
        return

    # No texture: the constant color drives the socket (RGB -> RGBA).
    if tex_info is None:
        socket.default_value = [*factor, 1]
        return

    if factor != [1, 1, 1]:
        # Mix node multiplying the texture color by the factor.
        mix_node = mh.nodes.new('ShaderNodeMix')
        mix_node.data_type = 'RGBA'
        mix_node.label = f'{label} Factor'
        mix_node.location = x - 140, y
        mix_node.blend_type = 'MULTIPLY'
        mh.links.new(socket, mix_node.outputs[2])
        mix_node.inputs['Factor'].default_value = 1
        socket = mix_node.inputs[6]
        mix_node.inputs[7].default_value = [*factor, 1]
        x -= 200

    texture(
        mh,
        tex_info=tex_info,
        label=label.upper(),
        location=(x, y),
        is_data=False,
        color_socket=socket,
    )
# [Texture] => [Normal Map] => socket
def normal_map(
    mh: MaterialHelper,
    location,
    label,
    socket,
    tex_info,
):
    """Create a Normal Map node feeding *socket* from a normal texture."""
    if isinstance(tex_info, dict):
        tex_info = MaterialNormalTextureInfoClass.from_dict(tex_info)
    if not tex_info:
        return

    x, y = location

    # Normal Map node sits between the texture and the target socket.
    nmap_node = mh.nodes.new('ShaderNodeNormalMap')
    nmap_node.location = x - 150, y - 40

    # Pick the UV map; KHR_texture_transform may override texCoord.
    uv_idx = tex_info.tex_coord or 0
    try:
        uv_idx = tex_info.extensions['KHR_texture_transform']['texCoord']
    except Exception:
        pass
    nmap_node.uv_map = 'UVMap' if uv_idx == 0 else 'UVMap.%03d' % uv_idx

    # Strength comes from the texture-info scale (default 1).
    strength = 1 if tex_info.scale is None else tex_info.scale
    nmap_node.inputs['Strength'].default_value = strength

    mh.links.new(socket, nmap_node.outputs['Normal'])
    x -= 200

    texture(
        mh,
        tex_info=tex_info,
        label=label.upper(),
        location=(x, y),
        is_data=True,
        color_socket=nmap_node.inputs['Color'],
    )

View File

@ -11,6 +11,7 @@ from ...io.imp.gltf2_io_binary import BinaryData
from ...io.com.gltf2_io_constants import DataType, ComponentType
from ...blender.com.gltf2_blender_conversion import get_attribute_type
from ..com.gltf2_blender_extras import set_extras
from ..com.gltf2_blender_utils import fast_structured_np_unique
from .gltf2_blender_material import BlenderMaterial
from .gltf2_io_draco_compression_extension import decode_primitive
@ -296,21 +297,22 @@ def do_primitives(gltf, mesh_idx, skin_idx, mesh, ob):
# Start creating things
mesh.vertices.add(len(vert_locs))
mesh.vertices.foreach_set('co', squish(vert_locs))
position_attribute = attribute_ensure(mesh.attributes, 'position', 'FLOAT_VECTOR', 'POINT')
position_attribute.data.foreach_set('vector', squish(vert_locs, np.float32))
mesh.loops.add(len(loop_vidxs))
mesh.loops.foreach_set('vertex_index', loop_vidxs)
corner_vert_attribute = attribute_ensure(mesh.attributes, '.corner_vert', 'INT', 'CORNER')
corner_vert_attribute.data.foreach_set('value', squish(loop_vidxs, np.intc))
mesh.edges.add(len(edge_vidxs) // 2)
mesh.edges.foreach_set('vertices', edge_vidxs)
edge_verts_attribute = attribute_ensure(mesh.attributes, '.edge_verts', 'INT32_2D', 'EDGE')
edge_verts_attribute.data.foreach_set('value', squish(edge_vidxs, np.intc))
mesh.polygons.add(num_faces)
# All polys are tris
loop_starts = np.arange(0, 3 * num_faces, step=3)
loop_totals = np.full(num_faces, 3)
mesh.polygons.foreach_set('loop_start', loop_starts)
mesh.polygons.foreach_set('loop_total', loop_totals)
for uv_i in range(num_uvs):
name = 'UVMap' if uv_i == 0 else 'UVMap.%03d' % uv_i
@ -320,18 +322,13 @@ def do_primitives(gltf, mesh_idx, skin_idx, mesh, ob):
print("WARNING: UV map is ignored because the maximum number of UV layers has been reached.")
break
layer.data.foreach_set('uv', squish(loop_uvs[uv_i]))
layer.uv.foreach_set('vector', squish(loop_uvs[uv_i], np.float32))
for col_i in range(num_cols):
name = 'Col' if col_i == 0 else 'Col.%03d' % col_i
layer = mesh.vertex_colors.new(name=name)
name = 'Color' if col_i == 0 else 'Color.%03d' % col_i
layer = mesh.color_attributes.new(name, 'BYTE_COLOR', 'CORNER')
if layer is None:
print("WARNING: Vertex colors are ignored because the maximum number of vertex color layers has been "
"reached.")
break
mesh.color_attributes[layer.name].data.foreach_set('color', squish(loop_cols[col_i]))
layer.data.foreach_set('color', squish(loop_cols[col_i], np.float32))
# Make sure the first Vertex Color Attribute is the rendered one
if num_cols > 0:
@ -370,7 +367,7 @@ def do_primitives(gltf, mesh_idx, skin_idx, mesh, ob):
ob.shape_key_add(name=sk_name)
key_block = mesh.shape_keys.key_blocks[sk_name]
key_block.data.foreach_set('co', squish(sk_vert_locs[sk_i]))
key_block.points.foreach_set('co', squish(sk_vert_locs[sk_i], np.float32))
sk_i += 1
@ -385,7 +382,8 @@ def do_primitives(gltf, mesh_idx, skin_idx, mesh, ob):
and 'mappings' in prim.extensions['KHR_materials_variants'].keys()
if has_materials:
material_indices = np.empty(num_faces, dtype=np.uint32)
bl_material_index_dtype = np.intc
material_indices = np.empty(num_faces, dtype=bl_material_index_dtype)
empty_material_slot_index = None
f = 0
@ -448,7 +446,8 @@ def do_primitives(gltf, mesh_idx, skin_idx, mesh, ob):
vari = variant_primitive.variants.add()
vari.variant.variant_idx = variant
mesh.polygons.foreach_set('material_index', material_indices)
material_index_attribute = attribute_ensure(mesh.attributes, 'material_index', 'INT', 'FACE')
material_index_attribute.data.foreach_set('value', material_indices)
# Custom Attributes
for idx, attr in enumerate(attributes):
@ -534,27 +533,54 @@ def points_edges_tris(mode, indices):
# 0---2---4
# \ / \ /
# 1---3
# TODO: numpyify
def alternate(i, xs):
even = i % 2 == 0
return xs if even else (xs[0], xs[2], xs[1])
tris = np.array([
alternate(i, (indices[i], indices[i + 1], indices[i + 2]))
for i in range(0, len(indices) - 2)
])
tris = squish(tris)
# in: 01234
# out: 012132234
# out (viewed as triplets): 012, 132, 234
tris = np.empty((len(indices) - 2) * 3, dtype=np.uint32)
# 012__
first_indices = indices[:-2]
# _123_
second_indices = indices[1:-1]
# __234
third_indices = indices[2:]
# Each triplet starts with the first index
# 0__, 1__, 2__ <- 012__
tris[0::3] = first_indices
# Even triplets end with the next two indices in order
# _1_, ___, _3_ <- _1_3_ <- _123_
# 01_, 1__, 23_
tris[1::6] = second_indices[0::2]
# __2, ___, __4 <- __2_4 <- __234
# 012, 1__, 234
tris[2::6] = third_indices[0::2]
# Odd triplets end with the next two indices in reverse order
# ___, _3_, ___ <- ___3_ <- __234
# 012, 13_, 234
tris[4::6] = third_indices[1::2]
# ___, __2, ___ <- __2__ <- _123_
# 012, 132, 234
tris[5::6] = second_indices[1::2]
elif mode == 6:
# TRIANGLE FAN
# 3---2
# / \ / \
# 4---0---1
# TODO: numpyify
tris = np.array([
(indices[0], indices[i], indices[i + 1])
for i in range(1, len(indices) - 1)
])
tris = squish(tris)
# in: 01234
# out: 012023034
# out (viewed as triplets): 012, 023, 034
# Start filled with the first index
# 000, 000, 000
tris = np.full((len(indices) - 2) * 3, indices[0], dtype=np.uint32)
# _1_, _2_, _3_ <- _123_
# 010, 020, 030
tris[1::3] = indices[1:-1]
# __2, __3, __4 <- __234
# 012, 023, 034
tris[2::3] = indices[2:]
else:
raise Exception('primitive mode unimplemented: %d' % mode)
@ -562,9 +588,10 @@ def points_edges_tris(mode, indices):
return points, edges, tris
def squish(array):
"""Squish nD array into 1D array (required by foreach_set)."""
return array.reshape(array.size)
def squish(array, dtype=None):
    """Return *array* flattened to a C-contiguous 1D array.

    foreach_set requires a contiguous 1D buffer (accessed via the buffer
    protocol); *dtype* optionally converts the element type in one pass.
    """
    flat = np.ascontiguousarray(array, dtype=dtype)
    return flat.reshape(flat.size)
def colors_rgb_to_rgba(rgb):
@ -646,6 +673,15 @@ def normalize_vecs(vectors):
norms = np.linalg.norm(vectors, axis=1, keepdims=True)
np.divide(vectors, norms, out=vectors, where=norms != 0)
def attribute_ensure(attributes, name, data_type, domain):
    """Return the attribute *name*, creating (or recreating) it so that it
    has the requested data_type and domain."""
    existing = attributes.get(name)
    if existing is not None:
        if existing.domain == domain and existing.data_type == data_type:
            return existing
        # Existing attribute has the wrong domain or data_type: replace it.
        attributes.remove(existing)
    return attributes.new(name, data_type, domain)
def set_poly_smoothing(gltf, pymesh, mesh, vert_normals, loop_vidxs):
num_polys = len(mesh.polygons)
@ -656,14 +692,15 @@ def set_poly_smoothing(gltf, pymesh, mesh, vert_normals, loop_vidxs):
return
if gltf.import_settings['import_shading'] == "SMOOTH":
poly_smooths = np.full(num_polys, True)
poly_sharps = np.full(num_polys, False)
f = 0
for prim in pymesh.primitives:
if 'NORMAL' not in prim.attributes:
# Primitives with no NORMALs should use flat shading
poly_smooths[f:f + prim.num_faces].fill(False)
poly_sharps[f:f + prim.num_faces].fill(True)
f += prim.num_faces
mesh.polygons.foreach_set('use_smooth', poly_smooths)
sharp_face_attribute = attribute_ensure(mesh.attributes, 'sharp_face', 'BOOLEAN', 'FACE')
sharp_face_attribute.data.foreach_set('value', poly_sharps)
return
assert gltf.import_settings['import_shading'] == "NORMALS"
@ -671,17 +708,17 @@ def set_poly_smoothing(gltf, pymesh, mesh, vert_normals, loop_vidxs):
# Try to guess which polys should be flat based on the fact that all the
# loop normals for a flat poly are = the poly's normal.
poly_smooths = np.empty(num_polys, dtype=bool)
poly_sharps = np.empty(num_polys, dtype=bool)
poly_normals = np.empty(num_polys * 3, dtype=np.float32)
mesh.polygons.foreach_get('normal', poly_normals)
mesh.polygon_normals.foreach_get('vector', poly_normals)
poly_normals = poly_normals.reshape(num_polys, 3)
f = 0
for prim in pymesh.primitives:
if 'NORMAL' not in prim.attributes:
# Primitives with no NORMALs should use flat shading
poly_smooths[f:f + prim.num_faces].fill(False)
poly_sharps[f:f + prim.num_faces].fill(True)
f += prim.num_faces
continue
@ -704,11 +741,12 @@ def set_poly_smoothing(gltf, pymesh, mesh, vert_normals, loop_vidxs):
dot_prods = np.sum(vert_ns * poly_ns, axis=1)
np.logical_or(smooth, dot_prods <= 0.9999999, out=smooth)
poly_smooths[f:f + prim.num_faces] = smooth
np.logical_not(smooth, out=poly_sharps[f:f + prim.num_faces])
f += prim.num_faces
mesh.polygons.foreach_set('use_smooth', poly_smooths)
sharp_face_attribute = attribute_ensure(mesh.attributes, 'sharp_face', 'BOOLEAN', 'FACE')
sharp_face_attribute.data.foreach_set('value', poly_sharps)
def merge_duplicate_verts(vert_locs, vert_normals, vert_joints, vert_weights, sk_vert_locs, loop_vidxs, edge_vidxs, attribute_data):
@ -766,7 +804,7 @@ def merge_duplicate_verts(vert_locs, vert_normals, vert_joints, vert_weights, sk
dots['sk%dy' % i] = locs[:, 1]
dots['sk%dz' % i] = locs[:, 2]
unique_dots, unique_ind, inv_indices = np.unique(dots, return_index=True, return_inverse=True)
unique_dots, unique_ind, inv_indices = fast_structured_np_unique(dots, return_index=True, return_inverse=True)
loop_vidxs = inv_indices[loop_vidxs]
edge_vidxs = inv_indices[edge_vidxs]

View File

@ -229,8 +229,9 @@ class BlenderNode():
if gltf.import_settings['bone_heuristic'] == "BLENDER":
pose_bone.custom_shape = bpy.data.objects[gltf.bone_shape]
armature_max_dim = max([blender_arma.dimensions[0] / blender_arma.scale[0], blender_arma.dimensions[1] / blender_arma.scale[1], blender_arma.dimensions[2] / blender_arma.scale[2]])
pose_bone.custom_shape_scale_xyz = Vector([armature_max_dim * 0.2] * 3)
armature_min_dim = min([blender_arma.dimensions[0] / blender_arma.scale[0], blender_arma.dimensions[1] / blender_arma.scale[1], blender_arma.dimensions[2] / blender_arma.scale[2]])
pose_bone.custom_shape_scale_xyz = Vector([armature_min_dim * 0.05] * 3)
pose_bone.use_custom_shape_bone_size = False
@staticmethod
def create_mesh_object(gltf, vnode):

View File

@ -8,37 +8,9 @@ from ...io.com.gltf2_io_constants import GLTF_IOR
from ...io.com.gltf2_io import TextureInfo, MaterialPBRMetallicRoughness
from ..com.gltf2_blender_material_helpers import get_gltf_node_name, create_settings_group
from .gltf2_blender_texture import texture
from .gltf2_blender_KHR_materials_clearcoat import \
clearcoat, clearcoat_roughness, clearcoat_normal
from .gltf2_blender_KHR_materials_transmission import transmission
from .gltf2_blender_KHR_materials_ior import ior
from .gltf2_blender_KHR_materials_volume import volume
from .gltf2_blender_KHR_materials_specular import specular
from .gltf2_blender_KHR_materials_sheen import sheen
from .gltf2_blender_KHR_materials_anisotropy import anisotropy
class MaterialHelper:
"""Helper class. Stores material stuff to be passed around everywhere."""
def __init__(self, gltf, pymat, mat, vertex_color):
self.gltf = gltf
self.pymat = pymat
self.mat = mat
self.node_tree = mat.node_tree
self.vertex_color = vertex_color
if pymat.pbr_metallic_roughness is None:
pymat.pbr_metallic_roughness = \
MaterialPBRMetallicRoughness.from_dict({})
self.settings_node = None
def is_opaque(self):
alpha_mode = self.pymat.alpha_mode
return alpha_mode is None or alpha_mode == 'OPAQUE'
def needs_emissive(self):
return (
self.pymat.emissive_texture is not None or
(self.pymat.emissive_factor or [0, 0, 0]) != [0, 0, 0]
)
from .gltf2_blender_material_utils import \
MaterialHelper, scalar_factor_and_texture, color_factor_and_texture, normal_map
def pbr_metallic_roughness(mh: MaterialHelper):
@ -47,10 +19,6 @@ def pbr_metallic_roughness(mh: MaterialHelper):
pbr_node.location = 10, 300
additional_location = 40, -370 # For occlusion and/or volume / original PBR extensions
# Set IOR to 1.5, this is the default in glTF
# This value may be overridden later if IOR extension is set on file
pbr_node.inputs['IOR'].default_value = GLTF_IOR
if mh.pymat.occlusion_texture is not None:
if mh.settings_node is None:
mh.settings_node = make_settings_node(mh)
@ -119,29 +87,9 @@ def pbr_metallic_roughness(mh: MaterialHelper):
occlusion_socket=mh.settings_node.inputs['Occlusion'],
)
clearcoat(
mh,
location=locs['clearcoat'],
clearcoat_socket=pbr_node.inputs['Coat Weight'],
)
clearcoat(mh, locs, pbr_node)
clearcoat_roughness(
mh,
location=locs['clearcoat_roughness'],
roughness_socket=pbr_node.inputs['Coat Roughness'],
)
clearcoat_normal(
mh,
location=locs['clearcoat_normal'],
normal_socket=pbr_node.inputs['Coat Normal'],
)
transmission(
mh,
location=locs['transmission'],
transmission_socket=pbr_node.inputs['Transmission Weight']
)
transmission(mh, locs, pbr_node)
if need_volume_node:
volume(
@ -151,13 +99,7 @@ def pbr_metallic_roughness(mh: MaterialHelper):
thickness_socket=mh.settings_node.inputs[1] if mh.settings_node else None
)
specular(
mh,
location_specular=locs['specularTexture'],
location_specular_tint=locs['specularColorTexture'],
specular_socket=pbr_node.inputs['Specular IOR Level'],
specular_tint_socket=pbr_node.inputs['Specular Tint']
)
specular(mh, locs, pbr_node)
anisotropy(
mh,
@ -167,18 +109,135 @@ def pbr_metallic_roughness(mh: MaterialHelper):
anisotropy_tangent_socket=pbr_node.inputs['Tangent']
)
sheen(
sheen(mh, locs, pbr_node)
# IOR
ior_ext = mh.get_ext('KHR_materials_ior', {})
ior = ior_ext.get('ior', GLTF_IOR)
pbr_node.inputs['IOR'].default_value = ior
def clearcoat(mh, locs, pbr_node):
ext = mh.get_ext('KHR_materials_clearcoat', {})
scalar_factor_and_texture(
mh,
location_sheenTint=locs['sheenColorTexture'],
location_sheenRoughness=locs['sheenRoughnessTexture'],
sheen_socket=pbr_node.inputs['Sheen Weight'],
sheenTint_socket=pbr_node.inputs['Sheen Tint'],
sheenRoughness_socket=pbr_node.inputs['Sheen Roughness']
location=locs['clearcoat'],
label='Clearcoat',
socket=pbr_node.inputs['Coat Weight'],
factor=ext.get('clearcoatFactor', 0),
tex_info=ext.get('clearcoatTexture'),
channel=0, # Red
)
ior(
scalar_factor_and_texture(
mh,
ior_socket=pbr_node.inputs['IOR']
location=locs['clearcoat_roughness'],
label='Clearcoat Roughness',
socket=pbr_node.inputs['Coat Roughness'],
factor=ext.get('clearcoatRoughnessFactor', 0),
tex_info=ext.get('clearcoatRoughnessTexture'),
channel=1, # Green
)
normal_map(
mh,
location=locs['clearcoat_normal'],
label='Clearcoat Normal',
socket=pbr_node.inputs['Coat Normal'],
tex_info=ext.get('clearcoatNormalTexture'),
)
def transmission(mh, locs, pbr_node):
ext = mh.get_ext('KHR_materials_transmission', {})
factor = ext.get('transmissionFactor', 0)
if factor > 0:
# Activate screen refraction (for Eevee)
mh.mat.use_screen_refraction = True
scalar_factor_and_texture(
mh,
location=locs['transmission'],
label='Transmission',
socket=pbr_node.inputs['Transmission Weight'],
factor=factor,
tex_info=ext.get('transmissionTexture'),
channel=0, # Red
)
def volume(mh, location, volume_socket, thickness_socket):
# Based on https://github.com/KhronosGroup/glTF-Blender-IO/issues/1454#issuecomment-928319444
ext = mh.get_ext('KHR_materials_volume', {})
color = ext.get('attenuationColor', [1, 1, 1])
volume_socket.node.inputs[0].default_value = [*color, 1]
distance = ext.get('attenuationDistance', float('inf'))
density = 1 / distance
volume_socket.node.inputs[1].default_value = density
scalar_factor_and_texture(
mh,
location=location,
label='Thickness',
socket=thickness_socket,
factor=ext.get('thicknessFactor', 0),
tex_info=ext.get('thicknessTexture'),
channel=1, # Green
)
def specular(mh, locs, pbr_node):
ext = mh.get_ext('KHR_materials_specular', {})
# blender.IORLevel = 0.5 * gltf.specular
scalar_factor_and_texture(
mh,
location=locs['specularTexture'],
label='Specular',
socket=pbr_node.inputs['Specular IOR Level'],
factor=0.5 * ext.get('specularFactor', 1),
tex_info=ext.get('specularTexture'),
channel=4, # Alpha
)
color_factor_and_texture(
mh,
location=locs['specularColorTexture'],
label='Specular Color',
socket=pbr_node.inputs['Specular Tint'],
factor=ext.get('specularColorFactor', [1, 1, 1]),
tex_info=ext.get('specularColorTexture'),
)
def sheen(mh, locs, pbr_node):
ext = mh.get_ext('KHR_materials_sheen')
if ext is None:
return
pbr_node.inputs['Sheen Weight'].default_value = 1
color_factor_and_texture(
mh,
location=locs['sheenColorTexture'],
label='Sheen Color',
socket=pbr_node.inputs['Sheen Tint'],
factor=ext.get('sheenColorFactor', [0, 0, 0]),
tex_info=ext.get('sheenColorTexture'),
)
scalar_factor_and_texture(
mh,
location=locs['sheenRoughnessTexture'],
label='Sheen Roughness',
socket=pbr_node.inputs['Sheen Roughness'],
factor=ext.get('sheenRoughnessFactor', 0),
tex_info=ext.get('sheenRoughnessTexture'),
channel=4, # Alpha
)
@ -190,35 +249,12 @@ def calc_locations(mh):
height = 460 # height of each block
locs = {}
try:
clearcoat_ext = mh.pymat.extensions['KHR_materials_clearcoat']
except Exception:
clearcoat_ext = {}
try:
transmission_ext = mh.pymat.exntesions['KHR_materials_transmission']
except:
transmission_ext = {}
try:
volume_ext = mh.pymat.extensions['KHR_materials_volume']
except Exception:
volume_ext = {}
try:
specular_ext = mh.pymat.extensions['KHR_materials_specular']
except:
specular_ext = {}
try:
anisotropy_ext = mh.pymat.extensions['KHR_materials_anisotropy']
except:
anisotropy_ext = {}
try:
sheen_ext = mh.pymat.extensions['KHR_materials_sheen']
except:
sheen_ext = {}
clearcoat_ext = mh.get_ext('KHR_materials_clearcoat', {})
transmission_ext = mh.get_ext('KHR_materials_transmission', {})
volume_ext = mh.get_ext('KHR_materials_volume', {})
specular_ext = mh.get_ext('KHR_materials_specular', {})
anisotropy_ext = mh.get_ext('KHR_materials_anisotropy', {})
sheen_ext = mh.get_ext('KHR_materials_sheen', {})
locs['base_color'] = (x, y)
if mh.pymat.pbr_metallic_roughness.base_color_texture is not None or mh.vertex_color:
@ -283,60 +319,24 @@ def calc_locations(mh):
# [Texture] => [Emissive Factor] =>
def emission(mh: MaterialHelper, location, color_socket, strength_socket):
x, y = location
emissive_factor = mh.pymat.emissive_factor or [0, 0, 0]
factor = mh.pymat.emissive_factor or [0, 0, 0]
ext = mh.get_ext('KHR_materials_emissive_strength', {})
strength = ext.get('emissiveStrength', 1)
strength = 1
try:
# Get strength from KHR_materials_emissive_strength if exists
strength = mh.pymat.extensions['KHR_materials_emissive_strength']['emissiveStrength']
except Exception:
pass
if factor[0] == factor[1] == factor[2]:
# Fold greyscale factor into strength
strength *= factor[0]
factor = [1, 1, 1]
if color_socket is None:
return
if mh.pymat.emissive_texture is None:
if emissive_factor == [0, 0, 0]:
# Keep as close as possible to the default Blender value when there is no emission
color_socket.default_value = [1,1,1,1]
strength_socket.default_value = 0
return
color_socket.default_value = emissive_factor + [1]
strength_socket.default_value = strength
return
# Put grayscale emissive factors into the Emission Strength
e0, e1, e2 = emissive_factor
if strength_socket and e0 == e1 == e2:
strength_socket.default_value = e0 * strength
# Otherwise, use a multiply node for it
else:
if emissive_factor != [1, 1, 1]:
node = mh.node_tree.nodes.new('ShaderNodeMix')
node.label = 'Emissive Factor'
node.data_type = 'RGBA'
node.location = x - 140, y
node.blend_type = 'MULTIPLY'
# Outputs
mh.node_tree.links.new(color_socket, node.outputs[2])
# Inputs
node.inputs['Factor'].default_value = 1.0
color_socket = node.inputs[6]
node.inputs[7].default_value = emissive_factor + [1]
x -= 200
strength_socket.default_value = strength
texture(
color_factor_and_texture(
mh,
location,
label='Emissive',
socket=color_socket,
factor=factor,
tex_info=mh.pymat.emissive_texture,
label='EMISSIVE',
location=(x, y),
color_socket=color_socket,
)
strength_socket.default_value = strength
# [Texture] => [Mix Colors] => [Color Factor] =>
@ -395,7 +395,7 @@ def base_color(
if needs_alpha_factor:
node = mh.node_tree.nodes.new('ShaderNodeMath')
node.label = 'Alpha Factor'
node.location = x - 140, y - 200
node.location = x - 140, y - 230
# Outputs
mh.node_tree.links.new(alpha_socket, node.outputs[0])
# Inputs
@ -428,7 +428,7 @@ def base_color(
if alpha_socket is not None:
node = mh.node_tree.nodes.new('ShaderNodeMath')
node.label = 'Mix Vertex Alpha'
node.location = x - 140, y - 200
node.location = x - 140, y - 230
node.operation = 'MULTIPLY'
# Outputs
mh.node_tree.links.new(alpha_socket, node.outputs[0])
@ -528,40 +528,12 @@ def metallic_roughness(mh: MaterialHelper, location, metallic_socket, roughness_
# [Texture] => [Normal Map] =>
def normal(mh: MaterialHelper, location, normal_socket):
x,y = location
tex_info = mh.pymat.normal_texture
if tex_info is None:
return
# Normal map
node = mh.node_tree.nodes.new('ShaderNodeNormalMap')
node.location = x - 150, y - 40
# Set UVMap
uv_idx = tex_info.tex_coord or 0
try:
uv_idx = tex_info.extensions['KHR_texture_transform']['texCoord']
except Exception:
pass
node.uv_map = 'UVMap' if uv_idx == 0 else 'UVMap.%03d' % uv_idx
# Set strength
scale = tex_info.scale
scale = scale if scale is not None else 1
node.inputs['Strength'].default_value = scale
# Outputs
mh.node_tree.links.new(normal_socket, node.outputs['Normal'])
# Inputs
color_socket = node.inputs['Color']
x -= 200
texture(
normal_map(
mh,
tex_info=tex_info,
label='NORMALMAP',
location=(x, y),
is_data=True,
color_socket=color_socket,
location=location,
label='Normal Map',
socket=normal_socket,
tex_info=mh.pymat.normal_texture,
)

View File

@ -39,25 +39,7 @@ def texture(
# Get image
if forced_image is None:
if mh.gltf.import_settings['import_webp_texture'] is True:
# Get the WebP image if there is one
if pytexture.extensions \
and 'EXT_texture_webp' in pytexture.extensions \
and pytexture.extensions['EXT_texture_webp']['source'] is not None:
source = pytexture.extensions['EXT_texture_webp']['source']
elif pytexture.source is not None:
source = pytexture.source
else:
source = pytexture.source
if mh.gltf.import_settings['import_webp_texture'] is False and source is None:
# In case webp is not used as a fallback, use this as main texture
if pytexture.extensions \
and 'EXT_texture_webp' in pytexture.extensions \
and pytexture.extensions['EXT_texture_webp']['source'] is not None:
source = pytexture.extensions['EXT_texture_webp']['source']
source = get_source(mh, pytexture)
if source is not None:
BlenderImage.create(mh.gltf, source)
pyimg = mh.gltf.data.images[source]
@ -188,6 +170,20 @@ def texture(
import_user_extensions('gather_import_texture_after_hook', mh.gltf, pytexture, mh.node_tree, mh, tex_info, location, label, color_socket, alpha_socket, is_data)
def get_source(mh, pytexture):
src = pytexture.source
try:
webp_src = pytexture.extensions['EXT_texture_webp']['source']
except Exception:
webp_src = None
if mh.gltf.import_settings['import_webp_texture']:
return webp_src if webp_src is not None else src
else:
return src if src is not None else webp_src
def set_filtering(tex_img, pysampler):
"""Set the filtering/interpolation on an Image Texture from the glTf sampler."""
minf = pysampler.min_filter

View File

@ -8,7 +8,7 @@
bl_info = {
"name": "Snap_Utilities_Line",
"author": "Germano Cavalcante",
"version": (6, 7, 1),
"version": (6, 8, 0),
"blender": (3, 2, 0),
"location": "View3D > TOOLS > Line Tool",
"description": "Extends Blender Snap controls",

View File

@ -8,6 +8,8 @@ import bmesh
from mathutils import Vector
from mathutils.geometry import intersect_point_line
from .snap_context_l.utils_projection import intersect_ray_ray_fac
from .common_utilities import snap_utilities
from .common_classes import (
CharMap,
@ -243,6 +245,7 @@ class SnapUtilitiesLine(SnapUtilities, bpy.types.Operator):
self.bool_update = True
self.vector_constrain = ()
self.len = 0
self.curr_dir = Vector()
if not (self.bm and self.obj):
self.obj = context.edit_object
@ -262,8 +265,6 @@ class SnapUtilitiesLine(SnapUtilities, bpy.types.Operator):
if self.navigation_ops.run(context, event, self.prevloc if self.vector_constrain else self.location):
return {'RUNNING_MODAL'}
context.area.tag_redraw()
if event.ctrl and event.type == 'Z' and event.value == 'PRESS':
bpy.ops.ed.undo()
if not self.wait_for_input:
@ -287,44 +288,50 @@ class SnapUtilitiesLine(SnapUtilities, bpy.types.Operator):
is_making_lines = bool(self.list_verts_co)
if (event.type == 'MOUSEMOVE' or self.bool_update) and self.charmap.length_entered_value == 0.0:
if (event.type == 'MOUSEMOVE' or self.bool_update):
mval = Vector((event.mouse_region_x, event.mouse_region_y))
if self.rv3d.view_matrix != self.rotMat:
self.rotMat = self.rv3d.view_matrix.copy()
self.bool_update = True
snap_utilities.cache.clear()
else:
self.bool_update = False
self.snap_obj, self.prevloc, self.location, self.type, self.bm, self.geom, self.len = snap_utilities(
self.sctx,
self.main_snap_obj,
mval,
constrain=self.vector_constrain,
previous_vert=(
self.list_verts[-1] if self.list_verts else None),
increment=self.incremental)
self.snap_to_grid()
if is_making_lines and self.preferences.auto_constrain:
if self.charmap.length_entered_value != 0.0:
ray_dir, ray_orig = self.sctx.get_ray(mval)
loc = self.list_verts_co[-1]
vec, type = self.constrain.update(
self.sctx.region, self.sctx.rv3d, mval, loc)
self.vector_constrain = [loc, loc + vec, type]
fac = intersect_ray_ray_fac(loc, self.curr_dir, ray_orig, ray_dir)
if fac < 0.0:
self.curr_dir.negate()
self.location = loc - (self.location - loc)
else:
if self.rv3d.view_matrix != self.rotMat:
self.rotMat = self.rv3d.view_matrix.copy()
self.bool_update = True
snap_utilities.cache.clear()
else:
self.bool_update = False
if event.value == 'PRESS':
self.snap_obj, self.prevloc, self.location, self.type, self.bm, self.geom, self.len = snap_utilities(
self.sctx,
self.main_snap_obj,
mval,
constrain=self.vector_constrain,
previous_vert=(
self.list_verts[-1] if self.list_verts else None),
increment=self.incremental)
self.snap_to_grid()
if is_making_lines:
loc = self.list_verts_co[-1]
self.curr_dir = self.location - loc
if self.preferences.auto_constrain:
vec, cons_type = self.constrain.update(
self.sctx.region, self.sctx.rv3d, mval, loc)
self.vector_constrain = [loc, loc + vec, cons_type]
elif event.value == 'PRESS':
if is_making_lines and self.charmap.modal_(context, event):
self.bool_update = self.charmap.length_entered_value == 0.0
if not self.bool_update:
text_value = self.charmap.length_entered_value
vector = (self.location -
self.list_verts_co[-1]).normalized()
self.location = self.list_verts_co[-1] + \
(vector * text_value)
del vector
vector = self.curr_dir.normalized()
self.location = self.list_verts_co[-1] + (vector * text_value)
elif self.constrain.modal(event, self._shift_contrain_callback):
self.bool_update = True
@ -379,6 +386,8 @@ class SnapUtilitiesLine(SnapUtilities, bpy.types.Operator):
self.list_verts = []
self.list_verts_co = []
self.charmap.clear()
else:
return {'RUNNING_MODAL'}
a = ""
if is_making_lines:
@ -387,10 +396,8 @@ class SnapUtilitiesLine(SnapUtilities, bpy.types.Operator):
context.area.header_text_set(
text="hit: %.3f %.3f %.3f %s" % (*self.location, a))
if True or is_making_lines:
return {'RUNNING_MODAL'}
return {'PASS_THROUGH'}
context.area.tag_redraw()
return {'RUNNING_MODAL'}
def draw_callback_px(self):
if self.bm:

View File

@ -185,17 +185,21 @@ def intersect_boundbox_threshold(sctx, MVP, ray_origin_local, ray_direction_loca
return (sctx.mval - p).length_squared < sctx._dist_px_sq
def intersect_ray_segment_fac(v0, v1, ray_direction, ray_origin):
a = v1 - v0
t = v0 - ray_origin
n = a.cross(ray_direction)
def intersect_ray_ray_fac(orig_a, dir_a, orig_b, dir_b):
t = orig_a - orig_b
n = dir_a.cross(dir_b)
nlen = n.length_squared
# if (nlen == 0.0f) the lines are parallel, has no nearest point, only distance squared.*/
if nlen == 0.0:
# Calculate the distance to the nearest point to origin then #
return a.dot(ray_direction) < 0
return intersect_point_line(orig_a, orig_b, orig_b + dir_b)
else:
c = n - t
cray = c.cross(ray_direction)
cray = c.cross(dir_b)
return cray.dot(n) / nlen
def intersect_ray_segment_fac(v0, v1, ray_direction, ray_origin):
dir_a = v1 - v0
return intersect_ray_ray_fac(v0, dir_a, ray_origin, ray_direction)

View File

@ -5,7 +5,7 @@
bl_info = {
"name": "Node Wrangler",
"author": "Bartek Skorupa, Greg Zaal, Sebastian Koenig, Christian Brinkmann, Florian Meyer",
"version": (3, 52),
"version": (3, 53),
"blender": (4, 0, 0),
"location": "Node Editor Toolbar or Shift-W",
"description": "Various tools to enhance and speed up node-based workflow",

View File

@ -10,7 +10,7 @@ from nodeitems_utils import node_categories_iter, NodeItemCustom
from . import operators
from .utils.constants import blend_types, geo_combine_operations, operations
from .utils.nodes import get_nodes_links, nw_check, NWBase
from .utils.nodes import get_nodes_links, NWBaseMenu
def drawlayout(context, layout, mode='non-panel'):
@ -71,7 +71,7 @@ def drawlayout(context, layout, mode='non-panel'):
col.separator()
class NodeWranglerPanel(Panel, NWBase):
class NodeWranglerPanel(Panel, NWBaseMenu):
bl_idname = "NODE_PT_nw_node_wrangler"
bl_space_type = 'NODE_EDITOR'
bl_label = "Node Wrangler"
@ -92,7 +92,7 @@ class NodeWranglerPanel(Panel, NWBase):
#
# M E N U S
#
class NodeWranglerMenu(Menu, NWBase):
class NodeWranglerMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_node_wrangler_menu"
bl_label = "Node Wrangler"
@ -101,7 +101,7 @@ class NodeWranglerMenu(Menu, NWBase):
drawlayout(context, self.layout)
class NWMergeNodesMenu(Menu, NWBase):
class NWMergeNodesMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_merge_nodes_menu"
bl_label = "Merge Selected Nodes"
@ -124,7 +124,7 @@ class NWMergeNodesMenu(Menu, NWBase):
props.merge_type = 'ALPHAOVER'
class NWMergeGeometryMenu(Menu, NWBase):
class NWMergeGeometryMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_merge_geometry_menu"
bl_label = "Merge Selected Nodes using Geometry Nodes"
@ -137,7 +137,7 @@ class NWMergeGeometryMenu(Menu, NWBase):
props.merge_type = 'GEOMETRY'
class NWMergeShadersMenu(Menu, NWBase):
class NWMergeShadersMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_merge_shaders_menu"
bl_label = "Merge Selected Nodes using Shaders"
@ -150,7 +150,7 @@ class NWMergeShadersMenu(Menu, NWBase):
props.merge_type = 'SHADER'
class NWMergeMixMenu(Menu, NWBase):
class NWMergeMixMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_merge_mix_menu"
bl_label = "Merge Selected Nodes using Mix"
@ -162,7 +162,7 @@ class NWMergeMixMenu(Menu, NWBase):
props.merge_type = 'MIX'
class NWConnectionListOutputs(Menu, NWBase):
class NWConnectionListOutputs(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_connection_list_out"
bl_label = "From:"
@ -180,7 +180,7 @@ class NWConnectionListOutputs(Menu, NWBase):
icon="RADIOBUT_OFF").from_socket = index
class NWConnectionListInputs(Menu, NWBase):
class NWConnectionListInputs(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_connection_list_in"
bl_label = "To:"
@ -201,7 +201,7 @@ class NWConnectionListInputs(Menu, NWBase):
op.to_socket = index
class NWMergeMathMenu(Menu, NWBase):
class NWMergeMathMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_merge_math_menu"
bl_label = "Merge Selected Nodes using Math"
@ -213,7 +213,7 @@ class NWMergeMathMenu(Menu, NWBase):
props.merge_type = 'MATH'
class NWBatchChangeNodesMenu(Menu, NWBase):
class NWBatchChangeNodesMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_batch_change_nodes_menu"
bl_label = "Batch Change Selected Nodes"
@ -223,7 +223,7 @@ class NWBatchChangeNodesMenu(Menu, NWBase):
layout.menu(NWBatchChangeOperationMenu.bl_idname)
class NWBatchChangeBlendTypeMenu(Menu, NWBase):
class NWBatchChangeBlendTypeMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_batch_change_blend_type_menu"
bl_label = "Batch Change Blend Type"
@ -235,7 +235,7 @@ class NWBatchChangeBlendTypeMenu(Menu, NWBase):
props.operation = 'CURRENT'
class NWBatchChangeOperationMenu(Menu, NWBase):
class NWBatchChangeOperationMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_batch_change_operation_menu"
bl_label = "Batch Change Math Operation"
@ -247,7 +247,7 @@ class NWBatchChangeOperationMenu(Menu, NWBase):
props.operation = type
class NWCopyToSelectedMenu(Menu, NWBase):
class NWCopyToSelectedMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_copy_node_properties_menu"
bl_label = "Copy to Selected"
@ -257,7 +257,7 @@ class NWCopyToSelectedMenu(Menu, NWBase):
layout.menu(NWCopyLabelMenu.bl_idname)
class NWCopyLabelMenu(Menu, NWBase):
class NWCopyLabelMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_copy_label_menu"
bl_label = "Copy Label"
@ -268,7 +268,7 @@ class NWCopyLabelMenu(Menu, NWBase):
layout.operator(operators.NWCopyLabel.bl_idname, text="from Linked Output's Name").option = 'FROM_SOCKET'
class NWAddReroutesMenu(Menu, NWBase):
class NWAddReroutesMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_add_reroutes_menu"
bl_label = "Add Reroutes"
bl_description = "Add Reroute Nodes to Selected Nodes' Outputs"
@ -280,7 +280,7 @@ class NWAddReroutesMenu(Menu, NWBase):
layout.operator(operators.NWAddReroutes.bl_idname, text="to Linked Outputs").option = 'LINKED'
class NWLinkActiveToSelectedMenu(Menu, NWBase):
class NWLinkActiveToSelectedMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_link_active_to_selected_menu"
bl_label = "Link Active to Selected"
@ -291,7 +291,7 @@ class NWLinkActiveToSelectedMenu(Menu, NWBase):
layout.menu(NWLinkUseOutputsNamesMenu.bl_idname)
class NWLinkStandardMenu(Menu, NWBase):
class NWLinkStandardMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_link_standard_menu"
bl_label = "To All Selected"
@ -307,7 +307,7 @@ class NWLinkStandardMenu(Menu, NWBase):
props.use_outputs_names = False
class NWLinkUseNodeNameMenu(Menu, NWBase):
class NWLinkUseNodeNameMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_link_use_node_name_menu"
bl_label = "Use Node Name/Label"
@ -323,7 +323,7 @@ class NWLinkUseNodeNameMenu(Menu, NWBase):
props.use_outputs_names = False
class NWLinkUseOutputsNamesMenu(Menu, NWBase):
class NWLinkUseOutputsNamesMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_link_use_outputs_names_menu"
bl_label = "Use Outputs Names"
@ -345,7 +345,11 @@ class NWAttributeMenu(bpy.types.Menu):
@classmethod
def poll(cls, context):
return nw_check(context) and context.space_data.tree_type == 'ShaderNodeTree'
space = context.space_data
return (space.type == 'NODE_EDITOR'
and space.node_tree is not None
and space.node_tree.library is None
and space.tree_type == 'ShaderNodeTree')
def draw(self, context):
l = self.layout
@ -372,7 +376,7 @@ class NWAttributeMenu(bpy.types.Menu):
l.label(text="No attributes on objects with this material")
class NWSwitchNodeTypeMenu(Menu, NWBase):
class NWSwitchNodeTypeMenu(Menu, NWBaseMenu):
bl_idname = "NODE_MT_nw_switch_node_type_menu"
bl_label = "Switch Type to..."
@ -411,8 +415,11 @@ def bgreset_menu_func(self, context):
def save_viewer_menu_func(self, context):
if (nw_check(context)
and context.space_data.tree_type == 'CompositorNodeTree'
space = context.space_data
if (space.type == 'NODE_EDITOR'
and space.node_tree is not None
and space.node_tree.library is None
and space.tree_type == 'CompositorNodeTree'
and context.scene.node_tree.nodes.active
and context.scene.node_tree.nodes.active.type == "VIEWER"):
self.layout.operator(operators.NWSaveViewer.bl_idname, icon='FILE_IMAGE')
@ -421,18 +428,22 @@ def save_viewer_menu_func(self, context):
def reset_nodes_button(self, context):
node_active = context.active_node
node_selected = context.selected_nodes
node_ignore = ["FRAME", "REROUTE", "GROUP"]
# Check if active node is in the selection and respective type
if (len(node_selected) == 1) and node_active and node_active.select and node_active.type not in node_ignore:
row = self.layout.row()
row.operator(operators.NWResetNodes.bl_idname, text="Reset Node", icon="FILE_REFRESH")
self.layout.separator()
# Check if active node is in the selection, ignore some node types
if (len(node_selected) != 1
or node_active is None
or not node_active.select
or node_active.type in {"REROUTE", "GROUP"}):
return
elif (len(node_selected) == 1) and node_active and node_active.select and node_active.type == "FRAME":
row = self.layout.row()
row = self.layout.row()
if node_active.type == "FRAME":
row.operator(operators.NWResetNodes.bl_idname, text="Reset Nodes in Frame", icon="FILE_REFRESH")
self.layout.separator()
else:
row.operator(operators.NWResetNodes.bl_idname, text="Reset Node", icon="FILE_REFRESH")
self.layout.separator()
classes = (

View File

@ -29,7 +29,9 @@ from .utils.draw import draw_callback_nodeoutline
from .utils.paths import match_files_to_socket_names, split_into_components
from .utils.nodes import (node_mid_pt, autolink, node_at_pos, get_nodes_links, is_viewer_socket, is_viewer_link,
get_group_output_node, get_output_location, force_update, get_internal_socket, nw_check,
nw_check_space_type, NWBase, get_first_enabled_output, is_visible_socket, viewer_socket_name)
nw_check_not_empty, nw_check_selected, nw_check_active, nw_check_space_type,
nw_check_node_type, nw_check_visible_outputs, nw_check_viewer_node, NWBase,
get_first_enabled_output, is_visible_socket, viewer_socket_name)
class NWLazyMix(Operator, NWBase):
"""Add a Mix RGB/Shader node by interactively drawing lines between nodes"""
@ -37,6 +39,10 @@ class NWLazyMix(Operator, NWBase):
bl_label = "Mix Nodes"
bl_options = {'REGISTER', 'UNDO'}
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_not_empty(cls, context)
def modal(self, context, event):
context.area.tag_redraw()
nodes, links = get_nodes_links(context)
@ -115,6 +121,10 @@ class NWLazyConnect(Operator, NWBase):
bl_options = {'REGISTER', 'UNDO'}
with_menu: BoolProperty()
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_not_empty(cls, context)
def modal(self, context, event):
context.area.tag_redraw()
nodes, links = get_nodes_links(context)
@ -244,10 +254,10 @@ class NWDeleteUnused(Operator, NWBase):
@classmethod
def poll(cls, context):
"""Disabled for custom nodes as we do not know which nodes are supported."""
return (nw_check(context)
and nw_check_space_type(cls, context, 'ShaderNodeTree', 'CompositorNodeTree',
'TextureNodeTree', 'GeometryNodeTree')
and context.space_data.node_tree.nodes)
return (nw_check(cls, context)
and nw_check_not_empty(cls, context)
and nw_check_space_type(cls, context, {'ShaderNodeTree', 'CompositorNodeTree',
'TextureNodeTree', 'GeometryNodeTree'}))
def execute(self, context):
nodes, links = get_nodes_links(context)
@ -334,7 +344,7 @@ class NWSwapLinks(Operator, NWBase):
@classmethod
def poll(cls, context):
return nw_check(context) and context.selected_nodes and len(context.selected_nodes) <= 2
return nw_check(cls, context) and nw_check_selected(cls, context, max=2)
def execute(self, context):
nodes, links = get_nodes_links(context)
@ -448,8 +458,7 @@ class NWResetBG(Operator, NWBase):
@classmethod
def poll(cls, context):
return (nw_check(context)
and nw_check_space_type(cls, context, 'CompositorNodeTree'))
return nw_check(cls, context) and nw_check_space_type(cls, context, {'CompositorNodeTree'})
def execute(self, context):
context.space_data.backdrop_zoom = 1
@ -468,8 +477,7 @@ class NWAddAttrNode(Operator, NWBase):
@classmethod
def poll(cls, context):
return (nw_check(context)
and nw_check_space_type(cls, context, 'ShaderNodeTree'))
return nw_check(cls, context) and nw_check_space_type(cls, context, {'ShaderNodeTree'})
def execute(self, context):
bpy.ops.node.add_node('INVOKE_DEFAULT', use_transform=True, type="ShaderNodeAttribute")
@ -496,10 +504,8 @@ class NWPreviewNode(Operator, NWBase):
@classmethod
def poll(cls, context):
"""Already implemented natively for compositing nodes."""
return (nw_check(context)
and nw_check_space_type(cls, context, 'ShaderNodeTree', 'GeometryNodeTree')
and (not context.active_node
or context.active_node.type not in {"OUTPUT_MATERIAL", "OUTPUT_WORLD"}))
return (nw_check(cls, context)
and nw_check_space_type(cls, context, {'ShaderNodeTree', 'GeometryNodeTree'}))
@staticmethod
def get_output_sockets(node_tree):
@ -803,6 +809,11 @@ class NWPreviewNode(Operator, NWBase):
active, output_node, base_node_tree == active_tree, 'SHADER'
)
# Cancel if no socket was found. This can happen for group input
# nodes with only a virtual socket output.
if active_node_socket_index is None:
return {'CANCELLED'}
node_output = active.outputs[active_node_socket_index]
if node_output.name == "Volume":
output_node_socket_index = 1
@ -895,11 +906,10 @@ class NWReloadImages(Operator):
@classmethod
def poll(cls, context):
return (nw_check(context)
and nw_check_space_type(cls, context, 'ShaderNodeTree', 'CompositorNodeTree',
'TextureNodeTree', 'GeometryNodeTree')
and context.active_node is not None
and any(is_visible_socket(out) for out in context.active_node.outputs))
"""Disabled for custom nodes."""
return (nw_check(cls, context)
and nw_check_space_type(cls, context, {'ShaderNodeTree', 'CompositorNodeTree',
'TextureNodeTree', 'GeometryNodeTree'}))
def execute(self, context):
nodes, links = get_nodes_links(context)
@ -1025,9 +1035,10 @@ class NWMergeNodes(Operator, NWBase):
@classmethod
def poll(cls, context):
return (nw_check(context)
and nw_check_space_type(cls, context, 'ShaderNodeTree', 'CompositorNodeTree',
'TextureNodeTree', 'GeometryNodeTree'))
return (nw_check(cls, context)
and nw_check_space_type(cls, context, {'ShaderNodeTree', 'CompositorNodeTree',
'TextureNodeTree', 'GeometryNodeTree'})
and nw_check_selected(cls, context))
def execute(self, context):
settings = context.preferences.addons[__package__].preferences
@ -1348,9 +1359,10 @@ class NWBatchChangeNodes(Operator, NWBase):
@classmethod
def poll(cls, context):
return (nw_check(context)
and nw_check_space_type(cls, context, 'ShaderNodeTree', 'CompositorNodeTree',
'TextureNodeTree', 'GeometryNodeTree'))
return (nw_check(cls, context)
and nw_check_space_type(cls, context, {'ShaderNodeTree', 'CompositorNodeTree',
'TextureNodeTree', 'GeometryNodeTree'})
and nw_check_selected(cls, context))
def execute(self, context):
blend_type = self.blend_type
@ -1404,6 +1416,10 @@ class NWChangeMixFactor(Operator, NWBase):
bl_description = "Change Factors of Mix Nodes and Mix Shader Nodes"
bl_options = {'REGISTER', 'UNDO'}
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_selected(cls, context)
# option: Change factor.
# If option is 1.0 or 0.0 - set to 1.0 or 0.0
# Else - change factor by option value.
@ -1437,24 +1453,15 @@ class NWCopySettings(Operator, NWBase):
@classmethod
def poll(cls, context):
return (nw_check(context)
and context.active_node is not None
and context.active_node.type != 'FRAME')
return (nw_check(cls, context)
and nw_check_active(cls, context)
and nw_check_selected(cls, context, min=2)
and nw_check_node_type(cls, context, 'FRAME', invert=True))
def execute(self, context):
node_active = context.active_node
node_selected = context.selected_nodes
# Error handling
if not (len(node_selected) > 1):
self.report({'ERROR'}, "2 nodes must be selected at least")
return {'CANCELLED'}
# Check if active node is in the selection
selected_node_names = [n.name for n in node_selected]
if node_active.name not in selected_node_names:
self.report({'ERROR'}, "No active node")
return {'CANCELLED'}
# Get nodes in selection by type
valid_nodes = [n for n in node_selected if n.type == node_active.type]
@ -1530,7 +1537,7 @@ class NWCopySettings(Operator, NWBase):
new_node.location = node_loc
for str_from, str_to in reconnections:
node_tree.connect_sockets(eval(str_from), eval(str_to))
connect_sockets(eval(str_from), eval(str_to))
success_names.append(new_node.name)
@ -1548,6 +1555,7 @@ class NWCopyLabel(Operator, NWBase):
bl_idname = "node.nw_copy_label"
bl_label = "Copy Label"
bl_options = {'REGISTER', 'UNDO'}
bl_description = "Copy label from active to selected nodes"
option: EnumProperty(
name="option",
@ -1559,6 +1567,10 @@ class NWCopyLabel(Operator, NWBase):
)
)
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_selected(cls, context, min=2)
def execute(self, context):
nodes, links = get_nodes_links(context)
option = self.option
@ -1592,9 +1604,14 @@ class NWClearLabel(Operator, NWBase):
bl_idname = "node.nw_clear_label"
bl_label = "Clear Label"
bl_options = {'REGISTER', 'UNDO'}
bl_description = "Clear labels on selected nodes"
option: BoolProperty()
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_selected(cls, context)
def execute(self, context):
nodes, links = get_nodes_links(context)
for node in [n for n in nodes if n.select]:
@ -1610,7 +1627,7 @@ class NWClearLabel(Operator, NWBase):
class NWModifyLabels(Operator, NWBase):
"""Modify Labels of all selected nodes"""
"""Modify labels of all selected nodes"""
bl_idname = "node.nw_modify_labels"
bl_label = "Modify Labels"
bl_options = {'REGISTER', 'UNDO'}
@ -1628,6 +1645,10 @@ class NWModifyLabels(Operator, NWBase):
name="Replace with"
)
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_selected(cls, context)
def execute(self, context):
nodes, links = get_nodes_links(context)
for node in [n for n in nodes if n.select]:
@ -1655,8 +1676,9 @@ class NWAddTextureSetup(Operator, NWBase):
@classmethod
def poll(cls, context):
return (nw_check(context)
and nw_check_space_type(cls, context, 'ShaderNodeTree'))
return (nw_check(cls, context)
and nw_check_space_type(cls, context, {'ShaderNodeTree'})
and nw_check_selected(cls, context))
def execute(self, context):
nodes, links = get_nodes_links(context)
@ -1758,23 +1780,22 @@ class NWAddPrincipledSetup(Operator, NWBase, ImportHelper):
@classmethod
def poll(cls, context):
return (nw_check(context)
and nw_check_space_type(cls, context, 'ShaderNodeTree'))
return (nw_check(cls, context)
and nw_check_active(cls, context)
and nw_check_space_type(cls, context, {'ShaderNodeTree'})
and nw_check_node_type(cls, context, 'BSDF_PRINCIPLED'))
def execute(self, context):
# Check if everything is ok
if not self.directory:
self.report({'INFO'}, 'No Folder Selected')
self.report({'INFO'}, 'No folder selected')
return {'CANCELLED'}
if not self.files[:]:
self.report({'INFO'}, 'No Files Selected')
self.report({'INFO'}, 'No files selected')
return {'CANCELLED'}
nodes, links = get_nodes_links(context)
active_node = nodes.active
if not (active_node and active_node.bl_idname == 'ShaderNodeBsdfPrincipled'):
self.report({'INFO'}, 'Select Principled BSDF')
return {'CANCELLED'}
# Filter textures names for texturetypes in filenames
# [Socket Name, [abbreviations and keyword list], Filename placeholder]
@ -2032,85 +2053,75 @@ class NWAddReroutes(Operator, NWBase):
]
)
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_selected(cls, context)
def execute(self, context):
tree_type = context.space_data.node_tree.type
option = self.option
nodes, links = get_nodes_links(context)
# output valid when option is 'all' or when 'loose' output has no links
valid = False
post_select = [] # nodes to be selected after execution
# create reroutes and recreate links
nodes, _links = get_nodes_links(context)
post_select = [] # Nodes to be selected after execution.
y_offset = -22.0
# Create reroutes and recreate links.
for node in [n for n in nodes if n.select]:
if node.outputs:
x = node.location.x
y = node.location.y
width = node.width
# unhide 'REROUTE' nodes to avoid issues with location.y
if node.type == 'REROUTE':
node.hide = False
# Hack needed to calculate real width
if node.hide:
bpy.ops.node.select_all(action='DESELECT')
helper = nodes.new('NodeReroute')
helper.select = True
node.select = True
# resize node and helper to zero. Then check locations to calculate width
bpy.ops.transform.resize(value=(0.0, 0.0, 0.0))
width = 2.0 * (helper.location.x - node.location.x)
# restore node location
node.location = x, y
# delete helper
node.select = False
# only helper is selected now
bpy.ops.node.delete()
x = node.location.x + width + 20.0
if node.type != 'REROUTE':
y -= 35.0
y_offset = -22.0
loc = x, y
reroutes_count = 0 # will be used when aligning reroutes added to hidden nodes
if not node.outputs:
continue
x = node.location.x + node.width + 20.0
y = node.location.y
new_node_reroutes = []
# Unhide 'REROUTE' nodes to avoid issues with location.y
if node.type == 'REROUTE':
node.hide = False
else:
y -= 35.0
reroutes_count = 0 # Will be used when aligning reroutes added to hidden nodes.
for out_i, output in enumerate(node.outputs):
pass_used = False # initial value to be analyzed if 'R_LAYERS'
# if node != 'R_LAYERS' - "pass_used" not needed, so set it to True
if node.type != 'R_LAYERS':
pass_used = True
else: # if 'R_LAYERS' check if output represent used render pass
if output.is_unavailable:
continue
if node.type == 'R_LAYERS' and output.name != 'Alpha':
# If 'R_LAYERS' check if output is used in render pass.
# If output is "Alpha", assume it's used. Not available in passes.
node_scene = node.scene
node_layer = node.layer
# If output - "Alpha" is analyzed - assume it's used. Not represented in passes.
if output.name == 'Alpha':
pass_used = True
else:
# check entries in global 'rl_outputs' variable
for rlo in rl_outputs:
if output.name in {rlo.output_name, rlo.exr_output_name}:
pass_used = getattr(node_scene.view_layers[node_layer], rlo.render_pass)
break
if pass_used:
valid = ((option == 'ALL') or
(option == 'LOOSE' and not output.links) or
(option == 'LINKED' and output.links))
# Add reroutes only if valid, but offset location in all cases.
if valid:
n = nodes.new('NodeReroute')
nodes.active = n
for link in output.links:
connect_sockets(n.outputs[0], link.to_socket)
connect_sockets(output, n.inputs[0])
n.location = loc
post_select.append(n)
for rlo in rl_outputs:
# Check entries in global 'rl_outputs' variable.
if output.name in {rlo.output_name, rlo.exr_output_name}:
if not getattr(node_scene.view_layers[node_layer], rlo.render_pass):
continue
# Output is valid when option is 'all' or when 'loose' output has no links.
valid = ((self.option == 'ALL') or
(self.option == 'LOOSE' and not output.links) or
(self.option == 'LINKED' and output.links))
if valid:
# Add reroutes only if valid.
n = nodes.new('NodeReroute')
nodes.active = n
for link in output.links:
connect_sockets(n.outputs[0], link.to_socket)
connect_sockets(output, n.inputs[0])
n.location = x, y
new_node_reroutes.append(n)
post_select.append(n)
if valid or not output.hide:
# Offset reroutes for all outputs, except hidden ones.
reroutes_count += 1
y += y_offset
loc = x, y
# Deselect the node so that after execution of the script only newly created nodes are selected
node.select = False
# nicer reroutes distribution along y when node.hide
# Nicer reroutes distribution along y when node.hide.
if node.hide:
y_translate = reroutes_count * y_offset / 2.0 - y_offset - 35.0
for reroute in [r for r in nodes if r.select]:
for reroute in new_node_reroutes:
reroute.location.y -= y_translate
for node in post_select:
node.select = True
if post_select:
for node in nodes:
# Select only newly created nodes.
node.select = node in post_select
else:
# No new nodes were created.
return {'CANCELLED'}
return {'FINISHED'}
@ -2127,9 +2138,9 @@ class NWLinkActiveToSelected(Operator, NWBase):
@classmethod
def poll(cls, context):
return (nw_check(context)
and context.active_node is not None
and context.active_node.select)
return (nw_check(cls, context)
and nw_check_active(cls, context)
and nw_check_selected(cls, context, min=2))
def execute(self, context):
nodes, links = get_nodes_links(context)
@ -2200,6 +2211,10 @@ class NWAlignNodes(Operator, NWBase):
bl_options = {'REGISTER', 'UNDO'}
margin: IntProperty(name='Margin', default=50, description='The amount of space between nodes')
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_not_empty(cls, context)
def execute(self, context):
nodes, links = get_nodes_links(context)
margin = self.margin
@ -2278,6 +2293,10 @@ class NWSelectParentChildren(Operator, NWBase):
)
)
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_selected(cls, context)
def execute(self, context):
nodes, links = get_nodes_links(context)
option = self.option
@ -2302,6 +2321,10 @@ class NWDetachOutputs(Operator, NWBase):
bl_label = "Detach Outputs"
bl_options = {'REGISTER', 'UNDO'}
@classmethod
def poll(cls, context):
return nw_check(cls, context) and nw_check_selected(cls, context)
def execute(self, context):
nodes, links = get_nodes_links(context)
selected = context.selected_nodes
@ -2327,11 +2350,11 @@ class NWLinkToOutputNode(Operator):
@classmethod
def poll(cls, context):
"""Disabled for custom nodes as we do not know which nodes are outputs."""
return (nw_check(context)
and nw_check_space_type(cls, context, 'ShaderNodeTree', 'CompositorNodeTree',
'TextureNodeTree', 'GeometryNodeTree')
and context.active_node is not None
and any(is_visible_socket(out) for out in context.active_node.outputs))
return (nw_check(cls, context)
and nw_check_space_type(cls, context, {'ShaderNodeTree', 'CompositorNodeTree',
'TextureNodeTree', 'GeometryNodeTree'})
and nw_check_active(cls, context)
and nw_check_visible_outputs(cls, context))
def execute(self, context):
nodes, links = get_nodes_links(context)
@ -2451,6 +2474,11 @@ class NWAddSequence(Operator, NWBase, ImportHelper):
default=True
)
@classmethod
def poll(cls, context):
return (nw_check(cls, context)
and nw_check_space_type(cls, context, {'ShaderNodeTree', 'CompositorNodeTree'}))
def draw(self, context):
layout = self.layout
layout.alignment = 'LEFT'
@ -2563,6 +2591,11 @@ class NWAddMultipleImages(Operator, NWBase, ImportHelper):
options={'HIDDEN', 'SKIP_SAVE'}
)
@classmethod
def poll(cls, context):
return (nw_check(cls, context)
and nw_check_space_type(cls, context, {'ShaderNodeTree', 'CompositorNodeTree'}))
def execute(self, context):
nodes, links = get_nodes_links(context)
@ -2612,8 +2645,8 @@ class NWViewerFocus(bpy.types.Operator):
@classmethod
def poll(cls, context):
return (nw_check(context)
and nw_check_space_type(cls, context, 'CompositorNodeTree'))
return (nw_check(cls, context)
and nw_check_space_type(cls, context, {'CompositorNodeTree'}))
def execute(self, context):
return {'FINISHED'}
@ -2683,12 +2716,9 @@ class NWSaveViewer(bpy.types.Operator, ExportHelper):
@classmethod
def poll(cls, context):
return (nw_check(context)
and nw_check_space_type(cls, context, 'CompositorNodeTree')
and any(img.source == 'VIEWER'
and img.render_slots == 0
for img in bpy.data.images)
and sum(bpy.data.images["Viewer Node"].size) > 0) # False if not connected or connected but no image
return (nw_check(cls, context)
and nw_check_space_type(cls, context, {'CompositorNodeTree'})
and nw_check_viewer_node(cls))
def execute(self, context):
fp = self.filepath
@ -2726,19 +2756,15 @@ class NWResetNodes(bpy.types.Operator):
@classmethod
def poll(cls, context):
space = context.space_data
return space.type == 'NODE_EDITOR'
return (nw_check(cls, context)
and nw_check_selected(cls, context)
and nw_check_active(cls, context))
def execute(self, context):
node_active = context.active_node
node_selected = context.selected_nodes
node_ignore = ["FRAME", "REROUTE", "GROUP", "SIMULATION_INPUT", "SIMULATION_OUTPUT"]
# Check that at least one node is selected
if not (len(node_selected) > 0):
self.report({'ERROR'}, "1 node must be selected at least")
return {'CANCELLED'}
active_node_name = node_active.name if node_active.select else None
valid_nodes = [n for n in node_selected if n.type not in node_ignore]

View File

@ -4,7 +4,7 @@
import bpy
from bpy_extras.node_utils import connect_sockets
from math import hypot
from math import hypot, inf
def force_update(context):
@ -200,23 +200,85 @@ def get_output_location(tree):
return loc_x, loc_y
def nw_check(context):
def nw_check(cls, context):
space = context.space_data
return (space.type == 'NODE_EDITOR'
and space.node_tree is not None
and space.node_tree.library is None)
if space.type != 'NODE_EDITOR':
cls.poll_message_set("Current editor is not a node editor.")
return False
if space.node_tree is None:
cls.poll_message_set("No node tree was found in the current node editor.")
return False
if space.node_tree.library is not None:
cls.poll_message_set("Current node tree is linked from another .blend file.")
return False
return True
def nw_check_space_type(cls, context, *args):
if context.space_data.tree_type not in args:
tree_types_str = ", ".join(t.split('NodeTree')[0].lower() for t in sorted(args))
def nw_check_not_empty(cls, context):
    """Poll helper: succeed only when the edited node tree contains at least one node."""
    tree = context.space_data.node_tree
    if tree.nodes:
        return True
    cls.poll_message_set("Current node tree does not contain any nodes.")
    return False
def nw_check_active(cls, context):
    """Poll helper: succeed only when there is an active node and it is selected."""
    node = context.active_node
    if node is not None and node.select:
        return True
    cls.poll_message_set("No active node.")
    return False
def nw_check_selected(cls, context, min=1, max=inf):
    """Poll helper: succeed when the number of selected nodes lies within [min, max]."""
    count = len(context.selected_nodes)
    if count < min:
        # Singular/plural wording depending on the required minimum.
        noun = "nodes" if min > 1 else "node"
        cls.poll_message_set(f"At least {min} {noun} must be selected.")
        return False
    if count > max:
        cls.poll_message_set(f"{count} nodes are selected, but this operator can only work on {max}.")
        return False
    return True
def nw_check_space_type(cls, context, types):
    """Poll helper: succeed only when the editor's tree type is one of *types*."""
    if context.space_data.tree_type in types:
        return True
    # Build a readable list like "compositor, shader" from the bl_idnames.
    readable = [t.split('NodeTree')[0].lower() for t in sorted(types)]
    cls.poll_message_set("Current node tree type not supported.\n"
                         "Should be one of " + ", ".join(readable) + ".")
    return False
def nw_check_node_type(cls, context, type, invert=False):
    """Poll helper: succeed when the active node is of *type* (or is not, with invert)."""
    matches = context.active_node.type == type
    if invert and matches:
        cls.poll_message_set(f"Active node should be not of type {type}.")
        return False
    if not invert and not matches:
        cls.poll_message_set(f"Active node should be of type {type}.")
        return False
    return True
def nw_check_visible_outputs(cls, context):
    """Poll helper: succeed only when the active node has at least one visible output."""
    for out in context.active_node.outputs:
        if is_visible_socket(out):
            return True
    cls.poll_message_set("Current node has no visible outputs.")
    return False
def nw_check_viewer_node(cls):
    """Poll helper: succeed only when a usable Viewer Node image exists."""
    # An image qualifies when it comes from a viewer, has no render slots,
    # and holds pixel data (size sums to zero when nothing is connected).
    found = any(img.source == 'VIEWER'
                and len(img.render_slots) == 0
                and sum(img.size) > 0
                for img in bpy.data.images)
    if not found:
        cls.poll_message_set("Viewer image not found.")
    return found
def get_first_enabled_output(node):
for output in node.outputs:
if output.enabled:
@ -232,4 +294,13 @@ def is_visible_socket(socket):
class NWBase:
@classmethod
def poll(cls, context):
return nw_check(context)
return nw_check(cls, context)
class NWBaseMenu:
    """Mixin providing the poll check shared by Node Wrangler menus."""

    @classmethod
    def poll(cls, context):
        # Available only in a node editor showing an editable tree
        # (one that is not linked in from another .blend file).
        space = context.space_data
        if space.type != 'NODE_EDITOR':
            return False
        tree = space.node_tree
        return tree is not None and tree.library is None

View File

@ -15,24 +15,30 @@ bl_info = {
"category": "Object"
}
if "bpy" in locals():
import importlib
importlib.reload(carver_utils)
importlib.reload(carver_profils)
importlib.reload(carver_draw)
importlib.reload(carver_operator)
import bpy
import imp
from bpy.props import (
BoolProperty,
StringProperty,
IntProperty
)
)
from bpy.types import (AddonPreferences, WorkSpaceTool)
from bpy.utils.toolsystem import ToolDef
from . import carver_utils
imp.reload(carver_utils)
from . import carver_profils
imp.reload(carver_profils)
from . import carver_draw
imp.reload(carver_draw)
from . import carver_operator
imp.reload(carver_operator)
from . import (
carver_utils,
carver_profils,
carver_draw,
carver_operator,
)
# TODO : Create an icon for Carver MT
# Add an icon in the toolbar

View File

@ -504,14 +504,14 @@ class RigifyBoneCollectionReference(bpy.types.PropertyGroup):
arm = self.id_data.data
if new_coll := arm.collections.get(new_val):
if new_coll := arm.collections_all.get(new_val):
self.set_collection(new_coll)
else:
self.find_collection(update=True)
def _name_search(self, _context, _edit):
arm = self.id_data.data
return [coll.name for coll in arm.collections]
return [coll.name for coll in utils.misc.flatten_children(arm.collections)]
name: StringProperty(
name="Collection Name", description="Name of the referenced bone collection",

View File

@ -17,7 +17,8 @@ from .utils.naming import (ORG_PREFIX, MCH_PREFIX, DEF_PREFIX, ROOT_NAME, make_o
from .utils.widgets import WGT_PREFIX, WGT_GROUP_PREFIX
from .utils.widgets_special import create_root_widget
from .utils.mechanism import refresh_all_drivers
from .utils.misc import select_object, ArmatureObject, verify_armature_obj, choose_next_uid
from .utils.misc import select_object, ArmatureObject, verify_armature_obj, choose_next_uid, flatten_children,\
flatten_parents
from .utils.collections import (ensure_collection, list_layer_collections,
filter_layer_collections_by_object)
from .utils.rig import get_rigify_type, get_rigify_target_rig,\
@ -134,7 +135,7 @@ class Generator(base_generate.BaseGenerator):
def __save_rig_data(self, obj: ArmatureObject, obj_found: bool):
if obj_found:
self.saved_visible_layers = {coll.name: coll.is_visible for coll in obj.data.collections}
self.saved_visible_layers = {coll.name: coll.is_visible for coll in obj.data.collections_all}
self.artifacts.generate_init_existing(obj)
@ -216,14 +217,14 @@ class Generator(base_generate.BaseGenerator):
self.widget_mirror_mesh[mid_name] = widget.data
def ensure_root_bone_collection(self):
collections = self.metarig.data.collections
collections = self.metarig.data.collections_all
validate_collection_references(self.metarig)
coll = collections.get(ROOT_COLLECTION)
if not coll:
coll = collections.new(ROOT_COLLECTION)
coll = self.metarig.data.collections.new(ROOT_COLLECTION)
if coll.rigify_ui_row <= 0:
coll.rigify_ui_row = 2 + choose_next_uid(collections, 'rigify_ui_row', min_value=1)
@ -240,7 +241,7 @@ class Generator(base_generate.BaseGenerator):
bpy.ops.object.mode_set(mode='OBJECT')
# Remove all bone collections from the target armature.
for coll in list(obj.data.collections):
for coll in list(obj.data.collections_all):
obj.data.collections.remove(coll)
# Select and duplicate metarig
@ -355,7 +356,7 @@ class Generator(base_generate.BaseGenerator):
pb.lock_scale = (True, True, True)
def ensure_bone_collection(self, name):
coll = self.obj.data.collections.get(name)
coll = self.obj.data.collections_all.get(name)
if not coll:
coll = self.obj.data.collections.new(name)
@ -435,10 +436,16 @@ class Generator(base_generate.BaseGenerator):
bone.custom_shape = obj_table[wgt_name]
def __compute_visible_layers(self):
has_ui_buttons = set().union(*[
{p.name for p in flatten_parents(coll)}
for coll in self.obj.data.collections_all
if coll.rigify_ui_row > 0
])
# Hide all layers without UI buttons
for coll in self.obj.data.collections:
for coll in self.obj.data.collections_all:
user_visible = self.saved_visible_layers.get(coll.name, coll.is_visible)
coll.is_visible = user_visible and coll.rigify_ui_row > 0
coll.is_visible = user_visible and coll.name in has_ui_buttons
def generate(self):
context = self.context
@ -709,7 +716,7 @@ def create_selection_sets(obj: ArmatureObject, _metarig: ArmatureObject):
obj.selection_sets.clear() # noqa
for coll in obj.data.collections:
for coll in obj.data.collections_all:
if not coll.rigify_sel_set:
continue
@ -725,7 +732,7 @@ def apply_bone_colors(obj, metarig, priorities: Optional[dict[str, dict[str, flo
collection_table: dict[str, tuple[int, 'RigifyColorSet']] = {
coll.name: (i, color_map[coll.rigify_color_set_id])
for i, coll in enumerate(obj.data.collections)
for i, coll in enumerate(flatten_children(obj.data.collections))
if coll.rigify_color_set_id in color_map
}

View File

@ -48,7 +48,7 @@ def create(obj): # noqa
bone_collections = {}
for bcoll in list(arm.collections):
for bcoll in list(arm.collections_all):
arm.collections.remove(bcoll)
def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0):

View File

@ -48,7 +48,7 @@ def create(obj): # noqa
bone_collections = {}
for bcoll in list(arm.collections):
for bcoll in list(arm.collections_all):
arm.collections.remove(bcoll)
def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0):

View File

@ -48,7 +48,7 @@ def create(obj): # noqa
bone_collections = {}
for bcoll in list(arm.collections):
for bcoll in list(arm.collections_all):
arm.collections.remove(bcoll)
def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0):

View File

@ -48,7 +48,7 @@ def create(obj): # noqa
bone_collections = {}
for bcoll in list(arm.collections):
for bcoll in list(arm.collections_all):
arm.collections.remove(bcoll)
def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0):

View File

@ -48,7 +48,7 @@ def create(obj): # noqa
bone_collections = {}
for bcoll in list(arm.collections):
for bcoll in list(arm.collections_all):
arm.collections.remove(bcoll)
def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0):

View File

@ -48,7 +48,7 @@ def create(obj): # noqa
bone_collections = {}
for bcoll in list(arm.collections):
for bcoll in list(arm.collections_all):
arm.collections.remove(bcoll)
def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0):

View File

@ -48,7 +48,7 @@ def create(obj): # noqa
bone_collections = {}
for bcoll in list(arm.collections):
for bcoll in list(arm.collections_all):
arm.collections.remove(bcoll)
def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0):

View File

@ -48,7 +48,7 @@ def create(obj): # noqa
bone_collections = {}
for bcoll in list(arm.collections):
for bcoll in list(arm.collections_all):
arm.collections.remove(bcoll)
def add_bone_collection(name, *, ui_row=0, ui_title='', sel_set=False, color_set_id=0):

View File

@ -96,6 +96,13 @@ def ternarySearch(f, left, right, absolutePrecision):
left = leftThird
else:
right = rightThird
def flatten_children(iterable):
    """Enumerate the iterator items as well as their children in the tree order."""
    # Depth-first pre-order walk driven by a stack of child iterators
    # instead of recursion; each item is yielded before its descendants.
    stack = [iter(iterable)]
    while stack:
        for item in stack[-1]:
            yield item
            stack.append(iter(item.children))
            break
        else:
            stack.pop()
'''
UTILITIES_FUNC_COMMON_IK_FK = ['''
@ -902,7 +909,7 @@ class RigLayers(bpy.types.Panel):
def draw(self, context):
layout = self.layout
row_table = collections.defaultdict(list)
for coll in context.active_object.data.collections:
for coll in flatten_children(context.active_object.data.collections):
row_id = coll.get('rigify_ui_row', 0)
if row_id > 0:
row_table[row_id].append(coll)
@ -913,7 +920,9 @@ class RigLayers(bpy.types.Panel):
if row_buttons:
for coll in row_buttons:
title = coll.get('rigify_ui_title') or coll.name
row.prop(coll, 'is_visible', toggle=True, text=title)
row2 = row.row()
row2.active = coll.is_visible_ancestors
row2.prop(coll, 'is_visible', toggle=True, text=title)
else:
row.separator()
'''

View File

@ -23,7 +23,7 @@ from .utils.widgets import write_widget
from .utils.naming import unique_name
from .utils.rig import upgrade_metarig_types, outdated_types, upgrade_metarig_layers, \
is_valid_metarig, metarig_needs_upgrade
from .utils.misc import verify_armature_obj, ArmatureObject, IdPropSequence
from .utils.misc import verify_armature_obj, ArmatureObject, IdPropSequence, flatten_children
from .rigs.utils import get_limb_generated_names
@ -243,6 +243,29 @@ class DATA_PT_rigify_samples(bpy.types.Panel):
# noinspection SpellCheckingInspection
# noinspection PyPep8Naming
class DATA_UL_rigify_bone_collections(UIList):
def filter_items(self, _context, data, propname):
    """Filter and order the bone-collection rows shown in the UIList.

    Returns (flags, indices): per-item filter bitflags and the display
    order, which follows the collection tree unless alphabetical sorting
    is enabled in the list's filter options.
    """
    assert propname == 'collections_all'
    collections = data.collections_all
    flags = []
    # Filtering by name.
    if self.filter_name:
        flags = bpy.types.UI_UL_list.filter_items_by_name(
            self.filter_name, self.bitflag_filter_item, collections, "name")
    if not flags:
        # No name filter (or nothing matched the helper): show every item.
        flags = [self.bitflag_filter_item] * len(collections)
    if self.use_filter_sort_alpha:
        # Reorder by name.
        indices = bpy.types.UI_UL_list.sort_items_by_name(collections, "name")
    else:
        # Sort by tree order (each parent before its children).
        index_map = {c.name: i for i, c in enumerate(flatten_children(data.collections))}
        indices = [index_map[c.name] for c in collections]
    return flags, indices
def draw_item(self, _context, layout, armature, bcoll, _icon, _active_data,
_active_prop_name, _index=0, _flt_flag=0):
active_bone = armature.edit_bones.active or armature.bones.active
@ -290,9 +313,9 @@ class DATA_PT_rigify_collection_list(bpy.types.Panel):
row.template_list(
"DATA_UL_rigify_bone_collections",
"collections",
"",
arm,
"collections",
"collections_all",
arm.collections,
"active_index",
rows=(4 if active_coll else 1),
@ -322,7 +345,7 @@ class DATA_PT_rigify_collection_list(bpy.types.Panel):
row.active = active_coll.rigify_ui_row > 0 # noqa
row.prop(active_coll, "rigify_ui_title")
if ROOT_COLLECTION not in arm.collections:
if ROOT_COLLECTION not in arm.collections_all:
layout.label(text=f"The '{ROOT_COLLECTION}' collection will be added upon generation", icon='INFO')
@ -337,11 +360,11 @@ class DATA_PT_rigify_collection_ui(bpy.types.Panel):
@classmethod
def poll(cls, context):
return is_valid_metarig(context) and len(verify_armature_obj(context.object).data.collections)
return is_valid_metarig(context) and len(verify_armature_obj(context.object).data.collections_all)
@staticmethod
def draw_btn_block(arm: Armature, parent: UILayout, bcoll_id: int, loose=False):
bcoll = arm.collections[bcoll_id]
bcoll = arm.collections_all[bcoll_id]
block = parent.row(align=True)
if bcoll == arm.collections.active:
@ -364,8 +387,10 @@ class DATA_PT_rigify_collection_ui(bpy.types.Panel):
row_table = defaultdict(list)
has_buttons = False
for i, bcoll in enumerate(arm.collections):
row_table[bcoll.rigify_ui_row].append(i)
index_map = {c.name: i for i, c in enumerate(arm.collections_all)}
for bcoll in flatten_children(arm.collections):
row_table[bcoll.rigify_ui_row].append(index_map[bcoll.name])
if bcoll.rigify_ui_row > 0:
has_buttons = True
@ -469,7 +494,7 @@ class DATA_OT_rigify_collection_set_ui_row(bpy.types.Operator):
obj = verify_armature_obj(context.object)
if self.select:
obj.data.collections.active_index = self.index
obj.data.collections[self.index].rigify_ui_row = self.row
obj.data.collections_all[self.index].rigify_ui_row = self.row
return {'FINISHED'}
@ -495,7 +520,7 @@ class DATA_OT_rigify_collection_add_ui_row(bpy.types.Operator):
def execute(self, context):
obj = verify_armature_obj(context.object)
for coll in obj.data.collections:
for coll in obj.data.collections_all:
if coll.rigify_ui_row >= self.row:
coll.rigify_ui_row += (1 if self.add else -1)
return {'FINISHED'}
@ -716,7 +741,7 @@ class DATA_OT_rigify_color_set_remove(bpy.types.Operator):
rigify_colors.remove(self.idx)
# set layers references to 0
for coll in obj.data.collections:
for coll in obj.data.collections_all:
idx = coll.rigify_color_set_id
if idx == self.idx + 1:
@ -745,7 +770,7 @@ class DATA_OT_rigify_color_set_remove_all(bpy.types.Operator):
rigify_colors.remove(0)
# set layers references to 0
for coll in obj.data.collections:
for coll in obj.data.collections_all:
coll.rigify_color_set_id = 0
return {'FINISHED'}
@ -1025,7 +1050,7 @@ class Generate(bpy.types.Operator):
def execute(self, context):
metarig = verify_armature_obj(context.object)
for bcoll in metarig.data.collections:
for bcoll in metarig.data.collections_all:
if bcoll.rigify_ui_row > 0 and bcoll.name not in SPECIAL_COLLECTIONS:
break
else:

View File

@ -102,7 +102,7 @@ def ensure_collection_uid(bcoll: BoneCollection):
uid = zlib.adler32(bcoll.name.encode("utf-8")) & max_uid
# Ensure the uid is unique within the armature
used_ids = set(coll.rigify_uid for coll in bcoll.id_data.collections)
used_ids = set(coll.rigify_uid for coll in bcoll.id_data.collections_all)
while uid in used_ids:
uid = random.randint(0, max_uid)
@ -126,14 +126,14 @@ def resolve_collection_reference(obj: ArmatureObject, ref: Any, *,
arm = obj.data
name = ref.get("name", "")
name_coll = arm.collections.get(name) if name else None
name_coll = arm.collections_all.get(name) if name else None
# First try an exact match of both name and uid
if name_coll and name_coll.rigify_uid == uid:
return name_coll
# Then try searching by the uid
for coll in arm.collections:
for coll in arm.collections_all:
if coll.rigify_uid == uid:
if update:
ref["name"] = coll.name
@ -194,7 +194,7 @@ def validate_collection_references(obj: ArmatureObject):
# Ensure uids are unique
known_uids = dict()
for bcoll in obj.data.collections:
for bcoll in obj.data.collections_all:
uid = bcoll.rigify_uid
if uid < 0:
continue

View File

@ -172,6 +172,20 @@ def find_index(sequence, item, default=None):
return default
def flatten_children(iterable: typing.Iterable):
    """Enumerate the iterator items as well as their children in the tree order."""
    # Pre-order: each item is produced first, then its whole subtree,
    # by recursing into the item's children before moving to its sibling.
    for item in iterable:
        yield item
        for descendant in flatten_children(item.children):
            yield descendant
def flatten_parents(item):
    """Enumerate the item and all its parents."""
    # Walk up the parent chain until a falsy link (e.g. None) ends it.
    current = item
    while current:
        yield current
        current = current.parent
##############################################
# Lazy references
##############################################

View File

@ -16,7 +16,7 @@ from bpy.types import bpy_prop_array, bpy_prop_collection # noqa
from idprop.types import IDPropertyArray
from mathutils import Vector
from .misc import ArmatureObject, wrap_list_to_lines, IdPropSequence, find_index
from .misc import ArmatureObject, wrap_list_to_lines, IdPropSequence, find_index, flatten_children
if TYPE_CHECKING:
from ..base_rig import BaseRig
@ -193,7 +193,7 @@ def upgrade_metarig_layers(metarig: ArmatureObject):
# Find layer collections
coll_table = {}
for coll in arm.collections:
for coll in arm.collections_all:
if m := re.match(r'^Layer (\d+)', coll.name):
coll_table[int(m[1]) - 1] = coll
@ -229,7 +229,7 @@ def upgrade_metarig_layers(metarig: ArmatureObject):
coll.name = new_name
if coll:
coll_idx = find_index(arm.collections, coll)
coll_idx = find_index(arm.collections_all, coll)
arm.collections.move(coll_idx, cur_idx)
cur_idx += 1
@ -249,7 +249,7 @@ def upgrade_metarig_layers(metarig: ArmatureObject):
root_bcoll = coll_table.get(28)
used_rows = set()
for bcoll in arm.collections:
for bcoll in arm.collections_all:
if bcoll != root_bcoll and bcoll.rigify_ui_row > 0:
used_rows.add(bcoll.rigify_ui_row)
@ -258,7 +258,7 @@ def upgrade_metarig_layers(metarig: ArmatureObject):
if i in used_rows:
row_map[i] = len(row_map) + 1
for bcoll in arm.collections:
for bcoll in arm.collections_all:
if bcoll == root_bcoll:
bcoll.rigify_ui_row = len(row_map) + 3
elif bcoll.rigify_ui_row > 0:
@ -535,13 +535,13 @@ def write_metarig(obj: ArmatureObject, layers=False, func_name="create",
code.append('\n bone_collections = {}')
code.append('\n for bcoll in list(arm.collections):'
code.append('\n for bcoll in list(arm.collections_all):'
'\n arm.collections.remove(bcoll)\n')
args = ', '.join(f'{k}={repr(v)}' for k, v in collection_attrs.items())
code.append(f" def add_bone_collection(name, *, {args}):")
code.append(f" new_bcoll = arm.collections.new(name)")
code.append(f" def add_bone_collection(name, *, parent=None, {args}):")
code.append(f" new_bcoll = arm.collections.new(name, parent=bone_collections.get(parent))")
for k, _v in collection_attrs.items():
code.append(f" new_bcoll.rigify_{k} = {k}")
code.append(" bone_collections[name] = new_bcoll")
@ -559,8 +559,10 @@ def write_metarig(obj: ArmatureObject, layers=False, func_name="create",
ref_list.add().set_collection(bone_collections[name])
""")
for i, bcoll in enumerate(arm.collections):
for bcoll in flatten_children(arm.collections):
args = [repr(bcoll.name)]
if bcoll.parent:
args.append(f"parent={bcoll.parent.name!r}")
for k, v in collection_attrs.items():
value = getattr(bcoll, "rigify_" + k)
if value != v:

View File

@ -131,7 +131,7 @@ def move_sun(context):
azimuth, elevation = get_sun_coordinates(
local_time, sun_props.latitude,
sun_props.longitude, zone,
sun_props.month, sun_props.day)
sun_props.month, sun_props.day, sun_props.year)
obj.location = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
local_time -= time_increment
obj.rotation_euler = ((elevation - pi/2, 0, -azimuth))

View File

@ -12,7 +12,7 @@
translations_tuple = (
(("*", ""),
((), ()),
("fr_FR", "Project-Id-Version: Sun Position 3.3.3 (0)\n",
("fr_FR", "Project-Id-Version: Sun Position 3.5.4 (0)\n",
(False,
("Blender's translation file (po format).",
"Copyright (C) 2022 The Blender Foundation.",
@ -31,12 +31,6 @@ translations_tuple = (
("fr_FR", "Afficher les infos dazimut et de hauteur du Soleil",
(False, ())),
),
(("*", "Daylight Savings"),
(("bpy.types.SunPosProperties.use_daylight_savings",),
()),
("fr_FR", "Heure dété",
(False, ())),
),
(("*", "Display overlays in the viewport: the direction of the north, analemmas and the Sun surface"),
(("bpy.types.SunPosAddonPreferences.show_overlays",),
()),
@ -82,10 +76,10 @@ translations_tuple = (
("fr_FR", "Options de position du Soleil",
(False, ())),
),
(("*", "Sun Position Presets"),
(("bpy.types.SUNPOS_PT_Presets",),
(("Operator", "Add Sun Position preset"),
(("bpy.types.WORLD_OT_sunpos_add_preset",),
()),
("fr_FR", "Préréglages de position du Soleil",
("fr_FR", "Ajouter un préréglage de position du Soleil",
(False, ())),
),
(("Operator", "Pick Sun in Viewport"),
@ -100,6 +94,18 @@ translations_tuple = (
("fr_FR", "Sélectionner la position du Soleil dans nimporte quelle vue 3D, puis la synchroniser avec lenvironnement",
(False, ())),
),
(("*", "Add a new preset for Sun Position settings"),
(("bpy.types.WORLD_OT_sunpos_add_preset",),
()),
("fr_FR", "Ajouter un nouveau préréglage pour les réglages de position du Soleil",
(False, ())),
),
(("*", "Sun Position Presets"),
(("bpy.types.SUNPOS_PT_Presets",),
()),
("fr_FR", "Préréglages de position du Soleil",
(False, ())),
),
(("*", "UTC Zone"),
(("bpy.types.SunPosProperties.UTC_zone",),
()),
@ -392,6 +398,12 @@ translations_tuple = (
("fr_FR", "Utiliser une seule valeur pour le jour de lannée",
(False, ())),
),
(("*", "Daylight Savings"),
(("bpy.types.SunPosProperties.use_daylight_savings",),
()),
("fr_FR", "Heure dété",
(False, ())),
),
(("*", "Daylight savings time adds 1 hour to standard time"),
(("bpy.types.SunPosProperties.use_daylight_savings",),
()),
@ -416,43 +428,55 @@ translations_tuple = (
("fr_FR", "Année",
(False, ())),
),
(("*", "Unknown projection"),
(("scripts/addons/sun_position/hdr.py:181",),
(("*", "Please select a Sun object"),
(("scripts/addons/sun_position/hdr.py:82",),
()),
("fr_FR", "Projection inconnue",
(False, ())),
),
(("*", "Enter/LMB: confirm, Esc/RMB: cancel, MMB: pan, mouse wheel: zoom, Ctrl + mouse wheel: set exposure"),
(("scripts/addons/sun_position/hdr.py:252",),
()),
("fr_FR", "Entrée/ClicG : Confirmer, Échap/ClicD : Annuler, ClicM : défiler, "
"molette : zoom, Ctrl + molette : exposition",
(False, ())),
),
(("*", "Could not find 3D View"),
(("scripts/addons/sun_position/hdr.py:263",),
()),
("fr_FR", "Impossible de trouver la vue 3D",
("fr_FR", "Veuillez sélectionner un objet Soleil",
(False, ())),
),
(("*", "Please select an Environment Texture node"),
(("scripts/addons/sun_position/hdr.py:269",),
(("scripts/addons/sun_position/hdr.py:85",
"scripts/addons/sun_position/hdr.py:253"),
()),
("fr_FR", "Veuillez utiliser un nœud de texture denvironnement",
(False, ())),
),
(("*", "Show options and info:"),
(("scripts/addons/sun_position/properties.py:297",),
(("*", "Please select a valid Environment Texture node"),
(("scripts/addons/sun_position/hdr.py:91",),
()),
("fr_FR", "Afficher les options et infos :",
("fr_FR", "Veuillez utiliser un nœud de texture denvironnement valide",
(False, ())),
),
(("*", "Could not find 3D View"),
(("scripts/addons/sun_position/hdr.py:247",),
()),
("fr_FR", "Impossible de trouver la vue 3D",
(False, ())),
),
(("*", "Enter/LMB: confirm, Esc/RMB: cancel, MMB: pan, mouse wheel: zoom, Ctrl + mouse wheel: set exposure"),
(("scripts/addons/sun_position/hdr.py:264",),
()),
("fr_FR", "Entrée/ClicG : Confirmer, Échap/ClicD : Annuler, ClicM : défiler, molette : zoom, Ctrl + molette : exposition",
(False, ())),
),
(("*", "Unknown projection"),
(("scripts/addons/sun_position/hdr.py:165",),
()),
("fr_FR", "Projection inconnue",
(False, ())),
),
(("*", "ERROR: Could not parse coordinates"),
(("scripts/addons/sun_position/sun_calc.py:54",),
(("scripts/addons/sun_position/properties.py:36",),
()),
("fr_FR", "ERREUR : Impossible danalyser les coordonnées",
(False, ())),
),
(("*", "Show options and info:"),
(("scripts/addons/sun_position/properties.py:317",),
()),
("fr_FR", "Afficher les options et infos :",
(False, ())),
),
(("Hour", "Time"),
(("scripts/addons/sun_position/ui_sun.py:224",),
()),
@ -460,13 +484,13 @@ translations_tuple = (
(False, ())),
),
(("*", "Time Local:"),
(("scripts/addons/sun_position/ui_sun.py:242",),
(("scripts/addons/sun_position/ui_sun.py:241",),
()),
("fr_FR", "Heure locale :",
(False, ())),
),
(("*", "UTC:"),
(("scripts/addons/sun_position/ui_sun.py:243",),
(("scripts/addons/sun_position/ui_sun.py:242",),
()),
("fr_FR", "UTC :",
(False, ())),

View File

@ -23,7 +23,7 @@ class SUNPOS_PT_Presets(PresetPanel, bpy.types.Panel):
class SUNPOS_OT_AddPreset(AddPresetBase, Operator):
'''Add Sun Position preset'''
'''Add a new preset for Sun Position settings'''
bl_idname = "world.sunpos_add_preset"
bl_label = "Add Sun Position preset"
preset_menu = "SUNPOS_PT_Presets"

View File

@ -11,7 +11,7 @@ bl_info = {
"description": "Allows managing UI translations directly from Blender "
"(update main .po files, update scripts' translations, etc.)",
"warning": "Still in development, not all features are fully implemented yet!",
"doc_url": "http://wiki.blender.org/index.php/Dev:Doc/How_to/Translate_Blender",
"doc_url": "https://developer.blender.org/docs/handbook/translating/translator_guide/",
"support": 'OFFICIAL',
"category": "System",
}