new addon simple_deform_helper #104464

Closed
EMM wants to merge 29 commits from Guai_Wo_Ge_EMM/blender-addons:simple_deform_helper into main

34 changed files with 3849 additions and 476 deletions
Showing only changes of commit 2c6f3c4327

@@ -11,8 +11,8 @@ It's called "global" to avoid confusion with the Blender World data-block.
 bl_info = {
     "name": "Copy Global Transform",
     "author": "Sybren A. Stüvel",
-    "version": (2, 0),
-    "blender": (3, 1, 0),
+    "version": (2, 1),
+    "blender": (3, 5, 0),
     "location": "N-panel in the 3D Viewport",
     "category": "Animation",
     "support": 'OFFICIAL',
@@ -23,10 +23,17 @@ import ast
 from typing import Iterable, Optional, Union, Any

 import bpy
-from bpy.types import Context, Object, Operator, Panel, PoseBone
+from bpy.types import Context, Object, Operator, Panel, PoseBone, UILayout
 from mathutils import Matrix


+_axis_enum_items = [
+    ("x", "X", "", 1),
+    ("y", "Y", "", 2),
+    ("z", "Z", "", 3),
+]
+
+
 class AutoKeying:
     """Auto-keying support.
@@ -235,6 +242,10 @@ class OBJECT_OT_copy_global_transform(Operator):
         return {'FINISHED'}


+class UnableToMirrorError(Exception):
+    """Raised when mirroring is enabled but no mirror object/bone is set."""
+
+
 class OBJECT_OT_paste_transform(Operator):
     bl_idname = "object.paste_transform"
     bl_label = "Paste Global Transform"
@@ -273,12 +284,33 @@ class OBJECT_OT_paste_transform(Operator):
         soft_max=5,
     )

+    use_mirror: bpy.props.BoolProperty(  # type: ignore
+        name="Mirror Transform",
+        description="When pasting, mirror the transform relative to a specific object or bone",
+        default=False,
+    )
+
+    mirror_axis_loc: bpy.props.EnumProperty(  # type: ignore
+        items=_axis_enum_items,
+        name="Location Axis",
+        description="Coordinate axis used to mirror the location part of the transform",
+        default='x',
+    )
+
+    mirror_axis_rot: bpy.props.EnumProperty(  # type: ignore
+        items=_axis_enum_items,
+        name="Rotation Axis",
+        description="Coordinate axis used to mirror the rotation part of the transform",
+        default='z',
+    )
+
     @classmethod
     def poll(cls, context: Context) -> bool:
         if not context.active_pose_bone and not context.active_object:
             cls.poll_message_set("Select an object or pose bone")
             return False
-        if not context.window_manager.clipboard.startswith("Matrix("):
+
+        clipboard = context.window_manager.clipboard.strip()
+        if not (clipboard.startswith("Matrix(") or clipboard.startswith("<Matrix 4x4")):
             cls.poll_message_set("Clipboard does not contain a valid matrix")
             return False
         return True
@@ -297,10 +329,23 @@ class OBJECT_OT_paste_transform(Operator):
         floats = tuple(tuple(float(item) for item in line.split()) for line in lines)
         return Matrix(floats)

+    @staticmethod
+    def parse_repr_m4(value: str) -> Optional[Matrix]:
+        """Four lines of (a, b, c, d) floats."""
+
+        lines = value.strip().splitlines()
+        if len(lines) != 4:
+            return None
+
+        floats = tuple(tuple(float(item.strip()) for item in line.strip()[1:-1].split(',')) for line in lines)
+        return Matrix(floats)
+
     def execute(self, context: Context) -> set[str]:
-        clipboard = context.window_manager.clipboard
+        clipboard = context.window_manager.clipboard.strip()
         if clipboard.startswith("Matrix"):
             mat = Matrix(ast.literal_eval(clipboard[6:]))
+        elif clipboard.startswith("<Matrix 4x4"):
+            mat = self.parse_repr_m4(clipboard[12:-1])
         else:
             mat = self.parse_print_m4(clipboard)
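[Reviewer note] The operator now also accepts the repr() form of a mathutils matrix, e.g. "<Matrix 4x4 (1.0, 0.0, 0.0, 0.0) ...>". A minimal standalone sketch of the new parsing path (assumes only the mathutils module; not part of the patch):

    # Sketch: parsing the repr() form of a 4x4 matrix, as parse_repr_m4 does.
    from mathutils import Matrix

    text = repr(Matrix())       # stand-in for context.window_manager.clipboard
    inner = text[12:-1]         # strip the "<Matrix 4x4 " prefix and trailing ">"
    rows = inner.strip().splitlines()
    assert len(rows) == 4
    floats = tuple(
        tuple(float(item.strip()) for item in row.strip()[1:-1].split(','))
        for row in rows
    )
    print(Matrix(floats))       # identity matrix round-trips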
@@ -308,6 +353,12 @@ class OBJECT_OT_paste_transform(Operator):
             self.report({'ERROR'}, "Clipboard does not contain a valid matrix")
             return {'CANCELLED'}

+        try:
+            mat = self._maybe_mirror(context, mat)
+        except UnableToMirrorError:
+            self.report({'ERROR'}, "Unable to mirror, no mirror object/bone configured")
+            return {'CANCELLED'}
+
         applicator = {
             'CURRENT': self._paste_current,
             'EXISTING_KEYS': self._paste_existing_keys,
@@ -315,6 +366,68 @@ class OBJECT_OT_paste_transform(Operator):
         }[self.method]
         return applicator(context, mat)
+    def _maybe_mirror(self, context: Context, matrix: Matrix) -> Matrix:
+        if not self.use_mirror:
+            return matrix
+
+        mirror_ob = context.scene.addon_copy_global_transform_mirror_ob
+        mirror_bone = context.scene.addon_copy_global_transform_mirror_bone
+
+        # No mirror object means "current armature object".
+        ctx_ob = context.object
+        if not mirror_ob and mirror_bone and ctx_ob and ctx_ob.type == 'ARMATURE':
+            mirror_ob = ctx_ob
+
+        if not mirror_ob:
+            raise UnableToMirrorError()
+
+        if mirror_ob.type == 'ARMATURE' and mirror_bone:
+            return self._mirror_over_bone(matrix, mirror_ob, mirror_bone)
+        return self._mirror_over_ob(matrix, mirror_ob)
+
+    def _mirror_over_ob(self, matrix: Matrix, mirror_ob: bpy.types.Object) -> Matrix:
+        mirror_matrix = mirror_ob.matrix_world
+        return self._mirror_over_matrix(matrix, mirror_matrix)
+
+    def _mirror_over_bone(self, matrix: Matrix, mirror_ob: bpy.types.Object, mirror_bone_name: str) -> Matrix:
+        bone = mirror_ob.pose.bones[mirror_bone_name]
+        mirror_matrix = mirror_ob.matrix_world @ bone.matrix
+        return self._mirror_over_matrix(matrix, mirror_matrix)
+
+    def _mirror_over_matrix(self, matrix: Matrix, mirror_matrix: Matrix) -> Matrix:
+        # Compute the matrix in the space of the mirror matrix:
+        mat_local = mirror_matrix.inverted() @ matrix
+
+        # Decompose the matrix, as we don't want to touch the scale. This
+        # operator should only mirror the translation and rotation components.
+        trans, rot_q, scale = mat_local.decompose()
+
+        # Mirror the translation component:
+        axis_index = ord(self.mirror_axis_loc) - ord('x')
+        trans[axis_index] *= -1
+
+        # Flip the rotation, and use a rotation order that applies the to-be-flipped axes first.
+        match self.mirror_axis_rot:
+            case 'x':
+                rot_e = rot_q.to_euler('XYZ')
+                rot_e.x *= -1  # Flip the requested rotation axis.
+                rot_e.y *= -1  # Also flip the bone roll.
+            case 'y':
+                rot_e = rot_q.to_euler('YZX')
+                rot_e.y *= -1  # Flip the requested rotation axis.
+                rot_e.z *= -1  # Also flip another axis? Not sure how to handle this one.
+            case 'z':
+                rot_e = rot_q.to_euler('ZYX')
+                rot_e.z *= -1  # Flip the requested rotation axis.
+                rot_e.y *= -1  # Also flip the bone roll.
+
+        # Recompose the local matrix:
+        mat_local = Matrix.LocRotScale(trans, rot_e, scale)
+
+        # Go back to world space:
+        mirrored_world = mirror_matrix @ mat_local
+        return mirrored_world
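[Reviewer note] Sketch of the mirroring math above, runnable with just the mathutils module. It hardcodes the mirror_axis_loc='x' / mirror_axis_rot='x' case; the operator selects the axes via its enum properties:

    # Sketch: mirror a world-space matrix over a "mirror" matrix, following
    # _mirror_over_matrix: flip one location axis and two Euler rotation axes
    # in the mirror's local space, leaving scale untouched.
    from mathutils import Matrix

    def mirror_over_matrix(matrix: Matrix, mirror_matrix: Matrix) -> Matrix:
        mat_local = mirror_matrix.inverted() @ matrix   # into mirror space
        trans, rot_q, scale = mat_local.decompose()
        trans.x *= -1                                   # mirror location over X
        rot_e = rot_q.to_euler('XYZ')
        rot_e.x *= -1                                   # flip the rotation axis
        rot_e.y *= -1                                   # and the roll
        mat_local = Matrix.LocRotScale(trans, rot_e, scale)
        return mirror_matrix @ mat_local                # back to world space

    print(mirror_over_matrix(Matrix.Translation((1.0, 2.0, 3.0)), Matrix()))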
     @staticmethod
     def _paste_current(context: Context, matrix: Matrix) -> set[str]:
         set_matrix(context, matrix)
@@ -370,10 +483,13 @@ class OBJECT_OT_paste_transform(Operator):
         context.scene.frame_set(int(current_frame), subframe=current_frame % 1.0)
-class VIEW3D_PT_copy_global_transform(Panel):
+class PanelMixin:
     bl_space_type = 'VIEW_3D'
     bl_region_type = 'UI'
     bl_category = "Animation"
+
+
+class VIEW3D_PT_copy_global_transform(PanelMixin, Panel):
     bl_label = "Global Transform"

     def draw(self, context: Context) -> None:
@@ -383,7 +499,15 @@ class VIEW3D_PT_copy_global_transform(Panel):
         layout.operator("object.copy_global_transform", text="Copy", icon='COPYDOWN')

         paste_col = layout.column(align=True)
-        paste_col.operator("object.paste_transform", text="Paste", icon='PASTEDOWN').method = 'CURRENT'
+
+        paste_row = paste_col.row(align=True)
+        paste_props = paste_row.operator("object.paste_transform", text="Paste", icon='PASTEDOWN')
+        paste_props.method = 'CURRENT'
+        paste_props.use_mirror = False
+        paste_props = paste_row.operator("object.paste_transform", text="Mirrored", icon='PASTEFLIPDOWN')
+        paste_props.method = 'CURRENT'
+        paste_props.use_mirror = True

         wants_autokey_col = paste_col.column(align=True)
         has_autokey = context.scene.tool_settings.use_keyframe_insert_auto
         wants_autokey_col.enabled = has_autokey
@@ -402,6 +526,42 @@ class VIEW3D_PT_copy_global_transform(Panel):
         ).method = 'BAKE'
+class VIEW3D_PT_copy_global_transform_mirror(PanelMixin, Panel):
+    bl_label = "Mirror Options"
+    bl_parent_id = "VIEW3D_PT_copy_global_transform"
+
+    def draw(self, context: Context) -> None:
+        layout = self.layout
+        scene = context.scene
+        layout.prop(scene, 'addon_copy_global_transform_mirror_ob', text="Object")
+
+        mirror_ob = scene.addon_copy_global_transform_mirror_ob
+        if mirror_ob is None:
+            # No explicit mirror object means "the current armature", so then the bone name should be editable.
+            if context.object and context.object.type == 'ARMATURE':
+                self._bone_search(layout, scene, context.object)
+            else:
+                self._bone_entry(layout, scene)
+        elif mirror_ob.type == 'ARMATURE':
+            self._bone_search(layout, scene, mirror_ob)
+
+    def _bone_search(self, layout: UILayout, scene: bpy.types.Scene, armature_ob: bpy.types.Object) -> None:
+        """Search within the bones of the given armature."""
+        assert armature_ob and armature_ob.type == 'ARMATURE'
+
+        layout.prop_search(
+            scene,
+            "addon_copy_global_transform_mirror_bone",
+            armature_ob.data,
+            "edit_bones" if armature_ob.mode == 'EDIT' else "bones",
+            text="Bone",
+        )
+
+    def _bone_entry(self, layout: UILayout, scene: bpy.types.Scene) -> None:
+        """Allow manual entry of a bone name."""
+        layout.prop(scene, "addon_copy_global_transform_mirror_bone", text="Bone")
 ### Messagebus subscription to monitor changes & refresh panels.
 _msgbus_owner = object()

@@ -419,6 +579,7 @@
 classes = (
     OBJECT_OT_copy_global_transform,
     OBJECT_OT_paste_transform,
     VIEW3D_PT_copy_global_transform,
+    VIEW3D_PT_copy_global_transform_mirror,
 )
 _register, _unregister = bpy.utils.register_classes_factory(classes)

@@ -447,8 +608,28 @@ def register():
     _register()
     bpy.app.handlers.load_post.append(_on_blendfile_load_post)
+    # The mirror object & bone name are stored on the scene, and not on the
+    # operator. This makes it possible to set up the operator for use in a
+    # certain scene, while keeping hotkey assignments working as usual.
+    #
+    # The goal is to allow hotkeys for "copy", "paste", and "paste mirrored",
+    # while keeping the other choices in a more global place.
+    bpy.types.Scene.addon_copy_global_transform_mirror_ob = bpy.props.PointerProperty(
+        type=bpy.types.Object,
+        name="Mirror Object",
+        description="Object to mirror over. Leave empty and name a bone to always mirror "
+        "over that bone of the active armature",
+    )
+    bpy.types.Scene.addon_copy_global_transform_mirror_bone = bpy.props.StringProperty(
+        name="Mirror Bone",
+        description="Bone to use for the mirroring",
+    )
+

 def unregister():
     _unregister()
     _unregister_message_bus()
     bpy.app.handlers.load_post.remove(_on_blendfile_load_post)
+
+    del bpy.types.Scene.addon_copy_global_transform_mirror_ob
+    del bpy.types.Scene.addon_copy_global_transform_mirror_bone
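[Reviewer note] With the mirror object/bone on the scene, a keymap entry only needs to pin the per-invocation options. A hedged sketch of what a "Paste Mirrored" hotkey could look like (the key choice is illustrative, not part of this patch):

    # Sketch: binding "paste mirrored" to a hotkey; only method/use_mirror live
    # on the operator, the mirror object/bone come from the scene properties.
    import bpy

    kc = bpy.context.window_manager.keyconfigs.addon
    if kc:  # the addon keyconfig is unavailable in background mode
        km = kc.keymaps.new(name="3D View", space_type='VIEW_3D')
        kmi = km.keymap_items.new("object.paste_transform", 'V', 'PRESS', ctrl=True, shift=True)
        kmi.properties.method = 'CURRENT'
        kmi.properties.use_mirror = True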


@@ -13,6 +13,7 @@ bl_info = {
     "version": (0, 1),
     "blender": (2, 80, 0),
     "description": "Various dependency graph debugging tools",
+    "location": "Properties > View Layer > Dependency Graph",
     "warning": "",
     "doc_url": "",
     "tracker_url": "",


@@ -98,7 +98,7 @@ def sorted_nodes(bvh_nodes):
 def read_bvh(context, file_path, rotate_mode='XYZ', global_scale=1.0):
     # File loading stuff
     # Open the file for importing
-    file = open(file_path, 'rU')
+    file = open(file_path, 'r')

     # Separate into a list of lists, each line a list of words.
     file_lines = file.readlines()

io_scene_3ds/__init__.py (new file, 164 lines)

@@ -0,0 +1,164 @@
# SPDX-License-Identifier: GPL-2.0-or-later

from bpy_extras.io_utils import (
    ImportHelper,
    ExportHelper,
    orientation_helper,
    axis_conversion,
)
from bpy.props import (
    BoolProperty,
    EnumProperty,
    FloatProperty,
    StringProperty,
)
import bpy

bl_info = {
    "name": "Autodesk 3DS format",
    "author": "Bob Holcomb, Campbell Barton, Andreas Atteneder, Sebastian Schrand",
    "version": (2, 3, 2),
    "blender": (3, 0, 0),
    "location": "File > Import",
    "description": "Import 3DS, meshes, uvs, materials, textures, "
                   "cameras & lamps",
    "warning": "Images must be in file folder, "
               "filenames are limited to DOS 8.3 format",
    "doc_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
               "Scripts/Import-Export/Autodesk_3DS",
    "category": "Import-Export",
}

if "bpy" in locals():
    import importlib
    if "import_3ds" in locals():
        importlib.reload(import_3ds)
    if "export_3ds" in locals():
        importlib.reload(export_3ds)


@orientation_helper(axis_forward='Y', axis_up='Z')
class Import3DS(bpy.types.Operator, ImportHelper):
    """Import from 3DS file format (.3ds)"""
    bl_idname = "import_scene.autodesk_3ds"
    bl_label = 'Import 3DS'
    bl_options = {'UNDO'}

    filename_ext = ".3ds"
    filter_glob: StringProperty(default="*.3ds", options={'HIDDEN'})

    constrain_size: FloatProperty(
        name="Size Constraint",
        description="Scale the model by 10 until it reaches the "
                    "size constraint (0 to disable)",
        min=0.0, max=1000.0,
        soft_min=0.0, soft_max=1000.0,
        default=10.0,
    )
    use_image_search: BoolProperty(
        name="Image Search",
        description="Search subdirectories for any associated images "
                    "(Warning, may be slow)",
        default=True,
    )
    use_apply_transform: BoolProperty(
        name="Apply Transform",
        description="Workaround for object transformations "
                    "importing incorrectly",
        default=True,
    )
    read_keyframe: bpy.props.BoolProperty(
        name="Read Keyframe",
        description="Read the keyframe data",
        default=True,
    )
    use_world_matrix: bpy.props.BoolProperty(
        name="World Space",
        description="Transform to matrix world",
        default=False,
    )

    def execute(self, context):
        from . import import_3ds

        keywords = self.as_keywords(ignore=("axis_forward",
                                            "axis_up",
                                            "filter_glob",
                                            ))
        global_matrix = axis_conversion(from_forward=self.axis_forward,
                                        from_up=self.axis_up,
                                        ).to_4x4()
        keywords["global_matrix"] = global_matrix

        return import_3ds.load(self, context, **keywords)


@orientation_helper(axis_forward='Y', axis_up='Z')
class Export3DS(bpy.types.Operator, ExportHelper):
    """Export to 3DS file format (.3ds)"""
    bl_idname = "export_scene.autodesk_3ds"
    bl_label = 'Export 3DS'

    filename_ext = ".3ds"
    filter_glob: StringProperty(
        default="*.3ds",
        options={'HIDDEN'},
    )

    use_selection: BoolProperty(
        name="Selection Only",
        description="Export selected objects only",
        default=False,
    )

    def execute(self, context):
        from . import export_3ds

        keywords = self.as_keywords(ignore=("axis_forward",
                                            "axis_up",
                                            "filter_glob",
                                            "check_existing",
                                            ))
        global_matrix = axis_conversion(to_forward=self.axis_forward,
                                        to_up=self.axis_up,
                                        ).to_4x4()
        keywords["global_matrix"] = global_matrix

        return export_3ds.save(self, context, **keywords)


# Add to a menu
def menu_func_export(self, context):
    self.layout.operator(Export3DS.bl_idname, text="3D Studio (.3ds)")


def menu_func_import(self, context):
    self.layout.operator(Import3DS.bl_idname, text="3D Studio (.3ds)")


def register():
    bpy.utils.register_class(Import3DS)
    bpy.utils.register_class(Export3DS)

    bpy.types.TOPBAR_MT_file_import.append(menu_func_import)
    bpy.types.TOPBAR_MT_file_export.append(menu_func_export)


def unregister():
    bpy.utils.unregister_class(Import3DS)
    bpy.utils.unregister_class(Export3DS)

    bpy.types.TOPBAR_MT_file_import.remove(menu_func_import)
    bpy.types.TOPBAR_MT_file_export.remove(menu_func_export)


# NOTES:
# why add 1 extra vertex? and remove it when done? -
# "Answer - eekadoodle - would need to re-order UV's without this since face
# order isnt always what we give blender, BMesh will solve :D"
#
# disabled scaling to size, this requires exposing bb (easy) and understanding
# how it works (needs some time)

if __name__ == "__main__":
    register()
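[Reviewer note] Once registered, the operators can also be driven from scripts via their bl_idname; a quick usage sketch (the file paths are placeholders):

    # Sketch: invoking the 3DS import/export operators defined above.
    import bpy

    bpy.ops.import_scene.autodesk_3ds(filepath="/path/to/model.3ds", use_image_search=False)
    bpy.ops.export_scene.autodesk_3ds(filepath="/path/to/out.3ds", use_selection=True)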

io_scene_3ds/export_3ds.py (new file, 1437 lines; file diff suppressed because it is too large)

io_scene_3ds/import_3ds.py (new file, 1270 lines; file diff suppressed because it is too large)


@@ -3,7 +3,7 @@
 bl_info = {
     "name": "FBX format",
     "author": "Campbell Barton, Bastien Montagne, Jens Restemeier, @Mysteryem",
-    "version": (5, 1, 0),
+    "version": (5, 2, 0),
     "blender": (3, 6, 0),
     "location": "File > Import-Export",
     "description": "FBX IO meshes, UVs, vertex colors, materials, textures, cameras, lamps and actions",


@@ -3,14 +3,13 @@
 # Script copyright (C) Campbell Barton, Bastien Montagne

-import array
 import datetime
 import math
 import numpy as np
 import os
 import time

-from itertools import zip_longest, chain
+from itertools import zip_longest
 from functools import cache

 if "bpy" in locals():
@@ -51,7 +50,7 @@ from .fbx_utils import (
     matrix4_to_array, similar_values, shape_difference_exclude_similar, astype_view_signedness, fast_first_axis_unique,
     fast_first_axis_flat,
     # Mesh transform helpers.
-    vcos_transformed_gen, vcos_transformed, nors_transformed,
+    vcos_transformed, nors_transformed,
     # UUID from key.
     get_fbx_uuid_from_key,
     # Key generators.
@@ -932,6 +931,26 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
     me.edges.foreach_get("vertices", t_ev)
     me.loops.foreach_get("edge_index", t_lei)

+    # Polygons might not be in the same order as loops. To export per-loop and per-polygon data in a matching order,
+    # one must be set into the order of the other. Since there are fewer polygons than loops and there are usually
+    # more geometry layers exported that are per-loop than per-polygon, it's more efficient to re-order polygons and
+    # per-polygon data.
+    perm_polygons_to_loop_order = None
+    # t_ls indicates the ordering of polygons compared to loops. When t_ls is sorted, polygons and loops are in the
+    # same order. Since each loop must be assigned to exactly one polygon for the mesh to be valid, every value in
+    # t_ls must be unique, so t_ls will be monotonically increasing when sorted.
+    # t_ls is expected to be in the same order as loops in most cases, since exiting Edit mode sorts t_ls, so do an
+    # initial check for any element being smaller than the previous element to determine if sorting is required.
+    sort_polygon_data = np.any(t_ls[1:] < t_ls[:-1])
+    if sort_polygon_data:
+        # t_ls is not sorted, so get the indices that would sort t_ls using argsort; these will be re-used to sort
+        # per-polygon data.
+        # Using 'stable' for radix sort, which performs much better with partially ordered data and slightly worse
+        # with completely random data, compared to the default of 'quicksort' for introsort.
+        perm_polygons_to_loop_order = np.argsort(t_ls, kind='stable')
+        # Sort t_ls into the same order as loops.
+        t_ls = t_ls[perm_polygons_to_loop_order]
+
     # Add "fake" faces for loose edges. Each "fake" face consists of two loops creating a new 2-sided polygon.
     if scene_data.settings.use_mesh_edges:
         bl_edge_is_loose_dtype = bool
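[Reviewer note] Sketch of the reordering trick above, in plain NumPy: a stable argsort of the loop-start array gives one permutation that puts every per-polygon array into loop order.

    # Sketch: reorder per-polygon data into loop order via argsort of the
    # polygon loop-start indices (t_ls), as done for use_smooth/material_index.
    import numpy as np

    t_ls = np.array([6, 0, 3])               # loop start of each polygon (unsorted)
    material_index = np.array([2, 0, 1])     # per-polygon data in polygon order

    perm = np.argsort(t_ls, kind='stable')   # radix sort; fast on mostly-sorted data
    print(t_ls[perm])                        # [0 3 6] -> polygons now in loop order
    print(material_index[perm])              # [0 1 2] -> matching per-polygon data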
@@ -999,7 +1018,8 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):

     # We have to ^-1 last index of each loop.
-    # Ensure t_pvi is the correct number of bits before inverting.
-    t_pvi = astype_view_signedness(t_lvi, pvi_fbx_dtype)
+    # t_lvi may be used again later, so always create a copy to avoid modifying it in the next step.
+    t_pvi = t_lvi.astype(pvi_fbx_dtype)
     # The index of the end of each loop is one before the index of the start of the next loop.
     t_pvi[t_ls[1:] - 1] ^= -1
     # The index of the end of the last loop will be the very last index.
@@ -1015,7 +1035,6 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
     t_eli = astype_view_signedness(t_eli, eli_fbx_dtype)
     elem_data_single_int32_array(geom, b"PolygonVertexIndex", t_pvi)
     elem_data_single_int32_array(geom, b"Edges", t_eli)
-    del t_lvi
     del t_pvi
     del t_eli
     del t_ev
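[Reviewer note] For reference, the `^= -1` above implements FBX's end-of-polygon convention: the last vertex index of each polygon is stored bitwise-negated (i ^ -1 == ~i == -(i + 1)). A small sketch:

    # Sketch: FBX PolygonVertexIndex end-of-polygon marker.
    import numpy as np

    t_lvi = np.array([0, 1, 2, 2, 1, 3])   # two triangles' loop vertex indices
    t_ls = np.array([0, 3])                # loop start of each polygon

    t_pvi = t_lvi.astype(np.int32)         # copy, keeping t_lvi usable afterwards
    t_pvi[t_ls[1:] - 1] ^= -1              # end of each polygon is just before the next start
    t_pvi[-1] ^= -1                        # end of the last polygon is the very last index
    print(t_pvi)                           # [ 0  1 -3  2  1 -4]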
@@ -1032,6 +1051,8 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
     if smooth_type == 'FACE':
         t_ps = np.empty(len(me.polygons), dtype=poly_use_smooth_dtype)
         me.polygons.foreach_get("use_smooth", t_ps)
+        if sort_polygon_data:
+            t_ps = t_ps[perm_polygons_to_loop_order]
         _map = b"ByPolygon"
     else:  # EDGE
         _map = b"ByEdge"
@@ -1050,14 +1071,17 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
         # Get the 'use_smooth' attribute of all polygons.
         p_use_smooth_mask = np.empty(mesh_poly_nbr, dtype=poly_use_smooth_dtype)
         me.polygons.foreach_get('use_smooth', p_use_smooth_mask)
+        if sort_polygon_data:
+            p_use_smooth_mask = p_use_smooth_mask[perm_polygons_to_loop_order]
         # Invert to get all flat shaded polygons.
         p_flat_mask = np.invert(p_use_smooth_mask, out=p_use_smooth_mask)
         # Convert flat shaded polygons to flat shaded loops by repeating each element by the number of sides of
         # that polygon.
-        # Polygon sides can be calculated from the element-wise difference of loop starts appended by the number
-        # of loops. Alternatively, polygon sides can be retrieved directly from the 'loop_total' attribute of
-        # polygons, but since we already have t_ls, it tends to be quicker to calculate from t_ls when above
-        # around 10_000 polygons.
+        # Polygon sides can be calculated from the element-wise difference of sorted loop starts appended by the
+        # number of loops. Alternatively, polygon sides can be retrieved directly from the 'loop_total' attribute
+        # of polygons, but that might need to be sorted, and we already have t_ls which is sorted loop starts. It
+        # tends to be quicker to calculate from t_ls when above around 10_000 polygons even when the 'loop_total'
+        # array wouldn't need sorting.
         polygon_sides = np.diff(mesh_t_ls_view, append=mesh_loop_nbr)
         p_flat_loop_mask = np.repeat(p_flat_mask, polygon_sides)
         # Convert flat shaded loops to flat shaded (sharp) edge indices.
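[Reviewer note] Sketch of the per-polygon to per-loop expansion described above: polygon side counts fall out of the element-wise difference of sorted loop starts, and np.repeat expands per-polygon flags to per-loop flags.

    # Sketch: polygon sides from sorted loop starts, then per-loop expansion.
    import numpy as np

    t_ls = np.array([0, 3, 7])                        # sorted loop starts (tri, quad, tri)
    num_loops = 10

    polygon_sides = np.diff(t_ls, append=num_loops)   # [3 4 3]
    p_flat_mask = np.array([True, False, True])       # per-polygon "flat shaded"
    p_flat_loop_mask = np.repeat(p_flat_mask, polygon_sides)
    print(p_flat_loop_mask)  # 3x True, 4x False, 3x True -> one flag per loop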
@@ -1168,7 +1192,7 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
             elem_data_single_float64_array(lay_nor, b"Normals", t_ln)
             # Normal weights, no idea what it is.
-            # t_lnw = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(t_ln)
+            # t_lnw = np.zeros(len(t_ln), dtype=np.float64)
             # elem_data_single_float64_array(lay_nor, b"NormalsW", t_lnw)

             elem_data_single_int32_array(lay_nor, b"NormalsIndex", t_lnidx)
@@ -1183,7 +1207,7 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
             elem_data_single_string(lay_nor, b"ReferenceInformationType", b"Direct")
             elem_data_single_float64_array(lay_nor, b"Normals", t_ln)
             # Normal weights, no idea what it is.
-            # t_ln = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.loops)
+            # t_ln = np.zeros(len(me.loops), dtype=np.float64)
             # elem_data_single_float64_array(lay_nor, b"NormalsW", t_ln)
         del t_ln
@@ -1205,7 +1229,7 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
         del t_lt
         num_loops = len(me.loops)
         t_ln = np.empty(num_loops * 3, dtype=ln_bl_dtype)
-        # t_lnw = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.loops)
+        # t_lnw = np.zeros(len(me.loops), dtype=np.float64)
         uv_names = [uvlayer.name for uvlayer in me.uv_layers]
         # Annoying, `me.calc_tangent` errors in case there is no geometry...
         if num_loops > 0:
@@ -1252,15 +1276,13 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
         color_prop_name = "color_srgb" if colors_type == 'SRGB' else "color"
         # ByteColorAttribute color also gets returned by the API as single precision float
         bl_lc_dtype = np.single
-        bl_lvi_dtype = np.uintc
         fbx_lc_dtype = np.float64
         fbx_lcidx_dtype = np.int32
-        t_lvi = None

         color_attributes = me.color_attributes
         if scene_data.settings.prioritize_active_color:
             active_color = me.color_attributes.active_color
             color_attributes = sorted(color_attributes, key=lambda x: x == active_color, reverse=True)

         for colindex, collayer in enumerate(color_attributes):
             is_point = collayer.domain == "POINT"
@@ -1282,10 +1304,8 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
                 # with a "ByVertex" mapping type, but some software does not
                 # properly understand that. So expand to full "ByPolygonVertex"
                 # index map.
-                if t_lvi is None:
-                    t_lvi = np.empty(len(me.loops), dtype=bl_lvi_dtype)
-                    me.loops.foreach_get("vertex_index", t_lvi)
-                col_indices = col_indices[t_lvi]
+                # Ignore loops added for loose edges.
+                col_indices = col_indices[t_lvi[:len(me.loops)]]

             t_lc = t_lc.astype(fbx_lc_dtype, copy=False)
             col_indices = astype_view_signedness(col_indices, fbx_lcidx_dtype)
@@ -1295,7 +1315,6 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
             del t_lc
             del col_indices
-        del t_lvi

     # Write UV layers.
     # Note: LayerElementTexture is deprecated since FBX 2011 - luckily!
@@ -1304,7 +1323,6 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
     if uvnumber:
         luv_bl_dtype = np.single
         luv_fbx_dtype = np.float64
-        lv_idx_bl_dtype = np.uintc
         lv_idx_fbx_dtype = np.int32

         t_luv = np.empty(len(me.loops) * 2, dtype=luv_bl_dtype)
@@ -1315,8 +1333,8 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
         # Looks like this mapping is also expected to convey UV islands (arg..... :((((( ).
         # So we need to generate unique triplets (uv, vertex_idx) here, not only just based on UV values.
-        t_lvidx = np.empty(len(me.loops), dtype=lv_idx_bl_dtype)
-        me.loops.foreach_get("vertex_index", t_lvidx)
+        # Ignore loops added for loose edges.
+        t_lvidx = t_lvi[:len(me.loops)]

         # If we were to create a combined array of (uv, vertex_idx) elements, we could find unique triplets by sorting
         # that array by first sorting by the vertex_idx column and then sorting by the uv column using a stable sorting
@@ -1407,6 +1425,7 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
         del t_lvidx
         del t_luv
         del t_luv_fast_pair_view
+        del t_lvi

     # Face's materials.
     me_fbxmaterials_idx = scene_data.mesh_material_indices.get(me)
@@ -1423,6 +1442,8 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
         fbx_pm_dtype = np.int32
         t_pm = np.empty(len(me.polygons), dtype=bl_pm_dtype)
         me.polygons.foreach_get("material_index", t_pm)
+        if sort_polygon_data:
+            t_pm = t_pm[perm_polygons_to_loop_order]

         # We have to validate mat indices, and map them to FBX indices.
         # Note a mat might not be in me_fbxmaterials_idx (e.g. node mats are ignored).
@@ -1453,6 +1474,7 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
             elem_data_single_string(lay_ma, b"MappingInformationType", b"AllSame")
             elem_data_single_string(lay_ma, b"ReferenceInformationType", b"IndexToDirect")
             elem_data_single_int32_array(lay_ma, b"Materials", [0])
+    del perm_polygons_to_loop_order

     # And the "layer TOC"...


@@ -244,6 +244,11 @@ def array_to_matrix4(arr):
     return Matrix(tuple(zip(*[iter(arr)]*4))).transposed()


+def parray_as_ndarray(arr):
+    """Convert an array.array into an np.ndarray that shares the same memory"""
+    return np.frombuffer(arr, dtype=arr.typecode)
+
+
 def similar_values(v1, v2, e=1e-6):
     """Return True if v1 and v2 are nearly the same."""
     if v1 == v2:
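[Reviewer note] The helper relies on np.frombuffer producing a zero-copy view over the array.array's buffer; the typecodes ('d', 'i', ...) double as numpy dtype characters. A quick sketch:

    # Sketch: np.frombuffer shares memory with the array.array, so no copy is made.
    import array
    import numpy as np

    parr = array.array('d', (0.0, 1.0, 2.0))
    view = np.frombuffer(parr, dtype=parr.typecode)
    parr[0] = 42.0      # mutate the array.array...
    print(view[0])      # 42.0 -> the ndarray sees the change (shared memory)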
@@ -295,12 +300,6 @@ def shape_difference_exclude_similar(sv_cos, ref_cos, e=1e-6):
     return difference_cos, not_similar_verts_idx


-def vcos_transformed_gen(raw_cos, m=None):
-    # Note: we could most likely get much better performances with numpy, but will leave this as TODO for now.
-    gen = zip(*(iter(raw_cos),) * 3)
-    return gen if m is None else (m @ Vector(v) for v in gen)
-
-
 def _mat4_vec3_array_multiply(mat4, vec3_array, dtype=None, return_4d=False):
     """Multiply a 4d matrix by each 3d vector in an array and return as an array of either 3d or 4d vectors.


@@ -18,6 +18,9 @@ import bpy
 from bpy.app.translations import pgettext_tip as tip_
 from mathutils import Matrix, Euler, Vector

+# Also imported in .fbx_utils, so importing here is unlikely to further affect Blender startup time.
+import numpy as np
+
 # -----
 # Utils
 from . import parse_fbx, fbx_utils

@@ -34,6 +37,10 @@ from .fbx_utils import (
     similar_values,
     similar_values_iter,
     FBXImportSettings,
+    vcos_transformed,
+    nors_transformed,
+    parray_as_ndarray,
+    astype_view_signedness,
 )

 # global singleton, assign on execution
@@ -454,8 +461,9 @@ def add_vgroup_to_objects(vg_indices, vg_weights, vg_name, objects):
         vg = obj.vertex_groups.get(vg_name)
         if vg is None:
             vg = obj.vertex_groups.new(name=vg_name)
+        vg_add = vg.add
         for i, w in zip(vg_indices, vg_weights):
-            vg.add((i,), w, 'REPLACE')
+            vg_add((i,), w, 'REPLACE')


def blen_read_object_transform_preprocess(fbx_props, fbx_obj, rot_alt_mat, use_prepost_rot):
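[Reviewer note] The vg_add change is a classic CPython micro-optimization: binding vg.add once hoists the attribute lookup out of the per-weight loop. Illustrative timing sketch (numbers will vary by machine):

    # Sketch: hoisting a bound-method lookup out of a hot loop.
    import timeit

    print(timeit.timeit("for i in r: data.append(i)",
                        "r = range(1000); data = []", number=1000))
    print(timeit.timeit("ap = data.append\nfor i in r: ap(i)",
                        "r = range(1000); data = []", number=1000))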
@@ -777,87 +785,258 @@ def blen_read_geom_layerinfo(fbx_layer):
     )


-def blen_read_geom_array_setattr(generator, blen_data, blen_attr, fbx_data, stride, item_size, descr, xform):
-    """Generic fbx_layer to blen_data setter, generator is expected to yield tuples (ble_idx, fbx_idx)."""
-    max_blen_idx = len(blen_data) - 1
-    max_fbx_idx = len(fbx_data) - 1
-    print_error = True
-
-    def check_skip(blen_idx, fbx_idx):
-        nonlocal print_error
-        if fbx_idx < 0:  # Negative values mean 'skip'.
-            return True
-        if blen_idx > max_blen_idx:
-            if print_error:
-                print("ERROR: too much data in this Blender layer, compared to elements in mesh, skipping!")
-                print_error = False
-            return True
-        if fbx_idx + item_size - 1 > max_fbx_idx:
-            if print_error:
-                print("ERROR: not enough data in this FBX layer, skipping!")
-                print_error = False
-            return True
-        return False
-
-    if xform is not None:
-        if isinstance(blen_data, list):
-            if item_size == 1:
-                def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
-                    blen_data[blen_idx] = xform(fbx_data[fbx_idx])
-            else:
-                def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
-                    blen_data[blen_idx] = xform(fbx_data[fbx_idx:fbx_idx + item_size])
-        else:
-            if item_size == 1:
-                def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
-                    setattr(blen_data[blen_idx], blen_attr, xform(fbx_data[fbx_idx]))
-            else:
-                def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
-                    setattr(blen_data[blen_idx], blen_attr, xform(fbx_data[fbx_idx:fbx_idx + item_size]))
-    else:
-        if isinstance(blen_data, list):
-            if item_size == 1:
-                def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
-                    blen_data[blen_idx] = fbx_data[fbx_idx]
-            else:
-                def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
-                    blen_data[blen_idx] = fbx_data[fbx_idx:fbx_idx + item_size]
-        else:
-            if item_size == 1:
-                def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
-                    setattr(blen_data[blen_idx], blen_attr, fbx_data[fbx_idx])
-            else:
-                def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):
-                    setattr(blen_data[blen_idx], blen_attr, fbx_data[fbx_idx:fbx_idx + item_size])
-
-    for blen_idx, fbx_idx in generator:
-        if check_skip(blen_idx, fbx_idx):
-            continue
-        _process(blen_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx)
-
-
-# generic generators.
-def blen_read_geom_array_gen_allsame(data_len):
-    return zip(*(range(data_len), (0,) * data_len))
-
-
-def blen_read_geom_array_gen_direct(fbx_data, stride):
-    fbx_data_len = len(fbx_data)
-    return zip(*(range(fbx_data_len // stride), range(0, fbx_data_len, stride)))
-
-
-def blen_read_geom_array_gen_indextodirect(fbx_layer_index, stride):
-    return ((bi, fi * stride) for bi, fi in enumerate(fbx_layer_index))
-
-
-def blen_read_geom_array_gen_direct_looptovert(mesh, fbx_data, stride):
-    fbx_data_len = len(fbx_data) // stride
-    loops = mesh.loops
-    for p in mesh.polygons:
-        for lidx in p.loop_indices:
-            vidx = loops[lidx].vertex_index
-            if vidx < fbx_data_len:
-                yield lidx, vidx * stride
+def blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size):
+    """Validate blen_data when it's not a bpy_prop_collection.
+    Returns whether blen_data is a bpy_prop_collection"""
+    blen_data_is_collection = isinstance(blen_data, bpy.types.bpy_prop_collection)
+    if not blen_data_is_collection:
+        if item_size > 1:
+            assert(len(blen_data.shape) == 2)
+            assert(blen_data.shape[1] == item_size)
+        assert(blen_data.dtype == blen_dtype)
+    return blen_data_is_collection
+
+
+def blen_read_geom_parse_fbx_data(fbx_data, stride, item_size):
+    """Parse fbx_data as an array.array into a 2d np.ndarray that shares the same memory, where each row is a single
+    item"""
+    # Technically stride < item_size could be supported, but there's probably not a use case for it since it would
+    # result in a view of the data with self-overlapping memory.
+    assert(stride >= item_size)
+
+    # View the array.array as an np.ndarray.
+    fbx_data_np = parray_as_ndarray(fbx_data)
+
+    if stride == item_size:
+        if item_size > 1:
+            # Need to make sure fbx_data_np has a whole number of items to be able to view item_size elements per row.
+            items_remainder = len(fbx_data_np) % item_size
+            if items_remainder:
+                print("ERROR: not a whole number of items in this FBX layer, skipping the partial item!")
+                fbx_data_np = fbx_data_np[:-items_remainder]
+        fbx_data_np = fbx_data_np.reshape(-1, item_size)
+    else:
+        # Create a view of fbx_data_np that is only the first item_size elements of each stride. Note that the view
+        # will not be C-contiguous.
+        stride_remainder = len(fbx_data_np) % stride
+        if stride_remainder:
+            if stride_remainder < item_size:
+                print("ERROR: not a whole number of items in this FBX layer, skipping the partial item!")
+                # Not enough in the remainder for a full item, so cut off the partial stride
+                fbx_data_np = fbx_data_np[:-stride_remainder]
+                # Reshape to one stride per row and then create a view that includes only the first item_size
+                # elements of each stride.
+                fbx_data_np = fbx_data_np.reshape(-1, stride)[:, :item_size]
+            else:
+                print("ERROR: not a whole number of strides in this FBX layer! There are a whole number of items, but"
+                      " this could indicate an error!")
+                # There is not a whole number of strides, but there is a whole number of items.
+                # This is a pain to deal with because fbx_data_np.reshape(-1, stride) is not possible.
+                # A view of just the items can be created using stride_tricks.as_strided by specifying the shape and
+                # strides of the view manually.
+                # Extreme care must be taken when using stride_tricks.as_strided because improper usage can result in
+                # a view that gives access to memory outside the array.
+                from numpy.lib import stride_tricks
+
+                # fbx_data_np should always start off as flat and C-contiguous.
+                assert(fbx_data_np.strides == (fbx_data_np.itemsize,))
+
+                num_whole_strides = len(fbx_data_np) // stride
+                # Plus the one partial stride that is enough elements for a complete item.
+                num_items = num_whole_strides + 1
+                shape = (num_items, item_size)
+
+                # strides are the number of bytes to step to get to the next element, for each axis.
+                step_per_item = fbx_data_np.itemsize * stride
+                step_per_item_element = fbx_data_np.itemsize
+                strides = (step_per_item, step_per_item_element)
+
+                fbx_data_np = stride_tricks.as_strided(fbx_data_np, shape, strides)
+        else:
+            # There's a whole number of strides, so first reshape to one stride per row and then create a view that
+            # includes only the first item_size elements of each stride.
+            fbx_data_np = fbx_data_np.reshape(-1, stride)[:, :item_size]
+
+    return fbx_data_np
+
+
+def blen_read_geom_check_fbx_data_length(blen_data, fbx_data_np, is_indices=False):
+    """Check that there are the same number of items in blen_data and fbx_data_np.
+
+    Returns a tuple of two elements:
+        0: fbx_data_np or, if fbx_data_np contains more items than blen_data, a view of fbx_data_np with the excess
+           items removed
+        1: Whether the returned fbx_data_np contains enough items to completely fill blen_data"""
+    bl_num_items = len(blen_data)
+    fbx_num_items = len(fbx_data_np)
+    enough_data = fbx_num_items >= bl_num_items
+    if not enough_data:
+        if is_indices:
+            print("ERROR: not enough indices in this FBX layer, missing data will be left as default!")
+        else:
+            print("ERROR: not enough data in this FBX layer, missing data will be left as default!")
+    elif fbx_num_items > bl_num_items:
+        if is_indices:
+            print("ERROR: too many indices in this FBX layer, skipping excess!")
+        else:
+            print("ERROR: too much data in this FBX layer, skipping excess!")
+        fbx_data_np = fbx_data_np[:bl_num_items]
+
+    return fbx_data_np, enough_data
+
+
+def blen_read_geom_xform(fbx_data_np, xform):
+    """xform is either None, or a function that takes fbx_data_np as its only positional argument and returns an
+    np.ndarray with the same total number of elements as fbx_data_np.
+    It is acceptable for xform to return an array with a different dtype to fbx_data_np.
+
+    Returns xform(fbx_data_np) when xform is not None and ensures the result of xform(fbx_data_np) has the same shape
+    as fbx_data_np before returning it.
+    When xform is None, fbx_data_np is returned as is."""
+    if xform is not None:
+        item_size = fbx_data_np.shape[1]
+        fbx_total_data = fbx_data_np.size
+        fbx_data_np = xform(fbx_data_np)
+        # The amount of data should not be changed by xform
+        assert(fbx_data_np.size == fbx_total_data)
+        # Ensure fbx_data_np is still item_size elements per row
+        if len(fbx_data_np.shape) != 2 or fbx_data_np.shape[1] != item_size:
+            fbx_data_np = fbx_data_np.reshape(-1, item_size)
+    return fbx_data_np
+
+
+def blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_data, stride, item_size, descr,
+                                            xform):
+    """Generic fbx_layer to blen_data foreach setter for Direct layers.
+    blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
+    fbx_data must be an array.array."""
+    fbx_data_np = blen_read_geom_parse_fbx_data(fbx_data, stride, item_size)
+    fbx_data_np, enough_data = blen_read_geom_check_fbx_data_length(blen_data, fbx_data_np)
+    fbx_data_np = blen_read_geom_xform(fbx_data_np, xform)
+
+    blen_data_is_collection = blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size)
+
+    if blen_data_is_collection:
+        if not enough_data:
+            blen_total_data = len(blen_data) * item_size
+            buffer = np.empty(blen_total_data, dtype=blen_dtype)
+            # It's not clear what values should be used for the missing data, so read the current values into a
+            # buffer.
+            blen_data.foreach_get(blen_attr, buffer)
+
+            # Change the buffer shape to one item per row
+            buffer.shape = (-1, item_size)
+
+            # Copy the fbx data into the start of the buffer
+            buffer[:len(fbx_data_np)] = fbx_data_np
+        else:
+            # Convert the buffer to the Blender C type of blen_attr
+            buffer = astype_view_signedness(fbx_data_np, blen_dtype)
+
+        # Set blen_attr of blen_data. The buffer must be flat and C-contiguous, which ravel() ensures
+        blen_data.foreach_set(blen_attr, buffer.ravel())
+    else:
+        assert(blen_data.size % item_size == 0)
+        blen_data = blen_data.view()
+        blen_data.shape = (-1, item_size)
+        blen_data[:len(fbx_data_np)] = fbx_data_np
+
+
+def blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_data, fbx_layer_index, stride,
+                                             item_size, descr, xform):
+    """Generic fbx_layer to blen_data foreach setter for IndexToDirect layers.
+    blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
+    fbx_data must be an array.array or a 1d np.ndarray."""
+    fbx_data_np = blen_read_geom_parse_fbx_data(fbx_data, stride, item_size)
+    fbx_data_np = blen_read_geom_xform(fbx_data_np, xform)
+
+    # fbx_layer_index is allowed to be a 1d np.ndarray for use with blen_read_geom_array_foreach_set_looptovert.
+    if not isinstance(fbx_layer_index, np.ndarray):
+        fbx_layer_index = parray_as_ndarray(fbx_layer_index)
+
+    fbx_layer_index, enough_indices = blen_read_geom_check_fbx_data_length(blen_data, fbx_layer_index, is_indices=True)
+
+    blen_data_is_collection = blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size)
+
+    blen_data_items_len = len(blen_data)
+    blen_data_len = blen_data_items_len * item_size
+    fbx_num_items = len(fbx_data_np)
+
+    # Find all indices that are out of bounds of fbx_data_np.
+    min_index_inclusive = -fbx_num_items
+    max_index_inclusive = fbx_num_items - 1
+    valid_index_mask = np.equal(fbx_layer_index, fbx_layer_index.clip(min_index_inclusive, max_index_inclusive))
+    indices_invalid = not valid_index_mask.all()
+
+    fbx_data_items = fbx_data_np.reshape(-1, item_size)
+
+    if indices_invalid or not enough_indices:
+        if blen_data_is_collection:
+            buffer = np.empty(blen_data_len, dtype=blen_dtype)
+            buffer_item_view = buffer.view()
+            buffer_item_view.shape = (-1, item_size)
+            # Since we don't know what the default values should be for the missing data, read the current values
+            # into a buffer.
+            blen_data.foreach_get(blen_attr, buffer)
+        else:
+            buffer_item_view = blen_data
+
+        if not enough_indices:
+            # Reduce the length of the view to the same length as the number of indices.
+            buffer_item_view = buffer_item_view[:len(fbx_layer_index)]
+
+        # Copy the result of indexing fbx_data_items by each element in fbx_layer_index into the buffer.
+        if indices_invalid:
+            print("ERROR: indices in this FBX layer out of bounds of the FBX data, skipping invalid indices!")
+            buffer_item_view[valid_index_mask] = fbx_data_items[fbx_layer_index[valid_index_mask]]
+        else:
+            buffer_item_view[:] = fbx_data_items[fbx_layer_index]
+
+        if blen_data_is_collection:
+            blen_data.foreach_set(blen_attr, buffer.ravel())
+    else:
+        if blen_data_is_collection:
+            # Cast the buffer to the Blender C type of blen_attr
+            fbx_data_items = astype_view_signedness(fbx_data_items, blen_dtype)
+            buffer_items = fbx_data_items[fbx_layer_index]
+            blen_data.foreach_set(blen_attr, buffer_items.ravel())
+        else:
+            blen_data[:] = fbx_data_items[fbx_layer_index]
+
+
+def blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_data, stride, item_size, descr,
+                                             xform):
+    """Generic fbx_layer to blen_data foreach setter for AllSame layers.
+    blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
+    fbx_data must be an array.array."""
+    fbx_data_np = blen_read_geom_parse_fbx_data(fbx_data, stride, item_size)
+    fbx_data_np = blen_read_geom_xform(fbx_data_np, xform)
+    blen_data_is_collection = blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size)
+    fbx_items_len = len(fbx_data_np)
+    blen_items_len = len(blen_data)
+
+    if fbx_items_len < 1:
+        print("ERROR: not enough data in this FBX layer, skipping!")
+        return
+
+    if blen_data_is_collection:
+        # Create an array filled with the value from fbx_data_np
+        buffer = np.full((blen_items_len, item_size), fbx_data_np[0], dtype=blen_dtype)
+
+        blen_data.foreach_set(blen_attr, buffer.ravel())
+    else:
+        blen_data[:] = fbx_data_np[0]
+
+
+def blen_read_geom_array_foreach_set_looptovert(mesh, blen_data, blen_attr, blen_dtype, fbx_data, stride, item_size,
+                                                descr, xform):
+    """Generic fbx_layer to blen_data foreach setter for polyloop ByVertice layers.
+    blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
+    fbx_data must be an array.array"""
+    # The fbx_data is mapped to vertices. To expand fbx_data to polygon loops, get an array of the vertex index of
+    # each polygon loop that will then be used to index fbx_data
+    loop_vertex_indices = np.empty(len(mesh.loops), dtype=np.uintc)
+    mesh.loops.foreach_get("vertex_index", loop_vertex_indices)
+    blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_data, loop_vertex_indices, stride,
+                                             item_size, descr, xform)


 # generic error printers.
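[Reviewer note] Sketch of the bounds-checked gather used by blen_read_geom_array_foreach_set_indexed: clipping plus an equality mask finds the in-range indices, so only those are gathered while the rest keep their existing values.

    # Sketch: validate FBX indices against the data length, then gather only
    # the valid ones, leaving out-of-bounds entries at their current values.
    import numpy as np

    fbx_data_items = np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]])  # 3 items
    fbx_layer_index = np.array([0, 2, 9, -5])                        # 9 and -5 are out of bounds

    n = len(fbx_data_items)
    valid = np.equal(fbx_layer_index, fbx_layer_index.clip(-n, n - 1))
    out = np.full((len(fbx_layer_index), 2), np.nan)  # stand-in for the existing buffer
    out[valid] = fbx_data_items[fbx_layer_index[valid]]
    print(out)  # rows for indices 9 and -5 stay NaN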
@@ -872,7 +1051,7 @@ def blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet=False):

 def blen_read_geom_array_mapped_vert(
-        mesh, blen_data, blen_attr,
+        mesh, blen_data, blen_attr, blen_dtype,
         fbx_layer_data, fbx_layer_index,
         fbx_layer_mapping, fbx_layer_ref,
         stride, item_size, descr,
@@ -881,15 +1060,15 @@ def blen_read_geom_array_mapped_vert(
     if fbx_layer_mapping == b'ByVertice':
         if fbx_layer_ref == b'Direct':
             assert(fbx_layer_index is None)
-            blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride),
-                                         blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
+            blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
+                                                    descr, xform)
             return True
         blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
     elif fbx_layer_mapping == b'AllSame':
         if fbx_layer_ref == b'IndexToDirect':
             assert(fbx_layer_index is None)
-            blen_read_geom_array_setattr(blen_read_geom_array_gen_allsame(len(blen_data)),
-                                         blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
+            blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
+                                                     item_size, descr, xform)
             return True
         blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
     else:
@@ -899,7 +1078,7 @@ def blen_read_geom_array_mapped_vert(

 def blen_read_geom_array_mapped_edge(
-        mesh, blen_data, blen_attr,
+        mesh, blen_data, blen_attr, blen_dtype,
         fbx_layer_data, fbx_layer_index,
         fbx_layer_mapping, fbx_layer_ref,
         stride, item_size, descr,
@@ -907,15 +1086,15 @@ def blen_read_geom_array_mapped_edge(
 ):
     if fbx_layer_mapping == b'ByEdge':
         if fbx_layer_ref == b'Direct':
-            blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride),
-                                         blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
+            blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
+                                                    descr, xform)
             return True
         blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
     elif fbx_layer_mapping == b'AllSame':
         if fbx_layer_ref == b'IndexToDirect':
             assert(fbx_layer_index is None)
-            blen_read_geom_array_setattr(blen_read_geom_array_gen_allsame(len(blen_data)),
-                                         blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
+            blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
+                                                     item_size, descr, xform)
             return True
         blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
     else:
@@ -925,7 +1104,7 @@ def blen_read_geom_array_mapped_edge(

 def blen_read_geom_array_mapped_polygon(
-        mesh, blen_data, blen_attr,
+        mesh, blen_data, blen_attr, blen_dtype,
         fbx_layer_data, fbx_layer_index,
         fbx_layer_mapping, fbx_layer_ref,
         stride, item_size, descr,
@@ -937,22 +1116,22 @@ def blen_read_geom_array_mapped_polygon(
             # We fallback to 'Direct' mapping in this case.
             #~ assert(fbx_layer_index is not None)
             if fbx_layer_index is None:
-                blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride),
-                                             blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
+                blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
+                                                        item_size, descr, xform)
             else:
-                blen_read_geom_array_setattr(blen_read_geom_array_gen_indextodirect(fbx_layer_index, stride),
-                                             blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
+                blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_layer_data,
+                                                         fbx_layer_index, stride, item_size, descr, xform)
             return True
         elif fbx_layer_ref == b'Direct':
-            blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride),
-                                         blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
+            blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
+                                                    descr, xform)
             return True
         blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
     elif fbx_layer_mapping == b'AllSame':
         if fbx_layer_ref == b'IndexToDirect':
             assert(fbx_layer_index is None)
-            blen_read_geom_array_setattr(blen_read_geom_array_gen_allsame(len(blen_data)),
-                                         blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
+            blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
+                                                     item_size, descr, xform)
             return True
         blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
     else:
@@ -962,7 +1141,7 @@ def blen_read_geom_array_mapped_polygon(

 def blen_read_geom_array_mapped_polyloop(
-        mesh, blen_data, blen_attr,
+        mesh, blen_data, blen_attr, blen_dtype,
         fbx_layer_data, fbx_layer_index,
         fbx_layer_mapping, fbx_layer_ref,
         stride, item_size, descr,
@@ -974,29 +1153,29 @@ def blen_read_geom_array_mapped_polyloop(
             # We fallback to 'Direct' mapping in this case.
             #~ assert(fbx_layer_index is not None)
             if fbx_layer_index is None:
-                blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride),
-                                             blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
+                blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
+                                                        item_size, descr, xform)
             else:
-                blen_read_geom_array_setattr(blen_read_geom_array_gen_indextodirect(fbx_layer_index, stride),
-                                             blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
+                blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_layer_data,
+                                                         fbx_layer_index, stride, item_size, descr, xform)
             return True
         elif fbx_layer_ref == b'Direct':
-            blen_read_geom_array_setattr(blen_read_geom_array_gen_direct(fbx_layer_data, stride),
-                                         blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
+            blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
+                                                    descr, xform)
             return True
         blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
     elif fbx_layer_mapping == b'ByVertice':
         if fbx_layer_ref == b'Direct':
             assert(fbx_layer_index is None)
-            blen_read_geom_array_setattr(blen_read_geom_array_gen_direct_looptovert(mesh, fbx_layer_data, stride),
-                                         blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
+            blen_read_geom_array_foreach_set_looptovert(mesh, blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
+                                                        item_size, descr, xform)
             return True
         blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
     elif fbx_layer_mapping == b'AllSame':
         if fbx_layer_ref == b'IndexToDirect':
             assert(fbx_layer_index is None)
-            blen_read_geom_array_setattr(blen_read_geom_array_gen_allsame(len(blen_data)),
-                                         blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
+            blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
+                                                     item_size, descr, xform)
             return True
         blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
     else:
@ -1021,7 +1200,7 @@ def blen_read_geom_layer_material(fbx_obj, mesh):
blen_data = mesh.polygons blen_data = mesh.polygons
blen_read_geom_array_mapped_polygon( blen_read_geom_array_mapped_polygon(
mesh, blen_data, "material_index", mesh, blen_data, "material_index", np.uintc,
fbx_layer_data, None, fbx_layer_data, None,
fbx_layer_mapping, fbx_layer_ref, fbx_layer_mapping, fbx_layer_ref,
1, 1, layer_id, 1, 1, layer_id,
@ -1055,7 +1234,7 @@ def blen_read_geom_layer_uv(fbx_obj, mesh):
continue continue
blen_read_geom_array_mapped_polyloop( blen_read_geom_array_mapped_polyloop(
mesh, blen_data, "uv", mesh, blen_data, "uv", np.single,
fbx_layer_data, fbx_layer_index, fbx_layer_data, fbx_layer_index,
fbx_layer_mapping, fbx_layer_ref, fbx_layer_mapping, fbx_layer_ref,
2, 2, layer_id, 2, 2, layer_id,
@ -1095,7 +1274,7 @@ def blen_read_geom_layer_color(fbx_obj, mesh, colors_type):
continue continue
blen_read_geom_array_mapped_polyloop( blen_read_geom_array_mapped_polyloop(
mesh, blen_data, color_prop_name, mesh, blen_data, color_prop_name, np.single,
fbx_layer_data, fbx_layer_index, fbx_layer_data, fbx_layer_index,
fbx_layer_mapping, fbx_layer_ref, fbx_layer_mapping, fbx_layer_ref,
4, 4, layer_id, 4, 4, layer_id,
@ -1129,11 +1308,11 @@ def blen_read_geom_layer_smooth(fbx_obj, mesh):
blen_data = mesh.edges blen_data = mesh.edges
blen_read_geom_array_mapped_edge( blen_read_geom_array_mapped_edge(
mesh, blen_data, "use_edge_sharp", mesh, blen_data, "use_edge_sharp", bool,
fbx_layer_data, None, fbx_layer_data, None,
fbx_layer_mapping, fbx_layer_ref, fbx_layer_mapping, fbx_layer_ref,
1, 1, layer_id, 1, 1, layer_id,
xform=lambda s: not s, xform=np.logical_not,
) )
# We only set sharp edges here, not face smoothing itself... # We only set sharp edges here, not face smoothing itself...
mesh.use_auto_smooth = True mesh.use_auto_smooth = True
@ -1141,7 +1320,7 @@ def blen_read_geom_layer_smooth(fbx_obj, mesh):
elif fbx_layer_mapping == b'ByPolygon': elif fbx_layer_mapping == b'ByPolygon':
blen_data = mesh.polygons blen_data = mesh.polygons
return blen_read_geom_array_mapped_polygon( return blen_read_geom_array_mapped_polygon(
mesh, blen_data, "use_smooth", mesh, blen_data, "use_smooth", bool,
fbx_layer_data, None, fbx_layer_data, None,
fbx_layer_mapping, fbx_layer_ref, fbx_layer_mapping, fbx_layer_ref,
1, 1, layer_id, 1, 1, layer_id,
@ -1152,8 +1331,6 @@ def blen_read_geom_layer_smooth(fbx_obj, mesh):
return False return False
def blen_read_geom_layer_edge_crease(fbx_obj, mesh): def blen_read_geom_layer_edge_crease(fbx_obj, mesh):
from math import sqrt
fbx_layer = elem_find_first(fbx_obj, b'LayerElementEdgeCrease') fbx_layer = elem_find_first(fbx_obj, b'LayerElementEdgeCrease')
if fbx_layer is None: if fbx_layer is None:
@ -1184,13 +1361,13 @@ def blen_read_geom_layer_edge_crease(fbx_obj, mesh):
blen_data = mesh.edges blen_data = mesh.edges
return blen_read_geom_array_mapped_edge( return blen_read_geom_array_mapped_edge(
mesh, blen_data, "crease", mesh, blen_data, "crease", np.single,
fbx_layer_data, None, fbx_layer_data, None,
fbx_layer_mapping, fbx_layer_ref, fbx_layer_mapping, fbx_layer_ref,
1, 1, layer_id, 1, 1, layer_id,
# Blender squares those values before sending them to OpenSubdiv, while other software doesn't, # Blender squares those values before sending them to OpenSubdiv, while other software doesn't,
# so we need to compensate for that to get similar results through FBX... # so we need to compensate for that to get similar results through FBX...
xform=sqrt, xform=np.sqrt,
) )
else: else:
print("warning layer %r mapping type unsupported: %r" % (fbx_layer.id, fbx_layer_mapping)) print("warning layer %r mapping type unsupported: %r" % (fbx_layer.id, fbx_layer_mapping))
@ -1215,22 +1392,28 @@ def blen_read_geom_layer_normal(fbx_obj, mesh, xform=None):
print("warning %r %r missing data" % (layer_id, fbx_layer_name)) print("warning %r %r missing data" % (layer_id, fbx_layer_name))
return False return False
# try loops, then vertices. # Normals are temporarily set here so that they can be retrieved again after a call to Mesh.validate().
bl_norm_dtype = np.single
item_size = 3
# try loops, then polygons, then vertices.
tries = ((mesh.loops, "Loops", False, blen_read_geom_array_mapped_polyloop), tries = ((mesh.loops, "Loops", False, blen_read_geom_array_mapped_polyloop),
(mesh.polygons, "Polygons", True, blen_read_geom_array_mapped_polygon), (mesh.polygons, "Polygons", True, blen_read_geom_array_mapped_polygon),
(mesh.vertices, "Vertices", True, blen_read_geom_array_mapped_vert)) (mesh.vertices, "Vertices", True, blen_read_geom_array_mapped_vert))
for blen_data, blen_data_type, is_fake, func in tries: for blen_data, blen_data_type, is_fake, func in tries:
bdata = [None] * len(blen_data) if is_fake else blen_data bdata = np.zeros((len(blen_data), item_size), dtype=bl_norm_dtype) if is_fake else blen_data
if func(mesh, bdata, "normal", if func(mesh, bdata, "normal", bl_norm_dtype,
fbx_layer_data, fbx_layer_index, fbx_layer_mapping, fbx_layer_ref, 3, 3, layer_id, xform, True): fbx_layer_data, fbx_layer_index, fbx_layer_mapping, fbx_layer_ref, 3, item_size, layer_id, xform, True):
if blen_data_type == "Polygons": if blen_data_type == "Polygons":
for pidx, p in enumerate(mesh.polygons): # To expand to per-loop normals, repeat each per-polygon normal by the number of loops of each polygon.
for lidx in range(p.loop_start, p.loop_start + p.loop_total): poly_loop_totals = np.empty(len(mesh.polygons), dtype=np.uintc)
mesh.loops[lidx].normal[:] = bdata[pidx] mesh.polygons.foreach_get("loop_total", poly_loop_totals)
loop_normals = np.repeat(bdata, poly_loop_totals, axis=0)
mesh.loops.foreach_set("normal", loop_normals.ravel())
elif blen_data_type == "Vertices": elif blen_data_type == "Vertices":
# We have to copy vnors to lnors! Far from elegant, but simple. # We have to copy vnors to lnors! Far from elegant, but simple.
for l in mesh.loops: loop_vertex_indices = np.empty(len(mesh.loops), dtype=np.uintc)
l.normal[:] = bdata[l.vertex_index] mesh.loops.foreach_get("vertex_index", loop_vertex_indices)
mesh.loops.foreach_set("normal", bdata[loop_vertex_indices].ravel())
return True return True
blen_read_geom_array_error_mapping("normal", fbx_layer_mapping) blen_read_geom_array_error_mapping("normal", fbx_layer_mapping)
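The two expansion paths above reduce to two plain NumPy idioms: np.repeat expands per-polygon values to per-loop, and fancy indexing gathers per-vertex values through the loops' vertex indices. A minimal sketch with assumed sample data:

    import numpy as np

    # Per-polygon normals repeated once per loop of each polygon:
    poly_normals = np.array([[0.0, 0.0, 1.0], [0.0, 1.0, 0.0]], dtype=np.single)
    poly_loop_totals = np.array([3, 4], dtype=np.uintc)   # a triangle and a quad
    loop_normals = np.repeat(poly_normals, poly_loop_totals, axis=0)   # 7 rows

    # Per-vertex normals gathered through the loops' vertex indices:
    vert_normals = np.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]], dtype=np.single)
    loop_vertex_indices = np.array([0, 1, 1, 0], dtype=np.uintc)
    loop_normals_from_verts = vert_normals[loop_vertex_indices]        # 4 rows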
@ -1239,9 +1422,6 @@ def blen_read_geom_layer_normal(fbx_obj, mesh, xform=None):
def blen_read_geom(fbx_tmpl, fbx_obj, settings): def blen_read_geom(fbx_tmpl, fbx_obj, settings):
from itertools import chain
import array
# Vertices are in object space, but we are post-multiplying all transforms with the inverse of the # Vertices are in object space, but we are post-multiplying all transforms with the inverse of the
# global matrix, so we need to apply the global matrix to the vertices to get the correct result. # global matrix, so we need to apply the global matrix to the vertices to get the correct result.
geom_mat_co = settings.global_matrix if settings.bake_space_transform else None geom_mat_co = settings.global_matrix if settings.bake_space_transform else None
@ -1259,36 +1439,61 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):
fbx_polys = elem_prop_first(elem_find_first(fbx_obj, b'PolygonVertexIndex')) fbx_polys = elem_prop_first(elem_find_first(fbx_obj, b'PolygonVertexIndex'))
fbx_edges = elem_prop_first(elem_find_first(fbx_obj, b'Edges')) fbx_edges = elem_prop_first(elem_find_first(fbx_obj, b'Edges'))
if geom_mat_co is not None: bl_vcos_dtype = np.single
def _vcos_transformed_gen(raw_cos, m=None):
# Note: we could most likely get much better performances with numpy, but will leave this as TODO for now.
return chain(*(m @ Vector(v) for v in zip(*(iter(raw_cos),) * 3)))
fbx_verts = array.array(fbx_verts.typecode, _vcos_transformed_gen(fbx_verts, geom_mat_co))
if fbx_verts is None: # The dtypes when empty don't matter, but are set to what the fbx arrays are expected to be.
fbx_verts = () fbx_verts = parray_as_ndarray(fbx_verts) if fbx_verts else np.empty(0, dtype=data_types.ARRAY_FLOAT64)
if fbx_polys is None: fbx_polys = parray_as_ndarray(fbx_polys) if fbx_polys else np.empty(0, dtype=data_types.ARRAY_INT32)
fbx_polys = () fbx_edges = parray_as_ndarray(fbx_edges) if fbx_edges else np.empty(0, dtype=data_types.ARRAY_INT32)
# Each vert is a 3d vector so is made of 3 components.
tot_verts = len(fbx_verts) // 3
if tot_verts * 3 != len(fbx_verts):
print("ERROR: Not a whole number of vertices. Ignoring the partial vertex!")
# Remove any remainder.
fbx_verts = fbx_verts[:tot_verts * 3]
tot_loops = len(fbx_polys)
tot_edges = len(fbx_edges)
mesh = bpy.data.meshes.new(name=elem_name_utf8) mesh = bpy.data.meshes.new(name=elem_name_utf8)
mesh.vertices.add(len(fbx_verts) // 3)
mesh.vertices.foreach_set("co", fbx_verts)
if fbx_polys: if tot_verts:
mesh.loops.add(len(fbx_polys)) if geom_mat_co is not None:
poly_loop_starts = [] fbx_verts = vcos_transformed(fbx_verts, geom_mat_co, bl_vcos_dtype)
poly_loop_totals = [] else:
poly_loop_prev = 0 fbx_verts = fbx_verts.astype(bl_vcos_dtype, copy=False)
for i, l in enumerate(mesh.loops):
index = fbx_polys[i]
if index < 0:
poly_loop_starts.append(poly_loop_prev)
poly_loop_totals.append((i - poly_loop_prev) + 1)
poly_loop_prev = i + 1
index ^= -1
l.vertex_index = index
mesh.polygons.add(len(poly_loop_starts)) mesh.vertices.add(tot_verts)
mesh.vertices.foreach_set("co", fbx_verts.ravel())
if tot_loops:
bl_loop_start_dtype = bl_loop_total_dtype = bl_loop_vertex_index_dtype = np.uintc
mesh.loops.add(tot_loops)
# The end of each polygon is specified by an inverted index.
fbx_loop_end_idx = np.flatnonzero(fbx_polys < 0)
tot_polys = len(fbx_loop_end_idx)
# Un-invert the loop ends.
fbx_polys[fbx_loop_end_idx] ^= -1
# Set loop vertex indices, casting to the Blender C type first for performance.
mesh.loops.foreach_set("vertex_index", astype_view_signedness(fbx_polys, bl_loop_vertex_index_dtype))
poly_loop_totals = np.empty(tot_polys, dtype=bl_loop_total_dtype)
# The loop total of the first polygon is first loop end index plus 1.
poly_loop_totals[0] = fbx_loop_end_idx[0] + 1
# The differences between consecutive loop end indices are the remaining loop totals.
poly_loop_totals[1:] = np.diff(fbx_loop_end_idx)
poly_loop_starts = np.empty(tot_polys, dtype=bl_loop_start_dtype)
# The first loop is always a loop start.
poly_loop_starts[0] = 0
# Ignoring the last loop end, the indices after every loop end are the remaining loop starts.
poly_loop_starts[1:] = fbx_loop_end_idx[:-1] + 1
mesh.polygons.add(tot_polys)
mesh.polygons.foreach_set("loop_start", poly_loop_starts) mesh.polygons.foreach_set("loop_start", poly_loop_starts)
mesh.polygons.foreach_set("loop_total", poly_loop_totals) mesh.polygons.foreach_set("loop_total", poly_loop_totals)
@ -1296,36 +1501,40 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):
blen_read_geom_layer_uv(fbx_obj, mesh) blen_read_geom_layer_uv(fbx_obj, mesh)
blen_read_geom_layer_color(fbx_obj, mesh, settings.colors_type) blen_read_geom_layer_color(fbx_obj, mesh, settings.colors_type)
if fbx_edges: if tot_edges:
# edges in fact index the polygons (NOT the vertices) # edges in fact index the polygons (NOT the vertices)
import array bl_edge_vertex_indices_dtype = np.uintc
tot_edges = len(fbx_edges)
edges_conv = array.array('i', [0]) * (tot_edges * 2)
edge_index = 0 # The first vertex index of each edge is the vertex index of the corresponding loop in fbx_polys.
for i in fbx_edges: edges_a = fbx_polys[fbx_edges]
e_a = fbx_polys[i]
if e_a >= 0:
e_b = fbx_polys[i + 1]
if e_b < 0:
e_b ^= -1
else:
# Last index of polygon, wrap back to the start.
# ideally we wouldn't have to search back, # The second vertex index of each edge is the vertex index of the next loop in the same polygon. The
# but it should only be 2-3 iterations. # complexity here is that if the first vertex index was the last loop of that polygon in fbx_polys, the next
j = i - 1 # loop in the polygon is the first loop of that polygon, which is not the next loop in fbx_polys.
while j >= 0 and fbx_polys[j] >= 0:
j -= 1
e_a ^= -1
e_b = fbx_polys[j + 1]
edges_conv[edge_index] = e_a # Copy fbx_polys, but rolled backwards by 1 so that indexing the result by [fbx_edges] will get the next
edges_conv[edge_index + 1] = e_b # loop of the same polygon unless the first vertex index was the last loop of the polygon.
edge_index += 2 fbx_polys_next = np.roll(fbx_polys, -1)
# Get the first loop of each polygon and set them into fbx_polys_next at the same indices as the last loop
# of each polygon in fbx_polys.
fbx_polys_next[fbx_loop_end_idx] = fbx_polys[poly_loop_starts]
mesh.edges.add(tot_edges) # Indexing fbx_polys_next by fbx_edges now gets the vertex index of the next loop in fbx_polys.
mesh.edges.foreach_set("vertices", edges_conv) edges_b = fbx_polys_next[fbx_edges]
# edges_a and edges_b need to be combined so that the first vertex index of each edge is immediately
# followed by the second vertex index of that same edge.
# Stack edges_a and edges_b as individual columns like np.column_stack((edges_a, edges_b)).
# np.concatenate is used because np.column_stack doesn't allow specifying the dtype of the returned array.
edges_conv = np.concatenate((edges_a.reshape(-1, 1), edges_b.reshape(-1, 1)),
axis=1, dtype=bl_edge_vertex_indices_dtype, casting='unsafe')
# Add the edges and set their vertex indices.
mesh.edges.add(len(edges_conv))
# ravel() because edges_conv must be flat and C-contiguous when passed to foreach_set.
mesh.edges.foreach_set("vertices", edges_conv.ravel())
elif tot_edges:
print("ERROR: No polygons, but edges exist. Ignoring the edges!")
# must be after edge, face loading. # must be after edge, face loading.
ok_smooth = blen_read_geom_layer_smooth(fbx_obj, mesh) ok_smooth = blen_read_geom_layer_smooth(fbx_obj, mesh)
@ -1340,21 +1549,23 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):
if geom_mat_no is None: if geom_mat_no is None:
ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh) ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh)
else: else:
def nortrans(v): ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh,
return geom_mat_no @ Vector(v) lambda v_array: nors_transformed(v_array, geom_mat_no))
ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh, nortrans)
mesh.validate(clean_customdata=False) # *Very* important to not remove lnors here! mesh.validate(clean_customdata=False) # *Very* important to not remove lnors here!
if ok_normals: if ok_normals:
clnors = array.array('f', [0.0] * (len(mesh.loops) * 3)) bl_nors_dtype = np.single
clnors = np.empty(len(mesh.loops) * 3, dtype=bl_nors_dtype)
mesh.loops.foreach_get("normal", clnors) mesh.loops.foreach_get("normal", clnors)
if not ok_smooth: if not ok_smooth:
mesh.polygons.foreach_set("use_smooth", [True] * len(mesh.polygons)) mesh.polygons.foreach_set("use_smooth", np.full(len(mesh.polygons), True, dtype=bool))
ok_smooth = True ok_smooth = True
mesh.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3))) # Iterating clnors into a nested tuple first is faster than passing clnors.reshape(-1, 3) directly into
# normals_split_custom_set. We use clnors.data since it is a memoryview, which is faster to iterate than clnors.
mesh.normals_split_custom_set(tuple(zip(*(iter(clnors.data),) * 3)))
mesh.use_auto_smooth = True mesh.use_auto_smooth = True
else: else:
mesh.calc_normals() mesh.calc_normals()
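The zip trick above groups a flat sequence into consecutive triples by passing the same iterator to zip three times; iterating the memoryview (clnors.data) yields plain Python floats. A minimal sketch with assumed sample data:

    import numpy as np

    clnors = np.arange(9, dtype=np.single)   # three normals, flattened
    it = iter(clnors.data)                   # memoryview iterator -> Python floats
    triples = tuple(zip(*(it,) * 3))         # ((0., 1., 2.), (3., 4., 5.), (6., 7., 8.))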
@ -1363,7 +1574,7 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):
mesh.free_normals_split() mesh.free_normals_split()
if not ok_smooth: if not ok_smooth:
mesh.polygons.foreach_set("use_smooth", [True] * len(mesh.polygons)) mesh.polygons.foreach_set("use_smooth", np.full(len(mesh.polygons), True, dtype=bool))
if settings.use_custom_props: if settings.use_custom_props:
blen_read_custom_properties(fbx_obj, mesh, settings) blen_read_custom_properties(fbx_obj, mesh, settings)
@ -1371,46 +1582,78 @@ def blen_read_geom(fbx_tmpl, fbx_obj, settings):
return mesh return mesh
def blen_read_shape(fbx_tmpl, fbx_sdata, fbx_bcdata, meshes, scene): def blen_read_shapes(fbx_tmpl, fbx_data, objects, me, scene):
elem_name_utf8 = elem_name_ensure_class(fbx_sdata, b'Geometry') if not fbx_data:
indices = elem_prop_first(elem_find_first(fbx_sdata, b'Indexes'), default=()) # No shape key data. Nothing to do.
dvcos = tuple(co for co in zip(*[iter(elem_prop_first(elem_find_first(fbx_sdata, b'Vertices'), default=()))] * 3)) return
# We completely ignore normals here!
weight = elem_prop_first(elem_find_first(fbx_bcdata, b'DeformPercent'), default=100.0) / 100.0
vgweights = tuple(vgw / 100.0 for vgw in elem_prop_first(elem_find_first(fbx_bcdata, b'FullWeights'), default=()))
# Special case, in case all weights are the same, FullWeight can have only one element - *sigh!* bl_vcos_dtype = np.single
nbr_indices = len(indices) me_vcos = np.empty(len(me.vertices) * 3, dtype=bl_vcos_dtype)
if len(vgweights) == 1 and nbr_indices > 1: me.vertices.foreach_get("co", me_vcos)
vgweights = (vgweights[0],) * nbr_indices me_vcos_vector_view = me_vcos.reshape(-1, 3)
assert(len(vgweights) == nbr_indices == len(dvcos)) objects = list({node.bl_obj for node in objects})
create_vg = bool(set(vgweights) - {1.0}) assert(objects)
keyblocks = [] bc_uuid_to_keyblocks = {}
for bc_uuid, fbx_sdata, fbx_bcdata in fbx_data:
elem_name_utf8 = elem_name_ensure_class(fbx_sdata, b'Geometry')
indices = elem_prop_first(elem_find_first(fbx_sdata, b'Indexes'))
dvcos = elem_prop_first(elem_find_first(fbx_sdata, b'Vertices'))
for me, objects in meshes: indices = parray_as_ndarray(indices) if indices else np.empty(0, dtype=data_types.ARRAY_INT32)
vcos = tuple((idx, me.vertices[idx].co + Vector(dvco)) for idx, dvco in zip(indices, dvcos)) dvcos = parray_as_ndarray(dvcos) if dvcos else np.empty(0, dtype=data_types.ARRAY_FLOAT64)
objects = list({node.bl_obj for node in objects})
assert(objects)
# If there's not a whole number of vectors, trim off the remainder.
# 3 components per vector.
remainder = len(dvcos) % 3
if remainder:
dvcos = dvcos[:-remainder]
dvcos = dvcos.reshape(-1, 3)
# We completely ignore normals here!
weight = elem_prop_first(elem_find_first(fbx_bcdata, b'DeformPercent'), default=100.0) / 100.0
vgweights = elem_prop_first(elem_find_first(fbx_bcdata, b'FullWeights'))
vgweights = parray_as_ndarray(vgweights) if vgweights else np.empty(0, dtype=data_types.ARRAY_FLOAT64)
# Not doing the division in place, in case FBX shape keys can be used by more than one mesh.
vgweights = vgweights / 100.0
create_vg = (vgweights != 1.0).any()
# Special case, in case all weights are the same, FullWeight can have only one element - *sigh!*
nbr_indices = len(indices)
if len(vgweights) == 1 and nbr_indices > 1:
vgweights = np.full_like(indices, vgweights[0], dtype=vgweights.dtype)
assert(len(vgweights) == nbr_indices == len(dvcos))
# To add shape keys to the mesh, an Object using the mesh is needed.
if me.shape_keys is None: if me.shape_keys is None:
objects[0].shape_key_add(name="Basis", from_mix=False) objects[0].shape_key_add(name="Basis", from_mix=False)
kb = objects[0].shape_key_add(name=elem_name_utf8, from_mix=False) kb = objects[0].shape_key_add(name=elem_name_utf8, from_mix=False)
me.shape_keys.use_relative = True # Should already be set as such. me.shape_keys.use_relative = True # Should already be set as such.
for idx, co in vcos: # Only need to set the shape key co if there are any non-zero dvcos.
kb.data[idx].co[:] = co if dvcos.any():
shape_cos = me_vcos_vector_view.copy()
shape_cos[indices] += dvcos
kb.data.foreach_set("co", shape_cos.ravel())
kb.value = weight kb.value = weight
# Add vgroup if necessary. # Add vgroup if necessary.
if create_vg: if create_vg:
vgoups = add_vgroup_to_objects(indices, vgweights, kb.name, objects) # VertexGroup.add only allows sequences of int indices, but iterating the indices array directly would
# produce numpy scalars of types such as np.int32. The underlying memoryview of the indices array, however,
# does produce standard Python ints when iterated, so pass indices.data to add_vgroup_to_objects instead of
# indices.
# memoryviews tend to be faster to iterate than numpy arrays anyway, so vgweights.data is passed too.
add_vgroup_to_objects(indices.data, vgweights.data, kb.name, objects)
kb.vertex_group = kb.name kb.vertex_group = kb.name
keyblocks.append(kb) bc_uuid_to_keyblocks.setdefault(bc_uuid, []).append(kb)
return bc_uuid_to_keyblocks
return keyblocks
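Because FBX shape keys store sparse per-vertex deltas, the importer copies the base coordinates once per mesh and adds the deltas only at the affected indices. A minimal sketch with assumed sample data:

    import numpy as np

    base_cos = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]], dtype=np.single)
    indices = np.array([0, 2])                        # only these vertices are displaced
    dvcos = np.array([[0., 0., 1.], [0., 0., 2.]])    # matching per-vertex deltas

    shape_cos = base_cos.copy()
    shape_cos[indices] += dvcos
    # shape_cos.ravel() is the flat buffer handed to kb.data.foreach_set("co", ...)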
# -------- # --------
@ -2861,6 +3104,7 @@ def load(operator, context, filepath="",
def _(): def _():
fbx_tmpl = fbx_template_get((b'Geometry', b'KFbxShape')) fbx_tmpl = fbx_template_get((b'Geometry', b'KFbxShape'))
mesh_to_shapes = {}
for s_uuid, s_item in fbx_table_nodes.items(): for s_uuid, s_item in fbx_table_nodes.items():
fbx_sdata, bl_sdata = s_item = fbx_table_nodes.get(s_uuid, (None, None)) fbx_sdata, bl_sdata = s_item = fbx_table_nodes.get(s_uuid, (None, None))
if fbx_sdata is None or fbx_sdata.id != b'Geometry' or fbx_sdata.props[2] != b'Shape': if fbx_sdata is None or fbx_sdata.id != b'Geometry' or fbx_sdata.props[2] != b'Shape':
@ -2873,8 +3117,6 @@ def load(operator, context, filepath="",
fbx_bcdata, _bl_bcdata = fbx_table_nodes.get(bc_uuid, (None, None)) fbx_bcdata, _bl_bcdata = fbx_table_nodes.get(bc_uuid, (None, None))
if fbx_bcdata is None or fbx_bcdata.id != b'Deformer' or fbx_bcdata.props[2] != b'BlendShapeChannel': if fbx_bcdata is None or fbx_bcdata.id != b'Deformer' or fbx_bcdata.props[2] != b'BlendShapeChannel':
continue continue
meshes = []
objects = []
for bs_uuid, bs_ctype in fbx_connection_map.get(bc_uuid, ()): for bs_uuid, bs_ctype in fbx_connection_map.get(bc_uuid, ()):
if bs_ctype.props[0] != b'OO': if bs_ctype.props[0] != b'OO':
continue continue
@ -2889,20 +3131,29 @@ def load(operator, context, filepath="",
continue continue
# Blenmeshes are assumed already created at that time! # Blenmeshes are assumed already created at that time!
assert(isinstance(bl_mdata, bpy.types.Mesh)) assert(isinstance(bl_mdata, bpy.types.Mesh))
# And we have to find all objects using this mesh! # Group shapes by mesh so that each mesh only needs to be processed once for all of its shape
objects = [] # keys.
for o_uuid, o_ctype in fbx_connection_map.get(m_uuid, ()): if bl_mdata not in mesh_to_shapes:
if o_ctype.props[0] != b'OO': # And we have to find all objects using this mesh!
continue objects = []
node = fbx_helper_nodes[o_uuid] for o_uuid, o_ctype in fbx_connection_map.get(m_uuid, ()):
if node: if o_ctype.props[0] != b'OO':
objects.append(node) continue
meshes.append((bl_mdata, objects)) node = fbx_helper_nodes[o_uuid]
if node:
objects.append(node)
shapes_list = []
mesh_to_shapes[bl_mdata] = (objects, shapes_list)
else:
shapes_list = mesh_to_shapes[bl_mdata][1]
shapes_list.append((bc_uuid, fbx_sdata, fbx_bcdata))
# BlendShape deformers are only here to connect BlendShapeChannels to meshes, nothing else to do. # BlendShape deformers are only here to connect BlendShapeChannels to meshes, nothing else to do.
# Iterate through each mesh and create its shape keys
for bl_mdata, (objects, shapes) in mesh_to_shapes.items():
for bc_uuid, keyblocks in blen_read_shapes(fbx_tmpl, shapes, objects, bl_mdata, scene).items():
# keyblocks is a list of tuples (mesh, keyblock) matching that shape/blendshapechannel, for animation. # keyblocks is a list of tuples (mesh, keyblock) matching that shape/blendshapechannel, for animation.
keyblocks = blen_read_shape(fbx_tmpl, fbx_sdata, fbx_bcdata, meshes, scene) blend_shape_channels.setdefault(bc_uuid, []).extend(keyblocks)
blend_shape_channels[bc_uuid] = keyblocks
_(); del _ _(); del _
if settings.use_subsurf: if settings.use_subsurf:
@ -3224,8 +3475,16 @@ def load(operator, context, filepath="",
if decal_offset != 0.0: if decal_offset != 0.0:
for material in mesh.materials: for material in mesh.materials:
if material in material_decals: if material in material_decals:
for v in mesh.vertices: num_verts = len(mesh.vertices)
v.co += v.normal * decal_offset blen_cos_dtype = blen_norm_dtype = np.single
vcos = np.empty(num_verts * 3, dtype=blen_cos_dtype)
vnorm = np.empty(num_verts * 3, dtype=blen_norm_dtype)
mesh.vertices.foreach_get("co", vcos)
mesh.vertices.foreach_get("normal", vnorm)
vcos += vnorm * decal_offset
mesh.vertices.foreach_set("co", vcos)
break break
for obj in (obj for obj in bpy.data.objects if obj.data == mesh): for obj in (obj for obj in bpy.data.objects if obj.data == mesh):
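The removed per-vertex Python loop becomes a single vectorized add over the flat foreach_get/foreach_set buffers. A minimal sketch with assumed sample data standing in for the real mesh buffers:

    import numpy as np

    decal_offset = 0.01
    # Flat (x, y, z, x, y, z, ...) buffers as filled by foreach_get on mesh.vertices.
    vcos = np.array([0., 0., 0., 1., 0., 0.], dtype=np.single)
    vnorm = np.array([0., 0., 1., 0., 0., 1.], dtype=np.single)
    vcos += vnorm * decal_offset   # push every vertex along its own normal
    # vcos is then written back with mesh.vertices.foreach_set("co", vcos)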
View File
@ -4,7 +4,7 @@
bl_info = { bl_info = {
'name': 'glTF 2.0 format', 'name': 'glTF 2.0 format',
'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors', 'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
"version": (3, 6, 5), "version": (3, 6, 11),
'blender': (3, 5, 0), 'blender': (3, 5, 0),
'location': 'File > Import-Export', 'location': 'File > Import-Export',
'description': 'Import-Export as glTF 2.0', 'description': 'Import-Export as glTF 2.0',
View File
@ -14,7 +14,6 @@ def get_mesh_cache_key(blender_mesh,
blender_object, blender_object,
vertex_groups, vertex_groups,
modifiers, modifiers,
skip_filter,
materials, materials,
original_mesh, original_mesh,
export_settings): export_settings):
@ -34,21 +33,19 @@ def get_mesh_cache_key(blender_mesh,
return ( return (
(id(mesh_to_id_cache),), (id(mesh_to_id_cache),),
(modifiers,), (modifiers,),
(skip_filter,), #TODO to check if still needed
mats mats
) )
@cached_by_key(key=get_mesh_cache_key) @cached_by_key(key=get_mesh_cache_key)
def gather_mesh(blender_mesh: bpy.types.Mesh, def gather_mesh(blender_mesh: bpy.types.Mesh,
uuid_for_skined_data, uuid_for_skined_data,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
skip_filter: bool,
materials: Tuple[bpy.types.Material], materials: Tuple[bpy.types.Material],
original_mesh: bpy.types.Mesh, original_mesh: bpy.types.Mesh,
export_settings export_settings
) -> Optional[gltf2_io.Mesh]: ) -> Optional[gltf2_io.Mesh]:
if not skip_filter and not __filter_mesh(blender_mesh, vertex_groups, modifiers, export_settings): if not __filter_mesh(blender_mesh, vertex_groups, modifiers, export_settings):
return None return None
mesh = gltf2_io.Mesh( mesh = gltf2_io.Mesh(
@ -75,25 +72,21 @@ def gather_mesh(blender_mesh: bpy.types.Mesh,
blender_object, blender_object,
vertex_groups, vertex_groups,
modifiers, modifiers,
skip_filter,
materials) materials)
return mesh return mesh
def __filter_mesh(blender_mesh: bpy.types.Mesh, def __filter_mesh(blender_mesh: bpy.types.Mesh,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
export_settings export_settings
) -> bool: ) -> bool:
if blender_mesh.users == 0:
return False
return True return True
def __gather_extensions(blender_mesh: bpy.types.Mesh, def __gather_extensions(blender_mesh: bpy.types.Mesh,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
export_settings export_settings
) -> Any: ) -> Any:
@ -101,7 +94,7 @@ def __gather_extensions(blender_mesh: bpy.types.Mesh,
def __gather_extras(blender_mesh: bpy.types.Mesh, def __gather_extras(blender_mesh: bpy.types.Mesh,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
export_settings export_settings
) -> Optional[Dict[Any, Any]]: ) -> Optional[Dict[Any, Any]]:
@ -128,7 +121,7 @@ def __gather_extras(blender_mesh: bpy.types.Mesh,
def __gather_name(blender_mesh: bpy.types.Mesh, def __gather_name(blender_mesh: bpy.types.Mesh,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
export_settings export_settings
) -> str: ) -> str:
@ -137,7 +130,7 @@ def __gather_name(blender_mesh: bpy.types.Mesh,
def __gather_primitives(blender_mesh: bpy.types.Mesh, def __gather_primitives(blender_mesh: bpy.types.Mesh,
uuid_for_skined_data, uuid_for_skined_data,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
materials: Tuple[bpy.types.Material], materials: Tuple[bpy.types.Material],
export_settings export_settings
@ -151,7 +144,7 @@ def __gather_primitives(blender_mesh: bpy.types.Mesh,
def __gather_weights(blender_mesh: bpy.types.Mesh, def __gather_weights(blender_mesh: bpy.types.Mesh,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
export_settings export_settings
) -> Optional[List[float]]: ) -> Optional[List[float]]:
View File
@ -182,11 +182,7 @@ def __gather_mesh(vnode, blender_object, export_settings):
# Be sure that object is valid (no NaN for example) # Be sure that object is valid (no NaN for example)
blender_object.data.validate() blender_object.data.validate()
# If not using vertex group, they are irrelevant for caching --> ensure that they do not trigger a cache miss
vertex_groups = blender_object.vertex_groups
modifiers = blender_object.modifiers modifiers = blender_object.modifiers
if len(vertex_groups) == 0:
vertex_groups = None
if len(modifiers) == 0: if len(modifiers) == 0:
modifiers = None modifiers = None
@ -194,7 +190,6 @@ def __gather_mesh(vnode, blender_object, export_settings):
if export_settings['gltf_apply']: if export_settings['gltf_apply']:
if modifiers is None: # If no modifier, use original mesh, it will instance all shared mesh in a single glTF mesh if modifiers is None: # If no modifier, use original mesh, it will instance all shared mesh in a single glTF mesh
blender_mesh = blender_object.data blender_mesh = blender_object.data
skip_filter = False
else: else:
armature_modifiers = {} armature_modifiers = {}
if export_settings['gltf_skins']: if export_settings['gltf_skins']:
@ -209,7 +204,6 @@ def __gather_mesh(vnode, blender_object, export_settings):
blender_mesh = blender_mesh_owner.to_mesh(preserve_all_data_layers=True, depsgraph=depsgraph) blender_mesh = blender_mesh_owner.to_mesh(preserve_all_data_layers=True, depsgraph=depsgraph)
for prop in blender_object.data.keys(): for prop in blender_object.data.keys():
blender_mesh[prop] = blender_object.data[prop] blender_mesh[prop] = blender_object.data[prop]
skip_filter = True
if export_settings['gltf_skins']: if export_settings['gltf_skins']:
# restore Armature modifiers # restore Armature modifiers
@ -217,15 +211,12 @@ def __gather_mesh(vnode, blender_object, export_settings):
blender_object.modifiers[idx].show_viewport = show_viewport blender_object.modifiers[idx].show_viewport = show_viewport
else: else:
blender_mesh = blender_object.data blender_mesh = blender_object.data
skip_filter = False
# If no skins are exported, vertex groups are not needed; keeping them would only cause a cache miss # If no skins are exported, vertex groups are not needed; keeping them would only cause a cache miss
if not export_settings['gltf_skins']: if not export_settings['gltf_skins']:
vertex_groups = None
modifiers = None modifiers = None
else: else:
# Check if there is an armature modifier # Check if there is an armature modifier
if len([mod for mod in blender_object.modifiers if mod.type == "ARMATURE"]) == 0: if len([mod for mod in blender_object.modifiers if mod.type == "ARMATURE"]) == 0:
vertex_groups = None # Not needed if no armature, avoid a cache miss
modifiers = None modifiers = None
materials = tuple(ms.material for ms in blender_object.material_slots) materials = tuple(ms.material for ms in blender_object.material_slots)
@ -241,9 +232,8 @@ def __gather_mesh(vnode, blender_object, export_settings):
result = gltf2_blender_gather_mesh.gather_mesh(blender_mesh, result = gltf2_blender_gather_mesh.gather_mesh(blender_mesh,
uuid_for_skined_data, uuid_for_skined_data,
vertex_groups, blender_object.vertex_groups,
modifiers, modifiers,
skip_filter,
materials, materials,
None, None,
export_settings) export_settings)
@ -279,17 +269,14 @@ def __gather_mesh_from_nonmesh(blender_object, export_settings):
needs_to_mesh_clear = True needs_to_mesh_clear = True
skip_filter = True
materials = tuple([ms.material for ms in blender_object.material_slots if ms.material is not None]) materials = tuple([ms.material for ms in blender_object.material_slots if ms.material is not None])
vertex_groups = None
modifiers = None modifiers = None
blender_object_for_skined_data = None blender_object_for_skined_data = None
result = gltf2_blender_gather_mesh.gather_mesh(blender_mesh, result = gltf2_blender_gather_mesh.gather_mesh(blender_mesh,
blender_object_for_skined_data, blender_object_for_skined_data,
vertex_groups, blender_object.vertex_groups,
modifiers, modifiers,
skip_filter,
materials, materials,
blender_object.data, blender_object.data,
export_settings) export_settings)
@ -361,8 +348,7 @@ def gather_skin(vnode, export_settings):
return None return None
# no skin needed when the modifier is linked without having a vertex group # no skin needed when the modifier is linked without having a vertex group
vertex_groups = blender_object.vertex_groups if len(blender_object.vertex_groups) == 0:
if len(vertex_groups) == 0:
return None return None
# check if any vertices in the mesh are part of a vertex group # check if any vertices in the mesh are part of a vertex group
View File
@ -15,9 +15,9 @@ from .material import gltf2_blender_gather_materials
from .material.extensions import gltf2_blender_gather_materials_variants from .material.extensions import gltf2_blender_gather_materials_variants
@cached @cached
def get_primitive_cache_key( def gather_primitive_cache_key(
blender_mesh, blender_mesh,
blender_object, uuid_for_skined_data,
vertex_groups, vertex_groups,
modifiers, modifiers,
materials, materials,
@ -36,11 +36,11 @@ def get_primitive_cache_key(
) )
@cached_by_key(key=get_primitive_cache_key) @cached_by_key(key=gather_primitive_cache_key)
def gather_primitives( def gather_primitives(
blender_mesh: bpy.types.Mesh, blender_mesh: bpy.types.Mesh,
uuid_for_skined_data, uuid_for_skined_data,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
materials: Tuple[bpy.types.Material], materials: Tuple[bpy.types.Material],
export_settings export_settings
@ -92,11 +92,33 @@ def gather_primitives(
return primitives return primitives
@cached @cached
def get_primitive_cache_key(
blender_mesh,
uuid_for_skined_data,
vertex_groups,
modifiers,
export_settings):
# Use id of mesh
# Do not use bpy.types, which can be unhashable
# Do not use the mesh name, which may not be unique (when linked)
# Do not use materials here
# TODO check what is really needed for modifiers
return (
(id(blender_mesh),),
(modifiers,)
)
@cached_by_key(key=get_primitive_cache_key)
def __gather_cache_primitives( def __gather_cache_primitives(
blender_mesh: bpy.types.Mesh, blender_mesh: bpy.types.Mesh,
uuid_for_skined_data, uuid_for_skined_data,
vertex_groups: Optional[bpy.types.VertexGroups], vertex_groups: bpy.types.VertexGroups,
modifiers: Optional[bpy.types.ObjectModifiers], modifiers: Optional[bpy.types.ObjectModifiers],
export_settings export_settings
) -> List[dict]: ) -> List[dict]:
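__gather_cache_primitives is now memoized on (id(blender_mesh), modifiers) rather than on materials, so meshes differing only in material assignment share one gathering pass. For illustration only, a hypothetical key-based cache decorator along these lines (this is not the add-on's actual cached_by_key implementation) might look like:

    import functools

    def cached_by_key(key):
        """Memoize a function on a caller-supplied key function (hypothetical sketch)."""
        def decorator(func):
            cache = {}
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                k = key(*args, **kwargs)   # e.g. ((id(blender_mesh),), (modifiers,))
                if k not in cache:
                    cache[k] = func(*args, **kwargs)
                return cache[k]
            return wrapper
        return decorator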
View File
@ -85,7 +85,7 @@ class PrimitiveCreator:
# Check if we have to export skin # Check if we have to export skin
self.armature = None self.armature = None
self.skin = None self.skin = None
if self.blender_vertex_groups and self.export_settings['gltf_skins']: if self.export_settings['gltf_skins']:
if self.modifiers is not None: if self.modifiers is not None:
modifiers_dict = {m.type: m for m in self.modifiers} modifiers_dict = {m.type: m for m in self.modifiers}
if "ARMATURE" in modifiers_dict: if "ARMATURE" in modifiers_dict:
@ -197,15 +197,6 @@ class PrimitiveCreator:
attr['skip_getting_to_dots'] = True attr['skip_getting_to_dots'] = True
self.blender_attributes.append(attr) self.blender_attributes.append(attr)
# Manage uvs TEX_COORD_x
for tex_coord_i in range(self.tex_coord_max):
attr = {}
attr['blender_data_type'] = 'FLOAT2'
attr['blender_domain'] = 'CORNER'
attr['gltf_attribute_name'] = 'TEXCOORD_' + str(tex_coord_i)
attr['get'] = self.get_function()
self.blender_attributes.append(attr)
# Manage NORMALS # Manage NORMALS
if self.use_normals: if self.use_normals:
attr = {} attr = {}
@ -216,6 +207,15 @@ class PrimitiveCreator:
attr['get'] = self.get_function() attr['get'] = self.get_function()
self.blender_attributes.append(attr) self.blender_attributes.append(attr)
# Manage uvs TEX_COORD_x
for tex_coord_i in range(self.tex_coord_max):
attr = {}
attr['blender_data_type'] = 'FLOAT2'
attr['blender_domain'] = 'CORNER'
attr['gltf_attribute_name'] = 'TEXCOORD_' + str(tex_coord_i)
attr['get'] = self.get_function()
self.blender_attributes.append(attr)
# Manage TANGENT # Manage TANGENT
if self.use_tangents: if self.use_tangents:
attr = {} attr = {}
@ -269,6 +269,13 @@ class PrimitiveCreator:
attr['len'] = gltf2_blender_conversion.get_data_length(attr['blender_data_type']) attr['len'] = gltf2_blender_conversion.get_data_length(attr['blender_data_type'])
attr['type'] = gltf2_blender_conversion.get_numpy_type(attr['blender_data_type']) attr['type'] = gltf2_blender_conversion.get_numpy_type(attr['blender_data_type'])
# Now that we have all attributes, we can change their order if we want
# Note that the glTF specification doesn't say anything about order
# Attributes are defined only by name
# But if the user wants them in a particular order, they can use this hook to do so
export_user_extensions('gather_attributes_change', self.export_settings, self.blender_attributes)
def create_dots_data_structure(self): def create_dots_data_structure(self):
# Now that we get all attributes that are going to be exported, create numpy array that will store them # Now that we get all attributes that are going to be exported, create numpy array that will store them
dot_fields = [('vertex_index', np.uint32)] dot_fields = [('vertex_index', np.uint32)]
@ -698,6 +705,8 @@ class PrimitiveCreator:
self.normals = self.normals.reshape(len(self.blender_mesh.loops), 3) self.normals = self.normals.reshape(len(self.blender_mesh.loops), 3)
self.normals = np.round(self.normals, NORMALS_ROUNDING_DIGIT) self.normals = np.round(self.normals, NORMALS_ROUNDING_DIGIT)
# Force normalization of normals in case some of them are not normalized (why?)
PrimitiveCreator.normalize_vecs(self.normals)
self.morph_normals = [] self.morph_normals = []
for key_block in key_blocks: for key_block in key_blocks:
View File
@ -2,12 +2,11 @@
# Copyright 2018-2021 The glTF-Blender-IO authors. # Copyright 2018-2021 The glTF-Blender-IO authors.
import re import re
import os import os
import urllib.parse
from typing import List from typing import List
from ... import get_version_string from ... import get_version_string
from ...io.com import gltf2_io, gltf2_io_extensions from ...io.com import gltf2_io, gltf2_io_extensions
from ...io.com.gltf2_io_path import path_to_uri from ...io.com.gltf2_io_path import path_to_uri, uri_to_path
from ...io.exp import gltf2_io_binary_data, gltf2_io_buffer, gltf2_io_image_data from ...io.exp import gltf2_io_binary_data, gltf2_io_buffer, gltf2_io_image_data
from ...io.exp.gltf2_io_user_extensions import export_user_extensions from ...io.exp.gltf2_io_user_extensions import export_user_extensions
@ -110,7 +109,7 @@ class GlTF2Exporter:
if is_glb: if is_glb:
uri = None uri = None
elif output_path and buffer_name: elif output_path and buffer_name:
with open(output_path + buffer_name, 'wb') as f: with open(output_path + uri_to_path(buffer_name), 'wb') as f:
f.write(self.__buffer.to_bytes()) f.write(self.__buffer.to_bytes())
uri = buffer_name uri = buffer_name
View File

@ -7,6 +7,8 @@ import numpy as np
from ...io.imp.gltf2_io_user_extensions import import_user_extensions from ...io.imp.gltf2_io_user_extensions import import_user_extensions
from ...io.com.gltf2_io_debug import print_console from ...io.com.gltf2_io_debug import print_console
from ...io.imp.gltf2_io_binary import BinaryData from ...io.imp.gltf2_io_binary import BinaryData
from ...io.com.gltf2_io_constants import DataType, ComponentType
from ...blender.com.gltf2_blender_conversion import get_attribute_type
from ..com.gltf2_blender_extras import set_extras from ..com.gltf2_blender_extras import set_extras
from .gltf2_blender_material import BlenderMaterial from .gltf2_blender_material import BlenderMaterial
from .gltf2_io_draco_compression_extension import decode_primitive from .gltf2_io_draco_compression_extension import decode_primitive
@ -594,7 +596,22 @@ def skin_into_bind_pose(gltf, skin_idx, vert_joints, vert_weights, locs, vert_no
for i in range(4): for i in range(4):
skinning_mats += ws[:, i].reshape(len(ws), 1, 1) * joint_mats[js[:, i]] skinning_mats += ws[:, i].reshape(len(ws), 1, 1) * joint_mats[js[:, i]]
weight_sums += ws[:, i] weight_sums += ws[:, i]
# Normalize weights to one; necessary for old files / quantized weights
# Some invalid files have 0 weight sum.
# To avoid leaving these vertices at (0.0, 0.0, 0.0),
# we assign the full weight (1.0) to the first bone
zeros_indices = np.where(weight_sums == 0)[0]
if zeros_indices.shape[0] > 0:
print_console('ERROR', 'File is invalid: Some vertices are not assigned to bone(s) ')
vert_weights[0][:, 0][zeros_indices] = 1.0 # Assign to first bone with all weight
# Reprocess IBM for these vertices
skinning_mats[zeros_indices] = np.zeros((4, 4), dtype=np.float32)
for js, ws in zip(vert_joints, vert_weights):
for i in range(4):
skinning_mats[zeros_indices] += ws[:, i][zeros_indices].reshape(len(ws[zeros_indices]), 1, 1) * joint_mats[js[:, i][zeros_indices]]
weight_sums[zeros_indices] += ws[:, i][zeros_indices]
skinning_mats /= weight_sums.reshape(num_verts, 1, 1) skinning_mats /= weight_sums.reshape(num_verts, 1, 1)
skinning_mats_3x3 = skinning_mats[:, :3, :3] skinning_mats_3x3 = skinning_mats[:, :3, :3]
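Reduced to the weights alone, the zero-sum fallback above works as in the following simplified sketch (sample data assumed; the real code also rebuilds the skinning matrices for the affected vertices):

    import numpy as np

    # Per-vertex joint weights; the second vertex is (invalidly) all zero.
    ws = np.array([[0.5, 0.5], [0.0, 0.0]], dtype=np.float32)
    weight_sums = ws.sum(axis=1)

    zeros_indices = np.where(weight_sums == 0)[0]
    if zeros_indices.shape[0] > 0:
        ws[zeros_indices, 0] = 1.0        # give the full weight to the first joint
        weight_sums[zeros_indices] = 1.0

    ws /= weight_sums.reshape(-1, 1)      # safe normalization for quantized weights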
View File
@ -1256,7 +1256,7 @@ def gzipOpen(path):
if data is None: if data is None:
try: try:
filehandle = open(path, 'rU', encoding='utf-8', errors='surrogateescape') filehandle = open(path, 'r', encoding='utf-8', errors='surrogateescape')
data = filehandle.read() data = filehandle.read()
filehandle.close() filehandle.close()
except: except:
View File
@ -146,7 +146,8 @@ class NWMergeShadersMenu(Menu, NWBase):
def draw(self, context): def draw(self, context):
layout = self.layout layout = self.layout
for type in ('MIX', 'ADD'): for type in ('MIX', 'ADD'):
props = layout.operator(operators.NWMergeNodes.bl_idname, text=type) name = f'{type.capitalize()} Shader'
props = layout.operator(operators.NWMergeNodes.bl_idname, text=name)
props.mode = type props.mode = type
props.merge_type = 'SHADER' props.merge_type = 'SHADER'
View File
@ -1304,6 +1304,8 @@ class NWMergeNodes(Operator, NWBase):
if tree_type == 'GEOMETRY': if tree_type == 'GEOMETRY':
if nodes_list is selected_math or nodes_list is selected_vector or nodes_list is selected_mix: if nodes_list is selected_math or nodes_list is selected_vector or nodes_list is selected_mix:
node_type = 'ShaderNode' node_type = 'ShaderNode'
if mode == 'MIX':
mode = 'ADD'
else: else:
node_type = 'GeometryNode' node_type = 'GeometryNode'
if merge_position == 'CENTER': if merge_position == 'CENTER':
View File
@ -3,7 +3,7 @@
bl_info = { bl_info = {
"name": "3D-Print Toolbox", "name": "3D-Print Toolbox",
"author": "Campbell Barton", "author": "Campbell Barton",
"blender": (3, 0, 0), "blender": (3, 6, 0),
"location": "3D View > Sidebar", "location": "3D View > Sidebar",
"description": "Utilities for 3D printing", "description": "Utilities for 3D printing",
"doc_url": "{BLENDER_MANUAL_URL}/addons/mesh/3d_print_toolbox.html", "doc_url": "{BLENDER_MANUAL_URL}/addons/mesh/3d_print_toolbox.html",
View File
@ -79,7 +79,8 @@ def write_mesh(context, report_cb):
name = data_("untitled") name = data_("untitled")
# add object name # add object name
name += f"-{bpy.path.clean_name(obj.name)}" import re
name += "-" + re.sub(r'[\\/:*?"<>|]', "", obj.name)
# first ensure the path is created # first ensure the path is created
if export_path: if export_path:
@ -113,17 +114,16 @@ def write_mesh(context, report_cb):
global_scale=global_scale, global_scale=global_scale,
) )
elif export_format == 'PLY': elif export_format == 'PLY':
addon_ensure("io_mesh_ply")
filepath = bpy.path.ensure_ext(filepath, ".ply") filepath = bpy.path.ensure_ext(filepath, ".ply")
ret = bpy.ops.export_mesh.ply( ret = bpy.ops.wm.ply_export(
filepath=filepath, filepath=filepath,
use_ascii=False, ascii_format=False,
use_mesh_modifiers=True, apply_modifiers=True,
use_selection=True, export_selected_objects=True,
global_scale=global_scale, global_scale=global_scale,
use_normals=export_data_layers, export_normals=export_data_layers,
use_uv_coords=export_data_layers, export_uv=export_data_layers,
use_colors=export_data_layers, export_colors="SRGB" if export_data_layers else "NONE",
) )
elif export_format == 'X3D': elif export_format == 'X3D':
addon_ensure("io_scene_x3d") addon_ensure("io_scene_x3d")
View File
@ -62,7 +62,7 @@ def pose_library_list_item_context_menu(self: UIList, context: Context) -> None:
list = getattr(context, "ui_list", None) list = getattr(context, "ui_list", None)
if not list or list.bl_idname != "UI_UL_asset_view" or list.list_id != "pose_assets": if not list or list.bl_idname != "UI_UL_asset_view" or list.list_id != "pose_assets":
return False return False
if not context.asset_handle: if not context.active_file:
return False return False
return True return True
View File
@ -60,9 +60,9 @@ class POSELIB_OT_create_pose_asset(PoseAssetCreator, Operator):
@classmethod @classmethod
def poll(cls, context: Context) -> bool: def poll(cls, context: Context) -> bool:
if context.object.mode != "POSE": if context.object is None or context.object.mode != "POSE":
# The operator assumes pose mode, so that bone selection is visible. # The operator assumes pose mode, so that bone selection is visible.
cls.poll_message_set("The object must be in Pose mode") cls.poll_message_set("An active armature object in pose mode is needed")
return False return False
# Make sure that if there is an asset browser open, the artist can see the newly created pose asset. # Make sure that if there is an asset browser open, the artist can see the newly created pose asset.
View File
@ -5,7 +5,7 @@
import bpy import bpy
from .shading import write_object_material_interior from .shading import write_object_material_interior
def export_meta(file, metas, tab_write, DEF_MAT_NAME): def export_meta(file, metas, material_names_dictionary, tab_write, DEF_MAT_NAME):
"""write all POV blob primitives and Blender Metas to exported file """ """write all POV blob primitives and Blender Metas to exported file """
# TODO - blenders 'motherball' naming is not supported. # TODO - blenders 'motherball' naming is not supported.
@ -221,7 +221,8 @@ def export_meta(file, metas, tab_write, DEF_MAT_NAME):
write_object_material_interior(file, one_material, mob, tab_write) write_object_material_interior(file, one_material, mob, tab_write)
# write_object_material_interior(file, one_material, elems[1]) # write_object_material_interior(file, one_material, elems[1])
tab_write(file, "radiosity{importance %3g}\n" % mob.pov.importance_value) tab_write(file, "radiosity{importance %3g}\n" % mob.pov.importance_value)
tab_write(file, "}\n\n") # End of Metaball block
tab_write(file, "}\n\n") # End of Metaball block
''' '''
View File
@ -554,6 +554,7 @@ def write_pov(filename, scene=None, info_callback=None):
model_meta_topology.export_meta(file, model_meta_topology.export_meta(file,
[m for m in sel if m.type == 'META'], [m for m in sel if m.type == 'META'],
material_names_dictionary,
tab_write, tab_write,
DEF_MAT_NAME,) DEF_MAT_NAME,)
View File
@ -812,6 +812,14 @@ class STORYPENCIL_OT_TabSwitch(Operator):
bl_options = {'INTERNAL'} bl_options = {'INTERNAL'}
def execute(self, context): def execute(self, context):
# For meta strips the tab key must be processed by another operator, so
# just pass through to the next operator in the stack.
if context.active_sequence_strip and context.active_sequence_strip.type == 'META':
return {'PASS_THROUGH'}
if context.scene.sequence_editor and context.scene.sequence_editor.meta_stack:
return {'PASS_THROUGH'}
if context.scene.storypencil_use_new_window: if context.scene.storypencil_use_new_window:
bpy.ops.storypencil.sync_set_main('INVOKE_DEFAULT', True) bpy.ops.storypencil.sync_set_main('INVOKE_DEFAULT', True)
else: else:
@ -821,15 +829,7 @@ class STORYPENCIL_OT_TabSwitch(Operator):
# Get strip under time cursor # Get strip under time cursor
strip, old_frame = get_sequence_at_frame( strip, old_frame = get_sequence_at_frame(
scene.frame_current, sequences=sequences) scene.frame_current, sequences=sequences)
# For meta strips the tab key must be processed by other operator, so if strip and strip.type == 'SCENE':
# just pass through to the next operator in the stack.
if strip is None or strip.type != 'SCENE':
if context.active_sequence_strip and context.active_sequence_strip.type == 'META':
return {'PASS_THROUGH'}
if context.scene.sequence_editor and context.scene.sequence_editor.meta_stack:
return {'PASS_THROUGH'}
else:
bpy.ops.storypencil.switch('INVOKE_DEFAULT', True) bpy.ops.storypencil.switch('INVOKE_DEFAULT', True)
return {'FINISHED'} return {'FINISHED'}
View File
@ -15,8 +15,8 @@
bl_info = { bl_info = {
"name": "Sun Position", "name": "Sun Position",
"author": "Michael Martin", "author": "Michael Martin, Damien Picard",
"version": (3, 2, 2), "version": (3, 3, 3),
"blender": (3, 0, 0), "blender": (3, 0, 0),
"location": "World > Sun Position", "location": "World > Sun Position",
"description": "Show sun position with objects and/or sky texture", "description": "Show sun position with objects and/or sky texture",
@ -63,6 +63,7 @@ def register():
bpy.app.handlers.load_post.append(sun_scene_handler) bpy.app.handlers.load_post.append(sun_scene_handler)
bpy.app.translations.register(__name__, translations.translations_dict) bpy.app.translations.register(__name__, translations.translations_dict)
def unregister(): def unregister():
bpy.app.translations.unregister(__name__) bpy.app.translations.unregister(__name__)
bpy.app.handlers.frame_change_post.remove(sun_calc.sun_handler) bpy.app.handlers.frame_change_post.remove(sun_calc.sun_handler)

View File
shader_info.vertex_out(shader_interface) shader_info.vertex_out(shader_interface)
shader_info.vertex_source( shader_info.vertex_source(
# uniform mat4 u_ViewProjectionMatrix;
# in vec3 position;
# flat out vec2 v_StartPos;
# out vec4 v_VertPos;
''' '''
void main() void main()
{ {
@ -40,11 +36,6 @@ else:
shader_info.fragment_out(0, 'VEC4', "FragColor") shader_info.fragment_out(0, 'VEC4', "FragColor")
shader_info.fragment_source( shader_info.fragment_source(
# uniform vec4 u_Color;
# uniform vec2 u_Resolution;
# flat in vec2 v_StartPos;
# in vec4 v_VertPos;
# out vec4 FragColor;
''' '''
void main() void main()
{ {
View File
@ -1,5 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
# SPDX-License-Identifier: GPL-2.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
# Copyright 2010 Maximilian Hoegner <hp.maxi@hoegners.de>. # Copyright 2010 Maximilian Hoegner <hp.maxi@hoegners.de>.
# geo.py is a python module with no dependencies on extra packages, # geo.py is a python module with no dependencies on extra packages,
@ -51,7 +51,7 @@ class Parser:
# do matching # do matching
m = re.match(pattern, text) m = re.match(pattern, text)
if m == None: if m is None:
return None return None
# build tree recursively by parsing subgroups # build tree recursively by parsing subgroups
@ -59,7 +59,7 @@ class Parser:
for i in range(len(subpattern_names)): for i in range(len(subpattern_names)):
text_part = m.group(i + 1) text_part = m.group(i + 1)
if not text_part == None: if text_part is not None:
subpattern = subpattern_names[i] subpattern = subpattern_names[i]
tree[subpattern] = self.parse(subpattern, text_part) tree[subpattern] = self.parse(subpattern, text_part)
@ -158,7 +158,8 @@ def parse_position(s):
Tries to be as tolerant as possible with input. Returns None if parsing doesn't succeed. """ Tries to be as tolerant as possible with input. Returns None if parsing doesn't succeed. """
parse_tree = position_parser.parse("position", s) parse_tree = position_parser.parse("position", s)
if parse_tree == None: return None if parse_tree is None:
return None
lat_sign = +1. lat_sign = +1.
if parse_tree.get( if parse_tree.get(
View File
@ -64,8 +64,7 @@ def draw_callback_px(self, context):
coords = ((-0.5, -0.5), (0.5, -0.5), (0.5, 0.5), (-0.5, 0.5)) coords = ((-0.5, -0.5), (0.5, -0.5), (0.5, 0.5), (-0.5, 0.5))
uv_coords = ((0, 0), (1, 0), (1, 1), (0, 1)) uv_coords = ((0, 0), (1, 0), (1, 1), (0, 1))
batch = batch_for_shader(shader, 'TRI_FAN', batch = batch_for_shader(shader, 'TRI_FAN',
{"pos" : coords, {"pos": coords, "texCoord": uv_coords})
"texCoord" : uv_coords})
with gpu.matrix.push_pop(): with gpu.matrix.push_pop():
gpu.matrix.translate(position) gpu.matrix.translate(position)
@ -79,7 +78,7 @@ def draw_callback_px(self, context):
# Crosshair # Crosshair
# vertical # vertical
coords = ((self.mouse_position[0], bottom), (self.mouse_position[0], top)) coords = ((self.mouse_position[0], bottom), (self.mouse_position[0], top))
colors = ((1,)*4,)*2 colors = ((1,) * 4,) * 2
shader = gpu.shader.from_builtin('2D_FLAT_COLOR') shader = gpu.shader.from_builtin('2D_FLAT_COLOR')
batch = batch_for_shader(shader, 'LINES', batch = batch_for_shader(shader, 'LINES',
{"pos": coords, "color": colors}) {"pos": coords, "color": colors})
@ -134,7 +133,9 @@ class SUNPOS_OT_ShowHdr(bpy.types.Operator):
self.mouse_position = Vector((mouse_position_abs.x - self.area.x, self.mouse_position = Vector((mouse_position_abs.x - self.area.x,
mouse_position_abs.y - self.area.y)) mouse_position_abs.y - self.area.y))
self.selected_point = (self.mouse_position - self.offset - Vector((self.right, self.top))/2) / self.scale self.selected_point = (self.mouse_position
- self.offset
- Vector((self.right, self.top)) / 2) / self.scale
u = self.selected_point.x / self.area.width + 0.5 u = self.selected_point.x / self.area.width + 0.5
v = (self.selected_point.y) / (self.area.width / 2) + 0.5 v = (self.selected_point.y) / (self.area.width / 2) + 0.5
@ -275,10 +276,13 @@ class SUNPOS_OT_ShowHdr(bpy.types.Operator):
self.initial_elevation = context.scene.sun_pos_properties.hdr_elevation self.initial_elevation = context.scene.sun_pos_properties.hdr_elevation
self.initial_azimuth = context.scene.sun_pos_properties.hdr_azimuth self.initial_azimuth = context.scene.sun_pos_properties.hdr_azimuth
context.workspace.status_text_set("Enter/LMB: confirm, Esc/RMB: cancel, MMB: pan, mouse wheel: zoom, Ctrl + mouse wheel: set exposure") context.workspace.status_text_set(
"Enter/LMB: confirm, Esc/RMB: cancel,"
" MMB: pan, mouse wheel: zoom, Ctrl + mouse wheel: set exposure")
self._handle = bpy.types.SpaceView3D.draw_handler_add(draw_callback_px, self._handle = bpy.types.SpaceView3D.draw_handler_add(
(self, context), 'WINDOW', 'POST_PIXEL') draw_callback_px, (self, context), 'WINDOW', 'POST_PIXEL'
)
context.window_manager.modal_handler_add(self) context.window_manager.modal_handler_add(self)
return {'RUNNING_MODAL'} return {'RUNNING_MODAL'}

View File

@ -5,7 +5,7 @@ from bpy.types import AddonPreferences, PropertyGroup
from bpy.props import (StringProperty, EnumProperty, IntProperty, from bpy.props import (StringProperty, EnumProperty, IntProperty,
FloatProperty, BoolProperty, PointerProperty) FloatProperty, BoolProperty, PointerProperty)
from .sun_calc import sun_update, parse_coordinates, surface_update, analemmas_update from .sun_calc import sun_update, parse_coordinates, surface_update, analemmas_update, sun
from .draw import north_update from .draw import north_update
from math import pi from math import pi
@ -19,7 +19,7 @@ TODAY = datetime.today()
class SunPosProperties(PropertyGroup): class SunPosProperties(PropertyGroup):
usage_mode: EnumProperty( usage_mode: EnumProperty(
name="Usage mode", name="Usage Mode",
description="Operate in normal mode or environment texture mode", description="Operate in normal mode or environment texture mode",
items=( items=(
('NORMAL', "Normal", ""), ('NORMAL', "Normal", ""),
@ -29,14 +29,14 @@ class SunPosProperties(PropertyGroup):
update=sun_update) update=sun_update)
use_daylight_savings: BoolProperty( use_daylight_savings: BoolProperty(
name="Daylight savings", name="Daylight Savings",
description="Daylight savings time adds 1 hour to standard time", description="Daylight savings time adds 1 hour to standard time",
default=False, default=False,
update=sun_update) update=sun_update)
use_refraction: BoolProperty( use_refraction: BoolProperty(
name="Use refraction", name="Use Refraction",
description="Show apparent sun position due to refraction", description="Show apparent Sun position due to refraction",
default=True, default=True,
update=sun_update) update=sun_update)
@ -81,6 +81,34 @@ class SunPosProperties(PropertyGroup):
default=0.0, default=0.0,
update=sun_update) update=sun_update)
sunrise_time: FloatProperty(
name="Sunrise Time",
description="Time at which the Sun rises",
soft_min=0.0, soft_max=24.0,
default=0.0,
get=lambda _: sun.sunrise)
sunset_time: FloatProperty(
name="Sunset Time",
description="Time at which the Sun sets",
soft_min=0.0, soft_max=24.0,
default=0.0,
get=lambda _: sun.sunset)
sun_azimuth: FloatProperty(
name="Sun Azimuth",
description="Rotation angle of the Sun from the north direction",
soft_min=-pi, soft_max=pi,
default=0.0,
get=lambda _: sun.azimuth)
sun_elevation: FloatProperty(
name="Sun Elevation",
description="Elevation angle of the Sun",
soft_min=-pi/2, soft_max=pi/2,
default=0.0,
get=lambda _: sun.elevation)
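The four properties added above share one pattern: a get= callback with no matching set=, which makes them read-only displays of values computed elsewhere (here, cached on the module-level sun object during the update handlers). A self-contained sketch of that pattern, with illustrative names:

import bpy

class _Cache:
    elevation = 0.7  # stand-in for a value computed by an update handler

_cache = _Cache()

class SketchProps(bpy.types.PropertyGroup):
    # No set= callback: the property is read-only in the UI and from Python.
    elevation: bpy.props.FloatProperty(
        name="Elevation",
        get=lambda self: _cache.elevation)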
co_parser: StringProperty( co_parser: StringProperty(
name="Enter coordinates", name="Enter coordinates",
description="Enter coordinates from an online map", description="Enter coordinates from an online map",

View File

@ -4,9 +4,10 @@ import bpy
from bpy.app.handlers import persistent from bpy.app.handlers import persistent
import gpu import gpu
from gpu_extras.batch import batch_for_shader from gpu_extras.batch import batch_for_shader
from mathutils import Euler, Vector from mathutils import Euler, Vector
import math
from math import degrees, radians, pi from math import degrees, radians, pi, sin, cos, asin, acos, tan, floor
import datetime import datetime
from .geo import parse_position from .geo import parse_position
@ -15,21 +16,14 @@ class SunInfo:
""" """
Store intermediate sun calculations Store intermediate sun calculations
""" """
class TAzEl:
time = 0.0
azimuth = 0.0
elevation = 0.0
class CLAMP: class SunBind:
azimuth = 0.0 azimuth = 0.0
elevation = 0.0 elevation = 0.0
az_start_sun = 0.0 az_start_sun = 0.0
az_start_env = 0.0 az_start_env = 0.0
sunrise = TAzEl() bind = SunBind()
sunset = TAzEl()
bind = CLAMP()
bind_to_sun = False bind_to_sun = False
latitude = 0.0 latitude = 0.0
@ -37,6 +31,9 @@ class SunInfo:
elevation = 0.0 elevation = 0.0
azimuth = 0.0 azimuth = 0.0
sunrise = 0.0
sunset = 0.0
month = 0 month = 0
day = 0 day = 0
year = 0 year = 0
@ -47,6 +44,7 @@ class SunInfo:
sun_distance = 0.0 sun_distance = 0.0
use_daylight_savings = False use_daylight_savings = False
sun = SunInfo() sun = SunInfo()
@ -78,8 +76,8 @@ def parse_coordinates(self, context):
def move_sun(context): def move_sun(context):
""" """
Cycle through all the selected objects and call set_sun_location and Cycle through all the selected objects and set their position and rotation
set_sun_rotations to place them in the sky in the sky.
""" """
addon_prefs = context.preferences.addons[__package__].preferences addon_prefs = context.preferences.addons[__package__].preferences
sun_props = context.scene.sun_pos_properties sun_props = context.scene.sun_pos_properties
@ -100,11 +98,9 @@ def move_sun(context):
env_tex.texture_mapping.rotation.z = az env_tex.texture_mapping.rotation.z = az
if sun_props.sun_object: if sun_props.sun_object:
theta = math.pi / 2 - sun_props.hdr_elevation
phi = -sun_props.hdr_azimuth
obj = sun_props.sun_object obj = sun_props.sun_object
obj.location = get_sun_vector(theta, phi) * sun_props.sun_distance obj.location = get_sun_vector(
sun_props.hdr_azimuth, sun_props.hdr_elevation) * sun_props.sun_distance
rotation_euler = Euler((sun_props.hdr_elevation - pi/2, rotation_euler = Euler((sun_props.hdr_elevation - pi/2,
0, -sun_props.hdr_azimuth)) 0, -sun_props.hdr_azimuth))
@ -118,34 +114,33 @@ def move_sun(context):
if sun.use_daylight_savings: if sun.use_daylight_savings:
zone -= 1 zone -= 1
north_offset = degrees(sun_props.north_offset)
if addon_prefs.show_rise_set: if addon_prefs.show_rise_set:
calc_sunrise_sunset(rise=True) calc_sunrise_sunset(rise=True)
calc_sunrise_sunset(rise=False) calc_sunrise_sunset(rise=False)
az_north, theta, phi, azimuth, elevation = get_sun_coordinates( azimuth, elevation = get_sun_coordinates(
local_time, sun_props.latitude, sun_props.longitude, local_time, sun_props.latitude, sun_props.longitude,
north_offset, zone, sun_props.month, sun_props.day, sun_props.year, zone, sun_props.month, sun_props.day, sun_props.year,
sun_props.sun_distance) sun_props.sun_distance)
sun.azimuth = azimuth sun.azimuth = azimuth
sun.elevation = elevation sun.elevation = elevation
sun_vector = get_sun_vector(azimuth, elevation)
if sun_props.sky_texture: if sun_props.sky_texture:
sky_node = bpy.context.scene.world.node_tree.nodes.get(sun_props.sky_texture) sky_node = bpy.context.scene.world.node_tree.nodes.get(sun_props.sky_texture)
if sky_node is not None and sky_node.type == "TEX_SKY": if sky_node is not None and sky_node.type == "TEX_SKY":
sky_node.texture_mapping.rotation.z = 0.0 sky_node.texture_mapping.rotation.z = 0.0
sky_node.sun_direction = get_sun_vector(theta, phi) sky_node.sun_direction = sun_vector
sky_node.sun_elevation = math.radians(elevation) sky_node.sun_elevation = elevation
sky_node.sun_rotation = math.radians(az_north) sky_node.sun_rotation = azimuth
# Sun object # Sun object
if (sun_props.sun_object is not None if (sun_props.sun_object is not None
and sun_props.sun_object.name in context.view_layer.objects): and sun_props.sun_object.name in context.view_layer.objects):
obj = sun_props.sun_object obj = sun_props.sun_object
obj.location = get_sun_vector(theta, phi) * sun_props.sun_distance obj.location = sun_vector * sun_props.sun_distance
rotation_euler = Euler((math.radians(elevation - 90), 0, rotation_euler = Euler((elevation - pi/2, 0, -azimuth))
math.radians(-az_north)))
set_sun_rotations(obj, rotation_euler) set_sun_rotations(obj, rotation_euler)
# Sun collection # Sun collection
@ -161,16 +156,14 @@ def move_sun(context):
time_increment = sun_props.time_spread time_increment = sun_props.time_spread
for obj in sun_objects: for obj in sun_objects:
az_north, theta, phi, azimuth, elevation = get_sun_coordinates( azimuth, elevation = get_sun_coordinates(
local_time, sun_props.latitude, local_time, sun_props.latitude,
sun_props.longitude, north_offset, zone, sun_props.longitude, zone,
sun_props.month, sun_props.day, sun_props.month, sun_props.day,
sun_props.year, sun_props.sun_distance) sun_props.year, sun_props.sun_distance)
obj.location = get_sun_vector(theta, phi) * sun_props.sun_distance obj.location = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
local_time -= time_increment local_time -= time_increment
obj.rotation_euler = ( obj.rotation_euler = ((elevation - pi/2, 0, -azimuth))
(math.radians(elevation - 90), 0,
math.radians(-az_north)))
else: else:
# Analemma # Analemma
day_increment = 365 / object_count day_increment = 365 / object_count
@ -178,22 +171,21 @@ def move_sun(context):
for obj in sun_objects: for obj in sun_objects:
dt = (datetime.date(sun_props.year, 1, 1) + dt = (datetime.date(sun_props.year, 1, 1) +
datetime.timedelta(day - 1)) datetime.timedelta(day - 1))
az_north, theta, phi, azimuth, elevation = get_sun_coordinates( azimuth, elevation = get_sun_coordinates(
local_time, sun_props.latitude, local_time, sun_props.latitude,
sun_props.longitude, north_offset, zone, sun_props.longitude, zone,
dt.month, dt.day, sun_props.year, dt.month, dt.day, sun_props.year,
sun_props.sun_distance) sun_props.sun_distance)
obj.location = get_sun_vector(theta, phi) * sun_props.sun_distance obj.location = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
day -= day_increment day -= day_increment
obj.rotation_euler = ( obj.rotation_euler = (elevation - pi/2, 0, -azimuth)
(math.radians(elevation - 90), 0,
math.radians(-az_north)))
def day_of_year_to_month_day(year, day_of_year): def day_of_year_to_month_day(year, day_of_year):
dt = (datetime.date(year, 1, 1) + datetime.timedelta(day_of_year - 1)) dt = (datetime.date(year, 1, 1) + datetime.timedelta(day_of_year - 1))
return dt.day, dt.month return dt.day, dt.month
def month_day_to_day_of_year(year, month, day): def month_day_to_day_of_year(year, month, day):
dt = datetime.date(year, month, day) dt = datetime.date(year, month, day)
return dt.timetuple().tm_yday return dt.timetuple().tm_yday
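One detail worth keeping in mind when reading the callers below: day_of_year_to_month_day returns (day, month) in that order, and calc_analemma unpacks it as "day, month = ...". A quick round-trip check (plain Python, values verified by hand for a non-leap year):

import datetime

def day_of_year_to_month_day(year, day_of_year):
    dt = datetime.date(year, 1, 1) + datetime.timedelta(day_of_year - 1)
    return dt.day, dt.month

def month_day_to_day_of_year(year, month, day):
    return datetime.date(year, month, day).timetuple().tm_yday

print(day_of_year_to_month_day(2023, 60))    # (1, 3): day 60 is 1 March
print(month_day_to_day_of_year(2023, 3, 1))  # 60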
@ -275,7 +267,7 @@ def format_lat_long(lat_long, is_latitude):
return hh + "° " + mm + "' " + ss + '"' + coord_tag return hh + "° " + mm + "' " + ss + '"' + coord_tag
def get_sun_coordinates(local_time, latitude, longitude, north_offset, def get_sun_coordinates(local_time, latitude, longitude,
utc_zone, month, day, year, distance): utc_zone, month, day, year, distance):
""" """
Calculate the actual position of the sun based on input parameters. Calculate the actual position of the sun based on input parameters.
@ -319,31 +311,31 @@ def get_sun_coordinates(local_time, latitude, longitude, north_offset,
if hour_angle < -180.0: if hour_angle < -180.0:
hour_angle += 360.0 hour_angle += 360.0
csz = (math.sin(latitude) * math.sin(solar_dec) + csz = (sin(latitude) * sin(solar_dec) +
math.cos(latitude) * math.cos(solar_dec) * cos(latitude) * cos(solar_dec) *
math.cos(radians(hour_angle))) cos(radians(hour_angle)))
if csz > 1.0: if csz > 1.0:
csz = 1.0 csz = 1.0
elif csz < -1.0: elif csz < -1.0:
csz = -1.0 csz = -1.0
zenith = math.acos(csz) zenith = acos(csz)
az_denom = math.cos(latitude) * math.sin(zenith) az_denom = cos(latitude) * sin(zenith)
if abs(az_denom) > 0.001: if abs(az_denom) > 0.001:
az_rad = ((math.sin(latitude) * az_rad = ((sin(latitude) *
math.cos(zenith)) - math.sin(solar_dec)) / az_denom cos(zenith)) - sin(solar_dec)) / az_denom
if abs(az_rad) > 1.0: if abs(az_rad) > 1.0:
az_rad = -1.0 if (az_rad < 0.0) else 1.0 az_rad = -1.0 if (az_rad < 0.0) else 1.0
azimuth = 180.0 - degrees(math.acos(az_rad)) azimuth = pi - acos(az_rad)
if hour_angle > 0.0: if hour_angle > 0.0:
azimuth = -azimuth azimuth = -azimuth
else: else:
azimuth = 180.0 if (latitude > 0.0) else 0.0 azimuth = pi if (latitude > 0.0) else 0.0
if azimuth < 0.0: if azimuth < 0.0:
azimuth = azimuth + 360.0 azimuth += 2*pi
exoatm_elevation = 90.0 - degrees(zenith) exoatm_elevation = 90.0 - degrees(zenith)
@ -351,43 +343,39 @@ def get_sun_coordinates(local_time, latitude, longitude, north_offset,
if exoatm_elevation > 85.0: if exoatm_elevation > 85.0:
refraction_correction = 0.0 refraction_correction = 0.0
else: else:
te = math.tan(radians(exoatm_elevation)) te = tan(radians(exoatm_elevation))
if exoatm_elevation > 5.0: if exoatm_elevation > 5.0:
refraction_correction = ( refraction_correction = (
58.1 / te - 0.07 / (te ** 3) + 0.000086 / (te ** 5)) 58.1 / te - 0.07 / (te ** 3) + 0.000086 / (te ** 5))
elif (exoatm_elevation > -0.575): elif exoatm_elevation > -0.575:
s1 = (-12.79 + exoatm_elevation * 0.711) s1 = -12.79 + exoatm_elevation * 0.711
s2 = (103.4 + exoatm_elevation * (s1)) s2 = 103.4 + exoatm_elevation * s1
s3 = (-518.2 + exoatm_elevation * (s2)) s3 = -518.2 + exoatm_elevation * s2
refraction_correction = 1735.0 + exoatm_elevation * (s3) refraction_correction = 1735.0 + exoatm_elevation * (s3)
else: else:
refraction_correction = -20.774 / te refraction_correction = -20.774 / te
refraction_correction = refraction_correction / 3600 refraction_correction /= 3600
solar_elevation = 90.0 - (degrees(zenith) - refraction_correction) elevation = pi/2 - (zenith - radians(refraction_correction))
else: else:
solar_elevation = 90.0 - degrees(zenith) elevation = pi/2 - zenith
solar_azimuth = azimuth azimuth += sun_props.north_offset
solar_azimuth += north_offset
az_north = solar_azimuth return azimuth, elevation
theta = math.pi / 2 - radians(solar_elevation)
phi = radians(solar_azimuth) * -1
azimuth = azimuth
elevation = solar_elevation
return az_north, theta, phi, azimuth, elevation
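The refraction branch kept above appears to be the standard NOAA solar-calculator approximation in three regimes: a cotangent series above 5 degrees of elevation, a cubic polynomial between -0.575 and 5 degrees, and a fixed cotangent term below that. All three produce arc-seconds, hence the division by 3600 before the result is folded into the (now radian-valued) elevation. Isolated as a standalone function, degrees in and degrees out (a sketch, not code from this patch):

from math import radians, tan

def refraction_correction_deg(exoatm_elevation):
    """Atmospheric refraction in degrees for an exoatmospheric elevation in degrees."""
    if exoatm_elevation > 85.0:
        return 0.0
    te = tan(radians(exoatm_elevation))
    if exoatm_elevation > 5.0:
        seconds = 58.1 / te - 0.07 / te**3 + 0.000086 / te**5
    elif exoatm_elevation > -0.575:
        # Same cubic as in the patch, written in Horner form.
        seconds = 1735.0 + exoatm_elevation * (-518.2 + exoatm_elevation *
                  (103.4 + exoatm_elevation * (-12.79 + exoatm_elevation * 0.711)))
    else:
        seconds = -20.774 / te
    return seconds / 3600.0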
def get_sun_vector(theta, phi): def get_sun_vector(azimuth, elevation):
""" """
Convert the sun coordinates to cartesian Convert the sun coordinates to cartesian
""" """
loc_x = math.sin(phi) * math.sin(-theta) phi = -azimuth
loc_y = math.sin(theta) * math.cos(phi) theta = pi/2 - elevation
loc_z = math.cos(theta)
loc_x = sin(phi) * sin(-theta)
loc_y = sin(theta) * cos(phi)
loc_z = cos(theta)
return Vector((loc_x, loc_y, loc_z)) return Vector((loc_x, loc_y, loc_z))
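With phi = -azimuth and theta = pi/2 - elevation folded in, the rewritten get_sun_vector reduces to (sin az * cos el, cos az * cos el, sin el): +Y points north and +Z points up. A quick sanity check of the same arithmetic with plain tuples (no Blender required; results hold up to float rounding):

from math import pi, sin, cos

def sun_vector(azimuth, elevation):
    phi = -azimuth
    theta = pi / 2 - elevation
    return (sin(phi) * sin(-theta), sin(theta) * cos(phi), cos(theta))

print(sun_vector(0.0, pi / 2))  # zenith:             ~(0, 0, 1)
print(sun_vector(0.0, 0.0))     # due north, horizon: ~(0, 1, 0)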
@ -426,14 +414,14 @@ def calc_sun_declination(t):
def calc_hour_angle_sunrise(lat, solar_dec): def calc_hour_angle_sunrise(lat, solar_dec):
lat_rad = radians(lat) lat_rad = radians(lat)
HAarg = (math.cos(radians(90.833)) / HAarg = (cos(radians(90.833)) /
(math.cos(lat_rad) * math.cos(solar_dec)) (cos(lat_rad) * cos(solar_dec))
- math.tan(lat_rad) * math.tan(solar_dec)) - tan(lat_rad) * tan(solar_dec))
if HAarg < -1.0: if HAarg < -1.0:
HAarg = -1.0 HAarg = -1.0
elif HAarg > 1.0: elif HAarg > 1.0:
HAarg = 1.0 HAarg = 1.0
HA = math.acos(HAarg) HA = acos(HAarg)
return HA return HA
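The clamp on HAarg is what keeps acos defined at high latitudes: an argument outside [-1, 1] means the sun never reaches the 90.833-degree zenith distance used for rise/set that day (midnight sun or polar night), and clamping pins the hour angle to 0 or pi accordingly. The same function written with an explicit clamp, as a sketch:

from math import acos, cos, radians, tan

def calc_hour_angle_sunrise(lat, solar_dec):
    lat_rad = radians(lat)
    ha_arg = (cos(radians(90.833)) / (cos(lat_rad) * cos(solar_dec))
              - tan(lat_rad) * tan(solar_dec))
    # |ha_arg| > 1: no sunrise/sunset at this latitude and declination.
    return acos(max(-1.0, min(1.0, ha_arg)))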
@ -458,8 +446,8 @@ def calc_sunrise_sunset(rise):
sun.latitude, sun.longitude) sun.latitude, sun.longitude)
time_local = new_time_UTC + (-zone * 60.0) time_local = new_time_UTC + (-zone * 60.0)
tl = time_local / 60.0 tl = time_local / 60.0
az_north, theta, phi, azimuth, elevation = get_sun_coordinates( azimuth, elevation = get_sun_coordinates(
tl, sun.latitude, sun.longitude, 0.0, tl, sun.latitude, sun.longitude,
zone, sun.month, sun.day, sun.year, zone, sun.month, sun.day, sun.year,
sun.sun_distance) sun.sun_distance)
if sun.use_daylight_savings: if sun.use_daylight_savings:
@ -467,13 +455,9 @@ def calc_sunrise_sunset(rise):
tl = time_local / 60.0 tl = time_local / 60.0
tl %= 24.0 tl %= 24.0
if rise: if rise:
sun.sunrise.time = tl sun.sunrise = tl
sun.sunrise.azimuth = azimuth
sun.sunrise.elevation = elevation
else: else:
sun.sunset.time = tl sun.sunset = tl
sun.sunset.azimuth = azimuth
sun.sunset.elevation = elevation
def julian_time_from_y2k(utc_time, year, month, day): def julian_time_from_y2k(utc_time, year, month, day):
@ -491,10 +475,10 @@ def get_julian_day(year, month, day):
if month <= 2: if month <= 2:
year -= 1 year -= 1
month += 12 month += 12
A = math.floor(year / 100) A = floor(year / 100)
B = 2 - A + math.floor(A / 4.0) B = 2 - A + floor(A / 4.0)
jd = (math.floor((365.25 * (year + 4716.0))) + jd = (floor((365.25 * (year + 4716.0))) +
math.floor(30.6001 * (month + 1)) + day + B - 1524.5) floor(30.6001 * (month + 1)) + day + B - 1524.5)
return jd return jd
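As a spot check on get_julian_day (unchanged here apart from dropping the math. prefixes), the Meeus-style formula it implements gives JD 2451544.5 for 2000-01-01 at midnight UT, so noon of that day is 2451545.0, the J2000 epoch that julian_time_from_y2k counts from:

from math import floor

def get_julian_day(year, month, day):
    if month <= 2:
        year -= 1
        month += 12
    A = floor(year / 100)
    B = 2 - A + floor(A / 4.0)
    return (floor(365.25 * (year + 4716.0)) +
            floor(30.6001 * (month + 1)) + day + B - 1524.5)

print(get_julian_day(2000, 1, 1))  # 2451544.5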
@ -504,7 +488,7 @@ def calc_time_julian_cent(jd):
def sun_declination(e, L): def sun_declination(e, L):
return (math.asin(math.sin(e) * math.sin(L))) return (asin(sin(e) * sin(L)))
def calc_equation_of_time(t): def calc_equation_of_time(t):
@ -512,13 +496,13 @@ def calc_equation_of_time(t):
ml = radians(mean_longitude_sun(t)) ml = radians(mean_longitude_sun(t))
e = eccentricity_earth_orbit(t) e = eccentricity_earth_orbit(t)
m = radians(mean_anomaly_sun(t)) m = radians(mean_anomaly_sun(t))
y = math.tan(radians(epsilon) / 2.0) y = tan(radians(epsilon) / 2.0)
y = y * y y = y * y
sin2ml = math.sin(2.0 * ml) sin2ml = sin(2.0 * ml)
cos2ml = math.cos(2.0 * ml) cos2ml = cos(2.0 * ml)
sin4ml = math.sin(4.0 * ml) sin4ml = sin(4.0 * ml)
sinm = math.sin(m) sinm = sin(m)
sin2m = math.sin(2.0 * m) sin2m = sin(2.0 * m)
etime = (y * sin2ml - 2.0 * e * sinm + 4.0 * e * y * etime = (y * sin2ml - 2.0 * e * sinm + 4.0 * e * y *
sinm * cos2ml - 0.5 * y ** 2 * sin4ml - 1.25 * e ** 2 * sin2m) sinm * cos2ml - 0.5 * y ** 2 * sin4ml - 1.25 * e ** 2 * sin2m)
return (degrees(etime) * 4) return (degrees(etime) * 4)
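A unit note on calc_equation_of_time: etime is an angle in radians, and degrees(etime) * 4 converts it to minutes of time because the Earth turns 360 degrees in 24 hours, i.e. one degree of hour angle every four minutes:

# 24 h * 60 min / 360 deg = 4 minutes of time per degree of hour angle
minutes_per_degree = 24 * 60 / 360
assert minutes_per_degree == 4.0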
@ -527,7 +511,7 @@ def calc_equation_of_time(t):
def obliquity_correction(t): def obliquity_correction(t):
ec = obliquity_of_ecliptic(t) ec = obliquity_of_ecliptic(t)
omega = 125.04 - 1934.136 * t omega = 125.04 - 1934.136 * t
return (ec + 0.00256 * math.cos(radians(omega))) return (ec + 0.00256 * cos(radians(omega)))
def obliquity_of_ecliptic(t): def obliquity_of_ecliptic(t):
@ -542,13 +526,13 @@ def true_longitude_of_sun(t):
def calc_sun_apparent_long(t): def calc_sun_apparent_long(t):
o = true_longitude_of_sun(t) o = true_longitude_of_sun(t)
omega = 125.04 - 1934.136 * t omega = 125.04 - 1934.136 * t
lamb = o - 0.00569 - 0.00478 * math.sin(radians(omega)) lamb = o - 0.00569 - 0.00478 * sin(radians(omega))
return lamb return lamb
def apparent_longitude_of_sun(t): def apparent_longitude_of_sun(t):
return (radians(true_longitude_of_sun(t) - 0.00569 - 0.00478 * return (radians(true_longitude_of_sun(t) - 0.00569 - 0.00478 *
math.sin(radians(125.04 - 1934.136 * t)))) sin(radians(125.04 - 1934.136 * t))))
def mean_longitude_sun(t): def mean_longitude_sun(t):
@ -557,9 +541,9 @@ def mean_longitude_sun(t):
def equation_of_sun_center(t): def equation_of_sun_center(t):
m = radians(mean_anomaly_sun(t)) m = radians(mean_anomaly_sun(t))
c = ((1.914602 - 0.004817 * t - 0.000014 * t**2) * math.sin(m) + c = ((1.914602 - 0.004817 * t - 0.000014 * t**2) * sin(m) +
(0.019993 - 0.000101 * t) * math.sin(m * 2) + (0.019993 - 0.000101 * t) * sin(m * 2) +
0.000289 * math.sin(m * 3)) 0.000289 * sin(m * 3))
return c return c
@ -575,13 +559,12 @@ def calc_surface(context):
coords = [] coords = []
sun_props = context.scene.sun_pos_properties sun_props = context.scene.sun_pos_properties
zone = -sun_props.UTC_zone zone = -sun_props.UTC_zone
north_offset = degrees(sun_props.north_offset)
def get_surface_coordinates(time, month): def get_surface_coordinates(time, month):
_, theta, phi, _, _ = get_sun_coordinates( azimuth, elevation = get_sun_coordinates(
time, sun_props.latitude, sun_props.longitude, north_offset, time, sun_props.latitude, sun_props.longitude,
zone, month, 1, sun_props.year, sun_props.sun_distance) zone, month, 1, sun_props.year, sun_props.sun_distance)
sun_vector = get_sun_vector(theta, phi) * sun_props.sun_distance sun_vector = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
sun_vector.z = max(0, sun_vector.z) sun_vector.z = max(0, sun_vector.z)
return sun_vector return sun_vector
@ -601,21 +584,19 @@ def calc_analemma(context, h):
vertices = [] vertices = []
sun_props = context.scene.sun_pos_properties sun_props = context.scene.sun_pos_properties
zone = -sun_props.UTC_zone zone = -sun_props.UTC_zone
north_offset = degrees(sun_props.north_offset)
for day_of_year in range(1, 367, 5): for day_of_year in range(1, 367, 5):
day, month = day_of_year_to_month_day(sun_props.year, day_of_year) day, month = day_of_year_to_month_day(sun_props.year, day_of_year)
_, theta, phi, _, _ = get_sun_coordinates( azimuth, elevation = get_sun_coordinates(
h, sun_props.latitude, sun_props.longitude, h, sun_props.latitude, sun_props.longitude,
north_offset, zone, month, day, sun_props.year, zone, month, day, sun_props.year,
sun_props.sun_distance) sun_props.sun_distance)
sun_vector = get_sun_vector(theta, phi) * sun_props.sun_distance sun_vector = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
if sun_vector.z > 0: if sun_vector.z > 0:
vertices.append(sun_vector) vertices.append(sun_vector)
return vertices return vertices
def draw_surface(batch, shader): def draw_surface(batch, shader):
blend = gpu.state.blend_get() blend = gpu.state.blend_get()
gpu.state.blend_set("ALPHA") gpu.state.blend_set("ALPHA")
shader.uniform_float("color", (.8, .6, 0, 0.2)) shader.uniform_float("color", (.8, .6, 0, 0.2))
@ -630,6 +611,7 @@ def draw_analemmas(batch, shader):
_handle_surface = None _handle_surface = None
def surface_update(self, context): def surface_update(self, context):
global _handle_surface global _handle_surface
if self.show_surface: if self.show_surface:
@ -648,6 +630,7 @@ def surface_update(self, context):
_handle_analemmas = None _handle_analemmas = None
def analemmas_update(self, context): def analemmas_update(self, context):
global _handle_analemmas global _handle_analemmas
if self.show_analemmas: if self.show_analemmas:
@ -664,7 +647,7 @@ def analemmas_update(self, context):
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR') shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
batch = batch_for_shader(shader, 'LINES', batch = batch_for_shader(shader, 'LINES',
{"pos": coords}, indices=indices) {"pos": coords}, indices=indices)
if _handle_analemmas is not None: if _handle_analemmas is not None:
bpy.types.SpaceView3D.draw_handler_remove(_handle_analemmas, 'WINDOW') bpy.types.SpaceView3D.draw_handler_remove(_handle_analemmas, 'WINDOW')

View File

@ -4,6 +4,7 @@ import bpy
from bpy.types import Operator, Menu from bpy.types import Operator, Menu
from bl_operators.presets import AddPresetBase from bl_operators.presets import AddPresetBase
import os import os
from math import degrees
from .sun_calc import (format_lat_long, format_time, format_hms, sun) from .sun_calc import (format_lat_long, format_time, format_hms, sun)
@ -79,7 +80,7 @@ class SUNPOS_PT_Panel(bpy.types.Panel):
def draw_environ_mode_panel(self, context, sp, p, layout): def draw_environ_mode_panel(self, context, sp, p, layout):
flow = layout.grid_flow(row_major=True, columns=0, even_columns=True, flow = layout.grid_flow(row_major=True, columns=0, even_columns=True,
even_rows=False, align=False) even_rows=False, align=False)
col = flow.column(align=True) col = flow.column(align=True)
col.label(text="Environment Texture") col.label(text="Environment Texture")
@ -153,6 +154,7 @@ class SUNPOS_PT_Panel(bpy.types.Panel):
col.label(text="Please select World in the World panel.", col.label(text="Please select World in the World panel.",
icon="ERROR") icon="ERROR")
class SUNPOS_PT_Location(bpy.types.Panel): class SUNPOS_PT_Location(bpy.types.Panel):
bl_space_type = "PROPERTIES" bl_space_type = "PROPERTIES"
bl_region_type = "WINDOW" bl_region_type = "WINDOW"
@ -211,10 +213,10 @@ class SUNPOS_PT_Location(bpy.types.Panel):
col = flow.column(align=True) col = flow.column(align=True)
split = col.split(factor=0.4, align=True) split = col.split(factor=0.4, align=True)
split.label(text="Azimuth:") split.label(text="Azimuth:")
split.label(text=str(round(sun.azimuth, 3)) + "°") split.label(text=str(round(degrees(sun.azimuth), 3)) + "°")
split = col.split(factor=0.4, align=True) split = col.split(factor=0.4, align=True)
split.label(text="Elevation:") split.label(text="Elevation:")
split.label(text=str(round(sun.elevation, 3)) + "°") split.label(text=str(round(degrees(sun.elevation), 3)) + "°")
col.separator() col.separator()
if p.show_refraction: if p.show_refraction:
@ -282,12 +284,11 @@ class SUNPOS_PT_Time(bpy.types.Panel):
split.label(text=ut) split.label(text=ut)
col.separator() col.separator()
col = flow.column(align=True) col = flow.column(align=True)
col.alignment = 'CENTER' col.alignment = 'CENTER'
if p.show_rise_set: if p.show_rise_set:
sr = format_hms(sun.sunrise.time) sr = format_hms(sun.sunrise)
ss = format_hms(sun.sunset.time) ss = format_hms(sun.sunset)
split = col.split(factor=0.5, align=True) split = col.split(factor=0.5, align=True)
split.label(text="Sunrise:", icon='LIGHT_SUN') split.label(text="Sunrise:", icon='LIGHT_SUN')