Pose Library: Update to use the asset shelf (when enabled) #104546
@ -2,4 +2,4 @@ ${CommitTitle}
|
|||||||
|
|
||||||
${CommitBody}
|
${CommitBody}
|
||||||
|
|
||||||
Pull Request #${PullRequestIndex}
|
Pull Request: https://projects.blender.org/blender/blender-addons/pulls/${PullRequestIndex}
|
||||||
|
@ -1,3 +1,3 @@
|
|||||||
${PullRequestTitle}
|
${PullRequestTitle}
|
||||||
|
|
||||||
Pull Request #${PullRequestIndex}
|
Pull Request: https://projects.blender.org/blender/blender-addons/pulls/${PullRequestIndex}
|
||||||
|
@ -11,8 +11,8 @@ It's called "global" to avoid confusion with the Blender World data-block.
|
|||||||
bl_info = {
|
bl_info = {
|
||||||
"name": "Copy Global Transform",
|
"name": "Copy Global Transform",
|
||||||
"author": "Sybren A. Stüvel",
|
"author": "Sybren A. Stüvel",
|
||||||
"version": (2, 0),
|
"version": (2, 1),
|
||||||
"blender": (3, 1, 0),
|
"blender": (3, 5, 0),
|
||||||
"location": "N-panel in the 3D Viewport",
|
"location": "N-panel in the 3D Viewport",
|
||||||
"category": "Animation",
|
"category": "Animation",
|
||||||
"support": 'OFFICIAL',
|
"support": 'OFFICIAL',
|
||||||
@ -23,10 +23,17 @@ import ast
|
|||||||
from typing import Iterable, Optional, Union, Any
|
from typing import Iterable, Optional, Union, Any
|
||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
from bpy.types import Context, Object, Operator, Panel, PoseBone
|
from bpy.types import Context, Object, Operator, Panel, PoseBone, UILayout
|
||||||
from mathutils import Matrix
|
from mathutils import Matrix
|
||||||
|
|
||||||
|
|
||||||
|
_axis_enum_items = [
|
||||||
|
("x", "X", "", 1),
|
||||||
|
("y", "Y", "", 2),
|
||||||
|
("z", "Z", "", 3),
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class AutoKeying:
|
class AutoKeying:
|
||||||
"""Auto-keying support.
|
"""Auto-keying support.
|
||||||
|
|
||||||
@ -235,6 +242,10 @@ class OBJECT_OT_copy_global_transform(Operator):
|
|||||||
return {'FINISHED'}
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class UnableToMirrorError(Exception):
|
||||||
|
"""Raised when mirroring is enabled but no mirror object/bone is set."""
|
||||||
|
|
||||||
|
|
||||||
class OBJECT_OT_paste_transform(Operator):
|
class OBJECT_OT_paste_transform(Operator):
|
||||||
bl_idname = "object.paste_transform"
|
bl_idname = "object.paste_transform"
|
||||||
bl_label = "Paste Global Transform"
|
bl_label = "Paste Global Transform"
|
||||||
@ -273,12 +284,33 @@ class OBJECT_OT_paste_transform(Operator):
|
|||||||
soft_max=5,
|
soft_max=5,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
use_mirror: bpy.props.BoolProperty( # type: ignore
|
||||||
|
name="Mirror Transform",
|
||||||
|
description="When pasting, mirror the transform relative to a specific object or bone",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
mirror_axis_loc: bpy.props.EnumProperty( # type: ignore
|
||||||
|
items=_axis_enum_items,
|
||||||
|
name="Location Axis",
|
||||||
|
description="Coordinate axis used to mirror the location part of the transform",
|
||||||
|
default='x',
|
||||||
|
)
|
||||||
|
mirror_axis_rot: bpy.props.EnumProperty( # type: ignore
|
||||||
|
items=_axis_enum_items,
|
||||||
|
name="Rotation Axis",
|
||||||
|
description="Coordinate axis used to mirror the rotation part of the transform",
|
||||||
|
default='z',
|
||||||
|
)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def poll(cls, context: Context) -> bool:
|
def poll(cls, context: Context) -> bool:
|
||||||
if not context.active_pose_bone and not context.active_object:
|
if not context.active_pose_bone and not context.active_object:
|
||||||
cls.poll_message_set("Select an object or pose bone")
|
cls.poll_message_set("Select an object or pose bone")
|
||||||
return False
|
return False
|
||||||
if not context.window_manager.clipboard.startswith("Matrix("):
|
|
||||||
|
clipboard = context.window_manager.clipboard.strip()
|
||||||
|
if not (clipboard.startswith("Matrix(") or clipboard.startswith("<Matrix 4x4")):
|
||||||
cls.poll_message_set("Clipboard does not contain a valid matrix")
|
cls.poll_message_set("Clipboard does not contain a valid matrix")
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
@ -297,10 +329,23 @@ class OBJECT_OT_paste_transform(Operator):
|
|||||||
floats = tuple(tuple(float(item) for item in line.split()) for line in lines)
|
floats = tuple(tuple(float(item) for item in line.split()) for line in lines)
|
||||||
return Matrix(floats)
|
return Matrix(floats)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def parse_repr_m4(value: str) -> Optional[Matrix]:
|
||||||
|
"""Four lines of (a, b, c, d) floats."""
|
||||||
|
|
||||||
|
lines = value.strip().splitlines()
|
||||||
|
if len(lines) != 4:
|
||||||
|
return None
|
||||||
|
|
||||||
|
floats = tuple(tuple(float(item.strip()) for item in line.strip()[1:-1].split(',')) for line in lines)
|
||||||
|
return Matrix(floats)
|
||||||
|
|
||||||
def execute(self, context: Context) -> set[str]:
|
def execute(self, context: Context) -> set[str]:
|
||||||
clipboard = context.window_manager.clipboard
|
clipboard = context.window_manager.clipboard.strip()
|
||||||
if clipboard.startswith("Matrix"):
|
if clipboard.startswith("Matrix"):
|
||||||
mat = Matrix(ast.literal_eval(clipboard[6:]))
|
mat = Matrix(ast.literal_eval(clipboard[6:]))
|
||||||
|
elif clipboard.startswith("<Matrix 4x4"):
|
||||||
|
mat = self.parse_repr_m4(clipboard[12:-1])
|
||||||
else:
|
else:
|
||||||
mat = self.parse_print_m4(clipboard)
|
mat = self.parse_print_m4(clipboard)
|
||||||
|
|
||||||
@ -308,6 +353,12 @@ class OBJECT_OT_paste_transform(Operator):
|
|||||||
self.report({'ERROR'}, "Clipboard does not contain a valid matrix")
|
self.report({'ERROR'}, "Clipboard does not contain a valid matrix")
|
||||||
return {'CANCELLED'}
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
try:
|
||||||
|
mat = self._maybe_mirror(context, mat)
|
||||||
|
except UnableToMirrorError:
|
||||||
|
self.report({'ERROR'}, "Unable to mirror, no mirror object/bone configured")
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
applicator = {
|
applicator = {
|
||||||
'CURRENT': self._paste_current,
|
'CURRENT': self._paste_current,
|
||||||
'EXISTING_KEYS': self._paste_existing_keys,
|
'EXISTING_KEYS': self._paste_existing_keys,
|
||||||
@ -315,6 +366,68 @@ class OBJECT_OT_paste_transform(Operator):
|
|||||||
}[self.method]
|
}[self.method]
|
||||||
return applicator(context, mat)
|
return applicator(context, mat)
|
||||||
|
|
||||||
|
def _maybe_mirror(self, context: Context, matrix: Matrix) -> Matrix:
|
||||||
|
if not self.use_mirror:
|
||||||
|
return matrix
|
||||||
|
|
||||||
|
mirror_ob = context.scene.addon_copy_global_transform_mirror_ob
|
||||||
|
mirror_bone = context.scene.addon_copy_global_transform_mirror_bone
|
||||||
|
|
||||||
|
# No mirror object means "current armature object".
|
||||||
|
ctx_ob = context.object
|
||||||
|
if not mirror_ob and mirror_bone and ctx_ob and ctx_ob.type == 'ARMATURE':
|
||||||
|
mirror_ob = ctx_ob
|
||||||
|
|
||||||
|
if not mirror_ob:
|
||||||
|
raise UnableToMirrorError()
|
||||||
|
|
||||||
|
if mirror_ob.type == 'ARMATURE' and mirror_bone:
|
||||||
|
return self._mirror_over_bone(matrix, mirror_ob, mirror_bone)
|
||||||
|
return self._mirror_over_ob(matrix, mirror_ob)
|
||||||
|
|
||||||
|
def _mirror_over_ob(self, matrix: Matrix, mirror_ob: bpy.types.Object) -> Matrix:
|
||||||
|
mirror_matrix = mirror_ob.matrix_world
|
||||||
|
return self._mirror_over_matrix(matrix, mirror_matrix)
|
||||||
|
|
||||||
|
def _mirror_over_bone(self, matrix: Matrix, mirror_ob: bpy.types.Object, mirror_bone_name: str) -> Matrix:
|
||||||
|
bone = mirror_ob.pose.bones[mirror_bone_name]
|
||||||
|
mirror_matrix = mirror_ob.matrix_world @ bone.matrix
|
||||||
|
return self._mirror_over_matrix(matrix, mirror_matrix)
|
||||||
|
|
||||||
|
def _mirror_over_matrix(self, matrix: Matrix, mirror_matrix: Matrix) -> Matrix:
|
||||||
|
# Compute the matrix in the space of the mirror matrix:
|
||||||
|
mat_local = mirror_matrix.inverted() @ matrix
|
||||||
|
|
||||||
|
# Decompose the matrix, as we don't want to touch the scale. This
|
||||||
|
# operator should only mirror the translation and rotation components.
|
||||||
|
trans, rot_q, scale = mat_local.decompose()
|
||||||
|
|
||||||
|
# Mirror the translation component:
|
||||||
|
axis_index = ord(self.mirror_axis_loc) - ord('x')
|
||||||
|
trans[axis_index] *= -1
|
||||||
|
|
||||||
|
# Flip the rotation, and use a rotation order that applies the to-be-flipped axes first.
|
||||||
|
match self.mirror_axis_rot:
|
||||||
|
case 'x':
|
||||||
|
rot_e = rot_q.to_euler('XYZ')
|
||||||
|
rot_e.x *= -1 # Flip the requested rotation axis.
|
||||||
|
rot_e.y *= -1 # Also flip the bone roll.
|
||||||
|
case 'y':
|
||||||
|
rot_e = rot_q.to_euler('YZX')
|
||||||
|
rot_e.y *= -1 # Flip the requested rotation axis.
|
||||||
|
rot_e.z *= -1 # Also flip another axis? Not sure how to handle this one.
|
||||||
|
case 'z':
|
||||||
|
rot_e = rot_q.to_euler('ZYX')
|
||||||
|
rot_e.z *= -1 # Flip the requested rotation axis.
|
||||||
|
rot_e.y *= -1 # Also flip the bone roll.
|
||||||
|
|
||||||
|
# Recompose the local matrix:
|
||||||
|
mat_local = Matrix.LocRotScale(trans, rot_e, scale)
|
||||||
|
|
||||||
|
# Go back to world space:
|
||||||
|
mirrored_world = mirror_matrix @ mat_local
|
||||||
|
return mirrored_world
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _paste_current(context: Context, matrix: Matrix) -> set[str]:
|
def _paste_current(context: Context, matrix: Matrix) -> set[str]:
|
||||||
set_matrix(context, matrix)
|
set_matrix(context, matrix)
|
||||||
@ -370,10 +483,13 @@ class OBJECT_OT_paste_transform(Operator):
|
|||||||
context.scene.frame_set(int(current_frame), subframe=current_frame % 1.0)
|
context.scene.frame_set(int(current_frame), subframe=current_frame % 1.0)
|
||||||
|
|
||||||
|
|
||||||
class VIEW3D_PT_copy_global_transform(Panel):
|
class PanelMixin:
|
||||||
bl_space_type = 'VIEW_3D'
|
bl_space_type = 'VIEW_3D'
|
||||||
bl_region_type = 'UI'
|
bl_region_type = 'UI'
|
||||||
bl_category = "Animation"
|
bl_category = "Animation"
|
||||||
|
|
||||||
|
|
||||||
|
class VIEW3D_PT_copy_global_transform(PanelMixin, Panel):
|
||||||
bl_label = "Global Transform"
|
bl_label = "Global Transform"
|
||||||
|
|
||||||
def draw(self, context: Context) -> None:
|
def draw(self, context: Context) -> None:
|
||||||
@ -383,7 +499,15 @@ class VIEW3D_PT_copy_global_transform(Panel):
|
|||||||
layout.operator("object.copy_global_transform", text="Copy", icon='COPYDOWN')
|
layout.operator("object.copy_global_transform", text="Copy", icon='COPYDOWN')
|
||||||
|
|
||||||
paste_col = layout.column(align=True)
|
paste_col = layout.column(align=True)
|
||||||
paste_col.operator("object.paste_transform", text="Paste", icon='PASTEDOWN').method = 'CURRENT'
|
|
||||||
|
paste_row = paste_col.row(align=True)
|
||||||
|
paste_props = paste_row.operator("object.paste_transform", text="Paste", icon='PASTEDOWN')
|
||||||
|
paste_props.method = 'CURRENT'
|
||||||
|
paste_props.use_mirror = False
|
||||||
|
paste_props = paste_row.operator("object.paste_transform", text="Mirrored", icon='PASTEFLIPDOWN')
|
||||||
|
paste_props.method = 'CURRENT'
|
||||||
|
paste_props.use_mirror = True
|
||||||
|
|
||||||
wants_autokey_col = paste_col.column(align=True)
|
wants_autokey_col = paste_col.column(align=True)
|
||||||
has_autokey = context.scene.tool_settings.use_keyframe_insert_auto
|
has_autokey = context.scene.tool_settings.use_keyframe_insert_auto
|
||||||
wants_autokey_col.enabled = has_autokey
|
wants_autokey_col.enabled = has_autokey
|
||||||
@ -402,6 +526,42 @@ class VIEW3D_PT_copy_global_transform(Panel):
|
|||||||
).method = 'BAKE'
|
).method = 'BAKE'
|
||||||
|
|
||||||
|
|
||||||
|
class VIEW3D_PT_copy_global_transform_mirror(PanelMixin, Panel):
|
||||||
|
bl_label = "Mirror Options"
|
||||||
|
bl_parent_id = "VIEW3D_PT_copy_global_transform"
|
||||||
|
|
||||||
|
def draw(self, context: Context) -> None:
|
||||||
|
layout = self.layout
|
||||||
|
scene = context.scene
|
||||||
|
layout.prop(scene, 'addon_copy_global_transform_mirror_ob', text="Object")
|
||||||
|
|
||||||
|
mirror_ob = scene.addon_copy_global_transform_mirror_ob
|
||||||
|
if mirror_ob is None:
|
||||||
|
# No explicit mirror object means "the current armature", so then the bone name should be editable.
|
||||||
|
if context.object and context.object.type == 'ARMATURE':
|
||||||
|
self._bone_search(layout, scene, context.object)
|
||||||
|
else:
|
||||||
|
self._bone_entry(layout, scene)
|
||||||
|
elif mirror_ob.type == 'ARMATURE':
|
||||||
|
self._bone_search(layout, scene, mirror_ob)
|
||||||
|
|
||||||
|
def _bone_search(self, layout: UILayout, scene: bpy.types.Scene, armature_ob: bpy.types.Object) -> None:
|
||||||
|
"""Search within the bones of the given armature."""
|
||||||
|
assert armature_ob and armature_ob.type == 'ARMATURE'
|
||||||
|
|
||||||
|
layout.prop_search(
|
||||||
|
scene,
|
||||||
|
"addon_copy_global_transform_mirror_bone",
|
||||||
|
armature_ob.data,
|
||||||
|
"edit_bones" if armature_ob.mode == 'EDIT' else "bones",
|
||||||
|
text="Bone",
|
||||||
|
)
|
||||||
|
|
||||||
|
def _bone_entry(self, layout: UILayout, scene: bpy.types.Scene) -> None:
|
||||||
|
"""Allow manual entry of a bone name."""
|
||||||
|
layout.prop(scene, "addon_copy_global_transform_mirror_bone", text="Bone")
|
||||||
|
|
||||||
|
|
||||||
### Messagebus subscription to monitor changes & refresh panels.
|
### Messagebus subscription to monitor changes & refresh panels.
|
||||||
_msgbus_owner = object()
|
_msgbus_owner = object()
|
||||||
|
|
||||||
@ -419,6 +579,7 @@ classes = (
|
|||||||
OBJECT_OT_copy_global_transform,
|
OBJECT_OT_copy_global_transform,
|
||||||
OBJECT_OT_paste_transform,
|
OBJECT_OT_paste_transform,
|
||||||
VIEW3D_PT_copy_global_transform,
|
VIEW3D_PT_copy_global_transform,
|
||||||
|
VIEW3D_PT_copy_global_transform_mirror,
|
||||||
)
|
)
|
||||||
_register, _unregister = bpy.utils.register_classes_factory(classes)
|
_register, _unregister = bpy.utils.register_classes_factory(classes)
|
||||||
|
|
||||||
@ -447,8 +608,28 @@ def register():
|
|||||||
_register()
|
_register()
|
||||||
bpy.app.handlers.load_post.append(_on_blendfile_load_post)
|
bpy.app.handlers.load_post.append(_on_blendfile_load_post)
|
||||||
|
|
||||||
|
# The mirror object & bone name are stored on the scene, and not on the
|
||||||
|
# operator. This makes it possible to set up the operator for use in a
|
||||||
|
# certain scene, while keeping hotkey assignments working as usual.
|
||||||
|
#
|
||||||
|
# The goal is to allow hotkeys for "copy", "paste", and "paste mirrored",
|
||||||
|
# while keeping the other choices in a more global place.
|
||||||
|
bpy.types.Scene.addon_copy_global_transform_mirror_ob = bpy.props.PointerProperty(
|
||||||
|
type=bpy.types.Object,
|
||||||
|
name="Mirror Object",
|
||||||
|
description="Object to mirror over. Leave empty and name a bone to always mirror "
|
||||||
|
"over that bone of the active armature",
|
||||||
|
)
|
||||||
|
bpy.types.Scene.addon_copy_global_transform_mirror_bone = bpy.props.StringProperty(
|
||||||
|
name="Mirror Bone",
|
||||||
|
description="Bone to use for the mirroring",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def unregister():
|
def unregister():
|
||||||
_unregister()
|
_unregister()
|
||||||
_unregister_message_bus()
|
_unregister_message_bus()
|
||||||
bpy.app.handlers.load_post.remove(_on_blendfile_load_post)
|
bpy.app.handlers.load_post.remove(_on_blendfile_load_post)
|
||||||
|
|
||||||
|
del bpy.types.Scene.addon_copy_global_transform_mirror_ob
|
||||||
|
del bpy.types.Scene.addon_copy_global_transform_mirror_bone
|
||||||
|
@ -13,6 +13,7 @@ bl_info = {
|
|||||||
"version": (0, 1),
|
"version": (0, 1),
|
||||||
"blender": (2, 80, 0),
|
"blender": (2, 80, 0),
|
||||||
"description": "Various dependency graph debugging tools",
|
"description": "Various dependency graph debugging tools",
|
||||||
|
"location": "Properties > View Layer > Dependency Graph",
|
||||||
"warning": "",
|
"warning": "",
|
||||||
"doc_url": "",
|
"doc_url": "",
|
||||||
"tracker_url": "",
|
"tracker_url": "",
|
||||||
|
@ -3,14 +3,13 @@
|
|||||||
# Script copyright (C) Campbell Barton, Bastien Montagne
|
# Script copyright (C) Campbell Barton, Bastien Montagne
|
||||||
|
|
||||||
|
|
||||||
import array
|
|
||||||
import datetime
|
import datetime
|
||||||
import math
|
import math
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import os
|
import os
|
||||||
import time
|
import time
|
||||||
|
|
||||||
from itertools import zip_longest, chain
|
from itertools import zip_longest
|
||||||
from functools import cache
|
from functools import cache
|
||||||
|
|
||||||
if "bpy" in locals():
|
if "bpy" in locals():
|
||||||
@ -51,7 +50,7 @@ from .fbx_utils import (
|
|||||||
matrix4_to_array, similar_values, shape_difference_exclude_similar, astype_view_signedness, fast_first_axis_unique,
|
matrix4_to_array, similar_values, shape_difference_exclude_similar, astype_view_signedness, fast_first_axis_unique,
|
||||||
fast_first_axis_flat,
|
fast_first_axis_flat,
|
||||||
# Mesh transform helpers.
|
# Mesh transform helpers.
|
||||||
vcos_transformed_gen, vcos_transformed, nors_transformed,
|
vcos_transformed, nors_transformed,
|
||||||
# UUID from key.
|
# UUID from key.
|
||||||
get_fbx_uuid_from_key,
|
get_fbx_uuid_from_key,
|
||||||
# Key generators.
|
# Key generators.
|
||||||
@ -932,6 +931,26 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
|
|||||||
me.edges.foreach_get("vertices", t_ev)
|
me.edges.foreach_get("vertices", t_ev)
|
||||||
me.loops.foreach_get("edge_index", t_lei)
|
me.loops.foreach_get("edge_index", t_lei)
|
||||||
|
|
||||||
|
# Polygons might not be in the same order as loops. To export per-loop and per-polygon data in a matching order,
|
||||||
|
# one must be set into the order of the other. Since there are fewer polygons than loops and there are usually
|
||||||
|
# more geometry layers exported that are per-loop than per-polygon, it's more efficient to re-order polygons and
|
||||||
|
# per-polygon data.
|
||||||
|
perm_polygons_to_loop_order = None
|
||||||
|
# t_ls indicates the ordering of polygons compared to loops. When t_ls is sorted, polygons and loops are in the same
|
||||||
|
# order. Since each loop must be assigned to exactly one polygon for the mesh to be valid, every value in t_ls must
|
||||||
|
# be unique, so t_ls will be monotonically increasing when sorted.
|
||||||
|
# t_ls is expected to be in the same order as loops in most cases since exiting Edit mode will sort t_ls, so do an
|
||||||
|
# initial check for any element being smaller than the previous element to determine if sorting is required.
|
||||||
|
sort_polygon_data = np.any(t_ls[1:] < t_ls[:-1])
|
||||||
|
if sort_polygon_data:
|
||||||
|
# t_ls is not sorted, so get the indices that would sort t_ls using argsort, these will be re-used to sort
|
||||||
|
# per-polygon data.
|
||||||
|
# Using 'stable' for radix sort, which performs much better with partially ordered data and slightly worse with
|
||||||
|
# completely random data, compared to the default of 'quicksort' for introsort.
|
||||||
|
perm_polygons_to_loop_order = np.argsort(t_ls, kind='stable')
|
||||||
|
# Sort t_ls into the same order as loops.
|
||||||
|
t_ls = t_ls[perm_polygons_to_loop_order]
|
||||||
|
|
||||||
# Add "fake" faces for loose edges. Each "fake" face consists of two loops creating a new 2-sided polygon.
|
# Add "fake" faces for loose edges. Each "fake" face consists of two loops creating a new 2-sided polygon.
|
||||||
if scene_data.settings.use_mesh_edges:
|
if scene_data.settings.use_mesh_edges:
|
||||||
bl_edge_is_loose_dtype = bool
|
bl_edge_is_loose_dtype = bool
|
||||||
@ -999,7 +1018,8 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
|
|||||||
|
|
||||||
# We have to ^-1 last index of each loop.
|
# We have to ^-1 last index of each loop.
|
||||||
# Ensure t_pvi is the correct number of bits before inverting.
|
# Ensure t_pvi is the correct number of bits before inverting.
|
||||||
t_pvi = astype_view_signedness(t_lvi, pvi_fbx_dtype)
|
# t_lvi may be used again later, so always create a copy to avoid modifying it in the next step.
|
||||||
|
t_pvi = t_lvi.astype(pvi_fbx_dtype)
|
||||||
# The index of the end of each loop is one before the index of the start of the next loop.
|
# The index of the end of each loop is one before the index of the start of the next loop.
|
||||||
t_pvi[t_ls[1:] - 1] ^= -1
|
t_pvi[t_ls[1:] - 1] ^= -1
|
||||||
# The index of the end of the last loop will be the very last index.
|
# The index of the end of the last loop will be the very last index.
|
||||||
@ -1015,7 +1035,6 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
|
|||||||
t_eli = astype_view_signedness(t_eli, eli_fbx_dtype)
|
t_eli = astype_view_signedness(t_eli, eli_fbx_dtype)
|
||||||
elem_data_single_int32_array(geom, b"PolygonVertexIndex", t_pvi)
|
elem_data_single_int32_array(geom, b"PolygonVertexIndex", t_pvi)
|
||||||
elem_data_single_int32_array(geom, b"Edges", t_eli)
|
elem_data_single_int32_array(geom, b"Edges", t_eli)
|
||||||
del t_lvi
|
|
||||||
del t_pvi
|
del t_pvi
|
||||||
del t_eli
|
del t_eli
|
||||||
del t_ev
|
del t_ev
|
||||||
@ -1032,6 +1051,8 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
|
|||||||
if smooth_type == 'FACE':
|
if smooth_type == 'FACE':
|
||||||
t_ps = np.empty(len(me.polygons), dtype=poly_use_smooth_dtype)
|
t_ps = np.empty(len(me.polygons), dtype=poly_use_smooth_dtype)
|
||||||
me.polygons.foreach_get("use_smooth", t_ps)
|
me.polygons.foreach_get("use_smooth", t_ps)
|
||||||
|
if sort_polygon_data:
|
||||||
|
t_ps = t_ps[perm_polygons_to_loop_order]
|
||||||
_map = b"ByPolygon"
|
_map = b"ByPolygon"
|
||||||
else: # EDGE
|
else: # EDGE
|
||||||
_map = b"ByEdge"
|
_map = b"ByEdge"
|
||||||
@ -1050,14 +1071,17 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
|
|||||||
# Get the 'use_smooth' attribute of all polygons.
|
# Get the 'use_smooth' attribute of all polygons.
|
||||||
p_use_smooth_mask = np.empty(mesh_poly_nbr, dtype=poly_use_smooth_dtype)
|
p_use_smooth_mask = np.empty(mesh_poly_nbr, dtype=poly_use_smooth_dtype)
|
||||||
me.polygons.foreach_get('use_smooth', p_use_smooth_mask)
|
me.polygons.foreach_get('use_smooth', p_use_smooth_mask)
|
||||||
|
if sort_polygon_data:
|
||||||
|
p_use_smooth_mask = p_use_smooth_mask[perm_polygons_to_loop_order]
|
||||||
# Invert to get all flat shaded polygons.
|
# Invert to get all flat shaded polygons.
|
||||||
p_flat_mask = np.invert(p_use_smooth_mask, out=p_use_smooth_mask)
|
p_flat_mask = np.invert(p_use_smooth_mask, out=p_use_smooth_mask)
|
||||||
# Convert flat shaded polygons to flat shaded loops by repeating each element by the number of sides of
|
# Convert flat shaded polygons to flat shaded loops by repeating each element by the number of sides of
|
||||||
# that polygon.
|
# that polygon.
|
||||||
# Polygon sides can be calculated from the element-wise difference of loop starts appended by the number
|
# Polygon sides can be calculated from the element-wise difference of sorted loop starts appended by the
|
||||||
# of loops. Alternatively, polygon sides can be retrieved directly from the 'loop_total' attribute of
|
# number of loops. Alternatively, polygon sides can be retrieved directly from the 'loop_total'
|
||||||
# polygons, but since we already have t_ls, it tends to be quicker to calculate from t_ls when above
|
# attribute of polygons, but that might need to be sorted, and we already have t_ls which is sorted loop
|
||||||
# around 10_000 polygons.
|
# starts. It tends to be quicker to calculate from t_ls when above around 10_000 polygons even when the
|
||||||
|
# 'loop_total' array wouldn't need sorting.
|
||||||
polygon_sides = np.diff(mesh_t_ls_view, append=mesh_loop_nbr)
|
polygon_sides = np.diff(mesh_t_ls_view, append=mesh_loop_nbr)
|
||||||
p_flat_loop_mask = np.repeat(p_flat_mask, polygon_sides)
|
p_flat_loop_mask = np.repeat(p_flat_mask, polygon_sides)
|
||||||
# Convert flat shaded loops to flat shaded (sharp) edge indices.
|
# Convert flat shaded loops to flat shaded (sharp) edge indices.
|
||||||
@ -1168,7 +1192,7 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
|
|||||||
|
|
||||||
elem_data_single_float64_array(lay_nor, b"Normals", t_ln)
|
elem_data_single_float64_array(lay_nor, b"Normals", t_ln)
|
||||||
# Normal weights, no idea what it is.
|
# Normal weights, no idea what it is.
|
||||||
# t_lnw = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(t_ln)
|
# t_lnw = np.zeros(len(t_ln), dtype=np.float64)
|
||||||
# elem_data_single_float64_array(lay_nor, b"NormalsW", t_lnw)
|
# elem_data_single_float64_array(lay_nor, b"NormalsW", t_lnw)
|
||||||
|
|
||||||
elem_data_single_int32_array(lay_nor, b"NormalsIndex", t_lnidx)
|
elem_data_single_int32_array(lay_nor, b"NormalsIndex", t_lnidx)
|
||||||
@ -1183,7 +1207,7 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
|
|||||||
elem_data_single_string(lay_nor, b"ReferenceInformationType", b"Direct")
|
elem_data_single_string(lay_nor, b"ReferenceInformationType", b"Direct")
|
||||||
elem_data_single_float64_array(lay_nor, b"Normals", t_ln)
|
elem_data_single_float64_array(lay_nor, b"Normals", t_ln)
|
||||||
# Normal weights, no idea what it is.
|
# Normal weights, no idea what it is.
|
||||||
# t_ln = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.loops)
|
# t_ln = np.zeros(len(me.loops), dtype=np.float64)
|
||||||
# elem_data_single_float64_array(lay_nor, b"NormalsW", t_ln)
|
# elem_data_single_float64_array(lay_nor, b"NormalsW", t_ln)
|
||||||
del t_ln
|
del t_ln
|
||||||
|
|
||||||
@ -1205,7 +1229,7 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
|
|||||||
del t_lt
|
del t_lt
|
||||||
num_loops = len(me.loops)
|
num_loops = len(me.loops)
|
||||||
t_ln = np.empty(num_loops * 3, dtype=ln_bl_dtype)
|
t_ln = np.empty(num_loops * 3, dtype=ln_bl_dtype)
|
||||||
# t_lnw = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.loops)
|
# t_lnw = np.zeros(len(me.loops), dtype=np.float64)
|
||||||
uv_names = [uvlayer.name for uvlayer in me.uv_layers]
|
uv_names = [uvlayer.name for uvlayer in me.uv_layers]
|
||||||
# Annoying, `me.calc_tangent` errors in case there is no geometry...
|
# Annoying, `me.calc_tangent` errors in case there is no geometry...
|
||||||
if num_loops > 0:
|
if num_loops > 0:
|
||||||
@ -1252,15 +1276,13 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
|
|||||||
color_prop_name = "color_srgb" if colors_type == 'SRGB' else "color"
|
color_prop_name = "color_srgb" if colors_type == 'SRGB' else "color"
|
||||||
# ByteColorAttribute color also gets returned by the API as single precision float
|
# ByteColorAttribute color also gets returned by the API as single precision float
|
||||||
bl_lc_dtype = np.single
|
bl_lc_dtype = np.single
|
||||||
bl_lvi_dtype = np.uintc
|
|
||||||
fbx_lc_dtype = np.float64
|
fbx_lc_dtype = np.float64
|
||||||
fbx_lcidx_dtype = np.int32
|
fbx_lcidx_dtype = np.int32
|
||||||
t_lvi = None
|
|
||||||
|
|
||||||
color_attributes = me.color_attributes
|
color_attributes = me.color_attributes
|
||||||
if scene_data.settings.prioritize_active_color:
|
if scene_data.settings.prioritize_active_color:
|
||||||
active_color = me.color_attributes.active_color
|
active_color = me.color_attributes.active_color
|
||||||
color_attributes = sorted(color_attributes, key=lambda x: x == active_color, reverse=True)
|
color_attributes = sorted(color_attributes, key=lambda x: x == active_color, reverse=True)
|
||||||
|
|
||||||
for colindex, collayer in enumerate(color_attributes):
|
for colindex, collayer in enumerate(color_attributes):
|
||||||
is_point = collayer.domain == "POINT"
|
is_point = collayer.domain == "POINT"
|
||||||
@ -1282,10 +1304,8 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
|
|||||||
# with a "ByVertex" mapping type, but some software does not
|
# with a "ByVertex" mapping type, but some software does not
|
||||||
# properly understand that. So expand to full "ByPolygonVertex"
|
# properly understand that. So expand to full "ByPolygonVertex"
|
||||||
# index map.
|
# index map.
|
||||||
if t_lvi is None:
|
# Ignore loops added for loose edges.
|
||||||
t_lvi = np.empty(len(me.loops), dtype=bl_lvi_dtype)
|
col_indices = col_indices[t_lvi[:len(me.loops)]]
|
||||||
me.loops.foreach_get("vertex_index", t_lvi)
|
|
||||||
col_indices = col_indices[t_lvi]
|
|
||||||
|
|
||||||
t_lc = t_lc.astype(fbx_lc_dtype, copy=False)
|
t_lc = t_lc.astype(fbx_lc_dtype, copy=False)
|
||||||
col_indices = astype_view_signedness(col_indices, fbx_lcidx_dtype)
|
col_indices = astype_view_signedness(col_indices, fbx_lcidx_dtype)
|
||||||
@ -1295,7 +1315,6 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
|
|||||||
|
|
||||||
del t_lc
|
del t_lc
|
||||||
del col_indices
|
del col_indices
|
||||||
del t_lvi
|
|
||||||
|
|
||||||
# Write UV layers.
|
# Write UV layers.
|
||||||
# Note: LayerElementTexture is deprecated since FBX 2011 - luckily!
|
# Note: LayerElementTexture is deprecated since FBX 2011 - luckily!
|
||||||
@ -1304,7 +1323,6 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
|
|||||||
if uvnumber:
|
if uvnumber:
|
||||||
luv_bl_dtype = np.single
|
luv_bl_dtype = np.single
|
||||||
luv_fbx_dtype = np.float64
|
luv_fbx_dtype = np.float64
|
||||||
lv_idx_bl_dtype = np.uintc
|
|
||||||
lv_idx_fbx_dtype = np.int32
|
lv_idx_fbx_dtype = np.int32
|
||||||
|
|
||||||
t_luv = np.empty(len(me.loops) * 2, dtype=luv_bl_dtype)
|
t_luv = np.empty(len(me.loops) * 2, dtype=luv_bl_dtype)
|
||||||
@ -1315,8 +1333,8 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
|
|||||||
|
|
||||||
# Looks like this mapping is also expected to convey UV islands (arg..... :((((( ).
|
# Looks like this mapping is also expected to convey UV islands (arg..... :((((( ).
|
||||||
# So we need to generate unique triplets (uv, vertex_idx) here, not only just based on UV values.
|
# So we need to generate unique triplets (uv, vertex_idx) here, not only just based on UV values.
|
||||||
t_lvidx = np.empty(len(me.loops), dtype=lv_idx_bl_dtype)
|
# Ignore loops added for loose edges.
|
||||||
me.loops.foreach_get("vertex_index", t_lvidx)
|
t_lvidx = t_lvi[:len(me.loops)]
|
||||||
|
|
||||||
# If we were to create a combined array of (uv, vertex_idx) elements, we could find unique triplets by sorting
|
# If we were to create a combined array of (uv, vertex_idx) elements, we could find unique triplets by sorting
|
||||||
# that array by first sorting by the vertex_idx column and then sorting by the uv column using a stable sorting
|
# that array by first sorting by the vertex_idx column and then sorting by the uv column using a stable sorting
|
||||||
@ -1407,6 +1425,7 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
|
|||||||
del t_lvidx
|
del t_lvidx
|
||||||
del t_luv
|
del t_luv
|
||||||
del t_luv_fast_pair_view
|
del t_luv_fast_pair_view
|
||||||
|
del t_lvi
|
||||||
|
|
||||||
# Face's materials.
|
# Face's materials.
|
||||||
me_fbxmaterials_idx = scene_data.mesh_material_indices.get(me)
|
me_fbxmaterials_idx = scene_data.mesh_material_indices.get(me)
|
||||||
@ -1423,6 +1442,8 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
|
|||||||
fbx_pm_dtype = np.int32
|
fbx_pm_dtype = np.int32
|
||||||
t_pm = np.empty(len(me.polygons), dtype=bl_pm_dtype)
|
t_pm = np.empty(len(me.polygons), dtype=bl_pm_dtype)
|
||||||
me.polygons.foreach_get("material_index", t_pm)
|
me.polygons.foreach_get("material_index", t_pm)
|
||||||
|
if sort_polygon_data:
|
||||||
|
t_pm = t_pm[perm_polygons_to_loop_order]
|
||||||
|
|
||||||
# We have to validate mat indices, and map them to FBX indices.
|
# We have to validate mat indices, and map them to FBX indices.
|
||||||
# Note a mat might not be in me_fbxmaterials_idx (e.g. node mats are ignored).
|
# Note a mat might not be in me_fbxmaterials_idx (e.g. node mats are ignored).
|
||||||
@ -1453,6 +1474,7 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
|
|||||||
elem_data_single_string(lay_ma, b"MappingInformationType", b"AllSame")
|
elem_data_single_string(lay_ma, b"MappingInformationType", b"AllSame")
|
||||||
elem_data_single_string(lay_ma, b"ReferenceInformationType", b"IndexToDirect")
|
elem_data_single_string(lay_ma, b"ReferenceInformationType", b"IndexToDirect")
|
||||||
elem_data_single_int32_array(lay_ma, b"Materials", [0])
|
elem_data_single_int32_array(lay_ma, b"Materials", [0])
|
||||||
|
del perm_polygons_to_loop_order
|
||||||
|
|
||||||
# And the "layer TOC"...
|
# And the "layer TOC"...
|
||||||
|
|
||||||
|
@ -295,12 +295,6 @@ def shape_difference_exclude_similar(sv_cos, ref_cos, e=1e-6):
|
|||||||
return difference_cos, not_similar_verts_idx
|
return difference_cos, not_similar_verts_idx
|
||||||
|
|
||||||
|
|
||||||
def vcos_transformed_gen(raw_cos, m=None):
|
|
||||||
# Note: we could most likely get much better performances with numpy, but will leave this as TODO for now.
|
|
||||||
gen = zip(*(iter(raw_cos),) * 3)
|
|
||||||
return gen if m is None else (m @ Vector(v) for v in gen)
|
|
||||||
|
|
||||||
|
|
||||||
def _mat4_vec3_array_multiply(mat4, vec3_array, dtype=None, return_4d=False):
|
def _mat4_vec3_array_multiply(mat4, vec3_array, dtype=None, return_4d=False):
|
||||||
"""Multiply a 4d matrix by each 3d vector in an array and return as an array of either 3d or 4d vectors.
|
"""Multiply a 4d matrix by each 3d vector in an array and return as an array of either 3d or 4d vectors.
|
||||||
|
|
||||||
|
@ -4,7 +4,7 @@
|
|||||||
bl_info = {
|
bl_info = {
|
||||||
'name': 'glTF 2.0 format',
|
'name': 'glTF 2.0 format',
|
||||||
'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
|
'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
|
||||||
"version": (3, 6, 5),
|
"version": (3, 6, 6),
|
||||||
'blender': (3, 5, 0),
|
'blender': (3, 5, 0),
|
||||||
'location': 'File > Import-Export',
|
'location': 'File > Import-Export',
|
||||||
'description': 'Import-Export as glTF 2.0',
|
'description': 'Import-Export as glTF 2.0',
|
||||||
|
@ -7,6 +7,8 @@ import numpy as np
|
|||||||
from ...io.imp.gltf2_io_user_extensions import import_user_extensions
|
from ...io.imp.gltf2_io_user_extensions import import_user_extensions
|
||||||
from ...io.com.gltf2_io_debug import print_console
|
from ...io.com.gltf2_io_debug import print_console
|
||||||
from ...io.imp.gltf2_io_binary import BinaryData
|
from ...io.imp.gltf2_io_binary import BinaryData
|
||||||
|
from ...io.com.gltf2_io_constants import DataType, ComponentType
|
||||||
|
from ...blender.com.gltf2_blender_conversion import get_attribute_type
|
||||||
from ..com.gltf2_blender_extras import set_extras
|
from ..com.gltf2_blender_extras import set_extras
|
||||||
from .gltf2_blender_material import BlenderMaterial
|
from .gltf2_blender_material import BlenderMaterial
|
||||||
from .gltf2_io_draco_compression_extension import decode_primitive
|
from .gltf2_io_draco_compression_extension import decode_primitive
|
||||||
|
@ -146,7 +146,8 @@ class NWMergeShadersMenu(Menu, NWBase):
|
|||||||
def draw(self, context):
|
def draw(self, context):
|
||||||
layout = self.layout
|
layout = self.layout
|
||||||
for type in ('MIX', 'ADD'):
|
for type in ('MIX', 'ADD'):
|
||||||
props = layout.operator(operators.NWMergeNodes.bl_idname, text=type)
|
name = f'{type.capitalize()} Shader'
|
||||||
|
props = layout.operator(operators.NWMergeNodes.bl_idname, text=name)
|
||||||
props.mode = type
|
props.mode = type
|
||||||
props.merge_type = 'SHADER'
|
props.merge_type = 'SHADER'
|
||||||
|
|
||||||
|
@ -1304,6 +1304,8 @@ class NWMergeNodes(Operator, NWBase):
|
|||||||
if tree_type == 'GEOMETRY':
|
if tree_type == 'GEOMETRY':
|
||||||
if nodes_list is selected_math or nodes_list is selected_vector or nodes_list is selected_mix:
|
if nodes_list is selected_math or nodes_list is selected_vector or nodes_list is selected_mix:
|
||||||
node_type = 'ShaderNode'
|
node_type = 'ShaderNode'
|
||||||
|
if mode == 'MIX':
|
||||||
|
mode = 'ADD'
|
||||||
else:
|
else:
|
||||||
node_type = 'GeometryNode'
|
node_type = 'GeometryNode'
|
||||||
if merge_position == 'CENTER':
|
if merge_position == 'CENTER':
|
||||||
|
@ -3,7 +3,7 @@
|
|||||||
bl_info = {
|
bl_info = {
|
||||||
"name": "3D-Print Toolbox",
|
"name": "3D-Print Toolbox",
|
||||||
"author": "Campbell Barton",
|
"author": "Campbell Barton",
|
||||||
"blender": (3, 0, 0),
|
"blender": (3, 6, 0),
|
||||||
"location": "3D View > Sidebar",
|
"location": "3D View > Sidebar",
|
||||||
"description": "Utilities for 3D printing",
|
"description": "Utilities for 3D printing",
|
||||||
"doc_url": "{BLENDER_MANUAL_URL}/addons/mesh/3d_print_toolbox.html",
|
"doc_url": "{BLENDER_MANUAL_URL}/addons/mesh/3d_print_toolbox.html",
|
||||||
|
@ -79,7 +79,8 @@ def write_mesh(context, report_cb):
|
|||||||
name = data_("untitled")
|
name = data_("untitled")
|
||||||
|
|
||||||
# add object name
|
# add object name
|
||||||
name += f"-{bpy.path.clean_name(obj.name)}"
|
import re
|
||||||
|
name += "-" + re.sub(r'[\\/:*?"<>|]', "", obj.name)
|
||||||
|
|
||||||
# first ensure the path is created
|
# first ensure the path is created
|
||||||
if export_path:
|
if export_path:
|
||||||
@ -113,17 +114,16 @@ def write_mesh(context, report_cb):
|
|||||||
global_scale=global_scale,
|
global_scale=global_scale,
|
||||||
)
|
)
|
||||||
elif export_format == 'PLY':
|
elif export_format == 'PLY':
|
||||||
addon_ensure("io_mesh_ply")
|
|
||||||
filepath = bpy.path.ensure_ext(filepath, ".ply")
|
filepath = bpy.path.ensure_ext(filepath, ".ply")
|
||||||
ret = bpy.ops.export_mesh.ply(
|
ret = bpy.ops.wm.ply_export(
|
||||||
filepath=filepath,
|
filepath=filepath,
|
||||||
use_ascii=False,
|
ascii_format=False,
|
||||||
use_mesh_modifiers=True,
|
apply_modifiers=True,
|
||||||
use_selection=True,
|
export_selected_objects=True,
|
||||||
global_scale=global_scale,
|
global_scale=global_scale,
|
||||||
use_normals=export_data_layers,
|
export_normals=export_data_layers,
|
||||||
use_uv_coords=export_data_layers,
|
export_uv=export_data_layers,
|
||||||
use_colors=export_data_layers,
|
export_colors="SRGB" if export_data_layers else "NONE",
|
||||||
)
|
)
|
||||||
elif export_format == 'X3D':
|
elif export_format == 'X3D':
|
||||||
addon_ensure("io_scene_x3d")
|
addon_ensure("io_scene_x3d")
|
||||||
|
@ -64,7 +64,7 @@ def pose_library_list_item_context_menu(self: UIList, context: Context) -> None:
|
|||||||
list = getattr(context, "ui_list", None)
|
list = getattr(context, "ui_list", None)
|
||||||
if not list or list.bl_idname != "UI_UL_asset_view" or list.list_id != "pose_assets":
|
if not list or list.bl_idname != "UI_UL_asset_view" or list.list_id != "pose_assets":
|
||||||
return False
|
return False
|
||||||
if not context.asset_handle:
|
if not context.active_file:
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
@ -60,9 +60,9 @@ class POSELIB_OT_create_pose_asset(PoseAssetCreator, Operator):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def poll(cls, context: Context) -> bool:
|
def poll(cls, context: Context) -> bool:
|
||||||
if context.object.mode != "POSE":
|
if context.object is None or context.object.mode != "POSE":
|
||||||
# The operator assumes pose mode, so that bone selection is visible.
|
# The operator assumes pose mode, so that bone selection is visible.
|
||||||
cls.poll_message_set("The object must be in Pose mode")
|
cls.poll_message_set("An active armature object in pose mode is needed")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# Make sure that if there is an asset browser open, the artist can see the newly created pose asset.
|
# Make sure that if there is an asset browser open, the artist can see the newly created pose asset.
|
||||||
|
@ -812,6 +812,14 @@ class STORYPENCIL_OT_TabSwitch(Operator):
|
|||||||
bl_options = {'INTERNAL'}
|
bl_options = {'INTERNAL'}
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self, context):
|
||||||
|
# For meta strips the tab key must be processed by other operator, so
|
||||||
|
# just pass through to the next operator in the stack.
|
||||||
|
if context.active_sequence_strip and context.active_sequence_strip.type == 'META':
|
||||||
|
return {'PASS_THROUGH'}
|
||||||
|
|
||||||
|
if context.scene.sequence_editor and context.scene.sequence_editor.meta_stack:
|
||||||
|
return {'PASS_THROUGH'}
|
||||||
|
|
||||||
if context.scene.storypencil_use_new_window:
|
if context.scene.storypencil_use_new_window:
|
||||||
bpy.ops.storypencil.sync_set_main('INVOKE_DEFAULT', True)
|
bpy.ops.storypencil.sync_set_main('INVOKE_DEFAULT', True)
|
||||||
else:
|
else:
|
||||||
@ -821,15 +829,7 @@ class STORYPENCIL_OT_TabSwitch(Operator):
|
|||||||
# Get strip under time cursor
|
# Get strip under time cursor
|
||||||
strip, old_frame = get_sequence_at_frame(
|
strip, old_frame = get_sequence_at_frame(
|
||||||
scene.frame_current, sequences=sequences)
|
scene.frame_current, sequences=sequences)
|
||||||
# For meta strips the tab key must be processed by other operator, so
|
if strip and strip.type == 'SCENE':
|
||||||
# just pass through to the next operator in the stack.
|
|
||||||
if strip is None or strip.type != 'SCENE':
|
|
||||||
if context.active_sequence_strip and context.active_sequence_strip.type == 'META':
|
|
||||||
return {'PASS_THROUGH'}
|
|
||||||
|
|
||||||
if context.scene.sequence_editor and context.scene.sequence_editor.meta_stack:
|
|
||||||
return {'PASS_THROUGH'}
|
|
||||||
else:
|
|
||||||
bpy.ops.storypencil.switch('INVOKE_DEFAULT', True)
|
bpy.ops.storypencil.switch('INVOKE_DEFAULT', True)
|
||||||
|
|
||||||
return {'FINISHED'}
|
return {'FINISHED'}
|
||||||
|
@ -15,8 +15,8 @@
|
|||||||
|
|
||||||
bl_info = {
|
bl_info = {
|
||||||
"name": "Sun Position",
|
"name": "Sun Position",
|
||||||
"author": "Michael Martin",
|
"author": "Michael Martin, Damien Picard",
|
||||||
"version": (3, 2, 2),
|
"version": (3, 3, 3),
|
||||||
"blender": (3, 0, 0),
|
"blender": (3, 0, 0),
|
||||||
"location": "World > Sun Position",
|
"location": "World > Sun Position",
|
||||||
"description": "Show sun position with objects and/or sky texture",
|
"description": "Show sun position with objects and/or sky texture",
|
||||||
@ -63,6 +63,7 @@ def register():
|
|||||||
bpy.app.handlers.load_post.append(sun_scene_handler)
|
bpy.app.handlers.load_post.append(sun_scene_handler)
|
||||||
bpy.app.translations.register(__name__, translations.translations_dict)
|
bpy.app.translations.register(__name__, translations.translations_dict)
|
||||||
|
|
||||||
|
|
||||||
def unregister():
|
def unregister():
|
||||||
bpy.app.translations.unregister(__name__)
|
bpy.app.translations.unregister(__name__)
|
||||||
bpy.app.handlers.frame_change_post.remove(sun_calc.sun_handler)
|
bpy.app.handlers.frame_change_post.remove(sun_calc.sun_handler)
|
||||||
|
@ -23,10 +23,6 @@ else:
|
|||||||
shader_info.vertex_out(shader_interface)
|
shader_info.vertex_out(shader_interface)
|
||||||
|
|
||||||
shader_info.vertex_source(
|
shader_info.vertex_source(
|
||||||
# uniform mat4 u_ViewProjectionMatrix;
|
|
||||||
# in vec3 position;
|
|
||||||
# flat out vec2 v_StartPos;
|
|
||||||
# out vec4 v_VertPos;
|
|
||||||
'''
|
'''
|
||||||
void main()
|
void main()
|
||||||
{
|
{
|
||||||
@ -40,11 +36,6 @@ else:
|
|||||||
|
|
||||||
shader_info.fragment_out(0, 'VEC4', "FragColor")
|
shader_info.fragment_out(0, 'VEC4', "FragColor")
|
||||||
shader_info.fragment_source(
|
shader_info.fragment_source(
|
||||||
# uniform vec4 u_Color;
|
|
||||||
# uniform vec2 u_Resolution;
|
|
||||||
# flat in vec2 v_StartPos;
|
|
||||||
# in vec4 v_VertPos;
|
|
||||||
# out vec4 FragColor;
|
|
||||||
'''
|
'''
|
||||||
void main()
|
void main()
|
||||||
{
|
{
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
# Copyright 2010 Maximilian Hoegner <hp.maxi@hoegners.de>.
|
# Copyright 2010 Maximilian Hoegner <hp.maxi@hoegners.de>.
|
||||||
|
|
||||||
# geo.py is a python module with no dependencies on extra packages,
|
# geo.py is a python module with no dependencies on extra packages,
|
||||||
@ -51,7 +51,7 @@ class Parser:
|
|||||||
# do matching
|
# do matching
|
||||||
m = re.match(pattern, text)
|
m = re.match(pattern, text)
|
||||||
|
|
||||||
if m == None:
|
if m is None:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
# build tree recursively by parsing subgroups
|
# build tree recursively by parsing subgroups
|
||||||
@ -59,7 +59,7 @@ class Parser:
|
|||||||
|
|
||||||
for i in range(len(subpattern_names)):
|
for i in range(len(subpattern_names)):
|
||||||
text_part = m.group(i + 1)
|
text_part = m.group(i + 1)
|
||||||
if not text_part == None:
|
if text_part is not None:
|
||||||
subpattern = subpattern_names[i]
|
subpattern = subpattern_names[i]
|
||||||
tree[subpattern] = self.parse(subpattern, text_part)
|
tree[subpattern] = self.parse(subpattern, text_part)
|
||||||
|
|
||||||
@ -158,7 +158,8 @@ def parse_position(s):
|
|||||||
Tries to be as tolerant as possible with input. Returns None if parsing doesn't succeed. """
|
Tries to be as tolerant as possible with input. Returns None if parsing doesn't succeed. """
|
||||||
|
|
||||||
parse_tree = position_parser.parse("position", s)
|
parse_tree = position_parser.parse("position", s)
|
||||||
if parse_tree == None: return None
|
if parse_tree is None:
|
||||||
|
return None
|
||||||
|
|
||||||
lat_sign = +1.
|
lat_sign = +1.
|
||||||
if parse_tree.get(
|
if parse_tree.get(
|
||||||
|
@ -64,8 +64,7 @@ def draw_callback_px(self, context):
|
|||||||
coords = ((-0.5, -0.5), (0.5, -0.5), (0.5, 0.5), (-0.5, 0.5))
|
coords = ((-0.5, -0.5), (0.5, -0.5), (0.5, 0.5), (-0.5, 0.5))
|
||||||
uv_coords = ((0, 0), (1, 0), (1, 1), (0, 1))
|
uv_coords = ((0, 0), (1, 0), (1, 1), (0, 1))
|
||||||
batch = batch_for_shader(shader, 'TRI_FAN',
|
batch = batch_for_shader(shader, 'TRI_FAN',
|
||||||
{"pos" : coords,
|
{"pos": coords, "texCoord": uv_coords})
|
||||||
"texCoord" : uv_coords})
|
|
||||||
|
|
||||||
with gpu.matrix.push_pop():
|
with gpu.matrix.push_pop():
|
||||||
gpu.matrix.translate(position)
|
gpu.matrix.translate(position)
|
||||||
@ -79,7 +78,7 @@ def draw_callback_px(self, context):
|
|||||||
# Crosshair
|
# Crosshair
|
||||||
# vertical
|
# vertical
|
||||||
coords = ((self.mouse_position[0], bottom), (self.mouse_position[0], top))
|
coords = ((self.mouse_position[0], bottom), (self.mouse_position[0], top))
|
||||||
colors = ((1,)*4,)*2
|
colors = ((1,) * 4,) * 2
|
||||||
shader = gpu.shader.from_builtin('2D_FLAT_COLOR')
|
shader = gpu.shader.from_builtin('2D_FLAT_COLOR')
|
||||||
batch = batch_for_shader(shader, 'LINES',
|
batch = batch_for_shader(shader, 'LINES',
|
||||||
{"pos": coords, "color": colors})
|
{"pos": coords, "color": colors})
|
||||||
@ -134,7 +133,9 @@ class SUNPOS_OT_ShowHdr(bpy.types.Operator):
|
|||||||
self.mouse_position = Vector((mouse_position_abs.x - self.area.x,
|
self.mouse_position = Vector((mouse_position_abs.x - self.area.x,
|
||||||
mouse_position_abs.y - self.area.y))
|
mouse_position_abs.y - self.area.y))
|
||||||
|
|
||||||
self.selected_point = (self.mouse_position - self.offset - Vector((self.right, self.top))/2) / self.scale
|
self.selected_point = (self.mouse_position
|
||||||
|
- self.offset
|
||||||
|
- Vector((self.right, self.top)) / 2) / self.scale
|
||||||
u = self.selected_point.x / self.area.width + 0.5
|
u = self.selected_point.x / self.area.width + 0.5
|
||||||
v = (self.selected_point.y) / (self.area.width / 2) + 0.5
|
v = (self.selected_point.y) / (self.area.width / 2) + 0.5
|
||||||
|
|
||||||
@ -275,10 +276,13 @@ class SUNPOS_OT_ShowHdr(bpy.types.Operator):
|
|||||||
self.initial_elevation = context.scene.sun_pos_properties.hdr_elevation
|
self.initial_elevation = context.scene.sun_pos_properties.hdr_elevation
|
||||||
self.initial_azimuth = context.scene.sun_pos_properties.hdr_azimuth
|
self.initial_azimuth = context.scene.sun_pos_properties.hdr_azimuth
|
||||||
|
|
||||||
context.workspace.status_text_set("Enter/LMB: confirm, Esc/RMB: cancel, MMB: pan, mouse wheel: zoom, Ctrl + mouse wheel: set exposure")
|
context.workspace.status_text_set(
|
||||||
|
"Enter/LMB: confirm, Esc/RMB: cancel,"
|
||||||
|
" MMB: pan, mouse wheel: zoom, Ctrl + mouse wheel: set exposure")
|
||||||
|
|
||||||
self._handle = bpy.types.SpaceView3D.draw_handler_add(draw_callback_px,
|
self._handle = bpy.types.SpaceView3D.draw_handler_add(
|
||||||
(self, context), 'WINDOW', 'POST_PIXEL')
|
draw_callback_px, (self, context), 'WINDOW', 'POST_PIXEL'
|
||||||
|
)
|
||||||
context.window_manager.modal_handler_add(self)
|
context.window_manager.modal_handler_add(self)
|
||||||
|
|
||||||
return {'RUNNING_MODAL'}
|
return {'RUNNING_MODAL'}
|
||||||
|
@ -5,7 +5,7 @@ from bpy.types import AddonPreferences, PropertyGroup
|
|||||||
from bpy.props import (StringProperty, EnumProperty, IntProperty,
|
from bpy.props import (StringProperty, EnumProperty, IntProperty,
|
||||||
FloatProperty, BoolProperty, PointerProperty)
|
FloatProperty, BoolProperty, PointerProperty)
|
||||||
|
|
||||||
from .sun_calc import sun_update, parse_coordinates, surface_update, analemmas_update
|
from .sun_calc import sun_update, parse_coordinates, surface_update, analemmas_update, sun
|
||||||
from .draw import north_update
|
from .draw import north_update
|
||||||
|
|
||||||
from math import pi
|
from math import pi
|
||||||
@ -19,7 +19,7 @@ TODAY = datetime.today()
|
|||||||
|
|
||||||
class SunPosProperties(PropertyGroup):
|
class SunPosProperties(PropertyGroup):
|
||||||
usage_mode: EnumProperty(
|
usage_mode: EnumProperty(
|
||||||
name="Usage mode",
|
name="Usage Mode",
|
||||||
description="Operate in normal mode or environment texture mode",
|
description="Operate in normal mode or environment texture mode",
|
||||||
items=(
|
items=(
|
||||||
('NORMAL', "Normal", ""),
|
('NORMAL', "Normal", ""),
|
||||||
@ -29,14 +29,14 @@ class SunPosProperties(PropertyGroup):
|
|||||||
update=sun_update)
|
update=sun_update)
|
||||||
|
|
||||||
use_daylight_savings: BoolProperty(
|
use_daylight_savings: BoolProperty(
|
||||||
name="Daylight savings",
|
name="Daylight Savings",
|
||||||
description="Daylight savings time adds 1 hour to standard time",
|
description="Daylight savings time adds 1 hour to standard time",
|
||||||
default=False,
|
default=False,
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
|
|
||||||
use_refraction: BoolProperty(
|
use_refraction: BoolProperty(
|
||||||
name="Use refraction",
|
name="Use Refraction",
|
||||||
description="Show apparent sun position due to refraction",
|
description="Show apparent Sun position due to refraction",
|
||||||
default=True,
|
default=True,
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
|
|
||||||
@ -81,6 +81,34 @@ class SunPosProperties(PropertyGroup):
|
|||||||
default=0.0,
|
default=0.0,
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
|
|
||||||
|
sunrise_time: FloatProperty(
|
||||||
|
name="Sunrise Time",
|
||||||
|
description="Time at which the Sun rises",
|
||||||
|
soft_min=0.0, soft_max=24.0,
|
||||||
|
default=0.0,
|
||||||
|
get=lambda _: sun.sunrise)
|
||||||
|
|
||||||
|
sunset_time: FloatProperty(
|
||||||
|
name="Sunset Time",
|
||||||
|
description="Time at which the Sun sets",
|
||||||
|
soft_min=0.0, soft_max=24.0,
|
||||||
|
default=0.0,
|
||||||
|
get=lambda _: sun.sunset)
|
||||||
|
|
||||||
|
sun_azimuth: FloatProperty(
|
||||||
|
name="Sun Azimuth",
|
||||||
|
description="Rotation angle of the Sun from the north direction",
|
||||||
|
soft_min=-pi, soft_max=pi,
|
||||||
|
default=0.0,
|
||||||
|
get=lambda _: sun.azimuth)
|
||||||
|
|
||||||
|
sun_elevation: FloatProperty(
|
||||||
|
name="Sunset Time",
|
||||||
|
description="Elevation angle of the Sun",
|
||||||
|
soft_min=-pi/2, soft_max=pi/2,
|
||||||
|
default=0.0,
|
||||||
|
get=lambda _: sun.elevation)
|
||||||
|
|
||||||
co_parser: StringProperty(
|
co_parser: StringProperty(
|
||||||
name="Enter coordinates",
|
name="Enter coordinates",
|
||||||
description="Enter coordinates from an online map",
|
description="Enter coordinates from an online map",
|
||||||
|
@ -4,9 +4,10 @@ import bpy
|
|||||||
from bpy.app.handlers import persistent
|
from bpy.app.handlers import persistent
|
||||||
import gpu
|
import gpu
|
||||||
from gpu_extras.batch import batch_for_shader
|
from gpu_extras.batch import batch_for_shader
|
||||||
|
|
||||||
from mathutils import Euler, Vector
|
from mathutils import Euler, Vector
|
||||||
import math
|
|
||||||
from math import degrees, radians, pi
|
from math import degrees, radians, pi, sin, cos, asin, acos, tan, floor
|
||||||
import datetime
|
import datetime
|
||||||
from .geo import parse_position
|
from .geo import parse_position
|
||||||
|
|
||||||
@ -15,21 +16,14 @@ class SunInfo:
|
|||||||
"""
|
"""
|
||||||
Store intermediate sun calculations
|
Store intermediate sun calculations
|
||||||
"""
|
"""
|
||||||
class TAzEl:
|
|
||||||
time = 0.0
|
|
||||||
azimuth = 0.0
|
|
||||||
elevation = 0.0
|
|
||||||
|
|
||||||
class CLAMP:
|
class SunBind:
|
||||||
azimuth = 0.0
|
azimuth = 0.0
|
||||||
elevation = 0.0
|
elevation = 0.0
|
||||||
az_start_sun = 0.0
|
az_start_sun = 0.0
|
||||||
az_start_env = 0.0
|
az_start_env = 0.0
|
||||||
|
|
||||||
sunrise = TAzEl()
|
bind = SunBind()
|
||||||
sunset = TAzEl()
|
|
||||||
|
|
||||||
bind = CLAMP()
|
|
||||||
bind_to_sun = False
|
bind_to_sun = False
|
||||||
|
|
||||||
latitude = 0.0
|
latitude = 0.0
|
||||||
@ -37,6 +31,9 @@ class SunInfo:
|
|||||||
elevation = 0.0
|
elevation = 0.0
|
||||||
azimuth = 0.0
|
azimuth = 0.0
|
||||||
|
|
||||||
|
sunrise = 0.0
|
||||||
|
sunset = 0.0
|
||||||
|
|
||||||
month = 0
|
month = 0
|
||||||
day = 0
|
day = 0
|
||||||
year = 0
|
year = 0
|
||||||
@ -47,6 +44,7 @@ class SunInfo:
|
|||||||
sun_distance = 0.0
|
sun_distance = 0.0
|
||||||
use_daylight_savings = False
|
use_daylight_savings = False
|
||||||
|
|
||||||
|
|
||||||
sun = SunInfo()
|
sun = SunInfo()
|
||||||
|
|
||||||
|
|
||||||
@@ -78,8 +76,8 @@ def parse_coordinates(self, context):
 
 def move_sun(context):
     """
-    Cycle through all the selected objects and call set_sun_location and
-    set_sun_rotations to place them in the sky
+    Cycle through all the selected objects and set their position and rotation
+    in the sky.
     """
     addon_prefs = context.preferences.addons[__package__].preferences
     sun_props = context.scene.sun_pos_properties
@@ -100,11 +98,9 @@ def move_sun(context):
             env_tex.texture_mapping.rotation.z = az
 
         if sun_props.sun_object:
-            theta = math.pi / 2 - sun_props.hdr_elevation
-            phi = -sun_props.hdr_azimuth
-
             obj = sun_props.sun_object
-            obj.location = get_sun_vector(theta, phi) * sun_props.sun_distance
+            obj.location = get_sun_vector(
+                sun_props.hdr_azimuth, sun_props.hdr_elevation) * sun_props.sun_distance
 
             rotation_euler = Euler((sun_props.hdr_elevation - pi/2,
                                     0, -sun_props.hdr_azimuth))
@@ -118,34 +114,33 @@ def move_sun(context):
     if sun.use_daylight_savings:
         zone -= 1
 
-    north_offset = degrees(sun_props.north_offset)
-
     if addon_prefs.show_rise_set:
         calc_sunrise_sunset(rise=True)
         calc_sunrise_sunset(rise=False)
 
-    az_north, theta, phi, azimuth, elevation = get_sun_coordinates(
+    azimuth, elevation = get_sun_coordinates(
         local_time, sun_props.latitude, sun_props.longitude,
-        north_offset, zone, sun_props.month, sun_props.day, sun_props.year,
+        zone, sun_props.month, sun_props.day, sun_props.year,
         sun_props.sun_distance)
 
     sun.azimuth = azimuth
     sun.elevation = elevation
+    sun_vector = get_sun_vector(azimuth, elevation)
 
     if sun_props.sky_texture:
         sky_node = bpy.context.scene.world.node_tree.nodes.get(sun_props.sky_texture)
         if sky_node is not None and sky_node.type == "TEX_SKY":
             sky_node.texture_mapping.rotation.z = 0.0
-            sky_node.sun_direction = get_sun_vector(theta, phi)
-            sky_node.sun_elevation = math.radians(elevation)
-            sky_node.sun_rotation = math.radians(az_north)
+            sky_node.sun_direction = sun_vector
+            sky_node.sun_elevation = elevation
+            sky_node.sun_rotation = azimuth
 
     # Sun object
     if (sun_props.sun_object is not None
             and sun_props.sun_object.name in context.view_layer.objects):
         obj = sun_props.sun_object
-        obj.location = get_sun_vector(theta, phi) * sun_props.sun_distance
-        rotation_euler = Euler((math.radians(elevation - 90), 0,
-                                math.radians(-az_north)))
+        obj.location = sun_vector * sun_props.sun_distance
+        rotation_euler = Euler((elevation - pi/2, 0, -azimuth))
         set_sun_rotations(obj, rotation_euler)
 
     # Sun collection
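The new `Euler((elevation - pi/2, 0, -azimuth))` can be sanity-checked outside Blender's scene data: a sun lamp shines along its local -Z axis, and rotating that axis by this Euler must point it back from the sun toward the scene. A small standalone check (editor's sketch; the inline trig is just get_sun_vector() with phi/theta substituted out):

    from math import pi, sin, cos, radians
    from mathutils import Euler, Vector   # Blender's bundled math types

    az, el = radians(135.0), radians(30.0)        # arbitrary test angles, in radians
    sun_dir = Vector((sin(az) * cos(el),          # same vector get_sun_vector(az, el) returns
                      cos(az) * cos(el),
                      sin(el)))
    lamp_dir = Euler((el - pi / 2, 0, -az)).to_matrix() @ Vector((0.0, 0.0, -1.0))
    assert (lamp_dir + sun_dir).length < 1e-6     # the lamp's -Z ends up anti-parallel to the sun direction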
@@ -161,16 +156,14 @@ def move_sun(context):
                 time_increment = sun_props.time_spread
 
             for obj in sun_objects:
-                az_north, theta, phi, azimuth, elevation = get_sun_coordinates(
+                azimuth, elevation = get_sun_coordinates(
                     local_time, sun_props.latitude,
-                    sun_props.longitude, north_offset, zone,
+                    sun_props.longitude, zone,
                     sun_props.month, sun_props.day,
                     sun_props.year, sun_props.sun_distance)
-                obj.location = get_sun_vector(theta, phi) * sun_props.sun_distance
+                obj.location = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
                 local_time -= time_increment
-                obj.rotation_euler = (
-                    (math.radians(elevation - 90), 0,
-                     math.radians(-az_north)))
+                obj.rotation_euler = ((elevation - pi/2, 0, -azimuth))
         else:
             # Analemma
             day_increment = 365 / object_count
@@ -178,22 +171,21 @@ def move_sun(context):
             for obj in sun_objects:
                 dt = (datetime.date(sun_props.year, 1, 1) +
                       datetime.timedelta(day - 1))
-                az_north, theta, phi, azimuth, elevation = get_sun_coordinates(
+                azimuth, elevation = get_sun_coordinates(
                     local_time, sun_props.latitude,
-                    sun_props.longitude, north_offset, zone,
+                    sun_props.longitude, zone,
                     dt.month, dt.day, sun_props.year,
                     sun_props.sun_distance)
-                obj.location = get_sun_vector(theta, phi) * sun_props.sun_distance
+                obj.location = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
                 day -= day_increment
-                obj.rotation_euler = (
-                    (math.radians(elevation - 90), 0,
-                     math.radians(-az_north)))
+                obj.rotation_euler = (elevation - pi/2, 0, -azimuth)
 
 
 def day_of_year_to_month_day(year, day_of_year):
     dt = (datetime.date(year, 1, 1) + datetime.timedelta(day_of_year - 1))
     return dt.day, dt.month
 
 
 def month_day_to_day_of_year(year, month, day):
     dt = datetime.date(year, month, day)
     return dt.timetuple().tm_yday
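The two helpers above are untouched by the patch, but a quick usage note helps when reading the analemma loop: they simply round-trip through datetime.date (editor's check):

    import datetime

    def day_of_year_to_month_day(year, day_of_year):
        dt = (datetime.date(year, 1, 1) + datetime.timedelta(day_of_year - 1))
        return dt.day, dt.month

    def month_day_to_day_of_year(year, month, day):
        return datetime.date(year, month, day).timetuple().tm_yday

    day, month = day_of_year_to_month_day(2023, 60)     # (1, 3): day 60 of a non-leap year is March 1st
    assert month_day_to_day_of_year(2023, month, day) == 60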
@@ -275,7 +267,7 @@ def format_lat_long(lat_long, is_latitude):
     return hh + "° " + mm + "' " + ss + '"' + coord_tag
 
 
-def get_sun_coordinates(local_time, latitude, longitude, north_offset,
+def get_sun_coordinates(local_time, latitude, longitude,
                         utc_zone, month, day, year, distance):
     """
     Calculate the actual position of the sun based on input parameters.
@@ -319,31 +311,31 @@ def get_sun_coordinates(local_time, latitude, longitude, north_offset,
     if hour_angle < -180.0:
         hour_angle += 360.0
 
-    csz = (math.sin(latitude) * math.sin(solar_dec) +
-           math.cos(latitude) * math.cos(solar_dec) *
-           math.cos(radians(hour_angle)))
+    csz = (sin(latitude) * sin(solar_dec) +
+           cos(latitude) * cos(solar_dec) *
+           cos(radians(hour_angle)))
     if csz > 1.0:
         csz = 1.0
     elif csz < -1.0:
         csz = -1.0
 
-    zenith = math.acos(csz)
+    zenith = acos(csz)
 
-    az_denom = math.cos(latitude) * math.sin(zenith)
+    az_denom = cos(latitude) * sin(zenith)
 
     if abs(az_denom) > 0.001:
-        az_rad = ((math.sin(latitude) *
-                   math.cos(zenith)) - math.sin(solar_dec)) / az_denom
+        az_rad = ((sin(latitude) *
+                   cos(zenith)) - sin(solar_dec)) / az_denom
         if abs(az_rad) > 1.0:
             az_rad = -1.0 if (az_rad < 0.0) else 1.0
-        azimuth = 180.0 - degrees(math.acos(az_rad))
+        azimuth = pi - acos(az_rad)
         if hour_angle > 0.0:
             azimuth = -azimuth
     else:
-        azimuth = 180.0 if (latitude > 0.0) else 0.0
+        azimuth = pi if (latitude > 0.0) else 0.0
 
     if azimuth < 0.0:
-        azimuth = azimuth + 360.0
+        azimuth += 2*pi
 
     exoatm_elevation = 90.0 - degrees(zenith)
 
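For reference, the hunk above is the usual NOAA/Meeus zenith-and-azimuth relation, now evaluated in radians end to end. With φ the latitude, δ the solar declination, H the hour angle and θ_z the zenith angle:

    cos(θ_z) = sin(φ)·sin(δ) + cos(φ)·cos(δ)·cos(H)
    cos(A)   = (sin(δ) - sin(φ)·cos(θ_z)) / (cos(φ)·sin(θ_z))

where A is the azimuth measured from north. The code evaluates the same angle as pi - acos((sin(φ)·cos(θ_z) - sin(δ)) / (cos(φ)·sin(θ_z))), flips its sign for afternoon hour angles, and now wraps negative results by adding 2π rather than 360°.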
@@ -351,43 +343,39 @@ def get_sun_coordinates(local_time, latitude, longitude, north_offset,
         if exoatm_elevation > 85.0:
             refraction_correction = 0.0
         else:
-            te = math.tan(radians(exoatm_elevation))
+            te = tan(radians(exoatm_elevation))
             if exoatm_elevation > 5.0:
                 refraction_correction = (
                     58.1 / te - 0.07 / (te ** 3) + 0.000086 / (te ** 5))
-            elif (exoatm_elevation > -0.575):
-                s1 = (-12.79 + exoatm_elevation * 0.711)
-                s2 = (103.4 + exoatm_elevation * (s1))
-                s3 = (-518.2 + exoatm_elevation * (s2))
+            elif exoatm_elevation > -0.575:
+                s1 = -12.79 + exoatm_elevation * 0.711
+                s2 = 103.4 + exoatm_elevation * s1
+                s3 = -518.2 + exoatm_elevation * s2
                 refraction_correction = 1735.0 + exoatm_elevation * (s3)
             else:
                 refraction_correction = -20.774 / te
 
-        refraction_correction = refraction_correction / 3600
-        solar_elevation = 90.0 - (degrees(zenith) - refraction_correction)
+        refraction_correction /= 3600
+        elevation = pi/2 - (zenith - radians(refraction_correction))
 
     else:
-        solar_elevation = 90.0 - degrees(zenith)
+        elevation = pi/2 - zenith
 
-    solar_azimuth = azimuth
-    solar_azimuth += north_offset
+    azimuth += sun_props.north_offset
 
-    az_north = solar_azimuth
-    theta = math.pi / 2 - radians(solar_elevation)
-    phi = radians(solar_azimuth) * -1
-    azimuth = azimuth
-    elevation = solar_elevation
-
-    return az_north, theta, phi, azimuth, elevation
+    return azimuth, elevation
 
 
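A quick spot-check of the refraction branch above (editor's evaluation of the code exactly as shown): at a true elevation of 0° the polynomial gives 1735 arc seconds, about 0.48° of apparent lift at the horizon, which the rewrite now folds into `elevation` in radians instead of degrees:

    exoatm_elevation = 0.0                                     # true (exoatmospheric) elevation, in degrees
    s1 = -12.79 + exoatm_elevation * 0.711
    s2 = 103.4 + exoatm_elevation * s1
    s3 = -518.2 + exoatm_elevation * s2
    refraction_correction = 1735.0 + exoatm_elevation * s3     # arc seconds
    print(refraction_correction / 3600)                        # ~0.482 degrees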
-def get_sun_vector(theta, phi):
+def get_sun_vector(azimuth, elevation):
     """
     Convert the sun coordinates to cartesian
     """
-    loc_x = math.sin(phi) * math.sin(-theta)
-    loc_y = math.sin(theta) * math.cos(phi)
-    loc_z = math.cos(theta)
+    phi = -azimuth
+    theta = pi/2 - elevation
+
+    loc_x = sin(phi) * sin(-theta)
+    loc_y = sin(theta) * cos(phi)
+    loc_z = cos(theta)
     return Vector((loc_x, loc_y, loc_z))
 
 
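The rewritten get_sun_vector() now takes azimuth/elevation in radians directly; with phi and theta substituted out it reduces to (sin az·cos el, cos az·cos el, sin el), i.e. north on +Y, east on +X and the zenith on +Z. A minimal standalone check of that reading (editor's sketch, not part of the patch):

    from math import sin, cos, pi, isclose

    def sun_vector_xyz(azimuth, elevation):
        # Same result as get_sun_vector() above, written without the phi/theta detour.
        return (sin(azimuth) * cos(elevation),
                cos(azimuth) * cos(elevation),
                sin(elevation))

    assert sun_vector_xyz(0.0, 0.0) == (0.0, 1.0, 0.0)     # due north on the horizon -> +Y
    assert isclose(sun_vector_xyz(pi / 2, 0.0)[0], 1.0)    # due east -> +X
    assert isclose(sun_vector_xyz(0.0, pi / 2)[2], 1.0)    # zenith -> +Z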
@@ -426,14 +414,14 @@ def calc_sun_declination(t):
 
 def calc_hour_angle_sunrise(lat, solar_dec):
     lat_rad = radians(lat)
-    HAarg = (math.cos(radians(90.833)) /
-             (math.cos(lat_rad) * math.cos(solar_dec))
-             - math.tan(lat_rad) * math.tan(solar_dec))
+    HAarg = (cos(radians(90.833)) /
+             (cos(lat_rad) * cos(solar_dec))
+             - tan(lat_rad) * tan(solar_dec))
     if HAarg < -1.0:
         HAarg = -1.0
     elif HAarg > 1.0:
         HAarg = 1.0
-    HA = math.acos(HAarg)
+    HA = acos(HAarg)
     return HA
 
 
@@ -458,8 +446,8 @@ def calc_sunrise_sunset(rise):
                                     sun.latitude, sun.longitude)
     time_local = new_time_UTC + (-zone * 60.0)
     tl = time_local / 60.0
-    az_north, theta, phi, azimuth, elevation = get_sun_coordinates(
-        tl, sun.latitude, sun.longitude, 0.0,
+    azimuth, elevation = get_sun_coordinates(
+        tl, sun.latitude, sun.longitude,
         zone, sun.month, sun.day, sun.year,
         sun.sun_distance)
     if sun.use_daylight_savings:
@@ -467,13 +455,9 @@ def calc_sunrise_sunset(rise):
     tl = time_local / 60.0
     tl %= 24.0
     if rise:
-        sun.sunrise.time = tl
-        sun.sunrise.azimuth = azimuth
-        sun.sunrise.elevation = elevation
+        sun.sunrise = tl
     else:
-        sun.sunset.time = tl
-        sun.sunset.azimuth = azimuth
-        sun.sunset.elevation = elevation
+        sun.sunset = tl
 
 
 def julian_time_from_y2k(utc_time, year, month, day):
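With sun.sunrise and sun.sunset reduced to plain decimal local hours, the panel code just passes them to format_hms(). Purely to illustrate what those floats hold, a hypothetical formatter (not the add-on's own format_hms()):

    def hms(decimal_hours):
        # Hypothetical helper: show a decimal-hour value as hh:mm:ss.
        h = int(decimal_hours)
        m = int((decimal_hours - h) * 60)
        s = int(round((decimal_hours - h - m / 60) * 3600))
        return f"{h:02d}:{m:02d}:{s:02d}"

    print(hms(6.5))    # "06:30:00" -- e.g. a sunrise at half past six, local time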
@@ -491,10 +475,10 @@ def get_julian_day(year, month, day):
     if month <= 2:
         year -= 1
         month += 12
-    A = math.floor(year / 100)
-    B = 2 - A + math.floor(A / 4.0)
-    jd = (math.floor((365.25 * (year + 4716.0))) +
-          math.floor(30.6001 * (month + 1)) + day + B - 1524.5)
+    A = floor(year / 100)
+    B = 2 - A + floor(A / 4.0)
+    jd = (floor((365.25 * (year + 4716.0))) +
+          floor(30.6001 * (month + 1)) + day + B - 1524.5)
     return jd
 
 
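The Julian-day arithmetic is unchanged apart from the bare floor() calls; it can be spot-checked against a well-known value, 2000-01-01 00:00 UTC = JD 2451544.5 (editor's check, reusing the function body exactly as shown above):

    from math import floor

    def get_julian_day(year, month, day):
        if month <= 2:
            year -= 1
            month += 12
        A = floor(year / 100)
        B = 2 - A + floor(A / 4.0)
        jd = (floor((365.25 * (year + 4716.0))) +
              floor(30.6001 * (month + 1)) + day + B - 1524.5)
        return jd

    assert get_julian_day(2000, 1, 1) == 2451544.5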
@@ -504,7 +488,7 @@ def calc_time_julian_cent(jd):
 
 
 def sun_declination(e, L):
-    return (math.asin(math.sin(e) * math.sin(L)))
+    return (asin(sin(e) * sin(L)))
 
 
 def calc_equation_of_time(t):
@@ -512,13 +496,13 @@ def calc_equation_of_time(t):
     ml = radians(mean_longitude_sun(t))
     e = eccentricity_earth_orbit(t)
     m = radians(mean_anomaly_sun(t))
-    y = math.tan(radians(epsilon) / 2.0)
+    y = tan(radians(epsilon) / 2.0)
     y = y * y
-    sin2ml = math.sin(2.0 * ml)
-    cos2ml = math.cos(2.0 * ml)
-    sin4ml = math.sin(4.0 * ml)
-    sinm = math.sin(m)
-    sin2m = math.sin(2.0 * m)
+    sin2ml = sin(2.0 * ml)
+    cos2ml = cos(2.0 * ml)
+    sin4ml = sin(4.0 * ml)
+    sinm = sin(m)
+    sin2m = sin(2.0 * m)
     etime = (y * sin2ml - 2.0 * e * sinm + 4.0 * e * y *
              sinm * cos2ml - 0.5 * y ** 2 * sin4ml - 1.25 * e ** 2 * sin2m)
     return (degrees(etime) * 4)
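A note on the untouched return value above: degrees(etime) * 4 is the equation of time in minutes, since the Earth covers 1° of hour angle every 4 minutes. The series itself appears to be the usual Meeus/NOAA approximation, with y = tan²(ε/2), L0 the sun's mean longitude and M its mean anomaly:

    E_t ≈ y·sin(2·L0) - 2·e·sin(M) + 4·e·y·sin(M)·cos(2·L0) - (1/2)·y²·sin(4·L0) - (5/4)·e²·sin(2·M)

computed in radians and only then converted to minutes of time.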
@@ -527,7 +511,7 @@ def calc_equation_of_time(t):
 def obliquity_correction(t):
     ec = obliquity_of_ecliptic(t)
     omega = 125.04 - 1934.136 * t
-    return (ec + 0.00256 * math.cos(radians(omega)))
+    return (ec + 0.00256 * cos(radians(omega)))
 
 
 def obliquity_of_ecliptic(t):
@@ -542,13 +526,13 @@ def true_longitude_of_sun(t):
 def calc_sun_apparent_long(t):
     o = true_longitude_of_sun(t)
     omega = 125.04 - 1934.136 * t
-    lamb = o - 0.00569 - 0.00478 * math.sin(radians(omega))
+    lamb = o - 0.00569 - 0.00478 * sin(radians(omega))
     return lamb
 
 
 def apparent_longitude_of_sun(t):
     return (radians(true_longitude_of_sun(t) - 0.00569 - 0.00478 *
-                    math.sin(radians(125.04 - 1934.136 * t))))
+                    sin(radians(125.04 - 1934.136 * t))))
 
 
 def mean_longitude_sun(t):
@@ -557,9 +541,9 @@ def mean_longitude_sun(t):
 
 def equation_of_sun_center(t):
     m = radians(mean_anomaly_sun(t))
-    c = ((1.914602 - 0.004817 * t - 0.000014 * t**2) * math.sin(m) +
-         (0.019993 - 0.000101 * t) * math.sin(m * 2) +
-         0.000289 * math.sin(m * 3))
+    c = ((1.914602 - 0.004817 * t - 0.000014 * t**2) * sin(m) +
+         (0.019993 - 0.000101 * t) * sin(m * 2) +
+         0.000289 * sin(m * 3))
     return c
 
 
@@ -575,13 +559,12 @@ def calc_surface(context):
     coords = []
     sun_props = context.scene.sun_pos_properties
     zone = -sun_props.UTC_zone
-    north_offset = degrees(sun_props.north_offset)
 
     def get_surface_coordinates(time, month):
-        _, theta, phi, _, _ = get_sun_coordinates(
-            time, sun_props.latitude, sun_props.longitude, north_offset,
+        azimuth, elevation = get_sun_coordinates(
+            time, sun_props.latitude, sun_props.longitude,
             zone, month, 1, sun_props.year, sun_props.sun_distance)
-        sun_vector = get_sun_vector(theta, phi) * sun_props.sun_distance
+        sun_vector = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
         sun_vector.z = max(0, sun_vector.z)
         return sun_vector
 
@@ -601,21 +584,19 @@ def calc_analemma(context, h):
     vertices = []
     sun_props = context.scene.sun_pos_properties
     zone = -sun_props.UTC_zone
-    north_offset = degrees(sun_props.north_offset)
     for day_of_year in range(1, 367, 5):
         day, month = day_of_year_to_month_day(sun_props.year, day_of_year)
-        _, theta, phi, _, _ = get_sun_coordinates(
+        azimuth, elevation = get_sun_coordinates(
             h, sun_props.latitude, sun_props.longitude,
-            north_offset, zone, month, day, sun_props.year,
+            zone, month, day, sun_props.year,
             sun_props.sun_distance)
-        sun_vector = get_sun_vector(theta, phi) * sun_props.sun_distance
+        sun_vector = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
         if sun_vector.z > 0:
             vertices.append(sun_vector)
     return vertices
 
 
 def draw_surface(batch, shader):
 
     blend = gpu.state.blend_get()
     gpu.state.blend_set("ALPHA")
     shader.uniform_float("color", (.8, .6, 0, 0.2))
@@ -630,6 +611,7 @@ def draw_analemmas(batch, shader):
 
 _handle_surface = None
 
+
 def surface_update(self, context):
     global _handle_surface
     if self.show_surface:
@@ -648,6 +630,7 @@ def surface_update(self, context):
 
 _handle_analemmas = None
 
+
 def analemmas_update(self, context):
     global _handle_analemmas
     if self.show_analemmas:
@@ -664,7 +647,7 @@ def analemmas_update(self, context):
 
         shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
         batch = batch_for_shader(shader, 'LINES',
                                  {"pos": coords}, indices=indices)
 
         if _handle_analemmas is not None:
             bpy.types.SpaceView3D.draw_handler_remove(_handle_analemmas, 'WINDOW')
sun_position/ui_sun.py:

@@ -4,6 +4,7 @@ import bpy
 from bpy.types import Operator, Menu
 from bl_operators.presets import AddPresetBase
 import os
+from math import degrees
 
 from .sun_calc import (format_lat_long, format_time, format_hms, sun)
 
@@ -79,7 +80,7 @@ class SUNPOS_PT_Panel(bpy.types.Panel):
 
     def draw_environ_mode_panel(self, context, sp, p, layout):
         flow = layout.grid_flow(row_major=True, columns=0, even_columns=True,
                                 even_rows=False, align=False)
 
         col = flow.column(align=True)
         col.label(text="Environment Texture")
@@ -153,6 +154,7 @@ class SUNPOS_PT_Panel(bpy.types.Panel):
             col.label(text="Please select World in the World panel.",
                       icon="ERROR")
 
+
 class SUNPOS_PT_Location(bpy.types.Panel):
     bl_space_type = "PROPERTIES"
     bl_region_type = "WINDOW"
@@ -211,10 +213,10 @@ class SUNPOS_PT_Location(bpy.types.Panel):
         col = flow.column(align=True)
         split = col.split(factor=0.4, align=True)
         split.label(text="Azimuth:")
-        split.label(text=str(round(sun.azimuth, 3)) + "°")
+        split.label(text=str(round(degrees(sun.azimuth), 3)) + "°")
         split = col.split(factor=0.4, align=True)
         split.label(text="Elevation:")
-        split.label(text=str(round(sun.elevation, 3)) + "°")
+        split.label(text=str(round(degrees(sun.elevation), 3)) + "°")
         col.separator()
 
         if p.show_refraction:
@@ -282,12 +284,11 @@ class SUNPOS_PT_Time(bpy.types.Panel):
             split.label(text=ut)
             col.separator()
 
-
         col = flow.column(align=True)
         col.alignment = 'CENTER'
         if p.show_rise_set:
-            sr = format_hms(sun.sunrise.time)
-            ss = format_hms(sun.sunset.time)
+            sr = format_hms(sun.sunrise)
+            ss = format_hms(sun.sunset)
 
             split = col.split(factor=0.5, align=True)
             split.label(text="Sunrise:", icon='LIGHT_SUN')
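Since sun.azimuth and sun.elevation are now stored in radians, the panel converts only at display time, so the visible output is unchanged (editor's sketch):

    from math import degrees, pi

    azimuth = pi * 3 / 4                              # what sun.azimuth now stores internally
    print(str(round(degrees(azimuth), 3)) + "°")      # -> "135.0°", same text the panel shows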