Pose Library: Update to use the asset shelf (when enabled) #104546
@@ -29,13 +29,6 @@ translations_tuple = (
 ("fr_FR", "Choisir un nom pour la catégorie du panneau",
 (False, ())),
 ),
-(("Operator", "Insert Key"),
-(("bpy.types.ANIM_OT_insert_keyframe_animall",
-"bpy.types.ANIM_OT_insert_keyframe_animall"),
-()),
-("fr_FR", "Insérer une clé",
-(False, ())),
-),
 (("Operator", "Clear Animation"),
 (("bpy.types.ANIM_OT_clear_animation_animall",),
 ()),
@@ -49,6 +42,12 @@ translations_tuple = (
 "En cas d’échec, essayez de les supprimer manuellement",
 (False, ())),
 ),
+(("Operator", "Insert Key"),
+(("bpy.types.ANIM_OT_insert_keyframe_animall",),
+()),
+("fr_FR", "Insérer une clé",
+(False, ())),
+),
 (("*", "Insert a Keyframe"),
 (("bpy.types.ANIM_OT_insert_keyframe_animall",),
 ()),
@@ -67,6 +66,18 @@ translations_tuple = (
 ("fr_FR", "Supprimer une image clé",
 (False, ())),
 ),
+(("Operator", "Update Vertex Color Animation"),
+(("bpy.types.ANIM_OT_update_vertex_color_animation_animall",),
+()),
+("fr_FR", "Mettre à jour l’animation des couleurs de sommets",
+(False, ())),
+),
+(("*", "Update old vertex color channel formats from pre-3.3 versions"),
+(("bpy.types.ANIM_OT_update_vertex_color_animation_animall",),
+()),
+("fr_FR", "Mettre à jour les formats des canaux depuis les versions antérieures à la 3.3",
+(False, ())),
+),
 (("*", "Animate"),
 (("bpy.types.VIEW3D_PT_animall",),
 ()),
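For readers unfamiliar with the add-on i18n tuples being edited above, here is an annotated sketch of how one entry appears to be laid out (a reading of the structure shown in the diff, not an authoritative spec):

```python
# One entry from translations_tuple, reformatted for annotation.
entry = (
    ("Operator", "Insert Key"),                            # (i18n context, source string)
    (("bpy.types.ANIM_OT_insert_keyframe_animall",), ()),  # (RNA paths where the string is used, comments)
    ("fr_FR", "Insérer une clé", (False, ())),             # (language, translation, (is_fuzzy, comments))
)
```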
@@ -203,36 +203,36 @@ class BlenderIdPreferences(AddonPreferences):
 now = datetime.datetime.utcnow()

 if expiry is None:
-layout.label(text='We do not know when your token expires, please validate it.')
+layout.label(text='We do not know when your token expires, please validate it')
 elif now >= expiry:
-layout.label(text='Your login has expired! Log out and log in again to refresh it.',
+layout.label(text='Your login has expired! Log out and log in again to refresh it',
 icon='ERROR')
 else:
 time_left = expiry - now
 if time_left.days > 14:
 exp_str = tip_('on {:%Y-%m-%d}').format(expiry)
 elif time_left.days > 1:
-exp_str = tip_('in %i days.') % time_left.days
+exp_str = tip_('in %i days') % time_left.days
 elif time_left.seconds >= 7200:
-exp_str = tip_('in %i hours.') % round(time_left.seconds / 3600)
+exp_str = tip_('in %i hours') % round(time_left.seconds / 3600)
 elif time_left.seconds >= 120:
-exp_str = tip_('in %i minutes.') % round(time_left.seconds / 60)
+exp_str = tip_('in %i minutes') % round(time_left.seconds / 60)
 else:
 exp_str = tip_('within seconds')

 endpoint = communication.blender_id_endpoint()
 if endpoint == communication.BLENDER_ID_ENDPOINT:
-msg = tip_('You are logged in as %s.') % active_profile.username
+msg = tip_('You are logged in as %s') % active_profile.username
 else:
-msg = tip_('You are logged in as %s at %s.') % (active_profile.username, endpoint)
+msg = tip_('You are logged in as %s at %s') % (active_profile.username, endpoint)

 col = layout.column(align=True)
 col.label(text=msg, icon='WORLD_DATA')
 if time_left.days < 14:
 col.label(text=tip_('Your token will expire %s. Please log out and log in again '
-'to refresh it.') % exp_str, icon='PREVIEW_RANGE')
+'to refresh it') % exp_str, icon='PREVIEW_RANGE')
 else:
-col.label(text=tip_('Your authentication token expires %s.') % exp_str,
+col.label(text=tip_('Your authentication token expires %s') % exp_str,
 icon='BLANK1')

 row = layout.row().split(factor=0.8)
@@ -307,9 +307,9 @@ class BlenderIdValidate(BlenderIdMixin, Operator):

 err = validate_token()
 if err is None:
-addon_prefs.ok_message = tip_('Authentication token is valid.')
+addon_prefs.ok_message = tip_('Authentication token is valid')
 else:
-addon_prefs.error_message = tip_('%s; you probably want to log out and log in again.') % err
+addon_prefs.error_message = tip_('%s; you probably want to log out and log in again') % err

 BlenderIdProfile.read_json()

@@ -329,7 +329,7 @@ class BlenderIdLogout(BlenderIdMixin, Operator):
 profiles.logout(BlenderIdProfile.user_id)
 BlenderIdProfile.read_json()

-addon_prefs.ok_message = tip_('You have been logged out.')
+addon_prefs.ok_message = tip_('You have been logged out')
 return {'FINISHED'}

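The trailing-period removals above only touch the message text; the expiry bucketing itself is unchanged. A standalone sketch of that logic, with the tip_() translation wrapper dropped so it runs outside Blender and a hypothetical expiry value:

```python
import datetime

now = datetime.datetime.utcnow()
expiry = now + datetime.timedelta(hours=3)   # hypothetical token expiry
time_left = expiry - now

if time_left.days > 14:
    exp_str = 'on {:%Y-%m-%d}'.format(expiry)
elif time_left.days > 1:
    exp_str = 'in %i days' % time_left.days
elif time_left.seconds >= 7200:
    exp_str = 'in %i hours' % round(time_left.seconds / 3600)
elif time_left.seconds >= 120:
    exp_str = 'in %i minutes' % round(time_left.seconds / 60)
else:
    exp_str = 'within seconds'

print(exp_str)   # "in 3 hours"
```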
@@ -16,7 +16,7 @@ except:
 bl_info = {
 "name": "Import AutoCAD DXF Format (.dxf)",
 "author": "Lukas Treyer, Manfred Moitzi (support + dxfgrabber library), Vladimir Elistratov, Bastien Montagne, Remigiusz Fiedler (AKA migius)",
-"version": (0, 9, 6),
+"version": (0, 9, 8),
 "blender": (2, 80, 0),
 "location": "File > Import > AutoCAD DXF",
 "description": "Import files in the Autocad DXF format (.dxf)",
@@ -9,6 +9,8 @@ __author__ = "mozman <mozman@gmx.at>"

 import math

+from mathutils import Vector
+
 from . import const
 from .color import TrueColor
 from .styles import default_text_style
@@ -733,11 +735,6 @@ def deg2vec(deg):
 return math.cos(rad), math.sin(rad), 0.


-def normalized(vector):
-x, y, z = vector
-m = (x**2 + y**2 + z**2)**0.5
-return x/m, y/m, z/m
-
 ##################################################
 # MTEXT inline codes
 # \L Start underline
@@ -850,7 +847,7 @@ class MText(DXFEntity):
 self.raw_text = "".join(lines)
 if xdir is None:
 xdir = deg2vec(rotation)
-self.xdirection = normalized(xdir)
+self.xdirection = Vector(xdir).normalized()
 self.set_default_extrusion()

 def lines(self):
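The removed normalized() helper is replaced by mathutils above. A minimal sketch of the equivalence (one behavioural difference worth noting: Vector.normalized() returns a zero vector for zero-length input instead of dividing by zero):

```python
from mathutils import Vector

xdir = (3.0, 4.0, 0.0)                # hypothetical direction from deg2vec()
unit = Vector(xdir).normalized()      # Vector((0.6, 0.8, 0.0))
assert abs(unit.length - 1.0) < 1e-6
```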
@@ -949,7 +949,7 @@ class Do:

 # create the block
 if len(block_group.objects) == 0 or name not in self.known_blocks.keys():
-bpy.context.screen.scene = block_scene
+bpy.context.window.scene = block_scene
 block_inserts = [en for en in entity if is_.insert(en.dxftype)]
 bc = (en for en in entity if is_.combined_entity(en))
 bs = (en for en in entity if is_.separated_entity(en) and not is_.insert(en.dxftype))
@@ -985,7 +985,7 @@ class Do:
 else:
 bbox = self.known_blocks[name][2]

-bpy.context.screen.scene = scene
+bpy.context.window.scene = scene
 o = bbox.copy()
 # o.empty_display_size = 0.3
 o.instance_type = "COLLECTION"
@@ -1379,7 +1379,7 @@ class Do:
 return o

 def _recenter(self, scene, name):
-bpy.context.screen.scene = scene
+bpy.context.window.scene = scene
 bpy.context.view_layer.update()
 bpy.ops.object.select_all(action='DESELECT')

@@ -1621,7 +1621,7 @@ class Do:
 elif self.pScene is not None: # assume Proj
 scene['SRID'] = re.findall(r"\+init=(.+)\s", self.pScene.srs)[0]

-#bpy.context.screen.scene = scene
+#bpy.context.window.scene = scene

 return self.errors
 # trying to import dimensions:
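For context on the screen.scene to window.scene replacements: in Blender 2.8+ the active scene hangs off the window, not the screen. A minimal sketch (the scene name is hypothetical):

```python
import bpy

block_scene = bpy.data.scenes.new("DXF_Blocks")   # hypothetical helper scene
bpy.context.window.scene = block_scene            # replaces bpy.context.screen.scene
```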
@@ -16,15 +16,14 @@ import bpy
 bl_info = {
 "name": "Autodesk 3DS format",
 "author": "Bob Holcomb, Campbell Barton, Andreas Atteneder, Sebastian Schrand",
-"version": (2, 3, 2),
+"version": (2, 3, 4),
 "blender": (3, 6, 0),
-"location": "File > Import",
+"location": "File > Import-Export",
 "description": "3DS Import/Export meshes, UVs, materials, textures, "
-"cameras & lamps",
+"cameras, lamps & animation",
 "warning": "Images must be in file folder, "
 "filenames are limited to DOS 8.3 format",
-"doc_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
-"Scripts/Import-Export/Autodesk_3DS",
+"doc_url": "{BLENDER_MANUAL_URL}/addons/import_export/scene_3ds.html",
 "category": "Import-Export",
 }

@@ -151,14 +150,6 @@ def unregister():
 bpy.types.TOPBAR_MT_file_import.remove(menu_func_import)
 bpy.types.TOPBAR_MT_file_export.remove(menu_func_export)

-# NOTES:
-# why add 1 extra vertex? and remove it when done? -
-# "Answer - eekadoodle - would need to re-order UV's without this since face
-# order isnt always what we give blender, BMesh will solve :D"
-#
-# disabled scaling to size, this requires exposing bb (easy) and understanding
-# how it works (needs some time)
-

 if __name__ == "__main__":
 register()
@@ -13,19 +13,19 @@ import mathutils
 import bpy_extras
 from bpy_extras import node_shader_utils

-######################################################
-# Data Structures
-######################################################
+###################
+# Data Structures #
+###################

 # Some of the chunks that we will export
-# ----- Primary Chunk, at the beginning of each file
+# >----- Primary Chunk, at the beginning of each file
 PRIMARY = 0x4D4D

-# ------ Main Chunks
+# >----- Main Chunks
 VERSION = 0x0002 # This gives the version of the .3ds file
 KFDATA = 0xB000 # This is the header for all of the key frame info

-# ------ sub defines of OBJECTINFO
+# >----- sub defines of OBJECTINFO
 OBJECTINFO = 0x3D3D # Main mesh object chunk before the material and object information
 MESHVERSION = 0x3D3E # This gives the version of the mesh
 AMBIENTLIGHT = 0x2100 # The color of the ambient light
@@ -41,9 +41,16 @@ MATSHINESS = 0xA040 # Specular intensity of the object/material (percent)
 MATSHIN2 = 0xA041 # Reflection of the object/material (percent)
 MATSHIN3 = 0xA042 # metallic/mirror of the object/material (percent)
 MATTRANS = 0xA050 # Transparency value (100-OpacityValue) (percent)
+MATSELFILLUM = 0xA080 # # Material self illumination flag
 MATSELFILPCT = 0xA084 # Self illumination strength (percent)
+MATWIRE = 0xA085 # Material wireframe rendered flag
+MATFACEMAP = 0xA088 # Face mapped textures flag
+MATPHONGSOFT = 0xA08C # Phong soften material flag
+MATWIREABS = 0xA08E # Wire size in units flag
+MATWIRESIZE = 0xA087 # Rendered wire size in pixels
 MATSHADING = 0xA100 # Material shading method

+# >------ sub defines of MAT_MAP
 MAT_DIFFUSEMAP = 0xA200 # This is a header for a new diffuse texture
 MAT_SPECMAP = 0xA204 # head for specularity map
 MAT_OPACMAP = 0xA210 # head for opacity map
@@ -53,9 +60,7 @@ MAT_BUMP_PERCENT = 0xA252 # Normalmap strength (percent)
 MAT_TEX2MAP = 0xA33A # head for secondary texture
 MAT_SHINMAP = 0xA33C # head for roughness map
 MAT_SELFIMAP = 0xA33D # head for emission map
+MAT_MAP_FILE = 0xA300 # This holds the file name of a texture
-
-# >------ sub defines of MAT_MAP
-MATMAPFILE = 0xA300 # This holds the file name of a texture
 MAT_MAP_TILING = 0xa351 # 2nd bit (from LSB) is mirror UV flag
 MAT_MAP_TEXBLUR = 0xA353 # Texture blurring factor
 MAT_MAP_USCALE = 0xA354 # U axis scaling
@@ -85,7 +90,10 @@ OBJECT_CAMERA = 0x4700 # This lets us know we are reading a camera object
 # >------ Sub defines of LIGHT
 LIGHT_MULTIPLIER = 0x465B # The light energy factor
 LIGHT_SPOTLIGHT = 0x4610 # The target of a spotlight
-LIGHT_SPOTROLL = 0x4656 # The roll angle of the spot
+LIGHT_SPOT_ROLL = 0x4656 # Light spot roll angle
+LIGHT_SPOT_SHADOWED = 0x4630 # Light spot shadow flag
+LIGHT_SPOT_SEE_CONE = 0x4650 # Light spot show cone flag
+LIGHT_SPOT_RECTANGLE = 0x4651 # Light spot rectangle flag

 # >------ sub defines of CAMERA
 OBJECT_CAM_RANGES = 0x4720 # The camera range values
@@ -100,19 +108,34 @@ OBJECT_SMOOTH = 0x4150 # The objects smooth groups
 OBJECT_TRANS_MATRIX = 0x4160 # The Object Matrix

 # >------ sub defines of KFDATA
-KFDATA_KFHDR = 0xB00A
-KFDATA_KFSEG = 0xB008
-KFDATA_KFCURTIME = 0xB009
-KFDATA_OBJECT_NODE_TAG = 0xB002
+AMBIENT_NODE_TAG = 0xB001 # Ambient node tag
+OBJECT_NODE_TAG = 0xB002 # Object tree tag
+CAMERA_NODE_TAG = 0xB003 # Camera object tag
+TARGET_NODE_TAG = 0xB004 # Camera target tag
+LIGHT_NODE_TAG = 0xB005 # Light object tag
+LTARGET_NODE_TAG = 0xB006 # Light target tag
+SPOT_NODE_TAG = 0xB007 # Spotlight tag
+KFDATA_KFSEG = 0xB008 # Frame start & end
+KFDATA_KFCURTIME = 0xB009 # Frame current
+KFDATA_KFHDR = 0xB00A # Keyframe header

 # >------ sub defines of OBJECT_NODE_TAG
-OBJECT_NODE_ID = 0xB030
-OBJECT_NODE_HDR = 0xB010
-OBJECT_PIVOT = 0xB013
-OBJECT_INSTANCE_NAME = 0xB011
-POS_TRACK_TAG = 0xB020
-ROT_TRACK_TAG = 0xB021
-SCL_TRACK_TAG = 0xB022
+OBJECT_NODE_ID = 0xB030 # Object hierachy ID
+OBJECT_NODE_HDR = 0xB010 # Hierachy tree header
+OBJECT_INSTANCE_NAME = 0xB011 # Object instance name
+OBJECT_PIVOT = 0xB013 # Object pivot position
+OBJECT_BOUNDBOX = 0xB014 # Object boundbox
+OBJECT_MORPH_SMOOTH = 0xB015 # Object smooth angle
+POS_TRACK_TAG = 0xB020 # Position transform tag
+ROT_TRACK_TAG = 0xB021 # Rotation transform tag
+SCL_TRACK_TAG = 0xB022 # Scale transform tag
+FOV_TRACK_TAG = 0xB023 # Field of view tag
+ROLL_TRACK_TAG = 0xB024 # Roll transform tag
+COL_TRACK_TAG = 0xB025 # Color transform tag
+HOTSPOT_TRACK_TAG = 0xB027 # Hotspot transform tag
+FALLOFF_TRACK_TAG = 0xB028 # Falloff transform tag
+
+ROOT_OBJECT = 0xFFFF # Root object


 # So 3ds max can open files, limit names to 12 in length
@@ -120,7 +143,6 @@ SCL_TRACK_TAG = 0xB022
 name_unique = [] # stores str, ascii only
 name_mapping = {} # stores {orig: byte} mapping

-
 def sane_name(name):
 name_fixed = name_mapping.get(name)
 if name_fixed is not None:
@@ -131,7 +153,7 @@ def sane_name(name):
 i = 0

 while new_name in name_unique:
-new_name = new_name_clean + ".%.3d" % i
+new_name = new_name_clean + '.%.3d' % i
 i += 1

 # note, appending the 'str' version.
@@ -143,13 +165,11 @@ def sane_name(name):
 def uv_key(uv):
 return round(uv[0], 6), round(uv[1], 6)

-
 # size defines:
 SZ_SHORT = 2
 SZ_INT = 4
 SZ_FLOAT = 4

-
 class _3ds_ushort(object):
 """Class representing a short (2-byte integer) for a 3ds file.
 *** This looks like an unsigned short H is unsigned from the struct docs - Cam***"""
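The size defines above pair with the struct format strings used throughout the writer; a quick sanity-check sketch:

```python
import struct

assert struct.calcsize('<H') == 2   # SZ_SHORT
assert struct.calcsize('<I') == 4   # SZ_INT
assert struct.calcsize('<f') == 4   # SZ_FLOAT
```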
@@ -162,7 +182,7 @@ class _3ds_ushort(object):
 return SZ_SHORT

 def write(self, file):
-file.write(struct.pack("<H", self.value))
+file.write(struct.pack('<H', self.value))

 def __str__(self):
 return str(self.value)
@@ -179,7 +199,7 @@ class _3ds_uint(object):
 return SZ_INT

 def write(self, file):
-file.write(struct.pack("<I", self.value))
+file.write(struct.pack('<I', self.value))

 def __str__(self):
 return str(self.value)
@@ -196,7 +216,7 @@ class _3ds_float(object):
 return SZ_FLOAT

 def write(self, file):
-file.write(struct.pack("<f", self.value))
+file.write(struct.pack('<f', self.value))

 def __str__(self):
 return str(self.value)
@@ -214,7 +234,7 @@ class _3ds_string(object):
 return (len(self.value) + 1)

 def write(self, file):
-binary_format = "<%ds" % (len(self.value) + 1)
+binary_format = '<%ds' % (len(self.value) + 1)
 file.write(struct.pack(binary_format, self.value))

 def __str__(self):
@@ -242,19 +262,19 @@ class _3ds_point_3d(object):
 '''
 class _3ds_point_4d(object):
 """Class representing a four-dimensional point for a 3ds file, for instance a quaternion."""
-__slots__ = "x","y","z","w"
+__slots__ = "w","x","y","z"
 def __init__(self, point=(0.0,0.0,0.0,0.0)):
-self.x, self.y, self.z, self.w = point
+self.w, self.x, self.y, self.z = point

 def get_size(self):
 return 4*SZ_FLOAT

 def write(self,file):
-data=struct.pack('<4f', self.x, self.y, self.z, self.w)
+data=struct.pack('<4f', self.w, self.x, self.y, self.z)
 file.write(data)

 def __str__(self):
-return '(%f, %f, %f, %f)' % (self.x, self.y, self.z, self.w)
+return '(%f, %f, %f, %f)' % (self.w, self.x, self.y, self.z)
 '''

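The reordering above assumes 3DS stores four-component values (quaternions) with W first; a sketch of what the commented-out class would write, using mathutils' native (w, x, y, z) ordering:

```python
import struct
from mathutils import Quaternion

q = Quaternion((1.0, 0.0, 0.0, 0.0))            # identity; mathutils order is (w, x, y, z)
data = struct.pack('<4f', q.w, q.x, q.y, q.z)   # W leads, matching the edited _3ds_point_4d
```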
@@ -312,7 +332,7 @@ class _3ds_rgb_color(object):

 class _3ds_face(object):
 """Class representing a face for a 3ds file."""
-__slots__ = ("vindex", "flag")
+__slots__ = ("vindex", "flag", )

 def __init__(self, vindex, flag):
 self.vindex = vindex
@@ -326,15 +346,14 @@ class _3ds_face(object):

 def write(self, file):
 # The last short is used for face flags
-file.write(struct.pack("<4H", self.vindex[0], self.vindex[1], self.vindex[2], self.flag))
+file.write(struct.pack('<4H', self.vindex[0], self.vindex[1], self.vindex[2], self.flag))

 def __str__(self):
-return "[%d %d %d %d]" % (self.vindex[0], self.vindex[1], self.vindex[2], self.flag)
+return '[%d %d %d %d]' % (self.vindex[0], self.vindex[1], self.vindex[2], self.flag)


 class _3ds_array(object):
 """Class representing an array of variables for a 3ds file.

 Consists of a _3ds_ushort to indicate the number of items, followed by the items themselves.
 """
 __slots__ = "values", "size"
@@ -395,7 +414,6 @@ class _3ds_named_variable(object):
 # the chunk class
 class _3ds_chunk(object):
 """Class representing a chunk in a 3ds file.
-
 Chunks contain zero or more variables, followed by zero or more subchunks.
 """
 __slots__ = "ID", "size", "variables", "subchunks"
@@ -408,8 +426,8 @@ class _3ds_chunk(object):

 def add_variable(self, name, var):
 """Add a named variable.

 The name is mostly for debugging purposes."""

 self.variables.append(_3ds_named_variable(name, var))

 def add_subchunk(self, chunk):
@@ -418,8 +436,8 @@ class _3ds_chunk(object):

 def get_size(self):
 """Calculate the size of the chunk and return it.

 The sizes of the variables and subchunks are used to determine this chunk\'s size."""

 tmpsize = self.ID.get_size() + self.size.get_size()
 for variable in self.variables:
 tmpsize += variable.get_size()
@@ -443,8 +461,8 @@ class _3ds_chunk(object):

 def write(self, file):
 """Write the chunk to a file.

 Uses the write function of the variables and the subchunks to do the actual work."""

 # write header
 self.ID.write(file)
 self.size.write(file)
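As background for the chunk classes touched here: a 3DS chunk starts with a 2-byte ID and a 4-byte total size, both little-endian, followed by its payload and subchunks. A small illustration of that header layout (sketch only, not code from the add-on):

```python
import struct

def read_chunk_header(data, offset=0):
    # Unpack the 6-byte chunk header: ushort ID, uint total size.
    chunk_id, chunk_size = struct.unpack_from('<HI', data, offset)
    return chunk_id, chunk_size

header = struct.pack('<HI', 0x4D4D, 6)           # PRIMARY chunk with an empty payload
assert read_chunk_header(header) == (0x4D4D, 6)
```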
@@ -455,21 +473,20 @@ class _3ds_chunk(object):

 def dump(self, indent=0):
 """Write the chunk to a file.

 Dump is used for debugging purposes, to dump the contents of a chunk to the standard output.
 Uses the dump function of the named variables and the subchunks to do the actual work."""
 print(indent * " ",
-"ID=%r" % hex(self.ID.value),
-"size=%r" % self.get_size())
+'ID=%r' % hex(self.ID.value),
+'size=%r' % self.get_size())
 for variable in self.variables:
 variable.dump(indent + 1)
 for subchunk in self.subchunks:
 subchunk.dump(indent + 1)


-######################################################
-# EXPORT
-######################################################
+##########
+# EXPORT #
+##########

 def get_material_image(material):
 """ Get images from paint slots."""
@@ -485,17 +502,16 @@ def get_material_image(material):
 def get_uv_image(ma):
 """ Get image from material wrapper."""
 if ma and ma.use_nodes:
-ma_wrap = node_shader_utils.PrincipledBSDFWrapper(ma)
-ma_tex = ma_wrap.base_color_texture
-if ma_tex and ma_tex.image is not None:
-return ma_tex.image
+mat_wrap = node_shader_utils.PrincipledBSDFWrapper(ma)
+mat_tex = mat_wrap.base_color_texture
+if mat_tex and mat_tex.image is not None:
+return mat_tex.image
 else:
 return get_material_image(ma)


 def make_material_subchunk(chunk_id, color):
 """Make a material subchunk.

 Used for color subchunks, such as diffuse color or ambient color subchunks."""
 mat_sub = _3ds_chunk(chunk_id)
 col1 = _3ds_chunk(RGB1)
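The ma_* to mat_* rename above does not change behaviour; the image is still read through the Principled BSDF wrapper. A usage sketch (the material name is hypothetical):

```python
import bpy
from bpy_extras import node_shader_utils

ma = bpy.data.materials.get("Material")   # hypothetical material
image = None
if ma and ma.use_nodes:
    mat_wrap = node_shader_utils.PrincipledBSDFWrapper(ma)
    mat_tex = mat_wrap.base_color_texture
    if mat_tex and mat_tex.image is not None:
        image = mat_tex.image
```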
@@ -514,33 +530,38 @@ def make_percent_subchunk(chunk_id, percent):
 pcti = _3ds_chunk(PCT)
 pcti.add_variable("percent", _3ds_ushort(int(round(percent * 100, 0))))
 pct_sub.add_subchunk(pcti)
+# optional:
+# pctf = _3ds_chunk(PCTF)
+# pctf.add_variable("pctfloat", _3ds_float(round(percent, 6)))
+# pct_sub.add_subchunk(pctf)
 return pct_sub


 def make_texture_chunk(chunk_id, images):
 """Make Material Map texture chunk."""
 # Add texture percentage value (100 = 1.0)
-ma_sub = make_percent_subchunk(chunk_id, 1)
+mat_sub = make_percent_subchunk(chunk_id, 1)
 has_entry = False

 def add_image(img):
 filename = bpy.path.basename(image.filepath)
-ma_sub_file = _3ds_chunk(MATMAPFILE)
-ma_sub_file.add_variable("image", _3ds_string(sane_name(filename)))
-ma_sub.add_subchunk(ma_sub_file)
+mat_sub_file = _3ds_chunk(MAT_MAP_FILE)
+mat_sub_file.add_variable("image", _3ds_string(sane_name(filename)))
+mat_sub.add_subchunk(mat_sub_file)

 for image in images:
 add_image(image)
 has_entry = True

-return ma_sub if has_entry else None
+return mat_sub if has_entry else None


 def make_material_texture_chunk(chunk_id, texslots, pct):
 """Make Material Map texture chunk given a seq. of `MaterialTextureSlot`'s
 Paint slots are optionally used as image source if no nodes are
 used. No additional filtering for mapping modes is done, all
 slots are written "as is"."""

 # Add texture percentage value
 mat_sub = make_percent_subchunk(chunk_id, pct)
 has_entry = False
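The percentage subchunk built above stores a 0.0-1.0 factor as an integer percent in an unsigned short; a tiny sketch of that conversion:

```python
import struct

def pack_percent(factor):
    """Pack a 0.0-1.0 factor the way the PCT subchunk stores it (0-100 ushort)."""
    return struct.pack('<H', int(round(factor * 100, 0)))

assert pack_percent(1.0) == struct.pack('<H', 100)
assert pack_percent(0.25) == struct.pack('<H', 25)
```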
@@ -549,35 +570,36 @@ def make_material_texture_chunk(chunk_id, texslots, pct):
 image = texslot.image

 filename = bpy.path.basename(image.filepath)
-mat_sub_file = _3ds_chunk(MATMAPFILE)
+mat_sub_file = _3ds_chunk(MAT_MAP_FILE)
 mat_sub_file.add_variable("mapfile", _3ds_string(sane_name(filename)))
 mat_sub.add_subchunk(mat_sub_file)
 for link in texslot.socket_dst.links:
 socket = link.from_socket.identifier

-maptile = 0
+mat_sub_mapflags = _3ds_chunk(MAT_MAP_TILING)
+"""Control bit flags, where 0x1 activates decaling, 0x2 activates mirror,
+0x8 activates inversion, 0x10 deactivates tiling, 0x20 activates summed area sampling,
+0x40 activates alpha source, 0x80 activates tinting, 0x100 ignores alpha, 0x200 activates RGB tint.
+Bits 0x80, 0x100, and 0x200 are only used with TEXMAP, TEX2MAP, and SPECMAP chunks.
+0x40, when used with a TEXMAP, TEX2MAP, or SPECMAP chunk must be accompanied with a tint bit,
+either 0x100 or 0x200, tintcolor will be processed if colorchunks are present"""
+
+mapflags = 0

 # no perfect mapping for mirror modes - 3DS only has uniform mirror w. repeat=2
 if texslot.extension == 'EXTEND':
-maptile |= 0x1
-# CLIP maps to 3DS' decal flag
-elif texslot.extension == 'CLIP':
-maptile |= 0x10
-
-mat_sub_tile = _3ds_chunk(MAT_MAP_TILING)
-mat_sub_tile.add_variable("tiling", _3ds_ushort(maptile))
-mat_sub.add_subchunk(mat_sub_tile)
+mapflags |= 0x1
+if texslot.extension == 'CLIP':
+mapflags |= 0x10

 if socket == 'Alpha':
-mat_sub_alpha = _3ds_chunk(MAP_TILING)
-alphaflag = 0x40 # summed area sampling 0x20
-mat_sub_alpha.add_variable("alpha", _3ds_ushort(alphaflag))
-mat_sub.add_subchunk(mat_sub_alpha)
+mapflags |= 0x40
 if texslot.socket_dst.identifier in {'Base Color', 'Specular'}:
-mat_sub_tint = _3ds_chunk(MAP_TILING) # RGB tint 0x200
-tint = 0x80 if texslot.image.colorspace_settings.name == 'Non-Color' else 0x200
-mat_sub_tint.add_variable("tint", _3ds_ushort(tint))
-mat_sub.add_subchunk(mat_sub_tint)
+mapflags |= 0x80 if image.colorspace_settings.name=='Non-Color' else 0x200
+mat_sub_mapflags.add_variable("mapflags", _3ds_ushort(mapflags))
+mat_sub.add_subchunk(mat_sub_mapflags)

 mat_sub_texblur = _3ds_chunk(MAT_MAP_TEXBLUR) # Based on observation this is usually 1.0
 mat_sub_texblur.add_variable("maptexblur", _3ds_float(1.0))
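The rewrite above folds the separate tiling, alpha and tint subchunks into a single MAT_MAP_TILING flag word. A standalone sketch of how those bits combine, using the values listed in the new docstring (the slot settings below are hypothetical):

```python
DECAL, MIRROR, INVERT, NO_TILE, SUMMED_AREA, ALPHA_SOURCE, TINT, NO_ALPHA, RGB_TINT = (
    0x1, 0x2, 0x8, 0x10, 0x20, 0x40, 0x80, 0x100, 0x200)

extension, socket = 'CLIP', 'Alpha'   # hypothetical texture-slot state
mapflags = 0
if extension == 'EXTEND':
    mapflags |= DECAL
if extension == 'CLIP':
    mapflags |= NO_TILE
if socket == 'Alpha':
    mapflags |= ALPHA_SOURCE

print(hex(mapflags))   # 0x50
```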
@@ -801,11 +823,11 @@ def extract_triangles(mesh):

 faceflag = 0
 if c_a.use_edge_sharp:
-faceflag = faceflag + 0x1
+faceflag |= 0x1
 if b_c.use_edge_sharp:
-faceflag = faceflag + 0x2
+faceflag |= 0x2
 if a_b.use_edge_sharp:
-faceflag = faceflag + 0x4
+faceflag |= 0x4

 smoothgroup = polygroup[face.polygon_index]

@@ -977,16 +999,6 @@ def make_uv_chunk(uv_array):
 return uv_chunk


-'''
-def make_matrix_4x3_chunk(matrix):
-matrix_chunk = _3ds_chunk(OBJECT_TRANS_MATRIX)
-for vec in matrix.col:
-for f in vec[:3]:
-matrix_chunk.add_variable("matrix_f", _3ds_float(f))
-return matrix_chunk
-'''
-
-
 def make_mesh_chunk(ob, mesh, matrix, materialDict, translation):
 """Make a chunk out of a Blender mesh."""

@@ -1017,8 +1029,6 @@ def make_mesh_chunk(ob, mesh, matrix, materialDict, translation):
 if uv_array:
 mesh_chunk.add_subchunk(make_uv_chunk(uv_array))

-# mesh_chunk.add_subchunk(make_matrix_4x3_chunk(matrix))
-
 # create transformation matrix chunk
 matrix_chunk = _3ds_chunk(OBJECT_TRANS_MATRIX)
 obj_matrix = matrix.transposed().to_3x3()
@@ -1073,7 +1083,6 @@ def make_kfdata(start=0, stop=0, curtime=0):

 def make_track_chunk(ID, obj):
 """Make a chunk for track data.
-
 Depending on the ID, this will construct a position, rotation or scale track."""
 track_chunk = _3ds_chunk(ID)
 track_chunk.add_variable("track_flags", _3ds_ushort())
@@ -1111,13 +1120,12 @@ def make_track_chunk(ID, obj):

 def make_kf_obj_node(obj, name_to_id):
 """Make a node chunk for a Blender object.

 Takes the Blender object as a parameter. Object id's are taken from the dictionary name_to_id.
 Blender Empty objects are converted to dummy nodes."""
-
 name = obj.name
 # main object node chunk:
-kf_obj_node = _3ds_chunk(KFDATA_OBJECT_NODE_TAG)
+kf_obj_node = _3ds_chunk(OBJECT_NODE_TAG)
 # chunk for the object id:
 obj_id_chunk = _3ds_chunk(OBJECT_NODE_ID)
 # object id is from the name_to_id dictionary:
@@ -1184,7 +1192,6 @@ def save(operator,

 # Time the export
 duration = time.time()
-# Blender.Window.WaitCursor(1)

 if global_matrix is None:
 global_matrix = mathutils.Matrix()
@@ -1237,6 +1244,7 @@ def save(operator,
 else:
 objects = [ob for ob in scene.objects if not ob.hide_viewport]

+empty_objects = [ob for ob in objects if ob.type == 'EMPTY']
 light_objects = [ob for ob in objects if ob.type == 'LIGHT']
 camera_objects = [ob for ob in objects if ob.type == 'CAMERA']

@@ -1304,15 +1312,17 @@ def save(operator,
 object_info.add_subchunk(make_material_chunk(ma_image[0], ma_image[1]))

 # Give all objects a unique ID and build a dictionary from object name to object id:
-translation = {} # collect translation for transformation matrix
 # name_to_id = {}

+translation = {} # collect translation for transformation matrix
+
 for ob, data, matrix in mesh_objects:
 translation[ob.name] = ob.location
 # name_to_id[ob.name]= len(name_to_id)
-"""
-#for ob in empty_objects:
-# name_to_id[ob.name]= len(name_to_id)
-"""
+for ob in empty_objects:
+translation[ob.name] = ob.location
+# name_to_id[ob.name]= len(name_to_id)

 # Create object chunks for all meshes:
 i = 0
@@ -1339,10 +1349,6 @@ def save(operator,
 kfdata.add_subchunk(make_kf_obj_node(ob, name_to_id))
 '''

-# if not blender_mesh.users:
-# bpy.data.meshes.remove(blender_mesh)
-# blender_mesh.vertices = None
-
 i += i

 # Create chunks for all empties:
@@ -1362,7 +1368,7 @@ def save(operator,
 object_chunk.add_variable("light", _3ds_string(sane_name(ob.name)))
 light_chunk.add_variable("location", _3ds_point_3d(ob.location))
 color_float_chunk.add_variable("color", _3ds_float_color(ob.data.color))
-energy_factor.add_variable("energy", _3ds_float(ob.data.energy * .001))
+energy_factor.add_variable("energy", _3ds_float(ob.data.energy * 0.001))
 light_chunk.add_subchunk(color_float_chunk)
 light_chunk.add_subchunk(energy_factor)

@@ -1374,12 +1380,18 @@ def save(operator,
 pos_y = ob.location[1] + (ob.location[0] * math.tan(math.radians(90) - ob.rotation_euler[2]))
 pos_z = hypo * math.tan(math.radians(90) - ob.rotation_euler[0])
 spotlight_chunk = _3ds_chunk(LIGHT_SPOTLIGHT)
-spot_roll_chunk = _3ds_chunk(LIGHT_SPOTROLL)
+spot_roll_chunk = _3ds_chunk(LIGHT_SPOT_ROLL)
 spotlight_chunk.add_variable("target", _3ds_point_3d((pos_x, pos_y, pos_z)))
 spotlight_chunk.add_variable("hotspot", _3ds_float(round(hotspot, 4)))
 spotlight_chunk.add_variable("angle", _3ds_float(round(cone_angle, 4)))
 spot_roll_chunk.add_variable("roll", _3ds_float(round(ob.rotation_euler[1], 6)))
 spotlight_chunk.add_subchunk(spot_roll_chunk)
+if ob.data.show_cone:
+spot_cone_chunk = _3ds_chunk(LIGHT_SPOT_SEE_CONE)
+spotlight_chunk.add_subchunk(spot_cone_chunk)
+if ob.data.use_square:
+spot_square_chunk = _3ds_chunk(LIGHT_SPOT_RECTANGLE)
+spotlight_chunk.add_subchunk(spot_square_chunk)
 light_chunk.add_subchunk(spotlight_chunk)

 # Add light to object info
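The two spot flags added above (see-cone and rectangle) carry no variables, so on disk each should be just the 6-byte chunk header. Sketch:

```python
import struct

LIGHT_SPOT_SEE_CONE = 0x4650
flag_chunk = struct.pack('<HI', LIGHT_SPOT_SEE_CONE, 6)   # ID + total size, no payload
assert len(flag_chunk) == 6
```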
@@ -1411,9 +1423,9 @@ def save(operator,
 '''

 # At this point, the chunk hierarchy is completely built.

 # Check the size:
 primary.get_size()

 # Open the file for writing:
 file = open(filepath, 'wb')

@@ -1428,7 +1440,6 @@ def save(operator,
 name_mapping.clear()

 # Debugging only: report the exporting time:
-# Blender.Window.WaitCursor(0)
 print("3ds export time: %.2f" % (time.time() - duration))

 # Debugging only: dump the chunk hierarchy:

(File diff suppressed because it is too large.)

@@ -931,26 +931,6 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
 me.edges.foreach_get("vertices", t_ev)
 me.loops.foreach_get("edge_index", t_lei)

-# Polygons might not be in the same order as loops. To export per-loop and per-polygon data in a matching order,
-# one must be set into the order of the other. Since there are fewer polygons than loops and there are usually
-# more geometry layers exported that are per-loop than per-polygon, it's more efficient to re-order polygons and
-# per-polygon data.
-perm_polygons_to_loop_order = None
-# t_ls indicates the ordering of polygons compared to loops. When t_ls is sorted, polygons and loops are in the same
-# order. Since each loop must be assigned to exactly one polygon for the mesh to be valid, every value in t_ls must
-# be unique, so t_ls will be monotonically increasing when sorted.
-# t_ls is expected to be in the same order as loops in most cases since exiting Edit mode will sort t_ls, so do an
-# initial check for any element being smaller than the previous element to determine if sorting is required.
-sort_polygon_data = np.any(t_ls[1:] < t_ls[:-1])
-if sort_polygon_data:
-# t_ls is not sorted, so get the indices that would sort t_ls using argsort, these will be re-used to sort
-# per-polygon data.
-# Using 'stable' for radix sort, which performs much better with partially ordered data and slightly worse with
-# completely random data, compared to the default of 'quicksort' for introsort.
-perm_polygons_to_loop_order = np.argsort(t_ls, kind='stable')
-# Sort t_ls into the same order as loops.
-t_ls = t_ls[perm_polygons_to_loop_order]
-
 # Add "fake" faces for loose edges. Each "fake" face consists of two loops creating a new 2-sided polygon.
 if scene_data.settings.use_mesh_edges:
 bl_edge_is_loose_dtype = bool
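The block removed above relied on a stable argsort of the loop-start array to bring per-polygon data into loop order; the later hunks drop its remaining uses. A standalone sketch of that NumPy pattern, for reference:

```python
import numpy as np

t_ls = np.array([6, 0, 3])                  # per-polygon loop starts, not in loop order
t_pm = np.array([2, 0, 1])                  # matching per-polygon data (e.g. material indices)

if np.any(t_ls[1:] < t_ls[:-1]):            # only reorder when actually unsorted
    perm = np.argsort(t_ls, kind='stable')  # 'stable' radix sort is cheap on nearly-sorted input
    t_ls, t_pm = t_ls[perm], t_pm[perm]

print(t_ls, t_pm)                           # [0 3 6] [0 1 2]
```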
@@ -1051,8 +1031,6 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
 if smooth_type == 'FACE':
 t_ps = np.empty(len(me.polygons), dtype=poly_use_smooth_dtype)
 me.polygons.foreach_get("use_smooth", t_ps)
-if sort_polygon_data:
-t_ps = t_ps[perm_polygons_to_loop_order]
 _map = b"ByPolygon"
 else: # EDGE
 _map = b"ByEdge"
@@ -1071,17 +1049,14 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
 # Get the 'use_smooth' attribute of all polygons.
 p_use_smooth_mask = np.empty(mesh_poly_nbr, dtype=poly_use_smooth_dtype)
 me.polygons.foreach_get('use_smooth', p_use_smooth_mask)
-if sort_polygon_data:
-p_use_smooth_mask = p_use_smooth_mask[perm_polygons_to_loop_order]
 # Invert to get all flat shaded polygons.
 p_flat_mask = np.invert(p_use_smooth_mask, out=p_use_smooth_mask)
 # Convert flat shaded polygons to flat shaded loops by repeating each element by the number of sides of
 # that polygon.
-# Polygon sides can be calculated from the element-wise difference of sorted loop starts appended by the
-# number of loops. Alternatively, polygon sides can be retrieved directly from the 'loop_total'
-# attribute of polygons, but that might need to be sorted, and we already have t_ls which is sorted loop
-# starts. It tends to be quicker to calculate from t_ls when above around 10_000 polygons even when the
-# 'loop_total' array wouldn't need sorting.
+# Polygon sides can be calculated from the element-wise difference of loop starts appended by the number
+# of loops. Alternatively, polygon sides can be retrieved directly from the 'loop_total' attribute of
+# polygons, but since we already have t_ls, it tends to be quicker to calculate from t_ls when above
+# around 10_000 polygons.
 polygon_sides = np.diff(mesh_t_ls_view, append=mesh_loop_nbr)
 p_flat_loop_mask = np.repeat(p_flat_mask, polygon_sides)
 # Convert flat shaded loops to flat shaded (sharp) edge indices.
@@ -1442,8 +1417,6 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
 fbx_pm_dtype = np.int32
 t_pm = np.empty(len(me.polygons), dtype=bl_pm_dtype)
 me.polygons.foreach_get("material_index", t_pm)
-if sort_polygon_data:
-t_pm = t_pm[perm_polygons_to_loop_order]

 # We have to validate mat indices, and map them to FBX indices.
 # Note a mat might not be in me_fbxmaterials_idx (e.g. node mats are ignored).
@@ -1474,7 +1447,6 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
 elem_data_single_string(lay_ma, b"MappingInformationType", b"AllSame")
 elem_data_single_string(lay_ma, b"ReferenceInformationType", b"IndexToDirect")
 elem_data_single_int32_array(lay_ma, b"Materials", [0])
-del perm_polygons_to_loop_order

 # And the "layer TOC"...

@@ -372,7 +372,7 @@ def blen_read_custom_properties(fbx_obj, blen_obj, settings):
 def blen_read_object_transform_do(transform_data):
 # This is a nightmare. FBX SDK uses Maya way to compute the transformation matrix of a node - utterly simple:
 #
-# WorldTransform = ParentWorldTransform @ T @ Roff @ Rp @ Rpre @ R @ Rpost @ Rp-1 @ Soff @ Sp @ S @ Sp-1
+# WorldTransform = ParentWorldTransform @ T @ Roff @ Rp @ Rpre @ R @ Rpost-1 @ Rp-1 @ Soff @ Sp @ S @ Sp-1
 #
 # Where all those terms are 4 x 4 matrices that contain:
 # WorldTransform: Transformation matrix of the node in global space.
@@ -382,7 +382,7 @@ def blen_read_object_transform_do(transform_data):
 # Rp: Rotation pivot
 # Rpre: Pre-rotation
 # R: Rotation
-# Rpost: Post-rotation
+# Rpost-1: Inverse of the post-rotation (FBX 2011 documentation incorrectly specifies this without inversion)
 # Rp-1: Inverse of the rotation pivot
 # Soff: Scaling offset
 # Sp: Scaling pivot
@@ -402,14 +402,15 @@ def blen_read_object_transform_do(transform_data):
 # S: Scaling
 # OT: Geometric transform translation
 # OR: Geometric transform rotation
-# OS: Geometric transform translation
+# OS: Geometric transform scale
 #
 # Notes:
 # Geometric transformations ***are not inherited***: ParentWorldTransform does not contain the OT, OR, OS
 # of WorldTransform's parent node.
+# The R matrix takes into account the rotation order. Other rotation matrices are always 'XYZ' order.
 #
-# Taken from http://download.autodesk.com/us/fbx/20112/FBX_SDK_HELP/
-# index.html?url=WS1a9193826455f5ff1f92379812724681e696651.htm,topicNumber=d0e7429
+# Taken from https://help.autodesk.com/view/FBX/2020/ENU/
+# ?guid=FBX_Developer_Help_nodes_and_scene_graph_fbx_nodes_computing_transformation_matrix_html

 # translation
 lcl_translation = Matrix.Translation(transform_data.loc)
@@ -418,9 +419,9 @@ def blen_read_object_transform_do(transform_data):
 # rotation
 to_rot = lambda rot, rot_ord: Euler(convert_deg_to_rad_iter(rot), rot_ord).to_matrix().to_4x4()
 lcl_rot = to_rot(transform_data.rot, transform_data.rot_ord) @ transform_data.rot_alt_mat
-pre_rot = to_rot(transform_data.pre_rot, transform_data.rot_ord)
-pst_rot = to_rot(transform_data.pst_rot, transform_data.rot_ord)
-geom_rot = to_rot(transform_data.geom_rot, transform_data.rot_ord)
+pre_rot = to_rot(transform_data.pre_rot, 'XYZ')
+pst_rot = to_rot(transform_data.pst_rot, 'XYZ')
+geom_rot = to_rot(transform_data.geom_rot, 'XYZ')

 rot_ofs = Matrix.Translation(transform_data.rot_ofs)
 rot_piv = Matrix.Translation(transform_data.rot_piv)
@ -439,7 +440,7 @@ def blen_read_object_transform_do(transform_data):
|
|||||||
rot_piv @
|
rot_piv @
|
||||||
pre_rot @
|
pre_rot @
|
||||||
lcl_rot @
|
lcl_rot @
|
||||||
pst_rot @
|
pst_rot.inverted_safe() @
|
||||||
rot_piv.inverted_safe() @
|
rot_piv.inverted_safe() @
|
||||||
sca_ofs @
|
sca_ofs @
|
||||||
sca_piv @
|
sca_piv @
|
||||||
|
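The FBX hunk above changes how the local transform is composed: only the main rotation R honours the node's rotation order, the pre-, post- and geometric rotations are always evaluated as 'XYZ', and the post-rotation enters the product inverted, matching the Rpost-1 term in the corrected formula. A minimal sketch of that composition, assuming only mathutils and hypothetical argument names (geometric transforms and the parent matrix are left out; this is not the add-on's own helper):

import math
from mathutils import Matrix, Euler

def fbx_local_matrix(loc, rot_deg, rot_ord, pre_rot_deg, pst_rot_deg,
                     rot_ofs, rot_piv, sca_ofs, sca_piv, sca):
    # Degrees -> radians, then Euler -> 4x4 rotation matrix.
    to_rot = lambda rot, order: Euler([math.radians(c) for c in rot], order).to_matrix().to_4x4()

    lcl_rot = to_rot(rot_deg, rot_ord)    # R: uses the node's rotation order
    pre_rot = to_rot(pre_rot_deg, 'XYZ')  # Rpre: always 'XYZ'
    pst_rot = to_rot(pst_rot_deg, 'XYZ')  # Rpost: always 'XYZ', applied inverted below

    t = Matrix.Translation(loc)
    roff, rp = Matrix.Translation(rot_ofs), Matrix.Translation(rot_piv)
    soff, sp = Matrix.Translation(sca_ofs), Matrix.Translation(sca_piv)
    s = Matrix.Diagonal((*sca, 1.0))

    # T @ Roff @ Rp @ Rpre @ R @ Rpost-1 @ Rp-1 @ Soff @ Sp @ S @ Sp-1
    return (t @ roff @ rp @ pre_rot @ lcl_rot @ pst_rot.inverted_safe() @
            rp.inverted_safe() @ soff @ sp @ s @ sp.inverted_safe())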
@ -4,7 +4,7 @@
|
|||||||
bl_info = {
|
bl_info = {
|
||||||
'name': 'glTF 2.0 format',
|
'name': 'glTF 2.0 format',
|
||||||
'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
|
'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
|
||||||
"version": (3, 6, 14),
|
"version": (3, 6, 15),
|
||||||
'blender': (3, 5, 0),
|
'blender': (3, 5, 0),
|
||||||
'location': 'File > Import-Export',
|
'location': 'File > Import-Export',
|
||||||
'description': 'Import-Export as glTF 2.0',
|
'description': 'Import-Export as glTF 2.0',
|
||||||
@ -450,24 +450,26 @@ class ExportGLTF2_Base(ConvertGLTF2_Base):
|
|||||||
|
|
||||||
export_hierarchy_flatten_bones: BoolProperty(
|
export_hierarchy_flatten_bones: BoolProperty(
|
||||||
name='Flatten Bone Hierarchy',
|
name='Flatten Bone Hierarchy',
|
||||||
description='Flatten Bone Hierarchy. Usefull in case of non decomposable TRS matrix',
|
description='Flatten Bone Hierarchy. Useful in case of non decomposable transformation matrix',
|
||||||
default=False
|
default=False
|
||||||
)
|
)
|
||||||
|
|
||||||
export_optimize_animation_size: BoolProperty(
|
export_optimize_animation_size: BoolProperty(
|
||||||
name='Optimize Animation Size',
|
name='Optimize Animation Size',
|
||||||
description=(
|
description=(
|
||||||
"Reduce exported file size by removing duplicate keyframes "
|
"Reduce exported file size by removing duplicate keyframes"
|
||||||
"(can cause problems with stepped animation)"
|
|
||||||
),
|
),
|
||||||
default=True
|
default=True
|
||||||
)
|
)
|
||||||
|
|
||||||
export_optimize_animation_keep_anim_armature: BoolProperty(
|
export_optimize_animation_keep_anim_armature: BoolProperty(
|
||||||
name='Force keeping channel for armature / bones',
|
name='Force keeping channels for bones',
|
||||||
description=(
|
description=(
|
||||||
"if all keyframes are identical in a rig "
|
"if all keyframes are identical in a rig, "
|
||||||
"force keeping the minimal animation"
|
"force keeping the minimal animation. "
|
||||||
|
"When off, all possible channels for "
|
||||||
|
"the bones will be exported, even if empty "
|
||||||
|
"(minimal animation, 2 keyframes)"
|
||||||
),
|
),
|
||||||
default=True
|
default=True
|
||||||
)
|
)
|
||||||
@ -475,7 +477,7 @@ class ExportGLTF2_Base(ConvertGLTF2_Base):
|
|||||||
export_optimize_animation_keep_anim_object: BoolProperty(
|
export_optimize_animation_keep_anim_object: BoolProperty(
|
||||||
name='Force keeping channel for objects',
|
name='Force keeping channel for objects',
|
||||||
description=(
|
description=(
|
||||||
"if all keyframes are identical for object transformations "
|
"If all keyframes are identical for object transformations, "
|
||||||
"force keeping the minimal animation"
|
"force keeping the minimal animation"
|
||||||
),
|
),
|
||||||
default=False
|
default=False
|
||||||
@ -488,7 +490,7 @@ class ExportGLTF2_Base(ConvertGLTF2_Base):
|
|||||||
('CROP', 'Crop',
|
('CROP', 'Crop',
|
||||||
'Keep only frames above frame 0'),
|
'Keep only frames above frame 0'),
|
||||||
),
|
),
|
||||||
description='Negative Frames are slided or cropped',
|
description='Negative Frames are slid or cropped',
|
||||||
default='SLIDE'
|
default='SLIDE'
|
||||||
)
|
)
|
||||||
|
|
||||||
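For export_negative_frame above, the SLIDE / CROP enum only names the two strategies for actions whose keyframes start before frame 0: SLIDE shifts everything so the first keyframe lands on frame 0, while CROP simply drops the negative frames. A tiny illustration with a made-up frame list:

frames = [-10, 0, 25]                      # first keyframe on a negative frame
slide = [f - min(frames) for f in frames]  # SLIDE -> [0, 10, 35]
crop = [f for f in frames if f >= 0]       # CROP  -> [0, 25]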
@ -496,7 +498,7 @@ class ExportGLTF2_Base(ConvertGLTF2_Base):
|
|||||||
name='Set all glTF Animation starting at 0',
|
name='Set all glTF Animation starting at 0',
|
||||||
description=(
|
description=(
|
||||||
"Set all glTF animation starting at 0.0s. "
|
"Set all glTF animation starting at 0.0s. "
|
||||||
"Can be usefull for looping animations"
|
"Can be useful for looping animations"
|
||||||
),
|
),
|
||||||
default=False
|
default=False
|
||||||
)
|
)
|
||||||
@ -505,7 +507,7 @@ class ExportGLTF2_Base(ConvertGLTF2_Base):
|
|||||||
name='Bake All Objects Animations',
|
name='Bake All Objects Animations',
|
||||||
description=(
|
description=(
|
||||||
"Force exporting animation on every objects. "
|
"Force exporting animation on every objects. "
|
||||||
"Can be usefull when using constraints or driver. "
|
"Can be useful when using constraints or driver. "
|
||||||
"Also useful when exporting only selection"
|
"Also useful when exporting only selection"
|
||||||
),
|
),
|
||||||
default=False
|
default=False
|
||||||
@ -1225,11 +1227,38 @@ class GLTF_PT_export_animation(bpy.types.Panel):
|
|||||||
row.active = operator.export_morph is True
|
row.active = operator.export_morph is True
|
||||||
row.prop(operator, 'export_morph_animation')
|
row.prop(operator, 'export_morph_animation')
|
||||||
row = layout.row()
|
row = layout.row()
|
||||||
row.active = operator.export_force_sampling
|
row.active = operator.export_force_sampling and operator.export_animation_mode in ['ACTIONS', 'ACTIVE_ACTIONS']
|
||||||
row.prop(operator, 'export_bake_animation')
|
row.prop(operator, 'export_bake_animation')
|
||||||
if operator.export_animation_mode == "SCENE":
|
if operator.export_animation_mode == "SCENE":
|
||||||
layout.prop(operator, 'export_anim_scene_split_object')
|
layout.prop(operator, 'export_anim_scene_split_object')
|
||||||
|
|
||||||
|
class GLTF_PT_export_animation_notes(bpy.types.Panel):
|
||||||
|
bl_space_type = 'FILE_BROWSER'
|
||||||
|
bl_region_type = 'TOOL_PROPS'
|
||||||
|
bl_label = "Notes"
|
||||||
|
bl_parent_id = "GLTF_PT_export_animation"
|
||||||
|
bl_options = {'DEFAULT_CLOSED'}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
sfile = context.space_data
|
||||||
|
operator = sfile.active_operator
|
||||||
|
|
||||||
|
return operator.bl_idname == "EXPORT_SCENE_OT_gltf" and \
|
||||||
|
operator.export_animation_mode in ["NLA_TRACKS", "SCENE"]
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
operator = context.space_data.active_operator
|
||||||
|
layout = self.layout
|
||||||
|
if operator.export_animation_mode == "SCENE":
|
||||||
|
layout.label(text="Scene mode uses full bake mode:")
|
||||||
|
layout.label(text="- sampling is active")
|
||||||
|
layout.label(text="- baking all objects is active")
|
||||||
|
layout.label(text="- Using scene frame range")
|
||||||
|
elif operator.export_animation_mode == "NLA_TRACKS":
|
||||||
|
layout.label(text="Track mode uses full bake mode:")
|
||||||
|
layout.label(text="- sampling is active")
|
||||||
|
layout.label(text="- baking all objects is active")
|
||||||
|
|
||||||
class GLTF_PT_export_animation_ranges(bpy.types.Panel):
|
class GLTF_PT_export_animation_ranges(bpy.types.Panel):
|
||||||
bl_space_type = 'FILE_BROWSER'
|
bl_space_type = 'FILE_BROWSER'
|
||||||
@ -1256,8 +1285,12 @@ class GLTF_PT_export_animation_ranges(bpy.types.Panel):
|
|||||||
layout.active = operator.export_animations
|
layout.active = operator.export_animations
|
||||||
|
|
||||||
layout.prop(operator, 'export_current_frame')
|
layout.prop(operator, 'export_current_frame')
|
||||||
layout.prop(operator, 'export_frame_range')
|
row = layout.row()
|
||||||
|
row.active = operator.export_animation_mode in ['ACTIONS', 'ACTIVE_ACTIONS', 'NLA_TRACKS']
|
||||||
|
row.prop(operator, 'export_frame_range')
|
||||||
layout.prop(operator, 'export_anim_slide_to_zero')
|
layout.prop(operator, 'export_anim_slide_to_zero')
|
||||||
|
row = layout.row()
|
||||||
|
row.active = operator.export_animation_mode in ['ACTIONS', 'ACTIVE_ACTIONS', 'NLA_TRACKS']
|
||||||
layout.prop(operator, 'export_negative_frame')
|
layout.prop(operator, 'export_negative_frame')
|
||||||
|
|
||||||
class GLTF_PT_export_animation_armature(bpy.types.Panel):
|
class GLTF_PT_export_animation_armature(bpy.types.Panel):
|
||||||
@ -1304,7 +1337,7 @@ class GLTF_PT_export_animation_sampling(bpy.types.Panel):
|
|||||||
def draw_header(self, context):
|
def draw_header(self, context):
|
||||||
sfile = context.space_data
|
sfile = context.space_data
|
||||||
operator = sfile.active_operator
|
operator = sfile.active_operator
|
||||||
self.layout.active = operator.export_animations
|
self.layout.active = operator.export_animations and operator.export_animation_mode in ['ACTIONS', 'ACTIVE_ACTIONS']
|
||||||
self.layout.prop(operator, "export_force_sampling", text="")
|
self.layout.prop(operator, "export_force_sampling", text="")
|
||||||
|
|
||||||
def draw(self, context):
|
def draw(self, context):
|
||||||
@ -1347,11 +1380,9 @@ class GLTF_PT_export_animation_optimize(bpy.types.Panel):
|
|||||||
layout.prop(operator, 'export_optimize_animation_size')
|
layout.prop(operator, 'export_optimize_animation_size')
|
||||||
|
|
||||||
row = layout.row()
|
row = layout.row()
|
||||||
row.active = operator.export_optimize_animation_size
|
|
||||||
row.prop(operator, 'export_optimize_animation_keep_anim_armature')
|
row.prop(operator, 'export_optimize_animation_keep_anim_armature')
|
||||||
|
|
||||||
row = layout.row()
|
row = layout.row()
|
||||||
row.active = operator.export_optimize_animation_size
|
|
||||||
row.prop(operator, 'export_optimize_animation_keep_anim_object')
|
row.prop(operator, 'export_optimize_animation_keep_anim_object')
|
||||||
|
|
||||||
|
|
||||||
@ -1641,6 +1672,7 @@ classes = (
|
|||||||
GLTF_PT_export_data_lighting,
|
GLTF_PT_export_data_lighting,
|
||||||
GLTF_PT_export_data_compression,
|
GLTF_PT_export_data_compression,
|
||||||
GLTF_PT_export_animation,
|
GLTF_PT_export_animation,
|
||||||
|
GLTF_PT_export_animation_notes,
|
||||||
GLTF_PT_export_animation_ranges,
|
GLTF_PT_export_animation_ranges,
|
||||||
GLTF_PT_export_animation_armature,
|
GLTF_PT_export_animation_armature,
|
||||||
GLTF_PT_export_animation_sampling,
|
GLTF_PT_export_animation_sampling,
|
||||||
|
@ -135,3 +135,10 @@ def get_attribute_type(component_type, data_type):
|
|||||||
}[component_type]
|
}[component_type]
|
||||||
else:
|
else:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
def get_gltf_interpolation(interpolation):
|
||||||
|
return {
|
||||||
|
"BEZIER": "CUBICSPLINE",
|
||||||
|
"LINEAR": "LINEAR",
|
||||||
|
"CONSTANT": "STEP"
|
||||||
|
}.get(interpolation, "LINEAR")
|
||||||
|
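The new get_gltf_interpolation() helper reduces Blender's keyframe interpolation modes to the three glTF sampler modes, with every unsupported easing falling back to LINEAR. A quick check of that behaviour, assuming the add-on's usual package path inside Blender:

from io_scene_gltf2.blender.com.gltf2_blender_conversion import get_gltf_interpolation

assert get_gltf_interpolation("BEZIER") == "CUBICSPLINE"
assert get_gltf_interpolation("CONSTANT") == "STEP"
assert get_gltf_interpolation("BOUNCE") == "LINEAR"  # unsupported easing -> LINEAR fallback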
@ -21,10 +21,13 @@ class NODE_OT_GLTF_SETTINGS(bpy.types.Operator):
|
|||||||
@classmethod
|
@classmethod
|
||||||
def poll(cls, context):
|
def poll(cls, context):
|
||||||
space = context.space_data
|
space = context.space_data
|
||||||
return space.type == "NODE_EDITOR" \
|
return (
|
||||||
and context.object and context.object.active_material \
|
space is not None
|
||||||
and context.object.active_material.use_nodes is True \
|
and space.type == "NODE_EDITOR"
|
||||||
|
and context.object and context.object.active_material
|
||||||
|
and context.object.active_material.use_nodes is True
|
||||||
and bpy.context.preferences.addons['io_scene_gltf2'].preferences.settings_node_ui is True
|
and bpy.context.preferences.addons['io_scene_gltf2'].preferences.settings_node_ui is True
|
||||||
|
)
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self, context):
|
||||||
gltf_settings_node_name = get_gltf_node_name()
|
gltf_settings_node_name = get_gltf_node_name()
|
||||||
|
@ -143,7 +143,7 @@ def get_channel_groups(obj_uuid: str, blender_action: bpy.types.Action, export_s
|
|||||||
# Check if the property can be exported without sampling
|
# Check if the property can be exported without sampling
|
||||||
new_properties = {}
|
new_properties = {}
|
||||||
for prop in target_data['properties'].keys():
|
for prop in target_data['properties'].keys():
|
||||||
if __needs_baking(obj_uuid, target_data['properties'][prop], export_settings) is True:
|
if needs_baking(obj_uuid, target_data['properties'][prop], export_settings) is True:
|
||||||
to_be_sampled.append((obj_uuid, target_data['type'], get_channel_from_target(get_target(prop)), target_data['bone'])) # bone can be None if not a bone :)
|
to_be_sampled.append((obj_uuid, target_data['type'], get_channel_from_target(get_target(prop)), target_data['bone'])) # bone can be None if not a bone :)
|
||||||
else:
|
else:
|
||||||
new_properties[prop] = target_data['properties'][prop]
|
new_properties[prop] = target_data['properties'][prop]
|
||||||
@ -262,7 +262,7 @@ def __gather_sampler(obj_uuid: str,
|
|||||||
|
|
||||||
return gather_animation_fcurves_sampler(obj_uuid, channel_group, bone, custom_range, export_settings)
|
return gather_animation_fcurves_sampler(obj_uuid, channel_group, bone, custom_range, export_settings)
|
||||||
|
|
||||||
def __needs_baking(obj_uuid: str,
|
def needs_baking(obj_uuid: str,
|
||||||
channels: typing.Tuple[bpy.types.FCurve],
|
channels: typing.Tuple[bpy.types.FCurve],
|
||||||
export_settings
|
export_settings
|
||||||
) -> bool:
|
) -> bool:
|
||||||
|
@ -6,6 +6,7 @@ import typing
|
|||||||
import mathutils
|
import mathutils
|
||||||
from .....io.com import gltf2_io
|
from .....io.com import gltf2_io
|
||||||
from .....io.com import gltf2_io_constants
|
from .....io.com import gltf2_io_constants
|
||||||
|
from .....blender.com.gltf2_blender_conversion import get_gltf_interpolation
|
||||||
from .....io.exp import gltf2_io_binary_data
|
from .....io.exp import gltf2_io_binary_data
|
||||||
from .....io.exp.gltf2_io_user_extensions import export_user_extensions
|
from .....io.exp.gltf2_io_user_extensions import export_user_extensions
|
||||||
from ....com.gltf2_blender_data_path import get_target_property_name
|
from ....com.gltf2_blender_data_path import get_target_property_name
|
||||||
@ -205,8 +206,4 @@ def __gather_interpolation(
|
|||||||
blender_keyframe = [c for c in channel_group if c is not None][0].keyframe_points[0]
|
blender_keyframe = [c for c in channel_group if c is not None][0].keyframe_points[0]
|
||||||
|
|
||||||
# Select the interpolation method.
|
# Select the interpolation method.
|
||||||
return {
|
return get_gltf_interpolation(blender_keyframe.interpolation)
|
||||||
"BEZIER": "CUBICSPLINE",
|
|
||||||
"LINEAR": "LINEAR",
|
|
||||||
"CONSTANT": "STEP"
|
|
||||||
}[blender_keyframe.interpolation]
|
|
||||||
|
@ -6,6 +6,7 @@ import typing
|
|||||||
from ....io.com import gltf2_io
|
from ....io.com import gltf2_io
|
||||||
from ....io.com.gltf2_io_debug import print_console
|
from ....io.com.gltf2_io_debug import print_console
|
||||||
from ....io.exp.gltf2_io_user_extensions import export_user_extensions
|
from ....io.exp.gltf2_io_user_extensions import export_user_extensions
|
||||||
|
from ....blender.com.gltf2_blender_conversion import get_gltf_interpolation
|
||||||
from ...com.gltf2_blender_data_path import is_bone_anim_channel
|
from ...com.gltf2_blender_data_path import is_bone_anim_channel
|
||||||
from ...com.gltf2_blender_extras import generate_extras
|
from ...com.gltf2_blender_extras import generate_extras
|
||||||
from ..gltf2_blender_gather_cache import cached
|
from ..gltf2_blender_gather_cache import cached
|
||||||
@ -69,9 +70,18 @@ def prepare_actions_range(export_settings):
|
|||||||
blender_actions = __get_blender_actions(obj_uuid, export_settings)
|
blender_actions = __get_blender_actions(obj_uuid, export_settings)
|
||||||
for blender_action, track, type_ in blender_actions:
|
for blender_action, track, type_ in blender_actions:
|
||||||
|
|
||||||
|
# Blender issue 107030: action.frame_range can be wrong for single-keyframe animations, see the workaround below
|
||||||
start_frame = int(blender_action.frame_range[0])
|
start_frame = int(blender_action.frame_range[0])
|
||||||
end_frame = int(blender_action.frame_range[1])
|
end_frame = int(blender_action.frame_range[1])
|
||||||
|
|
||||||
|
if end_frame - start_frame == 1:
|
||||||
|
# To work around Blender bug 107030, check manually
|
||||||
|
try: # Avoid crash in case of strange/buggy fcurves
|
||||||
|
start_frame = int(min([c.range()[0] for c in blender_action.fcurves]))
|
||||||
|
end_frame = int(max([c.range()[1] for c in blender_action.fcurves]))
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
export_settings['ranges'][obj_uuid][blender_action.name] = {}
|
export_settings['ranges'][obj_uuid][blender_action.name] = {}
|
||||||
|
|
||||||
# If some negative frame and crop -> set start at 0
|
# If some negative frame and crop -> set start at 0
|
||||||
@ -277,9 +287,9 @@ def gather_action_animations( obj_uuid: int,
|
|||||||
animation, to_be_sampled = gather_animation_fcurves(obj_uuid, blender_action, export_settings)
|
animation, to_be_sampled = gather_animation_fcurves(obj_uuid, blender_action, export_settings)
|
||||||
for (obj_uuid, type_, prop, bone) in to_be_sampled:
|
for (obj_uuid, type_, prop, bone) in to_be_sampled:
|
||||||
if type_ == "BONE":
|
if type_ == "BONE":
|
||||||
channel = gather_sampled_bone_channel(obj_uuid, bone, prop, blender_action.name, True, export_settings)
|
channel = gather_sampled_bone_channel(obj_uuid, bone, prop, blender_action.name, True, get_gltf_interpolation("LINEAR"), export_settings)
|
||||||
elif type_ == "OBJECT":
|
elif type_ == "OBJECT":
|
||||||
channel = gather_sampled_object_channel(obj_uuid, prop, blender_action.name, True, export_settings)
|
channel = gather_sampled_object_channel(obj_uuid, prop, blender_action.name, True, get_gltf_interpolation("LINEAR"), export_settings)
|
||||||
elif type_ == "SK":
|
elif type_ == "SK":
|
||||||
channel = gather_sampled_sk_channel(obj_uuid, blender_action.name, export_settings)
|
channel = gather_sampled_sk_channel(obj_uuid, blender_action.name, export_settings)
|
||||||
else:
|
else:
|
||||||
|
@ -5,8 +5,10 @@ import bpy
|
|||||||
import typing
|
import typing
|
||||||
from ......io.com import gltf2_io
|
from ......io.com import gltf2_io
|
||||||
from ......io.exp.gltf2_io_user_extensions import export_user_extensions
|
from ......io.exp.gltf2_io_user_extensions import export_user_extensions
|
||||||
|
from ......blender.com.gltf2_blender_conversion import get_gltf_interpolation
|
||||||
from .....com.gltf2_blender_conversion import get_target, get_channel_from_target
|
from .....com.gltf2_blender_conversion import get_target, get_channel_from_target
|
||||||
from ...fcurves.gltf2_blender_gather_fcurves_channels import get_channel_groups
|
from ...fcurves.gltf2_blender_gather_fcurves_channels import get_channel_groups
|
||||||
|
from ...fcurves.gltf2_blender_gather_fcurves_channels import needs_baking
|
||||||
from ...gltf2_blender_gather_drivers import get_sk_drivers
|
from ...gltf2_blender_gather_drivers import get_sk_drivers
|
||||||
from ..object.gltf2_blender_gather_object_channels import gather_sampled_object_channel
|
from ..object.gltf2_blender_gather_object_channels import gather_sampled_object_channel
|
||||||
from ..shapekeys.gltf2_blender_gather_sk_channels import gather_sampled_sk_channel
|
from ..shapekeys.gltf2_blender_gather_sk_channels import gather_sampled_sk_channel
|
||||||
@ -22,16 +24,27 @@ def gather_armature_sampled_channels(armature_uuid, blender_action_name, export_
|
|||||||
bones_to_be_animated = [export_settings["vtree"].nodes[b].blender_bone.name for b in bones_uuid]
|
bones_to_be_animated = [export_settings["vtree"].nodes[b].blender_bone.name for b in bones_uuid]
|
||||||
|
|
||||||
# List of really animated bones is needed for optimization decision
|
# List of really animated bones is needed for optimization decision
|
||||||
list_of_animated_bone_channels = []
|
list_of_animated_bone_channels = {}
|
||||||
if armature_uuid != blender_action_name and blender_action_name in bpy.data.actions:
|
if armature_uuid != blender_action_name and blender_action_name in bpy.data.actions:
|
||||||
# Not bake situation
|
# Not bake situation
|
||||||
channels_animated, to_be_sampled = get_channel_groups(armature_uuid, bpy.data.actions[blender_action_name], export_settings)
|
channels_animated, to_be_sampled = get_channel_groups(armature_uuid, bpy.data.actions[blender_action_name], export_settings)
|
||||||
for chan in [chan for chan in channels_animated.values() if chan['bone'] is not None]:
|
for chan in [chan for chan in channels_animated.values() if chan['bone'] is not None]:
|
||||||
for prop in chan['properties'].keys():
|
for prop in chan['properties'].keys():
|
||||||
list_of_animated_bone_channels.append((chan['bone'], get_channel_from_target(get_target(prop))))
|
list_of_animated_bone_channels[
|
||||||
|
(
|
||||||
|
chan['bone'],
|
||||||
|
get_channel_from_target(get_target(prop))
|
||||||
|
)
|
||||||
|
] = get_gltf_interpolation(chan['properties'][prop][0].keyframe_points[0].interpolation) # Could be exported without sampling : keep interpolation
|
||||||
|
|
||||||
for _, _, chan_prop, chan_bone in [chan for chan in to_be_sampled if chan[1] == "BONE"]:
|
for _, _, chan_prop, chan_bone in [chan for chan in to_be_sampled if chan[1] == "BONE"]:
|
||||||
list_of_animated_bone_channels.append((chan_bone, chan_prop))
|
list_of_animated_bone_channels[
|
||||||
|
(
|
||||||
|
chan_bone,
|
||||||
|
chan_prop,
|
||||||
|
)
|
||||||
|
] = get_gltf_interpolation("LINEAR") # if forced to be sampled, keep LINEAR interpolation
|
||||||
|
|
||||||
|
|
||||||
for bone in bones_to_be_animated:
|
for bone in bones_to_be_animated:
|
||||||
for p in ["location", "rotation_quaternion", "scale"]:
|
for p in ["location", "rotation_quaternion", "scale"]:
|
||||||
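The bookkeeping in gather_armature_sampled_channels() turns the list of animated (bone, property) pairs into a dict that also records which glTF interpolation each pair should keep, with LINEAR as the fallback for channels that end up fully baked. In miniature, with made-up values:

animated = {("Bone.001", "location"): "CUBICSPLINE"}  # really animated: keeps its own interpolation
bone, prop = "Bone.001", "rotation_quaternion"
interpolation = animated.get((bone, prop), "LINEAR")  # baked channel -> LINEAR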
@ -40,7 +53,8 @@ def gather_armature_sampled_channels(armature_uuid, blender_action_name, export_
|
|||||||
bone,
|
bone,
|
||||||
p,
|
p,
|
||||||
blender_action_name,
|
blender_action_name,
|
||||||
(bone, p) in list_of_animated_bone_channels,
|
(bone, p) in list_of_animated_bone_channels.keys(),
|
||||||
|
list_of_animated_bone_channels[(bone, p)] if (bone, p) in list_of_animated_bone_channels.keys() else get_gltf_interpolation("LINEAR"),
|
||||||
export_settings)
|
export_settings)
|
||||||
if channel is not None:
|
if channel is not None:
|
||||||
channels.append(channel)
|
channels.append(channel)
|
||||||
@ -48,15 +62,17 @@ def gather_armature_sampled_channels(armature_uuid, blender_action_name, export_
|
|||||||
# Retrieve animation on armature object itself, if any
|
# Retrieve animation on armature object itself, if any
|
||||||
# If armature is baked (no animation of armature), need to use all channels
|
# If armature is baked (no animation of armature), need to use all channels
|
||||||
if blender_action_name == armature_uuid or export_settings['gltf_animation_mode'] in ["SCENE", "NLA_TRACKS"]:
|
if blender_action_name == armature_uuid or export_settings['gltf_animation_mode'] in ["SCENE", "NLA_TRACKS"]:
|
||||||
armature_channels = ["location", "rotation_quaternion", "scale"]
|
armature_channels = []
|
||||||
else:
|
else:
|
||||||
armature_channels = __gather_armature_object_channel(bpy.data.actions[blender_action_name], export_settings)
|
armature_channels = __gather_armature_object_channel(armature_uuid, bpy.data.actions[blender_action_name], export_settings)
|
||||||
for channel in armature_channels:
|
|
||||||
|
for p in ["location", "rotation_quaternion", "scale"]:
|
||||||
armature_channel = gather_sampled_object_channel(
|
armature_channel = gather_sampled_object_channel(
|
||||||
armature_uuid,
|
armature_uuid,
|
||||||
channel,
|
p,
|
||||||
blender_action_name,
|
blender_action_name,
|
||||||
True, # channel is animated (because we detect it on __gather_armature_object_channel)
|
p in [a[0] for a in armature_channels],
|
||||||
|
[c[1] for c in armature_channels if c[0] == p][0] if p in [a[0] for a in armature_channels] else "LINEAR",
|
||||||
export_settings
|
export_settings
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -79,12 +95,13 @@ def gather_sampled_bone_channel(
|
|||||||
channel: str,
|
channel: str,
|
||||||
action_name: str,
|
action_name: str,
|
||||||
node_channel_is_animated: bool,
|
node_channel_is_animated: bool,
|
||||||
|
node_channel_interpolation: str,
|
||||||
export_settings
|
export_settings
|
||||||
):
|
):
|
||||||
|
|
||||||
__target= __gather_target(armature_uuid, bone, channel, export_settings)
|
__target= __gather_target(armature_uuid, bone, channel, export_settings)
|
||||||
if __target.path is not None:
|
if __target.path is not None:
|
||||||
sampler = __gather_sampler(armature_uuid, bone, channel, action_name, node_channel_is_animated, export_settings)
|
sampler = __gather_sampler(armature_uuid, bone, channel, action_name, node_channel_is_animated, node_channel_interpolation, export_settings)
|
||||||
|
|
||||||
if sampler is None:
|
if sampler is None:
|
||||||
# After check, no need to animate this node for this channel
|
# After check, no need to animate this node for this channel
|
||||||
@ -120,30 +137,61 @@ def __gather_target(armature_uuid: str,
|
|||||||
return gather_armature_sampled_channel_target(
|
return gather_armature_sampled_channel_target(
|
||||||
armature_uuid, bone, channel, export_settings)
|
armature_uuid, bone, channel, export_settings)
|
||||||
|
|
||||||
def __gather_sampler(armature_uuid, bone, channel, action_name, node_channel_is_animated, export_settings):
|
def __gather_sampler(armature_uuid, bone, channel, action_name, node_channel_is_animated, node_channel_interpolation, export_settings):
|
||||||
return gather_bone_sampled_animation_sampler(
|
return gather_bone_sampled_animation_sampler(
|
||||||
armature_uuid,
|
armature_uuid,
|
||||||
bone,
|
bone,
|
||||||
channel,
|
channel,
|
||||||
action_name,
|
action_name,
|
||||||
node_channel_is_animated,
|
node_channel_is_animated,
|
||||||
|
node_channel_interpolation,
|
||||||
export_settings
|
export_settings
|
||||||
)
|
)
|
||||||
|
|
||||||
def __gather_armature_object_channel(blender_action: str, export_settings):
|
def __gather_armature_object_channel(obj_uuid: str, blender_action, export_settings):
|
||||||
channels = []
|
channels = []
|
||||||
for p in ["location", "rotation_quaternion", "scale", "delta_location", "delta_scale", "delta_rotation_euler", "delta_rotation_quaternion"]:
|
|
||||||
if p in [f.data_path for f in blender_action.fcurves]:
|
channels_animated, to_be_sampled = get_channel_groups(obj_uuid, blender_action, export_settings)
|
||||||
channels.append(
|
# Remove all channel linked to bones, keep only directly object channels
|
||||||
|
channels_animated = [c for c in channels_animated.values() if c['type'] == "OBJECT"]
|
||||||
|
to_be_sampled = [c for c in to_be_sampled if c[1] == "OBJECT"]
|
||||||
|
|
||||||
|
original_channels = []
|
||||||
|
for c in channels_animated:
|
||||||
|
original_channels.extend([(prop, c['properties'][prop][0].keyframe_points[0].interpolation) for prop in c['properties'].keys()])
|
||||||
|
|
||||||
|
for c, inter in original_channels:
|
||||||
|
channels.append(
|
||||||
|
(
|
||||||
{
|
{
|
||||||
"location":"location",
|
"location":"location",
|
||||||
"rotation_quaternion": "rotation_quaternion",
|
"rotation_quaternion": "rotation_quaternion",
|
||||||
|
"rotation_euler": "rotation_quaternion",
|
||||||
"scale": "scale",
|
"scale": "scale",
|
||||||
"delta_location": "location",
|
"delta_location": "location",
|
||||||
"delta_scale": "scale",
|
"delta_scale": "scale",
|
||||||
"delta_rotation_euler": "rotation_quaternion",
|
"delta_rotation_euler": "rotation_quaternion",
|
||||||
"delta_rotation_quaternion": "rotation_quaternion"
|
"delta_rotation_quaternion": "rotation_quaternion"
|
||||||
}.get(p)
|
}.get(c),
|
||||||
|
get_gltf_interpolation(inter)
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
return list(set(channels)) #remove doubles
|
for c in to_be_sampled:
|
||||||
|
channels.append(
|
||||||
|
(
|
||||||
|
{
|
||||||
|
"location":"location",
|
||||||
|
"rotation_quaternion": "rotation_quaternion",
|
||||||
|
"rotation_euler": "rotation_quaternion",
|
||||||
|
"scale": "scale",
|
||||||
|
"delta_location": "location",
|
||||||
|
"delta_scale": "scale",
|
||||||
|
"delta_rotation_euler": "rotation_quaternion",
|
||||||
|
"delta_rotation_quaternion": "rotation_quaternion"
|
||||||
|
}.get(c[2]),
|
||||||
|
get_gltf_interpolation("LINEAR") # Forced to be sampled, so use LINEAR
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return channels
|
||||||
|
@ -53,25 +53,38 @@ def gather_bone_sampled_keyframes(
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
if not export_settings['gltf_optimize_animation']:
|
if not export_settings['gltf_optimize_animation']:
|
||||||
return keyframes
|
# For bones, if all values are the same, keep keyframes only if the channel really changes or if the user chose to keep the data
|
||||||
|
if node_channel_is_animated is True:
|
||||||
# For armatures
|
return keyframes # Always keeping
|
||||||
# Check if all values are the same
|
|
||||||
# In that case, if there is no real keyframe on this channel for this given bone,
|
|
||||||
# We can ignore these keyframes
|
|
||||||
# if there are some fcurve, we can keep only 2 keyframes, first and last
|
|
||||||
cst = fcurve_is_constant(keyframes)
|
|
||||||
|
|
||||||
if node_channel_is_animated is True: # fcurve on this bone for this property
|
|
||||||
# Keep animation, but keep only 2 keyframes if data are not changing
|
|
||||||
return [keyframes[0], keyframes[-1]] if cst is True and len(keyframes) >= 2 else keyframes
|
|
||||||
else: # bone is not animated (no fcurve)
|
|
||||||
# Not keeping if not changing property if user decided to not keep
|
|
||||||
if export_settings['gltf_optimize_animation_keep_armature'] is False:
|
|
||||||
return None if cst is True else keyframes
|
|
||||||
else:
|
else:
|
||||||
# Keep at least 2 keyframes if data are not changing
|
# baked bones
|
||||||
return [keyframes[0], keyframes[-1]] if cst is True and len(keyframes) >= 2 else keyframes
|
if export_settings['gltf_optimize_animation_keep_armature'] is False:
|
||||||
|
# Not keeping if not changing property
|
||||||
|
cst = fcurve_is_constant(keyframes)
|
||||||
|
return None if cst is True else keyframes
|
||||||
|
else:
|
||||||
|
# Keep the data, as requested by the user. All samples are kept, since the user chose not to optimize
|
||||||
|
return keyframes
|
||||||
|
|
||||||
|
else:
|
||||||
|
|
||||||
|
# For armatures
|
||||||
|
# Check if all values are the same
|
||||||
|
# In that case, if there is no real keyframe on this channel for this given bone,
|
||||||
|
# We can ignore these keyframes
|
||||||
|
# if there are some fcurve, we can keep only 2 keyframes, first and last
|
||||||
|
cst = fcurve_is_constant(keyframes)
|
||||||
|
|
||||||
|
if node_channel_is_animated is True: # fcurve on this bone for this property
|
||||||
|
# Keep animation, but keep only 2 keyframes if data are not changing
|
||||||
|
return [keyframes[0], keyframes[-1]] if cst is True and len(keyframes) >= 2 else keyframes
|
||||||
|
else: # bone is not animated (no fcurve)
|
||||||
|
# Not keeping if not changing property if user decided to not keep
|
||||||
|
if export_settings['gltf_optimize_animation_keep_armature'] is False:
|
||||||
|
return None if cst is True else keyframes
|
||||||
|
else:
|
||||||
|
# Keep at least 2 keyframes if data are not changing
|
||||||
|
return [keyframes[0], keyframes[-1]] if cst is True and len(keyframes) >= 2 else keyframes
|
||||||
|
|
||||||
def fcurve_is_constant(keyframes):
|
def fcurve_is_constant(keyframes):
|
||||||
return all([j < 0.0001 for j in np.ptp([[k.value[i] for i in range(len(keyframes[0].value))] for k in keyframes], axis=0)])
|
return all([j < 0.0001 for j in np.ptp([[k.value[i] for i in range(len(keyframes[0].value))] for k in keyframes], axis=0)])
|
||||||
|
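fcurve_is_constant() above decides whether a sampled channel is effectively static: it takes the per-component peak-to-peak spread (np.ptp) over all keyframes and calls the channel constant when every spread stays below 1e-4. A standalone sketch with plain lists standing in for the Keyframe objects:

import numpy as np

def values_are_constant(values, eps=0.0001):
    # values: one row per keyframe, one column per component (x, y, z, ...)
    return bool(all(spread < eps for spread in np.ptp(np.asarray(values), axis=0)))

print(values_are_constant([[1.0, 2.0, 3.0], [1.0, 2.0, 3.0]]))  # True: keep only 2 keyframes (or drop)
print(values_are_constant([[1.0, 2.0, 3.0], [1.0, 2.5, 3.0]]))  # False: keep everything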
@ -21,6 +21,7 @@ def gather_bone_sampled_animation_sampler(
|
|||||||
channel: str,
|
channel: str,
|
||||||
action_name: str,
|
action_name: str,
|
||||||
node_channel_is_animated: bool,
|
node_channel_is_animated: bool,
|
||||||
|
node_channel_interpolation: str,
|
||||||
export_settings
|
export_settings
|
||||||
):
|
):
|
||||||
|
|
||||||
@ -45,7 +46,7 @@ def gather_bone_sampled_animation_sampler(
|
|||||||
extensions=None,
|
extensions=None,
|
||||||
extras=None,
|
extras=None,
|
||||||
input=input,
|
input=input,
|
||||||
interpolation=__gather_interpolation(export_settings),
|
interpolation=__gather_interpolation(node_channel_is_animated, node_channel_interpolation, keyframes, export_settings),
|
||||||
output=output
|
output=output
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -194,6 +195,25 @@ def __convert_keyframes(armature_uuid, bone_name, channel, keyframes, action_nam
|
|||||||
|
|
||||||
return input, output
|
return input, output
|
||||||
|
|
||||||
def __gather_interpolation(export_settings):
|
def __gather_interpolation(node_channel_is_animated, node_channel_interpolation, keyframes, export_settings):
|
||||||
# TODO: check if the bone was animated with CONSTANT
|
|
||||||
return 'LINEAR'
|
if len(keyframes) > 2:
|
||||||
|
# keep STEP as STEP, others become LINEAR
|
||||||
|
return {
|
||||||
|
"STEP": "STEP"
|
||||||
|
}.get(node_channel_interpolation, "LINEAR")
|
||||||
|
elif len(keyframes) == 1:
|
||||||
|
if node_channel_is_animated is False:
|
||||||
|
return "STEP"
|
||||||
|
else:
|
||||||
|
return node_channel_interpolation
|
||||||
|
else:
|
||||||
|
# If we only have 2 keyframes, set interpolation to STEP if baked
|
||||||
|
if node_channel_is_animated is False:
|
||||||
|
# baked => We have first and last keyframe
|
||||||
|
return "STEP"
|
||||||
|
else:
|
||||||
|
if keyframes[0].value == keyframes[1].value:
|
||||||
|
return "STEP"
|
||||||
|
else:
|
||||||
|
return "LINEAR"
|
||||||
|
@ -5,6 +5,7 @@ import bpy
|
|||||||
import typing
|
import typing
|
||||||
from ......io.com import gltf2_io
|
from ......io.com import gltf2_io
|
||||||
from ......io.exp.gltf2_io_user_extensions import export_user_extensions
|
from ......io.exp.gltf2_io_user_extensions import export_user_extensions
|
||||||
|
from ......blender.com.gltf2_blender_conversion import get_gltf_interpolation
|
||||||
from .....com.gltf2_blender_conversion import get_target, get_channel_from_target
|
from .....com.gltf2_blender_conversion import get_target, get_channel_from_target
|
||||||
from ....gltf2_blender_gather_cache import cached
|
from ....gltf2_blender_gather_cache import cached
|
||||||
from ...fcurves.gltf2_blender_gather_fcurves_channels import get_channel_groups
|
from ...fcurves.gltf2_blender_gather_fcurves_channels import get_channel_groups
|
||||||
@ -14,23 +15,26 @@ from .gltf2_blender_gather_object_channel_target import gather_object_sampled_ch
|
|||||||
def gather_object_sampled_channels(object_uuid: str, blender_action_name: str, export_settings) -> typing.List[gltf2_io.AnimationChannel]:
|
def gather_object_sampled_channels(object_uuid: str, blender_action_name: str, export_settings) -> typing.List[gltf2_io.AnimationChannel]:
|
||||||
channels = []
|
channels = []
|
||||||
|
|
||||||
list_of_animated_channels = []
|
list_of_animated_channels = {}
|
||||||
if object_uuid != blender_action_name and blender_action_name in bpy.data.actions:
|
if object_uuid != blender_action_name and blender_action_name in bpy.data.actions:
|
||||||
# Not bake situation
|
# Not bake situation
|
||||||
channels_animated, to_be_sampled = get_channel_groups(object_uuid, bpy.data.actions[blender_action_name], export_settings)
|
channels_animated, to_be_sampled = get_channel_groups(object_uuid, bpy.data.actions[blender_action_name], export_settings)
|
||||||
for chan in [chan for chan in channels_animated.values() if chan['bone'] is None]:
|
for chan in [chan for chan in channels_animated.values() if chan['bone'] is None]:
|
||||||
for prop in chan['properties'].keys():
|
for prop in chan['properties'].keys():
|
||||||
list_of_animated_channels.append(get_channel_from_target(get_target(prop)))
|
list_of_animated_channels[
|
||||||
|
get_channel_from_target(get_target(prop))
|
||||||
|
] = get_gltf_interpolation(chan['properties'][prop][0].keyframe_points[0].interpolation) # Could be exported without sampling : keep interpolation
|
||||||
|
|
||||||
for _, _, chan_prop, _ in [chan for chan in to_be_sampled if chan[1] == "OBJECT"]:
|
for _, _, chan_prop, _ in [chan for chan in to_be_sampled if chan[1] == "OBJECT"]:
|
||||||
list_of_animated_channels.append(chan_prop)
|
list_of_animated_channels[chan_prop] = get_gltf_interpolation("LINEAR") # if forced to be sampled, keep LINEAR interpolation
|
||||||
|
|
||||||
for p in ["location", "rotation_quaternion", "scale"]:
|
for p in ["location", "rotation_quaternion", "scale"]:
|
||||||
channel = gather_sampled_object_channel(
|
channel = gather_sampled_object_channel(
|
||||||
object_uuid,
|
object_uuid,
|
||||||
p,
|
p,
|
||||||
blender_action_name,
|
blender_action_name,
|
||||||
p in list_of_animated_channels,
|
p in list_of_animated_channels.keys(),
|
||||||
|
list_of_animated_channels[p] if p in list_of_animated_channels.keys() else get_gltf_interpolation("LINEAR"),
|
||||||
export_settings
|
export_settings
|
||||||
)
|
)
|
||||||
if channel is not None:
|
if channel is not None:
|
||||||
@ -48,12 +52,13 @@ def gather_sampled_object_channel(
|
|||||||
channel: str,
|
channel: str,
|
||||||
action_name: str,
|
action_name: str,
|
||||||
node_channel_is_animated: bool,
|
node_channel_is_animated: bool,
|
||||||
|
node_channel_interpolation: str,
|
||||||
export_settings
|
export_settings
|
||||||
):
|
):
|
||||||
|
|
||||||
__target= __gather_target(obj_uuid, channel, export_settings)
|
__target= __gather_target(obj_uuid, channel, export_settings)
|
||||||
if __target.path is not None:
|
if __target.path is not None:
|
||||||
sampler = __gather_sampler(obj_uuid, channel, action_name, node_channel_is_animated, export_settings)
|
sampler = __gather_sampler(obj_uuid, channel, action_name, node_channel_is_animated, node_channel_interpolation, export_settings)
|
||||||
|
|
||||||
if sampler is None:
|
if sampler is None:
|
||||||
# After check, no need to animate this node for this channel
|
# After check, no need to animate this node for this channel
|
||||||
@ -92,6 +97,7 @@ def __gather_sampler(
|
|||||||
channel: str,
|
channel: str,
|
||||||
action_name: str,
|
action_name: str,
|
||||||
node_channel_is_animated: bool,
|
node_channel_is_animated: bool,
|
||||||
|
node_channel_interpolation: str,
|
||||||
export_settings):
|
export_settings):
|
||||||
|
|
||||||
|
|
||||||
@ -100,5 +106,6 @@ def __gather_sampler(
|
|||||||
channel,
|
channel,
|
||||||
action_name,
|
action_name,
|
||||||
node_channel_is_animated,
|
node_channel_is_animated,
|
||||||
|
node_channel_interpolation,
|
||||||
export_settings
|
export_settings
|
||||||
)
|
)
|
||||||
|
@ -2,6 +2,7 @@
|
|||||||
# Copyright 2018-2022 The glTF-Blender-IO authors.
|
# Copyright 2018-2022 The glTF-Blender-IO authors.
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
|
from ....gltf2_blender_gather_tree import VExportNode
|
||||||
from ....gltf2_blender_gather_cache import cached
|
from ....gltf2_blender_gather_cache import cached
|
||||||
from ...gltf2_blender_gather_keyframes import Keyframe
|
from ...gltf2_blender_gather_keyframes import Keyframe
|
||||||
from ..gltf2_blender_gather_animation_sampling_cache import get_cache_data
|
from ..gltf2_blender_gather_animation_sampling_cache import get_cache_data
|
||||||
@ -51,20 +52,33 @@ def gather_object_sampled_keyframes(
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
if not export_settings['gltf_optimize_animation']:
|
if not export_settings['gltf_optimize_animation']:
|
||||||
return keyframes
|
# For objects, if all values are the same, keep keyframes only if the channel really changes or if the user chose to keep the data
|
||||||
|
if node_channel_is_animated is True:
|
||||||
# For objects, if all values are the same, we keep only first and last
|
return keyframes # Always keeping
|
||||||
cst = fcurve_is_constant(keyframes)
|
|
||||||
if node_channel_is_animated is True:
|
|
||||||
return [keyframes[0], keyframes[-1]] if cst is True and len(keyframes) >= 2 else keyframes
|
|
||||||
else:
|
|
||||||
# baked object
|
|
||||||
# Not keeping if not changing property if user decided to not keep
|
|
||||||
if export_settings['gltf_optimize_animation_keep_object'] is False:
|
|
||||||
return None if cst is True else keyframes
|
|
||||||
else:
|
else:
|
||||||
# Keep at least 2 keyframes if data are not changing
|
# baked object
|
||||||
|
if export_settings['gltf_optimize_animation_keep_object'] is False:
|
||||||
|
# Not keeping if not changing property
|
||||||
|
cst = fcurve_is_constant(keyframes)
|
||||||
|
return None if cst is True else keyframes
|
||||||
|
else:
|
||||||
|
# Keep the data, as requested by the user. All samples are kept, since the user chose not to optimize
|
||||||
|
return keyframes
|
||||||
|
|
||||||
|
else:
|
||||||
|
|
||||||
|
# For objects, if all values are the same, we keep only first and last
|
||||||
|
cst = fcurve_is_constant(keyframes)
|
||||||
|
if node_channel_is_animated is True:
|
||||||
return [keyframes[0], keyframes[-1]] if cst is True and len(keyframes) >= 2 else keyframes
|
return [keyframes[0], keyframes[-1]] if cst is True and len(keyframes) >= 2 else keyframes
|
||||||
|
else:
|
||||||
|
# baked object
|
||||||
|
# Not keeping if not changing property if user decided to not keep
|
||||||
|
if export_settings['gltf_optimize_animation_keep_object'] is False:
|
||||||
|
return None if cst is True else keyframes
|
||||||
|
else:
|
||||||
|
# Keep at least 2 keyframes if data are not changing
|
||||||
|
return [keyframes[0], keyframes[-1]] if cst is True and len(keyframes) >= 2 else keyframes
|
||||||
|
|
||||||
def fcurve_is_constant(keyframes):
|
def fcurve_is_constant(keyframes):
|
||||||
return all([j < 0.0001 for j in np.ptp([[k.value[i] for i in range(len(keyframes[0].value))] for k in keyframes], axis=0)])
|
return all([j < 0.0001 for j in np.ptp([[k.value[i] for i in range(len(keyframes[0].value))] for k in keyframes], axis=0)])
|
||||||
|
@ -20,6 +20,7 @@ def gather_object_sampled_animation_sampler(
|
|||||||
channel: str,
|
channel: str,
|
||||||
action_name: str,
|
action_name: str,
|
||||||
node_channel_is_animated: bool,
|
node_channel_is_animated: bool,
|
||||||
|
node_channel_interpolation: str,
|
||||||
export_settings
|
export_settings
|
||||||
):
|
):
|
||||||
|
|
||||||
@ -41,7 +42,7 @@ def gather_object_sampled_animation_sampler(
|
|||||||
extensions=None,
|
extensions=None,
|
||||||
extras=None,
|
extras=None,
|
||||||
input=input,
|
input=input,
|
||||||
interpolation=__gather_interpolation(export_settings),
|
interpolation=__gather_interpolation(node_channel_is_animated, node_channel_interpolation, keyframes, export_settings),
|
||||||
output=output
|
output=output
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -66,10 +67,6 @@ def __gather_keyframes(
|
|||||||
export_settings
|
export_settings
|
||||||
)
|
)
|
||||||
|
|
||||||
if keyframes is None:
|
|
||||||
# After check, no need to animation this node
|
|
||||||
return None
|
|
||||||
|
|
||||||
return keyframes
|
return keyframes
|
||||||
|
|
||||||
def __convert_keyframes(obj_uuid: str, channel: str, keyframes, action_name: str, export_settings):
|
def __convert_keyframes(obj_uuid: str, channel: str, keyframes, action_name: str, export_settings):
|
||||||
@ -136,6 +133,29 @@ def __convert_keyframes(obj_uuid: str, channel: str, keyframes, action_name: str
|
|||||||
|
|
||||||
return input, output
|
return input, output
|
||||||
|
|
||||||
def __gather_interpolation(export_settings):
|
def __gather_interpolation(
|
||||||
# TODO: check if the object was animated with CONSTANT
|
node_channel_is_animated: bool,
|
||||||
return 'LINEAR'
|
node_channel_interpolation: str,
|
||||||
|
keyframes,
|
||||||
|
export_settings):
|
||||||
|
|
||||||
|
if len(keyframes) > 2:
|
||||||
|
# keep STEP as STEP, others become LINEAR
|
||||||
|
return {
|
||||||
|
"STEP": "STEP"
|
||||||
|
}.get(node_channel_interpolation, "LINEAR")
|
||||||
|
elif len(keyframes) == 1:
|
||||||
|
if node_channel_is_animated is False:
|
||||||
|
return "STEP"
|
||||||
|
else:
|
||||||
|
return node_channel_interpolation
|
||||||
|
else:
|
||||||
|
# If we only have 2 keyframes, set interpolation to STEP if baked
|
||||||
|
if node_channel_is_animated is False:
|
||||||
|
# baked => We have first and last keyframe
|
||||||
|
return "STEP"
|
||||||
|
else:
|
||||||
|
if keyframes[0].value == keyframes[1].value:
|
||||||
|
return "STEP"
|
||||||
|
else:
|
||||||
|
return "LINEAR"
|
||||||
|
@ -3,7 +3,7 @@
|
|||||||
bl_info = {
|
bl_info = {
|
||||||
"name": "Node Wrangler",
|
"name": "Node Wrangler",
|
||||||
"author": "Bartek Skorupa, Greg Zaal, Sebastian Koenig, Christian Brinkmann, Florian Meyer",
|
"author": "Bartek Skorupa, Greg Zaal, Sebastian Koenig, Christian Brinkmann, Florian Meyer",
|
||||||
"version": (3, 45),
|
"version": (3, 46),
|
||||||
"blender": (3, 6, 0),
|
"blender": (3, 6, 0),
|
||||||
"location": "Node Editor Toolbar or Shift-W",
|
"location": "Node Editor Toolbar or Shift-W",
|
||||||
"description": "Various tools to enhance and speed up node-based workflow",
|
"description": "Various tools to enhance and speed up node-based workflow",
|
||||||
|
@ -13,6 +13,7 @@ from bpy.props import (
|
|||||||
CollectionProperty,
|
CollectionProperty,
|
||||||
)
|
)
|
||||||
from bpy_extras.io_utils import ImportHelper, ExportHelper
|
from bpy_extras.io_utils import ImportHelper, ExportHelper
|
||||||
|
from bpy_extras.node_utils import connect_sockets
|
||||||
from mathutils import Vector
|
from mathutils import Vector
|
||||||
from os import path
|
from os import path
|
||||||
from glob import glob
|
from glob import glob
|
||||||
@ -368,13 +369,13 @@ class NWSwapLinks(Operator, NWBase):
|
|||||||
|
|
||||||
for connection in n1_outputs:
|
for connection in n1_outputs:
|
||||||
try:
|
try:
|
||||||
links.new(n2.outputs[connection[0]], connection[1])
|
connect_sockets(n2.outputs[connection[0]], connection[1])
|
||||||
except:
|
except:
|
||||||
self.report({'WARNING'},
|
self.report({'WARNING'},
|
||||||
"Some connections have been lost due to differing numbers of output sockets")
|
"Some connections have been lost due to differing numbers of output sockets")
|
||||||
for connection in n2_outputs:
|
for connection in n2_outputs:
|
||||||
try:
|
try:
|
||||||
links.new(n1.outputs[connection[0]], connection[1])
|
connect_sockets(n1.outputs[connection[0]], connection[1])
|
||||||
except:
|
except:
|
||||||
self.report({'WARNING'},
|
self.report({'WARNING'},
|
||||||
"Some connections have been lost due to differing numbers of output sockets")
|
"Some connections have been lost due to differing numbers of output sockets")
|
||||||
@ -412,8 +413,8 @@ class NWSwapLinks(Operator, NWBase):
|
|||||||
i1t = pair[0].links[0].to_socket
|
i1t = pair[0].links[0].to_socket
|
||||||
i2f = pair[1].links[0].from_socket
|
i2f = pair[1].links[0].from_socket
|
||||||
i2t = pair[1].links[0].to_socket
|
i2t = pair[1].links[0].to_socket
|
||||||
links.new(i1f, i2t)
|
connect_sockets(i1f, i2t)
|
||||||
links.new(i2f, i1t)
|
connect_sockets(i2f, i1t)
|
||||||
if t[1] == 1:
|
if t[1] == 1:
|
||||||
if len(types) == 1:
|
if len(types) == 1:
|
||||||
fs = t[0].links[0].from_socket
|
fs = t[0].links[0].from_socket
|
||||||
@ -424,14 +425,14 @@ class NWSwapLinks(Operator, NWBase):
|
|||||||
i += 1
|
i += 1
|
||||||
while n1.inputs[i].is_linked:
|
while n1.inputs[i].is_linked:
|
||||||
i += 1
|
i += 1
|
||||||
links.new(fs, n1.inputs[i])
|
connect_sockets(fs, n1.inputs[i])
|
||||||
elif len(types) == 2:
|
elif len(types) == 2:
|
||||||
i1f = types[0][0].links[0].from_socket
|
i1f = types[0][0].links[0].from_socket
|
||||||
i1t = types[0][0].links[0].to_socket
|
i1t = types[0][0].links[0].to_socket
|
||||||
i2f = types[1][0].links[0].from_socket
|
i2f = types[1][0].links[0].from_socket
|
||||||
i2t = types[1][0].links[0].to_socket
|
i2t = types[1][0].links[0].to_socket
|
||||||
links.new(i1f, i2t)
|
connect_sockets(i1f, i2t)
|
||||||
links.new(i2f, i1t)
|
connect_sockets(i2f, i1t)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
self.report({'WARNING'}, "This node has no input connections to swap!")
|
self.report({'WARNING'}, "This node has no input connections to swap!")
|
||||||
@ -702,7 +703,7 @@ class NWPreviewNode(Operator, NWBase):
|
|||||||
make_links.append((active.outputs[out_i], geometryoutput.inputs[geometryoutindex]))
|
make_links.append((active.outputs[out_i], geometryoutput.inputs[geometryoutindex]))
|
||||||
output_socket = geometryoutput.inputs[geometryoutindex]
|
output_socket = geometryoutput.inputs[geometryoutindex]
|
||||||
for li_from, li_to in make_links:
|
for li_from, li_to in make_links:
|
||||||
base_node_tree.links.new(li_from, li_to)
|
connect_sockets(li_from, li_to)
|
||||||
tree = base_node_tree
|
tree = base_node_tree
|
||||||
link_end = output_socket
|
link_end = output_socket
|
||||||
while tree.nodes.active != active:
|
while tree.nodes.active != active:
|
||||||
@ -713,11 +714,11 @@ class NWPreviewNode(Operator, NWBase):
|
|||||||
node_socket = node.node_tree.outputs[index]
|
node_socket = node.node_tree.outputs[index]
|
||||||
if node_socket in delete_sockets:
|
if node_socket in delete_sockets:
|
||||||
delete_sockets.remove(node_socket)
|
delete_sockets.remove(node_socket)
|
||||||
tree.links.new(link_start, link_end)
|
connect_sockets(link_start, link_end)
|
||||||
# Iterate
|
# Iterate
|
||||||
link_end = self.ensure_group_output(node.node_tree).inputs[index]
|
link_end = self.ensure_group_output(node.node_tree).inputs[index]
|
||||||
tree = tree.nodes.active.node_tree
|
tree = tree.nodes.active.node_tree
|
||||||
tree.links.new(active.outputs[out_i], link_end)
|
connect_sockets(active.outputs[out_i], link_end)
|
||||||
|
|
||||||
# Delete sockets
|
# Delete sockets
|
||||||
for socket in delete_sockets:
|
for socket in delete_sockets:
|
||||||
@ -776,7 +777,7 @@ class NWPreviewNode(Operator, NWBase):
|
|||||||
make_links.append((active.outputs[out_i], materialout.inputs[materialout_index]))
|
make_links.append((active.outputs[out_i], materialout.inputs[materialout_index]))
|
||||||
output_socket = materialout.inputs[materialout_index]
|
output_socket = materialout.inputs[materialout_index]
|
||||||
for li_from, li_to in make_links:
|
for li_from, li_to in make_links:
|
||||||
base_node_tree.links.new(li_from, li_to)
|
connect_sockets(li_from, li_to)
|
||||||
|
|
||||||
# Create links through node groups until we reach the active node
|
# Create links through node groups until we reach the active node
|
||||||
tree = base_node_tree
|
tree = base_node_tree
|
||||||
@ -789,11 +790,11 @@ class NWPreviewNode(Operator, NWBase):
|
|||||||
node_socket = node.node_tree.outputs[index]
|
node_socket = node.node_tree.outputs[index]
|
||||||
if node_socket in delete_sockets:
|
if node_socket in delete_sockets:
|
||||||
delete_sockets.remove(node_socket)
|
delete_sockets.remove(node_socket)
|
||||||
tree.links.new(link_start, link_end)
|
connect_sockets(link_start, link_end)
|
||||||
# Iterate
|
# Iterate
|
||||||
link_end = self.ensure_group_output(node.node_tree).inputs[index]
|
link_end = self.ensure_group_output(node.node_tree).inputs[index]
|
||||||
tree = tree.nodes.active.node_tree
|
tree = tree.nodes.active.node_tree
|
||||||
tree.links.new(active.outputs[out_i], link_end)
|
connect_sockets(active.outputs[out_i], link_end)
|
||||||
|
|
||||||
# Delete sockets
|
# Delete sockets
|
||||||
for socket in delete_sockets:
|
for socket in delete_sockets:
|
||||||
@ -1064,31 +1065,31 @@ class NWSwitchNodeType(Operator, NWBase):
|
|||||||
if node.inputs[src_i].links and not new_node.inputs[dst_i].links:
|
if node.inputs[src_i].links and not new_node.inputs[dst_i].links:
|
||||||
in_src_link = node.inputs[src_i].links[0]
|
in_src_link = node.inputs[src_i].links[0]
|
||||||
in_dst_socket = new_node.inputs[dst_i]
|
in_dst_socket = new_node.inputs[dst_i]
|
||||||
links.new(in_src_link.from_socket, in_dst_socket)
|
connect_sockets(in_src_link.from_socket, in_dst_socket)
|
||||||
links.remove(in_src_link)
|
links.remove(in_src_link)
|
||||||
# OUTPUTS: Base on matches in proper order.
|
# OUTPUTS: Base on matches in proper order.
|
||||||
for (src_i, src_dval), (dst_i, dst_dval) in matches['OUTPUTS'][tp]:
|
for (src_i, src_dval), (dst_i, dst_dval) in matches['OUTPUTS'][tp]:
|
||||||
for out_src_link in node.outputs[src_i].links:
|
for out_src_link in node.outputs[src_i].links:
|
||||||
out_dst_socket = new_node.outputs[dst_i]
|
out_dst_socket = new_node.outputs[dst_i]
|
||||||
links.new(out_dst_socket, out_src_link.to_socket)
|
connect_sockets(out_dst_socket, out_src_link.to_socket)
|
||||||
# relink rest inputs if possible, no criteria
|
# relink rest inputs if possible, no criteria
|
||||||
for src_inp in node.inputs:
|
for src_inp in node.inputs:
|
||||||
for dst_inp in new_node.inputs:
|
for dst_inp in new_node.inputs:
|
||||||
if src_inp.links and not dst_inp.links:
|
if src_inp.links and not dst_inp.links:
|
||||||
src_link = src_inp.links[0]
|
src_link = src_inp.links[0]
|
||||||
links.new(src_link.from_socket, dst_inp)
|
connect_sockets(src_link.from_socket, dst_inp)
|
||||||
links.remove(src_link)
|
links.remove(src_link)
|
||||||
# relink rest outputs if possible, base on node kind if any left.
|
# relink rest outputs if possible, base on node kind if any left.
|
||||||
for src_o in node.outputs:
|
for src_o in node.outputs:
|
||||||
for out_src_link in src_o.links:
|
for out_src_link in src_o.links:
|
||||||
for dst_o in new_node.outputs:
|
for dst_o in new_node.outputs:
|
||||||
if src_o.type == dst_o.type:
|
if src_o.type == dst_o.type:
|
||||||
links.new(dst_o, out_src_link.to_socket)
|
connect_sockets(dst_o, out_src_link.to_socket)
|
||||||
# relink rest outputs no criteria if any left. Link all from first output.
|
# relink rest outputs no criteria if any left. Link all from first output.
|
||||||
for src_o in node.outputs:
|
for src_o in node.outputs:
|
||||||
for out_src_link in src_o.links:
|
for out_src_link in src_o.links:
|
||||||
if new_node.outputs:
|
if new_node.outputs:
|
||||||
links.new(new_node.outputs[0], out_src_link.to_socket)
|
connect_sockets(new_node.outputs[0], out_src_link.to_socket)
|
||||||
nodes.remove(node)
|
nodes.remove(node)
|
||||||
force_update(context)
|
force_update(context)
|
||||||
return {'FINISHED'}
|
return {'FINISHED'}
|
||||||
@ -1177,16 +1178,16 @@ class NWMergeNodes(Operator, NWBase):
|
|||||||
# outputs to the multi input socket.
|
# outputs to the multi input socket.
|
||||||
if i < len(socket_indices) - 1:
|
if i < len(socket_indices) - 1:
|
||||||
ind = socket_indices[i]
|
ind = socket_indices[i]
|
||||||
links.new(node.outputs[0], new_node.inputs[ind])
|
connect_sockets(node.outputs[0], new_node.inputs[ind])
|
||||||
else:
|
else:
|
||||||
outputs_for_multi_input.insert(0, node.outputs[0])
|
outputs_for_multi_input.insert(0, node.outputs[0])
|
||||||
if outputs_for_multi_input != []:
|
if outputs_for_multi_input != []:
|
||||||
ind = socket_indices[-1]
|
ind = socket_indices[-1]
|
||||||
for output in outputs_for_multi_input:
|
for output in outputs_for_multi_input:
|
||||||
links.new(output, new_node.inputs[ind])
|
connect_sockets(output, new_node.inputs[ind])
|
||||||
if prev_links != []:
|
if prev_links != []:
|
||||||
for link in prev_links:
|
for link in prev_links:
|
||||||
links.new(new_node.outputs[0], link.to_node.inputs[0])
|
connect_sockets(new_node.outputs[0], link.to_node.inputs[0])
|
||||||
return new_node
|
return new_node
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self, context):
|
||||||
@ -1447,19 +1448,19 @@ class NWMergeNodes(Operator, NWBase):
|
|||||||
# Prevent cyclic dependencies when nodes to be merged are linked to one another.
|
# Prevent cyclic dependencies when nodes to be merged are linked to one another.
|
||||||
# Link only if "to_node" index not in invalid indexes list.
|
# Link only if "to_node" index not in invalid indexes list.
|
||||||
if not self.link_creates_cycle(ss_link, invalid_nodes):
|
if not self.link_creates_cycle(ss_link, invalid_nodes):
|
||||||
links.new(get_first_enabled_output(last_add), ss_link.to_socket)
|
connect_sockets(get_first_enabled_output(last_add), ss_link.to_socket)
|
||||||
# add links from last_add to all links 'to_socket' of out links of first selected.
|
# add links from last_add to all links 'to_socket' of out links of first selected.
|
||||||
for fs_link in first_selected_output.links:
|
for fs_link in first_selected_output.links:
|
||||||
# Link only if "to_node" index not in invalid indexes list.
|
# Link only if "to_node" index not in invalid indexes list.
|
||||||
if not self.link_creates_cycle(fs_link, invalid_nodes):
|
if not self.link_creates_cycle(fs_link, invalid_nodes):
|
||||||
links.new(get_first_enabled_output(last_add), fs_link.to_socket)
|
connect_sockets(get_first_enabled_output(last_add), fs_link.to_socket)
|
||||||
# add link from "first" selected and "first" add node
|
# add link from "first" selected and "first" add node
|
||||||
node_to = nodes[count_after - 1]
|
node_to = nodes[count_after - 1]
|
||||||
links.new(first_selected_output, node_to.inputs[first])
|
connect_sockets(first_selected_output, node_to.inputs[first])
|
||||||
if node_to.type == 'ZCOMBINE':
|
if node_to.type == 'ZCOMBINE':
|
||||||
for fs_out in first_selected.outputs:
|
for fs_out in first_selected.outputs:
|
||||||
if fs_out != first_selected_output and fs_out.name in ('Z', 'Depth'):
|
if fs_out != first_selected_output and fs_out.name in ('Z', 'Depth'):
|
||||||
links.new(fs_out, node_to.inputs[1])
|
connect_sockets(fs_out, node_to.inputs[1])
|
||||||
break
|
break
|
||||||
# add links between added ADD nodes and between selected and ADD nodes
|
# add links between added ADD nodes and between selected and ADD nodes
|
||||||
for i in range(count_adds):
|
for i in range(count_adds):
|
||||||
@ -1468,21 +1469,21 @@ class NWMergeNodes(Operator, NWBase):
|
|||||||
node_to = nodes[index - 1]
|
node_to = nodes[index - 1]
|
||||||
node_to_input_i = first
|
node_to_input_i = first
|
||||||
node_to_z_i = 1 # if z combine - link z to first z input
|
node_to_z_i = 1 # if z combine - link z to first z input
|
||||||
links.new(get_first_enabled_output(node_from), node_to.inputs[node_to_input_i])
|
connect_sockets(get_first_enabled_output(node_from), node_to.inputs[node_to_input_i])
|
||||||
if node_to.type == 'ZCOMBINE':
|
if node_to.type == 'ZCOMBINE':
|
||||||
for from_out in node_from.outputs:
|
for from_out in node_from.outputs:
|
||||||
if from_out != get_first_enabled_output(node_from) and from_out.name in ('Z', 'Depth'):
|
if from_out != get_first_enabled_output(node_from) and from_out.name in ('Z', 'Depth'):
|
||||||
links.new(from_out, node_to.inputs[node_to_z_i])
|
connect_sockets(from_out, node_to.inputs[node_to_z_i])
|
||||||
if len(nodes_list) > 1:
|
if len(nodes_list) > 1:
|
||||||
node_from = nodes[nodes_list[i + 1][0]]
|
node_from = nodes[nodes_list[i + 1][0]]
|
||||||
node_to = nodes[index]
|
node_to = nodes[index]
|
||||||
node_to_input_i = second
|
node_to_input_i = second
|
||||||
node_to_z_i = 3 # if z combine - link z to second z input
|
node_to_z_i = 3 # if z combine - link z to second z input
|
||||||
links.new(get_first_enabled_output(node_from), node_to.inputs[node_to_input_i])
|
connect_sockets(get_first_enabled_output(node_from), node_to.inputs[node_to_input_i])
|
||||||
if node_to.type == 'ZCOMBINE':
|
if node_to.type == 'ZCOMBINE':
|
||||||
for from_out in node_from.outputs:
|
for from_out in node_from.outputs:
|
||||||
if from_out != get_first_enabled_output(node_from) and from_out.name in ('Z', 'Depth'):
|
if from_out != get_first_enabled_output(node_from) and from_out.name in ('Z', 'Depth'):
|
||||||
links.new(from_out, node_to.inputs[node_to_z_i])
|
connect_sockets(from_out, node_to.inputs[node_to_z_i])
|
||||||
index -= 1
|
index -= 1
|
||||||
# set "last" of added nodes as active
|
# set "last" of added nodes as active
|
||||||
nodes.active = last_add
|
nodes.active = last_add
|
||||||
@ -1690,7 +1691,7 @@ class NWCopySettings(Operator, NWBase):
|
|||||||
new_node.location = node_loc
|
new_node.location = node_loc
|
||||||
|
|
||||||
for str_from, str_to in reconnections:
|
for str_from, str_to in reconnections:
|
||||||
node_tree.links.new(eval(str_from), eval(str_to))
|
connect_sockets(eval(str_from), eval(str_to))
|
||||||
|
|
||||||
success_names.append(new_node.name)
|
success_names.append(new_node.name)
|
||||||
|
|
||||||
@ -1859,7 +1860,7 @@ class NWAddTextureSetup(Operator, NWBase):
|
|||||||
x_offset = x_offset + image_texture_node.width + padding
|
x_offset = x_offset + image_texture_node.width + padding
|
||||||
image_texture_node.location = [locx - x_offset, locy]
|
image_texture_node.location = [locx - x_offset, locy]
|
||||||
nodes.active = image_texture_node
|
nodes.active = image_texture_node
|
||||||
links.new(image_texture_node.outputs[0], target_input)
|
connect_sockets(image_texture_node.outputs[0], target_input)
|
||||||
|
|
||||||
# The mapping setup following this will connect to the first input of this image texture.
|
# The mapping setup following this will connect to the first input of this image texture.
|
||||||
target_input = image_texture_node.inputs[0]
|
target_input = image_texture_node.inputs[0]
|
||||||
@ -1871,7 +1872,7 @@ class NWAddTextureSetup(Operator, NWBase):
|
|||||||
mapping_node = nodes.new('ShaderNodeMapping')
|
mapping_node = nodes.new('ShaderNodeMapping')
|
||||||
x_offset = x_offset + mapping_node.width + padding
|
x_offset = x_offset + mapping_node.width + padding
|
||||||
mapping_node.location = [locx - x_offset, locy]
|
mapping_node.location = [locx - x_offset, locy]
|
||||||
links.new(mapping_node.outputs[0], target_input)
|
connect_sockets(mapping_node.outputs[0], target_input)
|
||||||
|
|
||||||
# Add Texture Coordinates node.
|
# Add Texture Coordinates node.
|
||||||
tex_coord_node = nodes.new('ShaderNodeTexCoord')
|
tex_coord_node = nodes.new('ShaderNodeTexCoord')
|
||||||
@ -1881,7 +1882,7 @@ class NWAddTextureSetup(Operator, NWBase):
|
|||||||
is_procedural_texture = is_texture_node and node.type != 'TEX_IMAGE'
|
is_procedural_texture = is_texture_node and node.type != 'TEX_IMAGE'
|
||||||
use_generated_coordinates = is_procedural_texture or use_environment_texture
|
use_generated_coordinates = is_procedural_texture or use_environment_texture
|
||||||
tex_coord_output = tex_coord_node.outputs[0 if use_generated_coordinates else 2]
|
tex_coord_output = tex_coord_node.outputs[0 if use_generated_coordinates else 2]
|
||||||
links.new(tex_coord_output, mapping_node.inputs[0])
|
connect_sockets(tex_coord_output, mapping_node.inputs[0])
|
||||||
|
|
||||||
return {'FINISHED'}
|
return {'FINISHED'}
|
||||||
|
|
||||||
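For reference, the chain NWAddTextureSetup builds across the hunks above is Texture Coordinate → Mapping → Image Texture → target input. A condensed sketch under the same assumptions (real bl_idnames; socket indices as in the diff; connect_sockets() as noted earlier):

# Condensed sketch of the texture setup above; assumes a shader node tree.
def add_texture_chain(node_tree, target_input, use_generated_coordinates=False):
    nodes = node_tree.nodes
    image_texture = nodes.new('ShaderNodeTexImage')
    mapping = nodes.new('ShaderNodeMapping')
    tex_coord = nodes.new('ShaderNodeTexCoord')

    connect_sockets(image_texture.outputs[0], target_input)       # Color -> chosen input
    connect_sockets(mapping.outputs[0], image_texture.inputs[0])  # Vector -> Vector
    # Output 0 is Generated, output 2 is UV, matching the index choice in the diff.
    tex_coord_output = tex_coord.outputs[0 if use_generated_coordinates else 2]
    connect_sockets(tex_coord_output, mapping.inputs[0])
    return image_texture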
@ -2006,7 +2007,7 @@ class NWAddPrincipledSetup(Operator, NWBase, ImportHelper):
|
|||||||
disp_node = nodes.new(type='ShaderNodeDisplacement')
|
disp_node = nodes.new(type='ShaderNodeDisplacement')
|
||||||
# Align the Displacement node under the active Principled BSDF node
|
# Align the Displacement node under the active Principled BSDF node
|
||||||
disp_node.location = active_node.location + Vector((100, -700))
|
disp_node.location = active_node.location + Vector((100, -700))
|
||||||
link = links.new(disp_node.inputs[0], disp_texture.outputs[0])
|
link = connect_sockets(disp_node.inputs[0], disp_texture.outputs[0])
|
||||||
|
|
||||||
# TODO Turn on true displacement in the material
|
# TODO Turn on true displacement in the material
|
||||||
# Too complicated for now
|
# Too complicated for now
|
||||||
@ -2015,7 +2016,7 @@ class NWAddPrincipledSetup(Operator, NWBase, ImportHelper):
|
|||||||
output_node = [n for n in nodes if n.bl_idname == 'ShaderNodeOutputMaterial']
|
output_node = [n for n in nodes if n.bl_idname == 'ShaderNodeOutputMaterial']
|
||||||
if output_node:
|
if output_node:
|
||||||
if not output_node[0].inputs[2].is_linked:
|
if not output_node[0].inputs[2].is_linked:
|
||||||
link = links.new(output_node[0].inputs[2], disp_node.outputs[0])
|
link = connect_sockets(output_node[0].inputs[2], disp_node.outputs[0])
|
||||||
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@ -2045,13 +2046,13 @@ class NWAddPrincipledSetup(Operator, NWBase, ImportHelper):
|
|||||||
if match_normal:
|
if match_normal:
|
||||||
# If Normal add normal node in between
|
# If Normal add normal node in between
|
||||||
normal_node = nodes.new(type='ShaderNodeNormalMap')
|
normal_node = nodes.new(type='ShaderNodeNormalMap')
|
||||||
link = links.new(normal_node.inputs[1], texture_node.outputs[0])
|
link = connect_sockets(normal_node.inputs[1], texture_node.outputs[0])
|
||||||
elif match_bump:
|
elif match_bump:
|
||||||
# If Bump add bump node in between
|
# If Bump add bump node in between
|
||||||
normal_node = nodes.new(type='ShaderNodeBump')
|
normal_node = nodes.new(type='ShaderNodeBump')
|
||||||
link = links.new(normal_node.inputs[2], texture_node.outputs[0])
|
link = connect_sockets(normal_node.inputs[2], texture_node.outputs[0])
|
||||||
|
|
||||||
link = links.new(active_node.inputs[sname[0]], normal_node.outputs[0])
|
link = connect_sockets(active_node.inputs[sname[0]], normal_node.outputs[0])
|
||||||
normal_node_texture = texture_node
|
normal_node_texture = texture_node
|
||||||
|
|
||||||
elif sname[0] == 'Roughness':
|
elif sname[0] == 'Roughness':
|
||||||
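The normal/bump branch above inserts an intermediate node between the texture and the Principled input; index 1 is the Normal Map node's Color socket and index 2 is the Bump node's Height socket, as used in the diff. A short sketch of just that routing:

# Sketch of the routing above; `nodes` and connect_sockets() as in the surrounding code.
def route_through_normal_node(nodes, texture_node, principled, use_bump=False):
    if use_bump:
        normal_node = nodes.new(type='ShaderNodeBump')
        connect_sockets(texture_node.outputs[0], normal_node.inputs[2])  # Color -> Height
    else:
        normal_node = nodes.new(type='ShaderNodeNormalMap')
        connect_sockets(texture_node.outputs[0], normal_node.inputs[1])  # Color -> Color
    connect_sockets(normal_node.outputs[0], principled.inputs['Normal'])
    return normal_node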
@ -2062,19 +2063,19 @@ class NWAddPrincipledSetup(Operator, NWBase, ImportHelper):
|
|||||||
|
|
||||||
if match_rough:
|
if match_rough:
|
||||||
# If Roughness, nothing to do
|
# If Roughness, nothing to do
|
||||||
link = links.new(active_node.inputs[sname[0]], texture_node.outputs[0])
|
link = connect_sockets(active_node.inputs[sname[0]], texture_node.outputs[0])
|
||||||
|
|
||||||
elif match_gloss:
|
elif match_gloss:
|
||||||
# If Gloss Map add invert node
|
# If Gloss Map add invert node
|
||||||
invert_node = nodes.new(type='ShaderNodeInvert')
|
invert_node = nodes.new(type='ShaderNodeInvert')
|
||||||
link = links.new(invert_node.inputs[1], texture_node.outputs[0])
|
link = connect_sockets(invert_node.inputs[1], texture_node.outputs[0])
|
||||||
|
|
||||||
link = links.new(active_node.inputs[sname[0]], invert_node.outputs[0])
|
link = connect_sockets(active_node.inputs[sname[0]], invert_node.outputs[0])
|
||||||
roughness_node = texture_node
|
roughness_node = texture_node
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# This is a simple connection Texture --> Input slot
|
# This is a simple connection Texture --> Input slot
|
||||||
link = links.new(active_node.inputs[sname[0]], texture_node.outputs[0])
|
link = connect_sockets(active_node.inputs[sname[0]], texture_node.outputs[0])
|
||||||
|
|
||||||
# Use non-color for all but 'Base Color' Textures
|
# Use non-color for all but 'Base Color' Textures
|
||||||
if not sname[0] in ['Base Color', 'Emission'] and texture_node.image:
|
if not sname[0] in ['Base Color', 'Emission'] and texture_node.image:
|
||||||
@ -2119,15 +2120,15 @@ class NWAddPrincipledSetup(Operator, NWBase, ImportHelper):
|
|||||||
sum(n.location.y for n in texture_nodes) / len(texture_nodes)))
|
sum(n.location.y for n in texture_nodes) / len(texture_nodes)))
|
||||||
reroute.location = tex_coords + Vector((-50, -120))
|
reroute.location = tex_coords + Vector((-50, -120))
|
||||||
for texture_node in texture_nodes:
|
for texture_node in texture_nodes:
|
||||||
link = links.new(texture_node.inputs[0], reroute.outputs[0])
|
link = connect_sockets(texture_node.inputs[0], reroute.outputs[0])
|
||||||
link = links.new(reroute.inputs[0], mapping.outputs[0])
|
link = connect_sockets(reroute.inputs[0], mapping.outputs[0])
|
||||||
else:
|
else:
|
||||||
link = links.new(texture_nodes[0].inputs[0], mapping.outputs[0])
|
link = connect_sockets(texture_nodes[0].inputs[0], mapping.outputs[0])
|
||||||
|
|
||||||
# Connect texture coordinates to the mapping node
|
# Connect texture coordinates to the mapping node
|
||||||
texture_input = nodes.new(type='ShaderNodeTexCoord')
|
texture_input = nodes.new(type='ShaderNodeTexCoord')
|
||||||
texture_input.location = mapping.location + Vector((-200, 0))
|
texture_input.location = mapping.location + Vector((-200, 0))
|
||||||
link = links.new(mapping.inputs[0], texture_input.outputs[2])
|
link = connect_sockets(mapping.inputs[0], texture_input.outputs[2])
|
||||||
|
|
||||||
# Create frame around tex coords and mapping
|
# Create frame around tex coords and mapping
|
||||||
frame = nodes.new(type='NodeFrame')
|
frame = nodes.new(type='NodeFrame')
|
||||||
@ -2231,8 +2232,8 @@ class NWAddReroutes(Operator, NWBase):
|
|||||||
n = nodes.new('NodeReroute')
|
n = nodes.new('NodeReroute')
|
||||||
nodes.active = n
|
nodes.active = n
|
||||||
for link in output.links:
|
for link in output.links:
|
||||||
links.new(n.outputs[0], link.to_socket)
|
connect_sockets(n.outputs[0], link.to_socket)
|
||||||
links.new(output, n.inputs[0])
|
connect_sockets(output, n.inputs[0])
|
||||||
n.location = loc
|
n.location = loc
|
||||||
post_select.append(n)
|
post_select.append(n)
|
||||||
reroutes_count += 1
|
reroutes_count += 1
|
||||||
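The reroute hunk above keeps a deliberate order: downstream links are re-pointed to the new reroute before the source output is wired into it. Sketch of the same insertion:

# Sketch of the reroute insertion above, preserving the link ordering.
def insert_reroute(nodes, output_socket, location):
    reroute = nodes.new('NodeReroute')
    nodes.active = reroute
    for link in output_socket.links:
        connect_sockets(reroute.outputs[0], link.to_socket)
    connect_sockets(output_socket, reroute.inputs[0])
    reroute.location = location
    return reroute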
@ -2324,7 +2325,7 @@ class NWLinkActiveToSelected(Operator, NWBase):
|
|||||||
for input in node.inputs:
|
for input in node.inputs:
|
||||||
if input.type == out.type or node.type == 'REROUTE':
|
if input.type == out.type or node.type == 'REROUTE':
|
||||||
if replace or not input.is_linked:
|
if replace or not input.is_linked:
|
||||||
links.new(out, input)
|
connect_sockets(out, input)
|
||||||
if not use_node_name and not use_outputs_names:
|
if not use_node_name and not use_outputs_names:
|
||||||
doit = False
|
doit = False
|
||||||
break
|
break
|
||||||
@ -2521,7 +2522,7 @@ class NWLinkToOutputNode(Operator):
|
|||||||
elif tree_type == 'GeometryNodeTree':
|
elif tree_type == 'GeometryNodeTree':
|
||||||
if active.outputs[output_index].type != 'GEOMETRY':
|
if active.outputs[output_index].type != 'GEOMETRY':
|
||||||
return {'CANCELLED'}
|
return {'CANCELLED'}
|
||||||
links.new(active.outputs[output_index], output_node.inputs[out_input_index])
|
connect_sockets(active.outputs[output_index], output_node.inputs[out_input_index])
|
||||||
|
|
||||||
force_update(context) # viewport render does not update
|
force_update(context) # viewport render does not update
|
||||||
|
|
||||||
@ -2542,7 +2543,7 @@ class NWMakeLink(Operator, NWBase):
|
|||||||
n1 = nodes[context.scene.NWLazySource]
|
n1 = nodes[context.scene.NWLazySource]
|
||||||
n2 = nodes[context.scene.NWLazyTarget]
|
n2 = nodes[context.scene.NWLazyTarget]
|
||||||
|
|
||||||
links.new(n1.outputs[self.from_socket], n2.inputs[self.to_socket])
|
connect_sockets(n1.outputs[self.from_socket], n2.inputs[self.to_socket])
|
||||||
|
|
||||||
force_update(context)
|
force_update(context)
|
||||||
|
|
||||||
@ -2566,7 +2567,7 @@ class NWCallInputsMenu(Operator, NWBase):
|
|||||||
if len(n2.inputs) > 1:
|
if len(n2.inputs) > 1:
|
||||||
bpy.ops.wm.call_menu("INVOKE_DEFAULT", name=NWConnectionListInputs.bl_idname)
|
bpy.ops.wm.call_menu("INVOKE_DEFAULT", name=NWConnectionListInputs.bl_idname)
|
||||||
elif len(n2.inputs) == 1:
|
elif len(n2.inputs) == 1:
|
||||||
links.new(n1.outputs[self.from_socket], n2.inputs[0])
|
connect_sockets(n1.outputs[self.from_socket], n2.inputs[0])
|
||||||
return {'FINISHED'}
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
@ -2950,7 +2951,7 @@ class NWResetNodes(bpy.types.Operator):
|
|||||||
new_node.location = node_loc
|
new_node.location = node_loc
|
||||||
|
|
||||||
for str_from, str_to in reconnections:
|
for str_from, str_to in reconnections:
|
||||||
node_tree.links.new(eval(str_from), eval(str_to))
|
connect_sockets(eval(str_from), eval(str_to))
|
||||||
|
|
||||||
new_node.select = False
|
new_node.select = False
|
||||||
success_names.append(new_node.name)
|
success_names.append(new_node.name)
|
||||||
|
@ -8,7 +8,8 @@ bl_info = {
|
|||||||
"location": "3D View",
|
"location": "3D View",
|
||||||
"description": "Distribute object instances on another object.",
|
"description": "Distribute object instances on another object.",
|
||||||
"warning": "",
|
"warning": "",
|
||||||
"doc_url": "",
|
"doc_url": "{BLENDER_MANUAL_URL}/addons/object/scatter_objects.html",
|
||||||
|
"tracker_url": "https://projects.blender.org/blender/blender-addons/issues",
|
||||||
"support": 'OFFICIAL',
|
"support": 'OFFICIAL',
|
||||||
"category": "Object",
|
"category": "Object",
|
||||||
}
|
}
|
||||||
|
@ -16,8 +16,8 @@
|
|||||||
bl_info = {
|
bl_info = {
|
||||||
"name": "Sun Position",
|
"name": "Sun Position",
|
||||||
"author": "Michael Martin, Damien Picard",
|
"author": "Michael Martin, Damien Picard",
|
||||||
"version": (3, 3, 3),
|
"version": (3, 5, 0),
|
||||||
"blender": (3, 0, 0),
|
"blender": (3, 2, 0),
|
||||||
"location": "World > Sun Position",
|
"location": "World > Sun Position",
|
||||||
"description": "Show sun position with objects and/or sky texture",
|
"description": "Show sun position with objects and/or sky texture",
|
||||||
"doc_url": "{BLENDER_MANUAL_URL}/addons/lighting/sun_position.html",
|
"doc_url": "{BLENDER_MANUAL_URL}/addons/lighting/sun_position.html",
|
||||||
@ -41,17 +41,22 @@ from bpy.app.handlers import persistent
|
|||||||
register_classes, unregister_classes = bpy.utils.register_classes_factory(
|
register_classes, unregister_classes = bpy.utils.register_classes_factory(
|
||||||
(properties.SunPosProperties,
|
(properties.SunPosProperties,
|
||||||
properties.SunPosAddonPreferences, ui_sun.SUNPOS_OT_AddPreset,
|
properties.SunPosAddonPreferences, ui_sun.SUNPOS_OT_AddPreset,
|
||||||
ui_sun.SUNPOS_MT_Presets, ui_sun.SUNPOS_PT_Panel,
|
ui_sun.SUNPOS_PT_Presets, ui_sun.SUNPOS_PT_Panel,
|
||||||
ui_sun.SUNPOS_PT_Location, ui_sun.SUNPOS_PT_Time, hdr.SUNPOS_OT_ShowHdr))
|
ui_sun.SUNPOS_PT_Location, ui_sun.SUNPOS_PT_Time, hdr.SUNPOS_OT_ShowHdr))
|
||||||
|
|
||||||
|
|
||||||
@persistent
|
@persistent
|
||||||
def sun_scene_handler(scene):
|
def sun_scene_handler(scene):
|
||||||
sun_props = bpy.context.scene.sun_pos_properties
|
sun_props = bpy.context.scene.sun_pos_properties
|
||||||
|
|
||||||
|
# Force drawing update
|
||||||
sun_props.show_surface = sun_props.show_surface
|
sun_props.show_surface = sun_props.show_surface
|
||||||
sun_props.show_analemmas = sun_props.show_analemmas
|
sun_props.show_analemmas = sun_props.show_analemmas
|
||||||
sun_props.show_north = sun_props.show_north
|
sun_props.show_north = sun_props.show_north
|
||||||
|
|
||||||
|
# Force coordinates update
|
||||||
|
sun_props.latitude = sun_props.latitude
|
||||||
|
|
||||||
|
|
||||||
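The handler above works because assigning a Blender property to itself runs its update callback, which re-creates the overlay draw handlers and recomputes the coordinates after a file load. Minimal sketch of the same trick (the registration line is an assumption; this diff only shows the handler body):

import bpy
from bpy.app.handlers import persistent

@persistent
def refresh_sun_overlays(scene):
    sun_props = bpy.context.scene.sun_pos_properties
    # Re-assigning a property triggers its update callback (north_update, etc.).
    sun_props.show_north = sun_props.show_north
    sun_props.latitude = sun_props.latitude  # forces a coordinates update

# Registration would go in register(), e.g. (assumption):
# bpy.app.handlers.load_post.append(refresh_sun_overlays)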
def register():
|
def register():
|
||||||
register_classes()
|
register_classes()
|
||||||
|
@ -6,11 +6,19 @@ import gpu
|
|||||||
from gpu_extras.batch import batch_for_shader
|
from gpu_extras.batch import batch_for_shader
|
||||||
from mathutils import Vector
|
from mathutils import Vector
|
||||||
|
|
||||||
|
from .sun_calc import calc_surface, calc_analemma
|
||||||
|
|
||||||
|
|
||||||
if bpy.app.background: # ignore north line in background mode
|
if bpy.app.background: # ignore north line in background mode
|
||||||
def north_update(self, context):
|
def north_update(self, context):
|
||||||
pass
|
pass
|
||||||
|
def surface_update(self, context):
|
||||||
|
pass
|
||||||
|
def analemmas_update(self, context):
|
||||||
|
pass
|
||||||
else:
|
else:
|
||||||
|
# North line
|
||||||
|
|
||||||
shader_interface = gpu.types.GPUStageInterfaceInfo("my_interface")
|
shader_interface = gpu.types.GPUStageInterfaceInfo("my_interface")
|
||||||
shader_interface.flat('VEC2', "v_StartPos")
|
shader_interface.flat('VEC2', "v_StartPos")
|
||||||
shader_interface.smooth('VEC4', "v_VertPos")
|
shader_interface.smooth('VEC4', "v_VertPos")
|
||||||
@ -54,7 +62,7 @@ else:
|
|||||||
del shader_info
|
del shader_info
|
||||||
del shader_interface
|
del shader_interface
|
||||||
|
|
||||||
def draw_north_callback():
|
def north_draw():
|
||||||
"""
|
"""
|
||||||
Set up the compass needle using the current north offset angle
|
Set up the compass needle using the current north offset angle
|
||||||
less 90 degrees. This forces the unit circle to begin at the
|
less 90 degrees. This forces the unit circle to begin at the
|
||||||
@ -84,8 +92,77 @@ else:
|
|||||||
|
|
||||||
def north_update(self, context):
|
def north_update(self, context):
|
||||||
global _north_handle
|
global _north_handle
|
||||||
if self.show_north and _north_handle is None:
|
sun_props = context.scene.sun_pos_properties
|
||||||
_north_handle = bpy.types.SpaceView3D.draw_handler_add(draw_north_callback, (), 'WINDOW', 'POST_VIEW')
|
addon_prefs = context.preferences.addons[__package__].preferences
|
||||||
|
|
||||||
|
if addon_prefs.show_overlays and sun_props.show_north:
|
||||||
|
_north_handle = bpy.types.SpaceView3D.draw_handler_add(north_draw, (), 'WINDOW', 'POST_VIEW')
|
||||||
elif _north_handle is not None:
|
elif _north_handle is not None:
|
||||||
bpy.types.SpaceView3D.draw_handler_remove(_north_handle, 'WINDOW')
|
bpy.types.SpaceView3D.draw_handler_remove(_north_handle, 'WINDOW')
|
||||||
_north_handle = None
|
_north_handle = None
|
||||||
|
|
||||||
|
# Analemmas
|
||||||
|
|
||||||
|
def analemmas_draw(batch, shader):
|
||||||
|
shader.uniform_float("color", (1, 0, 0, 1))
|
||||||
|
batch.draw(shader)
|
||||||
|
|
||||||
|
_analemmas_handle = None
|
||||||
|
|
||||||
|
def analemmas_update(self, context):
|
||||||
|
global _analemmas_handle
|
||||||
|
sun_props = context.scene.sun_pos_properties
|
||||||
|
addon_prefs = context.preferences.addons[__package__].preferences
|
||||||
|
|
||||||
|
if addon_prefs.show_overlays and sun_props.show_analemmas:
|
||||||
|
coords = []
|
||||||
|
indices = []
|
||||||
|
coord_offset = 0
|
||||||
|
for h in range(24):
|
||||||
|
analemma_verts = calc_analemma(context, h)
|
||||||
|
coords.extend(analemma_verts)
|
||||||
|
for i in range(len(analemma_verts) - 1):
|
||||||
|
indices.append((coord_offset + i,
|
||||||
|
coord_offset + i+1))
|
||||||
|
coord_offset += len(analemma_verts)
|
||||||
|
|
||||||
|
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
|
||||||
|
batch = batch_for_shader(shader, 'LINES',
|
||||||
|
{"pos": coords}, indices=indices)
|
||||||
|
|
||||||
|
if _analemmas_handle is not None:
|
||||||
|
bpy.types.SpaceView3D.draw_handler_remove(_analemmas_handle, 'WINDOW')
|
||||||
|
_analemmas_handle = bpy.types.SpaceView3D.draw_handler_add(
|
||||||
|
analemmas_draw, (batch, shader), 'WINDOW', 'POST_VIEW')
|
||||||
|
elif _analemmas_handle is not None:
|
||||||
|
bpy.types.SpaceView3D.draw_handler_remove(_analemmas_handle, 'WINDOW')
|
||||||
|
_analemmas_handle = None
|
||||||
|
|
||||||
|
# Surface
|
||||||
|
|
||||||
|
def surface_draw(batch, shader):
|
||||||
|
blend = gpu.state.blend_get()
|
||||||
|
gpu.state.blend_set("ALPHA")
|
||||||
|
shader.uniform_float("color", (.8, .6, 0, 0.2))
|
||||||
|
batch.draw(shader)
|
||||||
|
gpu.state.blend_set(blend)
|
||||||
|
|
||||||
|
_surface_handle = None
|
||||||
|
|
||||||
|
def surface_update(self, context):
|
||||||
|
global _surface_handle
|
||||||
|
sun_props = context.scene.sun_pos_properties
|
||||||
|
addon_prefs = context.preferences.addons[__package__].preferences
|
||||||
|
|
||||||
|
if addon_prefs.show_overlays and sun_props.show_surface:
|
||||||
|
coords = calc_surface(context)
|
||||||
|
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
|
||||||
|
batch = batch_for_shader(shader, 'TRIS', {"pos": coords})
|
||||||
|
|
||||||
|
if _surface_handle is not None:
|
||||||
|
bpy.types.SpaceView3D.draw_handler_remove(_surface_handle, 'WINDOW')
|
||||||
|
_surface_handle = bpy.types.SpaceView3D.draw_handler_add(
|
||||||
|
surface_draw, (batch, shader), 'WINDOW', 'POST_VIEW')
|
||||||
|
elif _surface_handle is not None:
|
||||||
|
bpy.types.SpaceView3D.draw_handler_remove(_surface_handle, 'WINDOW')
|
||||||
|
_surface_handle = None
|
||||||
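All three overlays above follow one pattern: the update callback rebuilds a GPU batch, then adds or removes a POST_VIEW draw handler depending on the add-on preference and the property value. Condensed sketch of that pattern (coordinates and color are placeholders; '3D_UNIFORM_COLOR' is the pre-4.0 built-in name used in the diff):

import bpy
import gpu
from gpu_extras.batch import batch_for_shader

_handle = None

def _draw(batch, shader):
    shader.uniform_float("color", (1.0, 0.0, 0.0, 1.0))
    batch.draw(shader)

def toggle_overlay(show, coords):
    global _handle
    if _handle is not None:
        bpy.types.SpaceView3D.draw_handler_remove(_handle, 'WINDOW')
        _handle = None
    if show:
        shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
        batch = batch_for_shader(shader, 'LINES', {"pos": coords})
        _handle = bpy.types.SpaceView3D.draw_handler_add(
            _draw, (batch, shader), 'WINDOW', 'POST_VIEW')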
|
@ -95,9 +95,9 @@ def draw_callback_px(self, context):
|
|||||||
|
|
||||||
|
|
||||||
class SUNPOS_OT_ShowHdr(bpy.types.Operator):
|
class SUNPOS_OT_ShowHdr(bpy.types.Operator):
|
||||||
"""Tooltip"""
|
"""Select the location of the Sun in any 3D viewport and keep it in sync with the environment"""
|
||||||
bl_idname = "world.sunpos_show_hdr"
|
bl_idname = "world.sunpos_show_hdr"
|
||||||
bl_label = "Sync Sun to Texture"
|
bl_label = "Pick Sun in Viewport"
|
||||||
|
|
||||||
exposure: FloatProperty(name="Exposure", default=1.0)
|
exposure: FloatProperty(name="Exposure", default=1.0)
|
||||||
scale: FloatProperty(name="Scale", default=1.0)
|
scale: FloatProperty(name="Scale", default=1.0)
|
||||||
@ -265,7 +265,7 @@ class SUNPOS_OT_ShowHdr(bpy.types.Operator):
|
|||||||
|
|
||||||
nt = context.scene.world.node_tree.nodes
|
nt = context.scene.world.node_tree.nodes
|
||||||
env_tex_node = nt.get(context.scene.sun_pos_properties.hdr_texture)
|
env_tex_node = nt.get(context.scene.sun_pos_properties.hdr_texture)
|
||||||
if env_tex_node.type != "TEX_ENVIRONMENT":
|
if env_tex_node is None or env_tex_node.type != "TEX_ENVIRONMENT":
|
||||||
self.report({'ERROR'}, 'Please select an Environment Texture node')
|
self.report({'ERROR'}, 'Please select an Environment Texture node')
|
||||||
return {'CANCELLED'}
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
@ -4,9 +4,12 @@ import bpy
|
|||||||
from bpy.types import AddonPreferences, PropertyGroup
|
from bpy.types import AddonPreferences, PropertyGroup
|
||||||
from bpy.props import (StringProperty, EnumProperty, IntProperty,
|
from bpy.props import (StringProperty, EnumProperty, IntProperty,
|
||||||
FloatProperty, BoolProperty, PointerProperty)
|
FloatProperty, BoolProperty, PointerProperty)
|
||||||
|
from bpy.app.translations import pgettext_iface as iface_
|
||||||
|
|
||||||
from .sun_calc import sun_update, parse_coordinates, surface_update, analemmas_update, sun
|
|
||||||
from .draw import north_update
|
from .draw import north_update, surface_update, analemmas_update
|
||||||
|
from .geo import parse_position
|
||||||
|
from .sun_calc import format_lat_long, sun, update_time, move_sun
|
||||||
|
|
||||||
from math import pi
|
from math import pi
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
@ -16,6 +19,47 @@ TODAY = datetime.today()
|
|||||||
# Sun panel properties
|
# Sun panel properties
|
||||||
############################################################################
|
############################################################################
|
||||||
|
|
||||||
|
parse_success = True
|
||||||
|
|
||||||
|
|
||||||
|
def lat_long_update(self, context):
|
||||||
|
global parse_success
|
||||||
|
parse_success = True
|
||||||
|
sun_update(self, context)
|
||||||
|
|
||||||
|
|
||||||
|
def get_coordinates(self):
|
||||||
|
if parse_success:
|
||||||
|
return format_lat_long(self.latitude, self.longitude)
|
||||||
|
return iface_("ERROR: Could not parse coordinates")
|
||||||
|
|
||||||
|
|
||||||
|
def set_coordinates(self, value):
|
||||||
|
parsed_co = parse_position(value)
|
||||||
|
|
||||||
|
global parse_success
|
||||||
|
if parsed_co is not None and len(parsed_co) == 2:
|
||||||
|
latitude, longitude = parsed_co
|
||||||
|
self.latitude, self.longitude = latitude, longitude
|
||||||
|
else:
|
||||||
|
parse_success = False
|
||||||
|
|
||||||
|
sun_update(self, bpy.context)
|
||||||
|
|
||||||
|
|
||||||
|
def sun_update(self, context):
|
||||||
|
sun_props = context.scene.sun_pos_properties
|
||||||
|
|
||||||
|
update_time(context)
|
||||||
|
move_sun(context)
|
||||||
|
|
||||||
|
if sun_props.show_surface:
|
||||||
|
surface_update(self, context)
|
||||||
|
if sun_props.show_analemmas:
|
||||||
|
analemmas_update(self, context)
|
||||||
|
if sun_props.show_north:
|
||||||
|
north_update(self, context)
|
||||||
|
|
||||||
|
|
||||||
class SunPosProperties(PropertyGroup):
|
class SunPosProperties(PropertyGroup):
|
||||||
usage_mode: EnumProperty(
|
usage_mode: EnumProperty(
|
||||||
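The new coordinates field above is a StringProperty whose setter parses free-form text into the real latitude/longitude floats and whose getter re-formats them for display; SKIP_SAVE keeps the transient text out of the .blend file. Stripped-down sketch of the same pattern (names here are illustrative, not the add-on's):

import bpy
from bpy.props import FloatProperty, StringProperty

def _get_coords(self):
    # Getter: format the stored floats for display.
    return f"{self.lat:.3f}, {self.lon:.3f}"

def _set_coords(self, value):
    # Setter: parse free-form text; keep the previous values if parsing fails.
    try:
        lat, lon = (float(v) for v in value.split(","))
    except ValueError:
        return
    self.lat, self.lon = lat, lon

class DemoCoords(bpy.types.PropertyGroup):
    lat: FloatProperty(default=0.0)
    lon: FloatProperty(default=0.0)
    coordinates: StringProperty(name="Coordinates",
                                get=_get_coords, set=_set_coords,
                                options={'SKIP_SAVE'})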
@ -36,42 +80,49 @@ class SunPosProperties(PropertyGroup):
|
|||||||
|
|
||||||
use_refraction: BoolProperty(
|
use_refraction: BoolProperty(
|
||||||
name="Use Refraction",
|
name="Use Refraction",
|
||||||
description="Show apparent Sun position due to refraction",
|
description="Show the apparent Sun position due to atmospheric refraction",
|
||||||
default=True,
|
default=True,
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
|
|
||||||
show_north: BoolProperty(
|
show_north: BoolProperty(
|
||||||
name="Show North",
|
name="Show North",
|
||||||
description="Draw line pointing north",
|
description="Draw a line pointing to the north",
|
||||||
default=False,
|
default=False,
|
||||||
update=north_update)
|
update=north_update)
|
||||||
|
|
||||||
north_offset: FloatProperty(
|
north_offset: FloatProperty(
|
||||||
name="North Offset",
|
name="North Offset",
|
||||||
description="Rotate the scene to choose North direction",
|
description="Rotate the scene to choose the North direction",
|
||||||
unit="ROTATION",
|
unit="ROTATION",
|
||||||
soft_min=-pi, soft_max=pi, step=10.0, default=0.0,
|
soft_min=-pi, soft_max=pi, step=10.0, default=0.0,
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
|
|
||||||
show_surface: BoolProperty(
|
show_surface: BoolProperty(
|
||||||
name="Show Surface",
|
name="Show Surface",
|
||||||
description="Draw sun surface",
|
description="Draw the surface that the Sun occupies in the sky",
|
||||||
default=False,
|
default=False,
|
||||||
update=surface_update)
|
update=surface_update)
|
||||||
|
|
||||||
show_analemmas: BoolProperty(
|
show_analemmas: BoolProperty(
|
||||||
name="Show Analemmas",
|
name="Show Analemmas",
|
||||||
description="Draw sun analemmas",
|
description="Draw Sun analemmas. These help visualize the motion of the Sun in the sky during the year, for each hour of the day",
|
||||||
default=False,
|
default=False,
|
||||||
update=analemmas_update)
|
update=analemmas_update)
|
||||||
|
|
||||||
|
coordinates: StringProperty(
|
||||||
|
name="Coordinates",
|
||||||
|
description="Enter coordinates from an online map",
|
||||||
|
get=get_coordinates,
|
||||||
|
set=set_coordinates,
|
||||||
|
options={'SKIP_SAVE'})
|
||||||
|
|
||||||
latitude: FloatProperty(
|
latitude: FloatProperty(
|
||||||
name="Latitude",
|
name="Latitude",
|
||||||
description="Latitude: (+) Northern (-) Southern",
|
description="Latitude: (+) Northern (-) Southern",
|
||||||
soft_min=-90.0, soft_max=90.0,
|
soft_min=-90.0, soft_max=90.0,
|
||||||
step=5, precision=3,
|
step=5, precision=3,
|
||||||
default=0.0,
|
default=0.0,
|
||||||
update=sun_update)
|
update=lat_long_update)
|
||||||
|
|
||||||
longitude: FloatProperty(
|
longitude: FloatProperty(
|
||||||
name="Longitude",
|
name="Longitude",
|
||||||
@ -79,7 +130,7 @@ class SunPosProperties(PropertyGroup):
|
|||||||
soft_min=-180.0, soft_max=180.0,
|
soft_min=-180.0, soft_max=180.0,
|
||||||
step=5, precision=3,
|
step=5, precision=3,
|
||||||
default=0.0,
|
default=0.0,
|
||||||
update=sun_update)
|
update=lat_long_update)
|
||||||
|
|
||||||
sunrise_time: FloatProperty(
|
sunrise_time: FloatProperty(
|
||||||
name="Sunrise Time",
|
name="Sunrise Time",
|
||||||
@ -95,24 +146,23 @@ class SunPosProperties(PropertyGroup):
|
|||||||
default=0.0,
|
default=0.0,
|
||||||
get=lambda _: sun.sunset)
|
get=lambda _: sun.sunset)
|
||||||
|
|
||||||
sun_azimuth: FloatProperty(
|
|
||||||
name="Sun Azimuth",
|
|
||||||
description="Rotation angle of the Sun from the north direction",
|
|
||||||
soft_min=-pi, soft_max=pi,
|
|
||||||
default=0.0,
|
|
||||||
get=lambda _: sun.azimuth)
|
|
||||||
|
|
||||||
sun_elevation: FloatProperty(
|
sun_elevation: FloatProperty(
|
||||||
name="Sunset Time",
|
name="Sun Elevation",
|
||||||
description="Elevation angle of the Sun",
|
description="Elevation angle of the Sun",
|
||||||
soft_min=-pi/2, soft_max=pi/2,
|
soft_min=-pi/2, soft_max=pi/2,
|
||||||
|
precision=3,
|
||||||
default=0.0,
|
default=0.0,
|
||||||
|
unit="ROTATION",
|
||||||
get=lambda _: sun.elevation)
|
get=lambda _: sun.elevation)
|
||||||
|
|
||||||
co_parser: StringProperty(
|
sun_azimuth: FloatProperty(
|
||||||
name="Enter coordinates",
|
name="Sun Azimuth",
|
||||||
description="Enter coordinates from an online map",
|
description="Rotation angle of the Sun from the direction of the north",
|
||||||
update=parse_coordinates)
|
soft_min=-pi, soft_max=pi,
|
||||||
|
precision=3,
|
||||||
|
default=0.0,
|
||||||
|
unit="ROTATION",
|
||||||
|
get=lambda _: sun.azimuth - bpy.context.scene.sun_pos_properties.north_offset)
|
||||||
|
|
||||||
month: IntProperty(
|
month: IntProperty(
|
||||||
name="Month",
|
name="Month",
|
||||||
@ -130,19 +180,19 @@ class SunPosProperties(PropertyGroup):
|
|||||||
update=sun_update)
|
update=sun_update)
|
||||||
|
|
||||||
use_day_of_year: BoolProperty(
|
use_day_of_year: BoolProperty(
|
||||||
description="Use a single value for day of year",
|
description="Use a single value for the day of year",
|
||||||
name="Use day of year",
|
name="Use day of year",
|
||||||
default=False,
|
default=False,
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
|
|
||||||
day_of_year: IntProperty(
|
day_of_year: IntProperty(
|
||||||
name="Day of year",
|
name="Day of Year",
|
||||||
min=1, max=366, default=1,
|
min=1, max=366, default=1,
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
|
|
||||||
UTC_zone: FloatProperty(
|
UTC_zone: FloatProperty(
|
||||||
name="UTC zone",
|
name="UTC Zone",
|
||||||
description="Time zone: Difference from Greenwich, England in hours",
|
description="Difference from Greenwich, England, in hours",
|
||||||
precision=1,
|
precision=1,
|
||||||
min=-14.0, max=13, step=50, default=0.0,
|
min=-14.0, max=13, step=50, default=0.0,
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
@ -156,7 +206,7 @@ class SunPosProperties(PropertyGroup):
|
|||||||
|
|
||||||
sun_distance: FloatProperty(
|
sun_distance: FloatProperty(
|
||||||
name="Distance",
|
name="Distance",
|
||||||
description="Distance to sun from origin",
|
description="Distance to the Sun from the origin",
|
||||||
unit="LENGTH",
|
unit="LENGTH",
|
||||||
min=0.0, soft_max=3000.0, step=10.0, default=50.0,
|
min=0.0, soft_max=3000.0, step=10.0, default=50.0,
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
@ -164,22 +214,22 @@ class SunPosProperties(PropertyGroup):
|
|||||||
sun_object: PointerProperty(
|
sun_object: PointerProperty(
|
||||||
name="Sun Object",
|
name="Sun Object",
|
||||||
type=bpy.types.Object,
|
type=bpy.types.Object,
|
||||||
description="Sun object to set in the scene",
|
description="Sun object to use in the scene",
|
||||||
poll=lambda self, obj: obj.type == 'LIGHT',
|
poll=lambda self, obj: obj.type == 'LIGHT',
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
|
|
||||||
object_collection: PointerProperty(
|
object_collection: PointerProperty(
|
||||||
name="Collection",
|
name="Collection",
|
||||||
type=bpy.types.Collection,
|
type=bpy.types.Collection,
|
||||||
description="Collection of objects used to visualize sun motion",
|
description="Collection of objects used to visualize the motion of the Sun",
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
|
|
||||||
object_collection_type: EnumProperty(
|
object_collection_type: EnumProperty(
|
||||||
name="Display type",
|
name="Display type",
|
||||||
description="Show object collection as sun motion",
|
description="Type of Sun motion to visualize.",
|
||||||
items=(
|
items=(
|
||||||
('ANALEMMA', "Analemma", ""),
|
('ANALEMMA', "Analemma", "Trajectory of the Sun in the sky during the year, for a given time of the day"),
|
||||||
('DIURNAL', "Diurnal", ""),
|
('DIURNAL', "Diurnal", "Trajectory of the Sun in the sky during a single day"),
|
||||||
),
|
),
|
||||||
default='ANALEMMA',
|
default='ANALEMMA',
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
@ -187,19 +237,19 @@ class SunPosProperties(PropertyGroup):
|
|||||||
sky_texture: StringProperty(
|
sky_texture: StringProperty(
|
||||||
name="Sky Texture",
|
name="Sky Texture",
|
||||||
default="",
|
default="",
|
||||||
description="Name of sky texture to be used",
|
description="Name of the sky texture to use",
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
|
|
||||||
hdr_texture: StringProperty(
|
hdr_texture: StringProperty(
|
||||||
default="Environment Texture",
|
default="Environment Texture",
|
||||||
name="Environment Texture",
|
name="Environment Texture",
|
||||||
description="Name of texture to use. World nodes must be enabled "
|
description="Name of the environment texture to use. World nodes must be enabled "
|
||||||
"and color set to Environment Texture",
|
"and the color set to an environment Texture",
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
|
|
||||||
hdr_azimuth: FloatProperty(
|
hdr_azimuth: FloatProperty(
|
||||||
name="Rotation",
|
name="Rotation",
|
||||||
description="Rotation angle of sun and environment texture",
|
description="Rotation angle of the Sun and environment texture",
|
||||||
unit="ROTATION",
|
unit="ROTATION",
|
||||||
step=10.0,
|
step=10.0,
|
||||||
default=0.0, precision=3,
|
default=0.0, precision=3,
|
||||||
@ -207,7 +257,7 @@ class SunPosProperties(PropertyGroup):
|
|||||||
|
|
||||||
hdr_elevation: FloatProperty(
|
hdr_elevation: FloatProperty(
|
||||||
name="Elevation",
|
name="Elevation",
|
||||||
description="Elevation angle of sun",
|
description="Elevation angle of the Sun",
|
||||||
unit="ROTATION",
|
unit="ROTATION",
|
||||||
step=10.0,
|
step=10.0,
|
||||||
default=0.0, precision=3,
|
default=0.0, precision=3,
|
||||||
@ -215,13 +265,13 @@ class SunPosProperties(PropertyGroup):
|
|||||||
|
|
||||||
bind_to_sun: BoolProperty(
|
bind_to_sun: BoolProperty(
|
||||||
name="Bind Texture to Sun",
|
name="Bind Texture to Sun",
|
||||||
description="If true, Environment texture moves with sun",
|
description="If enabled, the environment texture moves with the Sun",
|
||||||
default=False,
|
default=False,
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
|
|
||||||
time_spread: FloatProperty(
|
time_spread: FloatProperty(
|
||||||
name="Time Spread",
|
name="Time Spread",
|
||||||
description="Time period in which to spread object collection",
|
description="Time period around which to spread object collection",
|
||||||
precision=4,
|
precision=4,
|
||||||
soft_min=1.0, soft_max=24.0, step=1.0, default=23.0,
|
soft_min=1.0, soft_max=24.0, step=1.0, default=23.0,
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
@ -234,53 +284,24 @@ class SunPosProperties(PropertyGroup):
|
|||||||
class SunPosAddonPreferences(AddonPreferences):
|
class SunPosAddonPreferences(AddonPreferences):
|
||||||
bl_idname = __package__
|
bl_idname = __package__
|
||||||
|
|
||||||
show_time_place: BoolProperty(
|
show_overlays: BoolProperty(
|
||||||
name="Time and place presets",
|
name="Show Overlays",
|
||||||
description="Show time/place presets",
|
description="Display overlays in the viewport: the direction of the north, analemmas and the Sun surface",
|
||||||
default=False)
|
|
||||||
|
|
||||||
show_dms: BoolProperty(
|
|
||||||
name="D° M' S\"",
|
|
||||||
description="Show lat/long degrees, minutes, seconds labels",
|
|
||||||
default=True)
|
|
||||||
|
|
||||||
show_north: BoolProperty(
|
|
||||||
name="Show North",
|
|
||||||
description="Show north offset choice and slider",
|
|
||||||
default=True,
|
|
||||||
update=sun_update)
|
|
||||||
|
|
||||||
show_surface: BoolProperty(
|
|
||||||
name="Show Surface",
|
|
||||||
description="Show sun surface choice and slider",
|
|
||||||
default=True,
|
|
||||||
update=sun_update)
|
|
||||||
|
|
||||||
show_analemmas: BoolProperty(
|
|
||||||
name="Show Analemmas",
|
|
||||||
description="Show analemmas choice and slider",
|
|
||||||
default=True,
|
default=True,
|
||||||
update=sun_update)
|
update=sun_update)
|
||||||
|
|
||||||
show_refraction: BoolProperty(
|
show_refraction: BoolProperty(
|
||||||
name="Refraction",
|
name="Refraction",
|
||||||
description="Show sun refraction choice",
|
description="Show Sun Refraction choice",
|
||||||
default=True,
|
default=True)
|
||||||
update=sun_update)
|
|
||||||
|
|
||||||
show_az_el: BoolProperty(
|
show_az_el: BoolProperty(
|
||||||
name="Azimuth and elevation info",
|
name="Azimuth and Elevation Info",
|
||||||
description="Show azimuth and solar elevation info",
|
description="Show azimuth and solar elevation info",
|
||||||
default=True)
|
default=True)
|
||||||
|
|
||||||
show_daylight_savings: BoolProperty(
|
|
||||||
name="Daylight savings",
|
|
||||||
description="Show daylight savings time choice",
|
|
||||||
default=True,
|
|
||||||
update=sun_update)
|
|
||||||
|
|
||||||
show_rise_set: BoolProperty(
|
show_rise_set: BoolProperty(
|
||||||
name="Sunrise and sunset info",
|
name="Sunrise and Sunset Info",
|
||||||
description="Show sunrise and sunset labels",
|
description="Show sunrise and sunset labels",
|
||||||
default=True)
|
default=True)
|
||||||
|
|
||||||
@ -292,12 +313,7 @@ class SunPosAddonPreferences(AddonPreferences):
|
|||||||
|
|
||||||
col.label(text="Show options or labels:")
|
col.label(text="Show options or labels:")
|
||||||
flow = col.grid_flow(columns=0, even_columns=True, even_rows=False, align=False)
|
flow = col.grid_flow(columns=0, even_columns=True, even_rows=False, align=False)
|
||||||
flow.prop(self, "show_time_place")
|
|
||||||
flow.prop(self, "show_dms")
|
|
||||||
flow.prop(self, "show_north")
|
|
||||||
flow.prop(self, "show_surface")
|
|
||||||
flow.prop(self, "show_analemmas")
|
|
||||||
flow.prop(self, "show_refraction")
|
flow.prop(self, "show_refraction")
|
||||||
|
flow.prop(self, "show_overlays")
|
||||||
flow.prop(self, "show_az_el")
|
flow.prop(self, "show_az_el")
|
||||||
flow.prop(self, "show_daylight_savings")
|
|
||||||
flow.prop(self, "show_rise_set")
|
flow.prop(self, "show_rise_set")
|
||||||
|
@ -2,6 +2,7 @@
|
|||||||
|
|
||||||
import bpy
|
import bpy
|
||||||
from bpy.app.handlers import persistent
|
from bpy.app.handlers import persistent
|
||||||
|
|
||||||
import gpu
|
import gpu
|
||||||
from gpu_extras.batch import batch_for_shader
|
from gpu_extras.batch import batch_for_shader
|
||||||
|
|
||||||
@ -9,7 +10,6 @@ from mathutils import Euler, Vector
|
|||||||
|
|
||||||
from math import degrees, radians, pi, sin, cos, asin, acos, tan, floor
|
from math import degrees, radians, pi, sin, cos, asin, acos, tan, floor
|
||||||
import datetime
|
import datetime
|
||||||
from .geo import parse_position
|
|
||||||
|
|
||||||
|
|
||||||
class SunInfo:
|
class SunInfo:
|
||||||
@ -48,32 +48,6 @@ class SunInfo:
|
|||||||
sun = SunInfo()
|
sun = SunInfo()
|
||||||
|
|
||||||
|
|
||||||
def sun_update(self, context):
|
|
||||||
update_time(context)
|
|
||||||
move_sun(context)
|
|
||||||
if self.show_surface:
|
|
||||||
surface_update(self, context)
|
|
||||||
if self.show_analemmas:
|
|
||||||
analemmas_update(self, context)
|
|
||||||
|
|
||||||
|
|
||||||
def parse_coordinates(self, context):
|
|
||||||
error_message = "ERROR: Could not parse coordinates"
|
|
||||||
sun_props = context.scene.sun_pos_properties
|
|
||||||
|
|
||||||
if sun_props.co_parser:
|
|
||||||
parsed_co = parse_position(sun_props.co_parser)
|
|
||||||
|
|
||||||
if parsed_co is not None and len(parsed_co) == 2:
|
|
||||||
sun_props.latitude, sun_props.longitude = parsed_co
|
|
||||||
elif sun_props.co_parser != error_message:
|
|
||||||
sun_props.co_parser = error_message
|
|
||||||
|
|
||||||
# Clear prop
|
|
||||||
if sun_props.co_parser not in {'', error_message}:
|
|
||||||
sun_props.co_parser = ''
|
|
||||||
|
|
||||||
|
|
||||||
def move_sun(context):
|
def move_sun(context):
|
||||||
"""
|
"""
|
||||||
Cycle through all the selected objects and set their position and rotation
|
Cycle through all the selected objects and set their position and rotation
|
||||||
@ -120,8 +94,7 @@ def move_sun(context):
|
|||||||
|
|
||||||
azimuth, elevation = get_sun_coordinates(
|
azimuth, elevation = get_sun_coordinates(
|
||||||
local_time, sun_props.latitude, sun_props.longitude,
|
local_time, sun_props.latitude, sun_props.longitude,
|
||||||
zone, sun_props.month, sun_props.day, sun_props.year,
|
zone, sun_props.month, sun_props.day, sun_props.year)
|
||||||
sun_props.sun_distance)
|
|
||||||
|
|
||||||
sun.azimuth = azimuth
|
sun.azimuth = azimuth
|
||||||
sun.elevation = elevation
|
sun.elevation = elevation
|
||||||
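The signature change above (get_sun_coordinates() no longer takes the distance) works because azimuth and elevation are independent of it; the unit direction from get_sun_vector() is simply scaled by sun_props.sun_distance at the call sites. The conversion below is an illustration consistent with those call sites, not a copy of the add-on's get_sun_vector():

from math import sin, cos
from mathutils import Vector

def sun_direction(azimuth, elevation):
    """Unit vector toward the sun; +Y is north, azimuth measured clockwise from it."""
    return Vector((sin(azimuth) * cos(elevation),
                   cos(azimuth) * cos(elevation),
                   sin(elevation)))

# As in the diff: obj.location = sun_direction(azimuth, elevation) * sun_props.sun_distance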
@ -159,8 +132,7 @@ def move_sun(context):
|
|||||||
azimuth, elevation = get_sun_coordinates(
|
azimuth, elevation = get_sun_coordinates(
|
||||||
local_time, sun_props.latitude,
|
local_time, sun_props.latitude,
|
||||||
sun_props.longitude, zone,
|
sun_props.longitude, zone,
|
||||||
sun_props.month, sun_props.day,
|
sun_props.month, sun_props.day)
|
||||||
sun_props.year, sun_props.sun_distance)
|
|
||||||
obj.location = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
|
obj.location = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
|
||||||
local_time -= time_increment
|
local_time -= time_increment
|
||||||
obj.rotation_euler = ((elevation - pi/2, 0, -azimuth))
|
obj.rotation_euler = ((elevation - pi/2, 0, -azimuth))
|
||||||
@ -174,8 +146,7 @@ def move_sun(context):
|
|||||||
azimuth, elevation = get_sun_coordinates(
|
azimuth, elevation = get_sun_coordinates(
|
||||||
local_time, sun_props.latitude,
|
local_time, sun_props.latitude,
|
||||||
sun_props.longitude, zone,
|
sun_props.longitude, zone,
|
||||||
dt.month, dt.day, sun_props.year,
|
dt.month, dt.day, sun_props.year)
|
||||||
sun_props.sun_distance)
|
|
||||||
obj.location = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
|
obj.location = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
|
||||||
day -= day_increment
|
day -= day_increment
|
||||||
obj.rotation_euler = (elevation - pi/2, 0, -azimuth)
|
obj.rotation_euler = (elevation - pi/2, 0, -azimuth)
|
||||||
@ -225,50 +196,46 @@ def sun_handler(scene):
|
|||||||
move_sun(bpy.context)
|
move_sun(bpy.context)
|
||||||
|
|
||||||
|
|
||||||
def format_time(the_time, daylight_savings, longitude, UTC_zone=None):
|
def format_time(time, daylight_savings, UTC_zone=None):
|
||||||
if UTC_zone is not None:
|
if UTC_zone is not None:
|
||||||
if daylight_savings:
|
if daylight_savings:
|
||||||
UTC_zone += 1
|
UTC_zone += 1
|
||||||
the_time -= UTC_zone
|
time -= UTC_zone
|
||||||
|
|
||||||
the_time %= 24
|
time %= 24
|
||||||
|
|
||||||
hh = int(the_time)
|
return format_hms(time)
|
||||||
mm = (the_time - int(the_time)) * 60
|
|
||||||
ss = int((mm - int(mm)) * 60)
|
|
||||||
|
|
||||||
return ("%02i:%02i:%02i" % (hh, mm, ss))
|
|
||||||
|
|
||||||
|
|
||||||
def format_hms(the_time):
|
def format_hms(time):
|
||||||
hh = str(int(the_time))
|
hh = int(time)
|
||||||
min = (the_time - int(the_time)) * 60
|
mm = (time % 1.0) * 60
|
||||||
sec = int((min - int(min)) * 60)
|
ss = (mm % 1.0) * 60
|
||||||
mm = "0" + str(int(min)) if min < 10 else str(int(min))
|
|
||||||
ss = "0" + str(sec) if sec < 10 else str(sec)
|
|
||||||
|
|
||||||
return (hh + ":" + mm + ":" + ss)
|
return f"{hh:02d}:{int(mm):02d}:{int(ss):02d}"
|
||||||
|
|
||||||
|
|
||||||
def format_lat_long(lat_long, is_latitude):
|
def format_lat_long(latitude, longitude):
|
||||||
hh = str(abs(int(lat_long)))
|
coordinates = ""
|
||||||
min = abs((lat_long - int(lat_long)) * 60)
|
|
||||||
sec = abs(int((min - int(min)) * 60))
|
for i, co in enumerate((latitude, longitude)):
|
||||||
mm = "0" + str(int(min)) if min < 10 else str(int(min))
|
dd = abs(int(co))
|
||||||
ss = "0" + str(sec) if sec < 10 else str(sec)
|
mm = abs(co - int(co)) * 60.0
|
||||||
if lat_long == 0:
|
ss = abs(mm - int(mm)) * 60.0
|
||||||
coord_tag = " "
|
if co == 0:
|
||||||
else:
|
direction = ""
|
||||||
if is_latitude:
|
elif i == 0:
|
||||||
coord_tag = " N" if lat_long > 0 else " S"
|
direction = "N" if co > 0 else "S"
|
||||||
else:
|
else:
|
||||||
coord_tag = " E" if lat_long > 0 else " W"
|
direction = "E" if co > 0 else "W"
|
||||||
|
|
||||||
return hh + "° " + mm + "' " + ss + '"' + coord_tag
|
coordinates += f"{dd:02d}°{int(mm):02d}′{ss:05.2f}″{direction} "
|
||||||
|
|
||||||
|
return coordinates.strip(" ")
|
||||||
|
|
||||||
|
|
||||||
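The rewritten time formatter above replaces manual zero-padding with modulo arithmetic and an f-string; format_lat_long() applies the same idea to degrees, minutes and seconds. A quick worked check of format_hms() exactly as it now reads (the input is chosen to avoid floating-point rounding in the example):

def format_hms(time):
    hh = int(time)
    mm = (time % 1.0) * 60
    ss = (mm % 1.0) * 60
    return f"{hh:02d}:{int(mm):02d}:{int(ss):02d}"

print(format_hms(6.25))   # 6.25 hours -> 06:15:00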
def get_sun_coordinates(local_time, latitude, longitude,
|
def get_sun_coordinates(local_time, latitude, longitude,
|
||||||
utc_zone, month, day, year, distance):
|
utc_zone, month, day, year):
|
||||||
"""
|
"""
|
||||||
Calculate the actual position of the sun based on input parameters.
|
Calculate the actual position of the sun based on input parameters.
|
||||||
|
|
||||||
@ -284,7 +251,6 @@ def get_sun_coordinates(local_time, latitude, longitude,
|
|||||||
NOAA's web site is:
|
NOAA's web site is:
|
||||||
http://www.esrl.noaa.gov/gmd/grad/solcalc
|
http://www.esrl.noaa.gov/gmd/grad/solcalc
|
||||||
"""
|
"""
|
||||||
addon_prefs = bpy.context.preferences.addons[__package__].preferences
|
|
||||||
sun_props = bpy.context.scene.sun_pos_properties
|
sun_props = bpy.context.scene.sun_pos_properties
|
||||||
|
|
||||||
longitude *= -1 # for internal calculations
|
longitude *= -1 # for internal calculations
|
||||||
@ -446,10 +412,6 @@ def calc_sunrise_sunset(rise):
|
|||||||
sun.latitude, sun.longitude)
|
sun.latitude, sun.longitude)
|
||||||
time_local = new_time_UTC + (-zone * 60.0)
|
time_local = new_time_UTC + (-zone * 60.0)
|
||||||
tl = time_local / 60.0
|
tl = time_local / 60.0
|
||||||
azimuth, elevation = get_sun_coordinates(
|
|
||||||
tl, sun.latitude, sun.longitude,
|
|
||||||
zone, sun.month, sun.day, sun.year,
|
|
||||||
sun.sun_distance)
|
|
||||||
if sun.use_daylight_savings:
|
if sun.use_daylight_savings:
|
||||||
time_local += 60.0
|
time_local += 60.0
|
||||||
tl = time_local / 60.0
|
tl = time_local / 60.0
|
||||||
@ -563,7 +525,7 @@ def calc_surface(context):
|
|||||||
def get_surface_coordinates(time, month):
|
def get_surface_coordinates(time, month):
|
||||||
azimuth, elevation = get_sun_coordinates(
|
azimuth, elevation = get_sun_coordinates(
|
||||||
time, sun_props.latitude, sun_props.longitude,
|
time, sun_props.latitude, sun_props.longitude,
|
||||||
zone, month, 1, sun_props.year, sun_props.sun_distance)
|
zone, month, 1, sun_props.year)
|
||||||
sun_vector = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
|
sun_vector = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
|
||||||
sun_vector.z = max(0, sun_vector.z)
|
sun_vector.z = max(0, sun_vector.z)
|
||||||
return sun_vector
|
return sun_vector
|
||||||
@ -588,71 +550,8 @@ def calc_analemma(context, h):
|
|||||||
day, month = day_of_year_to_month_day(sun_props.year, day_of_year)
|
day, month = day_of_year_to_month_day(sun_props.year, day_of_year)
|
||||||
azimuth, elevation = get_sun_coordinates(
|
azimuth, elevation = get_sun_coordinates(
|
||||||
h, sun_props.latitude, sun_props.longitude,
|
h, sun_props.latitude, sun_props.longitude,
|
||||||
zone, month, day, sun_props.year,
|
zone, month, day, sun_props.year)
|
||||||
sun_props.sun_distance)
|
|
||||||
sun_vector = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
|
sun_vector = get_sun_vector(azimuth, elevation) * sun_props.sun_distance
|
||||||
if sun_vector.z > 0:
|
if sun_vector.z > 0:
|
||||||
vertices.append(sun_vector)
|
vertices.append(sun_vector)
|
||||||
return vertices
|
return vertices
|
||||||
|
|
||||||
|
|
||||||
def draw_surface(batch, shader):
|
|
||||||
blend = gpu.state.blend_get()
|
|
||||||
gpu.state.blend_set("ALPHA")
|
|
||||||
shader.uniform_float("color", (.8, .6, 0, 0.2))
|
|
||||||
batch.draw(shader)
|
|
||||||
gpu.state.blend_set(blend)
|
|
||||||
|
|
||||||
|
|
||||||
def draw_analemmas(batch, shader):
|
|
||||||
shader.uniform_float("color", (1, 0, 0, 1))
|
|
||||||
batch.draw(shader)
|
|
||||||
|
|
||||||
|
|
||||||
_handle_surface = None
|
|
||||||
|
|
||||||
|
|
||||||
def surface_update(self, context):
|
|
||||||
global _handle_surface
|
|
||||||
if self.show_surface:
|
|
||||||
coords = calc_surface(context)
|
|
||||||
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
|
|
||||||
batch = batch_for_shader(shader, 'TRIS', {"pos": coords})
|
|
||||||
|
|
||||||
if _handle_surface is not None:
|
|
||||||
bpy.types.SpaceView3D.draw_handler_remove(_handle_surface, 'WINDOW')
|
|
||||||
_handle_surface = bpy.types.SpaceView3D.draw_handler_add(
|
|
||||||
draw_surface, (batch, shader), 'WINDOW', 'POST_VIEW')
|
|
||||||
elif _handle_surface is not None:
|
|
||||||
bpy.types.SpaceView3D.draw_handler_remove(_handle_surface, 'WINDOW')
|
|
||||||
_handle_surface = None
|
|
||||||
|
|
||||||
|
|
||||||
_handle_analemmas = None
|
|
||||||
|
|
||||||
|
|
||||||
def analemmas_update(self, context):
|
|
||||||
global _handle_analemmas
|
|
||||||
if self.show_analemmas:
|
|
||||||
coords = []
|
|
||||||
indices = []
|
|
||||||
coord_offset = 0
|
|
||||||
for h in range(24):
|
|
||||||
analemma_verts = calc_analemma(context, h)
|
|
||||||
coords.extend(analemma_verts)
|
|
||||||
for i in range(len(analemma_verts) - 1):
|
|
||||||
indices.append((coord_offset + i,
|
|
||||||
coord_offset + i+1))
|
|
||||||
coord_offset += len(analemma_verts)
|
|
||||||
|
|
||||||
shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
|
|
||||||
batch = batch_for_shader(shader, 'LINES',
|
|
||||||
{"pos": coords}, indices=indices)
|
|
||||||
|
|
||||||
if _handle_analemmas is not None:
|
|
||||||
bpy.types.SpaceView3D.draw_handler_remove(_handle_analemmas, 'WINDOW')
|
|
||||||
_handle_analemmas = bpy.types.SpaceView3D.draw_handler_add(
|
|
||||||
draw_analemmas, (batch, shader), 'WINDOW', 'POST_VIEW')
|
|
||||||
elif _handle_analemmas is not None:
|
|
||||||
bpy.types.SpaceView3D.draw_handler_remove(_handle_analemmas, 'WINDOW')
|
|
||||||
_handle_analemmas = None
|
|
||||||
|
@ -10,14 +10,14 @@
|
|||||||
translations_tuple = (
|
translations_tuple = (
|
||||||
(("*", ""),
|
(("*", ""),
|
||||||
((), ()),
|
((), ()),
|
||||||
("fr_FR", "Project-Id-Version: Sun Position 3.1.2 (0)\n",
|
("fr_FR", "Project-Id-Version: Sun Position 3.3.3 (0)\n",
|
||||||
(False,
|
(False,
|
||||||
("Blender's translation file (po format).",
|
("Blender's translation file (po format).",
|
||||||
"Copyright (C) 2022 The Blender Foundation.",
|
"Copyright (C) 2022 The Blender Foundation.",
|
||||||
"This file is distributed under the same license as the Blender package.",
|
"This file is distributed under the same license as the Blender package.",
|
||||||
"Damien Picard <dam.pic@free.fr>, 2022."))),
|
"Damien Picard <dam.pic@free.fr>, 2022."))),
|
||||||
),
|
),
|
||||||
(("*", "Azimuth and elevation info"),
|
(("*", "Azimuth and Elevation Info"),
|
||||||
(("bpy.types.SunPosAddonPreferences.show_az_el",),
|
(("bpy.types.SunPosAddonPreferences.show_az_el",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Infos d’azimut et de hauteur",
|
("fr_FR", "Infos d’azimut et de hauteur",
|
||||||
@ -26,60 +26,35 @@ translations_tuple = (
|
|||||||
(("*", "Show azimuth and solar elevation info"),
|
(("*", "Show azimuth and solar elevation info"),
|
||||||
(("bpy.types.SunPosAddonPreferences.show_az_el",),
|
(("bpy.types.SunPosAddonPreferences.show_az_el",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Afficher les infos d’azimut et de hauteur du soleil",
|
("fr_FR", "Afficher les infos d’azimut et de hauteur du Soleil",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Daylight savings"),
|
(("*", "Daylight Savings"),
|
||||||
(("bpy.types.SunPosAddonPreferences.show_daylight_savings",
|
(("bpy.types.SunPosProperties.use_daylight_savings"),
|
||||||
"bpy.types.SunPosProperties.use_daylight_savings"),
|
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Heure d’été",
|
("fr_FR", "Heure d’été",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Show daylight savings time choice"),
|
(("*", "Display overlays in the viewport: the direction of the north, analemmas and the Sun surface"),
|
||||||
(("bpy.types.SunPosAddonPreferences.show_daylight_savings",),
|
(("bpy.types.SunPosAddonPreferences.show_overlays",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Afficher l’option de changement d’heure",
|
("fr_FR", "Afficher des surimpressions dans la vue 3D : la direction du nord, les analemmes et la surface du Soleil",
|
||||||
(False, ())),
|
|
||||||
),
|
|
||||||
(("*", "D° M' S\""),
|
|
||||||
(("bpy.types.SunPosAddonPreferences.show_dms",),
|
|
||||||
()),
|
|
||||||
("fr_FR", "",
|
|
||||||
(False, ())),
|
|
||||||
),
|
|
||||||
(("*", "Show lat/long degrees, minutes, seconds labels"),
|
|
||||||
(("bpy.types.SunPosAddonPreferences.show_dms",),
|
|
||||||
()),
|
|
||||||
("fr_FR", "Afficher les étiquettes de latitude et longitude en degrés, minutes, secondes",
|
|
||||||
(False, ())),
|
|
||||||
),
|
|
||||||
(("*", "Show North"),
|
|
||||||
(("bpy.types.SunPosAddonPreferences.show_north",
|
|
||||||
"bpy.types.SunPosProperties.show_north"),
|
|
||||||
()),
|
|
||||||
("fr_FR", "Afficher le nord",
|
|
||||||
(False, ())),
|
|
||||||
),
|
|
||||||
(("*", "Show north offset choice and slider"),
|
|
||||||
(("bpy.types.SunPosAddonPreferences.show_north",),
|
|
||||||
()),
|
|
||||||
("fr_FR", "Afficher l’option et le curseur de décalage du nord",
|
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Refraction"),
|
(("*", "Refraction"),
|
||||||
(("bpy.types.SunPosAddonPreferences.show_refraction",),
|
(("bpy.types.SunPosAddonPreferences.show_refraction",
|
||||||
|
"scripts/addons/sun_position/ui_sun.py:151"),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Réfraction",
|
("fr_FR", "Réfraction",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Show sun refraction choice"),
|
(("*", "Show Sun Refraction choice"),
|
||||||
(("bpy.types.SunPosAddonPreferences.show_refraction",),
|
(("bpy.types.SunPosAddonPreferences.show_refraction",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Afficher l’option de réfraction du soleil",
|
("fr_FR", "Afficher l’option de réfraction du Soleil",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Sunrise and sunset info"),
|
(("*", "Sunrise and Sunset Info"),
|
||||||
(("bpy.types.SunPosAddonPreferences.show_rise_set",),
|
(("bpy.types.SunPosAddonPreferences.show_rise_set",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Infos de lever et coucher",
|
("fr_FR", "Infos de lever et coucher",
|
||||||
@ -88,19 +63,7 @@ translations_tuple = (
|
|||||||
(("*", "Show sunrise and sunset labels"),
|
(("*", "Show sunrise and sunset labels"),
|
||||||
(("bpy.types.SunPosAddonPreferences.show_rise_set",),
|
(("bpy.types.SunPosAddonPreferences.show_rise_set",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Afficher les informations de lever et coucher du soleil",
|
("fr_FR", "Afficher les informations de lever et coucher du Soleil",
|
||||||
(False, ())),
|
|
||||||
),
|
|
||||||
(("*", "Time and place presets"),
|
|
||||||
(("bpy.types.SunPosAddonPreferences.show_time_place",),
|
|
||||||
()),
|
|
||||||
("fr_FR", "Préréglages d’heure et de lieu",
|
|
||||||
(False, ())),
|
|
||||||
),
|
|
||||||
(("*", "Show time/place presets"),
|
|
||||||
(("bpy.types.SunPosAddonPreferences.show_time_place",),
|
|
||||||
()),
|
|
||||||
("fr_FR", "Afficher les préréglages d’heure et de lieu",
|
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Sun Position"),
|
(("*", "Sun Position"),
|
||||||
@ -114,56 +77,56 @@ translations_tuple = (
|
|||||||
(("*", "Sun Position Settings"),
|
(("*", "Sun Position Settings"),
|
||||||
(("bpy.types.Scene.sun_pos_properties",),
|
(("bpy.types.Scene.sun_pos_properties",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Options de Position du Soleil",
|
("fr_FR", "Options de position du Soleil",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Sun Position Presets"),
|
(("*", "Sun Position Presets"),
|
||||||
(("bpy.types.SUNPOS_MT_Presets",),
|
(("bpy.types.SUNPOS_PT_Presets",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Préréglages de position du Soleil",
|
("fr_FR", "Préréglages de position du Soleil",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("Operator", "Sync Sun to Texture"),
|
(("Operator", "Pick Sun in Viewport"),
|
||||||
(("bpy.types.WORLD_OT_sunpos_show_hdr",),
|
(("bpy.types.WORLD_OT_sunpos_show_hdr",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Synchroniser Soleil et texture",
|
("fr_FR", "Pointer le Soleil dans la vue",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "UTC zone"),
|
(("*", "Select the location of the Sun in any 3D viewport and keep it in sync with the environment"),
|
||||||
|
(("bpy.types.WORLD_OT_sunpos_show_hdr",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Sélectionner la position du Soleil dans n’importe quelle vue 3D, puis la synchroniser avec l’environnement",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "UTC Zone"),
|
||||||
(("bpy.types.SunPosProperties.UTC_zone",),
|
(("bpy.types.SunPosProperties.UTC_zone",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Fuseau horaire",
|
("fr_FR", "Fuseau horaire",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Time zone: Difference from Greenwich, England in hours"),
|
(("*", "Difference from Greenwich, England, in hours"),
|
||||||
(("bpy.types.SunPosProperties.UTC_zone",),
|
(("bpy.types.SunPosProperties.UTC_zone",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Fuseau horaire : différence avec Greenwich, Angleterre, en heures",
|
("fr_FR", "Différence avec Greenwich, Angleterre, en heures",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Bind Texture to Sun"),
|
(("*", "Bind Texture to Sun"),
|
||||||
(("bpy.types.SunPosProperties.bind_to_sun",
|
(("bpy.types.SunPosProperties.bind_to_sun",
|
||||||
"scripts/addons/sun_position/ui_sun.py:119"),
|
"scripts/addons/sun_position/ui_sun.py:103"),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Lier la texture au Soleil",
|
("fr_FR", "Lier la texture au Soleil",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "If true, Environment texture moves with sun"),
|
(("*", "If enabled, the environment texture moves with the Sun"),
|
||||||
(("bpy.types.SunPosProperties.bind_to_sun",),
|
(("bpy.types.SunPosProperties.bind_to_sun",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Si actif, la texture d’environnement tourne avec le Soleil",
|
("fr_FR", "Si actif, la texture d’environnement tourne avec le Soleil",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Enter coordinates"),
|
|
||||||
(("bpy.types.SunPosProperties.co_parser",),
|
|
||||||
()),
|
|
||||||
("fr_FR", "Saisir coordonnées",
|
|
||||||
(False, ())),
|
|
||||||
),
|
|
||||||
(("*", "Enter coordinates from an online map"),
|
(("*", "Enter coordinates from an online map"),
|
||||||
(("bpy.types.SunPosProperties.co_parser",),
|
(("bpy.types.SunPosProperties.coordinates",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Saisir des coordonnées depuis une carte",
|
("fr_FR", "Saisir des coordonnées depuis une carte en ligne",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Day"),
|
(("*", "Day"),
|
||||||
@ -172,34 +135,36 @@ translations_tuple = (
|
|||||||
("fr_FR", "Jour",
|
("fr_FR", "Jour",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Day of year"),
|
(("*", "Day of Year"),
|
||||||
(("bpy.types.SunPosProperties.day_of_year",),
|
(("bpy.types.SunPosProperties.day_of_year",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Jour de l’année",
|
("fr_FR", "Jour de l’année",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Rotation angle of sun and environment texture"),
|
(("*", "Rotation angle of the Sun and environment texture"),
|
||||||
(("bpy.types.SunPosProperties.hdr_azimuth",),
|
(("bpy.types.SunPosProperties.hdr_azimuth",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Angle de rotation du Soleil et de la texture d’environnement",
|
("fr_FR", "Angle de rotation du Soleil et de la texture d’environnement",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Elevation"),
|
(("*", "Elevation"),
|
||||||
(("bpy.types.SunPosProperties.hdr_elevation",),
|
(("bpy.types.SunPosProperties.hdr_elevation",
|
||||||
|
"scripts/addons/sun_position/ui_sun.py:185"),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Hauteur",
|
("fr_FR", "Hauteur",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Elevation angle of sun"),
|
(("*", "Elevation angle of the Sun"),
|
||||||
(("bpy.types.SunPosProperties.hdr_elevation",),
|
(("bpy.types.SunPosProperties.hdr_elevation",
|
||||||
|
"bpy.types.SunPosProperties.sun_elevation"),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Angle de hauteur du Soleil",
|
("fr_FR", "Angle de hauteur du Soleil",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Name of texture to use. World nodes must be enabled and color set to Environment Texture"),
|
(("*", "Name of the environment texture to use. World nodes must be enabled and the color set to an environment Texture"),
|
||||||
(("bpy.types.SunPosProperties.hdr_texture",),
|
(("bpy.types.SunPosProperties.hdr_texture",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Nom de la texture à utiliser. Les nœuds de shader du monde doivent être activés, et la couleur utiliser une texture d’environnement",
|
("fr_FR", "Nom de la texture d’environnement à utiliser. Les nœuds de shader du monde doivent être activés, et la couleur utiliser une texture d’environnement",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Latitude"),
|
(("*", "Latitude"),
|
||||||
@ -233,27 +198,28 @@ translations_tuple = (
|
|||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "North Offset"),
|
(("*", "North Offset"),
|
||||||
(("bpy.types.SunPosProperties.north_offset",),
|
(("bpy.types.SunPosProperties.north_offset",
|
||||||
|
"scripts/addons/sun_position/ui_sun.py:181"),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Décalage du nord",
|
("fr_FR", "Décalage du nord",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Rotate the scene to choose North direction"),
|
(("*", "Rotate the scene to choose the North direction"),
|
||||||
(("bpy.types.SunPosProperties.north_offset",),
|
(("bpy.types.SunPosProperties.north_offset",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Tourner la scène pour choisir la direction du nord",
|
("fr_FR", "Tourner la scène pour choisir la direction du nord",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Collection of objects used to visualize sun motion"),
|
(("*", "Collection of objects used to visualize the motion of the Sun"),
|
||||||
(("bpy.types.SunPosProperties.object_collection",),
|
(("bpy.types.SunPosProperties.object_collection",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Collection d’objets utilisée pour visualiser la trajectoire du Soleil",
|
("fr_FR", "Collection d’objets utilisée pour visualiser la trajectoire du Soleil",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Show object collection as sun motion"),
|
(("*", "Type of Sun motion to visualize."),
|
||||||
(("bpy.types.SunPosProperties.object_collection_type",),
|
(("bpy.types.SunPosProperties.object_collection_type",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Afficher la collection en tant que",
|
("fr_FR", "Type de trajectoire du Soleil à visualiser",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Analemma"),
|
(("*", "Analemma"),
|
||||||
@ -262,41 +228,118 @@ translations_tuple = (
|
|||||||
("fr_FR", "Analemme",
|
("fr_FR", "Analemme",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
|
(("*", "Trajectory of the Sun in the sky during the year, for a given time of the day"),
|
||||||
|
(("bpy.types.SunPosProperties.object_collection_type:'ANALEMMA'",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Trajectoire du Soleil pendant l’année, pour une heure donnée du jour",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
(("*", "Diurnal"),
|
(("*", "Diurnal"),
|
||||||
(("bpy.types.SunPosProperties.object_collection_type:'DIURNAL'",),
|
(("bpy.types.SunPosProperties.object_collection_type:'DIURNAL'",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Diurne",
|
("fr_FR", "Diurne",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Draw line pointing north"),
|
(("*", "Trajectory of the Sun in the sky during a single day"),
|
||||||
|
(("bpy.types.SunPosProperties.object_collection_type:'DIURNAL'",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Trajectoire du Soleil pendant un seul jour",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "Show Analemmas"),
|
||||||
|
(("bpy.types.SunPosProperties.show_analemmas",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Afficher les analemmes",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "Draw Sun analemmas. These help visualize the motion of the Sun in the sky during the year, for each hour of the day"),
|
||||||
|
(("bpy.types.SunPosProperties.show_analemmas",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Afficher les analemmes du soleil. Ils aident à visualiser la trajectoire du Soleil dans le ciel pendant l’année, pour chaque heure du jour",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "Show North"),
|
||||||
|
(("bpy.types.SunPosProperties.show_north",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Afficher le nord",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "Draw a line pointing to the north"),
|
||||||
(("bpy.types.SunPosProperties.show_north",),
|
(("bpy.types.SunPosProperties.show_north",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Afficher une ligne pointant le nord",
|
("fr_FR", "Afficher une ligne pointant le nord",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Name of sky texture to be used"),
|
(("*", "Show Surface"),
|
||||||
(("bpy.types.SunPosProperties.sky_texture",),
|
(("bpy.types.SunPosProperties.show_surface",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Nom de la texture à utiliser",
|
("fr_FR", "Afficher la surface",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Distance to sun from origin"),
|
(("*", "Draw the surface that the Sun occupies in the sky"),
|
||||||
|
(("bpy.types.SunPosProperties.show_surface",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Afficher la surface que le Soleil occupe dans le ciel",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "Name of the sky texture to use"),
|
||||||
|
(("bpy.types.SunPosProperties.sky_texture",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Nom de la texture de ciel à utiliser",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "Sun Azimuth"),
|
||||||
|
(("bpy.types.SunPosProperties.sun_azimuth",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Azimut du Soleil",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "Rotation angle of the Sun from the direction of the north"),
|
||||||
|
(("bpy.types.SunPosProperties.sun_azimuth",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Angle de rotation du Soleil depuis la direction du nord",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "Distance to the Sun from the origin"),
|
||||||
(("bpy.types.SunPosProperties.sun_distance",),
|
(("bpy.types.SunPosProperties.sun_distance",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Distance entre l’origine et le Soleil",
|
("fr_FR", "Distance entre l’origine et le Soleil",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Sun Object"),
|
(("*", "Sun Object"),
|
||||||
(("bpy.types.SunPosProperties.sun_object",
|
|
||||||
"scripts/addons/sun_position/ui_sun.py:101"),
|
|
||||||
()),
|
|
||||||
("fr_FR", "Objet soleil",
|
|
||||||
(False, ())),
|
|
||||||
),
|
|
||||||
(("*", "Sun object to set in the scene"),
|
|
||||||
(("bpy.types.SunPosProperties.sun_object",),
|
(("bpy.types.SunPosProperties.sun_object",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Objet soleil à utiliser dans la scène",
|
("fr_FR", "Objet Soleil",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "Sun object to use in the scene"),
|
||||||
|
(("bpy.types.SunPosProperties.sun_object",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Objet Soleil à utiliser dans la scène",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "Sunrise Time"),
|
||||||
|
(("bpy.types.SunPosProperties.sunrise_time",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Heure de lever",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "Time at which the Sun rises"),
|
||||||
|
(("bpy.types.SunPosProperties.sunrise_time",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Heure à laquelle le Soleil se lève",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "Sunset Time"),
|
||||||
|
(("bpy.types.SunPosProperties.sunset_time",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Heure de coucher",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "Time at which the Sun sets"),
|
||||||
|
(("bpy.types.SunPosProperties.sunset_time",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Heure à laquelle le Soleil se couche",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Time of the day"),
|
(("*", "Time of the day"),
|
||||||
@ -311,16 +354,16 @@ translations_tuple = (
|
|||||||
("fr_FR", "Plage horaire",
|
("fr_FR", "Plage horaire",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Time period in which to spread object collection"),
|
(("*", "Time period around which to spread object collection"),
|
||||||
(("bpy.types.SunPosProperties.time_spread",),
|
(("bpy.types.SunPosProperties.time_spread",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Plage horaire à visualiser par les objets de la collection",
|
("fr_FR", "Plage horaire à visualiser par les objets de la collection",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Usage mode"),
|
(("*", "Usage Mode"),
|
||||||
(("bpy.types.SunPosProperties.usage_mode",),
|
(("bpy.types.SunPosProperties.usage_mode",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Mode",
|
("fr_FR", "Mode d’utilisation",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Operate in normal mode or environment texture mode"),
|
(("*", "Operate in normal mode or environment texture mode"),
|
||||||
@ -332,7 +375,7 @@ translations_tuple = (
|
|||||||
(("*", "Sun + HDR texture"),
|
(("*", "Sun + HDR texture"),
|
||||||
(("bpy.types.SunPosProperties.usage_mode:'HDR'",),
|
(("bpy.types.SunPosProperties.usage_mode:'HDR'",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Soleil + texture HDRI",
|
("fr_FR", "Soleil et texture HDRI",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Use day of year"),
|
(("*", "Use day of year"),
|
||||||
@ -341,7 +384,7 @@ translations_tuple = (
|
|||||||
("fr_FR", "Utiliser le jour de l’année",
|
("fr_FR", "Utiliser le jour de l’année",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Use a single value for day of year"),
|
(("*", "Use a single value for the day of year"),
|
||||||
(("bpy.types.SunPosProperties.use_day_of_year",),
|
(("bpy.types.SunPosProperties.use_day_of_year",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Utiliser une seule valeur pour le jour de l’année",
|
("fr_FR", "Utiliser une seule valeur pour le jour de l’année",
|
||||||
@ -353,16 +396,16 @@ translations_tuple = (
|
|||||||
("fr_FR", "L’heure d’été ajoute une heure à l’heure standard",
|
("fr_FR", "L’heure d’été ajoute une heure à l’heure standard",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Use refraction"),
|
(("*", "Use Refraction"),
|
||||||
(("bpy.types.SunPosProperties.use_refraction",),
|
(("bpy.types.SunPosProperties.use_refraction",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Utiliser la réfraction",
|
("fr_FR", "Utiliser la réfraction",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Show apparent sun position due to refraction"),
|
(("*", "Show the apparent Sun position due to atmospheric refraction"),
|
||||||
(("bpy.types.SunPosProperties.use_refraction",),
|
(("bpy.types.SunPosProperties.use_refraction",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Afficher la position apparente du Soleil due à la réfraction",
|
("fr_FR", "Afficher la position apparente du Soleil due à la réfraction atmosphérique",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Year"),
|
(("*", "Year"),
|
||||||
@ -372,99 +415,111 @@ translations_tuple = (
|
|||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Could not find 3D View"),
|
(("*", "Could not find 3D View"),
|
||||||
(("scripts/addons/sun_position/hdr.py:262",),
|
(("scripts/addons/sun_position/hdr.py:263",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Impossible de trouver la vue 3D",
|
("fr_FR", "Impossible de trouver la vue 3D",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Please select an Environment Texture node"),
|
(("*", "Please select an Environment Texture node"),
|
||||||
(("scripts/addons/sun_position/hdr.py:268",),
|
(("scripts/addons/sun_position/hdr.py:269",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Veuillez utiliser un nœud de texture d’environnement",
|
("fr_FR", "Veuillez utiliser un nœud de texture d’environnement",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Unknown projection"),
|
(("*", "Unknown projection"),
|
||||||
(("scripts/addons/sun_position/hdr.py:180",),
|
(("scripts/addons/sun_position/hdr.py:181",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Projection inconnue",
|
("fr_FR", "Projection inconnue",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Show options or labels:"),
|
(("*", "Show options or labels:"),
|
||||||
(("scripts/addons/sun_position/properties.py:242",),
|
(("scripts/addons/sun_position/properties.py:297",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Afficher les options et étiquettes :",
|
("fr_FR", "Afficher les options et étiquettes :",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Usage Mode"),
|
(("*", "ERROR: Could not parse coordinates"),
|
||||||
(("scripts/addons/sun_position/ui_sun.py:71",),
|
(("scripts/addons/sun_position/sun_calc.py:54",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Mode",
|
("fr_FR", "ERREUR : Impossible d’analyser les coordonnées",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Environment Texture"),
|
(("Hour", "Time"),
|
||||||
(("scripts/addons/sun_position/ui_sun.py:85",),
|
(("scripts/addons/sun_position/ui_sun.py:224",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Texture d’environnement",
|
("fr_FR", "Heure",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Enter Coordinates"),
|
(("*", "Time Local:"),
|
||||||
(("scripts/addons/sun_position/ui_sun.py:174",),
|
(("scripts/addons/sun_position/ui_sun.py:242",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Saisir coordonnées",
|
("fr_FR", "Heure locale :",
|
||||||
(False, ())),
|
|
||||||
),
|
|
||||||
(("*", "Local:"),
|
|
||||||
(("scripts/addons/sun_position/ui_sun.py:269",),
|
|
||||||
()),
|
|
||||||
("fr_FR", "Locale :",
|
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "UTC:"),
|
(("*", "UTC:"),
|
||||||
(("scripts/addons/sun_position/ui_sun.py:272",),
|
(("scripts/addons/sun_position/ui_sun.py:243",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "UTC : ",
|
("fr_FR", "UTC :",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Please select World in the World panel."),
|
(("*", "Please select World in the World panel."),
|
||||||
(("scripts/addons/sun_position/ui_sun.py:95",
|
(("scripts/addons/sun_position/ui_sun.py:97",
|
||||||
"scripts/addons/sun_position/ui_sun.py:153"),
|
"scripts/addons/sun_position/ui_sun.py:140"),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Veuillez sélectionner le monde dans le panneau Monde",
|
("fr_FR", "Veuillez sélectionner le monde dans le panneau Monde",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Release binding"),
|
(("*", "Show"),
|
||||||
(("scripts/addons/sun_position/ui_sun.py:116",),
|
(("scripts/addons/sun_position/ui_sun.py:144",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Annuler le lien",
|
("fr_FR", "Afficher",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Azimuth:"),
|
(("*", "North"),
|
||||||
(("scripts/addons/sun_position/ui_sun.py:205",),
|
(("scripts/addons/sun_position/ui_sun.py:145",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Azimut :",
|
("fr_FR", "Nord",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Elevation:"),
|
(("*", "Analemmas"),
|
||||||
(("scripts/addons/sun_position/ui_sun.py:208",),
|
(("scripts/addons/sun_position/ui_sun.py:146",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Hauteur :",
|
("fr_FR", "Analemmes",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "Surface"),
|
||||||
|
(("scripts/addons/sun_position/ui_sun.py:147",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Surface",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "Use"),
|
||||||
|
(("scripts/addons/sun_position/ui_sun.py:150",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Utiliser",
|
||||||
|
(False, ())),
|
||||||
|
),
|
||||||
|
(("*", "Azimuth"),
|
||||||
|
(("scripts/addons/sun_position/ui_sun.py:186",),
|
||||||
|
()),
|
||||||
|
("fr_FR", "Azimut",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Sunrise:"),
|
(("*", "Sunrise:"),
|
||||||
(("scripts/addons/sun_position/ui_sun.py:284",),
|
(("scripts/addons/sun_position/ui_sun.py:259",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Lever : ",
|
("fr_FR", "Lever :",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Sunset:"),
|
(("*", "Sunset:"),
|
||||||
(("scripts/addons/sun_position/ui_sun.py:287",),
|
(("scripts/addons/sun_position/ui_sun.py:260",),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Coucher : ",
|
("fr_FR", "Coucher :",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
),
|
),
|
||||||
(("*", "Please activate Use Nodes in the World panel."),
|
(("*", "Please activate Use Nodes in the World panel."),
|
||||||
(("scripts/addons/sun_position/ui_sun.py:92",
|
(("scripts/addons/sun_position/ui_sun.py:94",
|
||||||
"scripts/addons/sun_position/ui_sun.py:150"),
|
"scripts/addons/sun_position/ui_sun.py:137"),
|
||||||
()),
|
()),
|
||||||
("fr_FR", "Veuillez activer Utiliser nœuds dans le panneau Monde",
|
("fr_FR", "Veuillez activer Utiliser nœuds dans le panneau Monde",
|
||||||
(False, ())),
|
(False, ())),
|
||||||
|
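For context on the translation entries changed above: each item in translations_tuple pairs a (context, message) key with its source references and its per-language translations. The sketch below is not part of the patch; it only illustrates, with a made-up entry and the usual boilerplate, roughly how such a tuple is folded into the dictionary that bpy.app.translations.register() expects. Names outside the bpy API are illustrative assumptions.

# Illustrative only -- not part of this patch. A made-up translations_tuple
# entry in the same shape as the ones above, and the usual way it is turned
# into the {locale: {(context, msgid): translation}} dict expected by
# bpy.app.translations.register().
import bpy

example_translations_tuple = (
    (("*", "Sun Position"),                     # (context, source message)
     (("bpy.types.SunPosProperties",), ()),     # (references, comments)
     ("fr_FR", "Position du Soleil",            # one per-language translation
      (False, ())),                             # (is_fuzzy, comments)
     ),
)

translations_dict = {}
for msg in example_translations_tuple:
    key = msg[0]                                # (context, msgid)
    for lang, trans, (is_fuzzy, _comments) in msg[2:]:
        if trans and not is_fuzzy:              # skip empty or fuzzy entries
            translations_dict.setdefault(lang, {})[key] = trans


def register():
    bpy.app.translations.register(__name__, translations_dict)


def unregister():
    bpy.app.translations.unregister(__name__)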
@ -3,10 +3,11 @@
|
|||||||
import bpy
|
import bpy
|
||||||
from bpy.types import Operator, Menu
|
from bpy.types import Operator, Menu
|
||||||
from bl_operators.presets import AddPresetBase
|
from bl_operators.presets import AddPresetBase
|
||||||
|
from bl_ui.utils import PresetPanel
|
||||||
import os
|
import os
|
||||||
from math import degrees
|
from math import degrees
|
||||||
|
|
||||||
from .sun_calc import (format_lat_long, format_time, format_hms, sun)
|
from .sun_calc import format_lat_long, format_time, format_hms, sun
|
||||||
|
|
||||||
|
|
||||||
# -------------------------------------------------------------------
|
# -------------------------------------------------------------------
|
||||||
@ -14,18 +15,18 @@ from .sun_calc import (format_lat_long, format_time, format_hms, sun)
|
|||||||
# -------------------------------------------------------------------
|
# -------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
class SUNPOS_MT_Presets(Menu):
|
class SUNPOS_PT_Presets(PresetPanel, bpy.types.Panel):
|
||||||
bl_label = "Sun Position Presets"
|
bl_label = "Sun Position Presets"
|
||||||
preset_subdir = "operator/sun_position"
|
preset_subdir = "operator/sun_position"
|
||||||
preset_operator = "script.execute_preset"
|
preset_operator = "script.execute_preset"
|
||||||
draw = Menu.draw_preset
|
preset_add_operator = "world.sunpos_add_preset"
|
||||||
|
|
||||||
|
|
||||||
class SUNPOS_OT_AddPreset(AddPresetBase, Operator):
|
class SUNPOS_OT_AddPreset(AddPresetBase, Operator):
|
||||||
'''Add Sun Position preset'''
|
'''Add Sun Position preset'''
|
||||||
bl_idname = "world.sunpos_add_preset"
|
bl_idname = "world.sunpos_add_preset"
|
||||||
bl_label = "Add Sun Position preset"
|
bl_label = "Add Sun Position preset"
|
||||||
preset_menu = "SUNPOS_MT_Presets"
|
preset_menu = "SUNPOS_PT_Presets"
|
||||||
|
|
||||||
# variable used for all preset values
|
# variable used for all preset values
|
||||||
preset_defines = [
|
preset_defines = [
|
||||||
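The hunk above replaces the Menu-based preset list (SUNPOS_MT_Presets with draw = Menu.draw_preset) with a PresetPanel subclass that declares preset_add_operator; the following hunk pairs it with a draw_header_preset() hook on the owning panel. Below is a minimal, self-contained sketch of that pattern under illustrative EXAMPLE_* names, which are assumptions and not part of the patch.

# Illustrative sketch only -- EXAMPLE_* names are not part of the patch.
# Same PresetPanel pattern as above: a popover panel that lists presets and
# gets its add/remove buttons from preset_add_operator, plus the
# draw_header_preset() hook on the panel that hosts it.
import bpy
from bpy.types import Operator, Panel
from bl_operators.presets import AddPresetBase
from bl_ui.utils import PresetPanel


class EXAMPLE_PT_presets(PresetPanel, Panel):
    bl_label = "Example Presets"
    preset_subdir = "operator/example"           # where preset .py files live
    preset_operator = "script.execute_preset"    # runs the selected preset
    preset_add_operator = "example.add_preset"   # draws the +/- buttons


class EXAMPLE_OT_add_preset(AddPresetBase, Operator):
    """Add or remove an Example preset"""
    bl_idname = "example.add_preset"
    bl_label = "Add Example Preset"
    preset_menu = "EXAMPLE_PT_presets"
    preset_subdir = "operator/example"
    # Values written into the preset file (scene frame range, as an example).
    preset_defines = ["scene = bpy.context.scene"]
    preset_values = ["scene.frame_start", "scene.frame_end"]


class EXAMPLE_PT_panel(Panel):
    bl_label = "Example"
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = "scene"

    def draw_header_preset(self, _context):
        # Put the preset popover in the panel header, as ui_sun.py now does.
        EXAMPLE_PT_presets.draw_panel_header(self.layout)

    def draw(self, context):
        self.layout.prop(context.scene, "frame_start")
        self.layout.prop(context.scene, "frame_end")


classes = (EXAMPLE_PT_presets, EXAMPLE_OT_add_preset, EXAMPLE_PT_panel)


def register():
    for cls in classes:
        bpy.utils.register_class(cls)


def unregister():
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)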
@ -61,91 +62,33 @@ class SUNPOS_PT_Panel(bpy.types.Panel):
|
|||||||
bl_label = "Sun Position"
|
bl_label = "Sun Position"
|
||||||
bl_options = {'DEFAULT_CLOSED'}
|
bl_options = {'DEFAULT_CLOSED'}
|
||||||
|
|
||||||
|
def draw_header_preset(self, _context):
|
||||||
|
SUNPOS_PT_Presets.draw_panel_header(self.layout)
|
||||||
|
|
||||||
def draw(self, context):
|
def draw(self, context):
|
||||||
sp = context.scene.sun_pos_properties
|
sun_props = context.scene.sun_pos_properties
|
||||||
p = context.preferences.addons[__package__].preferences
|
|
||||||
layout = self.layout
|
layout = self.layout
|
||||||
self.draw_panel(context, sp, p, layout)
|
layout.use_property_split = True
|
||||||
|
layout.use_property_decorate = False
|
||||||
|
|
||||||
def draw_panel(self, context, sp, p, layout):
|
layout.prop(sun_props, "usage_mode", expand=True)
|
||||||
col = self.layout.column(align=True)
|
layout.separator()
|
||||||
col.label(text="Usage Mode")
|
|
||||||
row = col.row()
|
if sun_props.usage_mode == "HDR":
|
||||||
row.prop(sp, "usage_mode", expand=True)
|
self.draw_environment_mode_panel(context)
|
||||||
col.separator()
|
|
||||||
if sp.usage_mode == "HDR":
|
|
||||||
self.draw_environ_mode_panel(context, sp, p, layout)
|
|
||||||
else:
|
else:
|
||||||
self.draw_normal_mode_panel(context, sp, p, layout)
|
self.draw_normal_mode_panel(context)
|
||||||
|
|
||||||
def draw_environ_mode_panel(self, context, sp, p, layout):
|
def draw_environment_mode_panel(self, context):
|
||||||
flow = layout.grid_flow(row_major=True, columns=0, even_columns=True,
|
sun_props = context.scene.sun_pos_properties
|
||||||
even_rows=False, align=False)
|
layout = self.layout
|
||||||
|
|
||||||
col = flow.column(align=True)
|
|
||||||
col.label(text="Environment Texture")
|
|
||||||
|
|
||||||
if context.scene.world is not None:
|
|
||||||
if context.scene.world.node_tree is not None:
|
|
||||||
col.prop_search(sp, "hdr_texture",
|
|
||||||
context.scene.world.node_tree, "nodes", text="")
|
|
||||||
else:
|
|
||||||
col.label(text="Please activate Use Nodes in the World panel.",
|
|
||||||
icon="ERROR")
|
|
||||||
else:
|
|
||||||
col.label(text="Please select World in the World panel.",
|
|
||||||
icon="ERROR")
|
|
||||||
|
|
||||||
col.separator()
|
|
||||||
|
|
||||||
col = flow.column(align=True)
|
|
||||||
col.label(text="Sun Object")
|
|
||||||
col.prop_search(sp, "sun_object",
|
|
||||||
context.view_layer, "objects", text="")
|
|
||||||
col.separator()
|
|
||||||
|
|
||||||
col = flow.column(align=True)
|
|
||||||
col.prop(sp, "sun_distance")
|
|
||||||
if not sp.bind_to_sun:
|
|
||||||
col.prop(sp, "hdr_elevation")
|
|
||||||
col.prop(sp, "hdr_azimuth")
|
|
||||||
col.separator()
|
|
||||||
|
|
||||||
col = flow.column(align=True)
|
|
||||||
if sp.bind_to_sun:
|
|
||||||
col.prop(sp, "bind_to_sun", toggle=True, icon="CONSTRAINT",
|
|
||||||
text="Release binding")
|
|
||||||
else:
|
|
||||||
col.prop(sp, "bind_to_sun", toggle=True, icon="CONSTRAINT",
|
|
||||||
text="Bind Texture to Sun")
|
|
||||||
|
|
||||||
row = col.row(align=True)
|
|
||||||
row.enabled = not sp.bind_to_sun
|
|
||||||
row.operator("world.sunpos_show_hdr", icon='LIGHT_SUN')
|
|
||||||
|
|
||||||
def draw_normal_mode_panel(self, context, sp, p, layout):
|
|
||||||
if p.show_time_place:
|
|
||||||
row = layout.row(align=True)
|
|
||||||
row.menu(SUNPOS_MT_Presets.__name__, text=SUNPOS_MT_Presets.bl_label)
|
|
||||||
row.operator(SUNPOS_OT_AddPreset.bl_idname, text="", icon='ADD')
|
|
||||||
row.operator(SUNPOS_OT_AddPreset.bl_idname, text="", icon='REMOVE').remove_active = True
|
|
||||||
|
|
||||||
col = layout.column(align=True)
|
col = layout.column(align=True)
|
||||||
col.use_property_split = True
|
col.prop_search(sun_props, "sun_object",
|
||||||
col.use_property_decorate = False
|
context.view_layer, "objects")
|
||||||
col.prop(sp, "sun_object")
|
|
||||||
col.separator()
|
|
||||||
|
|
||||||
col.prop(sp, "object_collection")
|
|
||||||
if sp.object_collection:
|
|
||||||
col.prop(sp, "object_collection_type")
|
|
||||||
if sp.object_collection_type == 'DIURNAL':
|
|
||||||
col.prop(sp, "time_spread")
|
|
||||||
col.separator()
|
|
||||||
|
|
||||||
if context.scene.world is not None:
|
if context.scene.world is not None:
|
||||||
if context.scene.world.node_tree is not None:
|
if context.scene.world.node_tree is not None:
|
||||||
col.prop_search(sp, "sky_texture",
|
col.prop_search(sun_props, "hdr_texture",
|
||||||
context.scene.world.node_tree, "nodes")
|
context.scene.world.node_tree, "nodes")
|
||||||
else:
|
else:
|
||||||
col.label(text="Please activate Use Nodes in the World panel.",
|
col.label(text="Please activate Use Nodes in the World panel.",
|
||||||
@ -154,6 +97,59 @@ class SUNPOS_PT_Panel(bpy.types.Panel):
|
|||||||
col.label(text="Please select World in the World panel.",
|
col.label(text="Please select World in the World panel.",
|
||||||
icon="ERROR")
|
icon="ERROR")
|
||||||
|
|
||||||
|
layout.use_property_decorate = True
|
||||||
|
|
||||||
|
col = layout.column(align=True)
|
||||||
|
col.prop(sun_props, "bind_to_sun", text="Bind Texture to Sun")
|
||||||
|
col.prop(sun_props, "hdr_azimuth")
|
||||||
|
row = col.row(align=True)
|
||||||
|
row.active = not sun_props.bind_to_sun
|
||||||
|
row.prop(sun_props, "hdr_elevation")
|
||||||
|
col.prop(sun_props, "sun_distance")
|
||||||
|
col.separator()
|
||||||
|
|
||||||
|
col = layout.column(align=True)
|
||||||
|
row = col.row(align=True)
|
||||||
|
row.enabled = not sun_props.bind_to_sun
|
||||||
|
row.operator("world.sunpos_show_hdr", icon='LIGHT_SUN')
|
||||||
|
|
||||||
|
def draw_normal_mode_panel(self, context):
|
||||||
|
sun_props = context.scene.sun_pos_properties
|
||||||
|
addon_prefs = context.preferences.addons[__package__].preferences
|
||||||
|
layout = self.layout
|
||||||
|
|
||||||
|
col = layout.column(align=True)
|
||||||
|
col.prop(sun_props, "sun_object")
|
||||||
|
col.separator()
|
||||||
|
|
||||||
|
col.prop(sun_props, "object_collection")
|
||||||
|
if sun_props.object_collection:
|
||||||
|
col.prop(sun_props, "object_collection_type")
|
||||||
|
if sun_props.object_collection_type == 'DIURNAL':
|
||||||
|
col.prop(sun_props, "time_spread")
|
||||||
|
col.separator()
|
||||||
|
|
||||||
|
if context.scene.world is not None:
|
||||||
|
if context.scene.world.node_tree is not None:
|
||||||
|
col.prop_search(sun_props, "sky_texture",
|
||||||
|
context.scene.world.node_tree, "nodes")
|
||||||
|
else:
|
||||||
|
col.label(text="Please activate Use Nodes in the World panel.",
|
||||||
|
icon="ERROR")
|
||||||
|
else:
|
||||||
|
col.label(text="Please select World in the World panel.",
|
||||||
|
icon="ERROR")
|
||||||
|
|
||||||
|
if addon_prefs.show_overlays:
|
||||||
|
col = layout.column(align=True, heading="Show")
|
||||||
|
col.prop(sun_props, "show_north", text="North")
|
||||||
|
col.prop(sun_props, "show_analemmas", text="Analemmas")
|
||||||
|
col.prop(sun_props, "show_surface", text="Surface")
|
||||||
|
|
||||||
|
if addon_prefs.show_refraction:
|
||||||
|
col = layout.column(align=True, heading="Use")
|
||||||
|
col.prop(sun_props, "use_refraction", text="Refraction")
|
||||||
|
|
||||||
|
|
||||||
class SUNPOS_PT_Location(bpy.types.Panel):
|
class SUNPOS_PT_Location(bpy.types.Panel):
|
||||||
bl_space_type = "PROPERTIES"
|
bl_space_type = "PROPERTIES"
|
||||||
@ -164,68 +160,34 @@ class SUNPOS_PT_Location(bpy.types.Panel):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def poll(self, context):
|
def poll(self, context):
|
||||||
sp = context.scene.sun_pos_properties
|
sun_props = context.scene.sun_pos_properties
|
||||||
return sp.usage_mode != "HDR"
|
return sun_props.usage_mode != "HDR"
|
||||||
|
|
||||||
def draw(self, context):
|
def draw(self, context):
|
||||||
layout = self.layout
|
layout = self.layout
|
||||||
sp = context.scene.sun_pos_properties
|
layout.use_property_split = True
|
||||||
p = context.preferences.addons[__package__].preferences
|
|
||||||
|
sun_props = context.scene.sun_pos_properties
|
||||||
|
addon_prefs = context.preferences.addons[__package__].preferences
|
||||||
|
|
||||||
col = layout.column(align=True)
|
col = layout.column(align=True)
|
||||||
col.label(text="Enter Coordinates")
|
col.prop(sun_props, "coordinates", icon='URL')
|
||||||
col.prop(sp, "co_parser", text='', icon='URL')
|
col.prop(sun_props, "latitude")
|
||||||
|
col.prop(sun_props, "longitude")
|
||||||
|
|
||||||
layout.separator()
|
|
||||||
|
|
||||||
flow = layout.grid_flow(row_major=True, columns=0, even_columns=True, even_rows=False, align=False)
|
|
||||||
|
|
||||||
col = flow.column(align=True)
|
|
||||||
col.prop(sp, "latitude")
|
|
||||||
if p.show_dms:
|
|
||||||
row = col.row()
|
|
||||||
row.alignment = 'RIGHT'
|
|
||||||
row.label(text=format_lat_long(sp.latitude, True))
|
|
||||||
|
|
||||||
col = flow.column(align=True)
|
|
||||||
col.prop(sp, "longitude")
|
|
||||||
if p.show_dms:
|
|
||||||
row = col.row()
|
|
||||||
row.alignment = 'RIGHT'
|
|
||||||
row.label(text=format_lat_long(sp.longitude, False))
|
|
||||||
col.separator()
|
col.separator()
|
||||||
|
|
||||||
if p.show_north:
|
col = layout.column(align=True)
|
||||||
col = flow.column(align=True)
|
col.prop(sun_props, "north_offset", text="North Offset")
|
||||||
col.prop(sp, "show_north", toggle=True)
|
|
||||||
col.prop(sp, "north_offset")
|
if addon_prefs.show_az_el:
|
||||||
|
col = layout.column(align=True)
|
||||||
|
col.prop(sun_props, "sun_elevation", text="Elevation")
|
||||||
|
col.prop(sun_props, "sun_azimuth", text="Azimuth")
|
||||||
col.separator()
|
col.separator()
|
||||||
|
|
||||||
if p.show_surface or p.show_analemmas:
|
col = layout.column()
|
||||||
col = flow.column(align=True)
|
col.prop(sun_props, "sun_distance")
|
||||||
if p.show_surface:
|
|
||||||
col.prop(sp, "show_surface", toggle=True)
|
|
||||||
if p.show_analemmas:
|
|
||||||
col.prop(sp, "show_analemmas", toggle=True)
|
|
||||||
col.separator()
|
|
||||||
|
|
||||||
if p.show_az_el:
|
|
||||||
col = flow.column(align=True)
|
|
||||||
split = col.split(factor=0.4, align=True)
|
|
||||||
split.label(text="Azimuth:")
|
|
||||||
split.label(text=str(round(degrees(sun.azimuth), 3)) + "°")
|
|
||||||
split = col.split(factor=0.4, align=True)
|
|
||||||
split.label(text="Elevation:")
|
|
||||||
split.label(text=str(round(degrees(sun.elevation), 3)) + "°")
|
|
||||||
col.separator()
|
|
||||||
|
|
||||||
if p.show_refraction:
|
|
||||||
col = flow.column()
|
|
||||||
col.prop(sp, "use_refraction")
|
|
||||||
col.separator()
|
|
||||||
|
|
||||||
col = flow.column()
|
|
||||||
col.prop(sp, "sun_distance")
|
|
||||||
col.separator()
|
col.separator()
|
||||||
|
|
||||||
|
|
||||||
@ -238,63 +200,67 @@ class SUNPOS_PT_Time(bpy.types.Panel):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def poll(self, context):
|
def poll(self, context):
|
||||||
sp = context.scene.sun_pos_properties
|
sun_props = context.scene.sun_pos_properties
|
||||||
return sp.usage_mode != "HDR"
|
return sun_props.usage_mode != "HDR"
|
||||||
|
|
||||||
def draw(self, context):
|
def draw(self, context):
|
||||||
layout = self.layout
|
layout = self.layout
|
||||||
sp = context.scene.sun_pos_properties
|
layout.use_property_split = True
|
||||||
p = context.preferences.addons[__package__].preferences
|
|
||||||
|
|
||||||
flow = layout.grid_flow(row_major=True, columns=0, even_columns=True, even_rows=False, align=False)
|
sun_props = context.scene.sun_pos_properties
|
||||||
|
addon_prefs = context.preferences.addons[__package__].preferences
|
||||||
|
|
||||||
col = flow.column(align=True)
|
col = layout.column(align=True)
|
||||||
col.prop(sp, "use_day_of_year",
|
col.prop(sun_props, "use_day_of_year")
|
||||||
icon='SORTTIME')
|
if sun_props.use_day_of_year:
|
||||||
if sp.use_day_of_year:
|
col.prop(sun_props, "day_of_year")
|
||||||
col.prop(sp, "day_of_year")
|
|
||||||
else:
|
else:
|
||||||
col.prop(sp, "day")
|
col.prop(sun_props, "day")
|
||||||
col.prop(sp, "month")
|
col.prop(sun_props, "month")
|
||||||
col.prop(sp, "year")
|
col.prop(sun_props, "year")
|
||||||
col.separator()
|
col.separator()
|
||||||
|
|
||||||
col = flow.column(align=True)
|
col = layout.column(align=True)
|
||||||
col.prop(sp, "time")
|
col.prop(sun_props, "time", text="Time", text_ctxt="Hour")
|
||||||
col.prop(sp, "UTC_zone")
|
col.prop(sun_props, "UTC_zone")
|
||||||
if p.show_daylight_savings:
|
col.prop(sun_props, "use_daylight_savings")
|
||||||
col.prop(sp, "use_daylight_savings")
|
|
||||||
col.separator()
|
col.separator()
|
||||||
|
|
||||||
col = flow.column(align=True)
|
local_time = format_time(sun_props.time,
|
||||||
lt = format_time(sp.time,
|
sun_props.use_daylight_savings)
|
||||||
p.show_daylight_savings and sp.use_daylight_savings,
|
utc_time = format_time(sun_props.time,
|
||||||
sp.longitude)
|
sun_props.use_daylight_savings,
|
||||||
ut = format_time(sp.time,
|
sun_props.UTC_zone)
|
||||||
p.show_daylight_savings and sp.use_daylight_savings,
|
|
||||||
sp.longitude,
|
col = layout.column(align=True)
|
||||||
sp.UTC_zone)
|
|
||||||
col.alignment = 'CENTER'
|
col.alignment = 'CENTER'
|
||||||
|
|
||||||
split = col.split(factor=0.5, align=True)
|
split = col.split(factor=0.5, align=True)
|
||||||
split.label(text="Local:", icon='TIME')
|
sub = split.column(align=True)
|
||||||
split.label(text=lt)
|
sub.alignment = 'RIGHT'
|
||||||
split = col.split(factor=0.5, align=True)
|
sub.label(text="Time Local:")
|
||||||
split.label(text="UTC:", icon='PREVIEW_RANGE')
|
sub.label(text="UTC:")
|
||||||
split.label(text=ut)
|
|
||||||
|
sub = split.column(align=True)
|
||||||
|
sub.label(text=local_time)
|
||||||
|
sub.label(text=utc_time)
|
||||||
col.separator()
|
col.separator()
|
||||||
|
|
||||||
col = flow.column(align=True)
|
if addon_prefs.show_rise_set:
|
||||||
col.alignment = 'CENTER'
|
sunrise = format_hms(sun.sunrise)
|
||||||
if p.show_rise_set:
|
sunset = format_hms(sun.sunset)
|
||||||
sr = format_hms(sun.sunrise)
|
|
||||||
ss = format_hms(sun.sunset)
|
col = layout.column(align=True)
|
||||||
|
col.alignment = 'CENTER'
|
||||||
|
|
||||||
split = col.split(factor=0.5, align=True)
|
split = col.split(factor=0.5, align=True)
|
||||||
split.label(text="Sunrise:", icon='LIGHT_SUN')
|
sub = split.column(align=True)
|
||||||
split.label(text=sr)
|
sub.alignment = 'RIGHT'
|
||||||
split = col.split(factor=0.5, align=True)
|
sub.label(text="Sunrise:")
|
||||||
split.label(text="Sunset:", icon='SOLO_ON')
|
sub.label(text="Sunset:")
|
||||||
split.label(text=ss)
|
|
||||||
|
sub = split.column(align=True)
|
||||||
|
sub.label(text=sunrise)
|
||||||
|
sub.label(text=sunset)
|
||||||
|
|
||||||
col.separator()
|
col.separator()
|
||||||
|
@ -3,8 +3,8 @@
|
|||||||
bl_info = {
|
bl_info = {
|
||||||
"name": "Manage UI translations",
|
"name": "Manage UI translations",
|
||||||
"author": "Bastien Montagne",
|
"author": "Bastien Montagne",
|
||||||
"version": (1, 3, 2),
|
"version": (1, 3, 3),
|
||||||
"blender": (2, 92, 0),
|
"blender": (3, 6, 0),
|
||||||
"location": "Main \"File\" menu, text editor, any UI control",
|
"location": "Main \"File\" menu, text editor, any UI control",
|
||||||
"description": "Allows managing UI translations directly from Blender "
|
"description": "Allows managing UI translations directly from Blender "
|
||||||
"(update main .po files, update scripts' translations, etc.)",
|
"(update main .po files, update scripts' translations, etc.)",
|
||||||
|
@ -141,25 +141,34 @@ class UI_OT_i18n_cleanuptranslation_svn_branches(Operator):
|
|||||||
|
|
||||||
|
|
||||||
def i18n_updatetranslation_svn_trunk_callback(lng, settings):
|
def i18n_updatetranslation_svn_trunk_callback(lng, settings):
|
||||||
|
reports = []
|
||||||
if lng['uid'] in settings.IMPORT_LANGUAGES_SKIP:
|
if lng['uid'] in settings.IMPORT_LANGUAGES_SKIP:
|
||||||
print("Skipping {} language ({}), edit settings if you want to enable it.\n".format(lng['name'], lng['uid']))
|
reports.append("Skipping {} language ({}), edit settings if you want to enable it.".format(lng['name'], lng['uid']))
|
||||||
return lng['uid'], 0.0
|
return lng['uid'], 0.0, reports
|
||||||
if not lng['use']:
|
if not lng['use']:
|
||||||
print("Skipping {} language ({}).\n".format(lng['name'], lng['uid']))
|
reports.append("Skipping {} language ({}).".format(lng['name'], lng['uid']))
|
||||||
return lng['uid'], 0.0
|
return lng['uid'], 0.0, reports
|
||||||
po = utils_i18n.I18nMessages(uid=lng['uid'], kind='PO', src=lng['po_path'], settings=settings)
|
po = utils_i18n.I18nMessages(uid=lng['uid'], kind='PO', src=lng['po_path'], settings=settings)
|
||||||
errs = po.check(fix=True)
|
errs = po.check(fix=True)
|
||||||
print("Processing {} language ({}).\n"
|
reports.append("Processing {} language ({}).\n"
|
||||||
"Cleaned up {} commented messages.\n".format(lng['name'], lng['uid'], po.clean_commented()) +
|
"Cleaned up {} commented messages.\n".format(lng['name'], lng['uid'], po.clean_commented()) +
|
||||||
("Errors in this po, solved as best as possible!\n\t" + "\n\t".join(errs) if errs else "") + "\n")
|
("Errors in this po, solved as best as possible!\n\t" + "\n\t".join(errs) if errs else ""))
|
||||||
if lng['uid'] in settings.IMPORT_LANGUAGES_RTL:
|
if lng['uid'] in settings.IMPORT_LANGUAGES_RTL:
|
||||||
po.write(kind="PO", dest=lng['po_path_trunk'][:-3] + "_raw.po")
|
po.write(kind="PO", dest=lng['po_path_trunk'][:-3] + "_raw.po")
|
||||||
po.rtl_process()
|
po.rtl_process()
|
||||||
po.write(kind="PO", dest=lng['po_path_trunk'])
|
po.write(kind="PO", dest=lng['po_path_trunk'])
|
||||||
po.write(kind="PO_COMPACT", dest=lng['po_path_git'])
|
po.write(kind="PO_COMPACT", dest=lng['po_path_git'])
|
||||||
po.write(kind="MO", dest=lng['mo_path_trunk'])
|
ret = po.write(kind="MO", dest=lng['mo_path_trunk'])
|
||||||
|
if (ret.stdout):
|
||||||
|
reports.append(ret.stdout.decode().rstrip("\n"))
|
||||||
|
if (ret.stderr):
|
||||||
|
stderr_str = ret.stderr.decode().rstrip("\n")
|
||||||
|
if ret.returncode != 0:
|
||||||
|
reports.append("ERROR: " + stderr_str)
|
||||||
|
else:
|
||||||
|
reports.append(stderr_str)
|
||||||
po.update_info()
|
po.update_info()
|
||||||
return lng['uid'], po.nbr_trans_msgs / po.nbr_msgs
|
return lng['uid'], po.nbr_trans_msgs / po.nbr_msgs, reports
|
||||||
|
|
||||||
|
|
||||||
class UI_OT_i18n_updatetranslation_svn_trunk(Operator):
|
class UI_OT_i18n_updatetranslation_svn_trunk(Operator):
|
||||||
@ -178,12 +187,13 @@ class UI_OT_i18n_updatetranslation_svn_trunk(Operator):
|
|||||||
context.window_manager.progress_update(0)
|
context.window_manager.progress_update(0)
|
||||||
with concurrent.futures.ProcessPoolExecutor() as exctr:
|
with concurrent.futures.ProcessPoolExecutor() as exctr:
|
||||||
num_langs = len(i18n_sett.langs)
|
num_langs = len(i18n_sett.langs)
|
||||||
for progress, (lng_uid, stats_val) in enumerate(exctr.map(i18n_updatetranslation_svn_trunk_callback,
|
for progress, (lng_uid, stats_val, reports) in enumerate(exctr.map(i18n_updatetranslation_svn_trunk_callback,
|
||||||
[dict(lng.items()) for lng in i18n_sett.langs],
|
[dict(lng.items()) for lng in i18n_sett.langs],
|
||||||
(self.settings,) * num_langs,
|
(self.settings,) * num_langs,
|
||||||
chunksize=4)):
|
chunksize=4)):
|
||||||
context.window_manager.progress_update(progress + 1)
|
context.window_manager.progress_update(progress + 1)
|
||||||
stats[lng_uid] = stats_val
|
stats[lng_uid] = stats_val
|
||||||
|
print("".join(reports) + "\n")
|
||||||
|
|
||||||
# Copy pot file from branches to trunk.
|
# Copy pot file from branches to trunk.
|
||||||
shutil.copy2(self.settings.FILE_NAME_POT, self.settings.TRUNK_PO_DIR)
|
shutil.copy2(self.settings.FILE_NAME_POT, self.settings.TRUNK_PO_DIR)
|
||||||
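The last two hunks change the per-language worker so it collects its log messages in a reports list and returns them (together with the msgfmt output), and the operator then prints each language's report from the main process once the worker finishes. A minimal sketch of that pattern follows; the names and the dummy work are illustrative assumptions, not the add-on's actual code.

# Illustrative sketch (made-up names) of the reporting pattern applied above:
# ProcessPoolExecutor workers return their messages instead of printing them,
# so output is emitted by the parent process without interleaving.
import concurrent.futures


def process_language(lang):
    reports = []
    if not lang["use"]:
        reports.append("Skipping {} ({}).".format(lang["name"], lang["uid"]))
        return lang["uid"], 0.0, reports
    # ... real work (parse the .po, clean it up, write .mo) would go here ...
    reports.append("Processing {} ({}).".format(lang["name"], lang["uid"]))
    return lang["uid"], 1.0, reports


def update_all(languages):
    stats = {}
    with concurrent.futures.ProcessPoolExecutor() as executor:
        for uid, ratio, reports in executor.map(process_language, languages,
                                                chunksize=4):
            stats[uid] = ratio
            # All printing happens here, in the parent process.
            print("\n".join(reports) + "\n")
    return stats


if __name__ == "__main__":
    langs = [{"uid": "fr", "name": "French", "use": True},
             {"uid": "xx", "name": "Dummy", "use": False}]
    update_all(langs)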