New Addon: Import Autodesk .max #105013

Closed
Sebastian Sille wants to merge 136 commits from (deleted):nrgsille-import_max into main

22 changed files with 283 additions and 191 deletions
Showing only changes of commit 76c9aecde9

View File

@@ -6,7 +6,7 @@ bl_info = {
     "name": "Curve Tools",
     "description": "Adds some functionality for bezier/nurbs curve/surface modeling",
     "author": "Mackraken, Spivak Vladimir (cwolf3d)",
-    "version": (0, 4, 5),
+    "version": (0, 4, 6),
     "blender": (2, 80, 0),
     "location": "View3D > Tool Shelf > Edit Tab",
     "doc_url": "{BLENDER_MANUAL_URL}/addons/add_curve/curve_tools.html",

View File

@@ -52,18 +52,23 @@ class OperatorCurveInfo(bpy.types.Operator):
 class OperatorCurveLength(bpy.types.Operator):
     bl_idname = "curvetools.operatorcurvelength"
     bl_label = "Length"
-    bl_description = "Calculates the length of the active/selected curve"
+    bl_description = "Calculates the length of the active/selected curves"
 
     @classmethod
     def poll(cls, context):
-        return util.Selected1Curve()
+        return util.Selected1OrMoreCurves()
 
     def execute(self, context):
-        curve = curves.Curve(context.active_object)
-        context.scene.curvetools.CurveLength = curve.length
+        selCurves = util.GetSelectedCurves()
+        length = 0
+        for blCurve in selCurves:
+            curve = curves.Curve(blCurve)
+            length += curve.length
+
+        context.scene.curvetools.CurveLength = length
 
         return {'FINISHED'}

View File

@@ -1,8 +1,6 @@
+# SPDX-FileCopyrightText: 2011-2022 Blender Foundation
+#
 # SPDX-License-Identifier: Apache-2.0
-# Copyright 2011-2022 Blender Foundation
-
-# <pep8 compliant>
 
 bl_info = {
     "name": "Hydra Storm render engine",

View File

@@ -1,10 +1,10 @@
+# SPDX-FileCopyrightText: 2011-2022 Blender Foundation
+#
 # SPDX-License-Identifier: Apache-2.0
-# Copyright 2011-2022 Blender Foundation
-
-# <pep8 compliant>
 
 import bpy
 
 
 class StormHydraRenderEngine(bpy.types.HydraRenderEngine):
     bl_idname = 'HYDRA_STORM'
     bl_label = "Hydra Storm"
@@ -19,11 +19,11 @@ class StormHydraRenderEngine(bpy.types.HydraRenderEngine):
         settings = bpy.context.scene.hydra_storm.viewport if engine_type == 'VIEWPORT' else \
             bpy.context.scene.hydra_storm.final
 
         result = {
-            'enableTinyPrimCulling': settings.enable_tiny_prim_culling,
+            'enableTinyPrimCulling': settings.use_tiny_prim_culling,
+            'maxLights': settings.max_lights,
             'volumeRaymarchingStepSize': settings.volume_raymarching_step_size,
             'volumeRaymarchingStepSizeLighting': settings.volume_raymarching_step_size_lighting,
             'volumeMaxTextureMemoryPerField': settings.volume_max_texture_memory_per_field,
-            'maxLights': settings.max_lights,
         }
 
         if engine_type != 'VIEWPORT':
@@ -35,6 +35,8 @@ class StormHydraRenderEngine(bpy.types.HydraRenderEngine):
         return result
 
     def update_render_passes(self, scene, render_layer):
+        if render_layer.use_pass_combined:
+            self.register_pass(scene, render_layer, 'Combined', 4, 'RGBA', 'COLOR')
         if render_layer.use_pass_z:
             self.register_pass(scene, render_layer, 'Depth', 1, 'Z', 'VALUE')
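
Note: the renamed property flows straight into the Hydra delegate settings dict built above. A minimal sketch of that mapping outside Blender, using a made-up stand-in object instead of scene.hydra_storm.viewport/final (the stand-in class and its defaults are illustrative; the dict keys and attribute names are the ones from this hunk):

from dataclasses import dataclass


@dataclass
class FakeStormSettings:
    # Stand-in for the RenderProperties group defined in properties.py.
    max_lights: int = 16
    use_tiny_prim_culling: bool = False
    volume_raymarching_step_size: float = 1.0
    volume_raymarching_step_size_lighting: float = 10.0
    volume_max_texture_memory_per_field: float = 128.0


def storm_render_settings(settings):
    # Same attribute-to-key pairing as the updated get_render_settings() above.
    return {
        'enableTinyPrimCulling': settings.use_tiny_prim_culling,
        'maxLights': settings.max_lights,
        'volumeRaymarchingStepSize': settings.volume_raymarching_step_size,
        'volumeRaymarchingStepSizeLighting': settings.volume_raymarching_step_size_lighting,
        'volumeMaxTextureMemoryPerField': settings.volume_max_texture_memory_per_field,
    }


print(storm_render_settings(FakeStormSettings()))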

View File

@@ -1,7 +1,6 @@
+# SPDX-FileCopyrightText: 2011-2022 Blender Foundation
+#
 # SPDX-License-Identifier: Apache-2.0
-# Copyright 2011-2022 Blender Foundation
-
-# <pep8 compliant>
 
 import bpy
@@ -23,9 +22,14 @@ class Properties(bpy.types.PropertyGroup):
 class RenderProperties(bpy.types.PropertyGroup):
-    enable_tiny_prim_culling: bpy.props.BoolProperty(
+    max_lights: bpy.props.IntProperty(
+        name="Max Lights",
+        description="Limit maximum number of lights",
+        default=16, min=0, max=16,
+    )
+    use_tiny_prim_culling: bpy.props.BoolProperty(
         name="Tiny Prim Culling",
-        description="Enable Tiny Prim Culling",
+        description="Hide small geometry primitives to improve performance",
         default=False,
     )
     volume_raymarching_step_size: bpy.props.FloatProperty(
@@ -43,11 +47,6 @@ class RenderProperties(bpy.types.PropertyGroup):
         description="Maximum memory for a volume field texture in Mb (unless overridden by field prim)",
         default=128.0,
     )
-    max_lights: bpy.props.IntProperty(
-        name="Max Lights",
-        description="Limit maximum number of lights",
-        default=16, min=0, max=16,
-    )
 
 
 class SceneProperties(Properties):

View File

@@ -1,7 +1,6 @@
+# SPDX-FileCopyrightText: 2011-2022 Blender Foundation
+#
 # SPDX-License-Identifier: Apache-2.0
-# Copyright 2011-2022 Blender Foundation
-
-# <pep8 compliant>
 
 import bpy
@@ -20,12 +19,32 @@ class Panel(bpy.types.Panel):
 #
-# Final render settings
+# Quality render settings
 #
-class STORM_HYDRA_RENDER_PT_final(Panel):
-    """Final render delegate and settings"""
-    bl_idname = 'STORM_HYDRA_RENDER_PT_final'
-    bl_label = "Final Render Settings"
+class STORM_HYDRA_RENDER_PT_quality(Panel):
+    bl_label = "Quality"
+
+    def draw(self, layout):
+        pass
+
+
+class STORM_HYDRA_RENDER_PT_quality_viewport(Panel):
+    bl_label = "Viewport"
+    bl_parent_id = "STORM_HYDRA_RENDER_PT_quality"
+
+    def draw(self, context):
+        layout = self.layout
+        layout.use_property_split = True
+        layout.use_property_decorate = False
+
+        settings = context.scene.hydra_storm.viewport
+        layout.prop(settings, 'max_lights')
+        layout.prop(settings, 'use_tiny_prim_culling')
+
+
+class STORM_HYDRA_RENDER_PT_quality_render(Panel):
+    bl_label = "Render"
+    bl_parent_id = "STORM_HYDRA_RENDER_PT_quality"
 
     def draw(self, context):
         layout = self.layout
@@ -33,17 +52,42 @@ class STORM_HYDRA_RENDER_PT_final(Panel):
         layout.use_property_decorate = False
 
         settings = context.scene.hydra_storm.final
-        layout.prop(settings, 'enable_tiny_prim_culling')
         layout.prop(settings, 'max_lights')
+        layout.prop(settings, 'use_tiny_prim_culling')
 
 
-class STORM_HYDRA_RENDER_PT_volume_final(bpy.types.Panel):
-    bl_parent_id = STORM_HYDRA_RENDER_PT_final.bl_idname
-    bl_label = "Volume Raymarching"
-    bl_space_type = 'PROPERTIES'
-    bl_region_type = 'WINDOW'
+#
+# Volume render settings
+#
+class STORM_HYDRA_RENDER_PT_volumes(Panel):
+    bl_label = "Volumes"
     bl_options = {'DEFAULT_CLOSED'}
+
+    def draw(self, layout):
+        pass
+
+
+class STORM_HYDRA_RENDER_PT_volumes_viewport(Panel):
+    bl_label = "Viewport"
+    bl_parent_id = "STORM_HYDRA_RENDER_PT_volumes"
+
+    def draw(self, context):
+        layout = self.layout
+        layout.use_property_split = True
+        layout.use_property_decorate = False
+
+        settings = context.scene.hydra_storm.viewport
+        col = layout.column(align=True)
+        col.prop(settings, "volume_raymarching_step_size", text="Step Size")
+        col.prop(settings, "volume_raymarching_step_size_lighting", text="Step Size Lightning")
+        col.prop(settings, "volume_max_texture_memory_per_field")
+
+
+class STORM_HYDRA_RENDER_PT_volumes_render(Panel):
+    bl_label = "Render"
+    bl_parent_id = "STORM_HYDRA_RENDER_PT_volumes"
 
     def draw(self, context):
         layout = self.layout
         layout.use_property_split = True
@@ -58,28 +102,10 @@ class STORM_HYDRA_RENDER_PT_volume_final(bpy.types.Panel):
 #
-# Viewport render settings
+# Film settings
 #
-class STORM_HYDRA_RENDER_PT_viewport(Panel):
-    """Viewport render delegate and settings"""
-    bl_idname = 'STORM_HYDRA_RENDER_PT_viewport'
-    bl_label = "Viewport Render Settings"
-
-    def draw(self, context):
-        layout = self.layout
-        layout.use_property_split = True
-        layout.use_property_decorate = False
-
-        settings = context.scene.hydra_storm.viewport
-        layout.prop(settings, 'enable_tiny_prim_culling')
-        layout.prop(settings, 'max_lights')
-
-
-class STORM_HYDRA_RENDER_PT_volume_viewport(bpy.types.Panel):
-    bl_parent_id = STORM_HYDRA_RENDER_PT_viewport.bl_idname
-    bl_label = "Volume Raymarching"
-    bl_space_type = 'PROPERTIES'
-    bl_region_type = 'WINDOW'
+class STORM_HYDRA_RENDER_PT_film(Panel):
+    bl_label = "Film"
     bl_options = {'DEFAULT_CLOSED'}
 
     def draw(self, context):
@@ -87,14 +113,40 @@ class STORM_HYDRA_RENDER_PT_volume_viewport(bpy.types.Panel):
         layout.use_property_split = True
         layout.use_property_decorate = False
 
-        settings = context.scene.hydra_storm.viewport
-        col = layout.column(align=True)
-        col.prop(settings, "volume_raymarching_step_size", text="Step Size")
-        col.prop(settings, "volume_raymarching_step_size_lighting", text="Step Size Lightning")
-        col.prop(settings, "volume_max_texture_memory_per_field")
+        layout.prop(context.scene.render, "film_transparent", text="Transparent Background")
+
+
+#
+# View layer settings
+#
+class STORM_HYDRA_RENDER_PT_passes(Panel):
+    bl_label = "Passes"
+    bl_context = "view_layer"
+
+    def draw(self, context):
+        pass
+
+
+class STORM_HYDRA_RENDER_PT_passes_data(Panel):
+    bl_label = "Data"
+    bl_context = "view_layer"
+    bl_parent_id = "STORM_HYDRA_RENDER_PT_passes"
+
+    def draw(self, context):
+        layout = self.layout
+        layout.use_property_split = True
+        layout.use_property_decorate = False
+
+        view_layer = context.view_layer
+
+        col = layout.column(heading="Include", align=True)
+        col.prop(view_layer, "use_pass_combined")
+        col.prop(view_layer, "use_pass_z")
+
+
+#
+# Light settings
+#
 class STORM_HYDRA_LIGHT_PT_light(Panel):
     """Physical light sources"""
     bl_label = "Light"
@@ -148,49 +200,13 @@ class STORM_HYDRA_LIGHT_PT_light(Panel):
         main_col.prop(light, 'size')
 
 
-class STORM_HYDRA_RENDER_PT_film(Panel):
-    bl_label = "Film"
-    bl_space_type = 'PROPERTIES'
-    bl_region_type = 'WINDOW'
-    bl_options = {'DEFAULT_CLOSED'}
-
-    def draw(self, context):
-        layout = self.layout
-        layout.use_property_split = True
-        layout.use_property_decorate = False
-        layout.prop(context.scene.render, "film_transparent", text="Transparent Background")
-
-
-class STORM_HYDRA_RENDER_PT_passes(Panel):
-    bl_label = "Passes"
-    bl_context = "view_layer"
-
-    def draw(self, context):
-        pass
-
-
-class STORM_HYDRA_RENDER_PT_passes_data(Panel):
-    bl_label = "Data"
-    bl_context = "view_layer"
-    bl_parent_id = "STORM_HYDRA_RENDER_PT_passes"
-
-    def draw(self, context):
-        layout = self.layout
-        layout.use_property_split = True
-        layout.use_property_decorate = False
-
-        view_layer = context.view_layer
-
-        col = layout.column(heading="Include", align=True)
-        col.prop(view_layer, "use_pass_z")
-
 register_classes, unregister_classes = bpy.utils.register_classes_factory((
-    STORM_HYDRA_RENDER_PT_final,
-    STORM_HYDRA_RENDER_PT_volume_final,
-    STORM_HYDRA_RENDER_PT_viewport,
-    STORM_HYDRA_RENDER_PT_volume_viewport,
+    STORM_HYDRA_RENDER_PT_quality,
+    STORM_HYDRA_RENDER_PT_quality_viewport,
+    STORM_HYDRA_RENDER_PT_quality_render,
+    STORM_HYDRA_RENDER_PT_volumes,
+    STORM_HYDRA_RENDER_PT_volumes_viewport,
+    STORM_HYDRA_RENDER_PT_volumes_render,
     STORM_HYDRA_RENDER_PT_film,
    STORM_HYDRA_LIGHT_PT_light,
     STORM_HYDRA_RENDER_PT_passes,

View File

@@ -142,7 +142,6 @@ class BVH_PT_import_main(bpy.types.Panel):
     bl_parent_id = "FILE_PT_operator"
     bl_options = {'HIDE_HEADER'}
 
     @classmethod
     def poll(cls, context):
         sfile = context.space_data
@@ -368,6 +367,7 @@ classes = (
     BVH_PT_export_animation,
 )
 
 def register():
     for cls in classes:
         bpy.utils.register_class(cls)
@@ -383,5 +383,6 @@ def unregister():
     bpy.types.TOPBAR_MT_file_import.remove(menu_func_import)
     bpy.types.TOPBAR_MT_file_export.remove(menu_func_export)
 
 if __name__ == "__main__":
     register()

View File

@@ -77,7 +77,12 @@ def write_armature(
         if (bone.use_connect or root_transform_only) and bone.parent:
             file.write("%s\tCHANNELS 3 %srotation %srotation %srotation\n" % (indent_str, *rot_order_str))
         else:
-            file.write("%s\tCHANNELS 6 Xposition Yposition Zposition %srotation %srotation %srotation\n" % (indent_str, *rot_order_str))
+            file.write(
+                "%s\tCHANNELS 6 Xposition Yposition Zposition %srotation %srotation %srotation\n" % (
+                    indent_str,
+                    *rot_order_str,
+                )
+            )
 
         if my_children:
             # store the location for the children
@@ -250,7 +255,13 @@ def write_armature(
             if not dbone.skip_position:
                 file.write("%.6f %.6f %.6f " % (loc * global_scale)[:])
 
-            file.write("%.6f %.6f %.6f " % (degrees(rot[dbone.rot_order[0]]), degrees(rot[dbone.rot_order[1]]), degrees(rot[dbone.rot_order[2]])))
+            file.write(
+                "%.6f %.6f %.6f " % (
+                    degrees(rot[dbone.rot_order[0]]),
+                    degrees(rot[dbone.rot_order[1]]),
+                    degrees(rot[dbone.rot_order[2]]),
+                )
+            )
 
             dbone.prev_euler = rot

View File

@@ -137,6 +137,14 @@ def read_bvh(context, file_path, rotate_mode='XYZ', global_scale=1.0):
             # Make sure the names are unique - Object names will match joint names exactly and both will be unique.
             name = file_lines[lineIdx][1]
 
+            # While unlikely, there exists a user report of duplicate joint names, see: #109399.
+            if name in bvh_nodes:
+                name_orig = name
+                name_index = 1
+                while (name := "%s.%03d" % (name_orig, name_index)) in bvh_nodes:
+                    name_index += 1
+                del name_orig, name_index
+
             # print '%snode: %s, parent: %s' % (len(bvh_nodes_serial) * ' ', name, bvh_nodes_serial[-1])
 
             lineIdx += 2  # Increment to the next line (Offset)
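
Note: a standalone sketch of how the added renaming loop behaves, with a plain dict standing in for bvh_nodes (the helper name here is made up):

def uniquify_joint_name(name, existing):
    # Mirror of the added block: append .001, .002, ... until the name is unique.
    if name in existing:
        name_orig = name
        name_index = 1
        while (name := "%s.%03d" % (name_orig, name_index)) in existing:
            name_index += 1
    return name


nodes = {"Hips": None, "Spine": None, "Spine.001": None}
print(uniquify_joint_name("Spine", nodes))  # -> "Spine.002"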

View File

@@ -551,10 +551,10 @@ def make_percent_subchunk(chunk_id, percent):
     return pct_sub
 
 
-def make_texture_chunk(chunk_id, images):
+def make_texture_chunk(chunk_id, images, pct):
     """Make Material Map texture chunk."""
     # Add texture percentage value (100 = 1.0)
-    mat_sub = make_percent_subchunk(chunk_id, 1)
+    mat_sub = make_percent_subchunk(chunk_id, pct)
     has_entry = False
 
     def add_image(img):
@@ -748,17 +748,16 @@ def make_material_chunk(material, image):
         # Make sure no textures are lost. Everything that doesn't fit
         # into a channel is exported as secondary texture
-        diffuse = []
-        for link in wrap.material.node_tree.links:
-            if link.from_node.type == 'TEX_IMAGE' and link.to_node.type in {'MIX', 'MIX_RGB'}:
-                diffuse = [link.from_node.image]
-        if diffuse:
-            if not primary_tex:
-                matmap = make_texture_chunk(MAT_DIFFUSEMAP, diffuse)
-            else:
-                matmap = make_texture_chunk(MAT_TEX2MAP, diffuse)
+        matmap = False
+        lks = material.node_tree.links
+        pct = next((lk.from_node.inputs[0].default_value for lk in lks if lk.from_node.type in {'MIX', 'MIX_RGB'} and lk.to_node.type == 'BSDF_PRINCIPLED'), 0.5)
+        for link in mtlks:
+            mix_primary = link.from_node.image if link.from_node.type == 'TEX_IMAGE' and link.to_socket.identifier in {'Color2', 'B_Color'} else False
+            mix_secondary = link.from_node.image if link.from_node.type == 'TEX_IMAGE' and link.to_socket.identifier in {'Color1', 'A_Color'} else False
+            if mix_secondary:
+                matmap = make_uv_texture_chunk(MAT_TEXMAP, [mix_secondary], pct)
+            elif not primary_tex and mix_primary:
+                material_chunk.add_subchunk(make_uv_texture_chunk(MAT_DIFFUSEMAP, [mix_primary], pct))
 
         if matmap:
             material_chunk.add_subchunk(matmap)
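
Note: the new pct lookup leans on next() with a default over a generator of node links; a self-contained sketch of that pattern (the link/node objects below are hypothetical stand-ins, not the Blender API):

from types import SimpleNamespace


def mix_factor(links, default=0.5):
    # First Mix node feeding a Principled BSDF wins; otherwise fall back to the default,
    # exactly like the generator expression in the hunk above.
    return next((lk.from_node.inputs[0].default_value
                 for lk in links
                 if lk.from_node.type in {'MIX', 'MIX_RGB'} and lk.to_node.type == 'BSDF_PRINCIPLED'),
                default)


mix = SimpleNamespace(type='MIX', inputs=[SimpleNamespace(default_value=0.25)])
bsdf = SimpleNamespace(type='BSDF_PRINCIPLED')
links = [SimpleNamespace(from_node=mix, to_node=bsdf)]
print(mix_factor(links))  # -> 0.25
print(mix_factor([]))     # -> 0.5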

View File

@@ -5,7 +5,7 @@
 bl_info = {
     "name": "FBX format",
     "author": "Campbell Barton, Bastien Montagne, Jens Restemeier, @Mysteryem",
-    "version": (5, 5, 1),
+    "version": (5, 6, 0),
     "blender": (3, 6, 0),
     "location": "File > Import-Export",
     "description": "FBX IO meshes, UVs, vertex colors, materials, textures, cameras, lamps and actions",

View File

@@ -169,6 +169,7 @@ def elem_prop_first(elem, default=None):
 # ----
 # Support for
 # Properties70: { ... P:
+# Custom properties ("user properties" in FBX) are ignored here and get handled separately (see #104773).
 def elem_props_find_first(elem, elem_prop_id):
     if elem is None:
         # When properties are not found... Should never happen, but happens - as usual.
@@ -185,7 +186,8 @@ def elem_props_find_first(elem, elem_prop_id):
     for subelem in elem.elems:
         assert(subelem.id == b'P')
-        if subelem.props[0] == elem_prop_id:
+        # 'U' flag indicates that the property has been defined by the user.
+        if subelem.props[0] == elem_prop_id and b'U' not in subelem.props[3]:
             return subelem
     return None
@@ -1885,7 +1887,6 @@ def blen_read_light(fbx_tmpl, fbx_obj, settings):
     # TODO, cycles nodes???
     lamp.color = elem_props_get_color_rgb(fbx_props, b'Color', (1.0, 1.0, 1.0))
     lamp.energy = elem_props_get_number(fbx_props, b'Intensity', 100.0) / 100.0
-    lamp.distance = elem_props_get_number(fbx_props, b'DecayStart', 25.0) * settings.global_scale
     lamp.use_shadow = elem_props_get_bool(fbx_props, b'CastShadow', True)
     if hasattr(lamp, "cycles"):
         lamp.cycles.cast_shadow = lamp.use_shadow
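
Note: a rough sketch of the 'U'-flag filtering with plain tuples standing in for parsed P elements (layout simplified; only the props[0]/props[3] indices match what the parser uses):

# props[0] = property id, props[3] = flag string; 'U' marks a user-defined ("user property") entry.
def find_first_property(p_elems, prop_id):
    for props in p_elems:
        if props[0] == prop_id and b'U' not in props[3]:
            return props
    return None


elems = [
    (b'Intensity', b'Number', b'', b'AU', 100.0),   # user-defined property, now skipped
    (b'Intensity', b'Number', b'', b'A', 100.0),    # regular property, returned
]
print(find_first_property(elems, b'Intensity'))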

View File

@@ -13,6 +13,7 @@ __all__ = (
 from struct import unpack
 import array
 import zlib
+from io import BytesIO
 
 from . import data_types
@@ -20,7 +21,7 @@ from . import data_types
 # that the sub-scope exists (i.e. to distinguish between P: and P : {})
 _BLOCK_SENTINEL_LENGTH = ...
 _BLOCK_SENTINEL_DATA = ...
-read_fbx_elem_uint = ...
+read_fbx_elem_start = ...
 _IS_BIG_ENDIAN = (__import__("sys").byteorder != 'little')
 _HEAD_MAGIC = b'Kaydara FBX Binary\x20\x20\x00\x1a\x00'
 from collections import namedtuple
@@ -32,10 +33,6 @@ def read_uint(read):
     return unpack(b'<I', read(4))[0]
 
 
-def read_uint64(read):
-    return unpack(b'<Q', read(8))[0]
-
-
 def read_ubyte(read):
     return unpack(b'B', read(1))[0]
@@ -46,10 +43,24 @@ def read_string_ubyte(read):
     return data
 
 
+def read_array_params(read):
+    return unpack(b'<III', read(12))
+
+
+def read_elem_start32(read):
+    end_offset, prop_count, _prop_length, elem_id_size = unpack(b'<IIIB', read(13))
+    elem_id = read(elem_id_size) if elem_id_size else b""
+    return end_offset, prop_count, elem_id
+
+
+def read_elem_start64(read):
+    end_offset, prop_count, _prop_length, elem_id_size = unpack(b'<QQQB', read(25))
+    elem_id = read(elem_id_size) if elem_id_size else b""
+    return end_offset, prop_count, elem_id
+
+
 def unpack_array(read, array_type, array_stride, array_byteswap):
-    length = read_uint(read)
-    encoding = read_uint(read)
-    comp_len = read_uint(read)
+    length, encoding, comp_len = read_array_params(read)
 
     data = read(comp_len)
@@ -89,33 +100,32 @@ read_data_dict = {
 # * The NULL block marking end of nested stuff switches from 13 bytes long to 25 bytes long.
 # * The FBX element metadata (end_offset, prop_count and prop_length) switch from uint32 to uint64.
 def init_version(fbx_version):
-    global _BLOCK_SENTINEL_LENGTH, _BLOCK_SENTINEL_DATA, read_fbx_elem_uint
+    global _BLOCK_SENTINEL_LENGTH, _BLOCK_SENTINEL_DATA, read_fbx_elem_start
 
     _BLOCK_SENTINEL_LENGTH = ...
     _BLOCK_SENTINEL_DATA = ...
-    read_fbx_elem_uint = ...
 
     if fbx_version < 7500:
         _BLOCK_SENTINEL_LENGTH = 13
-        read_fbx_elem_uint = read_uint
+        read_fbx_elem_start = read_elem_start32
     else:
         _BLOCK_SENTINEL_LENGTH = 25
-        read_fbx_elem_uint = read_uint64
+        read_fbx_elem_start = read_elem_start64
 
     _BLOCK_SENTINEL_DATA = (b'\0' * _BLOCK_SENTINEL_LENGTH)
 
 
-def read_elem(read, tell, use_namedtuple):
+def read_elem(read, tell, use_namedtuple, tell_file_offset=0):
     # [0] the offset at which this block ends
     # [1] the number of properties in the scope
     # [2] the length of the property list
-    end_offset = read_fbx_elem_uint(read)
+    # [3] elem name length
+    # [4] elem name of the scope/key
+    # read_fbx_elem_start does not return [2] because we don't use it and does not return [3] because it is only used to
+    # get [4].
+    end_offset, prop_count, elem_id = read_fbx_elem_start(read)
     if end_offset == 0:
         return None
 
-    prop_count = read_fbx_elem_uint(read)
-    prop_length = read_fbx_elem_uint(read)
-
-    elem_id = read_string_ubyte(read)        # elem name of the scope/key
     elem_props_type = bytearray(prop_count)  # elem property types
     elem_props_data = [None] * prop_count    # elem properties (if any)
     elem_subtree = []                        # elem children (if any)
@@ -125,15 +135,58 @@ def read_elem(read, tell, use_namedtuple):
         elem_props_data[i] = read_data_dict[data_type](read)
         elem_props_type[i] = data_type
 
-    if tell() < end_offset:
-        while tell() < (end_offset - _BLOCK_SENTINEL_LENGTH):
-            elem_subtree.append(read_elem(read, tell, use_namedtuple))
+    pos = tell()
+    local_end_offset = end_offset - tell_file_offset
 
+    if pos < local_end_offset:
+        # The default BufferedReader used when `open()`-ing files in 'rb' mode has to get the raw stream position from
+        # the OS every time its tell() function is called. This is about 10 times slower than the tell() function of
+        # BytesIO objects, so reading chunks of bytes from the file into memory at once and exposing them through
+        # BytesIO can give better performance. We know the total size of each element's subtree so can read entire
+        # subtrees into memory at a time.
+        # The "Objects" element's subtree, however, usually makes up most of the file, so we specifically avoid reading
+        # all its sub-elements into memory at once to reduce memory requirements at the cost of slightly worse
        # performance when memory is not a concern.
+        # If we're currently reading directly from the opened file, then tell_file_offset will be zero.
+        if tell_file_offset == 0 and elem_id != b"Objects":
+            block_bytes_remaining = local_end_offset - pos
+
+            # Read the entire subtree
+            sub_elem_bytes = read(block_bytes_remaining)
+            num_bytes_read = len(sub_elem_bytes)
+            if num_bytes_read != block_bytes_remaining:
+                raise IOError("failed to read complete nested block, expected %i bytes, but only got %i"
+                              % (block_bytes_remaining, num_bytes_read))
+
+            # BytesIO provides IO API for reading bytes in memory, so we can use the same code as reading bytes
+            # directly from a file.
+            f = BytesIO(sub_elem_bytes)
+            tell = f.tell
+            read = f.read
+            # The new `tell` function starts at zero and is offset by `pos` bytes from the start of the file.
+            start_sub_pos = 0
+            tell_file_offset = pos
+            sub_tree_end = block_bytes_remaining - _BLOCK_SENTINEL_LENGTH
+        else:
+            # The `tell` function is unchanged, so starts at the value returned by `tell()`, which is still `pos`
+            # because no reads have been made since then.
+            start_sub_pos = pos
+            sub_tree_end = local_end_offset - _BLOCK_SENTINEL_LENGTH
+
+        sub_pos = start_sub_pos
+        while sub_pos < sub_tree_end:
+            elem_subtree.append(read_elem(read, tell, use_namedtuple, tell_file_offset))
+            sub_pos = tell()
+
+        # At the end of each subtree there should be a sentinel (an empty element with all bytes set to zero).
         if read(_BLOCK_SENTINEL_LENGTH) != _BLOCK_SENTINEL_DATA:
             raise IOError("failed to read nested block sentinel, "
                           "expected all bytes to be 0")
 
-    if tell() != end_offset:
+        # Update `pos` for the number of bytes that have been read.
+        pos += (sub_pos - start_sub_pos) + _BLOCK_SENTINEL_LENGTH
+
+    if pos != local_end_offset:
         raise IOError("scope length not reached, something is wrong")
 
     args = (elem_id, elem_props_data, elem_props_type, elem_subtree)
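
Note: the BufferedReader-vs-BytesIO argument in the new comments is easy to check outside the parser; a small benchmark sketch (the scratch file name and payload size are arbitrary):

import io
import timeit


def time_tell(obj, n=100_000):
    # Time n calls to the object's tell() method.
    return timeit.timeit(obj.tell, number=n)


payload = b'\0' * (1 << 20)

with open("scratch.bin", "wb") as fw:          # throwaway file
    fw.write(payload)

with open("scratch.bin", "rb") as fr:          # BufferedReader: tell() queries the raw stream position
    buffered = time_tell(fr)

in_memory = time_tell(io.BytesIO(payload))     # BytesIO: tell() is a cheap in-memory lookup

print(f"BufferedReader tell(): {buffered:.3f}s   BytesIO tell(): {in_memory:.3f}s")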

View File

@@ -5,8 +5,8 @@
 bl_info = {
     'name': 'glTF 2.0 format',
     'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
-    "version": (4, 0, 5),
-    'blender': (3, 5, 0),
+    "version": (4, 0, 6),
+    'blender': (4, 0, 0),
     'location': 'File > Import-Export',
     'description': 'Import-Export as glTF 2.0',
     'warning': '',

View File

@@ -72,9 +72,9 @@ def get_socket(blender_material: bpy.types.Material, name: str, volume=False):
         type = bpy.types.ShaderNodeBackground
         name = "Color"
     elif name == "sheenColor":
-        return get_node_socket(blender_material, bpy.types.ShaderNodeBsdfVelvet, "Color")
+        return get_node_socket(blender_material, bpy.types.ShaderNodeBsdfSheen, "Color")
     elif name == "sheenRoughness":
-        return get_node_socket(blender_material, bpy.types.ShaderNodeBsdfVelvet, "Sigma")
+        return get_node_socket(blender_material, bpy.types.ShaderNodeBsdfSheen, "Roughness")
     else:
         if volume is False:
             type = bpy.types.ShaderNodeBsdfPrincipled
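
Note: the Velvet BSDF was removed in Blender 4.0 and replaced by the Sheen BSDF, whose inputs are "Color" and "Roughness" (hence the Sigma -> Roughness change here). A hedged sketch of locating that socket on an arbitrary node-based material (helper name is made up; needs to run inside Blender 4.0+):

import bpy


def find_sheen_roughness_socket(material):
    # Return the Roughness input of the first Sheen BSDF node, or None if the material has no such node.
    if not material.use_nodes:
        return None
    for node in material.node_tree.nodes:
        if isinstance(node, bpy.types.ShaderNodeBsdfSheen):
            return node.inputs["Roughness"]
    return None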

View File

@@ -233,7 +233,7 @@ def __get_image_data_mapping(sockets, results, export_settings) -> ExportImage:
                 dst_chan = Channel.G
             elif socket.name == "Specular":  # For original KHR_material_specular
                 dst_chan = Channel.A
-            elif socket.name == "Sigma":  # For KHR_materials_sheen
+            elif socket.name == "Roughness" and socket.node.type == "BSDF_SHEEN":  # For KHR_materials_sheen
                 dst_chan = Channel.A
 
             if dst_chan is not None:

View File

@@ -31,7 +31,7 @@ def pbr_specular_glossiness(mh):
         make_emission_socket=mh.needs_emissive(),
         make_alpha_socket=not mh.is_opaque(),
         make_volume_socket=None,  # No possible to have KHR_materials_volume with specular/glossiness
-        make_velvet_socket=None  # No possible to have KHR_materials_volume with specular/glossiness
+        make_sheen_socket=None  # No possible to have KHR_materials_volume with specular/glossiness
     )
 
     if emission_socket:

View File

@@ -33,7 +33,7 @@ def unlit(mh):
         make_emission_socket=False,
         make_alpha_socket=not mh.is_opaque(),
         make_volume_socket=None,  # Not possible to have KHR_materials_volume with unlit
-        make_velvet_socket=None  #Not possible to have KHR_materials_sheen with unlit
+        make_sheen_socket=None  #Not possible to have KHR_materials_sheen with unlit
     )
 
     base_color(

View File

@@ -72,11 +72,11 @@ def pbr_metallic_roughness(mh: MaterialHelper):
         volume_location = additional_location
         additional_location = additional_location[0], additional_location[1] - 150
 
-    need_velvet_node = False
+    need_sheen_node = False
     if mh.pymat.extensions and 'KHR_materials_sheen' in mh.pymat.extensions:
-        need_velvet_node = True
+        need_sheen_node = True
 
-    _, _, volume_socket, velvet_node = make_output_nodes(
+    _, _, volume_socket, sheen_node = make_output_nodes(
         mh,
         location=(250, 260),
         additional_location=additional_location,
@@ -84,7 +84,7 @@ def pbr_metallic_roughness(mh: MaterialHelper):
         make_emission_socket=False,  # is managed by Principled shader node
         make_alpha_socket=False,  # is managed by Principled shader node
         make_volume_socket=need_volume_node,
-        make_velvet_socket=need_velvet_node
+        make_sheen_socket=need_sheen_node
     )
@@ -171,13 +171,13 @@ def pbr_metallic_roughness(mh: MaterialHelper):
             location_original_specularcolor=locs['original_specularColorTexture']
         )
 
-    if need_velvet_node:
+    if need_sheen_node:
         sheen(
             mh,
             location_sheenColor=locs['sheenColorTexture'],
             location_sheenRoughness=locs['sheenRoughnessTexture'],
-            sheenColor_socket=velvet_node.inputs[0],
-            sheenRoughness_socket=velvet_node.inputs[1]
+            sheenColor_socket=sheen_node.inputs[0],
+            sheenRoughness_socket=sheen_node.inputs[1]
         )
 
     ior(
@@ -616,7 +616,7 @@ def occlusion(mh: MaterialHelper, location, occlusion_socket):
 # => [Add Emission] => [Mix Alpha] => [Material Output] if needed, only for SpecGlossiness
 # => [Volume] => [Add Shader] => [Material Output] if needed
-# => [Velvet] => [Add Shader] => [Material Output] if needed
+# => [Sheen] => [Add Shader] => [Material Output] if needed
 def make_output_nodes(
     mh: MaterialHelper,
     location,
@@ -625,7 +625,7 @@ def make_output_nodes(
     make_emission_socket,
     make_alpha_socket,
     make_volume_socket,
-    make_velvet_socket,  # For sheen
+    make_sheen_socket,
 ):
     """
     Creates the Material Output node and connects shader_socket to it.
@@ -637,7 +637,7 @@ def make_output_nodes(
     """
     x, y = location
     emission_socket = None
-    velvet_node = None
+    sheen_node = None
     alpha_socket = None
 
     # Create an Emission node and add it to the shader.
@@ -666,22 +666,22 @@ def make_output_nodes(
         x += 380
         y += 125
 
-    # Create an Velvet node add add it to the shader
-    # Note that you can not have Emission & Velvet at the same time
-    if make_velvet_socket:
-        # Velvet
-        node = mh.node_tree.nodes.new("ShaderNodeBsdfVelvet")
+    # Create an Sheen node add add it to the shader
+    # Note that you can not have Emission & Sheen at the same time
+    if make_sheen_socket:
+        # Sheen
+        node = mh.node_tree.nodes.new("ShaderNodeBsdfSheen")
         node.location = x + 50, y + 250
         # Node
-        velvet_node = node
+        sheen_node = node
         # Outputs
-        velvet_output = node.outputs[0]
+        sheen_output = node.outputs[0]
         # Add
         node = mh.node_tree.nodes.new('ShaderNodeAddShader')
         node.location = x + 250, y + 160
         # Inputs
-        mh.node_tree.links.new(node.inputs[0], velvet_output)
+        mh.node_tree.links.new(node.inputs[0], sheen_output)
         mh.node_tree.links.new(node.inputs[1], shader_socket)
         # Outputs
         shader_socket = node.outputs[0]
@@ -730,7 +730,7 @@ make_output_nodes(
     volume_socket = node.outputs[0]
 
-    return emission_socket, alpha_socket, volume_socket, velvet_node
+    return emission_socket, alpha_socket, volume_socket, sheen_node
 
 
 def make_settings_node(mh):

View File

@@ -5,8 +5,8 @@
 bl_info = {
     "name": "Copy Render Settings",
     "author": "Bastien Montagne",
-    "version": (1, 1, 0),
-    "blender": (3, 0, 0),
+    "version": (1, 2, 0),
+    "blender": (3, 6, 0),
     "location": "Render buttons (Properties window)",
     "description": "Allows to copy a selection of render settings "
                    "from current scene to others.",

View File

@@ -16,9 +16,6 @@ presets = (CopyPreset("Resolution",
            CopyPreset("Scale",
                       ("scale", "Render Scale", "The “Render Scale” setting"),
                      {"resolution_percentage"}),
-           CopyPreset("OSA",
-                      ("osa", "Render OSA", "The OSA toggle and sample settings"),
-                      {"use_antialiasing", "antialiasing_samples"}),
            CopyPreset("Threads",
                       ("threads", "Render Threads", "The thread mode and number settings"),
                       {"threads_mode", "threads"}),

View File

@@ -158,6 +158,8 @@ def create_path(scene):
     else:
        frame = "{:04d}-{:04d}".format(scene.frame_start, scene.frame_end)
 
+    os.makedirs(dirname, exist_ok=True)
+
     return os.path.join(dirname, basename + frame + ".svg")
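
Note: the added os.makedirs(dirname, exist_ok=True) means the exporter no longer fails when the output directory does not exist yet; the same pattern in isolation (paths and helper name are illustrative):

import os


def ensure_svg_path(dirname, basename, frame):
    # Create the directory tree on demand; exist_ok avoids an error when it is already there.
    os.makedirs(dirname, exist_ok=True)
    return os.path.join(dirname, basename + frame + ".svg")


print(ensure_svg_path("/tmp/freestyle_svg_demo", "scene_", "0001-0250"))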