New Addon: Import Autodesk .max #105013
@@ -6,7 +6,7 @@ bl_info = {
     "name": "Curve Tools",
     "description": "Adds some functionality for bezier/nurbs curve/surface modeling",
     "author": "Mackraken, Spivak Vladimir (cwolf3d)",
-    "version": (0, 4, 5),
+    "version": (0, 4, 6),
     "blender": (2, 80, 0),
     "location": "View3D > Tool Shelf > Edit Tab",
     "doc_url": "{BLENDER_MANUAL_URL}/addons/add_curve/curve_tools.html",
@@ -52,18 +52,23 @@ class OperatorCurveInfo(bpy.types.Operator):
 class OperatorCurveLength(bpy.types.Operator):
     bl_idname = "curvetools.operatorcurvelength"
     bl_label = "Length"
-    bl_description = "Calculates the length of the active/selected curve"
+    bl_description = "Calculates the length of the active/selected curves"


     @classmethod
     def poll(cls, context):
-        return util.Selected1Curve()
+        return util.Selected1OrMoreCurves()


     def execute(self, context):
-        curve = curves.Curve(context.active_object)
+        selCurves = util.GetSelectedCurves()

-        context.scene.curvetools.CurveLength = curve.length
+        length = 0
+        for blCurve in selCurves:
+            curve = curves.Curve(blCurve)
+            length += curve.length
+
+        context.scene.curvetools.CurveLength = length

         return {'FINISHED'}
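Note (illustrative, not part of the patch): the new execute() sums curves.Curve(...).length over util.GetSelectedCurves(). The same idea can be sketched with only the stock bpy API, assuming Spline.calc_length() as the per-spline length and a hypothetical helper name:

import bpy

def total_selected_curve_length(context):
    # Sum an approximate length for every selected curve object.
    total = 0.0
    for ob in context.selected_objects:
        if ob.type == 'CURVE':
            total += sum(spline.calc_length() for spline in ob.data.splines)
    return total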
@@ -1,8 +1,6 @@
+# SPDX-FileCopyrightText: 2011-2022 Blender Foundation
+#
 # SPDX-License-Identifier: Apache-2.0
-# Copyright 2011-2022 Blender Foundation
-
-# <pep8 compliant>

 bl_info = {
     "name": "Hydra Storm render engine",
@@ -1,10 +1,10 @@
+# SPDX-FileCopyrightText: 2011-2022 Blender Foundation
+#
 # SPDX-License-Identifier: Apache-2.0
-# Copyright 2011-2022 Blender Foundation
-
-# <pep8 compliant>

 import bpy


 class StormHydraRenderEngine(bpy.types.HydraRenderEngine):
     bl_idname = 'HYDRA_STORM'
     bl_label = "Hydra Storm"
@@ -19,11 +19,11 @@ class StormHydraRenderEngine(bpy.types.HydraRenderEngine):
         settings = bpy.context.scene.hydra_storm.viewport if engine_type == 'VIEWPORT' else \
             bpy.context.scene.hydra_storm.final
         result = {
-            'enableTinyPrimCulling': settings.enable_tiny_prim_culling,
+            'enableTinyPrimCulling': settings.use_tiny_prim_culling,
+            'maxLights': settings.max_lights,
             'volumeRaymarchingStepSize': settings.volume_raymarching_step_size,
             'volumeRaymarchingStepSizeLighting': settings.volume_raymarching_step_size_lighting,
             'volumeMaxTextureMemoryPerField': settings.volume_max_texture_memory_per_field,
-            'maxLights': settings.max_lights,
         }

         if engine_type != 'VIEWPORT':
@@ -35,6 +35,8 @@ class StormHydraRenderEngine(bpy.types.HydraRenderEngine):
         return result

     def update_render_passes(self, scene, render_layer):
         if render_layer.use_pass_combined:
             self.register_pass(scene, render_layer, 'Combined', 4, 'RGBA', 'COLOR')
+        if render_layer.use_pass_z:
+            self.register_pass(scene, render_layer, 'Depth', 1, 'Z', 'VALUE')
@@ -1,7 +1,6 @@
+# SPDX-FileCopyrightText: 2011-2022 Blender Foundation
+#
 # SPDX-License-Identifier: Apache-2.0
-# Copyright 2011-2022 Blender Foundation
-
-# <pep8 compliant>

 import bpy
@@ -23,9 +22,14 @@ class Properties(bpy.types.PropertyGroup):


 class RenderProperties(bpy.types.PropertyGroup):
-    enable_tiny_prim_culling: bpy.props.BoolProperty(
+    max_lights: bpy.props.IntProperty(
+        name="Max Lights",
+        description="Limit maximum number of lights",
+        default=16, min=0, max=16,
+    )
+    use_tiny_prim_culling: bpy.props.BoolProperty(
         name="Tiny Prim Culling",
-        description="Enable Tiny Prim Culling",
+        description="Hide small geometry primitives to improve performance",
         default=False,
     )
     volume_raymarching_step_size: bpy.props.FloatProperty(
@@ -43,11 +47,6 @@ class RenderProperties(bpy.types.PropertyGroup):
         description="Maximum memory for a volume field texture in Mb (unless overridden by field prim)",
         default=128.0,
     )
-    max_lights: bpy.props.IntProperty(
-        name="Max Lights",
-        description="Limit maximum number of lights",
-        default=16, min=0, max=16,
-    )


 class SceneProperties(Properties):
@@ -1,7 +1,6 @@
+# SPDX-FileCopyrightText: 2011-2022 Blender Foundation
+#
 # SPDX-License-Identifier: Apache-2.0
-# Copyright 2011-2022 Blender Foundation
-
-# <pep8 compliant>

 import bpy
@@ -20,12 +19,32 @@ class Panel(bpy.types.Panel):


 #
-# Final render settings
+# Quality render settings
 #
-class STORM_HYDRA_RENDER_PT_final(Panel):
-    """Final render delegate and settings"""
-    bl_idname = 'STORM_HYDRA_RENDER_PT_final'
-    bl_label = "Final Render Settings"
+class STORM_HYDRA_RENDER_PT_quality(Panel):
+    bl_label = "Quality"
+
+    def draw(self, layout):
+        pass
+
+
+class STORM_HYDRA_RENDER_PT_quality_viewport(Panel):
+    bl_label = "Viewport"
+    bl_parent_id = "STORM_HYDRA_RENDER_PT_quality"

     def draw(self, context):
         layout = self.layout
+        layout.use_property_split = True
+        layout.use_property_decorate = False
+
+        settings = context.scene.hydra_storm.viewport
+        layout.prop(settings, 'max_lights')
+        layout.prop(settings, 'use_tiny_prim_culling')
+
+
+class STORM_HYDRA_RENDER_PT_quality_render(Panel):
+    bl_label = "Render"
+    bl_parent_id = "STORM_HYDRA_RENDER_PT_quality"
+
+    def draw(self, context):
+        layout = self.layout
@@ -33,17 +52,42 @@ class STORM_HYDRA_RENDER_PT_final(Panel):
         layout.use_property_decorate = False

         settings = context.scene.hydra_storm.final
-        layout.prop(settings, 'enable_tiny_prim_culling')
         layout.prop(settings, 'max_lights')
+        layout.prop(settings, 'use_tiny_prim_culling')


-class STORM_HYDRA_RENDER_PT_volume_final(bpy.types.Panel):
-    bl_parent_id = STORM_HYDRA_RENDER_PT_final.bl_idname
-    bl_label = "Volume Raymarching"
-    bl_space_type = 'PROPERTIES'
-    bl_region_type = 'WINDOW'
+#
+# Volume render settings
+#
+class STORM_HYDRA_RENDER_PT_volumes(Panel):
+    bl_label = "Volumes"
+    bl_options = {'DEFAULT_CLOSED'}
+
+    def draw(self, layout):
+        pass
+
+
+class STORM_HYDRA_RENDER_PT_volumes_viewport(Panel):
+    bl_label = "Viewport"
+    bl_parent_id = "STORM_HYDRA_RENDER_PT_volumes"

     def draw(self, context):
         layout = self.layout
         layout.use_property_split = True
+        layout.use_property_decorate = False
+
+        settings = context.scene.hydra_storm.viewport
+
+        col = layout.column(align=True)
+        col.prop(settings, "volume_raymarching_step_size", text="Step Size")
+        col.prop(settings, "volume_raymarching_step_size_lighting", text="Step Size Lightning")
+        col.prop(settings, "volume_max_texture_memory_per_field")
+
+
+class STORM_HYDRA_RENDER_PT_volumes_render(Panel):
+    bl_label = "Render"
+    bl_parent_id = "STORM_HYDRA_RENDER_PT_volumes"
+
+    def draw(self, context):
+        layout = self.layout
+        layout.use_property_split = True
@@ -58,28 +102,10 @@ class STORM_HYDRA_RENDER_PT_volume_final(bpy.types.Panel):


 #
-# Viewport render settings
+# Film settings
 #
-class STORM_HYDRA_RENDER_PT_viewport(Panel):
-    """Viewport render delegate and settings"""
-    bl_idname = 'STORM_HYDRA_RENDER_PT_viewport'
-    bl_label = "Viewport Render Settings"
-
-    def draw(self, context):
-        layout = self.layout
-        layout.use_property_split = True
-        layout.use_property_decorate = False
-
-        settings = context.scene.hydra_storm.viewport
-        layout.prop(settings, 'enable_tiny_prim_culling')
-        layout.prop(settings, 'max_lights')
-
-
-class STORM_HYDRA_RENDER_PT_volume_viewport(bpy.types.Panel):
-    bl_parent_id = STORM_HYDRA_RENDER_PT_viewport.bl_idname
-    bl_label = "Volume Raymarching"
-    bl_space_type = 'PROPERTIES'
-    bl_region_type = 'WINDOW'
+class STORM_HYDRA_RENDER_PT_film(Panel):
+    bl_label = "Film"
+    bl_options = {'DEFAULT_CLOSED'}

     def draw(self, context):
@@ -87,14 +113,40 @@ class STORM_HYDRA_RENDER_PT_volume_viewport(bpy.types.Panel):
         layout.use_property_split = True
         layout.use_property_decorate = False

-        settings = context.scene.hydra_storm.viewport
-
-        col = layout.column(align=True)
-        col.prop(settings, "volume_raymarching_step_size", text="Step Size")
-        col.prop(settings, "volume_raymarching_step_size_lighting", text="Step Size Lightning")
-        col.prop(settings, "volume_max_texture_memory_per_field")
+        layout.prop(context.scene.render, "film_transparent", text="Transparent Background")
+
+
+#
+# View layer settings
+#
+class STORM_HYDRA_RENDER_PT_passes(Panel):
+    bl_label = "Passes"
+    bl_context = "view_layer"
+
+    def draw(self, context):
+        pass
+
+
+class STORM_HYDRA_RENDER_PT_passes_data(Panel):
+    bl_label = "Data"
+    bl_context = "view_layer"
+    bl_parent_id = "STORM_HYDRA_RENDER_PT_passes"
+
+    def draw(self, context):
+        layout = self.layout
+        layout.use_property_split = True
+        layout.use_property_decorate = False
+
+        view_layer = context.view_layer
+
+        col = layout.column(heading="Include", align=True)
+        col.prop(view_layer, "use_pass_combined")
+        col.prop(view_layer, "use_pass_z")


 #
 # Light settings
 #
 class STORM_HYDRA_LIGHT_PT_light(Panel):
     """Physical light sources"""
     bl_label = "Light"
@@ -148,49 +200,13 @@ class STORM_HYDRA_LIGHT_PT_light(Panel):
         main_col.prop(light, 'size')


-class STORM_HYDRA_RENDER_PT_film(Panel):
-    bl_label = "Film"
-    bl_space_type = 'PROPERTIES'
-    bl_region_type = 'WINDOW'
-    bl_options = {'DEFAULT_CLOSED'}
-
-    def draw(self, context):
-        layout = self.layout
-        layout.use_property_split = True
-        layout.use_property_decorate = False
-
-        layout.prop(context.scene.render, "film_transparent", text="Transparent Background")
-
-
-class STORM_HYDRA_RENDER_PT_passes(Panel):
-    bl_label = "Passes"
-    bl_context = "view_layer"
-
-    def draw(self, context):
-        pass
-
-
-class STORM_HYDRA_RENDER_PT_passes_data(Panel):
-    bl_label = "Data"
-    bl_context = "view_layer"
-    bl_parent_id = "STORM_HYDRA_RENDER_PT_passes"
-
-    def draw(self, context):
-        layout = self.layout
-        layout.use_property_split = True
-        layout.use_property_decorate = False
-
-        view_layer = context.view_layer
-
-        col = layout.column(heading="Include", align=True)
-        col.prop(view_layer, "use_pass_z")
-
-
 register_classes, unregister_classes = bpy.utils.register_classes_factory((
-    STORM_HYDRA_RENDER_PT_final,
-    STORM_HYDRA_RENDER_PT_volume_final,
-    STORM_HYDRA_RENDER_PT_viewport,
-    STORM_HYDRA_RENDER_PT_volume_viewport,
+    STORM_HYDRA_RENDER_PT_quality,
+    STORM_HYDRA_RENDER_PT_quality_viewport,
+    STORM_HYDRA_RENDER_PT_quality_render,
+    STORM_HYDRA_RENDER_PT_volumes,
+    STORM_HYDRA_RENDER_PT_volumes_viewport,
+    STORM_HYDRA_RENDER_PT_volumes_render,
     STORM_HYDRA_RENDER_PT_film,
     STORM_HYDRA_LIGHT_PT_light,
     STORM_HYDRA_RENDER_PT_passes,
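For context on the registration list above: bpy.utils.register_classes_factory returns a register/unregister function pair for the listed classes, so a minimal, assumed hook-up (not shown in this hunk) is simply:

def register():
    register_classes()


def unregister():
    unregister_classes()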
@@ -142,7 +142,6 @@ class BVH_PT_import_main(bpy.types.Panel):
     bl_parent_id = "FILE_PT_operator"
     bl_options = {'HIDE_HEADER'}

-
     @classmethod
     def poll(cls, context):
         sfile = context.space_data
@@ -368,6 +367,7 @@ classes = (
     BVH_PT_export_animation,
 )

+
 def register():
     for cls in classes:
         bpy.utils.register_class(cls)
@@ -383,5 +383,6 @@ def unregister():
     bpy.types.TOPBAR_MT_file_import.remove(menu_func_import)
     bpy.types.TOPBAR_MT_file_export.remove(menu_func_export)

+
 if __name__ == "__main__":
     register()
@@ -77,7 +77,12 @@ def write_armature(
         if (bone.use_connect or root_transform_only) and bone.parent:
             file.write("%s\tCHANNELS 3 %srotation %srotation %srotation\n" % (indent_str, *rot_order_str))
         else:
-            file.write("%s\tCHANNELS 6 Xposition Yposition Zposition %srotation %srotation %srotation\n" % (indent_str, *rot_order_str))
+            file.write(
+                "%s\tCHANNELS 6 Xposition Yposition Zposition %srotation %srotation %srotation\n" % (
+                    indent_str,
+                    *rot_order_str,
+                )
+            )

         if my_children:
             # store the location for the children
@@ -250,7 +255,13 @@ def write_armature(
             if not dbone.skip_position:
                 file.write("%.6f %.6f %.6f " % (loc * global_scale)[:])

-            file.write("%.6f %.6f %.6f " % (degrees(rot[dbone.rot_order[0]]), degrees(rot[dbone.rot_order[1]]), degrees(rot[dbone.rot_order[2]])))
+            file.write(
+                "%.6f %.6f %.6f " % (
+                    degrees(rot[dbone.rot_order[0]]),
+                    degrees(rot[dbone.rot_order[1]]),
+                    degrees(rot[dbone.rot_order[2]]),
+                )
+            )

             dbone.prev_euler = rot
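To make the reformatted write above concrete, and assuming rot_order_str yields three axis letters (as the "%srotation" format implies), the wrapped call produces a standard BVH CHANNELS line; values below are only for the example:

indent_str = "\t"
rot_order_str = ('Z', 'X', 'Y')
line = "%s\tCHANNELS 6 Xposition Yposition Zposition %srotation %srotation %srotation\n" % (
    indent_str,
    *rot_order_str,
)
# line == "\t\tCHANNELS 6 Xposition Yposition Zposition Zrotation Xrotation Yrotation\n"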
@@ -137,6 +137,14 @@ def read_bvh(context, file_path, rotate_mode='XYZ', global_scale=1.0):
             # Make sure the names are unique - Object names will match joint names exactly and both will be unique.
             name = file_lines[lineIdx][1]

+            # While unlikely, there exists a user report of duplicate joint names, see: #109399.
+            if name in bvh_nodes:
+                name_orig = name
+                name_index = 1
+                while (name := "%s.%03d" % (name_orig, name_index)) in bvh_nodes:
+                    name_index += 1
+                del name_orig, name_index
+
             # print '%snode: %s, parent: %s' % (len(bvh_nodes_serial) * ' ', name, bvh_nodes_serial[-1])

             lineIdx += 2  # Increment to the next line (Offset)
@@ -471,7 +479,7 @@ def bvh_node_dict2armature(
             bvh_node.temp.parent = bvh_node.parent.temp

             # Set the connection state
-            if(
+            if (
                 (not bvh_node.has_loc) and
                 (bvh_node.parent.temp.name not in ZERO_AREA_BONES) and
                 (bvh_node.parent.rest_tail_local == bvh_node.rest_head_local)
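The duplicate-name handling added above is essentially a small uniquifier that appends Blender-style ".001" suffixes; a standalone sketch of the same scheme (hypothetical helper, not part of the importer):

def make_unique(name, taken):
    # Return `name`, or `name` with the first free ".NNN" suffix if it is already taken.
    if name not in taken:
        return name
    index = 1
    while (candidate := "%s.%03d" % (name, index)) in taken:
        index += 1
    return candidate

# Example: make_unique("Hips", {"Hips", "Hips.001"}) -> "Hips.002"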
@@ -551,10 +551,10 @@ def make_percent_subchunk(chunk_id, percent):
     return pct_sub


-def make_texture_chunk(chunk_id, images):
+def make_texture_chunk(chunk_id, images, pct):
     """Make Material Map texture chunk."""
     # Add texture percentage value (100 = 1.0)
-    mat_sub = make_percent_subchunk(chunk_id, 1)
+    mat_sub = make_percent_subchunk(chunk_id, pct)
     has_entry = False

     def add_image(img):
@@ -748,19 +748,18 @@ def make_material_chunk(material, image):

         # Make sure no textures are lost. Everything that doesn't fit
         # into a channel is exported as secondary texture
-        diffuse = []
-
-        for link in wrap.material.node_tree.links:
-            if link.from_node.type == 'TEX_IMAGE' and link.to_node.type in {'MIX', 'MIX_RGB'}:
-                diffuse = [link.from_node.image]
-
-        if diffuse:
-            if not primary_tex:
-                matmap = make_texture_chunk(MAT_DIFFUSEMAP, diffuse)
-            else:
-                matmap = make_texture_chunk(MAT_TEX2MAP, diffuse)
-            if matmap:
-                material_chunk.add_subchunk(matmap)
+        matmap = False
+        lks = material.node_tree.links
+        pct = next((lk.from_node.inputs[0].default_value for lk in lks if lk.from_node.type in {'MIX', 'MIX_RGB'} and lk.to_node.type == 'BSDF_PRINCIPLED'), 0.5)
+        for link in mtlks:
+            mix_primary = link.from_node.image if link.from_node.type == 'TEX_IMAGE' and link.to_socket.identifier in {'Color2', 'B_Color'} else False
+            mix_secondary = link.from_node.image if link.from_node.type == 'TEX_IMAGE' and link.to_socket.identifier in {'Color1', 'A_Color'} else False
+            if mix_secondary:
+                matmap = make_uv_texture_chunk(MAT_TEXMAP, [mix_secondary], pct)
+            elif not primary_tex and mix_primary:
+                material_chunk.add_subchunk(make_uv_texture_chunk(MAT_DIFFUSEMAP, [mix_primary], pct))
+        if matmap:
+            material_chunk.add_subchunk(matmap)

     else:
         shading.add_variable("shading", _3ds_ushort(2))  # Gouraud shading
@@ -5,7 +5,7 @@
 bl_info = {
     "name": "FBX format",
     "author": "Campbell Barton, Bastien Montagne, Jens Restemeier, @Mysteryem",
-    "version": (5, 5, 1),
+    "version": (5, 6, 0),
     "blender": (3, 6, 0),
     "location": "File > Import-Export",
     "description": "FBX IO meshes, UVs, vertex colors, materials, textures, cameras, lamps and actions",
@@ -169,6 +169,7 @@ def elem_prop_first(elem, default=None):
 # ----
 # Support for
 # Properties70: { ... P:
+# Custom properties ("user properties" in FBX) are ignored here and get handled separately (see #104773).
 def elem_props_find_first(elem, elem_prop_id):
     if elem is None:
         # When properties are not found... Should never happen, but happens - as usual.
@@ -185,7 +186,8 @@ def elem_props_find_first(elem, elem_prop_id):

     for subelem in elem.elems:
         assert(subelem.id == b'P')
-        if subelem.props[0] == elem_prop_id:
+        # 'U' flag indicates that the property has been defined by the user.
+        if subelem.props[0] == elem_prop_id and b'U' not in subelem.props[3]:
             return subelem
     return None

@@ -1885,7 +1887,6 @@ def blen_read_light(fbx_tmpl, fbx_obj, settings):
     # TODO, cycles nodes???
     lamp.color = elem_props_get_color_rgb(fbx_props, b'Color', (1.0, 1.0, 1.0))
     lamp.energy = elem_props_get_number(fbx_props, b'Intensity', 100.0) / 100.0
-    lamp.distance = elem_props_get_number(fbx_props, b'DecayStart', 25.0) * settings.global_scale
     lamp.use_shadow = elem_props_get_bool(fbx_props, b'CastShadow', True)
     if hasattr(lamp, "cycles"):
         lamp.cycles.cast_shadow = lamp.use_shadow
@@ -13,6 +13,7 @@ __all__ = (
 from struct import unpack
 import array
 import zlib
+from io import BytesIO

 from . import data_types
@@ -20,7 +21,7 @@ from . import data_types
 # that the sub-scope exists (i.e. to distinguish between P: and P : {})
 _BLOCK_SENTINEL_LENGTH = ...
 _BLOCK_SENTINEL_DATA = ...
-read_fbx_elem_uint = ...
+read_fbx_elem_start = ...
 _IS_BIG_ENDIAN = (__import__("sys").byteorder != 'little')
 _HEAD_MAGIC = b'Kaydara FBX Binary\x20\x20\x00\x1a\x00'
 from collections import namedtuple
@@ -32,10 +33,6 @@ def read_uint(read):
     return unpack(b'<I', read(4))[0]


-def read_uint64(read):
-    return unpack(b'<Q', read(8))[0]
-
-
 def read_ubyte(read):
     return unpack(b'B', read(1))[0]
@@ -46,10 +43,24 @@ def read_string_ubyte(read):
     return data


+def read_array_params(read):
+    return unpack(b'<III', read(12))
+
+
+def read_elem_start32(read):
+    end_offset, prop_count, _prop_length, elem_id_size = unpack(b'<IIIB', read(13))
+    elem_id = read(elem_id_size) if elem_id_size else b""
+    return end_offset, prop_count, elem_id
+
+
+def read_elem_start64(read):
+    end_offset, prop_count, _prop_length, elem_id_size = unpack(b'<QQQB', read(25))
+    elem_id = read(elem_id_size) if elem_id_size else b""
+    return end_offset, prop_count, elem_id
+
+
 def unpack_array(read, array_type, array_stride, array_byteswap):
-    length = read_uint(read)
-    encoding = read_uint(read)
-    comp_len = read_uint(read)
+    length, encoding, comp_len = read_array_params(read)

     data = read(comp_len)
@@ -89,33 +100,32 @@ read_data_dict = {
 # * The NULL block marking end of nested stuff switches from 13 bytes long to 25 bytes long.
 # * The FBX element metadata (end_offset, prop_count and prop_length) switch from uint32 to uint64.
 def init_version(fbx_version):
-    global _BLOCK_SENTINEL_LENGTH, _BLOCK_SENTINEL_DATA, read_fbx_elem_uint
+    global _BLOCK_SENTINEL_LENGTH, _BLOCK_SENTINEL_DATA, read_fbx_elem_start

     _BLOCK_SENTINEL_LENGTH = ...
     _BLOCK_SENTINEL_DATA = ...
-    read_fbx_elem_uint = ...

     if fbx_version < 7500:
         _BLOCK_SENTINEL_LENGTH = 13
-        read_fbx_elem_uint = read_uint
+        read_fbx_elem_start = read_elem_start32
     else:
         _BLOCK_SENTINEL_LENGTH = 25
-        read_fbx_elem_uint = read_uint64
+        read_fbx_elem_start = read_elem_start64
     _BLOCK_SENTINEL_DATA = (b'\0' * _BLOCK_SENTINEL_LENGTH)


-def read_elem(read, tell, use_namedtuple):
+def read_elem(read, tell, use_namedtuple, tell_file_offset=0):
     # [0] the offset at which this block ends
     # [1] the number of properties in the scope
     # [2] the length of the property list
-    end_offset = read_fbx_elem_uint(read)
+    # [3] elem name length
+    # [4] elem name of the scope/key
+    # read_fbx_elem_start does not return [2] because we don't use it and does not return [3] because it is only used to
+    # get [4].
+    end_offset, prop_count, elem_id = read_fbx_elem_start(read)
     if end_offset == 0:
         return None

-    prop_count = read_fbx_elem_uint(read)
-    prop_length = read_fbx_elem_uint(read)
-
-    elem_id = read_string_ubyte(read)  # elem name of the scope/key
     elem_props_type = bytearray(prop_count)  # elem property types
     elem_props_data = [None] * prop_count  # elem properties (if any)
     elem_subtree = []  # elem children (if any)
@@ -125,15 +135,58 @@ def read_elem(read, tell, use_namedtuple):
         elem_props_data[i] = read_data_dict[data_type](read)
         elem_props_type[i] = data_type

-    if tell() < end_offset:
-        while tell() < (end_offset - _BLOCK_SENTINEL_LENGTH):
-            elem_subtree.append(read_elem(read, tell, use_namedtuple))
+    pos = tell()
+    local_end_offset = end_offset - tell_file_offset
+
+    if pos < local_end_offset:
+        # The default BufferedReader used when `open()`-ing files in 'rb' mode has to get the raw stream position from
+        # the OS every time its tell() function is called. This is about 10 times slower than the tell() function of
+        # BytesIO objects, so reading chunks of bytes from the file into memory at once and exposing them through
+        # BytesIO can give better performance. We know the total size of each element's subtree so can read entire
+        # subtrees into memory at a time.
+        # The "Objects" element's subtree, however, usually makes up most of the file, so we specifically avoid reading
+        # all its sub-elements into memory at once to reduce memory requirements at the cost of slightly worse
+        # performance when memory is not a concern.
+        # If we're currently reading directly from the opened file, then tell_file_offset will be zero.
+        if tell_file_offset == 0 and elem_id != b"Objects":
+            block_bytes_remaining = local_end_offset - pos
+
+            # Read the entire subtree
+            sub_elem_bytes = read(block_bytes_remaining)
+            num_bytes_read = len(sub_elem_bytes)
+            if num_bytes_read != block_bytes_remaining:
+                raise IOError("failed to read complete nested block, expected %i bytes, but only got %i"
+                              % (block_bytes_remaining, num_bytes_read))
+
+            # BytesIO provides IO API for reading bytes in memory, so we can use the same code as reading bytes directly
+            # from a file.
+            f = BytesIO(sub_elem_bytes)
+            tell = f.tell
+            read = f.read
+            # The new `tell` function starts at zero and is offset by `pos` bytes from the start of the file.
+            start_sub_pos = 0
+            tell_file_offset = pos
+            sub_tree_end = block_bytes_remaining - _BLOCK_SENTINEL_LENGTH
+        else:
+            # The `tell` function is unchanged, so starts at the value returned by `tell()`, which is still `pos`
+            # because no reads have been made since then.
+            start_sub_pos = pos
+            sub_tree_end = local_end_offset - _BLOCK_SENTINEL_LENGTH
+
+        sub_pos = start_sub_pos
+        while sub_pos < sub_tree_end:
+            elem_subtree.append(read_elem(read, tell, use_namedtuple, tell_file_offset))
+            sub_pos = tell()

+        # At the end of each subtree there should be a sentinel (an empty element with all bytes set to zero).
         if read(_BLOCK_SENTINEL_LENGTH) != _BLOCK_SENTINEL_DATA:
             raise IOError("failed to read nested block sentinel, "
                           "expected all bytes to be 0")

-    if tell() != end_offset:
+        # Update `pos` for the number of bytes that have been read.
+        pos += (sub_pos - start_sub_pos) + _BLOCK_SENTINEL_LENGTH
+
+    if pos != local_end_offset:
         raise IOError("scope length not reached, something is wrong")

     args = (elem_id, elem_props_data, elem_props_type, elem_subtree)
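The buffering strategy described in the comments above — read a whole sub-block into memory once so that subsequent tell()/read() calls are cheap in-memory BytesIO operations rather than OS-level file calls — can be shown on its own; the helper below is a simplified sketch, not the parser's actual code:

from io import BytesIO

def buffered_block_reader(read, block_size):
    # Pull `block_size` bytes from the underlying file in one read(),
    # then hand back in-memory read/tell functions for parsing that block.
    data = read(block_size)
    if len(data) != block_size:
        raise IOError("expected %i bytes, got %i" % (block_size, len(data)))
    buf = BytesIO(data)
    return buf.read, buf.tell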
@@ -5,8 +5,8 @@
 bl_info = {
     'name': 'glTF 2.0 format',
     'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
-    "version": (4, 0, 5),
-    'blender': (3, 5, 0),
+    "version": (4, 0, 6),
+    'blender': (4, 0, 0),
     'location': 'File > Import-Export',
     'description': 'Import-Export as glTF 2.0',
     'warning': '',
@@ -72,9 +72,9 @@ def get_socket(blender_material: bpy.types.Material, name: str, volume=False):
         type = bpy.types.ShaderNodeBackground
         name = "Color"
     elif name == "sheenColor":
-        return get_node_socket(blender_material, bpy.types.ShaderNodeBsdfVelvet, "Color")
+        return get_node_socket(blender_material, bpy.types.ShaderNodeBsdfSheen, "Color")
     elif name == "sheenRoughness":
-        return get_node_socket(blender_material, bpy.types.ShaderNodeBsdfVelvet, "Sigma")
+        return get_node_socket(blender_material, bpy.types.ShaderNodeBsdfSheen, "Roughness")
     else:
         if volume is False:
             type = bpy.types.ShaderNodeBsdfPrincipled
@@ -233,7 +233,7 @@ def __get_image_data_mapping(sockets, results, export_settings) -> ExportImage:
             dst_chan = Channel.G
         elif socket.name == "Specular":  # For original KHR_material_specular
             dst_chan = Channel.A
-        elif socket.name == "Sigma":  # For KHR_materials_sheen
+        elif socket.name == "Roughness" and socket.node.type == "BSDF_SHEEN":  # For KHR_materials_sheen
             dst_chan = Channel.A

         if dst_chan is not None:
@@ -31,7 +31,7 @@ def pbr_specular_glossiness(mh):
         make_emission_socket=mh.needs_emissive(),
         make_alpha_socket=not mh.is_opaque(),
         make_volume_socket=None,  # No possible to have KHR_materials_volume with specular/glossiness
-        make_velvet_socket=None  # No possible to have KHR_materials_volume with specular/glossiness
+        make_sheen_socket=None  # No possible to have KHR_materials_volume with specular/glossiness
     )

     if emission_socket:
@@ -33,7 +33,7 @@ def unlit(mh):
         make_emission_socket=False,
         make_alpha_socket=not mh.is_opaque(),
         make_volume_socket=None,  # Not possible to have KHR_materials_volume with unlit
-        make_velvet_socket=None  #Not possible to have KHR_materials_sheen with unlit
+        make_sheen_socket=None  #Not possible to have KHR_materials_sheen with unlit
     )

     base_color(
@@ -72,11 +72,11 @@ def pbr_metallic_roughness(mh: MaterialHelper):
         volume_location = additional_location
         additional_location = additional_location[0], additional_location[1] - 150

-    need_velvet_node = False
+    need_sheen_node = False
     if mh.pymat.extensions and 'KHR_materials_sheen' in mh.pymat.extensions:
-        need_velvet_node = True
+        need_sheen_node = True

-    _, _, volume_socket, velvet_node = make_output_nodes(
+    _, _, volume_socket, sheen_node = make_output_nodes(
         mh,
         location=(250, 260),
         additional_location=additional_location,
@@ -84,7 +84,7 @@ def pbr_metallic_roughness(mh: MaterialHelper):
         make_emission_socket=False,  # is managed by Principled shader node
         make_alpha_socket=False,  # is managed by Principled shader node
         make_volume_socket=need_volume_node,
-        make_velvet_socket=need_velvet_node
+        make_sheen_socket=need_sheen_node
     )

@@ -171,13 +171,13 @@ def pbr_metallic_roughness(mh: MaterialHelper):
             location_original_specularcolor=locs['original_specularColorTexture']
         )

-    if need_velvet_node:
+    if need_sheen_node:
         sheen(
             mh,
             location_sheenColor=locs['sheenColorTexture'],
             location_sheenRoughness=locs['sheenRoughnessTexture'],
-            sheenColor_socket=velvet_node.inputs[0],
-            sheenRoughness_socket=velvet_node.inputs[1]
+            sheenColor_socket=sheen_node.inputs[0],
+            sheenRoughness_socket=sheen_node.inputs[1]
         )

     ior(
@@ -616,7 +616,7 @@ def occlusion(mh: MaterialHelper, location, occlusion_socket):

 # => [Add Emission] => [Mix Alpha] => [Material Output] if needed, only for SpecGlossiness
 # => [Volume] => [Add Shader] => [Material Output] if needed
-# => [Velvet] => [Add Shader] => [Material Output] if needed
+# => [Sheen] => [Add Shader] => [Material Output] if needed
 def make_output_nodes(
     mh: MaterialHelper,
     location,
@@ -625,7 +625,7 @@ def make_output_nodes(
     make_emission_socket,
     make_alpha_socket,
     make_volume_socket,
-    make_velvet_socket, # For sheen
+    make_sheen_socket,
 ):
     """
     Creates the Material Output node and connects shader_socket to it.
@@ -637,7 +637,7 @@ def make_output_nodes(
     """
     x, y = location
     emission_socket = None
-    velvet_node = None
+    sheen_node = None
     alpha_socket = None

     # Create an Emission node and add it to the shader.
@@ -666,22 +666,22 @@ def make_output_nodes(
         x += 380
         y += 125

-    # Create an Velvet node add add it to the shader
-    # Note that you can not have Emission & Velvet at the same time
-    if make_velvet_socket:
-        # Velvet
-        node = mh.node_tree.nodes.new("ShaderNodeBsdfVelvet")
+    # Create an Sheen node add add it to the shader
+    # Note that you can not have Emission & Sheen at the same time
+    if make_sheen_socket:
+        # Sheen
+        node = mh.node_tree.nodes.new("ShaderNodeBsdfSheen")
         node.location = x + 50, y + 250
         # Node
-        velvet_node = node
+        sheen_node = node
         # Outputs
-        velvet_output = node.outputs[0]
+        sheen_output = node.outputs[0]

         # Add
         node = mh.node_tree.nodes.new('ShaderNodeAddShader')
         node.location = x + 250, y + 160
         # Inputs
-        mh.node_tree.links.new(node.inputs[0], velvet_output)
+        mh.node_tree.links.new(node.inputs[0], sheen_output)
         mh.node_tree.links.new(node.inputs[1], shader_socket)
         # Outputs
         shader_socket = node.outputs[0]
@@ -730,7 +730,7 @@ def make_output_nodes(
         volume_socket = node.outputs[0]


-    return emission_socket, alpha_socket, volume_socket, velvet_node
+    return emission_socket, alpha_socket, volume_socket, sheen_node


 def make_settings_node(mh):
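Since the hunks above amount to renaming the old Velvet BSDF path to the new Sheen BSDF, the core node setup can be summarized in a few lines; this is a simplified sketch assuming Blender 4.0's "ShaderNodeBsdfSheen", not the importer's full make_output_nodes logic:

def add_sheen_over(node_tree, shader_socket):
    # Create a Sheen BSDF and blend it into an existing shader via Add Shader.
    sheen = node_tree.nodes.new("ShaderNodeBsdfSheen")
    add = node_tree.nodes.new("ShaderNodeAddShader")
    node_tree.links.new(add.inputs[0], sheen.outputs[0])
    node_tree.links.new(add.inputs[1], shader_socket)
    return add.outputs[0], sheen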
@@ -5,8 +5,8 @@
 bl_info = {
     "name": "Copy Render Settings",
     "author": "Bastien Montagne",
-    "version": (1, 1, 0),
-    "blender": (3, 0, 0),
+    "version": (1, 2, 0),
+    "blender": (3, 6, 0),
     "location": "Render buttons (Properties window)",
     "description": "Allows to copy a selection of render settings "
                    "from current scene to others.",
@@ -24,17 +24,17 @@ if "bpy" in locals():

 else:
     from . import (
-            data,
-            operator,
-            panel,
-            translations,
-            )
+        data,
+        operator,
+        panel,
+        translations,
+    )


 import bpy
 from bpy.props import (
-        PointerProperty,
-        )
+    PointerProperty,
+)


 classes = data.classes + operator.classes + panel.classes
@@ -16,13 +16,10 @@ presets = (CopyPreset("Resolution",
            CopyPreset("Scale",
                       ("scale", "Render Scale", "The “Render Scale” setting"),
                       {"resolution_percentage"}),
-           CopyPreset("OSA",
-                      ("osa", "Render OSA", "The OSA toggle and sample settings"),
-                      {"use_antialiasing", "antialiasing_samples"}),
            CopyPreset("Threads",
                       ("threads", "Render Threads", "The thread mode and number settings"),
                       {"threads_mode", "threads"}),
            CopyPreset("Stamp",
                       ("stamp", "Render Stamp", "The Stamp toggle"),
                       {"use_stamp"})
-                      )
+           )
@@ -158,6 +158,8 @@ def create_path(scene):
     else:
         frame = "{:04d}-{:04d}".format(scene.frame_start, scene.frame_end)

+    os.makedirs(dirname, exist_ok=True)
+
     return os.path.join(dirname, basename + frame + ".svg")