New Addon: Import Autodesk .max #105013

Closed
Sebastian Sille wants to merge 136 commits from (deleted):nrgsille-import_max into main

4 changed files with 38 additions and 15 deletions
Showing only changes of commit e9a1fdb023

View File

@@ -710,6 +710,7 @@ def make_material_chunk(material, image):
material_chunk.add_subchunk(make_percent_subchunk(MATTRANS, 1 - wrap.alpha))
material_chunk.add_subchunk(make_percent_subchunk(MATXPFALL, wrap.transmission))
material_chunk.add_subchunk(make_percent_subchunk(MATSELFILPCT, wrap.emission_strength))
material_chunk.add_subchunk(make_percent_subchunk(MATREFBLUR, wrap.node_principled_bsdf.inputs['Coat Weight'].default_value))
material_chunk.add_subchunk(shading)
primary_tex = False
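
For context on the new MATREFBLUR line: 3DS stores percentages as a small subchunk nested inside the material property chunk. Below is a minimal sketch of that layout, assuming the conventional chunk IDs (0xA053 for reflection blur, 0x0030 for a short percentage); `percent_chunk` is a hypothetical helper for illustration, not the addon's `make_percent_subchunk`.

```python
import struct

PCT_SHORT = 0x0030    # percentage payload stored as a 16-bit integer (0-100)
MAT_REFBLUR = 0xA053  # reflection blur material chunk (mapped from Coat Weight above)

def percent_chunk(chunk_id, value):
    """Wrap a 0..1 value as <chunk_id> containing a PCT_SHORT subchunk."""
    payload = struct.pack("<HIh", PCT_SHORT, 6 + 2, int(round(value * 100)))
    return struct.pack("<HI", chunk_id, 6 + len(payload)) + payload

blob = percent_chunk(MAT_REFBLUR, 0.25)  # Coat Weight 0.25 -> 25 percent reflection blur
```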
@@ -1303,10 +1304,11 @@ def make_object_node(ob, translation, rotation, scale, name_id):
else: # Add flag variables - Based on observation flags1 is usually 0x0040 and 0x4000 for empty objects
obj_node_header_chunk.add_variable("name", _3ds_string(sane_name(name)))
obj_node_header_chunk.add_variable("flags1", _3ds_ushort(0x0040))
"""Flags2 defines 0x01 for display path, 0x04 object frozen,
0x10 for motion blur, 0x20 for material morph and bit 0x40 for mesh morph."""
obj_node_header_chunk.add_variable("flags2", _3ds_ushort(0))
# Flag 0x01 display path 0x02 use autosmooth 0x04 object frozen 0x10 motion blur 0x20 material morph 0x40 mesh morph
if ob.type == 'MESH' and 'Smooth by Angle' in ob.modifiers:
ob_node_header_chunk.add_variable("flags2", _3ds_ushort(0x02))
else:
ob_node_header_chunk.add_variable("flags2", _3ds_ushort(0))
obj_node_header_chunk.add_variable("parent", _3ds_ushort(ROOT_OBJECT))
'''
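
The flags2 comment in this hunk packs several bits into one line; purely as an illustration (not code from the addon), the bit layout it describes could be written as an IntFlag:

```python
from enum import IntFlag

class NodeFlags2(IntFlag):
    DISPLAY_PATH   = 0x01
    USE_AUTOSMOOTH = 0x02  # the bit the new if-branch sets for 'Smooth by Angle' meshes
    FROZEN         = 0x04
    MOTION_BLUR    = 0x10
    MATERIAL_MORPH = 0x20
    MESH_MORPH     = 0x40

def node_flags2(has_autosmooth: bool) -> int:
    # mirrors the if/else above: 0x02 when the modifier is present, otherwise 0
    return int(NodeFlags2.USE_AUTOSMOOTH) if has_autosmooth else 0
```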
@@ -1346,6 +1348,12 @@ def make_object_node(ob, translation, rotation, scale, name_id):
obj_boundbox.add_variable("max", _3ds_point_3d(ob.bound_box[6]))
obj_node.add_subchunk(obj_boundbox)
# Add smooth angle if smooth modifier is used
if ob.type == 'MESH' and 'Smooth by Angle' in ob.modifiers:
obj_morph_smooth = _3ds_chunk(OBJECT_MORPH_SMOOTH)
obj_morph_smooth.add_variable("angle", _3ds_float(round(ob.modifiers['Smooth by Angle']['Input_1'], 6)))
obj_node.add_subchunk(obj_morph_smooth)
# Add track chunks for position, rotation, size
ob_scale = scale[name] # and collect masterscale
if parent is None or (parent.name not in name_id):
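
The new OBJECT_MORPH_SMOOTH block reads the angle straight off the modifier. A small standalone sketch of that lookup, assuming (as the diff does) that Blender 4.1's 'Smooth by Angle' node-group modifier exposes its angle, in radians, under the 'Input_1' socket identifier; `smooth_angle_degrees` is illustrative, not part of the exporter:

```python
import math
import bpy  # only meaningful inside Blender

def smooth_angle_degrees(ob):
    """Return the autosmooth angle in degrees, or None when the modifier is absent."""
    if ob.type == 'MESH' and 'Smooth by Angle' in ob.modifiers:
        angle = round(ob.modifiers['Smooth by Angle']['Input_1'], 6)  # radians, as exported
        return math.degrees(angle)
    return None
```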

View File

@@ -361,6 +361,7 @@ def process_next_chunk(context, file, previous_chunk, imported_objects, CONSTRAI
contextColor = None
contextWrapper = None
contextMatrix = None
contextReflection = None
contextTransmission = None
contextMesh_vertls = None
contextMesh_facels = None
@@ -544,6 +545,7 @@ def process_next_chunk(context, file, previous_chunk, imported_objects, CONSTRAI
contextWrapper.emission_color = contextMaterial.line_color[:3]
contextWrapper.emission_strength = contextMaterial.line_priority / 100
contextWrapper.alpha = contextMaterial.diffuse_color[3] = contextAlpha
contextWrapper.node_principled_bsdf.inputs['Coat Weight'].default_value = contextReflection
while (new_chunk.bytes_read < new_chunk.length):
read_chunk(file, temp_chunk)
@@ -907,6 +909,7 @@ def process_next_chunk(context, file, previous_chunk, imported_objects, CONSTRAI
# If material chunk
elif new_chunk.ID == MATERIAL:
contextAlpha = True
contextReflection = False
contextTransmission = False
contextColor = mathutils.Color((0.8, 0.8, 0.8))
contextMaterial = bpy.data.materials.new('Material')
@@ -998,12 +1001,22 @@ def process_next_chunk(context, file, previous_chunk, imported_objects, CONSTRAI
elif new_chunk.ID == MAT_XPFALL:
read_chunk(file, temp_chunk)
- if temp_chunk.ID == PCTI:
+ if temp_chunk.ID == PCT_SHORT:
contextTransmission = float(read_short(temp_chunk) / 100)
else:
skip_to_end(file, temp_chunk)
new_chunk.bytes_read += temp_chunk.bytes_read
elif new_chunk.ID == MAT_REFBLUR:
read_chunk(file, temp_chunk)
if temp_chunk.ID == PCT_SHORT:
contextReflection = float(read_short(temp_chunk) / 100)
elif temp_chunk.ID == PCT_FLOAT:
contextReflection = float(read_float(temp_chunk))
else:
skip_to_end(file, temp_chunk)
new_chunk.bytes_read += temp_chunk.bytes_read
elif new_chunk.ID == MAT_SELF_ILPCT:
read_chunk(file, temp_chunk)
if temp_chunk.ID == PCT_SHORT:
@@ -1027,6 +1040,7 @@ def process_next_chunk(context, file, previous_chunk, imported_objects, CONSTRAI
contextWrapper.emission_color = contextMaterial.line_color[:3]
contextWrapper.emission_strength = contextMaterial.line_priority / 100
contextWrapper.alpha = contextMaterial.diffuse_color[3] = contextAlpha
contextWrapper.node_principled_bsdf.inputs['Coat Weight'].default_value = contextReflection
contextWrapper.use_nodes = False
if shading >= 3:
contextWrapper.use_nodes = True
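
The MAT_REFBLUR branch accepts either percentage encoding. As a standalone sketch of that decoding (struct-based, not the addon's read_chunk/read_short helpers), assuming the usual 6-byte chunk header of ID plus length:

```python
import struct

PCT_SHORT, PCT_FLOAT = 0x0030, 0x0031

def read_percent(data, offset=0):
    """Decode a percentage subchunk that may be short- or float-encoded."""
    chunk_id, length = struct.unpack_from("<HI", data, offset)
    if chunk_id == PCT_SHORT:
        return struct.unpack_from("<h", data, offset + 6)[0] / 100
    if chunk_id == PCT_FLOAT:
        return float(struct.unpack_from("<f", data, offset + 6)[0])
    return None  # unknown encoding; caller skips `length` bytes, like skip_to_end above
```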

View File

@@ -5,8 +5,8 @@
bl_info = {
"name": "FBX format",
"author": "Campbell Barton, Bastien Montagne, Jens Restemeier, @Mysteryem",
"version": (5, 8, 8),
"blender": (4, 0, 0),
"version": (5, 8, 11),
"blender": (4, 1, 0),
"location": "File > Import-Export",
"description": "FBX IO meshes, UVs, vertex colors, materials, textures, cameras, lamps and actions",
"warning": "",

View File

@@ -1363,14 +1363,13 @@ def blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, f
def blen_read_geom_array_foreach_set_looptovert(mesh, blen_data, blen_attr, blen_dtype, fbx_data, stride, item_size,
descr, xform):
"""Generic fbx_layer to blen_data foreach setter for polyloop ByVertice layers.
"""Generic fbx_layer to blen_data foreach setter for face corner ByVertice layers.
blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
fbx_data must be an array.array"""
- # The fbx_data is mapped to vertices. To expand fbx_data to polygon loops, get an array of the vertex index of each
- # polygon loop that will then be used to index fbx_data
- loop_vertex_indices = np.empty(len(mesh.loops), dtype=np.uintc)
- mesh.loops.foreach_get("vertex_index", loop_vertex_indices)
- blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_data, loop_vertex_indices, stride,
+ # The fbx_data is mapped to vertices. To expand fbx_data to face corners, get an array of the vertex index of each
+ # face corner that will then be used to index fbx_data.
+ corner_vertex_indices = MESH_ATTRIBUTE_CORNER_VERT.to_ndarray(mesh.attributes)
+ blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_data, corner_vertex_indices, stride,
item_size, descr, xform)
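
What to_ndarray feeds into the indexed setter is plain fancy indexing: one vertex index per face corner expands a ByVertice layer to per-corner values. A self-contained numpy example of that expansion (sample data, not from the importer):

```python
import numpy as np

vertex_values = np.array([[0.0], [0.25], [0.5], [1.0]])         # one row per vertex (FBX ByVertice)
corner_vertex_indices = np.array([0, 1, 2, 2, 3, 0], np.uintc)  # vertex index of each face corner

corner_values = vertex_values[corner_vertex_indices]  # expanded to one row per corner
assert corner_values.shape == (6, 1)
```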
@@ -1747,11 +1746,13 @@ def blen_read_geom_layer_normal(fbx_obj, mesh, xform=None):
poly_loop_totals = np.empty(len(mesh.polygons), dtype=np.uintc)
mesh.polygons.foreach_get("loop_total", poly_loop_totals)
loop_normals = np.repeat(bdata, poly_loop_totals, axis=0)
mesh.attributes["temp_custom_normals"].data.foreach_set("normal", loop_normals.ravel())
mesh.attributes["temp_custom_normals"].data.foreach_set("vector", loop_normals.ravel())
elif blen_data_type == "Vertices":
# Note: Currently unreachable because `blen_read_geom_array_mapped_polyloop` covers all the supported
# import cases covered by `blen_read_geom_array_mapped_vert`.
# We have to copy vnors to lnors! Far from elegant, but simple.
loop_vertex_indices = MESH_ATTRIBUTE_CORNER_VERT.to_ndarray(mesh.attributes)
mesh.attributes["temp_custom_normals"].data.foreach_set("normal", bdata[loop_vertex_indices].ravel())
mesh.attributes["temp_custom_normals"].data.foreach_set("vector", bdata[loop_vertex_indices].ravel())
return True
blen_read_geom_array_error_mapping("normal", fbx_layer_mapping)
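
The "normal" to "vector" rename matters because "temp_custom_normals" is a FLOAT_VECTOR attribute, and such attributes expose their values through the "vector" member. A hedged sketch of writing per-corner vectors that way (`fill_corner_vectors` is illustrative, not importer code):

```python
import numpy as np
import bpy  # only meaningful inside Blender

def fill_corner_vectors(mesh, name, per_corner):
    """Write an (N, 3) array into a per-corner FLOAT_VECTOR attribute via foreach_set("vector", ...)."""
    attr = mesh.attributes.get(name) or mesh.attributes.new(name, 'FLOAT_VECTOR', 'CORNER')
    attr.data.foreach_set("vector", np.asarray(per_corner, dtype=np.float32).ravel())
```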