New Addon: Import Autodesk .max #105013

Closed
Sebastian Sille wants to merge 136 commits from (deleted):nrgsille-import_max into main

Showing only changes of commit 876fe8f278


@@ -30,7 +30,6 @@ bl_info = {
import io, re
import os, sys, zlib
import struct, array
import time, datetime
import math, mathutils
import bpy, bpy_extras
from bpy_extras.io_utils import axis_conversion
@@ -170,23 +169,9 @@ FATSECT = 0xFFFFFFFD # (-3) denotes a FAT sector in a FAT
ENDOFCHAIN = 0xFFFFFFFE # (-2) end of a virtual stream chain
FREESECT = 0xFFFFFFFF # (-1) unallocated sector
STGTY_EMPTY = 0 # empty directory entry
STGTY_STORAGE = 1 # element is a storage object
STGTY_STREAM = 2 # element is a stream object
STGTY_LOCKBYTES = 3 # element is an ILockBytes object
STGTY_PROPERTY = 4 # element is an IPropertyStorage object
STGTY_ROOT = 5 # element is a root storage
VT_EMPTY=0; VT_NULL=1; VT_I2=2; VT_I4=3; VT_R4=4; VT_R8=5; VT_CY=6;
VT_DATE=7; VT_BSTR=8; VT_DISPATCH=9; VT_ERROR=10; VT_BOOL=11;
VT_VARIANT=12; VT_UNKNOWN=13; VT_DECIMAL=14; VT_I1=16; VT_UI1=17;
VT_UI2=18; VT_UI4=19; VT_I8=20; VT_UI8=21; VT_INT=22; VT_UINT=23;
VT_VOID=24; VT_HRESULT=25; VT_PTR=26; VT_SAFEARRAY=27; VT_CARRAY=28;
VT_USERDEFINED=29; VT_LPSTR=30; VT_LPWSTR=31; VT_FILETIME=64;
VT_BLOB=65; VT_STREAM=66; VT_STORAGE=67; VT_STREAMED_OBJECT=68;
VT_STORED_OBJECT=69; VT_BLOB_OBJECT=70; VT_CF=71; VT_CLSID=72;
VT_VECTOR=0x1000;
TYP_NAME = 0x0962
INVALID_NAME = re.compile('^[0-9].*')
UNPACK_BOX_DATA = struct.Struct('<HIHHBff').unpack_from # Index, int, short, short, byte, float, Length
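
Note (not part of the diff): the sector sentinels above follow the MS-CFB compound-file layout, where a stream is read by following FAT entries until ENDOFCHAIN. A minimal sketch of that chain walk with a toy FAT; walk_chain and toy_fat are hypothetical illustration, not addon code:

# Illustrative only: redeclares the sentinels defined above for self-containment.
ENDOFCHAIN = 0xFFFFFFFE
FREESECT = 0xFFFFFFFF

def walk_chain(fat, start):
    # Yield the sector indices of one stream chain from a FAT table.
    sect = start
    while sect != ENDOFCHAIN:
        if sect == FREESECT or sect >= len(fat):
            raise ValueError("broken sector chain")
        yield sect
        sect = fat[sect]

toy_fat = [3, ENDOFCHAIN, FREESECT, 1]  # sector 0 -> 3 -> 1 -> end
assert list(walk_chain(toy_fat, 0)) == [0, 3, 1]
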
@@ -692,71 +677,6 @@ class ImportMaxFile:
    def get_rootentry_name(self):
        return self.root.name

    def getproperties(self, filename):
        fp = self.openstream(filename)
        data = {}
        try:
            stream = fp.read(28)
            clsid = _clsid(stream[8:24])
            stream = fp.read(20)
            fmtid = _clsid(stream[:16])
            fp.seek(i32(stream, 16))
            stream = b"****" + fp.read(i32(fp.read(4)) - 4)
            num_props = i32(stream, 4)
        except BaseException as exc:
            return data
        num_props = min(num_props, int(len(stream) / 8))
        for i in range(num_props):
            property_id = 0
            try:
                property_id = i32(stream, 8 + i*8)
                offset = i32(stream, 12 + i*8)
                property_type = i32(stream, offset)
                if property_type == VT_I2: # 16-bit signed integer
                    value = i16(stream, offset + 4)
                    if value >= 32768:
                        value = value - 65536
                elif property_type == VT_UI2: # 2-byte unsigned integer
                    value = i16(stream, offset + 4)
                elif property_type in (VT_I4, VT_INT, VT_ERROR):
                    value = i32(stream, offset + 4)
                elif property_type in (VT_UI4, VT_UINT): # 4-byte unsigned integer
                    value = i32(stream, offset + 4)
                elif property_type in (VT_BSTR, VT_LPSTR):
                    count = i32(stream, offset + 4)
                    value = stream[offset + 8:offset + 8 + count - 1]
                    value = value.replace(b'\x00', b'')
                elif property_type == VT_BLOB:
                    count = i32(stream, offset + 4)
                    value = stream[offset + 8:offset + 8 + count]
                elif property_type == VT_LPWSTR:
                    count = i32(stream, offset + 4)
                    value = self._decode_utf16_str(stream[offset + 8:offset + 8 + count * 2])
                elif property_type == VT_FILETIME:
                    value = int(i32(stream, offset + 4)) + (int(i32(stream, offset + 8)) << 32)
                    if property_id != 10:
                        _FILETIME_null_date = datetime.datetime(1601, 1, 1, 0, 0, 0)
                        value = _FILETIME_null_date + datetime.timedelta(microseconds=value // 10)
                    else:
                        value = value // 10000000
                elif property_type == VT_UI1: # 1-byte unsigned integer
                    value = i8(stream[offset + 4])
                elif property_type == VT_CLSID:
                    value = _clsid(stream[offset + 4:offset + 20])
                elif property_type == VT_CF:
                    count = i32(stream, offset + 4)
                    value = stream[offset + 8:offset + 8 + count]
                elif property_type == VT_BOOL:
                    value = bool(i16(stream, offset + 4))
                else:
                    value = None
                data[property_id] = value
            except BaseException as exc:
                print('Error while parsing property_id:', exc)
        return data
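
For reference, the getproperties() helper removed in this commit parses an OLE property-set stream such as SummaryInformation. A hedged usage sketch, assuming a valid .max path ("example.max" is a placeholder); the property IDs follow the standard SummaryInformation numbering (2 = title, 4 = author, 13 = last saved):

# Sketch only, not part of the diff.
maxfile = ImportMaxFile("example.max")
summary = maxfile.getproperties('\x05SummaryInformation')
print("Title:", summary.get(2))        # VT_LPSTR -> bytes, or None if absent
print("Author:", summary.get(4))
print("Last saved:", summary.get(13))  # VT_FILETIME -> datetime.datetime
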
###################
# DATA PROCESSING #
@@ -917,7 +837,7 @@ class ChunkReader():
        long, ofst = get_long(data, ofst)
        if (short == 0x8B1F):
            short, ofst = get_long(data, ofst)
            if (short == 0xB000000):
            if (short in (0xB000000, 0xa040000)):
                data = zlib.decompress(data, zlib.MAX_WBITS|32)
        print(" reading '%s'..." %self.name, len(data))
        while offset < len(data):
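
Context for the changed check: 0x8B1F is the gzip signature bytes 1F 8B read as a little-endian short, and wbits = zlib.MAX_WBITS | 32 lets zlib accept either a zlib or a gzip header. A standalone sketch of that decompression path (the sample payload is made up):

import gzip, zlib

packed = gzip.compress(b"example chunk data")        # stand-in for a packed stream
magic = int.from_bytes(packed[:2], "little")
assert magic == 0x8B1F                               # gzip signature 1F 8B

plain = zlib.decompress(packed, zlib.MAX_WBITS | 32) # auto-detect zlib/gzip header
assert plain == b"example chunk data"
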
@@ -1597,8 +1517,6 @@ def read_scene(context, maxfile, filename, mscale, usemat, uvmesh, transform):
def read(context, filename, mscale, usemat, uvmesh, transform):
    if (is_maxfile(filename)):
        maxfile = ImportMaxFile(filename)
        prop = maxfile.getproperties('\x05DocumentSummaryInformation')
        prop = maxfile.getproperties('\x05SummaryInformation')
        read_class_data(maxfile, filename)
        read_config(maxfile, filename)
        read_directory(maxfile, filename)
@@ -1611,10 +1529,12 @@ def read(context, filename, mscale, usemat, uvmesh, transform):
def load(operator, context, filepath="", scale_objects=1.0, use_material=True,
         use_uv_mesh=False, use_apply_matrix=False, global_matrix=None):
    context.window.cursor_set('WAIT')
    mscale = mathutils.Matrix.Scale(scale_objects, 4)
    if global_matrix is not None:
        mscale = global_matrix @ mscale
    read(context, filepath, mscale, usemat=use_material, uvmesh=use_uv_mesh, transform=use_apply_matrix)
    context.window.cursor_set('DEFAULT')
    return {'FINISHED'}
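
Outside the diff, a hedged sketch of how a caller might drive load(), building global_matrix with the axis_conversion helper imported at the top of the file; the orientation values, file path, and the None operator argument are assumptions for illustration, not taken from this add-on's operator:

# Hypothetical call site, e.g. from a test script run inside Blender.
global_matrix = axis_conversion(from_forward='-Z', from_up='Y',
                                to_forward='-Y', to_up='Z').to_4x4()
load(None, bpy.context, filepath="example.max", scale_objects=1.0,
     use_material=True, use_uv_mesh=False, use_apply_matrix=False,
     global_matrix=global_matrix)
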