From 500e09268abe32a278e9e6bba78a5d783f7a7bdc Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Wed, 22 Nov 2023 23:20:24 +0100 Subject: [PATCH 01/38] Uploaded: io_import_max Uploaded new addon for importing meshes and materials from Autodesk .max files --- io_import_max.py | 1498 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 1498 insertions(+) create mode 100644 io_import_max.py diff --git a/io_import_max.py b/io_import_max.py new file mode 100644 index 0000000..175afc3 --- /dev/null +++ b/io_import_max.py @@ -0,0 +1,1498 @@ +# SPDX-FileCopyrightText: 2023 Sebastian Schrand +# +# SPDX-License-Identifier: GPL-2.0-or-later +# Import is based on using information from olefile IO sourcecode +# and the FreeCAD Autodesk 3DS Max importer ImportMAX +# +# olefile (formerly OleFileIO_PL) is copyright (c) 2005-2018 Philippe Lagadec +# (https://www.decalage.info) +# +# ImportMAX is copyright (c) 2017-2022 Jens M. Plonka +# (https://www.github.com/jmplonka/Importer3D) + + +bl_info = { + "name": "Import Autodesk MAX (.max)", + "author": "Sebastian Sille, Philippe Lagadec, Jens M. 
Plonka", + "version": (1, 0, 0), + "blender": (4, 0, 0), + "location": "File > Import", + "description": "Import 3DSMAX meshes & materials", + "warning": "", + "filepath_url": "", + "category": "Import-Export"} + + +################## +# IMPORT MODULES # +################## + +import io, re +import os, sys, zlib +import struct, array +import time, datetime +import math, mathutils +import bpy, bpy_extras +from bpy_extras.io_utils import axis_conversion +from bpy_extras.io_utils import orientation_helper + +@orientation_helper(axis_forward='Y', axis_up='Z') + +### IMPORT OPERATOR ### +class Import_max(bpy.types.Operator, bpy_extras.io_utils.ImportHelper): + """Import Autodesk MAX""" + bl_idname = "import_autodesk.max" + bl_label = "Import Autodesk MAX (.max)" + bl_options = {'PRESET', 'UNDO'} + + filename_ext = ".max" + filter_glob: bpy.props.StringProperty(default="*.max", options={'HIDDEN'},) + + def execute(self, context): + keywords = self.as_keywords(ignore=("axis_forward", "axis_up", "filter_glob")) + global_matrix = axis_conversion(from_forward=self.axis_forward, from_up=self.axis_up,).to_4x4() + keywords["global_matrix"] = global_matrix + + return load(self, context, **keywords) + +### REGISTER ### +def menu_func(self, context): + self.layout.operator(Import_max.bl_idname, text="Autodesk MAX (.max)") + +def register(): + bpy.utils.register_class(Import_max) + bpy.types.TOPBAR_MT_file_import.append(menu_func) + +def unregister(): + bpy.types.TOPBAR_MT_file_import.remove(menu_func) + bpy.utils.unregister_class(Import_max) + + +################### +# DATA STRUCTURES # +################### + +MAGIC = b'\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1' +WORD_CLSID = "00020900-0000-0000-C000-000000000046" + +MAXREGSECT = 0xFFFFFFFA # (-6) maximum SECT +DIFSECT = 0xFFFFFFFC # (-4) denotes a DIFAT sector in a FAT +FATSECT = 0xFFFFFFFD # (-3) denotes a FAT sector in a FAT +ENDOFCHAIN = 0xFFFFFFFE # (-2) end of a virtual stream chain +FREESECT = 0xFFFFFFFF # (-1) unallocated sector 
+MAXREGSID = 0xFFFFFFFA # (-6) maximum directory entry ID +NOSTREAM = 0xFFFFFFFF # (-1) unallocated directory entry +UNKNOWN_SIZE = 0x7FFFFFFF +MIN_FILE_SIZE = 1536 + +STGTY_EMPTY = 0 # empty directory entry +STGTY_STORAGE = 1 # element is a storage object +STGTY_STREAM = 2 # element is a stream object +STGTY_LOCKBYTES = 3 # element is an ILockBytes object +STGTY_PROPERTY = 4 # element is an IPropertyStorage object +STGTY_ROOT = 5 # element is a root storage + +VT_EMPTY=0; VT_NULL=1; VT_I2=2; VT_I4=3; VT_R4=4; VT_R8=5; VT_CY=6; +VT_DATE=7; VT_BSTR=8; VT_DISPATCH=9; VT_ERROR=10; VT_BOOL=11; +VT_VARIANT=12; VT_UNKNOWN=13; VT_DECIMAL=14; VT_I1=16; VT_UI1=17; +VT_UI2=18; VT_UI4=19; VT_I8=20; VT_UI8=21; VT_INT=22; VT_UINT=23; +VT_VOID=24; VT_HRESULT=25; VT_PTR=26; VT_SAFEARRAY=27; VT_CARRAY=28; +VT_USERDEFINED=29; VT_LPSTR=30; VT_LPWSTR=31; VT_FILETIME=64; +VT_BLOB=65; VT_STREAM=66; VT_STORAGE=67; VT_STREAMED_OBJECT=68; +VT_STORED_OBJECT=69; VT_BLOB_OBJECT=70; VT_CF=71; VT_CLSID=72; +VT_VECTOR=0x1000; + +TYP_NAME = 0x0962 +INVALID_NAME = re.compile('^[0-9].*') +UNPACK_BOX_DATA = struct.Struct('= MIN_FILE_SIZE: + header = filename[:len(MAGIC)] + else: + with open(filename, 'rb') as fp: + header = fp.read(len(MAGIC)) + if header == MAGIC: + return True + else: + return False + + +class MaxStream(io.BytesIO): + """Returns an instance of the BytesIO class as read-only file object.""" + def __init__(self, fp, sect, size, offset, sectorsize, fat, filesize): + unknown_size = False + if size == UNKNOWN_SIZE: + size = len(fat) * sectorsize + unknown_size = True + nb_sectors = (size + (sectorsize-1)) // sectorsize + + data = [] + for i in range(nb_sectors): + try: + fp.seek(offset + sectorsize * sect) + except: + break + sector_data = fp.read(sectorsize) + data.append(sector_data) + try: + sect = fat[sect] & 0xFFFFFFFF + except IndexError: + break + data = b"".join(data) + if len(data) >= size: + data = data[:size] + self.size = size + else: + self.size = len(data) + 
io.BytesIO.__init__(self, data) + + +class MaxFileDirEntry: + """Directory Entry for a stream or storage.""" + STRUCT_DIRENTRY = '<64sHBBIII16sIQQIII' + DIRENTRY_SIZE = 128 + assert struct.calcsize(STRUCT_DIRENTRY) == DIRENTRY_SIZE + + def __init__(self, entry, sid, maxfile): + self.sid = sid + self.maxfile = maxfile + self.kids = [] + self.kids_dict = {} + self.used = False + ( + self.name_raw, + self.namelength, + self.entry_type, + self.color, + self.sid_left, + self.sid_right, + self.sid_child, + clsid, + self.dwUserFlags, + self.createTime, + self.modifyTime, + self.isectStart, + self.sizeLow, + self.sizeHigh + ) = struct.unpack(MaxFileDirEntry.STRUCT_DIRENTRY, entry) + + if self.namelength > 64: + self.namelength = 64 + self.name_utf16 = self.name_raw[:(self.namelength - 2)] + self.name = maxfile._decode_utf16_str(self.name_utf16) + # print('DirEntry SID=%d: %s' % (self.sid, repr(self.name))) + if maxfile.sectorsize == 512: + self.size = self.sizeLow + else: + self.size = self.sizeLow + (int(self.sizeHigh) << 32) + self.clsid = _clsid(clsid) + self.is_minifat = False + if self.entry_type in (STGTY_ROOT, STGTY_STREAM) and self.size > 0: + if self.size < maxfile.minisectorcutoff \ + and self.entry_type == STGTY_STREAM: # only streams can be in MiniFAT + self.is_minifat = True + else: + self.is_minifat = False + maxfile._check_duplicate_stream(self.isectStart, self.is_minifat) + self.sect_chain = None + + def build_sect_chain(self, maxfile): + if self.sect_chain: + return + if self.entry_type not in (STGTY_ROOT, STGTY_STREAM) or self.size == 0: + return + self.sect_chain = list() + if self.is_minifat and not maxfile.minifat: + maxfile.loadminifat() + next_sect = self.isectStart + while next_sect != ENDOFCHAIN: + self.sect_chain.append(next_sect) + if self.is_minifat: + next_sect = maxfile.minifat[next_sect] + else: + next_sect = maxfile.fat[next_sect] + + def build_storage_tree(self): + if self.sid_child != NOSTREAM: + self.append_kids(self.sid_child) + 
self.kids.sort() + + def append_kids(self, child_sid): + if child_sid == NOSTREAM: + return + else: + child = self.maxfile._load_direntry(child_sid) + if child.used: + return + child.used = True + self.append_kids(child.sid_left) + name_lower = child.name.lower() + self.kids.append(child) + self.kids_dict[name_lower] = child + self.append_kids(child.sid_right) + child.build_storage_tree() + + def __eq__(self, other): + return self.name == other.name + + def __lt__(self, other): + return self.name < other.name + + def __ne__(self, other): + return not self.__eq__(other) + + def __le__(self, other): + return self.__eq__(other) or self.__lt__(other) + + +class ImportMaxFile: + """Representing an interface for importing .max files.""" + def __init__(self, filename=None, write_mode=False, debug=False): + self.write_mode = write_mode + self._filesize = None + self.byte_order = None + self.directory_fp = None + self.direntries = None + self.dll_version = None + self.fat = None + self.first_difat_sector = None + self.first_dir_sector = None + self.first_mini_fat_sector = None + self.fp = None + self.header_clsid = None + self.header_signature = None + self.metadata = None + self.mini_sector_shift = None + self.mini_sector_size = None + self.mini_stream_cutoff_size = None + self.minifat = None + self.minifatsect = None + self.minisectorcutoff = None + self.minisectorsize = None + self.ministream = None + self.minor_version = None + self.nb_sect = None + self.num_difat_sectors = None + self.num_dir_sectors = None + self.num_fat_sectors = None + self.num_mini_fat_sectors = None + self.reserved1 = None + self.reserved2 = None + self.root = None + self.sector_shift = None + self.sector_size = None + self.transaction_signature_number = None + if filename: + self.open(filename, write_mode=write_mode) + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def _decode_utf16_str(self, utf16_str, errors='replace'): + unicode_str = 
utf16_str.decode('UTF-16LE', errors) + return unicode_str + + def open(self, filename, write_mode=False): + self.write_mode = write_mode + if hasattr(filename, 'read'): + self.fp = filename + elif isinstance(filename, bytes) and len(filename) >= MIN_FILE_SIZE: + self.fp = io.BytesIO(filename) + else: + if self.write_mode: + mode = 'r+b' + else: + mode = 'rb' + self.fp = open(filename, mode) + filesize = 0 + self.fp.seek(0, os.SEEK_END) + try: + filesize = self.fp.tell() + finally: + self.fp.seek(0) + self._filesize = filesize + self._used_streams_fat = [] + self._used_streams_minifat = [] + header = self.fp.read(512) + + fmt_header = '<8s16sHHHHHHLLLLLLLLLL' + header_size = struct.calcsize(fmt_header) + header1 = header[:header_size] + ( + self.header_signature, + self.header_clsid, + self.minor_version, + self.dll_version, + self.byte_order, + self.sector_shift, + self.mini_sector_shift, + self.reserved1, + self.reserved2, + self.num_dir_sectors, + self.num_fat_sectors, + self.first_dir_sector, + self.transaction_signature_number, + self.mini_stream_cutoff_size, + self.first_mini_fat_sector, + self.num_mini_fat_sectors, + self.first_difat_sector, + self.num_difat_sectors + ) = struct.unpack(fmt_header, header1) + + self.sector_size = 2**self.sector_shift + self.mini_sector_size = 2**self.mini_sector_shift + if self.mini_stream_cutoff_size != 0x1000: + self.mini_stream_cutoff_size = 0x1000 + self.nb_sect = ((filesize + self.sector_size-1) // self.sector_size) - 1 + + # file clsid + self.header_clsid = _clsid(header[8:24]) + self.sectorsize = self.sector_size #1 << i16(header, 30) + self.minisectorsize = self.mini_sector_size #1 << i16(header, 32) + self.minisectorcutoff = self.mini_stream_cutoff_size # i32(header, 56) + self._check_duplicate_stream(self.first_dir_sector) + if self.num_mini_fat_sectors: + self._check_duplicate_stream(self.first_mini_fat_sector) + if self.num_difat_sectors: + self._check_duplicate_stream(self.first_difat_sector) + + # Load file 
allocation tables + self.loadfat(header) + self.loaddirectory(self.first_dir_sector) + self.minifatsect = self.first_mini_fat_sector + + def close(self): + self.fp.close() + + def _check_duplicate_stream(self, first_sect, minifat=False): + if minifat: + used_streams = self._used_streams_minifat + else: + if first_sect in (DIFSECT,FATSECT,ENDOFCHAIN,FREESECT): + return + used_streams = self._used_streams_fat + if first_sect in used_streams: + pass + else: + used_streams.append(first_sect) + + def sector_array(self, sect): + ary = array.array('I', sect) + if sys.byteorder == 'big': + ary.byteswap() + return ary + + def loadfat_sect(self, sect): + if isinstance(sect, array.array): + fat1 = sect + else: + fat1 = self.sector_array(sect) + isect = None + for isect in fat1: + isect = isect & 0xFFFFFFFF + if isect == ENDOFCHAIN or isect == FREESECT: + break + sector = self.getsect(isect) + nextfat = self.sector_array(sector) + self.fat = self.fat + nextfat + return isect + + def loadfat(self, header): + sect = header[76:512] + self.fat = array.array('I') + self.loadfat_sect(sect) + if self.num_difat_sectors != 0: + nb_difat_sectors = (self.sectorsize//4) - 1 + nb_difat = (self.num_fat_sectors - 109 + nb_difat_sectors - 1) // nb_difat_sectors + isect_difat = self.first_difat_sector + for i in range(nb_difat): + sector_difat = self.getsect(isect_difat) + difat = self.sector_array(sector_difat) + self.loadfat_sect(difat[:nb_difat_sectors]) + isect_difat = difat[nb_difat_sectors] + if len(self.fat) > self.nb_sect: + self.fat = self.fat[:self.nb_sect] + + def loadminifat(self): + stream_size = self.num_mini_fat_sectors * self.sector_size + nb_minisectors = (self.root.size + self.mini_sector_size - 1) // self.mini_sector_size + used_size = nb_minisectors * 4 + sect = self._open(self.minifatsect, stream_size, force_FAT=True).read() + self.minifat = self.sector_array(sect) + self.minifat = self.minifat[:nb_minisectors] + + def getsect(self, sect): + try: + 
self.fp.seek(self.sectorsize * (sect + 1)) + except: + print('MAX sector index out of range') + sector = self.fp.read(self.sectorsize) + return sector + + def loaddirectory(self, sect): + self.directory_fp = self._open(sect, force_FAT=True) + max_entries = self.directory_fp.size // 128 + self.direntries = [None] * max_entries + root_entry = self._load_direntry(0) + self.root = self.direntries[0] + self.root.build_storage_tree() + + def _load_direntry (self, sid): + if self.direntries[sid] is not None: + return self.direntries[sid] + self.directory_fp.seek(sid * 128) + entry = self.directory_fp.read(128) + self.direntries[sid] = MaxFileDirEntry(entry, sid, self) + return self.direntries[sid] + + def _open(self, start, size = UNKNOWN_SIZE, force_FAT=False): + if size < self.minisectorcutoff and not force_FAT: + if not self.ministream: + self.loadminifat() + size_ministream = self.root.size + self.ministream = self._open(self.root.isectStart, + size_ministream, force_FAT=True) + return MaxStream(fp=self.ministream, sect=start, size=size, + offset=0, sectorsize=self.minisectorsize, + fat=self.minifat, filesize=self.ministream.size) + else: + return MaxStream(fp=self.fp, sect=start, size=size, + offset=self.sectorsize, + sectorsize=self.sectorsize, fat=self.fat, + filesize=self._filesize) + + def _find(self, filename): + if isinstance(filename, str): + filename = filename.split('/') + node = self.root + for name in filename: + for kid in node.kids: + if kid.name.lower() == name.lower(): + break + node = kid + return node.sid + + def openstream(self, filename): + sid = self._find(filename) + entry = self.direntries[sid] + return self._open(entry.isectStart, entry.size) + + def get_type(self, filename): + try: + sid = self._find(filename) + entry = self.direntries[sid] + return entry.entry_type + except: + return False + + def getclsid(self, filename): + sid = self._find(filename) + entry = self.direntries[sid] + return entry.clsid + + def get_size(self, filename): + sid 
= self._find(filename) + entry = self.direntries[sid] + return entry.size + + def get_rootentry_name(self): + return self.root.name + + def getproperties(self, filename, convert_time=False, no_conversion=None): + if no_conversion == None: + no_conversion = [] + streampath = filename + if not isinstance(streampath, str): + streampath = '/'.join(streampath) + fp = self.openstream(filename) + data = {} + try: + stream = fp.read(28) + clsid = _clsid(stream[8:24]) + stream = fp.read(20) + fmtid = _clsid(stream[:16]) + fp.seek(i32(stream, 16)) + stream = b"****" + fp.read(i32(fp.read(4)) - 4) + num_props = i32(stream, 4) + except BaseException as exc: + return data + + num_props = min(num_props, int(len(stream) / 8)) + for i in range(num_props): + property_id = 0 + try: + property_id = i32(stream, 8 + i*8) + offset = i32(stream, 12 + i*8) + property_type = i32(stream, offset) + if property_type == VT_I2: # 16-bit signed integer + value = i16(stream, offset + 4) + if value >= 32768: + value = value - 65536 + elif property_type == VT_UI2: # 2-byte unsigned integer + value = i16(stream, offset + 4) + elif property_type in (VT_I4, VT_INT, VT_ERROR): + value = i32(stream, offset + 4) + elif property_type in (VT_UI4, VT_UINT): # 4-byte unsigned integer + value = i32(stream, offset + 4) + elif property_type in (VT_BSTR, VT_LPSTR): + count = i32(stream, offset + 4) + value = stream[offset + 8:offset + 8 + count - 1] + value = value.replace(b'\x00', b'') + elif property_type == VT_BLOB: + count = i32(stream, offset + 4) + value = stream[offset + 8:offset + 8 + count] + elif property_type == VT_LPWSTR: + count = i32(stream, offset + 4) + value = self._decode_utf16_str(stream[offset + 8:offset + 8 + count * 2]) + elif property_type == VT_FILETIME: + value = int(i32(stream, offset + 4)) + (int(i32(stream, offset + 8)) << 32) + if convert_time and property_id not in no_conversion: + _FILETIME_null_date = datetime.datetime(1601, 1, 1, 0, 0, 0) + value = _FILETIME_null_date + 
datetime.timedelta(microseconds=value // 10) + else: + value = value // 10000000 + elif property_type == VT_UI1: # 1-byte unsigned integer + value = i8(stream[offset + 4]) + elif property_type == VT_CLSID: + value = _clsid(stream[offset + 4:offset + 20]) + elif property_type == VT_CF: + count = i32(stream, offset + 4) + value = stream[offset + 8:offset + 8 + count] + elif property_type == VT_BOOL: + value = bool(i16(stream, offset + 4)) + else: + value = None + + data[property_id] = value + except BaseException as exc: + print('Error while parsing property_id:', exc) + return data + + +class MaxChunk(): + """Representing a chunk of a .max file.""" + def __init__(self, types, size, level, number): + self.number = number + self.types = types + self.level = level + self.parent = None + self.previous = None + self.next = None + self.size = size + self.unknown = True + self.format = None + self.data = None + self.resolved = False + + def __str__(self): + if (self.unknown == True): + return "%s[%4x] %04X: %s" %("" * self.level, self.number, self.types, ":".join("%02x"%(c) for c in self.data)) + return "%s[%4x] %04X: %s=%s" %("" * self.level, self.number, self.types, self.format, self.data) + + +class ByteArrayChunk(MaxChunk): + """A byte array of a .max chunk.""" + def __init__(self, types, data, level, number): + MaxChunk.__init__(self, types, data, level, number) + + def set(self, data, name, fmt, start, end): + try: + self.data = struct.unpack(fmt, data[start:end]) + self.format = name + self.unknown = False + except Exception as exc: + self.data = data + # print('StructError:', exc, name) + + def set_string(self, data): + try: + self.data = data.decode('UTF-16LE') + self.format = "Str16" + self.unknown = False + except: + self.data = data + + def set_le16_string(self, data): + try: + long, offset = get_long(data, 0) + self.data = data[offset:offset + l * 2].decode('utf-16-le') + if (self.data[-1] == b'\0'): + self.data = self.data[0:-1] + self.format = "LStr16" + 
self.unknown = False + except: + self.data = data + + def set_data(self, data): + if (self.types in [0x0340, 0x4001, 0x0456, 0x0962]): + self.set_string(data) + elif (self.types in [0x2034, 0x2035]): + self.set(data, "ints", '<'+'I'*int(len(data) / 4), 0, len(data)) + elif (self.types in [0x2501, 0x2503, 0x2504, 0x2505, 0x2511]): + self.set(data, "floats", '<'+'f'*int(len(data) / 4), 0, len(data)) + elif (self.types == 0x2510): + self.set(data, "struct", '<'+'f'*int(len(data) / 4 - 1) + 'I', 0, len(data)) + elif (self.types == 0x0100): + self.set(data, "float", ' 3): + return get_rotation(refs[0]) + elif (uid == 0x3A90416731381913): # Rotation Wire + return get_rotation(get_references(pos)[0]) + if (rotation): + mtx = mathutils.Matrix.Rotation(rotation.angle, 4, rotation.axis) + return mtx + + +def get_scale(pos): + mtx = mathutils.Matrix.Identity(4) + if (pos): + uid = get_guid(pos) + if (uid == 0x2010): # Bezier Scale + scale = pos.get_first(0x2501) + if (scale is None): + scale = pos.get_first(0x2505) + pos = scale.data + elif (uid == 0x442315): # TCB Zoom + scale = pos.get_first(0x2501) + if (scale is None): + scale = pos.get_first(0x2505) + pos = scale.data + elif (uid == 0xFEEE238B118F7C01): # ScaleXYZ + pos = get_point_3d(pos, 1.0) + else: + return mtx + mtx = mathutils.Matrix.Diagonal(pos[:3]).to_4x4() + return mtx + + +def create_matrix(prc): + mtx = mathutils.Matrix.Identity(4) + pos = rot = scl = None + uid = get_guid(prc) + if (uid == 0x2005): # Position/Rotation/Scale + pos = get_position(get_references(prc)[0]) + rot = get_rotation(get_references(prc)[1]) + scl = get_scale(get_references(prc)[2]) + elif (uid == 0x9154): # BipSlave Control + biped_sub_anim = get_references(prc)[2] + refs = get_references(biped_sub_anim) + scl = get_scale(get_references(refs[1])[0]) + rot = get_rotation(get_references(refs[2])[0]) + pos = get_position(get_references(refs[3])[0]) + if (pos is not None): + mtx = pos @ mtx + if (rot is not None): + mtx = rot @ mtx + if 
(scl is not None): + mtx = scl @ mtx + return mtx + + +def get_property(properties, idx): + for child in properties.children: + if (child.types & 0x100E): + if (get_short(child.data, 0)[0] == idx): + return child + return None + + +def get_color(colors, idx): + prop = get_property(colors, idx) + if (prop is not None): + siz = 15 if (len(prop.data) > 23) else 11 + col, offset = get_floats(prop.data, siz, 3) + return (col[0], col[1], col[2]) + return None + + +def get_value(colors, idx): + prop = get_property(colors, idx) + if (prop is not None): + val, offset = get_float(prop.data, 15) + return val + return None + + +def get_standard_material(refs): + material = None + try: + if (len(refs) > 2): + colors = refs[2] + parameters = get_references(colors)[0] + material = Material() + material.set('ambient', get_color(parameters, 0x00)) + material.set('diffuse', get_color(parameters, 0x01)) + material.set('specular', get_color(parameters, 0x02)) + material.set('emissive', get_color(parameters, 0x08)) + material.set('shinines', get_value(parameters, 0x0A)) + transparency = refs[4] # ParameterBlock2 + material.set('transparency', get_value(transparency, 0x02)) + except: + pass + return material + + +def get_vray_material(vry): + material = Material() + try: + material.set('diffuse', get_color(vry, 0x01)) + material.set('ambient', get_color(vry, 0x02)) + material.set('specular', get_color(vry, 0x05)) + material.set('emissive', get_color(vry, 0x05)) + material.set('shinines', get_value(vry, 0x0B)) + material.set('transparency', get_value(vry, 0x02)) + except: + pass + return material + + +def get_arch_material(ad): + material = Material() + try: + material.set('diffuse', get_color(ad, 0x1A)) + material.set('ambient', get_color(ad, 0x02)) + material.set('specular', get_color(ad, 0x05)) + material.set('emissive', get_color(ad, 0x05)) + material.set('shinines', get_value(ad, 0x0B)) + material.set('transparency', get_value(ad, 0x02)) + except: + pass + return material + + +def 
adjust_material(obj, mat): + material = None + if (mat is not None): + uid = get_guid(mat) + if (uid == 0x0002): # Standard + refs = get_references(mat) + material = get_standard_material(refs) + elif (uid == 0x0200): # Multi/Sub-Object + refs = get_references(mat) + material = adjust_material(obj, refs[-1]) + elif (uid == 0x7034695C37BF3F2F): # VRayMtl + refs = get_reference(mat) + material = get_vray_material(refs[1]) + elif (uid == 0x4A16365470B05735): # Arch + refs = get_references(mat) + material = get_arch_material(refs[0]) + if (obj is not None) and (material is not None): + objMaterial = bpy.data.materials.new(get_class_name(mat)) + obj.data.materials.append(objMaterial) + objMaterial.diffuse_color[:3] = material.get('diffuse', (0.8,0.8,0.8)) + objMaterial.specular_color[:3] = material.get('specular', (1.0,1.0,1.0)) + objMaterial.roughness = 1.0 - material.get('shinines', 0.6) + + +def create_shape(context, pts, indices, node, key, prc, mat): + name = node.get_first(TYP_NAME).data + shape = bpy.data.meshes.new(name) + if (key is not None): + name = "%s_%d" %(name, key) + mtx = create_matrix(prc) + data = [] + if (pts): + loopstart = [] + looplines = loop = 0 + nbr_faces = len(indices) + for fid in range(nbr_faces): + polyface = indices[fid] + looplines += len(polyface) + shape.vertices.add(len(pts) // 3) + shape.loops.add(looplines) + shape.polygons.add(nbr_faces) + shape.vertices.foreach_set("co", pts) + for vtx in indices: + loopstart.append(loop) + data.extend(vtx) + loop += len(vtx) + shape.polygons.foreach_set("loop_start", loopstart) + shape.loops.foreach_set("vertex_index", data) + + if (len(data) > 0): + shape.validate() + shape.update() + obj = bpy.data.objects.new(name, shape) + context.view_layer.active_layer_collection.collection.objects.link(obj) + adjust_material(obj, mat) + return True + return True + + +def calc_point(data): + points = [] + long, offset = get_long(data, 0) + while (offset < len(data)): + val, offset = get_long(data, offset) 
+ flt, offset = get_floats(data, offset, 3) + points.extend(flt) + return points + + +def calc_point_float(data): + points = [] + long, offset = get_long(data, 0) + while (offset < len(data)): + flt, offset = get_floats(data, offset, 3) + points.extend(flt) + return points + + +def get_poly_4p(points): + vertex = {} + for point in points: + ngon = point.points + key = point.fH + if (key not in vertex): + vertex[key] = [] + vertex[key].append(ngon) + return vertex + + +def get_poly_5p(data): + count, offset = get_long(data, 0) + ngons = [] + while count > 0: + pt, offset = get_longs(data, offset, 3) + offset += 8 + ngons.append(pt) + count -= 1 + return ngons + + +def get_poly_6p(data): + count, offset = get_long(data, 0) + polylist = [] + while (offset < len(data)): + long, offset = get_longs(data, offset, 6) + i = 5 + while ((i > 3) and (long[i] < 0)): + i -= 1 + if (i > 2): + polylist.append(long[1:i]) + return polylist + + +def get_poly_data(chunk): + offset = 0 + polylist = [] + data = chunk.data + while (offset < len(data)): + count, offset = get_long(data, offset) + points, offset = get_longs(data, offset, count) + polylist.append(points) + return polylist + + +def get_point_array(values): + verts = [] + if len(values) >= 4: + count, offset = get_long(values, 0) + while (count > 0): + floats, offset = get_floats(values, offset, 3) + verts.extend(floats) + count -= 1 + return verts + + +def calc_point_3d(chunk): + data = chunk.data + count, offset = get_long(data, 0) + pointlist = [] + try: + while (offset < len(data)): + pt = Point3d() + long, offset = get_long(data, offset) + pt.points, offset = get_longs(data, offset, long) + pt.flags, offset = get_short(data, offset) + if ((pt.flags & 0x01) != 0): + pt.f1, offset = get_long(data, offset) + if ((pt.flags & 0x08) != 0): + pt.fH, offset = get_short(data, offset) + if ((pt.flags & 0x10) != 0): + pt.f2, offset = get_long(data, offset) + if ((pt.flags & 0x20) != 0): + pt.fA, offset = get_longs(data, offset, 2 * 
(long - 3)) + if (len(pt.points) > 0): + pointlist.append(pt) + except Exception as exc: + print('ArrayError:\n', "%s: offset = %d\n" %(exc, offset)) + return pointlist + + +def create_editable_poly(context, node, msh, mat, mtx): + coords = point3i = point4i = point6i = pointNi = None + name = node.get_first(TYP_NAME).data + poly = msh.get_first(0x08FE) + created = False + if (poly): + for child in poly.children: + if (child.types == 0x0100): + coords = calc_point(child.data) + elif (child.types == 0x0108): + point6i = child.data + elif (child.types == 0x011A): + point4i = calc_point_3d(child) + if (point4i is not None): + vertex = get_poly_4p(point4i) + if (len(vertex) > 0): + for key, ngons in vertex.items(): + created |= create_shape(context, coords, ngons, node, key, mtx, mat) + else: + created = True + elif (point6i is not None): + ngons = get_poly_6p(point6i) + created = create_shape(context, coords, ngons, node, None, mtx, mat) + return created + + +def create_editable_mesh(context, node, msh, mat, mtx): + name = node.get_first(TYP_NAME).data + poly = msh.get_first(0x08FE) + created = False + if (poly): + vertex_chunk = poly.get_first(0x0914) + clsid_chunk = poly.get_first(0x0912) + coords = get_point_array(vertex_chunk.data) + ngons = get_poly_5p(clsid_chunk.data) + created = create_shape(context, coords, ngons, node, None, mtx, mat) + return created + + +def get_matrix_mesh_material(node): + refs = get_reference(node) + if (refs): + mtx = refs.get(0, None) + msh = refs.get(1, None) + mat = refs.get(3, None) + lyr = refs.get(6, None) + else: + refs = get_references(node) + mtx = refs[0] + msh = refs[1] + mat = refs[3] + lyr = None + if (len(refs) > 6): + lyr = refs[6] + return mtx, msh, mat, lyr + + +def adjust_matrix(obj, node): + mtx = create_matrix(node).flatten() + plc = mathutils.Matrix(*mtx) + obj.matrix_world = plc + return plc + + +def create_shell(context, node, shell, mat, mtx): + name = node.get_first(TYP_NAME).data + refs = get_references(shell) 
+ msh = refs[-1] + created = create_editable_mesh(context, node, msh, mtx, mat) + return created + + +def create_skipable(context, node, msh, mat, mtx, skip): + name = node.get_first(TYP_NAME).data + print(" skipping %s '%s'... " %(skip, name)) + return True + + +def create_mesh(context, node, msh, mtx, mat): + created = False + uid = get_guid(msh) + msh.geometry = None + if (uid == 0x0E44F10B3): + created = create_editable_mesh(context, node, msh, mat, mtx) + elif (uid == 0x192F60981BF8338D): + created = create_editable_poly(context, node, msh, mat, mtx) + elif (uid in {0x2032, 0x2033}): + created = create_shell(context, node, msh, mat, mtx) + else: + skip = SKIPPABLE.get(uid) + if (skip is not None): + created = create_skipable(context, node, msh, mat, mtx, skip) + return created, uid + + +def create_object(context, node): + parent = get_node_parent(node) + node.parent = parent + name = get_node_name(node) + mtx, msh, mat, lyr = get_matrix_mesh_material(node) + while ((parent is not None) and (get_guid(parent) != 0x0002)): + name = "%s/%s" %(get_node_name(parent), name) + parent_mtx = parent.matrix + if (parent_mtx): + mtx = mtx.dot(parent_mtx) + parent = get_node_parent(parent) + created, uid = create_mesh(context, node, msh, mtx, mat) + + +def make_scene(context, parent, level=0): + for chunk in parent.children: + if (isinstance(chunk, SceneChunk)): + if ((get_guid(chunk) == 0x0001) and (get_super_id(chunk) == 0x0001)): + try: + create_object(context, chunk) + except Exception as exc: + print('ImportError:', exc, chunk) + + +def read_scene(context, maxfile, filename): + global SCENE_LIST + SCENE_LIST = read_chunks(maxfile, 'Scene', filename+'.Scn.bin', containerReader=SceneChunk) + make_scene(context, SCENE_LIST[0], 0) + + +def read(context, filename): + if (is_maxfile(filename)): + maxfile = ImportMaxFile(filename) + prop = maxfile.getproperties('\x05DocumentSummaryInformation', convert_time=True, no_conversion=[10]) + prop = 
maxfile.getproperties('\x05SummaryInformation', convert_time=True, no_conversion=[10]) + read_class_data(maxfile, filename) + read_config(maxfile, filename) + read_directory(maxfile, filename) + read_class_directory(maxfile, filename) + read_video_postqueue(maxfile, filename) + read_scene(context, maxfile, filename) + else: + print("File seems to be no 3D Studio Max file!") + + +def load(operator, context, filepath="", global_matrix=None): + read(context, filepath) + + return {'FINISHED'} \ No newline at end of file -- 2.30.2 From b96e94368e55944f3d53b45099cacb6fa5cb0a1a Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Mon, 27 Nov 2023 02:47:48 +0100 Subject: [PATCH 02/38] Import_max: Added import options Added import options for scale and transform Fixed axis conversion --- io_import_max.py | 95 +++++++++++++++++++++++++++++------------------- 1 file changed, 57 insertions(+), 38 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index 175afc3..d5ec9a3 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -48,6 +48,18 @@ class Import_max(bpy.types.Operator, bpy_extras.io_utils.ImportHelper): filename_ext = ".max" filter_glob: bpy.props.StringProperty(default="*.max", options={'HIDDEN'},) + scale_objects: bpy.props.FloatProperty(name="Scale", + description="Scale factor for all objects", + min=0.0, max=10000.0, + soft_min=0.0, soft_max=10000.0, + default=1.0, + ) + + use_apply_matrix: bpy.props.BoolProperty(name="Apply Matrix", + description="Use transformation matrix to transform the objects", + default=True, + ) + def execute(self, context): keywords = self.as_keywords(ignore=("axis_forward", "axis_up", "filter_glob")) global_matrix = axis_conversion(from_forward=self.axis_forward, from_up=self.axis_up,).to_4x4() @@ -1120,6 +1132,24 @@ def create_matrix(prc): return mtx +def get_matrix_mesh_material(node): + refs = get_reference(node) + if (refs): + mtx = refs.get(0, None) + msh = refs.get(1, None) + mat = refs.get(3, None) + lyr = refs.get(6, 
None) + else: + refs = get_references(node) + mtx = refs[0] + msh = refs[1] + mat = refs[3] + lyr = None + if (len(refs) > 6): + lyr = refs[6] + return mtx, msh, mat, lyr + + def get_property(properties, idx): for child in properties.children: if (child.types & 0x100E): @@ -1216,12 +1246,18 @@ def adjust_material(obj, mat): objMaterial.roughness = 1.0 - material.get('shinines', 0.6) -def create_shape(context, pts, indices, node, key, prc, mat): +def adjust_matrix(obj, node): + mtx = create_matrix(node).flatten() + plc = mathutils.Matrix(*mtx) + obj.matrix_world = plc + return plc + + +def create_shape(context, pts, indices, node, key, mtx, mat): name = node.get_first(TYP_NAME).data shape = bpy.data.meshes.new(name) if (key is not None): name = "%s_%d" %(name, key) - mtx = create_matrix(prc) data = [] if (pts): loopstart = [] @@ -1247,6 +1283,7 @@ def create_shape(context, pts, indices, node, key, prc, mat): obj = bpy.data.objects.new(name, shape) context.view_layer.active_layer_collection.collection.objects.link(obj) adjust_material(obj, mat) + obj.matrix_world = mtx return True return True @@ -1391,31 +1428,6 @@ def create_editable_mesh(context, node, msh, mat, mtx): return created -def get_matrix_mesh_material(node): - refs = get_reference(node) - if (refs): - mtx = refs.get(0, None) - msh = refs.get(1, None) - mat = refs.get(3, None) - lyr = refs.get(6, None) - else: - refs = get_references(node) - mtx = refs[0] - msh = refs[1] - mat = refs[3] - lyr = None - if (len(refs) > 6): - lyr = refs[6] - return mtx, msh, mat, lyr - - -def adjust_matrix(obj, node): - mtx = create_matrix(node).flatten() - plc = mathutils.Matrix(*mtx) - obj.matrix_world = plc - return plc - - def create_shell(context, node, shell, mat, mtx): name = node.get_first(TYP_NAME).data refs = get_references(shell) @@ -1424,7 +1436,7 @@ def create_shell(context, node, shell, mat, mtx): return created -def create_skipable(context, node, msh, mat, mtx, skip): +def create_skipable(context, node, skip): 
name = node.get_first(TYP_NAME).data print(" skipping %s '%s'... " %(skip, name)) return True @@ -1443,11 +1455,11 @@ def create_mesh(context, node, msh, mtx, mat): else: skip = SKIPPABLE.get(uid) if (skip is not None): - created = create_skipable(context, node, msh, mat, mtx, skip) + created = create_skipable(context, node, skip) return created, uid -def create_object(context, node): +def create_object(context, node, mscale, transform): parent = get_node_parent(node) node.parent = parent name = get_node_name(node) @@ -1458,26 +1470,30 @@ def create_object(context, node): if (parent_mtx): mtx = mtx.dot(parent_mtx) parent = get_node_parent(parent) + if (transform): + mtx = create_matrix(mtx) @ mscale + else: + mtx = mscale created, uid = create_mesh(context, node, msh, mtx, mat) -def make_scene(context, parent, level=0): +def make_scene(context, mscale, transform, parent, level=0): for chunk in parent.children: if (isinstance(chunk, SceneChunk)): if ((get_guid(chunk) == 0x0001) and (get_super_id(chunk) == 0x0001)): try: - create_object(context, chunk) + create_object(context, chunk, mscale, transform) except Exception as exc: print('ImportError:', exc, chunk) -def read_scene(context, maxfile, filename): +def read_scene(context, maxfile, filename, mscale, transform): global SCENE_LIST SCENE_LIST = read_chunks(maxfile, 'Scene', filename+'.Scn.bin', containerReader=SceneChunk) - make_scene(context, SCENE_LIST[0], 0) + make_scene(context, mscale, transform, SCENE_LIST[0], 0) -def read(context, filename): +def read(context, filename, mscale, transform): if (is_maxfile(filename)): maxfile = ImportMaxFile(filename) prop = maxfile.getproperties('\x05DocumentSummaryInformation', convert_time=True, no_conversion=[10]) @@ -1487,12 +1503,15 @@ def read(context, filename): read_directory(maxfile, filename) read_class_directory(maxfile, filename) read_video_postqueue(maxfile, filename) - read_scene(context, maxfile, filename) + read_scene(context, maxfile, filename, mscale, 
transform) else: print("File seems to be no 3D Studio Max file!") -def load(operator, context, filepath="", global_matrix=None): - read(context, filepath) +def load(operator, context, filepath="", scale_objects=1.0, use_apply_matrix=False, global_matrix=None): + mscale = mathutils.Matrix.Scale(scale_objects, 4) + if global_matrix is not None: + mscale = global_matrix @ mscale + read(context, filepath, mscale, transform=use_apply_matrix) return {'FINISHED'} \ No newline at end of file -- 2.30.2 From 2d0bb80d0d0b03419598739dfc26f9bc2cd39a3e Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Mon, 27 Nov 2023 11:43:06 +0100 Subject: [PATCH 03/38] Import_max: Updated user interface design Updated UI design to blender standard --- io_import_max.py | 42 ++++++++++++++++++++++++++++++++++++++---- 1 file changed, 38 insertions(+), 4 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index d5ec9a3..5355722 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -14,7 +14,7 @@ bl_info = { "name": "Import Autodesk MAX (.max)", "author": "Sebastian Sille, Philippe Lagadec, Jens M. 
Plonka", - "version": (1, 0, 0), + "version": (1, 1, 0), "blender": (4, 0, 0), "location": "File > Import", "description": "Import 3DSMAX meshes & materials", @@ -42,7 +42,7 @@ from bpy_extras.io_utils import orientation_helper class Import_max(bpy.types.Operator, bpy_extras.io_utils.ImportHelper): """Import Autodesk MAX""" bl_idname = "import_autodesk.max" - bl_label = "Import Autodesk MAX (.max)" + bl_label = "Import MAX (.max)" bl_options = {'PRESET', 'UNDO'} filename_ext = ".max" @@ -54,10 +54,9 @@ class Import_max(bpy.types.Operator, bpy_extras.io_utils.ImportHelper): soft_min=0.0, soft_max=10000.0, default=1.0, ) - use_apply_matrix: bpy.props.BoolProperty(name="Apply Matrix", description="Use transformation matrix to transform the objects", - default=True, + default=False, ) def execute(self, context): @@ -67,16 +66,51 @@ class Import_max(bpy.types.Operator, bpy_extras.io_utils.ImportHelper): return load(self, context, **keywords) + def draw(self, context): + pass + + +class MAX_PT_import_transform(bpy.types.Panel): + bl_space_type = 'FILE_BROWSER' + bl_region_type = 'TOOL_PROPS' + bl_label = "Transform" + bl_parent_id = "FILE_PT_operator" + + @classmethod + def poll(cls, context): + sfile = context.space_data + operator = sfile.active_operator + + return operator.bl_idname == "IMPORT_AUTODESK_OT_max" + + def draw(self, context): + layout = self.layout + layout.use_property_split = True + layout.use_property_decorate = False + + sfile = context.space_data + operator = sfile.active_operator + + layout.prop(operator, "scale_objects") + layrow = layout.row(align=True) + layrow.prop(operator, "use_apply_matrix") + layrow.label(text="", icon='VIEW_ORTHO' if operator.use_apply_matrix else 'MESH_GRID') + layout.prop(operator, "axis_forward") + layout.prop(operator, "axis_up") + + ### REGISTER ### def menu_func(self, context): self.layout.operator(Import_max.bl_idname, text="Autodesk MAX (.max)") def register(): bpy.utils.register_class(Import_max) + 
bpy.utils.register_class(MAX_PT_import_transform) bpy.types.TOPBAR_MT_file_import.append(menu_func) def unregister(): bpy.types.TOPBAR_MT_file_import.remove(menu_func) + bpy.utils.unregister_class(MAX_PT_import_transform) bpy.utils.unregister_class(Import_max) -- 2.30.2 From b9ce4686a8f4f11f39bf91db6157340d29c025b3 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Mon, 27 Nov 2023 13:31:31 +0100 Subject: [PATCH 04/38] Import_max: Fixed matrix variable Changed variable for matrix chunk to avoid confusion with real matrix Removed debug keyword --- io_import_max.py | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index 5355722..652bfc6 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -246,6 +246,11 @@ def _clsid(clsid): ((i32(clsid, 0), i16(clsid, 4), i16(clsid, 6)) + tuple(map(i8, clsid[8:16])))) + +############### +# DATA IMPORT # +############### + def is_maxfile (filename): """Test if file is a MAX OLE2 container.""" if hasattr(filename, 'read'): @@ -392,7 +397,7 @@ class MaxFileDirEntry: class ImportMaxFile: """Representing an interface for importing .max files.""" - def __init__(self, filename=None, write_mode=False, debug=False): + def __init__(self, filename=None, write_mode=False): self.write_mode = write_mode self._filesize = None self.byte_order = None @@ -717,6 +722,10 @@ class ImportMaxFile: return data +################### +# DATA PROCESSING # +################### + class MaxChunk(): """Representing a chunk of a .max file.""" def __init__(self, types, size, level, number): @@ -1169,19 +1178,19 @@ def create_matrix(prc): def get_matrix_mesh_material(node): refs = get_reference(node) if (refs): - mtx = refs.get(0, None) + prs = refs.get(0, None) msh = refs.get(1, None) mat = refs.get(3, None) lyr = refs.get(6, None) else: refs = get_references(node) - mtx = refs[0] + prs = refs[0] msh = refs[1] mat = refs[3] lyr = None if (len(refs) > 6): lyr = refs[6] - return mtx, 
msh, mat, lyr + return prs, msh, mat, lyr def get_property(properties, idx): @@ -1497,15 +1506,15 @@ def create_object(context, node, mscale, transform): parent = get_node_parent(node) node.parent = parent name = get_node_name(node) - mtx, msh, mat, lyr = get_matrix_mesh_material(node) + prs, msh, mat, lyr = get_matrix_mesh_material(node) while ((parent is not None) and (get_guid(parent) != 0x0002)): name = "%s/%s" %(get_node_name(parent), name) parent_mtx = parent.matrix if (parent_mtx): - mtx = mtx.dot(parent_mtx) + prs = prs.dot(parent_mtx) parent = get_node_parent(parent) if (transform): - mtx = create_matrix(mtx) @ mscale + mtx = create_matrix(prs) @ mscale else: mtx = mscale created, uid = create_mesh(context, node, msh, mtx, mat) -- 2.30.2 From d41ace599ba3fd2354ba225ac0a92d139909524a Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Mon, 27 Nov 2023 13:50:52 +0100 Subject: [PATCH 05/38] Import_max: Fixed matrix variable Changed variable for matrix chunk to avoid confusion with real matrix --- io_import_max.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/io_import_max.py b/io_import_max.py index 652bfc6..3b470c9 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -1475,7 +1475,7 @@ def create_shell(context, node, shell, mat, mtx): name = node.get_first(TYP_NAME).data refs = get_references(shell) msh = refs[-1] - created = create_editable_mesh(context, node, msh, mtx, mat) + created = create_editable_mesh(context, node, msh, mat, mtx) return created -- 2.30.2 From 05793fb6138d96e7dc06251ef82bd239a3057090 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Mon, 27 Nov 2023 15:32:08 +0100 Subject: [PATCH 06/38] Import_max: Removed write mode Removed write mode from data import --- io_import_max.py | 22 +++++++--------------- 1 file changed, 7 insertions(+), 15 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index 3b470c9..ff36e9b 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -397,8 +397,7 @@ class 
MaxFileDirEntry: class ImportMaxFile: """Representing an interface for importing .max files.""" - def __init__(self, filename=None, write_mode=False): - self.write_mode = write_mode + def __init__(self, filename=None): self._filesize = None self.byte_order = None self.directory_fp = None @@ -433,7 +432,7 @@ class ImportMaxFile: self.sector_size = None self.transaction_signature_number = None if filename: - self.open(filename, write_mode=write_mode) + self.open(filename) def __enter__(self): return self @@ -445,18 +444,13 @@ class ImportMaxFile: unicode_str = utf16_str.decode('UTF-16LE', errors) return unicode_str - def open(self, filename, write_mode=False): - self.write_mode = write_mode + def open(self, filename): if hasattr(filename, 'read'): self.fp = filename elif isinstance(filename, bytes) and len(filename) >= MIN_FILE_SIZE: self.fp = io.BytesIO(filename) else: - if self.write_mode: - mode = 'r+b' - else: - mode = 'rb' - self.fp = open(filename, mode) + self.fp = open(filename, 'rb') filesize = 0 self.fp.seek(0, os.SEEK_END) try: @@ -467,7 +461,6 @@ class ImportMaxFile: self._used_streams_fat = [] self._used_streams_minifat = [] header = self.fp.read(512) - fmt_header = '<8s16sHHHHHHLLLLLLLLLL' header_size = struct.calcsize(fmt_header) header1 = header[:header_size] @@ -578,7 +571,7 @@ class ImportMaxFile: try: self.fp.seek(self.sectorsize * (sect + 1)) except: - print('MAX sector index out of range') + print('IndexError: MAX sector index out of range') sector = self.fp.read(self.sectorsize) return sector @@ -610,9 +603,8 @@ class ImportMaxFile: fat=self.minifat, filesize=self.ministream.size) else: return MaxStream(fp=self.fp, sect=start, size=size, - offset=self.sectorsize, - sectorsize=self.sectorsize, fat=self.fat, - filesize=self._filesize) + offset=self.sectorsize, sectorsize=self.sectorsize, + fat=self.fat, filesize=self._filesize) def _find(self, filename): if isinstance(filename, str): -- 2.30.2 From 8c92c2671b126ba763e07ba2fec4759c3e3ca4c7 Mon Sep 
17 00:00:00 2001 From: Sebastian Sille Date: Mon, 27 Nov 2023 15:57:14 +0100 Subject: [PATCH 07/38] Import_max: Changed definition order Changed definition order for better overview --- io_import_max.py | 72 ++++++++++++++++++++++++------------------------ 1 file changed, 36 insertions(+), 36 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index ff36e9b..713c4f2 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -1342,6 +1342,42 @@ def calc_point_float(data): return points +def calc_point_3d(chunk): + data = chunk.data + count, offset = get_long(data, 0) + pointlist = [] + try: + while (offset < len(data)): + pt = Point3d() + long, offset = get_long(data, offset) + pt.points, offset = get_longs(data, offset, long) + pt.flags, offset = get_short(data, offset) + if ((pt.flags & 0x01) != 0): + pt.f1, offset = get_long(data, offset) + if ((pt.flags & 0x08) != 0): + pt.fH, offset = get_short(data, offset) + if ((pt.flags & 0x10) != 0): + pt.f2, offset = get_long(data, offset) + if ((pt.flags & 0x20) != 0): + pt.fA, offset = get_longs(data, offset, 2 * (long - 3)) + if (len(pt.points) > 0): + pointlist.append(pt) + except Exception as exc: + print('ArrayError:\n', "%s: offset = %d\n" %(exc, offset)) + return pointlist + + +def get_point_array(values): + verts = [] + if len(values) >= 4: + count, offset = get_long(values, 0) + while (count > 0): + floats, offset = get_floats(values, offset, 3) + verts.extend(floats) + count -= 1 + return verts + + def get_poly_4p(points): vertex = {} for point in points: @@ -1388,42 +1424,6 @@ def get_poly_data(chunk): return polylist -def get_point_array(values): - verts = [] - if len(values) >= 4: - count, offset = get_long(values, 0) - while (count > 0): - floats, offset = get_floats(values, offset, 3) - verts.extend(floats) - count -= 1 - return verts - - -def calc_point_3d(chunk): - data = chunk.data - count, offset = get_long(data, 0) - pointlist = [] - try: - while (offset < len(data)): - pt = Point3d() - long, 
offset = get_long(data, offset) - pt.points, offset = get_longs(data, offset, long) - pt.flags, offset = get_short(data, offset) - if ((pt.flags & 0x01) != 0): - pt.f1, offset = get_long(data, offset) - if ((pt.flags & 0x08) != 0): - pt.fH, offset = get_short(data, offset) - if ((pt.flags & 0x10) != 0): - pt.f2, offset = get_long(data, offset) - if ((pt.flags & 0x20) != 0): - pt.fA, offset = get_longs(data, offset, 2 * (long - 3)) - if (len(pt.points) > 0): - pointlist.append(pt) - except Exception as exc: - print('ArrayError:\n', "%s: offset = %d\n" %(exc, offset)) - return pointlist - - def create_editable_poly(context, node, msh, mat, mtx): coords = point3i = point4i = point6i = pointNi = None name = node.get_first(TYP_NAME).data -- 2.30.2 From a9e60678b528d02c7dfda2cb2b39f20c5bcac860 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Mon, 27 Nov 2023 20:23:50 +0100 Subject: [PATCH 08/38] Import_max: Added additional import options Added options to import materials and UV as mesh object --- io_import_max.py | 118 +++++++++++++++++++++++++++++++++++------------ 1 file changed, 89 insertions(+), 29 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index 713c4f2..844f4fa 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -14,7 +14,7 @@ bl_info = { "name": "Import Autodesk MAX (.max)", "author": "Sebastian Sille, Philippe Lagadec, Jens M. 
Plonka", - "version": (1, 1, 0), + "version": (1, 1, 2), "blender": (4, 0, 0), "location": "File > Import", "description": "Import 3DSMAX meshes & materials", @@ -54,6 +54,14 @@ class Import_max(bpy.types.Operator, bpy_extras.io_utils.ImportHelper): soft_min=0.0, soft_max=10000.0, default=1.0, ) + use_material: bpy.props.BoolProperty(name="Materials", + description="Import the materials of the objects", + default=True, + ) + use_uv_mesh: bpy.props.BoolProperty(name="UV Mesh", + description="Import texture coordinates as mesh objects", + default=False, + ) use_apply_matrix: bpy.props.BoolProperty(name="Apply Matrix", description="Use transformation matrix to transform the objects", default=False, @@ -70,6 +78,35 @@ class Import_max(bpy.types.Operator, bpy_extras.io_utils.ImportHelper): pass +class MAX_PT_import_include(bpy.types.Panel): + bl_space_type = 'FILE_BROWSER' + bl_region_type = 'TOOL_PROPS' + bl_label = "Include" + bl_parent_id = "FILE_PT_operator" + + @classmethod + def poll(cls, context): + sfile = context.space_data + operator = sfile.active_operator + + return operator.bl_idname == "IMPORT_AUTODESK_OT_max" + + def draw(self, context): + layout = self.layout + layout.use_property_split = True + layout.use_property_decorate = True + + sfile = context.space_data + operator = sfile.active_operator + + layrow = layout.row(align=True) + layrow.prop(operator, "use_material") + layrow.label(text="", icon='MATERIAL' if operator.use_material else 'SHADING_TEXTURE') + layrow = layout.row(align=True) + layrow.prop(operator, "use_uv_mesh") + layrow.label(text="", icon='UV' if operator.use_uv_mesh else 'GROUP_UVS') + + class MAX_PT_import_transform(bpy.types.Panel): bl_space_type = 'FILE_BROWSER' bl_region_type = 'TOOL_PROPS' @@ -105,12 +142,14 @@ def menu_func(self, context): def register(): bpy.utils.register_class(Import_max) + bpy.utils.register_class(MAX_PT_import_include) bpy.utils.register_class(MAX_PT_import_transform) 
bpy.types.TOPBAR_MT_file_import.append(menu_func) def unregister(): bpy.types.TOPBAR_MT_file_import.remove(menu_func) bpy.utils.unregister_class(MAX_PT_import_transform) + bpy.utils.unregister_class(MAX_PT_import_include) bpy.utils.unregister_class(Import_max) @@ -1205,7 +1244,8 @@ def get_color(colors, idx): def get_value(colors, idx): prop = get_property(colors, idx) if (prop is not None): - val, offset = get_float(prop.data, 15) + siz = 15 if (len(prop.data) > 23) else 6 + val, offset = get_float(prop.data, siz) return val return None @@ -1288,7 +1328,7 @@ def adjust_matrix(obj, node): return plc -def create_shape(context, pts, indices, node, key, mtx, mat): +def create_shape(context, pts, indices, node, key, mtx, mat, umt): name = node.get_first(TYP_NAME).data shape = bpy.data.meshes.new(name) if (key is not None): @@ -1297,13 +1337,13 @@ def create_shape(context, pts, indices, node, key, mtx, mat): if (pts): loopstart = [] looplines = loop = 0 - nbr_faces = len(indices) - for fid in range(nbr_faces): + nb_faces = len(indices) + for fid in range(nb_faces): polyface = indices[fid] looplines += len(polyface) shape.vertices.add(len(pts) // 3) shape.loops.add(looplines) - shape.polygons.add(nbr_faces) + shape.polygons.add(nb_faces) shape.vertices.foreach_set("co", pts) for vtx in indices: loopstart.append(loop) @@ -1317,8 +1357,9 @@ def create_shape(context, pts, indices, node, key, mtx, mat): shape.update() obj = bpy.data.objects.new(name, shape) context.view_layer.active_layer_collection.collection.objects.link(obj) - adjust_material(obj, mat) obj.matrix_world = mtx + if (umt): + adjust_material(obj, mat) return True return True @@ -1424,11 +1465,14 @@ def get_poly_data(chunk): return polylist -def create_editable_poly(context, node, msh, mat, mtx): - coords = point3i = point4i = point6i = pointNi = None +def create_editable_poly(context, node, msh, mat, mtx, umt, uvm): + coords = point4i = point6i = pointNi = None name = node.get_first(TYP_NAME).data poly = 
msh.get_first(0x08FE) created = False + lidx = [] + lcrd = [] + lply = [] if (poly): for child in poly.children: if (child.types == 0x0100): @@ -1437,20 +1481,34 @@ def create_editable_poly(context, node, msh, mat, mtx): point6i = child.data elif (child.types == 0x011A): point4i = calc_point_3d(child) + elif (child.types == 0x0310): + pointNi = child.data + elif (child.types == 0x0124): + lidx.append(get_long(child.data, 0)[0]) + elif (child.types == 0x0128): + lcrd.append(calc_point_float(child.data)) + elif (child.types == 0x012B): + lply.append(get_poly_data(child)) if (point4i is not None): vertex = get_poly_4p(point4i) if (len(vertex) > 0): for key, ngons in vertex.items(): - created |= create_shape(context, coords, ngons, node, key, mtx, mat) + created |= create_shape(context, coords, ngons, node, key, mtx, mat, umt) else: created = True elif (point6i is not None): ngons = get_poly_6p(point6i) - created = create_shape(context, coords, ngons, node, None, mtx, mat) + created = create_shape(context, coords, ngons, node, None, mtx, mat, umt) + elif (pointNi is not None): + ngons = get_poly_5p(pointNi) + created = create_shape(context, coords, ngons, node, None, mtx, mat, umt) + if (uvm and len(lidx) > 0): + for i in range(len(lidx)): + created |= create_shape(context, lcrd[i], lply[i], node, lidx[i], mtx, mat, umt) return created -def create_editable_mesh(context, node, msh, mat, mtx): +def create_editable_mesh(context, node, msh, mat, mtx, umt): name = node.get_first(TYP_NAME).data poly = msh.get_first(0x08FE) created = False @@ -1459,15 +1517,15 @@ def create_editable_mesh(context, node, msh, mat, mtx): clsid_chunk = poly.get_first(0x0912) coords = get_point_array(vertex_chunk.data) ngons = get_poly_5p(clsid_chunk.data) - created = create_shape(context, coords, ngons, node, None, mtx, mat) + created = create_shape(context, coords, ngons, node, None, mtx, mat, umt) return created -def create_shell(context, node, shell, mat, mtx): +def create_shell(context, node, 
shell, mat, mtx, umt): name = node.get_first(TYP_NAME).data refs = get_references(shell) msh = refs[-1] - created = create_editable_mesh(context, node, msh, mat, mtx) + created = create_editable_mesh(context, node, msh, mat, mtx, umt) return created @@ -1477,16 +1535,16 @@ def create_skipable(context, node, skip): return True -def create_mesh(context, node, msh, mtx, mat): +def create_mesh(context, node, msh, mtx, mat, umt, uvm): created = False uid = get_guid(msh) msh.geometry = None if (uid == 0x0E44F10B3): - created = create_editable_mesh(context, node, msh, mat, mtx) + created = create_editable_mesh(context, node, msh, mat, mtx, umt) elif (uid == 0x192F60981BF8338D): - created = create_editable_poly(context, node, msh, mat, mtx) + created = create_editable_poly(context, node, msh, mat, mtx, umt, uvm) elif (uid in {0x2032, 0x2033}): - created = create_shell(context, node, msh, mat, mtx) + created = create_shell(context, node, msh, mat, mtx, umt) else: skip = SKIPPABLE.get(uid) if (skip is not None): @@ -1494,7 +1552,7 @@ def create_mesh(context, node, msh, mtx, mat): return created, uid -def create_object(context, node, mscale, transform): +def create_object(context, node, mscale, usemat, uvmesh, transform): parent = get_node_parent(node) node.parent = parent name = get_node_name(node) @@ -1509,26 +1567,26 @@ def create_object(context, node, mscale, transform): mtx = create_matrix(prs) @ mscale else: mtx = mscale - created, uid = create_mesh(context, node, msh, mtx, mat) + created, uid = create_mesh(context, node, msh, mtx, mat, usemat, uvmesh) -def make_scene(context, mscale, transform, parent, level=0): +def make_scene(context, mscale, usemat, uvmesh, transform, parent, level=0): for chunk in parent.children: if (isinstance(chunk, SceneChunk)): if ((get_guid(chunk) == 0x0001) and (get_super_id(chunk) == 0x0001)): try: - create_object(context, chunk, mscale, transform) + create_object(context, chunk, mscale, usemat, uvmesh, transform) except Exception as exc: 
print('ImportError:', exc, chunk) -def read_scene(context, maxfile, filename, mscale, transform): +def read_scene(context, maxfile, filename, mscale, usemat, uvmesh, transform): global SCENE_LIST SCENE_LIST = read_chunks(maxfile, 'Scene', filename+'.Scn.bin', containerReader=SceneChunk) - make_scene(context, mscale, transform, SCENE_LIST[0], 0) + make_scene(context, mscale, usemat, uvmesh, transform, SCENE_LIST[0], 0) -def read(context, filename, mscale, transform): +def read(context, filename, mscale, usemat, uvmesh, transform): if (is_maxfile(filename)): maxfile = ImportMaxFile(filename) prop = maxfile.getproperties('\x05DocumentSummaryInformation', convert_time=True, no_conversion=[10]) @@ -1538,15 +1596,17 @@ def read(context, filename, mscale, transform): read_directory(maxfile, filename) read_class_directory(maxfile, filename) read_video_postqueue(maxfile, filename) - read_scene(context, maxfile, filename, mscale, transform) + read_scene(context, maxfile, filename, mscale, usemat, uvmesh, transform) else: print("File seems to be no 3D Studio Max file!") -def load(operator, context, filepath="", scale_objects=1.0, use_apply_matrix=False, global_matrix=None): +def load(operator, context, filepath="", scale_objects=1.0, use_material=True, + use_uv_mesh=False, use_apply_matrix=False, global_matrix=None): mscale = mathutils.Matrix.Scale(scale_objects, 4) if global_matrix is not None: mscale = global_matrix @ mscale - read(context, filepath, mscale, transform=use_apply_matrix) + + read(context, filepath, mscale, usemat=use_material, uvmesh=use_uv_mesh, transform=use_apply_matrix) return {'FINISHED'} \ No newline at end of file -- 2.30.2 From 51a690e9cc2af2b630041d05e13a11db5214009e Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Tue, 28 Nov 2023 01:26:14 +0100 Subject: [PATCH 09/38] Import_max: Fixed vray material import Fixed vray material import --- io_import_max.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git 
a/io_import_max.py b/io_import_max.py index 844f4fa..12d4431 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -1244,7 +1244,7 @@ def get_color(colors, idx): def get_value(colors, idx): prop = get_property(colors, idx) if (prop is not None): - siz = 15 if (len(prop.data) > 23) else 6 + siz = 15 if (len(prop.data) > 19) else 6 val, offset = get_float(prop.data, siz) return val return None @@ -1273,10 +1273,9 @@ def get_vray_material(vry): material = Material() try: material.set('diffuse', get_color(vry, 0x01)) - material.set('ambient', get_color(vry, 0x02)) - material.set('specular', get_color(vry, 0x05)) - material.set('emissive', get_color(vry, 0x05)) - material.set('shinines', get_value(vry, 0x0B)) + material.set('specular', get_color(vry, 0x02)) + material.set('reflect', get_value(vry, 0x04)) + material.set('shinines', get_value(vry, 0x0A)) material.set('transparency', get_value(vry, 0x02)) except: pass @@ -1287,10 +1286,9 @@ def get_arch_material(ad): material = Material() try: material.set('diffuse', get_color(ad, 0x1A)) - material.set('ambient', get_color(ad, 0x02)) - material.set('specular', get_color(ad, 0x05)) - material.set('emissive', get_color(ad, 0x05)) - material.set('shinines', get_value(ad, 0x0B)) + material.set('specular', get_color(ad, 0x02)) + material.set('emissive', get_color(ad, 0x04)) + material.set('shinines', get_value(ad, 0x0A)) material.set('transparency', get_value(ad, 0x02)) except: pass -- 2.30.2 From b7156026c9586678a1afaf54273af8064da08125 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Tue, 28 Nov 2023 02:08:18 +0100 Subject: [PATCH 10/38] Import_max: Fixed arch material import Fixed arch material import --- io_import_max.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index 12d4431..0b04e5c 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -1287,7 +1287,7 @@ def get_arch_material(ad): try: material.set('diffuse', get_color(ad, 0x1A)) 
material.set('specular', get_color(ad, 0x02)) - material.set('emissive', get_color(ad, 0x04)) + material.set('reflect', get_value(ad, 0x04)) material.set('shinines', get_value(ad, 0x0A)) material.set('transparency', get_value(ad, 0x02)) except: @@ -1316,6 +1316,7 @@ def adjust_material(obj, mat): obj.data.materials.append(objMaterial) objMaterial.diffuse_color[:3] = material.get('diffuse', (0.8,0.8,0.8)) objMaterial.specular_color[:3] = material.get('specular', (1.0,1.0,1.0)) + objMaterial.specular_intensity = material.get('reflect', 0.5) objMaterial.roughness = 1.0 - material.get('shinines', 0.6) @@ -1604,7 +1605,6 @@ def load(operator, context, filepath="", scale_objects=1.0, use_material=True, mscale = mathutils.Matrix.Scale(scale_objects, 4) if global_matrix is not None: mscale = global_matrix @ mscale - read(context, filepath, mscale, usemat=use_material, uvmesh=use_uv_mesh, transform=use_apply_matrix) return {'FINISHED'} \ No newline at end of file -- 2.30.2 From 1d6ced39c27fefdef5d950f0298af710993123e8 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Tue, 28 Nov 2023 02:28:54 +0100 Subject: [PATCH 11/38] Import_max: Removed property decorate Removed property decorate --- io_import_max.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/io_import_max.py b/io_import_max.py index 0b04e5c..b2d34c4 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -94,7 +94,7 @@ class MAX_PT_import_include(bpy.types.Panel): def draw(self, context): layout = self.layout layout.use_property_split = True - layout.use_property_decorate = True + layout.use_property_decorate = False sfile = context.space_data operator = sfile.active_operator -- 2.30.2 From ba4008a5f4f0b6b963d180da90ff45e1fc608a1d Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Wed, 29 Nov 2023 01:08:46 +0100 Subject: [PATCH 12/38] Import_max: Fixed mesh import Check if mesh is editable poly --- io_import_max.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff 
--git a/io_import_max.py b/io_import_max.py index b2d34c4..798b6ee 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -223,7 +223,8 @@ SKIPPABLE = { def get_valid_name(name): - if (INVALID_NAME.match(name)): return "_%s"%(name.encode('utf8')) + if (INVALID_NAME.match(name)): + return "_%s"%(name.encode('utf8')) return "%s"%(name.encode('utf8')) def i8(data): @@ -840,7 +841,7 @@ class ClassIDChunk(ByteArrayChunk): self.set(data, "struct", ' Date: Wed, 29 Nov 2023 01:15:31 +0100 Subject: [PATCH 13/38] Cleanup: Import_max Removed unused code --- io_import_max.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index 798b6ee..bd76421 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -1508,7 +1508,6 @@ def create_editable_poly(context, node, msh, mat, mtx, umt, uvm): def create_editable_mesh(context, node, msh, mat, mtx, umt): - name = node.get_first(TYP_NAME).data poly = msh.get_first(0x08FE) created = False if (poly): @@ -1521,7 +1520,6 @@ def create_editable_mesh(context, node, msh, mat, mtx, umt): def create_shell(context, node, shell, mat, mtx, umt, uvm): - name = node.get_first(TYP_NAME).data refs = get_references(shell) msh = refs[-1] if (get_class_name(msh) == "'Editable Poly'"): -- 2.30.2 From 94b6787948e81fa7a38f2912c9f888799c29232f Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Wed, 29 Nov 2023 01:46:53 +0100 Subject: [PATCH 14/38] Cleanup: Import_max Cleanup tabs and spaces --- io_import_max.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index bd76421..9f41e47 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -775,8 +775,8 @@ class MaxChunk(): def __str__(self): if (self.unknown == True): - return "%s[%4x] %04X: %s" %("" * self.level, self.number, self.types, ":".join("%02x"%(c) for c in self.data)) - return "%s[%4x] %04X: %s=%s" %("" * self.level, self.number, self.types, self.format, self.data) + return "%s[%4x] %04X: %s" 
%(""*self.level, self.number, self.types, ":".join("%02x"%(c) for c in self.data)) + return "%s[%4x] %04X: %s=%s" %(""*self.level, self.number, self.types, self.format, self.data) class ByteArrayChunk(MaxChunk): @@ -864,8 +864,8 @@ class ContainerChunk(MaxChunk): def __str__(self): if (self.unknown == True): - return "%s[%4x] %04X" %("" * self.level, self.number, self.types) - return "%s[%4x] %04X: %s" %("" * self.level, self.number, self.types, self.format) + return "%s[%4x] %04X" %(""*self.level, self.number, self.types) + return "%s[%4x] %04X: %s" %(""*self.level, self.number, self.types, self.format) def get_first(self, types): for child in self.children: @@ -889,8 +889,8 @@ class SceneChunk(ContainerChunk): def __str__(self): if (self.unknown == True): - return "%s[%4x] %s" %("" * self.level, self.number, get_class_name(self)) - return "%s[%4x] %s: %s" %("" * self.level, self.number, get_class_name(self), self.format) + return "%s[%4x] %s" %(""*self.level, self.number, get_class_name(self)) + return "%s[%4x] %s: %s" %(""*self.level, self.number, get_class_name(self), self.format) def set_data(self, data): previous = None @@ -915,7 +915,7 @@ class ChunkReader(): short, ofst = get_long(data, ofst) if (short == 0xB000000): data = zlib.decompress(data, zlib.MAX_WBITS|32) - print(" reading '%s'..."%self.name, len(data)) + print(" reading '%s'..." 
%self.name, len(data)) while offset < len(data): old = offset offset, chunk = self.get_next_chunk(data, offset, level, len(chunks), containerReader, primitiveReader) @@ -1466,7 +1466,6 @@ def get_poly_data(chunk): def create_editable_poly(context, node, msh, mat, mtx, umt, uvm): coords = point4i = point6i = pointNi = None - name = node.get_first(TYP_NAME).data poly = msh.get_first(0x08FE) created = False lidx = [] @@ -1606,7 +1605,6 @@ def load(operator, context, filepath="", scale_objects=1.0, use_material=True, mscale = mathutils.Matrix.Scale(scale_objects, 4) if global_matrix is not None: mscale = global_matrix @ mscale - read(context, filepath, mscale, usemat=use_material, uvmesh=use_uv_mesh, transform=use_apply_matrix) return {'FINISHED'} \ No newline at end of file -- 2.30.2 From d7120e87247323b06b0fdcad2cf9aad3f33ece20 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Wed, 29 Nov 2023 14:19:15 +0100 Subject: [PATCH 15/38] Import_max: Added principled BSDF Added node_shader_utils principled BSDF wrapper Removed parameters which are incompatible for blender --- io_import_max.py | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index 9f41e47..d1ef9f3 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -35,6 +35,7 @@ import math, mathutils import bpy, bpy_extras from bpy_extras.io_utils import axis_conversion from bpy_extras.io_utils import orientation_helper +from bpy_extras.node_shader_utils import PrincipledBSDFWrapper @orientation_helper(axis_forward='Y', axis_up='Z') @@ -1150,7 +1151,7 @@ def get_rotation(pos): elif (uid == 0x442313): # TCB Rotation rot = pos.get_first(0x2504).data rotation = mathutils.Quaternion((rot[0], rot[1], rot[2], rot[3])) - elif (uid == 0x4B4B1003): #'Rotation List + elif (uid == 0x4B4B1003): # Rotation List refs = get_references(pos) if (len(refs) > 3): return get_rotation(refs[0]) @@ -1244,7 +1245,7 @@ def get_color(colors, idx): def 
get_value(colors, idx): prop = get_property(colors, idx) if (prop is not None): - siz = 15 if (len(prop.data) > 19) else 6 + siz = 15 if (len(prop.data) > 15) else 11 val, offset = get_float(prop.data, siz) return val return None @@ -1262,8 +1263,8 @@ def get_standard_material(refs): material.set('specular', get_color(parameters, 0x02)) material.set('emissive', get_color(parameters, 0x08)) material.set('shinines', get_value(parameters, 0x0A)) - transparency = refs[4] # ParameterBlock2 - material.set('transparency', get_value(transparency, 0x02)) + reflect = refs[4] # ParameterBlock2 + material.set('reflect', get_value(reflect, 0x02)) except: pass return material @@ -1274,9 +1275,6 @@ def get_vray_material(vry): try: material.set('diffuse', get_color(vry, 0x01)) material.set('specular', get_color(vry, 0x02)) - material.set('reflect', get_value(vry, 0x04)) - material.set('shinines', get_value(vry, 0x0A)) - material.set('transparency', get_value(vry, 0x02)) except: pass return material @@ -1287,9 +1285,6 @@ def get_arch_material(ad): try: material.set('diffuse', get_color(ad, 0x1A)) material.set('specular', get_color(ad, 0x02)) - material.set('reflect', get_value(ad, 0x04)) - material.set('shinines', get_value(ad, 0x0A)) - material.set('transparency', get_value(ad, 0x02)) except: pass return material @@ -1314,10 +1309,12 @@ def adjust_material(obj, mat): if (obj is not None) and (material is not None): objMaterial = bpy.data.materials.new(get_class_name(mat)) obj.data.materials.append(objMaterial) - objMaterial.diffuse_color[:3] = material.get('diffuse', (0.8,0.8,0.8)) - objMaterial.specular_color[:3] = material.get('specular', (1.0,1.0,1.0)) - objMaterial.specular_intensity = material.get('reflect', 0.5) - objMaterial.roughness = 1.0 - material.get('shinines', 0.6) + matShader = PrincipledBSDFWrapper(objMaterial, is_readonly=False, use_nodes=True) + matShader.base_color = objMaterial.diffuse_color[:3] = material.get('diffuse', (0.8,0.8,0.8)) + matShader.specular_tint 
= objMaterial.specular_color[:3] = material.get('specular', (1.0,1.0,1.0)) + matShader.specular = objMaterial.specular_intensity = material.get('reflect', 1.0) + matShader.roughness = objMaterial.roughness = 1.0 - material.get('shinines', 0.6) + matShader.emission_color = material.get('emissive', (0,0,0)) def adjust_matrix(obj, node): -- 2.30.2 From 9e09f562f994e42598502047e233928859fdc4b4 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Wed, 29 Nov 2023 14:28:50 +0100 Subject: [PATCH 16/38] Import_max: Fixed specularity Changed specularity default --- io_import_max.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/io_import_max.py b/io_import_max.py index d1ef9f3..f20fc12 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -1312,7 +1312,7 @@ def adjust_material(obj, mat): matShader = PrincipledBSDFWrapper(objMaterial, is_readonly=False, use_nodes=True) matShader.base_color = objMaterial.diffuse_color[:3] = material.get('diffuse', (0.8,0.8,0.8)) matShader.specular_tint = objMaterial.specular_color[:3] = material.get('specular', (1.0,1.0,1.0)) - matShader.specular = objMaterial.specular_intensity = material.get('reflect', 1.0) + matShader.specular = objMaterial.specular_intensity = material.get('reflect', 0.5) matShader.roughness = objMaterial.roughness = 1.0 - material.get('shinines', 0.6) matShader.emission_color = material.get('emissive', (0,0,0)) -- 2.30.2 From 540f70018e8b16eeaaf33dbac56b3322d3c1f7c2 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Wed, 29 Nov 2023 22:29:30 +0100 Subject: [PATCH 17/38] Import_max: Avoid struct error Ensure there are enough bytes to read --- io_import_max.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/io_import_max.py b/io_import_max.py index f20fc12..da6c6c2 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -1245,7 +1245,7 @@ def get_color(colors, idx): def get_value(colors, idx): prop = get_property(colors, idx) if (prop is not None): - siz = 15 if (len(prop.data) > 
15) else 11 + siz = 15 if (len(prop.data) > 17) else 11 val, offset = get_float(prop.data, siz) return val return None -- 2.30.2 From 64d2906bc3f94e037b3bc8b5be7cbc6b5c8ad05e Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Fri, 1 Dec 2023 20:30:35 +0100 Subject: [PATCH 18/38] Import_max: Fixed material import Fixed material import --- io_import_max.py | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index da6c6c2..acc6dd8 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -1236,7 +1236,7 @@ def get_property(properties, idx): def get_color(colors, idx): prop = get_property(colors, idx) if (prop is not None): - siz = 15 if (len(prop.data) > 23) else 11 + siz = len(prop.data) - 12 col, offset = get_floats(prop.data, siz, 3) return (col[0], col[1], col[2]) return None @@ -1245,7 +1245,7 @@ def get_color(colors, idx): def get_value(colors, idx): prop = get_property(colors, idx) if (prop is not None): - siz = 15 if (len(prop.data) > 17) else 11 + siz = len(prop.data) - 4 val, offset = get_float(prop.data, siz) return val return None @@ -1262,9 +1262,10 @@ def get_standard_material(refs): material.set('diffuse', get_color(parameters, 0x01)) material.set('specular', get_color(parameters, 0x02)) material.set('emissive', get_color(parameters, 0x08)) - material.set('shinines', get_value(parameters, 0x0A)) - reflect = refs[4] # ParameterBlock2 - material.set('reflect', get_value(reflect, 0x02)) + material.set('shinines', get_value(parameters, 0x0B)) + parablock = refs[4] # ParameterBlock2 + material.set('glossines', get_value(parablock, 0x02)) + material.set('metallic', get_value(parablock, 0x05)) except: pass return material @@ -1275,6 +1276,11 @@ def get_vray_material(vry): try: material.set('diffuse', get_color(vry, 0x01)) material.set('specular', get_color(vry, 0x02)) + material.set('shinines', get_value(vry, 0x03)) + material.set('ior', get_value(vry, 0x09)) + material.set('emissive', 
get_color(vry, 0x17)) + material.set('glossines', get_value(vry, 0x18)) + material.set('metallic', get_value(vry, 0x19)) except: pass return material @@ -1312,9 +1318,11 @@ def adjust_material(obj, mat): matShader = PrincipledBSDFWrapper(objMaterial, is_readonly=False, use_nodes=True) matShader.base_color = objMaterial.diffuse_color[:3] = material.get('diffuse', (0.8,0.8,0.8)) matShader.specular_tint = objMaterial.specular_color[:3] = material.get('specular', (1.0,1.0,1.0)) - matShader.specular = objMaterial.specular_intensity = material.get('reflect', 0.5) + matShader.specular = objMaterial.specular_intensity = material.get('glossines', 0.5) matShader.roughness = objMaterial.roughness = 1.0 - material.get('shinines', 0.6) + matShader.metallic = objMaterial.metallic = material.get('metallic', 0) matShader.emission_color = material.get('emissive', (0,0,0)) + matShader.ior = material.get('ior', 1.45) def adjust_matrix(obj, node): @@ -1602,6 +1610,7 @@ def load(operator, context, filepath="", scale_objects=1.0, use_material=True, mscale = mathutils.Matrix.Scale(scale_objects, 4) if global_matrix is not None: mscale = global_matrix @ mscale + read(context, filepath, mscale, usemat=use_material, uvmesh=use_uv_mesh, transform=use_apply_matrix) return {'FINISHED'} \ No newline at end of file -- 2.30.2 From 7e75ed90a9e985d9ba3b914b6c7d6c6794376b98 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Fri, 1 Dec 2023 22:33:53 +0100 Subject: [PATCH 19/38] Cleanup: Import_max Changed blender version to last compatible version Cleanup unused code --- io_import_max.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index acc6dd8..171214d 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -15,7 +15,7 @@ bl_info = { "name": "Import Autodesk MAX (.max)", "author": "Sebastian Sille, Philippe Lagadec, Jens M. 
Plonka", "version": (1, 1, 2), - "blender": (4, 0, 0), + "blender": (3, 6, 0), "location": "File > Import", "description": "Import 3DSMAX meshes & materials", "warning": "", @@ -451,7 +451,6 @@ class ImportMaxFile: self.fp = None self.header_clsid = None self.header_signature = None - self.metadata = None self.mini_sector_shift = None self.mini_sector_size = None self.mini_stream_cutoff_size = None @@ -684,9 +683,7 @@ class ImportMaxFile: def get_rootentry_name(self): return self.root.name - def getproperties(self, filename, convert_time=False, no_conversion=None): - if no_conversion == None: - no_conversion = [] + def getproperties(self, filename): streampath = filename if not isinstance(streampath, str): streampath = '/'.join(streampath) @@ -732,7 +729,7 @@ class ImportMaxFile: value = self._decode_utf16_str(stream[offset + 8:offset + 8 + count * 2]) elif property_type == VT_FILETIME: value = int(i32(stream, offset + 4)) + (int(i32(stream, offset + 8)) << 32) - if convert_time and property_id not in no_conversion: + if property_id != 10: _FILETIME_null_date = datetime.datetime(1601, 1, 1, 0, 0, 0) value = _FILETIME_null_date + datetime.timedelta(microseconds=value // 10) else: @@ -1290,7 +1287,6 @@ def get_arch_material(ad): material = Material() try: material.set('diffuse', get_color(ad, 0x1A)) - material.set('specular', get_color(ad, 0x02)) except: pass return material @@ -1593,8 +1589,8 @@ def read_scene(context, maxfile, filename, mscale, usemat, uvmesh, transform): def read(context, filename, mscale, usemat, uvmesh, transform): if (is_maxfile(filename)): maxfile = ImportMaxFile(filename) - prop = maxfile.getproperties('\x05DocumentSummaryInformation', convert_time=True, no_conversion=[10]) - prop = maxfile.getproperties('\x05SummaryInformation', convert_time=True, no_conversion=[10]) + prop = maxfile.getproperties('\x05DocumentSummaryInformation') + prop = maxfile.getproperties('\x05SummaryInformation') read_class_data(maxfile, filename) read_config(maxfile, 
filename) read_directory(maxfile, filename) -- 2.30.2 From dc4698175f42ff7aa0af3ecb202fe2b74f79ad67 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Sat, 2 Dec 2023 12:18:13 +0100 Subject: [PATCH 20/38] Import_max:Added chunk definitions Added chunk definitions --- io_import_max.py | 62 ++++++++++++++++++++++++++++-------------------- 1 file changed, 36 insertions(+), 26 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index 171214d..940089d 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -161,15 +161,14 @@ def unregister(): MAGIC = b'\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1' WORD_CLSID = "00020900-0000-0000-C000-000000000046" +MIN_FILE_SIZE = 1536 +UNKNOWN_SIZE = 0x7FFFFFFF +MAXFILE_SIZE = 0x7FFFFFFFFFFFFFFF MAXREGSECT = 0xFFFFFFFA # (-6) maximum SECT DIFSECT = 0xFFFFFFFC # (-4) denotes a DIFAT sector in a FAT FATSECT = 0xFFFFFFFD # (-3) denotes a FAT sector in a FAT ENDOFCHAIN = 0xFFFFFFFE # (-2) end of a virtual stream chain FREESECT = 0xFFFFFFFF # (-1) unallocated sector -MAXREGSID = 0xFFFFFFFA # (-6) maximum directory entry ID -NOSTREAM = 0xFFFFFFFF # (-1) unallocated directory entry -UNKNOWN_SIZE = 0x7FFFFFFF -MIN_FILE_SIZE = 1536 STGTY_EMPTY = 0 # empty directory entry STGTY_STORAGE = 1 # element is a storage object @@ -192,12 +191,15 @@ TYP_NAME = 0x0962 INVALID_NAME = re.compile('^[0-9].*') UNPACK_BOX_DATA = struct.Struct(' 3): return get_rotation(refs[0]) - elif (uid == 0x3A90416731381913): # Rotation Wire + elif (uid == MATRIX_ROT): # Rotation Wire return get_rotation(get_references(pos)[0]) if (rotation): mtx = mathutils.Matrix.Rotation(rotation.angle, 4, rotation.axis) @@ -1173,7 +1182,7 @@ def get_scale(pos): if (scale is None): scale = pos.get_first(0x2505) pos = scale.data - elif (uid == 0xFEEE238B118F7C01): # ScaleXYZ + elif (uid == MATRIX_SCL): # ScaleXYZ pos = get_point_3d(pos, 1.0) else: return mtx @@ -1287,6 +1296,7 @@ def get_arch_material(ad): material = Material() try: material.set('diffuse', get_color(ad, 0x1A)) + 
material.set('specular', get_color(ad, 0x02)) except: pass return material @@ -1302,10 +1312,10 @@ def adjust_material(obj, mat): elif (uid == 0x0200): # Multi/Sub-Object refs = get_references(mat) material = adjust_material(obj, refs[-1]) - elif (uid == 0x7034695C37BF3F2F): # VRayMtl + elif (uid == VRAY_MTL): # VRayMtl refs = get_reference(mat) material = get_vray_material(refs[1]) - elif (uid == 0x4A16365470B05735): # Arch + elif (uid == ARCH_MTL): # Arch refs = get_references(mat) material = get_arch_material(refs[0]) if (obj is not None) and (material is not None): @@ -1539,9 +1549,9 @@ def create_mesh(context, node, msh, mtx, mat, umt, uvm): created = False uid = get_guid(msh) msh.geometry = None - if (uid == 0x0E44F10B3): + if (uid == EDIT_MESH): created = create_editable_mesh(context, node, msh, mat, mtx, umt) - elif (uid == 0x192F60981BF8338D): + elif (uid == EDIT_POLY): created = create_editable_poly(context, node, msh, mat, mtx, umt, uvm) elif (uid in {0x2032, 0x2033}): created = create_shell(context, node, msh, mat, mtx, umt, uvm) @@ -1557,7 +1567,7 @@ def create_object(context, node, mscale, usemat, uvmesh, transform): node.parent = parent name = get_node_name(node) prs, msh, mat, lyr = get_matrix_mesh_material(node) - while ((parent is not None) and (get_guid(parent) != 0x0002)): + while ((parent is not None) and (get_guid(parent) != 0x02)): name = "%s/%s" %(get_node_name(parent), name) parent_mtx = parent.matrix if (parent_mtx): @@ -1573,7 +1583,7 @@ def create_object(context, node, mscale, usemat, uvmesh, transform): def make_scene(context, mscale, usemat, uvmesh, transform, parent, level=0): for chunk in parent.children: if (isinstance(chunk, SceneChunk)): - if ((get_guid(chunk) == 0x0001) and (get_super_id(chunk) == 0x0001)): + if ((get_guid(chunk) == 0x01) and (get_super_id(chunk) == 0x01)): try: create_object(context, chunk, mscale, usemat, uvmesh, transform) except Exception as exc: -- 2.30.2 From 0de2356c12badd552d6245c8b02e69da73745285 Mon Sep 
17 00:00:00 2001 From: Sebastian Sille Date: Sat, 2 Dec 2023 13:10:46 +0100 Subject: [PATCH 21/38] Cleanup: Import_max Cleanup tabs and spaces --- io_import_max.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index 940089d..767105a 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -389,7 +389,7 @@ class MaxFileDirEntry: self.is_minifat = False if self.entry_type in (STGTY_ROOT, STGTY_STREAM) and self.size > 0: if self.size < maxfile.minisectorcutoff \ - and self.entry_type == STGTY_STREAM: # only streams can be in MiniFAT + and self.entry_type == STGTY_STREAM: # only streams can be in MiniFAT self.is_minifat = True else: self.is_minifat = False @@ -563,7 +563,7 @@ class ImportMaxFile: if minifat: used_streams = self._used_streams_minifat else: - if first_sect in (DIFSECT,FATSECT,ENDOFCHAIN,FREESECT): + if first_sect in (DIFSECT, FATSECT, ENDOFCHAIN, FREESECT): return used_streams = self._used_streams_fat if first_sect in used_streams: @@ -597,7 +597,7 @@ class ImportMaxFile: self.fat = array.array('I') self.loadfat_sect(sect) if self.num_difat_sectors != 0: - nb_difat_sectors = (self.sectorsize//4) - 1 + nb_difat_sectors = (self.sectorsize // 4) - 1 nb_difat = (self.num_fat_sectors - 109 + nb_difat_sectors - 1) // nb_difat_sectors isect_difat = self.first_difat_sector for i in range(nb_difat): @@ -1177,7 +1177,7 @@ def get_scale(pos): if (scale is None): scale = pos.get_first(0x2505) pos = scale.data - elif (uid == 0x442315): # TCB Zoom + elif (uid == 0x442315): # TCB Zoom scale = pos.get_first(0x2501) if (scale is None): scale = pos.get_first(0x2505) -- 2.30.2 From b0443729ac0d1a75854dd9bf9a91f793d7271434 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Sun, 3 Dec 2023 23:05:55 +0100 Subject: [PATCH 22/38] Import_max: Removed unused code Added additional arch material parameter Removed unused code --- io_import_max.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git 
a/io_import_max.py b/io_import_max.py index 767105a..22f7b1b 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -620,7 +620,7 @@ class ImportMaxFile: try: self.fp.seek(self.sectorsize * (sect + 1)) except: - print('IndexError: MAX sector index out of range') + print('IndexError: Sector index out of range') sector = self.fp.read(self.sectorsize) return sector @@ -693,9 +693,6 @@ class ImportMaxFile: return self.root.name def getproperties(self, filename): - streampath = filename - if not isinstance(streampath, str): - streampath = '/'.join(streampath) fp = self.openstream(filename) data = {} try: @@ -1296,7 +1293,8 @@ def get_arch_material(ad): material = Material() try: material.set('diffuse', get_color(ad, 0x1A)) - material.set('specular', get_color(ad, 0x02)) + material.set('specular', get_color(ad, 0x05)) + material.set('shinines', get_value(ad, 0x0B)) except: pass return material -- 2.30.2 From 45b3f48498e9c2aaca822cb16be2fc40b3040c40 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Mon, 4 Dec 2023 20:01:29 +0100 Subject: [PATCH 23/38] Import_max: Fixed crash with new .max files Fixed crash with files from latest version Removed unnecessary code Removed unused imports --- io_import_max.py | 82 ++---------------------------------------------- 1 file changed, 3 insertions(+), 79 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index 22f7b1b..46c2512 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -30,7 +30,6 @@ bl_info = { import io, re import os, sys, zlib import struct, array -import time, datetime import math, mathutils import bpy, bpy_extras from bpy_extras.io_utils import axis_conversion @@ -177,16 +176,6 @@ STGTY_LOCKBYTES = 3 # element is an ILockBytes object STGTY_PROPERTY = 4 # element is an IPropertyStorage object STGTY_ROOT = 5 # element is a root storage -VT_EMPTY=0; VT_NULL=1; VT_I2=2; VT_I4=3; VT_R4=4; VT_R8=5; VT_CY=6; -VT_DATE=7; VT_BSTR=8; VT_DISPATCH=9; VT_ERROR=10; VT_BOOL=11; -VT_VARIANT=12; VT_UNKNOWN=13; 
VT_DECIMAL=14; VT_I1=16; VT_UI1=17; -VT_UI2=18; VT_UI4=19; VT_I8=20; VT_UI8=21; VT_INT=22; VT_UINT=23; -VT_VOID=24; VT_HRESULT=25; VT_PTR=26; VT_SAFEARRAY=27; VT_CARRAY=28; -VT_USERDEFINED=29; VT_LPSTR=30; VT_LPWSTR=31; VT_FILETIME=64; -VT_BLOB=65; VT_STREAM=66; VT_STORAGE=67; VT_STREAMED_OBJECT=68; -VT_STORED_OBJECT=69; VT_BLOB_OBJECT=70; VT_CF=71; VT_CLSID=72; -VT_VECTOR=0x1000; - TYP_NAME = 0x0962 INVALID_NAME = re.compile('^[0-9].*') UNPACK_BOX_DATA = struct.Struct('= 32768: - value = value - 65536 - elif property_type == VT_UI2: # 2-byte unsigned integer - value = i16(stream, offset + 4) - elif property_type in (VT_I4, VT_INT, VT_ERROR): - value = i32(stream, offset + 4) - elif property_type in (VT_UI4, VT_UINT): # 4-byte unsigned integer - value = i32(stream, offset + 4) - elif property_type in (VT_BSTR, VT_LPSTR): - count = i32(stream, offset + 4) - value = stream[offset + 8:offset + 8 + count - 1] - value = value.replace(b'\x00', b'') - elif property_type == VT_BLOB: - count = i32(stream, offset + 4) - value = stream[offset + 8:offset + 8 + count] - elif property_type == VT_LPWSTR: - count = i32(stream, offset + 4) - value = self._decode_utf16_str(stream[offset + 8:offset + 8 + count * 2]) - elif property_type == VT_FILETIME: - value = int(i32(stream, offset + 4)) + (int(i32(stream, offset + 8)) << 32) - if property_id != 10: - _FILETIME_null_date = datetime.datetime(1601, 1, 1, 0, 0, 0) - value = _FILETIME_null_date + datetime.timedelta(microseconds=value // 10) - else: - value = value // 10000000 - elif property_type == VT_UI1: # 1-byte unsigned integer - value = i8(stream[offset + 4]) - elif property_type == VT_CLSID: - value = _clsid(stream[offset + 4:offset + 20]) - elif property_type == VT_CF: - count = i32(stream, offset + 4) - value = stream[offset + 8:offset + 8 + count] - elif property_type == VT_BOOL: - value = bool(i16(stream, offset + 4)) - else: - value = None - - data[property_id] = value - except BaseException as exc: - print('Error while 
parsing property_id:', exc) - return data - ################### # DATA PROCESSING # @@ -917,7 +841,7 @@ class ChunkReader(): long, ofst = get_long(data, ofst) if (short == 0x8B1F): short, ofst = get_long(data, ofst) - if (short == 0xB000000): + if (short in (0xB000000, 0xa040000)): data = zlib.decompress(data, zlib.MAX_WBITS|32) print(" reading '%s'..." %self.name, len(data)) while offset < len(data): @@ -1597,8 +1521,6 @@ def read_scene(context, maxfile, filename, mscale, usemat, uvmesh, transform): def read(context, filename, mscale, usemat, uvmesh, transform): if (is_maxfile(filename)): maxfile = ImportMaxFile(filename) - prop = maxfile.getproperties('\x05DocumentSummaryInformation') - prop = maxfile.getproperties('\x05SummaryInformation') read_class_data(maxfile, filename) read_config(maxfile, filename) read_directory(maxfile, filename) @@ -1611,10 +1533,12 @@ def read(context, filename, mscale, usemat, uvmesh, transform): def load(operator, context, filepath="", scale_objects=1.0, use_material=True, use_uv_mesh=False, use_apply_matrix=False, global_matrix=None): + context.window.cursor_set('WAIT') mscale = mathutils.Matrix.Scale(scale_objects, 4) if global_matrix is not None: mscale = global_matrix @ mscale read(context, filepath, mscale, usemat=use_material, uvmesh=use_uv_mesh, transform=use_apply_matrix) + context.window.cursor_set('DEFAULT') return {'FINISHED'} \ No newline at end of file -- 2.30.2 From b112203b652fc09b5a60079de0c4fc883df9bbc9 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Mon, 4 Dec 2023 20:17:25 +0100 Subject: [PATCH 24/38] Import_max: Removed unused variables Removed unused variables --- io_import_max.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index 46c2512..55440e2 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -169,11 +169,7 @@ FATSECT = 0xFFFFFFFD # (-3) denotes a FAT sector in a FAT ENDOFCHAIN = 0xFFFFFFFE # (-2) end of a virtual stream chain FREESECT = 0xFFFFFFFF # (-1) 
unallocated sector -STGTY_EMPTY = 0 # empty directory entry -STGTY_STORAGE = 1 # element is a storage object STGTY_STREAM = 2 # element is a stream object -STGTY_LOCKBYTES = 3 # element is an ILockBytes object -STGTY_PROPERTY = 4 # element is an IPropertyStorage object STGTY_ROOT = 5 # element is a root storage TYP_NAME = 0x0962 -- 2.30.2 From 79d9c07b6e520a873cfe80b1cd1fb3e8606be7bc Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Mon, 4 Dec 2023 22:17:58 +0100 Subject: [PATCH 25/38] Import_max: Removed unused code Removed unused code --- io_import_max.py | 32 +++++--------------------------- 1 file changed, 5 insertions(+), 27 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index 55440e2..c82a27d 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -168,9 +168,8 @@ DIFSECT = 0xFFFFFFFC # (-4) denotes a DIFAT sector in a FAT FATSECT = 0xFFFFFFFD # (-3) denotes a FAT sector in a FAT ENDOFCHAIN = 0xFFFFFFFE # (-2) end of a virtual stream chain FREESECT = 0xFFFFFFFF # (-1) unallocated sector - -STGTY_STREAM = 2 # element is a stream object -STGTY_ROOT = 5 # element is a root storage +MAX_STREAM = 2 # element is a stream object +ROOT_STORE = 5 # element is a root storage TYP_NAME = 0x0962 INVALID_NAME = re.compile('^[0-9].*') @@ -372,9 +371,9 @@ class MaxFileDirEntry: self.size = self.sizeLow + (int(self.sizeHigh) << 32) self.clsid = _clsid(clsid) self.is_minifat = False - if self.entry_type in (STGTY_ROOT, STGTY_STREAM) and self.size > 0: + if self.entry_type in (ROOT_STORE, MAX_STREAM) and self.size > 0: if self.size < maxfile.minisectorcutoff \ - and self.entry_type == STGTY_STREAM: # only streams can be in MiniFAT + and self.entry_type == MAX_STREAM: # only streams can be in MiniFAT self.is_minifat = True else: self.is_minifat = False @@ -384,7 +383,7 @@ class MaxFileDirEntry: def build_sect_chain(self, maxfile): if self.sect_chain: return - if self.entry_type not in (STGTY_ROOT, STGTY_STREAM) or self.size == 0: + if self.entry_type not in 
(ROOT_STORE, MAX_STREAM) or self.size == 0: return self.sect_chain = list() if self.is_minifat and not maxfile.minifat: @@ -656,27 +655,6 @@ class ImportMaxFile: entry = self.direntries[sid] return self._open(entry.isectStart, entry.size) - def get_type(self, filename): - try: - sid = self._find(filename) - entry = self.direntries[sid] - return entry.entry_type - except: - return False - - def getclsid(self, filename): - sid = self._find(filename) - entry = self.direntries[sid] - return entry.clsid - - def get_size(self, filename): - sid = self._find(filename) - entry = self.direntries[sid] - return entry.size - - def get_rootentry_name(self): - return self.root.name - ################### # DATA PROCESSING # -- 2.30.2 From ff4a2908f631e379bb601e2fc8ebecbdd011199a Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Mon, 4 Dec 2023 23:23:11 +0100 Subject: [PATCH 26/38] Import_max: Removed unused boolean Removed unused boolean --- io_import_max.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index c82a27d..ed725cd 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -304,10 +304,8 @@ def is_maxfile (filename): class MaxStream(io.BytesIO): """Returns an instance of the BytesIO class as read-only file object.""" def __init__(self, fp, sect, size, offset, sectorsize, fat, filesize): - unknown_size = False if size == UNKNOWN_SIZE: size = len(fat) * sectorsize - unknown_size = True nb_sectors = (size + (sectorsize-1)) // sectorsize data = [] @@ -1178,7 +1176,7 @@ def get_vray_material(vry): material.set('diffuse', get_color(vry, 0x01)) material.set('specular', get_color(vry, 0x02)) material.set('shinines', get_value(vry, 0x03)) - material.set('ior', get_value(vry, 0x09)) + material.set('refraction', get_value(vry, 0x09)) material.set('emissive', get_color(vry, 0x17)) material.set('glossines', get_value(vry, 0x18)) material.set('metallic', get_value(vry, 0x19)) @@ -1224,7 +1222,7 @@ def adjust_material(obj, 
mat): matShader.roughness = objMaterial.roughness = 1.0 - material.get('shinines', 0.6) matShader.metallic = objMaterial.metallic = material.get('metallic', 0) matShader.emission_color = material.get('emissive', (0,0,0)) - matShader.ior = material.get('ior', 1.45) + matShader.ior = material.get('refraction', 1.45) def adjust_matrix(obj, node): -- 2.30.2 From d33a391d94c4bd08239b585dff651f70f8171547 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Tue, 5 Dec 2023 01:15:25 +0100 Subject: [PATCH 27/38] Import_max: Added corona material Added material properties for corona renderer --- io_import_max.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/io_import_max.py b/io_import_max.py index ed725cd..25ea22b 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -1150,6 +1150,16 @@ def get_value(colors, idx): return None +def get_parameter(colors, fmt): + if (fmt == 0x1): + siz = len(colors.data) - 12 + para, offset = get_floats(colors.data, siz, 3) + else: + siz = len(colors.data) - 4 + para, offset = get_float(colors.data, siz) + return para + + def get_standard_material(refs): material = None try: @@ -1185,6 +1195,19 @@ def get_vray_material(vry): return material +def get_corona_material(mtl): + material = Material() + try: + cor = mtl.children + material.set('diffuse', get_parameter(cor[3], 0x1)) + material.set('specular', get_parameter(cor[4], 0x1)) + material.set('emissive', get_parameter(cor[8], 0x1)) + material.set('glossines', get_parameter(cor[9], 0x2)) + except: + pass + return material + + def get_arch_material(ad): material = Material() try: @@ -1209,6 +1232,9 @@ def adjust_material(obj, mat): elif (uid == VRAY_MTL): # VRayMtl refs = get_reference(mat) material = get_vray_material(refs[1]) + elif (uid == CORO_MTL): # Corona + refs = get_references(mat) + material = get_corona_material(refs[0]) elif (uid == ARCH_MTL): # Arch refs = get_references(mat) material = get_arch_material(refs[0]) -- 2.30.2 From 
3648a9546e9ca5dc15c73588eddbdd08e6d35889 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Tue, 5 Dec 2023 02:14:49 +0100 Subject: [PATCH 28/38] Cleanup: Import_max Cleanup code and style --- io_import_max.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index 25ea22b..66074a4 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -813,7 +813,7 @@ class ChunkReader(): long, ofst = get_long(data, ofst) if (short == 0x8B1F): short, ofst = get_long(data, ofst) - if (short in (0xB000000, 0xa040000)): + if (short in (0xB000000, 0xA040000)): data = zlib.decompress(data, zlib.MAX_WBITS|32) print(" reading '%s'..." %self.name, len(data)) while offset < len(data): @@ -1232,7 +1232,7 @@ def adjust_material(obj, mat): elif (uid == VRAY_MTL): # VRayMtl refs = get_reference(mat) material = get_vray_material(refs[1]) - elif (uid == CORO_MTL): # Corona + elif (uid == CORO_MTL): # CoronaMtl refs = get_references(mat) material = get_corona_material(refs[0]) elif (uid == ARCH_MTL): # Arch -- 2.30.2 From 2007e4bf8a8714a0fd63aab3304e22575d01c70e Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Wed, 6 Dec 2023 22:56:30 +0100 Subject: [PATCH 29/38] Cleanup: Import_max Removed unused code --- io_import_max.py | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/io_import_max.py b/io_import_max.py index 66074a4..58b7fae 100644 --- a/io_import_max.py +++ b/io_import_max.py @@ -671,7 +671,6 @@ class MaxChunk(): self.unknown = True self.format = None self.data = None - self.resolved = False def __str__(self): if (self.unknown == True): @@ -701,17 +700,6 @@ class ByteArrayChunk(MaxChunk): except: self.data = data - def set_le16_string(self, data): - try: - long, offset = get_long(data, 0) - self.data = data[offset:offset + l * 2].decode('utf-16-le') - if (self.data[-1] == b'\0'): - self.data = self.data[0:-1] - self.format = "LStr16" - self.unknown = False - except: - self.data = data - def 
set_data(self, data): if (self.types in [0x0340, 0x4001, 0x0456, 0x0962]): self.set_string(data) @@ -1500,7 +1488,7 @@ def create_object(context, node, mscale, usemat, uvmesh, transform): created, uid = create_mesh(context, node, msh, mtx, mat, usemat, uvmesh) -def make_scene(context, mscale, usemat, uvmesh, transform, parent, level=0): +def make_scene(context, mscale, usemat, uvmesh, transform, parent): for chunk in parent.children: if (isinstance(chunk, SceneChunk)): if ((get_guid(chunk) == 0x01) and (get_super_id(chunk) == 0x01)): @@ -1513,7 +1501,7 @@ def make_scene(context, mscale, usemat, uvmesh, transform, parent, level=0): def read_scene(context, maxfile, filename, mscale, usemat, uvmesh, transform): global SCENE_LIST SCENE_LIST = read_chunks(maxfile, 'Scene', filename+'.Scn.bin', containerReader=SceneChunk) - make_scene(context, mscale, usemat, uvmesh, transform, SCENE_LIST[0], 0) + make_scene(context, mscale, usemat, uvmesh, transform, SCENE_LIST[0]) def read(context, filename, mscale, usemat, uvmesh, transform): -- 2.30.2 From bca246714dfc88c0c9f2d054014fbf2260504e7f Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Thu, 7 Dec 2023 16:37:28 +0100 Subject: [PATCH 30/38] =?UTF-8?q?io=5Fimport=5Fmax.py=20gel=C3=B6scht?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- io_import_max.py | 1530 ---------------------------------------------- 1 file changed, 1530 deletions(-) delete mode 100644 io_import_max.py diff --git a/io_import_max.py b/io_import_max.py deleted file mode 100644 index 58b7fae..0000000 --- a/io_import_max.py +++ /dev/null @@ -1,1530 +0,0 @@ -# SPDX-FileCopyrightText: 2023 Sebastian Schrand -# -# SPDX-License-Identifier: GPL-2.0-or-later -# Import is based on using information from olefile IO sourcecode -# and the FreeCAD Autodesk 3DS Max importer ImportMAX -# -# olefile (formerly OleFileIO_PL) is copyright (c) 2005-2018 Philippe Lagadec -# (https://www.decalage.info) -# -# ImportMAX is 
copyright (c) 2017-2022 Jens M. Plonka -# (https://www.github.com/jmplonka/Importer3D) - - -bl_info = { - "name": "Import Autodesk MAX (.max)", - "author": "Sebastian Sille, Philippe Lagadec, Jens M. Plonka", - "version": (1, 1, 2), - "blender": (3, 6, 0), - "location": "File > Import", - "description": "Import 3DSMAX meshes & materials", - "warning": "", - "filepath_url": "", - "category": "Import-Export"} - - -################## -# IMPORT MODULES # -################## - -import io, re -import os, sys, zlib -import struct, array -import math, mathutils -import bpy, bpy_extras -from bpy_extras.io_utils import axis_conversion -from bpy_extras.io_utils import orientation_helper -from bpy_extras.node_shader_utils import PrincipledBSDFWrapper - -@orientation_helper(axis_forward='Y', axis_up='Z') - -### IMPORT OPERATOR ### -class Import_max(bpy.types.Operator, bpy_extras.io_utils.ImportHelper): - """Import Autodesk MAX""" - bl_idname = "import_autodesk.max" - bl_label = "Import MAX (.max)" - bl_options = {'PRESET', 'UNDO'} - - filename_ext = ".max" - filter_glob: bpy.props.StringProperty(default="*.max", options={'HIDDEN'},) - - scale_objects: bpy.props.FloatProperty(name="Scale", - description="Scale factor for all objects", - min=0.0, max=10000.0, - soft_min=0.0, soft_max=10000.0, - default=1.0, - ) - use_material: bpy.props.BoolProperty(name="Materials", - description="Import the materials of the objects", - default=True, - ) - use_uv_mesh: bpy.props.BoolProperty(name="UV Mesh", - description="Import texture coordinates as mesh objects", - default=False, - ) - use_apply_matrix: bpy.props.BoolProperty(name="Apply Matrix", - description="Use transformation matrix to transform the objects", - default=False, - ) - - def execute(self, context): - keywords = self.as_keywords(ignore=("axis_forward", "axis_up", "filter_glob")) - global_matrix = axis_conversion(from_forward=self.axis_forward, from_up=self.axis_up,).to_4x4() - keywords["global_matrix"] = global_matrix - - 
return load(self, context, **keywords) - - def draw(self, context): - pass - - -class MAX_PT_import_include(bpy.types.Panel): - bl_space_type = 'FILE_BROWSER' - bl_region_type = 'TOOL_PROPS' - bl_label = "Include" - bl_parent_id = "FILE_PT_operator" - - @classmethod - def poll(cls, context): - sfile = context.space_data - operator = sfile.active_operator - - return operator.bl_idname == "IMPORT_AUTODESK_OT_max" - - def draw(self, context): - layout = self.layout - layout.use_property_split = True - layout.use_property_decorate = False - - sfile = context.space_data - operator = sfile.active_operator - - layrow = layout.row(align=True) - layrow.prop(operator, "use_material") - layrow.label(text="", icon='MATERIAL' if operator.use_material else 'SHADING_TEXTURE') - layrow = layout.row(align=True) - layrow.prop(operator, "use_uv_mesh") - layrow.label(text="", icon='UV' if operator.use_uv_mesh else 'GROUP_UVS') - - -class MAX_PT_import_transform(bpy.types.Panel): - bl_space_type = 'FILE_BROWSER' - bl_region_type = 'TOOL_PROPS' - bl_label = "Transform" - bl_parent_id = "FILE_PT_operator" - - @classmethod - def poll(cls, context): - sfile = context.space_data - operator = sfile.active_operator - - return operator.bl_idname == "IMPORT_AUTODESK_OT_max" - - def draw(self, context): - layout = self.layout - layout.use_property_split = True - layout.use_property_decorate = False - - sfile = context.space_data - operator = sfile.active_operator - - layout.prop(operator, "scale_objects") - layrow = layout.row(align=True) - layrow.prop(operator, "use_apply_matrix") - layrow.label(text="", icon='VIEW_ORTHO' if operator.use_apply_matrix else 'MESH_GRID') - layout.prop(operator, "axis_forward") - layout.prop(operator, "axis_up") - - -### REGISTER ### -def menu_func(self, context): - self.layout.operator(Import_max.bl_idname, text="Autodesk MAX (.max)") - -def register(): - bpy.utils.register_class(Import_max) - bpy.utils.register_class(MAX_PT_import_include) - 
bpy.utils.register_class(MAX_PT_import_transform) - bpy.types.TOPBAR_MT_file_import.append(menu_func) - -def unregister(): - bpy.types.TOPBAR_MT_file_import.remove(menu_func) - bpy.utils.unregister_class(MAX_PT_import_transform) - bpy.utils.unregister_class(MAX_PT_import_include) - bpy.utils.unregister_class(Import_max) - - -################### -# DATA STRUCTURES # -################### - -MAGIC = b'\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1' -WORD_CLSID = "00020900-0000-0000-C000-000000000046" - -MIN_FILE_SIZE = 1536 -UNKNOWN_SIZE = 0x7FFFFFFF -MAXFILE_SIZE = 0x7FFFFFFFFFFFFFFF -MAXREGSECT = 0xFFFFFFFA # (-6) maximum SECT -DIFSECT = 0xFFFFFFFC # (-4) denotes a DIFAT sector in a FAT -FATSECT = 0xFFFFFFFD # (-3) denotes a FAT sector in a FAT -ENDOFCHAIN = 0xFFFFFFFE # (-2) end of a virtual stream chain -FREESECT = 0xFFFFFFFF # (-1) unallocated sector -MAX_STREAM = 2 # element is a stream object -ROOT_STORE = 5 # element is a root storage - -TYP_NAME = 0x0962 -INVALID_NAME = re.compile('^[0-9].*') -UNPACK_BOX_DATA = struct.Struct('= MIN_FILE_SIZE: - header = filename[:len(MAGIC)] - else: - with open(filename, 'rb') as fp: - header = fp.read(len(MAGIC)) - if header == MAGIC: - return True - else: - return False - - -class MaxStream(io.BytesIO): - """Returns an instance of the BytesIO class as read-only file object.""" - def __init__(self, fp, sect, size, offset, sectorsize, fat, filesize): - if size == UNKNOWN_SIZE: - size = len(fat) * sectorsize - nb_sectors = (size + (sectorsize-1)) // sectorsize - - data = [] - for i in range(nb_sectors): - try: - fp.seek(offset + sectorsize * sect) - except: - break - sector_data = fp.read(sectorsize) - data.append(sector_data) - try: - sect = fat[sect] & FREESECT - except IndexError: - break - data = b"".join(data) - if len(data) >= size: - data = data[:size] - self.size = size - else: - self.size = len(data) - io.BytesIO.__init__(self, data) - - -class MaxFileDirEntry: - """Directory Entry for a stream or storage.""" - STRUCT_DIRENTRY = 
'<64sHBBIII16sIQQIII' - DIRENTRY_SIZE = 128 - assert struct.calcsize(STRUCT_DIRENTRY) == DIRENTRY_SIZE - - def __init__(self, entry, sid, maxfile): - self.sid = sid - self.maxfile = maxfile - self.kids = [] - self.kids_dict = {} - self.used = False - ( - self.name_raw, - self.namelength, - self.entry_type, - self.color, - self.sid_left, - self.sid_right, - self.sid_child, - clsid, - self.dwUserFlags, - self.createTime, - self.modifyTime, - self.isectStart, - self.sizeLow, - self.sizeHigh - ) = struct.unpack(MaxFileDirEntry.STRUCT_DIRENTRY, entry) - - if self.namelength > 64: - self.namelength = 64 - self.name_utf16 = self.name_raw[:(self.namelength - 2)] - self.name = maxfile._decode_utf16_str(self.name_utf16) - # print('DirEntry SID=%d: %s' % (self.sid, repr(self.name))) - if maxfile.sectorsize == 512: - self.size = self.sizeLow - else: - self.size = self.sizeLow + (int(self.sizeHigh) << 32) - self.clsid = _clsid(clsid) - self.is_minifat = False - if self.entry_type in (ROOT_STORE, MAX_STREAM) and self.size > 0: - if self.size < maxfile.minisectorcutoff \ - and self.entry_type == MAX_STREAM: # only streams can be in MiniFAT - self.is_minifat = True - else: - self.is_minifat = False - maxfile._check_duplicate_stream(self.isectStart, self.is_minifat) - self.sect_chain = None - - def build_sect_chain(self, maxfile): - if self.sect_chain: - return - if self.entry_type not in (ROOT_STORE, MAX_STREAM) or self.size == 0: - return - self.sect_chain = list() - if self.is_minifat and not maxfile.minifat: - maxfile.loadminifat() - next_sect = self.isectStart - while next_sect != ENDOFCHAIN: - self.sect_chain.append(next_sect) - if self.is_minifat: - next_sect = maxfile.minifat[next_sect] - else: - next_sect = maxfile.fat[next_sect] - - def build_storage_tree(self): - if self.sid_child != FREESECT: - self.append_kids(self.sid_child) - self.kids.sort() - - def append_kids(self, child_sid): - if child_sid == FREESECT: - return - else: - child = 
self.maxfile._load_direntry(child_sid) - if child.used: - return - child.used = True - self.append_kids(child.sid_left) - name_lower = child.name.lower() - self.kids.append(child) - self.kids_dict[name_lower] = child - self.append_kids(child.sid_right) - child.build_storage_tree() - - def __eq__(self, other): - return self.name == other.name - - def __lt__(self, other): - return self.name < other.name - - def __ne__(self, other): - return not self.__eq__(other) - - def __le__(self, other): - return self.__eq__(other) or self.__lt__(other) - - -class ImportMaxFile: - """Representing an interface for importing .max files.""" - def __init__(self, filename=None): - self._filesize = None - self.byte_order = None - self.directory_fp = None - self.direntries = None - self.dll_version = None - self.fat = None - self.first_difat_sector = None - self.first_dir_sector = None - self.first_mini_fat_sector = None - self.fp = None - self.header_clsid = None - self.header_signature = None - self.mini_sector_shift = None - self.mini_sector_size = None - self.mini_stream_cutoff_size = None - self.minifat = None - self.minifatsect = None - self.minisectorcutoff = None - self.minisectorsize = None - self.ministream = None - self.minor_version = None - self.nb_sect = None - self.num_difat_sectors = None - self.num_dir_sectors = None - self.num_fat_sectors = None - self.num_mini_fat_sectors = None - self.reserved1 = None - self.reserved2 = None - self.root = None - self.sector_shift = None - self.sector_size = None - self.transaction_signature_number = None - if filename: - self.open(filename) - - def __enter__(self): - return self - - def __exit__(self, *args): - self.close() - - def _decode_utf16_str(self, utf16_str, errors='replace'): - unicode_str = utf16_str.decode('UTF-16LE', errors) - return unicode_str - - def open(self, filename): - if hasattr(filename, 'read'): - self.fp = filename - elif isinstance(filename, bytes) and len(filename) >= MIN_FILE_SIZE: - self.fp = 
io.BytesIO(filename) - else: - self.fp = open(filename, 'rb') - filesize = 0 - self.fp.seek(0, os.SEEK_END) - try: - filesize = self.fp.tell() - finally: - self.fp.seek(0) - self._filesize = filesize - self._used_streams_fat = [] - self._used_streams_minifat = [] - header = self.fp.read(512) - fmt_header = '<8s16sHHHHHHLLLLLLLLLL' - header_size = struct.calcsize(fmt_header) - header1 = header[:header_size] - ( - self.header_signature, - self.header_clsid, - self.minor_version, - self.dll_version, - self.byte_order, - self.sector_shift, - self.mini_sector_shift, - self.reserved1, - self.reserved2, - self.num_dir_sectors, - self.num_fat_sectors, - self.first_dir_sector, - self.transaction_signature_number, - self.mini_stream_cutoff_size, - self.first_mini_fat_sector, - self.num_mini_fat_sectors, - self.first_difat_sector, - self.num_difat_sectors - ) = struct.unpack(fmt_header, header1) - - self.sector_size = 2**self.sector_shift - self.mini_sector_size = 2**self.mini_sector_shift - if self.mini_stream_cutoff_size != 0x1000: - self.mini_stream_cutoff_size = 0x1000 - self.nb_sect = ((filesize + self.sector_size-1) // self.sector_size) - 1 - - # file clsid - self.header_clsid = _clsid(header[8:24]) - self.sectorsize = self.sector_size #1 << i16(header, 30) - self.minisectorsize = self.mini_sector_size #1 << i16(header, 32) - self.minisectorcutoff = self.mini_stream_cutoff_size # i32(header, 56) - self._check_duplicate_stream(self.first_dir_sector) - if self.num_mini_fat_sectors: - self._check_duplicate_stream(self.first_mini_fat_sector) - if self.num_difat_sectors: - self._check_duplicate_stream(self.first_difat_sector) - - # Load file allocation tables - self.loadfat(header) - self.loaddirectory(self.first_dir_sector) - self.minifatsect = self.first_mini_fat_sector - - def close(self): - self.fp.close() - - def _check_duplicate_stream(self, first_sect, minifat=False): - if minifat: - used_streams = self._used_streams_minifat - else: - if first_sect in (DIFSECT, 
FATSECT, ENDOFCHAIN, FREESECT): - return - used_streams = self._used_streams_fat - if first_sect in used_streams: - pass - else: - used_streams.append(first_sect) - - def sector_array(self, sect): - ary = array.array('I', sect) - if sys.byteorder == 'big': - ary.byteswap() - return ary - - def loadfat_sect(self, sect): - if isinstance(sect, array.array): - fat1 = sect - else: - fat1 = self.sector_array(sect) - isect = None - for isect in fat1: - isect = isect & FREESECT - if isect == ENDOFCHAIN or isect == FREESECT: - break - sector = self.getsect(isect) - nextfat = self.sector_array(sector) - self.fat = self.fat + nextfat - return isect - - def loadfat(self, header): - sect = header[76:512] - self.fat = array.array('I') - self.loadfat_sect(sect) - if self.num_difat_sectors != 0: - nb_difat_sectors = (self.sectorsize // 4) - 1 - nb_difat = (self.num_fat_sectors - 109 + nb_difat_sectors - 1) // nb_difat_sectors - isect_difat = self.first_difat_sector - for i in range(nb_difat): - sector_difat = self.getsect(isect_difat) - difat = self.sector_array(sector_difat) - self.loadfat_sect(difat[:nb_difat_sectors]) - isect_difat = difat[nb_difat_sectors] - if len(self.fat) > self.nb_sect: - self.fat = self.fat[:self.nb_sect] - - def loadminifat(self): - stream_size = self.num_mini_fat_sectors * self.sector_size - nb_minisectors = (self.root.size + self.mini_sector_size - 1) // self.mini_sector_size - used_size = nb_minisectors * 4 - sect = self._open(self.minifatsect, stream_size, force_FAT=True).read() - self.minifat = self.sector_array(sect) - self.minifat = self.minifat[:nb_minisectors] - - def getsect(self, sect): - try: - self.fp.seek(self.sectorsize * (sect + 1)) - except: - print('IndexError: Sector index out of range') - sector = self.fp.read(self.sectorsize) - return sector - - def loaddirectory(self, sect): - self.directory_fp = self._open(sect, force_FAT=True) - max_entries = self.directory_fp.size // 128 - self.direntries = [None] * max_entries - root_entry = 
self._load_direntry(0) - self.root = self.direntries[0] - self.root.build_storage_tree() - - def _load_direntry (self, sid): - if self.direntries[sid] is not None: - return self.direntries[sid] - self.directory_fp.seek(sid * 128) - entry = self.directory_fp.read(128) - self.direntries[sid] = MaxFileDirEntry(entry, sid, self) - return self.direntries[sid] - - def _open(self, start, size = UNKNOWN_SIZE, force_FAT=False): - if size < self.minisectorcutoff and not force_FAT: - if not self.ministream: - self.loadminifat() - size_ministream = self.root.size - self.ministream = self._open(self.root.isectStart, - size_ministream, force_FAT=True) - return MaxStream(fp=self.ministream, sect=start, size=size, - offset=0, sectorsize=self.minisectorsize, - fat=self.minifat, filesize=self.ministream.size) - else: - return MaxStream(fp=self.fp, sect=start, size=size, - offset=self.sectorsize, sectorsize=self.sectorsize, - fat=self.fat, filesize=self._filesize) - - def _find(self, filename): - if isinstance(filename, str): - filename = filename.split('/') - node = self.root - for name in filename: - for kid in node.kids: - if kid.name.lower() == name.lower(): - break - node = kid - return node.sid - - def openstream(self, filename): - sid = self._find(filename) - entry = self.direntries[sid] - return self._open(entry.isectStart, entry.size) - - -################### -# DATA PROCESSING # -################### - -class MaxChunk(): - """Representing a chunk of a .max file.""" - def __init__(self, types, size, level, number): - self.number = number - self.types = types - self.level = level - self.parent = None - self.previous = None - self.next = None - self.size = size - self.unknown = True - self.format = None - self.data = None - - def __str__(self): - if (self.unknown == True): - return "%s[%4x] %04X: %s" %(""*self.level, self.number, self.types, ":".join("%02x"%(c) for c in self.data)) - return "%s[%4x] %04X: %s=%s" %(""*self.level, self.number, self.types, self.format, self.data) 
- - -class ByteArrayChunk(MaxChunk): - """A byte array of a .max chunk.""" - def __init__(self, types, data, level, number): - MaxChunk.__init__(self, types, data, level, number) - - def set(self, data, name, fmt, start, end): - try: - self.data = struct.unpack(fmt, data[start:end]) - self.format = name - self.unknown = False - except Exception as exc: - self.data = data - # print('StructError:', exc, name) - - def set_string(self, data): - try: - self.data = data.decode('UTF-16LE') - self.format = "Str16" - self.unknown = False - except: - self.data = data - - def set_data(self, data): - if (self.types in [0x0340, 0x4001, 0x0456, 0x0962]): - self.set_string(data) - elif (self.types in [0x2034, 0x2035]): - self.set(data, "ints", '<'+'I'*int(len(data) / 4), 0, len(data)) - elif (self.types in [0x2501, 0x2503, 0x2504, 0x2505, 0x2511]): - self.set(data, "floats", '<'+'f'*int(len(data) / 4), 0, len(data)) - elif (self.types == 0x2510): - self.set(data, "struct", '<'+'f'*int(len(data) / 4 - 1) + 'I', 0, len(data)) - elif (self.types == 0x0100): - self.set(data, "float", ' 3): - return get_rotation(refs[0]) - elif (uid == MATRIX_ROT): # Rotation Wire - return get_rotation(get_references(pos)[0]) - if (rotation): - mtx = mathutils.Matrix.Rotation(rotation.angle, 4, rotation.axis) - return mtx - - -def get_scale(pos): - mtx = mathutils.Matrix.Identity(4) - if (pos): - uid = get_guid(pos) - if (uid == 0x2010): # Bezier Scale - scale = pos.get_first(0x2501) - if (scale is None): - scale = pos.get_first(0x2505) - pos = scale.data - elif (uid == 0x442315): # TCB Zoom - scale = pos.get_first(0x2501) - if (scale is None): - scale = pos.get_first(0x2505) - pos = scale.data - elif (uid == MATRIX_SCL): # ScaleXYZ - pos = get_point_3d(pos, 1.0) - else: - return mtx - mtx = mathutils.Matrix.Diagonal(pos[:3]).to_4x4() - return mtx - - -def create_matrix(prc): - mtx = mathutils.Matrix.Identity(4) - pos = rot = scl = None - uid = get_guid(prc) - if (uid == 0x2005): # 
Position/Rotation/Scale - pos = get_position(get_references(prc)[0]) - rot = get_rotation(get_references(prc)[1]) - scl = get_scale(get_references(prc)[2]) - elif (uid == 0x9154): # BipSlave Control - biped_sub_anim = get_references(prc)[2] - refs = get_references(biped_sub_anim) - scl = get_scale(get_references(refs[1])[0]) - rot = get_rotation(get_references(refs[2])[0]) - pos = get_position(get_references(refs[3])[0]) - if (pos is not None): - mtx = pos @ mtx - if (rot is not None): - mtx = rot @ mtx - if (scl is not None): - mtx = scl @ mtx - return mtx - - -def get_matrix_mesh_material(node): - refs = get_reference(node) - if (refs): - prs = refs.get(0, None) - msh = refs.get(1, None) - mat = refs.get(3, None) - lyr = refs.get(6, None) - else: - refs = get_references(node) - prs = refs[0] - msh = refs[1] - mat = refs[3] - lyr = None - if (len(refs) > 6): - lyr = refs[6] - return prs, msh, mat, lyr - - -def get_property(properties, idx): - for child in properties.children: - if (child.types & 0x100E): - if (get_short(child.data, 0)[0] == idx): - return child - return None - - -def get_color(colors, idx): - prop = get_property(colors, idx) - if (prop is not None): - siz = len(prop.data) - 12 - col, offset = get_floats(prop.data, siz, 3) - return (col[0], col[1], col[2]) - return None - - -def get_value(colors, idx): - prop = get_property(colors, idx) - if (prop is not None): - siz = len(prop.data) - 4 - val, offset = get_float(prop.data, siz) - return val - return None - - -def get_parameter(colors, fmt): - if (fmt == 0x1): - siz = len(colors.data) - 12 - para, offset = get_floats(colors.data, siz, 3) - else: - siz = len(colors.data) - 4 - para, offset = get_float(colors.data, siz) - return para - - -def get_standard_material(refs): - material = None - try: - if (len(refs) > 2): - colors = refs[2] - parameters = get_references(colors)[0] - material = Material() - material.set('ambient', get_color(parameters, 0x00)) - material.set('diffuse', get_color(parameters, 
0x01)) - material.set('specular', get_color(parameters, 0x02)) - material.set('emissive', get_color(parameters, 0x08)) - material.set('shinines', get_value(parameters, 0x0B)) - parablock = refs[4] # ParameterBlock2 - material.set('glossines', get_value(parablock, 0x02)) - material.set('metallic', get_value(parablock, 0x05)) - except: - pass - return material - - -def get_vray_material(vry): - material = Material() - try: - material.set('diffuse', get_color(vry, 0x01)) - material.set('specular', get_color(vry, 0x02)) - material.set('shinines', get_value(vry, 0x03)) - material.set('refraction', get_value(vry, 0x09)) - material.set('emissive', get_color(vry, 0x17)) - material.set('glossines', get_value(vry, 0x18)) - material.set('metallic', get_value(vry, 0x19)) - except: - pass - return material - - -def get_corona_material(mtl): - material = Material() - try: - cor = mtl.children - material.set('diffuse', get_parameter(cor[3], 0x1)) - material.set('specular', get_parameter(cor[4], 0x1)) - material.set('emissive', get_parameter(cor[8], 0x1)) - material.set('glossines', get_parameter(cor[9], 0x2)) - except: - pass - return material - - -def get_arch_material(ad): - material = Material() - try: - material.set('diffuse', get_color(ad, 0x1A)) - material.set('specular', get_color(ad, 0x05)) - material.set('shinines', get_value(ad, 0x0B)) - except: - pass - return material - - -def adjust_material(obj, mat): - material = None - if (mat is not None): - uid = get_guid(mat) - if (uid == 0x0002): # Standard - refs = get_references(mat) - material = get_standard_material(refs) - elif (uid == 0x0200): # Multi/Sub-Object - refs = get_references(mat) - material = adjust_material(obj, refs[-1]) - elif (uid == VRAY_MTL): # VRayMtl - refs = get_reference(mat) - material = get_vray_material(refs[1]) - elif (uid == CORO_MTL): # CoronaMtl - refs = get_references(mat) - material = get_corona_material(refs[0]) - elif (uid == ARCH_MTL): # Arch - refs = get_references(mat) - material = 
get_arch_material(refs[0]) - if (obj is not None) and (material is not None): - objMaterial = bpy.data.materials.new(get_class_name(mat)) - obj.data.materials.append(objMaterial) - matShader = PrincipledBSDFWrapper(objMaterial, is_readonly=False, use_nodes=True) - matShader.base_color = objMaterial.diffuse_color[:3] = material.get('diffuse', (0.8,0.8,0.8)) - matShader.specular_tint = objMaterial.specular_color[:3] = material.get('specular', (1.0,1.0,1.0)) - matShader.specular = objMaterial.specular_intensity = material.get('glossines', 0.5) - matShader.roughness = objMaterial.roughness = 1.0 - material.get('shinines', 0.6) - matShader.metallic = objMaterial.metallic = material.get('metallic', 0) - matShader.emission_color = material.get('emissive', (0,0,0)) - matShader.ior = material.get('refraction', 1.45) - - -def adjust_matrix(obj, node): - mtx = create_matrix(node).flatten() - plc = mathutils.Matrix(*mtx) - obj.matrix_world = plc - return plc - - -def create_shape(context, pts, indices, node, key, mtx, mat, umt): - name = node.get_first(TYP_NAME).data - shape = bpy.data.meshes.new(name) - if (key is not None): - name = "%s_%d" %(name, key) - data = [] - if (pts): - loopstart = [] - looplines = loop = 0 - nb_faces = len(indices) - for fid in range(nb_faces): - polyface = indices[fid] - looplines += len(polyface) - shape.vertices.add(len(pts) // 3) - shape.loops.add(looplines) - shape.polygons.add(nb_faces) - shape.vertices.foreach_set("co", pts) - for vtx in indices: - loopstart.append(loop) - data.extend(vtx) - loop += len(vtx) - shape.polygons.foreach_set("loop_start", loopstart) - shape.loops.foreach_set("vertex_index", data) - - if (len(data) > 0): - shape.validate() - shape.update() - obj = bpy.data.objects.new(name, shape) - context.view_layer.active_layer_collection.collection.objects.link(obj) - obj.matrix_world = mtx - if (umt): - adjust_material(obj, mat) - return True - return True - - -def calc_point(data): - points = [] - long, offset = 
get_long(data, 0) - while (offset < len(data)): - val, offset = get_long(data, offset) - flt, offset = get_floats(data, offset, 3) - points.extend(flt) - return points - - -def calc_point_float(data): - points = [] - long, offset = get_long(data, 0) - while (offset < len(data)): - flt, offset = get_floats(data, offset, 3) - points.extend(flt) - return points - - -def calc_point_3d(chunk): - data = chunk.data - count, offset = get_long(data, 0) - pointlist = [] - try: - while (offset < len(data)): - pt = Point3d() - long, offset = get_long(data, offset) - pt.points, offset = get_longs(data, offset, long) - pt.flags, offset = get_short(data, offset) - if ((pt.flags & 0x01) != 0): - pt.f1, offset = get_long(data, offset) - if ((pt.flags & 0x08) != 0): - pt.fH, offset = get_short(data, offset) - if ((pt.flags & 0x10) != 0): - pt.f2, offset = get_long(data, offset) - if ((pt.flags & 0x20) != 0): - pt.fA, offset = get_longs(data, offset, 2 * (long - 3)) - if (len(pt.points) > 0): - pointlist.append(pt) - except Exception as exc: - print('ArrayError:\n', "%s: offset = %d\n" %(exc, offset)) - return pointlist - - -def get_point_array(values): - verts = [] - if len(values) >= 4: - count, offset = get_long(values, 0) - while (count > 0): - floats, offset = get_floats(values, offset, 3) - verts.extend(floats) - count -= 1 - return verts - - -def get_poly_4p(points): - vertex = {} - for point in points: - ngon = point.points - key = point.fH - if (key not in vertex): - vertex[key] = [] - vertex[key].append(ngon) - return vertex - - -def get_poly_5p(data): - count, offset = get_long(data, 0) - ngons = [] - while count > 0: - pt, offset = get_longs(data, offset, 3) - offset += 8 - ngons.append(pt) - count -= 1 - return ngons - - -def get_poly_6p(data): - count, offset = get_long(data, 0) - polylist = [] - while (offset < len(data)): - long, offset = get_longs(data, offset, 6) - i = 5 - while ((i > 3) and (long[i] < 0)): - i -= 1 - if (i > 2): - polylist.append(long[1:i]) - 
return polylist - - -def get_poly_data(chunk): - offset = 0 - polylist = [] - data = chunk.data - while (offset < len(data)): - count, offset = get_long(data, offset) - points, offset = get_longs(data, offset, count) - polylist.append(points) - return polylist - - -def create_editable_poly(context, node, msh, mat, mtx, umt, uvm): - coords = point4i = point6i = pointNi = None - poly = msh.get_first(0x08FE) - created = False - lidx = [] - lcrd = [] - lply = [] - if (poly): - for child in poly.children: - if (child.types == 0x0100): - coords = calc_point(child.data) - elif (child.types == 0x0108): - point6i = child.data - elif (child.types == 0x011A): - point4i = calc_point_3d(child) - elif (child.types == 0x0310): - pointNi = child.data - elif (child.types == 0x0124): - lidx.append(get_long(child.data, 0)[0]) - elif (child.types == 0x0128): - lcrd.append(calc_point_float(child.data)) - elif (child.types == 0x012B): - lply.append(get_poly_data(child)) - if (point4i is not None): - vertex = get_poly_4p(point4i) - if (len(vertex) > 0): - for key, ngons in vertex.items(): - created |= create_shape(context, coords, ngons, node, key, mtx, mat, umt) - else: - created = True - elif (point6i is not None): - ngons = get_poly_6p(point6i) - created = create_shape(context, coords, ngons, node, None, mtx, mat, umt) - elif (pointNi is not None): - ngons = get_poly_5p(pointNi) - created = create_shape(context, coords, ngons, node, None, mtx, mat, umt) - if (uvm and len(lidx) > 0): - for i in range(len(lidx)): - created |= create_shape(context, lcrd[i], lply[i], node, lidx[i], mtx, mat, umt) - return created - - -def create_editable_mesh(context, node, msh, mat, mtx, umt): - poly = msh.get_first(0x08FE) - created = False - if (poly): - vertex_chunk = poly.get_first(0x0914) - clsid_chunk = poly.get_first(0x0912) - coords = get_point_array(vertex_chunk.data) - ngons = get_poly_5p(clsid_chunk.data) - created = create_shape(context, coords, ngons, node, None, mtx, mat, umt) - return 
created - - -def create_shell(context, node, shell, mat, mtx, umt, uvm): - refs = get_references(shell) - msh = refs[-1] - if (get_class_name(msh) == "'Editable Poly'"): - created = create_editable_poly(context, node, msh, mat, mtx, umt, uvm) - else: - created = create_editable_mesh(context, node, msh, mat, mtx, umt) - return created - - -def create_skipable(context, node, skip): - name = node.get_first(TYP_NAME).data - print(" skipping %s '%s'... " %(skip, name)) - return True - - -def create_mesh(context, node, msh, mtx, mat, umt, uvm): - created = False - uid = get_guid(msh) - msh.geometry = None - if (uid == EDIT_MESH): - created = create_editable_mesh(context, node, msh, mat, mtx, umt) - elif (uid == EDIT_POLY): - created = create_editable_poly(context, node, msh, mat, mtx, umt, uvm) - elif (uid in {0x2032, 0x2033}): - created = create_shell(context, node, msh, mat, mtx, umt, uvm) - else: - skip = SKIPPABLE.get(uid) - if (skip is not None): - created = create_skipable(context, node, skip) - return created, uid - - -def create_object(context, node, mscale, usemat, uvmesh, transform): - parent = get_node_parent(node) - node.parent = parent - name = get_node_name(node) - prs, msh, mat, lyr = get_matrix_mesh_material(node) - while ((parent is not None) and (get_guid(parent) != 0x02)): - name = "%s/%s" %(get_node_name(parent), name) - parent_mtx = parent.matrix - if (parent_mtx): - prs = prs.dot(parent_mtx) - parent = get_node_parent(parent) - if (transform): - mtx = create_matrix(prs) @ mscale - else: - mtx = mscale - created, uid = create_mesh(context, node, msh, mtx, mat, usemat, uvmesh) - - -def make_scene(context, mscale, usemat, uvmesh, transform, parent): - for chunk in parent.children: - if (isinstance(chunk, SceneChunk)): - if ((get_guid(chunk) == 0x01) and (get_super_id(chunk) == 0x01)): - try: - create_object(context, chunk, mscale, usemat, uvmesh, transform) - except Exception as exc: - print('ImportError:', exc, chunk) - - -def read_scene(context, 
maxfile, filename, mscale, usemat, uvmesh, transform): - global SCENE_LIST - SCENE_LIST = read_chunks(maxfile, 'Scene', filename+'.Scn.bin', containerReader=SceneChunk) - make_scene(context, mscale, usemat, uvmesh, transform, SCENE_LIST[0]) - - -def read(context, filename, mscale, usemat, uvmesh, transform): - if (is_maxfile(filename)): - maxfile = ImportMaxFile(filename) - read_class_data(maxfile, filename) - read_config(maxfile, filename) - read_directory(maxfile, filename) - read_class_directory(maxfile, filename) - read_video_postqueue(maxfile, filename) - read_scene(context, maxfile, filename, mscale, usemat, uvmesh, transform) - else: - print("File seems to be no 3D Studio Max file!") - - -def load(operator, context, filepath="", scale_objects=1.0, use_material=True, - use_uv_mesh=False, use_apply_matrix=False, global_matrix=None): - context.window.cursor_set('WAIT') - mscale = mathutils.Matrix.Scale(scale_objects, 4) - if global_matrix is not None: - mscale = global_matrix @ mscale - - read(context, filepath, mscale, usemat=use_material, uvmesh=use_uv_mesh, transform=use_apply_matrix) - context.window.cursor_set('DEFAULT') - - return {'FINISHED'} \ No newline at end of file -- 2.30.2 From 81e5d6f63f02f1c7820bda3ae4c1fcc80d93a4d0 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Thu, 7 Dec 2023 16:38:52 +0100 Subject: [PATCH 31/38] Uploaded: io_scene_max --- io_scene_max.zip | Bin 0 -> 51329 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 io_scene_max.zip diff --git a/io_scene_max.zip b/io_scene_max.zip new file mode 100644 index 0000000000000000000000000000000000000000..c33d151062f3c737943916ce883a4ce60cfa257a GIT binary patch literal 51329 zcmeHwOK)UJc3#VpydWUJ5Cp>zytu=5iOFsb$@ zr;?dD`KThNH53Byi4%`|^!4xm!JmA>&)?tv=Cyn=|NIgD=Dl>-&bnE8)4KiQ2%s}kc>Y(vdA<2} zfBg@C^S7UT!vFrX2%6vYdxH^cRr`0JCMQR`Zx;9RPPWtQ-wpDs>(Oa;JK9JZwZ>9% zlAX7PqrBBkPTJRlR`+7&(-~9cAV=fFY;pf0>yGlvJR58zUmYDRHmbG7-e9rQ8f62( 
z?n`S)K1_fqyGVN7WIW8fS4rNz>=&u_6AwDv$dOSjYlXLzDt&NPm<@Y+eAB*4NP0lx84Yq7GH(!WP{FKa@fh((fz~p z=wO~CwQZM^N_!qXuB|LKYW4Nx=sNG@{eG4lw60ngSsPSTu1BN(aN~*qmez|k{^0;}DK(!`6Gc(V(zS`eOHk0$YZ*O;N-`?)*eS2H4e|y`+_bvS0 z#((uK{I1WMKjDGcR9!{o?TT3;V2gySAqv(DNpQ_9X^jj}XVruTGjfr@&UbHP5$_1ZuRn zI-kJw@|*EZLWr5&{ktiN1YldI$uMh=dIQ)Fp5XTk{wr4A)yhj@ z<+7^H7rTdhJI}WEUxY+9wRU4Z$zXQ9OSJPoAB@JWP6F-7S~p4ix|Mfl_Kus)V#|A4 zw?3bYyRA;A*MhG4O|E9rDc)#&VPX(&Kw^eo&x&tnBrKO z03P&uBh=^(V1hF9b~krJ}kd7_wUYXi%;KH zKbxC}gpam%zD~Cf-=w06n1rO^*)uU#JeO)%WW4mF1_)PuJIKjpiEr zv`5h7bq?EozIA%M{|4|UMi|XzV`+V>USHo^Yu1EaN4;S#&hpJSKezD5hl+h^>uGJd zzPh$lUs|s}tuGP#ac`u~HEiHy=Rg~1N(0-{z)r2s3McSsSwig1-Tl+_dGq9%vQ$l_ z)?8lRtJk&#dGjJ4wax+lJR4ps{L$gTHv(UO+SpsIJzcMF@2xK_t&2W7_@iFuZf583 z_%MBbs-!G0uRmR?UtFx!&a>5(+A2xe=?!|_*7H$kX6tz8nFrmfuP&{wEU(qJYb$F@ zq8M9)_B8=t9dCW(0M}|u%d1aUc9zz*_m=h=qL{CaTXzJUIr)13=xA&EfC}^7Oo9*8 zCiv9hH8zsDofZa-*4#oVM7<7(!<%kCt$YdP9I%pIh_^kO0@kRxhLY3P;3}I2xJHz2UUjAcJzWD(vxAXsI0fYC)2AdTAGD_+!RW}I9rd94rKCKqH%Q8M z-iPTQiZz$Ot*@;%mX-mxJHENO^8hQWjmCCuclBwlUT?086kxcm;n0E98(Zs*#@_bY zDvV=wd6ne8N>0Xu%U0WWs@~jQT3)Kv)|U64E^XJ?@1xdWl%re8t33PQ&^4MnJ6r3^ zwZ{7P-fDAwm!$UsE-!8GZr7T%oz=#AeYv*B=BP(UomK#}+(|m7;`w*)>c+m z_h2FGa3#BYI{*sF`1K6T>c!svSJ3LSw=)<6tyjaZ-GhU4cmFs&*gv7eu;u&5OTnX8 z@R&y@r|HjL(lP*ZveSIg3;ihSwM-+X9-kzj ziGQnU+U`J)(v-sulozAVFq5LQ+FL{@>{(u46*LPN_b!LoXtNe0G{b|A1(n$^pUo~P z1_j^HKgs7w@2?=>HfrTz)(a)P`Y! 
zh6T)U#@*3o-6(QVFQn8nWmuvdbI|9x?YZPLMOCILg1{J^sL8NgPdftM< z5f>6X!Vw`8Pz9i<5d1TMlTOw(c#=&_##W^aejGEQMDSNxuU|Qz{kAr1+6o+%L1y%^ zQki{HTYt0oq_*t-X}oz-YrI*Ug=V9$@n#k}hFRGYX^5EksZ??}ZfS2Jsm&wU$LL^w zF2_Gt=jW5pqspT(=D?MkR=<+3!v@Kkv-OSo>f3pICg;O;F1-Kz=Ed&BzB0+ z)}Bv(x|u|iag8h3!UQpA8=jE2!hDYCzQPA2Q<7dkK+i6o&!O32a@qHQG4_6WC&@9x z4v@%%ghyaB?Q}5CO6V}9do550$x9T%tksFkrYm``x=qKuf9S2|SOO#%O&5`hGLJKH z=D-XrR0_tzo=_x2yj71kj*4ak9by)mu8F+!L~;8uq{nm@VPUR?s;mSLcc0f<#YT6T}s1Z z9X9pF7hzRbSK>MeP}v+mNbn^&lOo7aULhoP$OGC5HyYfFJQap^RcFHT zlijwn{%8`ZSrS{ zP0vL1DCL^GXbuLw!9$%el%LPeR)2vRp9B}49%uuFOdzpbN-@T0>hsUp28-2D$s2yC zkX4aAV$x7MVoNT9x<*~+yb&SJkF;Hg3(am1T8$}rv+Fns=c#k_-rz2g=b}$OB{i_A zCTCWfv*e7b`Qr4LS&z9dSC@y+wzv29_v@>}{h$2|U*?293uF_C^~Sn9H-nvt#gTex zlLeIm>Uj=<5sDe9U1Hcsr48LJ-U{r$% zp-9aNj*rueTvYG7U&m!*X8Jc1b$Yf|#lbjVNbDH+a2w990v{apgUKtx%@(g(dM<_I^FagA16k0{Kpn?m?1VghB zwpjUa7@+BT5hXuOR+nSjFG6er!eMZQSU$L{uaYE-km_)@A%j)ez+%#vFDM|JM@^%% z)f9^|T$d1xDeTJQ&}ZIreVX(Uc#bM_))ORHFxY%@v5`C(&M_q+)QBYwvVMSw(ObX; z+q8G-G}o1Sql6U{gzcvhmqT^Gzca*$gb}h-zA2B;9cLq0E%m-Np6d<>5yF_Z!b z>N3agSnqJfqS5%&vB`o5Ea$xIKG`Gq@}h@)3bHRWprDQzbgYvPM-|_5OF^KVR0L9C z*=v+G?uaeKqRNE<2(WgsbSdmYgGQ3ppSr$(fS1n_5BZbL#4nP?jTL%PHF2A(>$^j> znCw!FjWDh%2((=>-8ge)-UG5qxt==U01-n&_Z;a?)@BG{v`AOdQs`(VdKAPBmUIrD zG72W(ij6MgUI!eJ@m?Z*CsOk`_XAqR5sfYpdXO6xpD2VBDkLh&FNh;{x zB*U~zMkOrUQ>qX`n}HZX#kCXn0GyGMDubPv6J&3pX+HAc5Y2?wnP8uS+o&aOoMRuz z{2=pl1>~cuJPOpF&3aZFEfx3}pM#Nz?#!cq3sME%S&2g#3d++eVN{G<3rln@kb0VC zzjSld9;Uj3K-kJl?FdPuzy(Gk1g|=2+VK2}lp5tG<4|7RfDIl7qEk*ELRa2LO(AJOR!W8e80J^q z78c1EFiv(!2+*(z>o55wgSQ8uH1?bZB(X`VAsF1O#Hr=5)3|H~DiSr7m?+SjMzI3j zOM`||)0*iG?o|_$8Y->b9p6NXQm%*!W%Xn_jMtFqFa=dA2c&rq-e(u}Qlf2Xqr8+G zWy*N{p!!&VY1)8&VBKM>%?&9~8KbT#m~$>eQlvf@DXn9P$4PMRYwmz&)NS-Y^S8jq)kuLgJ>7nENtMHrzyVyx-vtg8=8ao6Cd~ z3JD&&I@}y6OW7|^_ZI7`2Td&LDHcB}nl1zwd{2ZQq*qh0bjLxX;N>Gy_s%Pev$f23 zL->!HnUCA@sW_k3584eg^l`;V4NR)uRSZU+ntLcRMOIbU9FWN#R!^GEuT#YAp3^SP z$JY1>P+0_{tkVf%rpuhW*9w_p3mYzQ5S=_v1EltAOlHa;(aZve96g{h#e`Z6V_a$s 
zXV)OKS({Nt!ohd=@@AAeu(>bShsZ}hlh1+sXl9b+y8%a&6SDEG>XMNjm&dO)=#2E+ z*+ziF&reE{eGblei;_H+0hyLt15dnvFR6avgM^Y&AEA^KT7Sp!w4_)(^#d^071vAZ z+p03>LPaB zOlYm}2PV;|PqEzHz=ox`f;M8JyNJc)AzI=ZyVhSwmdr9TX>dH4#1QdY&=m0m&ar^U zWF77FpMkwbmr*cGeAo(L=~`JW^y=PZpVek-n7*!6hcdG$36mbtim<|)O?gw1)qGq# zA&S6~K1~jgH&Jad+rK$mlYPS8K1Bpfuz*m*Q%|TXbkjvmB2DL_lPJ(pIiQE~sMIO> zAWK6*LI$ywR_s?Ni2ge2CTIz($6e>PURQ6jS7Ak) zUiIA!<_YQ&QaQ+Cvfz9a^sa@4-OWS*stPv8y?z4aD4;ps4XX?iOcn#Hy65@TTue1^ z3C!>VHureNH32QV0aIK> zS%d(#XrdZQH(HJ{zaM*~$K3Qo$wx+AW zp$dF5IkF)u)Si-S#U@Z7p+&JELN82J+N{*nz(aDQ8k75c1T6 zaa%E}QKCfC>P<>aFr94_SrEiTw#}FkT5qD7V+0J0J7BCDVe5^Kc&I^oW8qsU%L7Oc zJ8@*KUBQFJAnKbLqy&4wRudJ4ky}~VBKxJh-yygo8{`qWQ97qmJj$hefJ}T!AfsTK zi?q5VR2NGfdL1q_6fZROS+eZF*u3RM>M|`=(`}YwT+lF^)MIe(;UH>?adeXlHuJhW z(5sCj>OR+VKe%aIG=<*{hXTP^#bsk=*z!`INN@;AHZ3AQ3`R2NGu>ndo5cn@$Ca`z zZDxmroOIL8VmJ(J4j{8DA@4cW{w?G^^7kuiJW)ZL<@)D<5T z!%z&ujL=d0ZnBsklA*3va9A!oN9*{a;Nj#ts$lI10++htm`XOFSu|7s76=NtC_XhJ z$OsG4JrQaum;FuYwC4)t+a7I(QeqQv#%Y2Y-QK%d)7rYrIKV8f?Gb3;=Y1{wKyS>d zsDG>jh&T~1p2`W$qol)5$`8><~m!_QJ3=$reb&|o0q4rT?Eq| z@{Nldw6}1vpH$c~D6jom9bzvPw(NZo3^u%nAk=Ty8wQH9QGoLp(@tzmguwO_PIQQ+ z%B?6d1nGcTqCj>T*sT?u*ATV+Jz0qrCnqw7R-kiK8U>K{;UxX=M_;5$WTqIl4`E0K z;_bj9YYPt zmM9bol<%g4@qq&#*V*C?E2`}n)#QuU`YpLOLUU7rwu|;ZLfzTgbal7Q6%R!iTPdsT zeGtw+8|R~N>Z-ApLVwL&=)52#(2eqW2LMyY1m8LblF+xQc(xa1PEOY9hLXRc?R{8u)s6ioUycw zn5uTWT&vajx3;{pDt~bi%iDkvt}krEaE$%;y_H=F>UzQk8fbwC@pok!LUZhS7F*+o zI6go3rJP!pBe&cw&wPorl`zi*WV9Lc=;8KPv}28x+LF+*Qd4?XYAgD?UVn%#J-h2E zlcTmjfHK4&V8}|{UQl4wK~#a3%ib;|v*0r>w0Wpq=J9ZXHIc0NBY7hf)UaAPMZ}f@ z>A4WVqwUj>mu?I31|E%dPLbG0YrZzH*x|Xz{atmpvG+BEBGfp=K}tP9iEE;U<8$%0 z|0KL`BuDIKK=Aox0t zSYs)5x6?7VC++Vp;Nlis5VpVT{MqCz>0a+v)P)BE2K6^qS9a}|)Q7djr6@i&E>(M+ z7MK3ef%@@2+X!&!>0?wPkV?VzTQqBrLSq?@+W_<+x-#7M+8gGUKdWvSjWH&e03;vs z_BA5Ec4v$W0E5tJ_;In)0?&cn9LvG(%~Eec1+#R1pD-#iWZwwv+5hlCL`qBiF(PpZ zkslj|`;o6KXvR^v?}g38zZwh&0TOSCk!2kdQ^?RBsEtJPJmBFXdO0qD2~`}lPy_WK zfd&Uq!6GWlbkiMyb~j>UB7>2ldHLbHC+ql<`yx=?QQB3YM`5 
z%xjdGA#kw4?Oc572OUC7Si$QvnN_Xbu5Z`(3XKzLl7M@W#)uI?RBE@|dcPy*Yg;u; zRWWZVX^1t=tb%vs%GCUZ=c}xHmZv;_{Ca!;^d$Mq=+rf9!!={RImwkT`5@TpC{E`b% zmoBESXN<0S8SDiv{^;@yBd$|^ue&Mw9V`y?d7tVf?hKhej|r4UY+I_7FDnocOT2%V z8kcgNuamMqpA?Q+_zDx><`x`9xCib@g1xjD*0C?sEH7Mc zA%?L*_H*88pSN&W$i)Wn`CIG+QHU_a4r1=di`VEhF4l1@XI_up(;W0dB3CUI)k2Jb zgA5OD3xr32;UGc)Ikv`~Q85;gTBsc@X3>91d}0~HhJwb)SX)tK;0bXWM;r=+Z6?HC zsBe_VkmahG$EGvx4q7go;w}{IMKWlBK3HCsl7&QW;tSf5$LO5=wVP|MI=ypPq`&W0 z+9tUQuEVh}M!l+?%!P4;p|l+EOmM09F@QQ5je3;oT4II0Gh|P$E{ZTb0C_K-3H_4A zWRCkf=avi6gAk216*SoC6R?M<@(WyTZd{(mdlGI*wJODmD9XD?)^HR`z#($ExK$9- zQ-y#b(RWxVbAhn$NJ+m$=O(&kuYC7pNL8qPp)aO+ViD3su;tvBpkA3{kQlX)tCa0k z$xbtr9WvWZR6ioP619lR&w~2*=>(F%Wi*8sIx9&RoXslIQ2dP=lC{!xtL)?yL~Mu- z5tYy!7N4QkeWYHEc>yTnPs6`9E{O(FBqwC9XCcZ1c^R>=mLx{31_xcJN9_kJ3^Z%V zlydr8C&ldOo~BD?X8!}&fkTjDAQbB)OOVv}3QZfFSbsH?&2k1JgRe#CjYgMBeUwZ* zzS^LynkgP*MSfWZOqIAU>~o~F20q9xGbHiaxRW?!0vppv0QDII6Ke@qA~o@pl#y4Y&Jc}u%%yhsQM@vhV&^~omim$zo+CKuD~0eb zUT`@Fm=YtW_J$BLV_O%`V}*`c%2{E6OD z?-=Pi$)r`7rWL%hp5^91A??-zlo@j4JV#7i1uPdb>TW?HSxF6t7`zQf+Lv*Hug1|d z@dQujc8GTuXpwHSti|MHuDa9Vttqq1t6%5}}$w3mE$9b1+Nbn^3Io<)l`*MNwbfiqE(+qupT; zAvA;!rYaB?ig&I0JS-L4veWtyZ<^T0~!Hm9P?sSx`Sh z2>{cqc+VLYZn+r0o&5^$ZCl*Kj&gsd@UrtBh;02R6kDq|))XNPm;*I{3u9O0E3BOx ze+uXbANJJLE@FW^s;uw3xuM%+NC$a)T;?OU!D_Vw- zmyd*VOm@TYCU|R;GDtY1uV}IqD0|3fKLq-gY?MuC)9Fxr?~SL5t|~WhmL?xViO^G| zl_Xp$6T0+}0hP(`vo8W12!&yU(Tsp%g%Z6bt&;WV*dWULAt~!{S83P{M~YVl0;)oU zhe!f42*8h$Y!kwl9djtvX$VAyRIrO9VU67BCmh{Xg4og{A~^-KVWDG6pc(!JU8)#I;oG=~$9@9h9LFfa2(fOgaR#}(+*raL zE@G0WJKKpx?g%0B05QH6&1k7G9VoN_lV1-_{>Ny7+_6Lz+miB-aWg z+b%4Dbk}r|)a~VM>>4!0T0)Zp+yU#V2mz%|3)Fke1fvdeMT$O-Ct-m%LZhg}<%`F* z8bx@xYL6@;(-=G6uBSY8WXh}bh|_)o7Z2g7W=lhU_DhWs>M{rJ0%TsN{wmAxBCT_G^ME@&0&hwAU5$~hQ44h5|0oC(<#wP+|gs1%)exs zLYL=`33CrNGf^iLKWGoF3VfVJiPQj_n(3HVykY&k*Sm?xx~Iv10v#}VjF&!Y7fz@H zMdziwD2`RwuZhUg^>uR1E>JS%DQ>?mAA@)E))iO0sC;J|a1L~9%eTwMp9P3?)Ym)m z@va+=;FbQMZH#0J`l7~ae_$3OVYh{rL>9+-pq3s|x!hpPw*V`iZOL95oi0_fI;l_nB-c 
z8}vF3`#E3Km|_!j+Un*vrlBBAB>OQoIe0qGNvIghqiGi}jJ|$P7D+XI$DVKMVPQx$ zsGn<%VrRATr4Us&mj5|5TR31`_q{bJ;ahapp~yqD~c-S6r_+=9=SyBDp&h4-6uIEDq* zmqH7jYlsq9Pg0j?Xm#N|2nAqcOEnWSLY&)VZo3okpKPD0~e60)0o zILzPQOT+rT7;p`G7jHPkYodh)HRA#swhI_oe2$mYbGIZl;%xaX=PF0mV76K29noaR zk=+K_WvvQY;r;y`hhgHP#JoL0wnwmHs4nJ1-R*fs>8;)bivJ$xj=j>q$19w4Y_Xu| zcR|G>&9`9F)dVP|lc9dZwMuovRttGks8%LUTWu+jV_XeV9O#wg5kqaor&BAIDxj%W zUz=L3TmfvoP-9~{OT})hSt>t5;O*W3^DHm&n^EWci5v)^6sYE+5$8DK9>h+=85;cZ z05KPp;?X)0S6+UgazRpY<@Ek*l+UaCuZ1pyduBKry3EukgX7zMAqdIgu`5zb=QzV&{_9$?5@ne zI2oTW9-jXK+kIV)kE5lOtuk~ih6#g+oL9$N-=xn^O#w2C=cpJZ1fWZP&uLf zky^`p?btV&#YaVc@<*Pd;&ThNnVk3UB;duHe6;4U+RZ-5)X(nh5{AWc1CC-X^A@N8 zdt3$hPp-{jts|UgY3F#E^~Ltd?%wMG>kil>OBklMLo9^0F8CTgz4jRI&lqMYlM_R| zG%;ec!(Kga0ZbxR*ja!Q_sdC0s%6X=a!@jzmSn}8AAc&fD&DYd{|TI&QCjT@E|1b6 zHN|_9m>3M3b7ewHX?qp~aYY505dQzcrlz$50(b^s#2sF7ufp-ERxPBgJ>Les@zpiL zPEIPqeuU4Fyv3S$j6MjGYt`Tq^O#dMI4K|x9>-X10az2{i({NRYU52YR@yO&U{Gfs zivOR=-CRSINE={vKJKemm%%wrUwDVfOfqQ&RkllH0@URzE|y1YNjA2B|+{YNd;}=`vYF zA{Mv@5QF9jd$EBVHsz-64{V7RbC<@Y5s>aZ2hxX`x((TbCQx)y+;3Q(p)rl&RA;J3 zHe{omP^X<|stIib`w!3=Gtg5DI=xq zDcP)`m$$h$eoKX*T{hD1b?)R{XtLj*%ssmdFRY9S!Eq_cM43vyIpJd!Zyy^GMYVNt zAt?d&lkZOmGEWLAT~;vkOnO;FM#``#qR4D_i$JbL^fzlExa5p~w%x^)EBTfW%(*+;+>f{2YScNL`<>NpV7m9ogccMb=uIo!gCM)4u1Kbn^avcR8 zamcEuB72?P;x*nl*a+oz@+8g>tI-9&cgf4|=u^FkqR!8ho{Ap#$&ee>^_? 
zDEbV$$Y8tbEuK&K@%~H~0Dwf}YNrau-?$5N!r#UL;Z5UaH<|EJRmGhqGj^=Uc%x)w zaOT!Hdfm`mF;Kyo)xZoVlbQh@t?8M9%|PWTMN(=@@MaKSL+Q4fTii^frPlMhj+?yd z@1py^s~*`sVuthV8q{0%c=_?5-5Ym{3=X7Wq$*6~J9&Y?XhW12dW_VmtbHOl;=x9q zi*y>wqWiV0s1=H=s=~Bd0Nf0}iOC+$)_Qa{-K)60{)j_5UdV1Y&6|0hteAwl6<4Fb zso1(W*o#~$?#bc}`+7br!G2m{UHAJd+|Rj;1AE?mvL>f1Gq8lz78}B z4FuZpIQ)(K@o$yjFN&{`;F5ASi~u&w^^wVT1OT&D9g4#Mf%tJsY=^wQnMrCUBWX^( z9Z)xz&dKc%y1(?>E@E2*KpS4TPcF<3Q=qe5G9jyt_^9}_q97qFM44AoNQ&CVNgVPR z?D!yx!<+$0e!7_~`vHBb5i58Ug&R|qDuwZqOwyG!OTV!wv%F@pS)&SkE~^T~-tBVF zW~|%V=Au2EVz?sJB93(K(&au@;lZtt8@OQJ9l&9DBLEC=6w1g4EjE}I55B|`*f`CFWfoVXn%Pn;@~CHzp0Si86)$THg} z%TB>u-^k529p@vo=S4131)^QtMqGA^WU-?|Ml;LD`^WI{0X-~weKo&FFkbL_mtaY@ z3{gCh)mRY24Kpv#E^>K+7(&+0ou4r46xnaVka;o!X3Ue{-xynN_wXVoT8}igO!>>V zRrl=$zqu4NTflE1jW$=;*7ur9sFK7}7XHRr%s1$&j2t@c0gLUgz92Nes4r|H@HF&n zvi(9mTj$QLyLi*Ksuu19k;`&QF5e`q>owovX##z%OC|9VA*^PK%P?cJxHA<(^5dE3 z@kKyW(P7PE3ObRBr&P~6;4-+!(YZ8n0ji@@MEiZ|Y+$!s)FYy!gss}zkaKdI&Ep8> zi!Dl8fY#U+0$%9u58sDEPHZZ-8{1`VKo@Nr>;*MZuZ@%j0^F*n7>^;F@|+O4q5~v! z-66BX_;Mceg7_qyk$L^JfQ_ag88033+_+rLN7>ELUbw-Q!;iJdzr)Uxu_cgH? 
zk#7ku{H0GUCLdkBpck;R@6iiHK+=mB1r7N}=>^piV}*?TR8N|DnQ~Gw(MudI$DaF4 zG+97PY|Qp!sy1?XK!AZMDy)%T98U}UpZKZD$cCkUoE8==CZ;enOc_MEf7Bbtv_aH# z?W4xSl2J}wg&t#u!ywvtfPGqD_G*o_tlXRV@F6?DR^Lh&}=08<;RI zsAbToq)R-smFeRgj7i;3HJ|&0gw#Cm`mjp^Dja!eRshOk94;0pnt?h%REKZ}(-hGfTfto%{P8a~;r|N5ownIPo@*p`etH@GacbH@%&;I~?nCX(@ zdrA~{Wbs%g#yLB2!oDbdoejMNZ6&&tN+{sds)N%nQZTtS-nb{TK*|>V#vr0mX4(0; z!5OkkC+DO^4z??VrZI1^+b0mP7NyWc4js}9v&bT}(P}9W4;IvAp`J3pztxN0C<#{rJ`Wivv76%}{cOo*y@Lk0Y>sM0;sGn~P0lafpC{M~g>E@@F{2(Jhcks&2+ zuiW^2u6ok$n%g!IzZ`?kY_J(fR`mKKHO&)*t@0r&yuMmO9^ZG>Mff4P-=ks?geqvt zzy%e|bA8{Whjl(dxpd!MKLyZfSGL4p_g$|NL(1i9@?ufGd-V>xz>3$F#PAPaa}vWB z-9r)mWB4u@W7lS4sXg4=glju7%TGqV>3aoFvH+*6T6|r9^Akq`_h8h7dNbp-^=m3a1R@HyCF-W zp4vWFv#}2uR2h4+I|M#p=R)s8wLKmT@d9!=r15%d|J1y;P^}YM1+3A2BG?cQXh($O zfVUe_qISbsx6daz+%c>hT|@@d*4T8&w4w+%$q|q!`wYN4i4^3aVN(z_$m5VzO7lJZ z*lq4@y*waG3`uf8|GT-p{TKTu&zie)zn(ez`uG1}<`aJA{{3%W|F8f0oB#BOfBeZO z{O@1kS>97e2oi6KWAEjxvfA?u}a)#FXtLxDzwzX^|jap*~FEGZQ z!%^PqV%WJx&~q{K>5QpzfUP~ z%v=t7H~f+U{h&3hf~4^%@8C44!c?Sd>N}og9q6m-TMsWC#@#jCDUxEw=brQUK6a6G zF_#%H6enBb5n{99y975xCly3dxAU{w#%yam!ooRW1AgY%{<4r9;b0?XMOkvty22oU zwb8!-SCi+}Mm(~+g!6Lx`dCpMJ*ncEYI_k?N3KeTYWM)!(U>?7++((#}ZJ_iBW6+Em* zzu3Ixs6C5odNHQVLLX)s1d-N`%UT+Z&_B3WUfLzNKA{qq)M9Ahs^AJc@q4}o-Qk~4 zDpN)D2oV{&b+I}34aRACGVX8A{Tv1-V})TH8?JB*l-y!kJ#;>GUsPDN)_6C(20@=j znt`X%8EVso-708kgqny3QFk0vr*)q3IJ-p_XqQFQ6A3b_@!j0far2~kiayM}e6f27 zw;(_i4;!QD5tnZPC9u#NT~T0dt=t@X9ABkENy7>ySZokySvR7bH83N4k>cN z8%bnUYS$DFN!Xkfw`~-MPXjG9g$cGGVyKcUmT=sQdv>|kEU1Hft2S|M6`XMEb`wW@ zYy2TyCB|Xz5@#tai2f>rvmNAU=PIi&4!!wC68eo|cb*%cACeb!$!G!JcVWkeAvQ+H z3os^oW@%#Xql_psn%qy*%U8*Bm~X(aQi`Ar;{o36#$~^}i+&9G>PH%W3Ck^EvXWOu zSva8GPu>=z1RkZs2H0Kp)~O%5O6+5~YTR*;TYQDQ$k_BrZ(;AW@|raUq_ zRqa@0EE3w~^a5)w*sCndqjFt$Yna+cmHeuU4X2xx+0d6DsM6o#KEDQ*HXAG+#gWIR z`W`>3RY+HAy^}V~anC%#oPy=>RlaE4UnpD{LYuI(p3O!r&dQagGO=Ow0(Ps%U=@>v z^SrgbfB~!JB&NF|9`6P@I!#R@=1paRk3DL2GkYix zPIK7DHs_S?7k&)k&p|WYK0bVX(mb9MNdaV#T`^R!FsFxy2Ps~&e01Uqr>CVbnE5m;coNf>-6wchDxpepj@yow>ReaAiAW# 
z9Etj~O25d!3abTo5C&c_4tTeDmReH=$d~?L_CYS_MgWKQM`aG09`rt-5kmuhRItDK zs#`c!qi_-+2J@oX(C1_F#LnW=Wt(8^LT&jV2;TbCzKEowhX|N z&7C~k+Qm!0)6?dg)0f9hGq>^Re-P1TOn4;CFJDbkYcLpMzdAmAd6d3kaQzxU7n?I(XC$)mr&{mpCnV*dG~ zPyUVh_AkueD3vtxNg|Jb4_T)F@!$XWKm5!2|M`0c&izsJnwvWRtKYod{JX#YhrjvT zPd?#)f9lW^&*%6rLT|!CYArHIL0dohkN@YtBoDMTnFq67VO#$jtw(JIS+TICf4%kB a|9Q5wr9b@VK=aA(<9{pY^Xh-&um1}sBeOpM literal 0 HcmV?d00001 -- 2.30.2 From 10ab416b297fb74c0fbf279d8f7bca6453f076d5 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Thu, 7 Dec 2023 16:39:07 +0100 Subject: [PATCH 32/38] =?UTF-8?q?io=5Fscene=5Fmax.zip=20gel=C3=B6scht?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- io_scene_max.zip | Bin 51329 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 io_scene_max.zip diff --git a/io_scene_max.zip b/io_scene_max.zip deleted file mode 100644 index c33d151062f3c737943916ce883a4ce60cfa257a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 51329 zcmeHwOK)UJc3#VpydWUJ5Cp>zytu=5iOFsb$@ zr;?dD`KThNH53Byi4%`|^!4xm!JmA>&)?tv=Cyn=|NIgD=Dl>-&bnE8)4KiQ2%s}kc>Y(vdA<2} zfBg@C^S7UT!vFrX2%6vYdxH^cRr`0JCMQR`Zx;9RPPWtQ-wpDs>(Oa;JK9JZwZ>9% zlAX7PqrBBkPTJRlR`+7&(-~9cAV=fFY;pf0>yGlvJR58zUmYDRHmbG7-e9rQ8f62( z?n`S)K1_fqyGVN7WIW8fS4rNz>=&u_6AwDv$dOSjYlXLzDt&NPm<@Y+eAB*4NP0lx84Yq7GH(!WP{FKa@fh((fz~p z=wO~CwQZM^N_!qXuB|LKYW4Nx=sNG@{eG4lw60ngSsPSTu1BN(aN~*qmez|k{^0;}DK(!`6Gc(V(zS`eOHk0$YZ*O;N-`?)*eS2H4e|y`+_bvS0 z#((uK{I1WMKjDGcR9!{o?TT3;V2gySAqv(DNpQ_9X^jj}XVruTGjfr@&UbHP5$_1ZuRn zI-kJw@|*EZLWr5&{ktiN1YldI$uMh=dIQ)Fp5XTk{wr4A)yhj@ z<+7^H7rTdhJI}WEUxY+9wRU4Z$zXQ9OSJPoAB@JWP6F-7S~p4ix|Mfl_Kus)V#|A4 zw?3bYyRA;A*MhG4O|E9rDc)#&VPX(&Kw^eo&x&tnBrKO z03P&uBh=^(V1hF9b~krJ}kd7_wUYXi%;KH zKbxC}gpam%zD~Cf-=w06n1rO^*)uU#JeO)%WW4mF1_)PuJIKjpiEr zv`5h7bq?EozIA%M{|4|UMi|XzV`+V>USHo^Yu1EaN4;S#&hpJSKezD5hl+h^>uGJd zzPh$lUs|s}tuGP#ac`u~HEiHy=Rg~1N(0-{z)r2s3McSsSwig1-Tl+_dGq9%vQ$l_ 
z)?8lRtJk&#dGjJ4wax+lJR4ps{L$gTHv(UO+SpsIJzcMF@2xK_t&2W7_@iFuZf583 z_%MBbs-!G0uRmR?UtFx!&a>5(+A2xe=?!|_*7H$kX6tz8nFrmfuP&{wEU(qJYb$F@ zq8M9)_B8=t9dCW(0M}|u%d1aUc9zz*_m=h=qL{CaTXzJUIr)13=xA&EfC}^7Oo9*8 zCiv9hH8zsDofZa-*4#oVM7<7(!<%kCt$YdP9I%pIh_^kO0@kRxhLY3P;3}I2xJHz2UUjAcJzWD(vxAXsI0fYC)2AdTAGD_+!RW}I9rd94rKCKqH%Q8M z-iPTQiZz$Ot*@;%mX-mxJHENO^8hQWjmCCuclBwlUT?086kxcm;n0E98(Zs*#@_bY zDvV=wd6ne8N>0Xu%U0WWs@~jQT3)Kv)|U64E^XJ?@1xdWl%re8t33PQ&^4MnJ6r3^ zwZ{7P-fDAwm!$UsE-!8GZr7T%oz=#AeYv*B=BP(UomK#}+(|m7;`w*)>c+m z_h2FGa3#BYI{*sF`1K6T>c!svSJ3LSw=)<6tyjaZ-GhU4cmFs&*gv7eu;u&5OTnX8 z@R&y@r|HjL(lP*ZveSIg3;ihSwM-+X9-kzj ziGQnU+U`J)(v-sulozAVFq5LQ+FL{@>{(u46*LPN_b!LoXtNe0G{b|A1(n$^pUo~P z1_j^HKgs7w@2?=>HfrTz)(a)P`Y! zh6T)U#@*3o-6(QVFQn8nWmuvdbI|9x?YZPLMOCILg1{J^sL8NgPdftM< z5f>6X!Vw`8Pz9i<5d1TMlTOw(c#=&_##W^aejGEQMDSNxuU|Qz{kAr1+6o+%L1y%^ zQki{HTYt0oq_*t-X}oz-YrI*Ug=V9$@n#k}hFRGYX^5EksZ??}ZfS2Jsm&wU$LL^w zF2_Gt=jW5pqspT(=D?MkR=<+3!v@Kkv-OSo>f3pICg;O;F1-Kz=Ed&BzB0+ z)}Bv(x|u|iag8h3!UQpA8=jE2!hDYCzQPA2Q<7dkK+i6o&!O32a@qHQG4_6WC&@9x z4v@%%ghyaB?Q}5CO6V}9do550$x9T%tksFkrYm``x=qKuf9S2|SOO#%O&5`hGLJKH z=D-XrR0_tzo=_x2yj71kj*4ak9by)mu8F+!L~;8uq{nm@VPUR?s;mSLcc0f<#YT6T}s1Z z9X9pF7hzRbSK>MeP}v+mNbn^&lOo7aULhoP$OGC5HyYfFJQap^RcFHT zlijwn{%8`ZSrS{ zP0vL1DCL^GXbuLw!9$%el%LPeR)2vRp9B}49%uuFOdzpbN-@T0>hsUp28-2D$s2yC zkX4aAV$x7MVoNT9x<*~+yb&SJkF;Hg3(am1T8$}rv+Fns=c#k_-rz2g=b}$OB{i_A zCTCWfv*e7b`Qr4LS&z9dSC@y+wzv29_v@>}{h$2|U*?293uF_C^~Sn9H-nvt#gTex zlLeIm>Uj=<5sDe9U1Hcsr48LJ-U{r$% zp-9aNj*rueTvYG7U&m!*X8Jc1b$Yf|#lbjVNbDH+a2w990v{apgUKtx%@(g(dM<_I^FagA16k0{Kpn?m?1VghB zwpjUa7@+BT5hXuOR+nSjFG6er!eMZQSU$L{uaYE-km_)@A%j)ez+%#vFDM|JM@^%% z)f9^|T$d1xDeTJQ&}ZIreVX(Uc#bM_))ORHFxY%@v5`C(&M_q+)QBYwvVMSw(ObX; z+q8G-G}o1Sql6U{gzcvhmqT^Gzca*$gb}h-zA2B;9cLq0E%m-Np6d<>5yF_Z!b z>N3agSnqJfqS5%&vB`o5Ea$xIKG`Gq@}h@)3bHRWprDQzbgYvPM-|_5OF^KVR0L9C z*=v+G?uaeKqRNE<2(WgsbSdmYgGQ3ppSr$(fS1n_5BZbL#4nP?jTL%PHF2A(>$^j> znCw!FjWDh%2((=>-8ge)-UG5qxt==U01-n&_Z;a?)@BG{v`AOdQs`(VdKAPBmUIrD 
zG72W(ij6MgUI!eJ@m?Z*CsOk`_XAqR5sfYpdXO6xpD2VBDkLh&FNh;{x zB*U~zMkOrUQ>qX`n}HZX#kCXn0GyGMDubPv6J&3pX+HAc5Y2?wnP8uS+o&aOoMRuz z{2=pl1>~cuJPOpF&3aZFEfx3}pM#Nz?#!cq3sME%S&2g#3d++eVN{G<3rln@kb0VC zzjSld9;Uj3K-kJl?FdPuzy(Gk1g|=2+VK2}lp5tG<4|7RfDIl7qEk*ELRa2LO(AJOR!W8e80J^q z78c1EFiv(!2+*(z>o55wgSQ8uH1?bZB(X`VAsF1O#Hr=5)3|H~DiSr7m?+SjMzI3j zOM`||)0*iG?o|_$8Y->b9p6NXQm%*!W%Xn_jMtFqFa=dA2c&rq-e(u}Qlf2Xqr8+G zWy*N{p!!&VY1)8&VBKM>%?&9~8KbT#m~$>eQlvf@DXn9P$4PMRYwmz&)NS-Y^S8jq)kuLgJ>7nENtMHrzyVyx-vtg8=8ao6Cd~ z3JD&&I@}y6OW7|^_ZI7`2Td&LDHcB}nl1zwd{2ZQq*qh0bjLxX;N>Gy_s%Pev$f23 zL->!HnUCA@sW_k3584eg^l`;V4NR)uRSZU+ntLcRMOIbU9FWN#R!^GEuT#YAp3^SP z$JY1>P+0_{tkVf%rpuhW*9w_p3mYzQ5S=_v1EltAOlHa;(aZve96g{h#e`Z6V_a$s zXV)OKS({Nt!ohd=@@AAeu(>bShsZ}hlh1+sXl9b+y8%a&6SDEG>XMNjm&dO)=#2E+ z*+ziF&reE{eGblei;_H+0hyLt15dnvFR6avgM^Y&AEA^KT7Sp!w4_)(^#d^071vAZ z+p03>LPaB zOlYm}2PV;|PqEzHz=ox`f;M8JyNJc)AzI=ZyVhSwmdr9TX>dH4#1QdY&=m0m&ar^U zWF77FpMkwbmr*cGeAo(L=~`JW^y=PZpVek-n7*!6hcdG$36mbtim<|)O?gw1)qGq# zA&S6~K1~jgH&Jad+rK$mlYPS8K1Bpfuz*m*Q%|TXbkjvmB2DL_lPJ(pIiQE~sMIO> zAWK6*LI$ywR_s?Ni2ge2CTIz($6e>PURQ6jS7Ak) zUiIA!<_YQ&QaQ+Cvfz9a^sa@4-OWS*stPv8y?z4aD4;ps4XX?iOcn#Hy65@TTue1^ z3C!>VHureNH32QV0aIK> zS%d(#XrdZQH(HJ{zaM*~$K3Qo$wx+AW zp$dF5IkF)u)Si-S#U@Z7p+&JELN82J+N{*nz(aDQ8k75c1T6 zaa%E}QKCfC>P<>aFr94_SrEiTw#}FkT5qD7V+0J0J7BCDVe5^Kc&I^oW8qsU%L7Oc zJ8@*KUBQFJAnKbLqy&4wRudJ4ky}~VBKxJh-yygo8{`qWQ97qmJj$hefJ}T!AfsTK zi?q5VR2NGfdL1q_6fZROS+eZF*u3RM>M|`=(`}YwT+lF^)MIe(;UH>?adeXlHuJhW z(5sCj>OR+VKe%aIG=<*{hXTP^#bsk=*z!`INN@;AHZ3AQ3`R2NGu>ndo5cn@$Ca`z zZDxmroOIL8VmJ(J4j{8DA@4cW{w?G^^7kuiJW)ZL<@)D<5T z!%z&ujL=d0ZnBsklA*3va9A!oN9*{a;Nj#ts$lI10++htm`XOFSu|7s76=NtC_XhJ z$OsG4JrQaum;FuYwC4)t+a7I(QeqQv#%Y2Y-QK%d)7rYrIKV8f?Gb3;=Y1{wKyS>d zsDG>jh&T~1p2`W$qol)5$`8><~m!_QJ3=$reb&|o0q4rT?Eq| z@{Nldw6}1vpH$c~D6jom9bzvPw(NZo3^u%nAk=Ty8wQH9QGoLp(@tzmguwO_PIQQ+ z%B?6d1nGcTqCj>T*sT?u*ATV+Jz0qrCnqw7R-kiK8U>K{;UxX=M_;5$WTqIl4`E0K z;_bj9YYPt zmM9bol<%g4@qq&#*V*C?E2`}n)#QuU`YpLOLUU7rwu|;ZLfzTgbal7Q6%R!iTPdsT 
zeGtw+8|R~N>Z-ApLVwL&=)52#(2eqW2LMyY1m8LblF+xQc(xa1PEOY9hLXRc?R{8u)s6ioUycw zn5uTWT&vajx3;{pDt~bi%iDkvt}krEaE$%;y_H=F>UzQk8fbwC@pok!LUZhS7F*+o zI6go3rJP!pBe&cw&wPorl`zi*WV9Lc=;8KPv}28x+LF+*Qd4?XYAgD?UVn%#J-h2E zlcTmjfHK4&V8}|{UQl4wK~#a3%ib;|v*0r>w0Wpq=J9ZXHIc0NBY7hf)UaAPMZ}f@ z>A4WVqwUj>mu?I31|E%dPLbG0YrZzH*x|Xz{atmpvG+BEBGfp=K}tP9iEE;U<8$%0 z|0KL`BuDIKK=Aox0t zSYs)5x6?7VC++Vp;Nlis5VpVT{MqCz>0a+v)P)BE2K6^qS9a}|)Q7djr6@i&E>(M+ z7MK3ef%@@2+X!&!>0?wPkV?VzTQqBrLSq?@+W_<+x-#7M+8gGUKdWvSjWH&e03;vs z_BA5Ec4v$W0E5tJ_;In)0?&cn9LvG(%~Eec1+#R1pD-#iWZwwv+5hlCL`qBiF(PpZ zkslj|`;o6KXvR^v?}g38zZwh&0TOSCk!2kdQ^?RBsEtJPJmBFXdO0qD2~`}lPy_WK zfd&Uq!6GWlbkiMyb~j>UB7>2ldHLbHC+ql<`yx=?QQB3YM`5 z%xjdGA#kw4?Oc572OUC7Si$QvnN_Xbu5Z`(3XKzLl7M@W#)uI?RBE@|dcPy*Yg;u; zRWWZVX^1t=tb%vs%GCUZ=c}xHmZv;_{Ca!;^d$Mq=+rf9!!={RImwkT`5@TpC{E`b% zmoBESXN<0S8SDiv{^;@yBd$|^ue&Mw9V`y?d7tVf?hKhej|r4UY+I_7FDnocOT2%V z8kcgNuamMqpA?Q+_zDx><`x`9xCib@g1xjD*0C?sEH7Mc zA%?L*_H*88pSN&W$i)Wn`CIG+QHU_a4r1=di`VEhF4l1@XI_up(;W0dB3CUI)k2Jb zgA5OD3xr32;UGc)Ikv`~Q85;gTBsc@X3>91d}0~HhJwb)SX)tK;0bXWM;r=+Z6?HC zsBe_VkmahG$EGvx4q7go;w}{IMKWlBK3HCsl7&QW;tSf5$LO5=wVP|MI=ypPq`&W0 z+9tUQuEVh}M!l+?%!P4;p|l+EOmM09F@QQ5je3;oT4II0Gh|P$E{ZTb0C_K-3H_4A zWRCkf=avi6gAk216*SoC6R?M<@(WyTZd{(mdlGI*wJODmD9XD?)^HR`z#($ExK$9- zQ-y#b(RWxVbAhn$NJ+m$=O(&kuYC7pNL8qPp)aO+ViD3su;tvBpkA3{kQlX)tCa0k z$xbtr9WvWZR6ioP619lR&w~2*=>(F%Wi*8sIx9&RoXslIQ2dP=lC{!xtL)?yL~Mu- z5tYy!7N4QkeWYHEc>yTnPs6`9E{O(FBqwC9XCcZ1c^R>=mLx{31_xcJN9_kJ3^Z%V zlydr8C&ldOo~BD?X8!}&fkTjDAQbB)OOVv}3QZfFSbsH?&2k1JgRe#CjYgMBeUwZ* zzS^LynkgP*MSfWZOqIAU>~o~F20q9xGbHiaxRW?!0vppv0QDII6Ke@qA~o@pl#y4Y&Jc}u%%yhsQM@vhV&^~omim$zo+CKuD~0eb zUT`@Fm=YtW_J$BLV_O%`V}*`c%2{E6OD z?-=Pi$)r`7rWL%hp5^91A??-zlo@j4JV#7i1uPdb>TW?HSxF6t7`zQf+Lv*Hug1|d z@dQujc8GTuXpwHSti|MHuDa9Vttqq1t6%5}}$w3mE$9b1+Nbn^3Io<)l`*MNwbfiqE(+qupT; zAvA;!rYaB?ig&I0JS-L4veWtyZ<^T0~!Hm9P?sSx`Sh z2>{cqc+VLYZn+r0o&5^$ZCl*Kj&gsd@UrtBh;02R6kDq|))XNPm;*I{3u9O0E3BOx ze+uXbANJJLE@FW^s;uw3xuM%+NC$a)T;?OU!D_Vw- 
zmyd*VOm@TYCU|R;GDtY1uV}IqD0|3fKLq-gY?MuC)9Fxr?~SL5t|~WhmL?xViO^G| zl_Xp$6T0+}0hP(`vo8W12!&yU(Tsp%g%Z6bt&;WV*dWULAt~!{S83P{M~YVl0;)oU zhe!f42*8h$Y!kwl9djtvX$VAyRIrO9VU67BCmh{Xg4og{A~^-KVWDG6pc(!JU8)#I;oG=~$9@9h9LFfa2(fOgaR#}(+*raL zE@G0WJKKpx?g%0B05QH6&1k7G9VoN_lV1-_{>Ny7+_6Lz+miB-aWg z+b%4Dbk}r|)a~VM>>4!0T0)Zp+yU#V2mz%|3)Fke1fvdeMT$O-Ct-m%LZhg}<%`F* z8bx@xYL6@;(-=G6uBSY8WXh}bh|_)o7Z2g7W=lhU_DhWs>M{rJ0%TsN{wmAxBCT_G^ME@&0&hwAU5$~hQ44h5|0oC(<#wP+|gs1%)exs zLYL=`33CrNGf^iLKWGoF3VfVJiPQj_n(3HVykY&k*Sm?xx~Iv10v#}VjF&!Y7fz@H zMdziwD2`RwuZhUg^>uR1E>JS%DQ>?mAA@)E))iO0sC;J|a1L~9%eTwMp9P3?)Ym)m z@va+=;FbQMZH#0J`l7~ae_$3OVYh{rL>9+-pq3s|x!hpPw*V`iZOL95oi0_fI;l_nB-c z8}vF3`#E3Km|_!j+Un*vrlBBAB>OQoIe0qGNvIghqiGi}jJ|$P7D+XI$DVKMVPQx$ zsGn<%VrRATr4Us&mj5|5TR31`_q{bJ;ahapp~yqD~c-S6r_+=9=SyBDp&h4-6uIEDq* zmqH7jYlsq9Pg0j?Xm#N|2nAqcOEnWSLY&)VZo3okpKPD0~e60)0o zILzPQOT+rT7;p`G7jHPkYodh)HRA#swhI_oe2$mYbGIZl;%xaX=PF0mV76K29noaR zk=+K_WvvQY;r;y`hhgHP#JoL0wnwmHs4nJ1-R*fs>8;)bivJ$xj=j>q$19w4Y_Xu| zcR|G>&9`9F)dVP|lc9dZwMuovRttGks8%LUTWu+jV_XeV9O#wg5kqaor&BAIDxj%W zUz=L3TmfvoP-9~{OT})hSt>t5;O*W3^DHm&n^EWci5v)^6sYE+5$8DK9>h+=85;cZ z05KPp;?X)0S6+UgazRpY<@Ek*l+UaCuZ1pyduBKry3EukgX7zMAqdIgu`5zb=QzV&{_9$?5@ne zI2oTW9-jXK+kIV)kE5lOtuk~ih6#g+oL9$N-=xn^O#w2C=cpJZ1fWZP&uLf zky^`p?btV&#YaVc@<*Pd;&ThNnVk3UB;duHe6;4U+RZ-5)X(nh5{AWc1CC-X^A@N8 zdt3$hPp-{jts|UgY3F#E^~Ltd?%wMG>kil>OBklMLo9^0F8CTgz4jRI&lqMYlM_R| zG%;ec!(Kga0ZbxR*ja!Q_sdC0s%6X=a!@jzmSn}8AAc&fD&DYd{|TI&QCjT@E|1b6 zHN|_9m>3M3b7ewHX?qp~aYY505dQzcrlz$50(b^s#2sF7ufp-ERxPBgJ>Les@zpiL zPEIPqeuU4Fyv3S$j6MjGYt`Tq^O#dMI4K|x9>-X10az2{i({NRYU52YR@yO&U{Gfs zivOR=-CRSINE={vKJKemm%%wrUwDVfOfqQ&RkllH0@URzE|y1YNjA2B|+{YNd;}=`vYF zA{Mv@5QF9jd$EBVHsz-64{V7RbC<@Y5s>aZ2hxX`x((TbCQx)y+;3Q(p)rl&RA;J3 zHe{omP^X<|stIib`w!3=Gtg5DI=xq zDcP)`m$$h$eoKX*T{hD1b?)R{XtLj*%ssmdFRY9S!Eq_cM43vyIpJd!Zyy^GMYVNt zAt?d&lkZOmGEWLAT~;vkOnO;FM#``#qR4D_i$JbL^fzlExa5p~w%x^)EBTfW%(*+;+>f{2YScNL`<>NpV7m9ogccMb=uIo!gCM)4u1Kbn^avcR8 
zamcEuB72?P;x*nl*a+oz@+8g>tI-9&cgf4|=u^FkqR!8ho{Ap#$&ee>^_? zDEbV$$Y8tbEuK&K@%~H~0Dwf}YNrau-?$5N!r#UL;Z5UaH<|EJRmGhqGj^=Uc%x)w zaOT!Hdfm`mF;Kyo)xZoVlbQh@t?8M9%|PWTMN(=@@MaKSL+Q4fTii^frPlMhj+?yd z@1py^s~*`sVuthV8q{0%c=_?5-5Ym{3=X7Wq$*6~J9&Y?XhW12dW_VmtbHOl;=x9q zi*y>wqWiV0s1=H=s=~Bd0Nf0}iOC+$)_Qa{-K)60{)j_5UdV1Y&6|0hteAwl6<4Fb zso1(W*o#~$?#bc}`+7br!G2m{UHAJd+|Rj;1AE?mvL>f1Gq8lz78}B z4FuZpIQ)(K@o$yjFN&{`;F5ASi~u&w^^wVT1OT&D9g4#Mf%tJsY=^wQnMrCUBWX^( z9Z)xz&dKc%y1(?>E@E2*KpS4TPcF<3Q=qe5G9jyt_^9}_q97qFM44AoNQ&CVNgVPR z?D!yx!<+$0e!7_~`vHBb5i58Ug&R|qDuwZqOwyG!OTV!wv%F@pS)&SkE~^T~-tBVF zW~|%V=Au2EVz?sJB93(K(&au@;lZtt8@OQJ9l&9DBLEC=6w1g4EjE}I55B|`*f`CFWfoVXn%Pn;@~CHzp0Si86)$THg} z%TB>u-^k529p@vo=S4131)^QtMqGA^WU-?|Ml;LD`^WI{0X-~weKo&FFkbL_mtaY@ z3{gCh)mRY24Kpv#E^>K+7(&+0ou4r46xnaVka;o!X3Ue{-xynN_wXVoT8}igO!>>V zRrl=$zqu4NTflE1jW$=;*7ur9sFK7}7XHRr%s1$&j2t@c0gLUgz92Nes4r|H@HF&n zvi(9mTj$QLyLi*Ksuu19k;`&QF5e`q>owovX##z%OC|9VA*^PK%P?cJxHA<(^5dE3 z@kKyW(P7PE3ObRBr&P~6;4-+!(YZ8n0ji@@MEiZ|Y+$!s)FYy!gss}zkaKdI&Ep8> zi!Dl8fY#U+0$%9u58sDEPHZZ-8{1`VKo@Nr>;*MZuZ@%j0^F*n7>^;F@|+O4q5~v! z-66BX_;Mceg7_qyk$L^JfQ_ag88033+_+rLN7>ELUbw-Q!;iJdzr)Uxu_cgH? 
zk#7ku{H0GUCLdkBpck;R@6iiHK+=mB1r7N}=>^piV}*?TR8N|DnQ~Gw(MudI$DaF4 zG+97PY|Qp!sy1?XK!AZMDy)%T98U}UpZKZD$cCkUoE8==CZ;enOc_MEf7Bbtv_aH# z?W4xSl2J}wg&t#u!ywvtfPGqD_G*o_tlXRV@F6?DR^Lh&}=08<;RI zsAbToq)R-smFeRgj7i;3HJ|&0gw#Cm`mjp^Dja!eRshOk94;0pnt?h%REKZ}(-hGfTfto%{P8a~;r|N5ownIPo@*p`etH@GacbH@%&;I~?nCX(@ zdrA~{Wbs%g#yLB2!oDbdoejMNZ6&&tN+{sds)N%nQZTtS-nb{TK*|>V#vr0mX4(0; z!5OkkC+DO^4z??VrZI1^+b0mP7NyWc4js}9v&bT}(P}9W4;IvAp`J3pztxN0C<#{rJ`Wivv76%}{cOo*y@Lk0Y>sM0;sGn~P0lafpC{M~g>E@@F{2(Jhcks&2+ zuiW^2u6ok$n%g!IzZ`?kY_J(fR`mKKHO&)*t@0r&yuMmO9^ZG>Mff4P-=ks?geqvt zzy%e|bA8{Whjl(dxpd!MKLyZfSGL4p_g$|NL(1i9@?ufGd-V>xz>3$F#PAPaa}vWB z-9r)mWB4u@W7lS4sXg4=glju7%TGqV>3aoFvH+*6T6|r9^Akq`_h8h7dNbp-^=m3a1R@HyCF-W zp4vWFv#}2uR2h4+I|M#p=R)s8wLKmT@d9!=r15%d|J1y;P^}YM1+3A2BG?cQXh($O zfVUe_qISbsx6daz+%c>hT|@@d*4T8&w4w+%$q|q!`wYN4i4^3aVN(z_$m5VzO7lJZ z*lq4@y*waG3`uf8|GT-p{TKTu&zie)zn(ez`uG1}<`aJA{{3%W|F8f0oB#BOfBeZO z{O@1kS>97e2oi6KWAEjxvfA?u}a)#FXtLxDzwzX^|jap*~FEGZQ z!%^PqV%WJx&~q{K>5QpzfUP~ z%v=t7H~f+U{h&3hf~4^%@8C44!c?Sd>N}og9q6m-TMsWC#@#jCDUxEw=brQUK6a6G zF_#%H6enBb5n{99y975xCly3dxAU{w#%yam!ooRW1AgY%{<4r9;b0?XMOkvty22oU zwb8!-SCi+}Mm(~+g!6Lx`dCpMJ*ncEYI_k?N3KeTYWM)!(U>?7++((#}ZJ_iBW6+Em* zzu3Ixs6C5odNHQVLLX)s1d-N`%UT+Z&_B3WUfLzNKA{qq)M9Ahs^AJc@q4}o-Qk~4 zDpN)D2oV{&b+I}34aRACGVX8A{Tv1-V})TH8?JB*l-y!kJ#;>GUsPDN)_6C(20@=j znt`X%8EVso-708kgqny3QFk0vr*)q3IJ-p_XqQFQ6A3b_@!j0far2~kiayM}e6f27 zw;(_i4;!QD5tnZPC9u#NT~T0dt=t@X9ABkENy7>ySZokySvR7bH83N4k>cN z8%bnUYS$DFN!Xkfw`~-MPXjG9g$cGGVyKcUmT=sQdv>|kEU1Hft2S|M6`XMEb`wW@ zYy2TyCB|Xz5@#tai2f>rvmNAU=PIi&4!!wC68eo|cb*%cACeb!$!G!JcVWkeAvQ+H z3os^oW@%#Xql_psn%qy*%U8*Bm~X(aQi`Ar;{o36#$~^}i+&9G>PH%W3Ck^EvXWOu zSva8GPu>=z1RkZs2H0Kp)~O%5O6+5~YTR*;TYQDQ$k_BrZ(;AW@|raUq_ zRqa@0EE3w~^a5)w*sCndqjFt$Yna+cmHeuU4X2xx+0d6DsM6o#KEDQ*HXAG+#gWIR z`W`>3RY+HAy^}V~anC%#oPy=>RlaE4UnpD{LYuI(p3O!r&dQagGO=Ow0(Ps%U=@>v z^SrgbfB~!JB&NF|9`6P@I!#R@=1paRk3DL2GkYix zPIK7DHs_S?7k&)k&p|WYK0bVX(mb9MNdaV#T`^R!FsFxy2Ps~&e01Uqr>CVbnE5m;coNf>-6wchDxpepj@yow>ReaAiAW# 
z9Etj~O25d!3abTo5C&c_4tTeDmReH=$d~?L_CYS_MgWKQM`aG09`rt-5kmuhRItDK zs#`c!qi_-+2J@oX(C1_F#LnW=Wt(8^LT&jV2;TbCzKEowhX|N z&7C~k+Qm!0)6?dg)0f9hGq>^Re-P1TOn4;CFJDbkYcLpMzdAmAd6d3kaQzxU7n?I(XC$)mr&{mpCnV*dG~ zPyUVh_AkueD3vtxNg|Jb4_T)F@!$XWKm5!2|M`0c&izsJnwvWRtKYod{JX#YhrjvT zPd?#)f9lW^&*%6rLT|!CYArHIL0dohkN@YtBoDMTnFq67VO#$jtw(JIS+TICf4%kB a|9Q5wr9b@VK=aA(<9{pY^Xh-&um1}sBeOpM -- 2.30.2 From 3020e888510bc3a7b5eb0e47fb7029fc5bcc1d0d Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Thu, 7 Dec 2023 16:41:37 +0100 Subject: [PATCH 33/38] Uploaded: io_scene_max --- __init__.py | 156 ++++++ import_max.py | 1422 +++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 1578 insertions(+) create mode 100644 __init__.py create mode 100644 import_max.py diff --git a/__init__.py b/__init__.py new file mode 100644 index 0000000..51af2e1 --- /dev/null +++ b/__init__.py @@ -0,0 +1,156 @@ +# SPDX-FileCopyrightText: 2023 Sebastian Schrand +# +# SPDX-License-Identifier: GPL-2.0-or-later + +import bpy +from bpy_extras.io_utils import ( + ImportHelper, + orientation_helper, + axis_conversion, + ) +from bpy.props import ( + BoolProperty, + FloatProperty, + StringProperty, + ) + +bl_info = { + "name": "Import Autodesk MAX (.max)", + "author": "Sebastian Sille, Philippe Lagadec, Jens M. 
Plonka", + "version": (1, 1, 2), + "blender": (3, 6, 0), + "location": "File > Import", + "description": "Import 3DSMAX meshes & materials", + "warning": "", + "filepath_url": "", + "category": "Import-Export"} + +if "bpy" in locals(): + import importlib + if "import_max" in locals(): + importlib.reload(import_max) + + +@orientation_helper(axis_forward='Y', axis_up='Z') +class Import_max(bpy.types.Operator, ImportHelper): + """Import Autodesk MAX""" + bl_idname = "import_autodesk.max" + bl_label = "Import MAX (.max)" + bl_options = {'PRESET', 'UNDO'} + + filename_ext = ".max" + filter_glob: StringProperty(default="*.max", options={'HIDDEN'},) + + scale_objects: FloatProperty( + name="Scale", + description="Scale factor for all objects", + min=0.0, max=10000.0, + soft_min=0.0, soft_max=10000.0, + default=1.0, + ) + use_material: BoolProperty( + name="Materials", + description="Import the materials of the objects", + default=True, + ) + use_uv_mesh: BoolProperty( + name="UV Mesh", + description="Import texture coordinates as mesh objects", + default=False, + ) + use_apply_matrix: BoolProperty( + name="Apply Matrix", + description="Use matrix to transform the objects", + default=False, + ) + + def execute(self, context): + from . 
import import_max + keywords = self.as_keywords(ignore=("axis_forward", "axis_up", "filter_glob")) + global_matrix = axis_conversion(from_forward=self.axis_forward, from_up=self.axis_up,).to_4x4() + keywords["global_matrix"] = global_matrix + + return import_max.load(self, context, **keywords) + + def draw(self, context): + pass + + +class MAX_PT_import_include(bpy.types.Panel): + bl_space_type = 'FILE_BROWSER' + bl_region_type = 'TOOL_PROPS' + bl_label = "Include" + bl_parent_id = "FILE_PT_operator" + + @classmethod + def poll(cls, context): + sfile = context.space_data + operator = sfile.active_operator + + return operator.bl_idname == "IMPORT_AUTODESK_OT_max" + + def draw(self, context): + layout = self.layout + layout.use_property_split = True + layout.use_property_decorate = False + + sfile = context.space_data + operator = sfile.active_operator + + layrow = layout.row(align=True) + layrow.prop(operator, "use_material") + layrow.label(text="", icon='MATERIAL' if operator.use_material else 'SHADING_TEXTURE') + layrow = layout.row(align=True) + layrow.prop(operator, "use_uv_mesh") + layrow.label(text="", icon='UV' if operator.use_uv_mesh else 'GROUP_UVS') + + +class MAX_PT_import_transform(bpy.types.Panel): + bl_space_type = 'FILE_BROWSER' + bl_region_type = 'TOOL_PROPS' + bl_label = "Transform" + bl_parent_id = "FILE_PT_operator" + + @classmethod + def poll(cls, context): + sfile = context.space_data + operator = sfile.active_operator + + return operator.bl_idname == "IMPORT_AUTODESK_OT_max" + + def draw(self, context): + layout = self.layout + layout.use_property_split = True + layout.use_property_decorate = False + + sfile = context.space_data + operator = sfile.active_operator + + layout.prop(operator, "scale_objects") + layrow = layout.row(align=True) + layrow.prop(operator, "use_apply_matrix") + layrow.label(text="", icon='VIEW_ORTHO' if operator.use_apply_matrix else 'MESH_GRID') + layout.prop(operator, "axis_forward") + layout.prop(operator, "axis_up") + 
+ +def menu_func(self, context): + self.layout.operator(Import_max.bl_idname, text="Autodesk MAX (.max)") + + +def register(): + bpy.utils.register_class(Import_max) + bpy.utils.register_class(MAX_PT_import_include) + bpy.utils.register_class(MAX_PT_import_transform) + bpy.types.TOPBAR_MT_file_import.append(menu_func) + + +def unregister(): + bpy.types.TOPBAR_MT_file_import.remove(menu_func) + bpy.utils.unregister_class(MAX_PT_import_transform) + bpy.utils.unregister_class(MAX_PT_import_include) + bpy.utils.unregister_class(Import_max) + + +if __name__ == "__main__": + register() diff --git a/import_max.py b/import_max.py new file mode 100644 index 0000000..0370228 --- /dev/null +++ b/import_max.py @@ -0,0 +1,1422 @@ +# SPDX-FileCopyrightText: 2023 Sebastian Schrand +# +# SPDX-License-Identifier: GPL-2.0-or-later +# Import is based on using information from olefile IO sourcecode +# and the FreeCAD Autodesk 3DS Max importer ImportMAX +# +# olefile (formerly OleFileIO_PL) is copyright (c) 2005-2018 Philippe Lagadec +# (https://www.decalage.info) +# +# ImportMAX is copyright (c) 2017-2022 Jens M. 
Plonka +# (https://www.github.com/jmplonka/Importer3D) + +import io +import os +import re +import sys +import bpy +import math +import zlib +import array +import struct +import mathutils +from bpy_extras.node_shader_utils import PrincipledBSDFWrapper + + +################### +# DATA STRUCTURES # +################### + +MAGIC = b'\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1' +WORD_CLSID = "00020900-0000-0000-C000-000000000046" + +MIN_FILE_SIZE = 1536 +UNKNOWN_SIZE = 0x7FFFFFFF +MAXFILE_SIZE = 0x7FFFFFFFFFFFFFFF +MAXREGSECT = 0xFFFFFFFA # (-6) maximum SECT +DIFSECT = 0xFFFFFFFC # (-4) denotes a DIFAT sector in a FAT +FATSECT = 0xFFFFFFFD # (-3) denotes a FAT sector in a FAT +ENDOFCHAIN = 0xFFFFFFFE # (-2) end of a virtual stream chain +FREESECT = 0xFFFFFFFF # (-1) unallocated sector +MAX_STREAM = 2 # element is a stream object +ROOT_STORE = 5 # element is a root storage + +TYP_NAME = 0x0962 +INVALID_NAME = re.compile('^[0-9].*') +UNPACK_BOX_DATA = struct.Struct('= MIN_FILE_SIZE: + header = filename[:len(MAGIC)] + else: + with open(filename, 'rb') as fp: + header = fp.read(len(MAGIC)) + if header == MAGIC: + return True + else: + return False + + +class MaxStream(io.BytesIO): + """Returns an instance of the BytesIO class as read-only file object.""" + def __init__(self, fp, sect, size, offset, sectorsize, fat, filesize): + if size == UNKNOWN_SIZE: + size = len(fat) * sectorsize + nb_sectors = (size + (sectorsize-1)) // sectorsize + + data = [] + for i in range(nb_sectors): + try: + fp.seek(offset + sectorsize * sect) + except: + break + sector_data = fp.read(sectorsize) + data.append(sector_data) + try: + sect = fat[sect] & FREESECT + except IndexError: + break + data = b"".join(data) + if len(data) >= size: + data = data[:size] + self.size = size + else: + self.size = len(data) + io.BytesIO.__init__(self, data) + + +class MaxFileDirEntry: + """Directory Entry for a stream or storage.""" + STRUCT_DIRENTRY = '<64sHBBIII16sIQQIII' + DIRENTRY_SIZE = 128 + assert 
struct.calcsize(STRUCT_DIRENTRY) == DIRENTRY_SIZE + + def __init__(self, entry, sid, maxfile): + self.sid = sid + self.maxfile = maxfile + self.kids = [] + self.kids_dict = {} + self.used = False + ( + self.name_raw, + self.namelength, + self.entry_type, + self.color, + self.sid_left, + self.sid_right, + self.sid_child, + clsid, + self.dwUserFlags, + self.createTime, + self.modifyTime, + self.isectStart, + self.sizeLow, + self.sizeHigh + ) = struct.unpack(MaxFileDirEntry.STRUCT_DIRENTRY, entry) + + if self.namelength > 64: + self.namelength = 64 + self.name_utf16 = self.name_raw[:(self.namelength - 2)] + self.name = maxfile._decode_utf16_str(self.name_utf16) + # print('DirEntry SID=%d: %s' % (self.sid, repr(self.name))) + if maxfile.sectorsize == 512: + self.size = self.sizeLow + else: + self.size = self.sizeLow + (int(self.sizeHigh) << 32) + self.clsid = _clsid(clsid) + self.is_minifat = False + if self.entry_type in (ROOT_STORE, MAX_STREAM) and self.size > 0: + if self.size < maxfile.minisectorcutoff \ + and self.entry_type == MAX_STREAM: # only streams can be in MiniFAT + self.is_minifat = True + else: + self.is_minifat = False + maxfile._check_duplicate_stream(self.isectStart, self.is_minifat) + self.sect_chain = None + + def build_sect_chain(self, maxfile): + if self.sect_chain: + return + if self.entry_type not in (ROOT_STORE, MAX_STREAM) or self.size == 0: + return + self.sect_chain = list() + if self.is_minifat and not maxfile.minifat: + maxfile.loadminifat() + next_sect = self.isectStart + while next_sect != ENDOFCHAIN: + self.sect_chain.append(next_sect) + if self.is_minifat: + next_sect = maxfile.minifat[next_sect] + else: + next_sect = maxfile.fat[next_sect] + + def build_storage_tree(self): + if self.sid_child != FREESECT: + self.append_kids(self.sid_child) + self.kids.sort() + + def append_kids(self, child_sid): + if child_sid == FREESECT: + return + else: + child = self.maxfile._load_direntry(child_sid) + if child.used: + return + child.used = True + 
self.append_kids(child.sid_left) + name_lower = child.name.lower() + self.kids.append(child) + self.kids_dict[name_lower] = child + self.append_kids(child.sid_right) + child.build_storage_tree() + + def __eq__(self, other): + return self.name == other.name + + def __lt__(self, other): + return self.name < other.name + + def __ne__(self, other): + return not self.__eq__(other) + + def __le__(self, other): + return self.__eq__(other) or self.__lt__(other) + + +class ImportMaxFile: + """Representing an interface for importing .max files.""" + def __init__(self, filename=None): + self._filesize = None + self.byte_order = None + self.directory_fp = None + self.direntries = None + self.dll_version = None + self.fat = None + self.first_difat_sector = None + self.first_dir_sector = None + self.first_mini_fat_sector = None + self.fp = None + self.header_clsid = None + self.header_signature = None + self.mini_sector_shift = None + self.mini_sector_size = None + self.mini_stream_cutoff_size = None + self.minifat = None + self.minifatsect = None + self.minisectorcutoff = None + self.minisectorsize = None + self.ministream = None + self.minor_version = None + self.nb_sect = None + self.num_difat_sectors = None + self.num_dir_sectors = None + self.num_fat_sectors = None + self.num_mini_fat_sectors = None + self.reserved1 = None + self.reserved2 = None + self.root = None + self.sector_shift = None + self.sector_size = None + self.transaction_signature_number = None + if filename: + self.open(filename) + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def _decode_utf16_str(self, utf16_str, errors='replace'): + unicode_str = utf16_str.decode('UTF-16LE', errors) + return unicode_str + + def open(self, filename): + if hasattr(filename, 'read'): + self.fp = filename + elif isinstance(filename, bytes) and len(filename) >= MIN_FILE_SIZE: + self.fp = io.BytesIO(filename) + else: + self.fp = open(filename, 'rb') + filesize = 0 + self.fp.seek(0, 
os.SEEK_END) + try: + filesize = self.fp.tell() + finally: + self.fp.seek(0) + self._filesize = filesize + self._used_streams_fat = [] + self._used_streams_minifat = [] + header = self.fp.read(512) + fmt_header = '<8s16sHHHHHHLLLLLLLLLL' + header_size = struct.calcsize(fmt_header) + header1 = header[:header_size] + ( + self.header_signature, + self.header_clsid, + self.minor_version, + self.dll_version, + self.byte_order, + self.sector_shift, + self.mini_sector_shift, + self.reserved1, + self.reserved2, + self.num_dir_sectors, + self.num_fat_sectors, + self.first_dir_sector, + self.transaction_signature_number, + self.mini_stream_cutoff_size, + self.first_mini_fat_sector, + self.num_mini_fat_sectors, + self.first_difat_sector, + self.num_difat_sectors + ) = struct.unpack(fmt_header, header1) + + self.sector_size = 2**self.sector_shift + self.mini_sector_size = 2**self.mini_sector_shift + if self.mini_stream_cutoff_size != 0x1000: + self.mini_stream_cutoff_size = 0x1000 + self.nb_sect = ((filesize + self.sector_size-1) // self.sector_size) - 1 + + # file clsid + self.header_clsid = _clsid(header[8:24]) + self.sectorsize = self.sector_size # i16(header, 30) + self.minisectorsize = self.mini_sector_size # i16(header, 32) + self.minisectorcutoff = self.mini_stream_cutoff_size # i32(header, 56) + self._check_duplicate_stream(self.first_dir_sector) + if self.num_mini_fat_sectors: + self._check_duplicate_stream(self.first_mini_fat_sector) + if self.num_difat_sectors: + self._check_duplicate_stream(self.first_difat_sector) + + # Load file allocation tables + self.loadfat(header) + self.loaddirectory(self.first_dir_sector) + self.minifatsect = self.first_mini_fat_sector + + def close(self): + self.fp.close() + + def _check_duplicate_stream(self, first_sect, minifat=False): + if minifat: + used_streams = self._used_streams_minifat + else: + if first_sect in (DIFSECT, FATSECT, ENDOFCHAIN, FREESECT): + return + used_streams = self._used_streams_fat + if first_sect in 
used_streams: + pass + else: + used_streams.append(first_sect) + + def sector_array(self, sect): + ary = array.array('I', sect) + if sys.byteorder == 'big': + ary.byteswap() + return ary + + def loadfat_sect(self, sect): + if isinstance(sect, array.array): + fat1 = sect + else: + fat1 = self.sector_array(sect) + isect = None + for isect in fat1: + isect = isect & FREESECT + if isect == ENDOFCHAIN or isect == FREESECT: + break + sector = self.getsect(isect) + nextfat = self.sector_array(sector) + self.fat = self.fat + nextfat + return isect + + def loadfat(self, header): + sect = header[76:512] + self.fat = array.array('I') + self.loadfat_sect(sect) + if self.num_difat_sectors != 0: + nb_difat_sectors = (self.sectorsize // 4) - 1 + nb_difat = (self.num_fat_sectors - 109 + nb_difat_sectors - 1) // nb_difat_sectors + isect_difat = self.first_difat_sector + for i in range(nb_difat): + sector_difat = self.getsect(isect_difat) + difat = self.sector_array(sector_difat) + self.loadfat_sect(difat[:nb_difat_sectors]) + isect_difat = difat[nb_difat_sectors] + if len(self.fat) > self.nb_sect: + self.fat = self.fat[:self.nb_sect] + + def loadminifat(self): + stream_size = self.num_mini_fat_sectors * self.sector_size + nb_minisectors = (self.root.size + self.mini_sector_size - 1) // self.mini_sector_size + used_size = nb_minisectors * 4 + sect = self._open(self.minifatsect, stream_size, force_FAT=True).read() + self.minifat = self.sector_array(sect) + self.minifat = self.minifat[:nb_minisectors] + + def getsect(self, sect): + try: + self.fp.seek(self.sectorsize * (sect + 1)) + except: + print('IndexError: Sector index out of range') + sector = self.fp.read(self.sectorsize) + return sector + + def loaddirectory(self, sect): + self.directory_fp = self._open(sect, force_FAT=True) + max_entries = self.directory_fp.size // 128 + self.direntries = [None] * max_entries + root_entry = self._load_direntry(0) + self.root = self.direntries[0] + self.root.build_storage_tree() + + def 
_load_direntry(self, sid): + if self.direntries[sid] is not None: + return self.direntries[sid] + self.directory_fp.seek(sid * 128) + entry = self.directory_fp.read(128) + self.direntries[sid] = MaxFileDirEntry(entry, sid, self) + return self.direntries[sid] + + def _open(self, start, size=UNKNOWN_SIZE, force_FAT=False): + if size < self.minisectorcutoff and not force_FAT: + if not self.ministream: + self.loadminifat() + size_ministream = self.root.size + self.ministream = self._open(self.root.isectStart, + size_ministream, force_FAT=True) + return MaxStream(fp=self.ministream, sect=start, size=size, + offset=0, sectorsize=self.minisectorsize, + fat=self.minifat, filesize=self.ministream.size) + else: + return MaxStream(fp=self.fp, sect=start, size=size, + offset=self.sectorsize, sectorsize=self.sectorsize, + fat=self.fat, filesize=self._filesize) + + def _find(self, filename): + if isinstance(filename, str): + filename = filename.split('/') + node = self.root + for name in filename: + for kid in node.kids: + if kid.name.lower() == name.lower(): + break + node = kid + return node.sid + + def openstream(self, filename): + sid = self._find(filename) + entry = self.direntries[sid] + return self._open(entry.isectStart, entry.size) + + +################### +# DATA PROCESSING # +################### + +class MaxChunk(): + """Representing a chunk of a .max file.""" + def __init__(self, types, size, level, number): + self.number = number + self.types = types + self.level = level + self.parent = None + self.previous = None + self.next = None + self.size = size + self.unknown = True + self.format = None + self.data = None + + def __str__(self): + if (self.unknown): + return "%s[%4x]%04X:%s" % ("" * self.level, self.number, self.types, + ":".join("%02x" % (c) for c in self.data)) + return "%s[%4x]%04X:%s=%s" % ("" * self.level, self.number, self.types, + self.format, self.data) + + +class ByteArrayChunk(MaxChunk): + """A byte array of a .max chunk.""" + def __init__(self, 
types, data, level, number): + MaxChunk.__init__(self, types, data, level, number) + + def set(self, data, name, fmt, start, end): + try: + self.data = struct.unpack(fmt, data[start:end]) + self.format = name + self.unknown = False + except Exception as exc: + self.data = data + # print('StructError:', exc, name) + + def set_string(self, data): + try: + self.data = data.decode('UTF-16LE') + self.format = "Str16" + self.unknown = False + except: + self.data = data + + def set_data(self, data): + if (self.types in [0x0340, 0x4001, 0x0456, 0x0962]): + self.set_string(data) + elif (self.types in [0x2034, 0x2035]): + self.set(data, "ints", '<' + 'I' * int(len(data) / 4), 0, len(data)) + elif (self.types in [0x2501, 0x2503, 0x2504, 0x2505, 0x2511]): + self.set(data, "floats", '<' + 'f' * int(len(data) / 4), 0, len(data)) + elif (self.types == 0x2510): + self.set(data, "struct", '<' + 'f' * int(len(data) / 4 - 1) + 'I', 0, len(data)) + elif (self.types == 0x0100): + self.set(data, "float", ' 3): + return get_rotation(refs[0]) + elif (uid == MATRIX_ROT): # Rotation Wire + return get_rotation(get_references(pos)[0]) + if (rotation): + mtx = mathutils.Matrix.Rotation(rotation.angle, 4, rotation.axis) + return mtx + + +def get_scale(pos): + mtx = mathutils.Matrix.Identity(4) + if (pos): + uid = get_guid(pos) + if (uid == 0x2010): # Bezier Scale + scale = pos.get_first(0x2501) + if (scale is None): + scale = pos.get_first(0x2505) + pos = scale.data + elif (uid == 0x442315): # TCB Zoom + scale = pos.get_first(0x2501) + if (scale is None): + scale = pos.get_first(0x2505) + pos = scale.data + elif (uid == MATRIX_SCL): # ScaleXYZ + pos = get_point_3d(pos, 1.0) + else: + return mtx + mtx = mathutils.Matrix.Diagonal(pos[:3]).to_4x4() + return mtx + + +def create_matrix(prc): + mtx = mathutils.Matrix.Identity(4) + pos = rot = scl = None + uid = get_guid(prc) + if (uid == 0x2005): # Position/Rotation/Scale + pos = get_position(get_references(prc)[0]) + rot = 
get_rotation(get_references(prc)[1]) + scl = get_scale(get_references(prc)[2]) + elif (uid == 0x9154): # BipSlave Control + biped_sub_anim = get_references(prc)[2] + refs = get_references(biped_sub_anim) + scl = get_scale(get_references(refs[1])[0]) + rot = get_rotation(get_references(refs[2])[0]) + pos = get_position(get_references(refs[3])[0]) + if (pos is not None): + mtx = pos @ mtx + if (rot is not None): + mtx = rot @ mtx + if (scl is not None): + mtx = scl @ mtx + return mtx + + +def get_matrix_mesh_material(node): + refs = get_reference(node) + if (refs): + prs = refs.get(0, None) + msh = refs.get(1, None) + mat = refs.get(3, None) + lyr = refs.get(6, None) + else: + refs = get_references(node) + prs = refs[0] + msh = refs[1] + mat = refs[3] + lyr = None + if (len(refs) > 6): + lyr = refs[6] + return prs, msh, mat, lyr + + +def get_property(properties, idx): + for child in properties.children: + if (child.types & 0x100E): + if (get_short(child.data, 0)[0] == idx): + return child + return None + + +def get_color(colors, idx): + prop = get_property(colors, idx) + if (prop is not None): + siz = len(prop.data) - 12 + col, offset = get_floats(prop.data, siz, 3) + return (col[0], col[1], col[2]) + return None + + +def get_value(colors, idx): + prop = get_property(colors, idx) + if (prop is not None): + siz = len(prop.data) - 4 + val, offset = get_float(prop.data, siz) + return val + return None + + +def get_parameter(colors, fmt): + if (fmt == 0x1): + siz = len(colors.data) - 12 + para, offset = get_floats(colors.data, siz, 3) + else: + siz = len(colors.data) - 4 + para, offset = get_float(colors.data, siz) + return para + + +def get_standard_material(refs): + material = None + try: + if (len(refs) > 2): + colors = refs[2] + parameters = get_references(colors)[0] + material = Material() + material.set('ambient', get_color(parameters, 0x00)) + material.set('diffuse', get_color(parameters, 0x01)) + material.set('specular', get_color(parameters, 0x02)) + 
material.set('emissive', get_color(parameters, 0x08)) + material.set('shinines', get_value(parameters, 0x0B)) + parablock = refs[4] # ParameterBlock2 + material.set('glossines', get_value(parablock, 0x02)) + material.set('metallic', get_value(parablock, 0x05)) + except: + pass + return material + + +def get_vray_material(vry): + material = Material() + try: + material.set('diffuse', get_color(vry, 0x01)) + material.set('specular', get_color(vry, 0x02)) + material.set('shinines', get_value(vry, 0x03)) + material.set('refraction', get_value(vry, 0x09)) + material.set('emissive', get_color(vry, 0x17)) + material.set('glossines', get_value(vry, 0x18)) + material.set('metallic', get_value(vry, 0x19)) + except: + pass + return material + + +def get_corona_material(mtl): + material = Material() + try: + cor = mtl.children + material.set('diffuse', get_parameter(cor[3], 0x1)) + material.set('specular', get_parameter(cor[4], 0x1)) + material.set('emissive', get_parameter(cor[8], 0x1)) + material.set('glossines', get_parameter(cor[9], 0x2)) + except: + pass + return material + + +def get_arch_material(ad): + material = Material() + try: + material.set('diffuse', get_color(ad, 0x1A)) + material.set('specular', get_color(ad, 0x05)) + material.set('shinines', get_value(ad, 0x0B)) + except: + pass + return material + + +def adjust_material(obj, mat): + material = None + if (mat is not None): + uid = get_guid(mat) + if (uid == 0x0002): # Standard + refs = get_references(mat) + material = get_standard_material(refs) + elif (uid == 0x0200): # Multi/Sub-Object + refs = get_references(mat) + material = adjust_material(obj, refs[-1]) + elif (uid == VRAY_MTL): # VRayMtl + refs = get_reference(mat) + material = get_vray_material(refs[1]) + elif (uid == CORO_MTL): # CoronaMtl + refs = get_references(mat) + material = get_corona_material(refs[0]) + elif (uid == ARCH_MTL): # Arch + refs = get_references(mat) + material = get_arch_material(refs[0]) + if (obj is not None) and (material is 
not None): + objMaterial = bpy.data.materials.new(get_cls_name(mat)) + obj.data.materials.append(objMaterial) + matShader = PrincipledBSDFWrapper(objMaterial, is_readonly=False, use_nodes=True) + matShader.base_color = objMaterial.diffuse_color[:3] = material.get('diffuse', (0.8, 0.8, 0.8)) + matShader.specular_tint = objMaterial.specular_color[:3] = material.get('specular', (1, 1, 1)) + matShader.specular = objMaterial.specular_intensity = material.get('glossines', 0.5) + matShader.roughness = objMaterial.roughness = 1.0 - material.get('shinines', 0.6) + matShader.metallic = objMaterial.metallic = material.get('metallic', 0) + matShader.emission_color = material.get('emissive', (0, 0, 0)) + matShader.ior = material.get('refraction', 1.45) + + +def adjust_matrix(obj, node): + mtx = create_matrix(node).flatten() + plc = mathutils.Matrix(*mtx) + obj.matrix_world = plc + return plc + + +def create_shape(context, pts, indices, node, key, mtx, mat, umt): + name = node.get_first(TYP_NAME).data + shape = bpy.data.meshes.new(name) + if (key is not None): + name = "%s_%d" % (name, key) + data = [] + if (pts): + loopstart = [] + looplines = loop = 0 + nb_faces = len(indices) + for fid in range(nb_faces): + polyface = indices[fid] + looplines += len(polyface) + shape.vertices.add(len(pts) // 3) + shape.loops.add(looplines) + shape.polygons.add(nb_faces) + shape.vertices.foreach_set("co", pts) + for vtx in indices: + loopstart.append(loop) + data.extend(vtx) + loop += len(vtx) + shape.polygons.foreach_set("loop_start", loopstart) + shape.loops.foreach_set("vertex_index", data) + + if (len(data) > 0): + shape.validate() + shape.update() + obj = bpy.data.objects.new(name, shape) + context.view_layer.active_layer_collection.collection.objects.link(obj) + obj.matrix_world = mtx + if (umt): + adjust_material(obj, mat) + return True + return True + + +def calc_point(data): + points = [] + long, offset = get_long(data, 0) + while (offset < len(data)): + val, offset = get_long(data, 
offset) + flt, offset = get_floats(data, offset, 3) + points.extend(flt) + return points + + +def calc_point_float(data): + points = [] + long, offset = get_long(data, 0) + while (offset < len(data)): + flt, offset = get_floats(data, offset, 3) + points.extend(flt) + return points + + +def calc_point_3d(chunk): + data = chunk.data + count, offset = get_long(data, 0) + pointlist = [] + try: + while (offset < len(data)): + pt = Point3d() + long, offset = get_long(data, offset) + pt.points, offset = get_longs(data, offset, long) + pt.flags, offset = get_short(data, offset) + if ((pt.flags & 0x01) != 0): + pt.f1, offset = get_long(data, offset) + if ((pt.flags & 0x08) != 0): + pt.fH, offset = get_short(data, offset) + if ((pt.flags & 0x10) != 0): + pt.f2, offset = get_long(data, offset) + if ((pt.flags & 0x20) != 0): + pt.fA, offset = get_longs(data, offset, 2 * (long - 3)) + if (len(pt.points) > 0): + pointlist.append(pt) + except Exception as exc: + print('ArrayError:\n', "%s: offset = %d\n" % (exc, offset)) + return pointlist + + +def get_point_array(values): + verts = [] + if len(values) >= 4: + count, offset = get_long(values, 0) + while (count > 0): + floats, offset = get_floats(values, offset, 3) + verts.extend(floats) + count -= 1 + return verts + + +def get_poly_4p(points): + vertex = {} + for point in points: + ngon = point.points + key = point.fH + if (key not in vertex): + vertex[key] = [] + vertex[key].append(ngon) + return vertex + + +def get_poly_5p(data): + count, offset = get_long(data, 0) + ngons = [] + while count > 0: + pt, offset = get_longs(data, offset, 3) + offset += 8 + ngons.append(pt) + count -= 1 + return ngons + + +def get_poly_6p(data): + count, offset = get_long(data, 0) + polylist = [] + while (offset < len(data)): + long, offset = get_longs(data, offset, 6) + i = 5 + while ((i > 3) and (long[i] < 0)): + i -= 1 + if (i > 2): + polylist.append(long[1:i]) + return polylist + + +def get_poly_data(chunk): + offset = 0 + polylist = [] + data 
= chunk.data + while (offset < len(data)): + count, offset = get_long(data, offset) + points, offset = get_longs(data, offset, count) + polylist.append(points) + return polylist + + +def create_editable_poly(context, node, msh, mat, mtx, umt, uvm): + coords = point4i = point6i = pointNi = None + poly = msh.get_first(0x08FE) + created = False + lidx = [] + lcrd = [] + lply = [] + if (poly): + for child in poly.children: + if (child.types == 0x0100): + coords = calc_point(child.data) + elif (child.types == 0x0108): + point6i = child.data + elif (child.types == 0x011A): + point4i = calc_point_3d(child) + elif (child.types == 0x0310): + pointNi = child.data + elif (child.types == 0x0124): + lidx.append(get_long(child.data, 0)[0]) + elif (child.types == 0x0128): + lcrd.append(calc_point_float(child.data)) + elif (child.types == 0x012B): + lply.append(get_poly_data(child)) + if (point4i is not None): + vertex = get_poly_4p(point4i) + if (len(vertex) > 0): + for key, ngons in vertex.items(): + created |= create_shape(context, coords, ngons, + node, key, mtx, mat, umt) + else: + created = True + elif (point6i is not None): + ngons = get_poly_6p(point6i) + created = create_shape(context, coords, ngons, node, + None, mtx, mat, umt) + elif (pointNi is not None): + ngons = get_poly_5p(pointNi) + created = create_shape(context, coords, ngons, node, + None, mtx, mat, umt) + if (uvm and len(lidx) > 0): + for i in range(len(lidx)): + created |= create_shape(context, lcrd[i], lply[i], + node, lidx[i], mtx, mat, umt) + return created + + +def create_editable_mesh(context, node, msh, mat, mtx, umt): + poly = msh.get_first(0x08FE) + created = False + if (poly): + vertex_chunk = poly.get_first(0x0914) + clsid_chunk = poly.get_first(0x0912) + coords = get_point_array(vertex_chunk.data) + ngons = get_poly_5p(clsid_chunk.data) + created = create_shape(context, coords, ngons, node, None, mtx, mat, umt) + return created + + +def create_shell(context, node, shell, mat, mtx, umt, uvm): + refs 
= get_references(shell) + msh = refs[-1] + if (get_cls_name(msh) == "'Editable Poly'"): + created = create_editable_poly(context, node, msh, mat, mtx, umt, uvm) + else: + created = create_editable_mesh(context, node, msh, mat, mtx, umt) + return created + + +def create_skipable(context, node, skip): + name = node.get_first(TYP_NAME).data + print(" skipping %s '%s'... " % (skip, name)) + return True + + +def create_mesh(context, node, msh, mtx, mat, umt, uvm): + created = False + uid = get_guid(msh) + msh.geometry = None + if (uid == EDIT_MESH): + created = create_editable_mesh(context, node, msh, mat, mtx, umt) + elif (uid == EDIT_POLY): + created = create_editable_poly(context, node, msh, mat, mtx, umt, uvm) + elif (uid in {0x2032, 0x2033}): + created = create_shell(context, node, msh, mat, mtx, umt, uvm) + else: + skip = SKIPPABLE.get(uid) + if (skip is not None): + created = create_skipable(context, node, skip) + return created, uid + + +def create_object(context, node, mscale, usemat, uvmesh, transform): + parent = get_node_parent(node) + node.parent = parent + prs, msh, mat, lyr = get_matrix_mesh_material(node) + while ((parent is not None) and (get_guid(parent) != 0x02)): + parent_mtx = parent.matrix + if (parent_mtx): + prs = prs.dot(parent_mtx) + parent = get_node_parent(parent) + if (transform): + mtx = create_matrix(prs) @ mscale + else: + mtx = mscale + created, uid = create_mesh(context, node, msh, mtx, mat, usemat, uvmesh) + + +def make_scene(context, mscale, usemat, uvmesh, transform, parent): + for chunk in parent.children: + if (isinstance(chunk, SceneChunk)): + if ((get_guid(chunk) == 0x01) and (get_super_id(chunk) == 0x01)): + try: + create_object(context, chunk, mscale, usemat, uvmesh, transform) + except Exception as exc: + print('ImportError:', exc, chunk) + + +def read_scene(context, maxfile, filename, mscale, usemat, uvmesh, transform): + global SCENE_LIST + SCENE_LIST = read_chunks(maxfile, 'Scene', filename+'.Scn.bin', conReader=SceneChunk) 
+ make_scene(context, mscale, usemat, uvmesh, transform, SCENE_LIST[0]) + + +def read(context, filename, mscale, usemat, uvmesh, transform): + if (is_maxfile(filename)): + maxfile = ImportMaxFile(filename) + read_class_data(maxfile, filename) + read_config(maxfile, filename) + read_directory(maxfile, filename) + read_class_directory(maxfile, filename) + read_video_postqueue(maxfile, filename) + read_scene(context, maxfile, filename, mscale, usemat, uvmesh, transform) + else: + print("File seems to be no 3D Studio Max file!") + + +def load(operator, context, filepath="", scale_objects=1.0, use_material=True, + use_uv_mesh=False, use_apply_matrix=False, global_matrix=None): + context.window.cursor_set('WAIT') + mscale = mathutils.Matrix.Scale(scale_objects, 4) + if global_matrix is not None: + mscale = global_matrix @ mscale + + read(context, filepath, mscale, usemat=use_material, uvmesh=use_uv_mesh, transform=use_apply_matrix) + context.window.cursor_set('DEFAULT') + + return {'FINISHED'} -- 2.30.2 From 09be6b7404dcbd9b55973722238cb90544c76a15 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Thu, 7 Dec 2023 16:46:43 +0100 Subject: [PATCH 34/38] =?UTF-8?q?io=5Fscene=5Fmax/=5F=5Finit.py=20hinzugef?= =?UTF-8?q?=C3=BCgt?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- io_scene_max/__init.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 io_scene_max/__init.py diff --git a/io_scene_max/__init.py b/io_scene_max/__init.py new file mode 100644 index 0000000..e69de29 -- 2.30.2 From 14f3ae1eace528b66ef5e098fd93d3fea6aeb121 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Thu, 7 Dec 2023 16:47:30 +0100 Subject: [PATCH 35/38] Uploaded: io_scene_max --- io_scene_max/__init__.py | 156 ++++ io_scene_max/import_max.py | 1422 ++++++++++++++++++++++++++++++++++++ 2 files changed, 1578 insertions(+) create mode 100644 io_scene_max/__init__.py create mode 100644 io_scene_max/import_max.py diff --git 
# SPDX-FileCopyrightText: 2023 Sebastian Schrand
#
# SPDX-License-Identifier: GPL-2.0-or-later

"""Blender add-on registration for the Autodesk 3ds Max (.max) importer.

Declares the import operator and its file-browser side panels; the actual
parsing lives in the sibling module ``import_max``.
"""

import bpy
from bpy_extras.io_utils import (
    ImportHelper,
    orientation_helper,
    axis_conversion,
    )
from bpy.props import (
    BoolProperty,
    FloatProperty,
    StringProperty,
    )

bl_info = {
    "name": "Import Autodesk MAX (.max)",
    "author": "Sebastian Sille, Philippe Lagadec, Jens M. Plonka",
    "version": (1, 1, 2),
    "blender": (3, 6, 0),
    "location": "File > Import",
    "description": "Import 3DSMAX meshes & materials",
    "warning": "",
    "filepath_url": "",
    "category": "Import-Export"}

# Standard add-on reload idiom: when Blender re-executes this file while
# "bpy" is already bound, re-import the worker module so script reloads
# pick up changes in import_max.py.
if "bpy" in locals():
    import importlib
    if "import_max" in locals():
        importlib.reload(import_max)


@orientation_helper(axis_forward='Y', axis_up='Z')
class Import_max(bpy.types.Operator, ImportHelper):
    """Import Autodesk MAX"""
    bl_idname = "import_autodesk.max"
    bl_label = "Import MAX (.max)"
    bl_options = {'PRESET', 'UNDO'}

    filename_ext = ".max"
    # Hidden filter so the file browser only offers .max files.
    filter_glob: StringProperty(default="*.max", options={'HIDDEN'},)

    # Uniform scale applied to every imported object.
    scale_objects: FloatProperty(
        name="Scale",
        description="Scale factor for all objects",
        min=0.0, max=10000.0,
        soft_min=0.0, soft_max=10000.0,
        default=1.0,
    )
    use_material: BoolProperty(
        name="Materials",
        description="Import the materials of the objects",
        default=True,
    )
    use_uv_mesh: BoolProperty(
        name="UV Mesh",
        description="Import texture coordinates as mesh objects",
        default=False,
    )
    use_apply_matrix: BoolProperty(
        name="Apply Matrix",
        description="Use matrix to transform the objects",
        default=False,
    )

    def execute(self, context):
        # Deferred import keeps the heavy parser out of add-on startup.
        from . import import_max

        # Forward every operator setting except the orientation helpers,
        # which are folded into a single 4x4 conversion matrix instead.
        keywords = self.as_keywords(ignore=("axis_forward", "axis_up", "filter_glob"))
        keywords["global_matrix"] = axis_conversion(
            from_forward=self.axis_forward,
            from_up=self.axis_up,
        ).to_4x4()
        return import_max.load(self, context, **keywords)

    def draw(self, context):
        # Intentionally empty: options are drawn by the MAX_PT_* panels below.
        pass


class MAX_PT_import_include(bpy.types.Panel):
    """File-browser sub-panel with the material / UV import toggles."""
    bl_space_type = 'FILE_BROWSER'
    bl_region_type = 'TOOL_PROPS'
    bl_label = "Include"
    bl_parent_id = "FILE_PT_operator"

    @classmethod
    def poll(cls, context):
        # Only show while our own import operator is active.
        operator = context.space_data.active_operator
        return operator.bl_idname == "IMPORT_AUTODESK_OT_max"

    def draw(self, context):
        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False
        operator = context.space_data.active_operator

        row = layout.row(align=True)
        row.prop(operator, "use_material")
        row.label(text="", icon='MATERIAL' if operator.use_material else 'SHADING_TEXTURE')
        row = layout.row(align=True)
        row.prop(operator, "use_uv_mesh")
        row.label(text="", icon='UV' if operator.use_uv_mesh else 'GROUP_UVS')


class MAX_PT_import_transform(bpy.types.Panel):
    """File-browser sub-panel with scale, matrix and axis options."""
    bl_space_type = 'FILE_BROWSER'
    bl_region_type = 'TOOL_PROPS'
    bl_label = "Transform"
    bl_parent_id = "FILE_PT_operator"

    @classmethod
    def poll(cls, context):
        # Only show while our own import operator is active.
        operator = context.space_data.active_operator
        return operator.bl_idname == "IMPORT_AUTODESK_OT_max"

    def draw(self, context):
        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False
        operator = context.space_data.active_operator

        layout.prop(operator, "scale_objects")
        row = layout.row(align=True)
        row.prop(operator, "use_apply_matrix")
        row.label(text="", icon='VIEW_ORTHO' if operator.use_apply_matrix else 'MESH_GRID')
        layout.prop(operator, "axis_forward")
        layout.prop(operator, "axis_up")
def menu_func(self, context):
    """File > Import menu entry for the .max importer."""
    self.layout.operator(Import_max.bl_idname, text="Autodesk MAX (.max)")


# Registration order matters: the operator must exist before its panels.
_CLASSES = (
    Import_max,
    MAX_PT_import_include,
    MAX_PT_import_transform,
)


def register():
    """Register the operator, its panels and the import-menu entry."""
    for cls in _CLASSES:
        bpy.utils.register_class(cls)
    bpy.types.TOPBAR_MT_file_import.append(menu_func)


def unregister():
    """Remove the menu entry, then unregister classes in reverse order."""
    bpy.types.TOPBAR_MT_file_import.remove(menu_func)
    for cls in reversed(_CLASSES):
        bpy.utils.unregister_class(cls)


if __name__ == "__main__":
    register()
Plonka +# (https://www.github.com/jmplonka/Importer3D) + +import io +import os +import re +import sys +import bpy +import math +import zlib +import array +import struct +import mathutils +from bpy_extras.node_shader_utils import PrincipledBSDFWrapper + + +################### +# DATA STRUCTURES # +################### + +MAGIC = b'\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1' +WORD_CLSID = "00020900-0000-0000-C000-000000000046" + +MIN_FILE_SIZE = 1536 +UNKNOWN_SIZE = 0x7FFFFFFF +MAXFILE_SIZE = 0x7FFFFFFFFFFFFFFF +MAXREGSECT = 0xFFFFFFFA # (-6) maximum SECT +DIFSECT = 0xFFFFFFFC # (-4) denotes a DIFAT sector in a FAT +FATSECT = 0xFFFFFFFD # (-3) denotes a FAT sector in a FAT +ENDOFCHAIN = 0xFFFFFFFE # (-2) end of a virtual stream chain +FREESECT = 0xFFFFFFFF # (-1) unallocated sector +MAX_STREAM = 2 # element is a stream object +ROOT_STORE = 5 # element is a root storage + +TYP_NAME = 0x0962 +INVALID_NAME = re.compile('^[0-9].*') +UNPACK_BOX_DATA = struct.Struct('= MIN_FILE_SIZE: + header = filename[:len(MAGIC)] + else: + with open(filename, 'rb') as fp: + header = fp.read(len(MAGIC)) + if header == MAGIC: + return True + else: + return False + + +class MaxStream(io.BytesIO): + """Returns an instance of the BytesIO class as read-only file object.""" + def __init__(self, fp, sect, size, offset, sectorsize, fat, filesize): + if size == UNKNOWN_SIZE: + size = len(fat) * sectorsize + nb_sectors = (size + (sectorsize-1)) // sectorsize + + data = [] + for i in range(nb_sectors): + try: + fp.seek(offset + sectorsize * sect) + except: + break + sector_data = fp.read(sectorsize) + data.append(sector_data) + try: + sect = fat[sect] & FREESECT + except IndexError: + break + data = b"".join(data) + if len(data) >= size: + data = data[:size] + self.size = size + else: + self.size = len(data) + io.BytesIO.__init__(self, data) + + +class MaxFileDirEntry: + """Directory Entry for a stream or storage.""" + STRUCT_DIRENTRY = '<64sHBBIII16sIQQIII' + DIRENTRY_SIZE = 128 + assert 
struct.calcsize(STRUCT_DIRENTRY) == DIRENTRY_SIZE + + def __init__(self, entry, sid, maxfile): + self.sid = sid + self.maxfile = maxfile + self.kids = [] + self.kids_dict = {} + self.used = False + ( + self.name_raw, + self.namelength, + self.entry_type, + self.color, + self.sid_left, + self.sid_right, + self.sid_child, + clsid, + self.dwUserFlags, + self.createTime, + self.modifyTime, + self.isectStart, + self.sizeLow, + self.sizeHigh + ) = struct.unpack(MaxFileDirEntry.STRUCT_DIRENTRY, entry) + + if self.namelength > 64: + self.namelength = 64 + self.name_utf16 = self.name_raw[:(self.namelength - 2)] + self.name = maxfile._decode_utf16_str(self.name_utf16) + # print('DirEntry SID=%d: %s' % (self.sid, repr(self.name))) + if maxfile.sectorsize == 512: + self.size = self.sizeLow + else: + self.size = self.sizeLow + (int(self.sizeHigh) << 32) + self.clsid = _clsid(clsid) + self.is_minifat = False + if self.entry_type in (ROOT_STORE, MAX_STREAM) and self.size > 0: + if self.size < maxfile.minisectorcutoff \ + and self.entry_type == MAX_STREAM: # only streams can be in MiniFAT + self.is_minifat = True + else: + self.is_minifat = False + maxfile._check_duplicate_stream(self.isectStart, self.is_minifat) + self.sect_chain = None + + def build_sect_chain(self, maxfile): + if self.sect_chain: + return + if self.entry_type not in (ROOT_STORE, MAX_STREAM) or self.size == 0: + return + self.sect_chain = list() + if self.is_minifat and not maxfile.minifat: + maxfile.loadminifat() + next_sect = self.isectStart + while next_sect != ENDOFCHAIN: + self.sect_chain.append(next_sect) + if self.is_minifat: + next_sect = maxfile.minifat[next_sect] + else: + next_sect = maxfile.fat[next_sect] + + def build_storage_tree(self): + if self.sid_child != FREESECT: + self.append_kids(self.sid_child) + self.kids.sort() + + def append_kids(self, child_sid): + if child_sid == FREESECT: + return + else: + child = self.maxfile._load_direntry(child_sid) + if child.used: + return + child.used = True + 
self.append_kids(child.sid_left) + name_lower = child.name.lower() + self.kids.append(child) + self.kids_dict[name_lower] = child + self.append_kids(child.sid_right) + child.build_storage_tree() + + def __eq__(self, other): + return self.name == other.name + + def __lt__(self, other): + return self.name < other.name + + def __ne__(self, other): + return not self.__eq__(other) + + def __le__(self, other): + return self.__eq__(other) or self.__lt__(other) + + +class ImportMaxFile: + """Representing an interface for importing .max files.""" + def __init__(self, filename=None): + self._filesize = None + self.byte_order = None + self.directory_fp = None + self.direntries = None + self.dll_version = None + self.fat = None + self.first_difat_sector = None + self.first_dir_sector = None + self.first_mini_fat_sector = None + self.fp = None + self.header_clsid = None + self.header_signature = None + self.mini_sector_shift = None + self.mini_sector_size = None + self.mini_stream_cutoff_size = None + self.minifat = None + self.minifatsect = None + self.minisectorcutoff = None + self.minisectorsize = None + self.ministream = None + self.minor_version = None + self.nb_sect = None + self.num_difat_sectors = None + self.num_dir_sectors = None + self.num_fat_sectors = None + self.num_mini_fat_sectors = None + self.reserved1 = None + self.reserved2 = None + self.root = None + self.sector_shift = None + self.sector_size = None + self.transaction_signature_number = None + if filename: + self.open(filename) + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def _decode_utf16_str(self, utf16_str, errors='replace'): + unicode_str = utf16_str.decode('UTF-16LE', errors) + return unicode_str + + def open(self, filename): + if hasattr(filename, 'read'): + self.fp = filename + elif isinstance(filename, bytes) and len(filename) >= MIN_FILE_SIZE: + self.fp = io.BytesIO(filename) + else: + self.fp = open(filename, 'rb') + filesize = 0 + self.fp.seek(0, 
os.SEEK_END) + try: + filesize = self.fp.tell() + finally: + self.fp.seek(0) + self._filesize = filesize + self._used_streams_fat = [] + self._used_streams_minifat = [] + header = self.fp.read(512) + fmt_header = '<8s16sHHHHHHLLLLLLLLLL' + header_size = struct.calcsize(fmt_header) + header1 = header[:header_size] + ( + self.header_signature, + self.header_clsid, + self.minor_version, + self.dll_version, + self.byte_order, + self.sector_shift, + self.mini_sector_shift, + self.reserved1, + self.reserved2, + self.num_dir_sectors, + self.num_fat_sectors, + self.first_dir_sector, + self.transaction_signature_number, + self.mini_stream_cutoff_size, + self.first_mini_fat_sector, + self.num_mini_fat_sectors, + self.first_difat_sector, + self.num_difat_sectors + ) = struct.unpack(fmt_header, header1) + + self.sector_size = 2**self.sector_shift + self.mini_sector_size = 2**self.mini_sector_shift + if self.mini_stream_cutoff_size != 0x1000: + self.mini_stream_cutoff_size = 0x1000 + self.nb_sect = ((filesize + self.sector_size-1) // self.sector_size) - 1 + + # file clsid + self.header_clsid = _clsid(header[8:24]) + self.sectorsize = self.sector_size # i16(header, 30) + self.minisectorsize = self.mini_sector_size # i16(header, 32) + self.minisectorcutoff = self.mini_stream_cutoff_size # i32(header, 56) + self._check_duplicate_stream(self.first_dir_sector) + if self.num_mini_fat_sectors: + self._check_duplicate_stream(self.first_mini_fat_sector) + if self.num_difat_sectors: + self._check_duplicate_stream(self.first_difat_sector) + + # Load file allocation tables + self.loadfat(header) + self.loaddirectory(self.first_dir_sector) + self.minifatsect = self.first_mini_fat_sector + + def close(self): + self.fp.close() + + def _check_duplicate_stream(self, first_sect, minifat=False): + if minifat: + used_streams = self._used_streams_minifat + else: + if first_sect in (DIFSECT, FATSECT, ENDOFCHAIN, FREESECT): + return + used_streams = self._used_streams_fat + if first_sect in 
used_streams: + pass + else: + used_streams.append(first_sect) + + def sector_array(self, sect): + ary = array.array('I', sect) + if sys.byteorder == 'big': + ary.byteswap() + return ary + + def loadfat_sect(self, sect): + if isinstance(sect, array.array): + fat1 = sect + else: + fat1 = self.sector_array(sect) + isect = None + for isect in fat1: + isect = isect & FREESECT + if isect == ENDOFCHAIN or isect == FREESECT: + break + sector = self.getsect(isect) + nextfat = self.sector_array(sector) + self.fat = self.fat + nextfat + return isect + + def loadfat(self, header): + sect = header[76:512] + self.fat = array.array('I') + self.loadfat_sect(sect) + if self.num_difat_sectors != 0: + nb_difat_sectors = (self.sectorsize // 4) - 1 + nb_difat = (self.num_fat_sectors - 109 + nb_difat_sectors - 1) // nb_difat_sectors + isect_difat = self.first_difat_sector + for i in range(nb_difat): + sector_difat = self.getsect(isect_difat) + difat = self.sector_array(sector_difat) + self.loadfat_sect(difat[:nb_difat_sectors]) + isect_difat = difat[nb_difat_sectors] + if len(self.fat) > self.nb_sect: + self.fat = self.fat[:self.nb_sect] + + def loadminifat(self): + stream_size = self.num_mini_fat_sectors * self.sector_size + nb_minisectors = (self.root.size + self.mini_sector_size - 1) // self.mini_sector_size + used_size = nb_minisectors * 4 + sect = self._open(self.minifatsect, stream_size, force_FAT=True).read() + self.minifat = self.sector_array(sect) + self.minifat = self.minifat[:nb_minisectors] + + def getsect(self, sect): + try: + self.fp.seek(self.sectorsize * (sect + 1)) + except: + print('IndexError: Sector index out of range') + sector = self.fp.read(self.sectorsize) + return sector + + def loaddirectory(self, sect): + self.directory_fp = self._open(sect, force_FAT=True) + max_entries = self.directory_fp.size // 128 + self.direntries = [None] * max_entries + root_entry = self._load_direntry(0) + self.root = self.direntries[0] + self.root.build_storage_tree() + + def 
_load_direntry(self, sid): + if self.direntries[sid] is not None: + return self.direntries[sid] + self.directory_fp.seek(sid * 128) + entry = self.directory_fp.read(128) + self.direntries[sid] = MaxFileDirEntry(entry, sid, self) + return self.direntries[sid] + + def _open(self, start, size=UNKNOWN_SIZE, force_FAT=False): + if size < self.minisectorcutoff and not force_FAT: + if not self.ministream: + self.loadminifat() + size_ministream = self.root.size + self.ministream = self._open(self.root.isectStart, + size_ministream, force_FAT=True) + return MaxStream(fp=self.ministream, sect=start, size=size, + offset=0, sectorsize=self.minisectorsize, + fat=self.minifat, filesize=self.ministream.size) + else: + return MaxStream(fp=self.fp, sect=start, size=size, + offset=self.sectorsize, sectorsize=self.sectorsize, + fat=self.fat, filesize=self._filesize) + + def _find(self, filename): + if isinstance(filename, str): + filename = filename.split('/') + node = self.root + for name in filename: + for kid in node.kids: + if kid.name.lower() == name.lower(): + break + node = kid + return node.sid + + def openstream(self, filename): + sid = self._find(filename) + entry = self.direntries[sid] + return self._open(entry.isectStart, entry.size) + + +################### +# DATA PROCESSING # +################### + +class MaxChunk(): + """Representing a chunk of a .max file.""" + def __init__(self, types, size, level, number): + self.number = number + self.types = types + self.level = level + self.parent = None + self.previous = None + self.next = None + self.size = size + self.unknown = True + self.format = None + self.data = None + + def __str__(self): + if (self.unknown): + return "%s[%4x]%04X:%s" % ("" * self.level, self.number, self.types, + ":".join("%02x" % (c) for c in self.data)) + return "%s[%4x]%04X:%s=%s" % ("" * self.level, self.number, self.types, + self.format, self.data) + + +class ByteArrayChunk(MaxChunk): + """A byte array of a .max chunk.""" + def __init__(self, 
types, data, level, number): + MaxChunk.__init__(self, types, data, level, number) + + def set(self, data, name, fmt, start, end): + try: + self.data = struct.unpack(fmt, data[start:end]) + self.format = name + self.unknown = False + except Exception as exc: + self.data = data + # print('StructError:', exc, name) + + def set_string(self, data): + try: + self.data = data.decode('UTF-16LE') + self.format = "Str16" + self.unknown = False + except: + self.data = data + + def set_data(self, data): + if (self.types in [0x0340, 0x4001, 0x0456, 0x0962]): + self.set_string(data) + elif (self.types in [0x2034, 0x2035]): + self.set(data, "ints", '<' + 'I' * int(len(data) / 4), 0, len(data)) + elif (self.types in [0x2501, 0x2503, 0x2504, 0x2505, 0x2511]): + self.set(data, "floats", '<' + 'f' * int(len(data) / 4), 0, len(data)) + elif (self.types == 0x2510): + self.set(data, "struct", '<' + 'f' * int(len(data) / 4 - 1) + 'I', 0, len(data)) + elif (self.types == 0x0100): + self.set(data, "float", ' 3): + return get_rotation(refs[0]) + elif (uid == MATRIX_ROT): # Rotation Wire + return get_rotation(get_references(pos)[0]) + if (rotation): + mtx = mathutils.Matrix.Rotation(rotation.angle, 4, rotation.axis) + return mtx + + +def get_scale(pos): + mtx = mathutils.Matrix.Identity(4) + if (pos): + uid = get_guid(pos) + if (uid == 0x2010): # Bezier Scale + scale = pos.get_first(0x2501) + if (scale is None): + scale = pos.get_first(0x2505) + pos = scale.data + elif (uid == 0x442315): # TCB Zoom + scale = pos.get_first(0x2501) + if (scale is None): + scale = pos.get_first(0x2505) + pos = scale.data + elif (uid == MATRIX_SCL): # ScaleXYZ + pos = get_point_3d(pos, 1.0) + else: + return mtx + mtx = mathutils.Matrix.Diagonal(pos[:3]).to_4x4() + return mtx + + +def create_matrix(prc): + mtx = mathutils.Matrix.Identity(4) + pos = rot = scl = None + uid = get_guid(prc) + if (uid == 0x2005): # Position/Rotation/Scale + pos = get_position(get_references(prc)[0]) + rot = 
get_rotation(get_references(prc)[1]) + scl = get_scale(get_references(prc)[2]) + elif (uid == 0x9154): # BipSlave Control + biped_sub_anim = get_references(prc)[2] + refs = get_references(biped_sub_anim) + scl = get_scale(get_references(refs[1])[0]) + rot = get_rotation(get_references(refs[2])[0]) + pos = get_position(get_references(refs[3])[0]) + if (pos is not None): + mtx = pos @ mtx + if (rot is not None): + mtx = rot @ mtx + if (scl is not None): + mtx = scl @ mtx + return mtx + + +def get_matrix_mesh_material(node): + refs = get_reference(node) + if (refs): + prs = refs.get(0, None) + msh = refs.get(1, None) + mat = refs.get(3, None) + lyr = refs.get(6, None) + else: + refs = get_references(node) + prs = refs[0] + msh = refs[1] + mat = refs[3] + lyr = None + if (len(refs) > 6): + lyr = refs[6] + return prs, msh, mat, lyr + + +def get_property(properties, idx): + for child in properties.children: + if (child.types & 0x100E): + if (get_short(child.data, 0)[0] == idx): + return child + return None + + +def get_color(colors, idx): + prop = get_property(colors, idx) + if (prop is not None): + siz = len(prop.data) - 12 + col, offset = get_floats(prop.data, siz, 3) + return (col[0], col[1], col[2]) + return None + + +def get_value(colors, idx): + prop = get_property(colors, idx) + if (prop is not None): + siz = len(prop.data) - 4 + val, offset = get_float(prop.data, siz) + return val + return None + + +def get_parameter(colors, fmt): + if (fmt == 0x1): + siz = len(colors.data) - 12 + para, offset = get_floats(colors.data, siz, 3) + else: + siz = len(colors.data) - 4 + para, offset = get_float(colors.data, siz) + return para + + +def get_standard_material(refs): + material = None + try: + if (len(refs) > 2): + colors = refs[2] + parameters = get_references(colors)[0] + material = Material() + material.set('ambient', get_color(parameters, 0x00)) + material.set('diffuse', get_color(parameters, 0x01)) + material.set('specular', get_color(parameters, 0x02)) + 
material.set('emissive', get_color(parameters, 0x08)) + material.set('shinines', get_value(parameters, 0x0B)) + parablock = refs[4] # ParameterBlock2 + material.set('glossines', get_value(parablock, 0x02)) + material.set('metallic', get_value(parablock, 0x05)) + except: + pass + return material + + +def get_vray_material(vry): + material = Material() + try: + material.set('diffuse', get_color(vry, 0x01)) + material.set('specular', get_color(vry, 0x02)) + material.set('shinines', get_value(vry, 0x03)) + material.set('refraction', get_value(vry, 0x09)) + material.set('emissive', get_color(vry, 0x17)) + material.set('glossines', get_value(vry, 0x18)) + material.set('metallic', get_value(vry, 0x19)) + except: + pass + return material + + +def get_corona_material(mtl): + material = Material() + try: + cor = mtl.children + material.set('diffuse', get_parameter(cor[3], 0x1)) + material.set('specular', get_parameter(cor[4], 0x1)) + material.set('emissive', get_parameter(cor[8], 0x1)) + material.set('glossines', get_parameter(cor[9], 0x2)) + except: + pass + return material + + +def get_arch_material(ad): + material = Material() + try: + material.set('diffuse', get_color(ad, 0x1A)) + material.set('specular', get_color(ad, 0x05)) + material.set('shinines', get_value(ad, 0x0B)) + except: + pass + return material + + +def adjust_material(obj, mat): + material = None + if (mat is not None): + uid = get_guid(mat) + if (uid == 0x0002): # Standard + refs = get_references(mat) + material = get_standard_material(refs) + elif (uid == 0x0200): # Multi/Sub-Object + refs = get_references(mat) + material = adjust_material(obj, refs[-1]) + elif (uid == VRAY_MTL): # VRayMtl + refs = get_reference(mat) + material = get_vray_material(refs[1]) + elif (uid == CORO_MTL): # CoronaMtl + refs = get_references(mat) + material = get_corona_material(refs[0]) + elif (uid == ARCH_MTL): # Arch + refs = get_references(mat) + material = get_arch_material(refs[0]) + if (obj is not None) and (material is 
not None): + objMaterial = bpy.data.materials.new(get_cls_name(mat)) + obj.data.materials.append(objMaterial) + matShader = PrincipledBSDFWrapper(objMaterial, is_readonly=False, use_nodes=True) + matShader.base_color = objMaterial.diffuse_color[:3] = material.get('diffuse', (0.8, 0.8, 0.8)) + matShader.specular_tint = objMaterial.specular_color[:3] = material.get('specular', (1, 1, 1)) + matShader.specular = objMaterial.specular_intensity = material.get('glossines', 0.5) + matShader.roughness = objMaterial.roughness = 1.0 - material.get('shinines', 0.6) + matShader.metallic = objMaterial.metallic = material.get('metallic', 0) + matShader.emission_color = material.get('emissive', (0, 0, 0)) + matShader.ior = material.get('refraction', 1.45) + + +def adjust_matrix(obj, node): + mtx = create_matrix(node).flatten() + plc = mathutils.Matrix(*mtx) + obj.matrix_world = plc + return plc + + +def create_shape(context, pts, indices, node, key, mtx, mat, umt): + name = node.get_first(TYP_NAME).data + shape = bpy.data.meshes.new(name) + if (key is not None): + name = "%s_%d" % (name, key) + data = [] + if (pts): + loopstart = [] + looplines = loop = 0 + nb_faces = len(indices) + for fid in range(nb_faces): + polyface = indices[fid] + looplines += len(polyface) + shape.vertices.add(len(pts) // 3) + shape.loops.add(looplines) + shape.polygons.add(nb_faces) + shape.vertices.foreach_set("co", pts) + for vtx in indices: + loopstart.append(loop) + data.extend(vtx) + loop += len(vtx) + shape.polygons.foreach_set("loop_start", loopstart) + shape.loops.foreach_set("vertex_index", data) + + if (len(data) > 0): + shape.validate() + shape.update() + obj = bpy.data.objects.new(name, shape) + context.view_layer.active_layer_collection.collection.objects.link(obj) + obj.matrix_world = mtx + if (umt): + adjust_material(obj, mat) + return True + return True + + +def calc_point(data): + points = [] + long, offset = get_long(data, 0) + while (offset < len(data)): + val, offset = get_long(data, 
offset) + flt, offset = get_floats(data, offset, 3) + points.extend(flt) + return points + + +def calc_point_float(data): + points = [] + long, offset = get_long(data, 0) + while (offset < len(data)): + flt, offset = get_floats(data, offset, 3) + points.extend(flt) + return points + + +def calc_point_3d(chunk): + data = chunk.data + count, offset = get_long(data, 0) + pointlist = [] + try: + while (offset < len(data)): + pt = Point3d() + long, offset = get_long(data, offset) + pt.points, offset = get_longs(data, offset, long) + pt.flags, offset = get_short(data, offset) + if ((pt.flags & 0x01) != 0): + pt.f1, offset = get_long(data, offset) + if ((pt.flags & 0x08) != 0): + pt.fH, offset = get_short(data, offset) + if ((pt.flags & 0x10) != 0): + pt.f2, offset = get_long(data, offset) + if ((pt.flags & 0x20) != 0): + pt.fA, offset = get_longs(data, offset, 2 * (long - 3)) + if (len(pt.points) > 0): + pointlist.append(pt) + except Exception as exc: + print('ArrayError:\n', "%s: offset = %d\n" % (exc, offset)) + return pointlist + + +def get_point_array(values): + verts = [] + if len(values) >= 4: + count, offset = get_long(values, 0) + while (count > 0): + floats, offset = get_floats(values, offset, 3) + verts.extend(floats) + count -= 1 + return verts + + +def get_poly_4p(points): + vertex = {} + for point in points: + ngon = point.points + key = point.fH + if (key not in vertex): + vertex[key] = [] + vertex[key].append(ngon) + return vertex + + +def get_poly_5p(data): + count, offset = get_long(data, 0) + ngons = [] + while count > 0: + pt, offset = get_longs(data, offset, 3) + offset += 8 + ngons.append(pt) + count -= 1 + return ngons + + +def get_poly_6p(data): + count, offset = get_long(data, 0) + polylist = [] + while (offset < len(data)): + long, offset = get_longs(data, offset, 6) + i = 5 + while ((i > 3) and (long[i] < 0)): + i -= 1 + if (i > 2): + polylist.append(long[1:i]) + return polylist + + +def get_poly_data(chunk): + offset = 0 + polylist = [] + data 
= chunk.data + while (offset < len(data)): + count, offset = get_long(data, offset) + points, offset = get_longs(data, offset, count) + polylist.append(points) + return polylist + + +def create_editable_poly(context, node, msh, mat, mtx, umt, uvm): + coords = point4i = point6i = pointNi = None + poly = msh.get_first(0x08FE) + created = False + lidx = [] + lcrd = [] + lply = [] + if (poly): + for child in poly.children: + if (child.types == 0x0100): + coords = calc_point(child.data) + elif (child.types == 0x0108): + point6i = child.data + elif (child.types == 0x011A): + point4i = calc_point_3d(child) + elif (child.types == 0x0310): + pointNi = child.data + elif (child.types == 0x0124): + lidx.append(get_long(child.data, 0)[0]) + elif (child.types == 0x0128): + lcrd.append(calc_point_float(child.data)) + elif (child.types == 0x012B): + lply.append(get_poly_data(child)) + if (point4i is not None): + vertex = get_poly_4p(point4i) + if (len(vertex) > 0): + for key, ngons in vertex.items(): + created |= create_shape(context, coords, ngons, + node, key, mtx, mat, umt) + else: + created = True + elif (point6i is not None): + ngons = get_poly_6p(point6i) + created = create_shape(context, coords, ngons, node, + None, mtx, mat, umt) + elif (pointNi is not None): + ngons = get_poly_5p(pointNi) + created = create_shape(context, coords, ngons, node, + None, mtx, mat, umt) + if (uvm and len(lidx) > 0): + for i in range(len(lidx)): + created |= create_shape(context, lcrd[i], lply[i], + node, lidx[i], mtx, mat, umt) + return created + + +def create_editable_mesh(context, node, msh, mat, mtx, umt): + poly = msh.get_first(0x08FE) + created = False + if (poly): + vertex_chunk = poly.get_first(0x0914) + clsid_chunk = poly.get_first(0x0912) + coords = get_point_array(vertex_chunk.data) + ngons = get_poly_5p(clsid_chunk.data) + created = create_shape(context, coords, ngons, node, None, mtx, mat, umt) + return created + + +def create_shell(context, node, shell, mat, mtx, umt, uvm): + refs 
= get_references(shell) + msh = refs[-1] + if (get_cls_name(msh) == "'Editable Poly'"): + created = create_editable_poly(context, node, msh, mat, mtx, umt, uvm) + else: + created = create_editable_mesh(context, node, msh, mat, mtx, umt) + return created + + +def create_skipable(context, node, skip): + name = node.get_first(TYP_NAME).data + print(" skipping %s '%s'... " % (skip, name)) + return True + + +def create_mesh(context, node, msh, mtx, mat, umt, uvm): + created = False + uid = get_guid(msh) + msh.geometry = None + if (uid == EDIT_MESH): + created = create_editable_mesh(context, node, msh, mat, mtx, umt) + elif (uid == EDIT_POLY): + created = create_editable_poly(context, node, msh, mat, mtx, umt, uvm) + elif (uid in {0x2032, 0x2033}): + created = create_shell(context, node, msh, mat, mtx, umt, uvm) + else: + skip = SKIPPABLE.get(uid) + if (skip is not None): + created = create_skipable(context, node, skip) + return created, uid + + +def create_object(context, node, mscale, usemat, uvmesh, transform): + parent = get_node_parent(node) + node.parent = parent + prs, msh, mat, lyr = get_matrix_mesh_material(node) + while ((parent is not None) and (get_guid(parent) != 0x02)): + parent_mtx = parent.matrix + if (parent_mtx): + prs = prs.dot(parent_mtx) + parent = get_node_parent(parent) + if (transform): + mtx = create_matrix(prs) @ mscale + else: + mtx = mscale + created, uid = create_mesh(context, node, msh, mtx, mat, usemat, uvmesh) + + +def make_scene(context, mscale, usemat, uvmesh, transform, parent): + for chunk in parent.children: + if (isinstance(chunk, SceneChunk)): + if ((get_guid(chunk) == 0x01) and (get_super_id(chunk) == 0x01)): + try: + create_object(context, chunk, mscale, usemat, uvmesh, transform) + except Exception as exc: + print('ImportError:', exc, chunk) + + +def read_scene(context, maxfile, filename, mscale, usemat, uvmesh, transform): + global SCENE_LIST + SCENE_LIST = read_chunks(maxfile, 'Scene', filename+'.Scn.bin', conReader=SceneChunk) 
+ make_scene(context, mscale, usemat, uvmesh, transform, SCENE_LIST[0]) + + +def read(context, filename, mscale, usemat, uvmesh, transform): + if (is_maxfile(filename)): + maxfile = ImportMaxFile(filename) + read_class_data(maxfile, filename) + read_config(maxfile, filename) + read_directory(maxfile, filename) + read_class_directory(maxfile, filename) + read_video_postqueue(maxfile, filename) + read_scene(context, maxfile, filename, mscale, usemat, uvmesh, transform) + else: + print("File seems to be no 3D Studio Max file!") + + +def load(operator, context, filepath="", scale_objects=1.0, use_material=True, + use_uv_mesh=False, use_apply_matrix=False, global_matrix=None): + context.window.cursor_set('WAIT') + mscale = mathutils.Matrix.Scale(scale_objects, 4) + if global_matrix is not None: + mscale = global_matrix @ mscale + + read(context, filepath, mscale, usemat=use_material, uvmesh=use_uv_mesh, transform=use_apply_matrix) + context.window.cursor_set('DEFAULT') + + return {'FINISHED'} -- 2.30.2 From 626e34d06e3fc63323329363eb14232b6a33e320 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Thu, 7 Dec 2023 16:47:40 +0100 Subject: [PATCH 36/38] =?UTF-8?q?io=5Fscene=5Fmax/=5F=5Finit.py=20gel?= =?UTF-8?q?=C3=B6scht?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- io_scene_max/__init.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 io_scene_max/__init.py diff --git a/io_scene_max/__init.py b/io_scene_max/__init.py deleted file mode 100644 index e69de29..0000000 -- 2.30.2 From 76a6a9cc40bb0636ed981036fc10a87350b111b8 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Thu, 7 Dec 2023 16:48:52 +0100 Subject: [PATCH 37/38] =?UTF-8?q?=5F=5Finit=5F=5F.py=20gel=C3=B6scht?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- __init__.py | 156 ---------------------------------------------------- 1 file changed, 156 deletions(-) delete mode 100644 __init__.py 
diff --git a/__init__.py b/__init__.py deleted file mode 100644 index 51af2e1..0000000 --- a/__init__.py +++ /dev/null @@ -1,156 +0,0 @@ -# SPDX-FileCopyrightText: 2023 Sebastian Schrand -# -# SPDX-License-Identifier: GPL-2.0-or-later - -import bpy -from bpy_extras.io_utils import ( - ImportHelper, - orientation_helper, - axis_conversion, - ) -from bpy.props import ( - BoolProperty, - FloatProperty, - StringProperty, - ) - -bl_info = { - "name": "Import Autodesk MAX (.max)", - "author": "Sebastian Sille, Philippe Lagadec, Jens M. Plonka", - "version": (1, 1, 2), - "blender": (3, 6, 0), - "location": "File > Import", - "description": "Import 3DSMAX meshes & materials", - "warning": "", - "filepath_url": "", - "category": "Import-Export"} - -if "bpy" in locals(): - import importlib - if "import_max" in locals(): - importlib.reload(import_max) - - -@orientation_helper(axis_forward='Y', axis_up='Z') -class Import_max(bpy.types.Operator, ImportHelper): - """Import Autodesk MAX""" - bl_idname = "import_autodesk.max" - bl_label = "Import MAX (.max)" - bl_options = {'PRESET', 'UNDO'} - - filename_ext = ".max" - filter_glob: StringProperty(default="*.max", options={'HIDDEN'},) - - scale_objects: FloatProperty( - name="Scale", - description="Scale factor for all objects", - min=0.0, max=10000.0, - soft_min=0.0, soft_max=10000.0, - default=1.0, - ) - use_material: BoolProperty( - name="Materials", - description="Import the materials of the objects", - default=True, - ) - use_uv_mesh: BoolProperty( - name="UV Mesh", - description="Import texture coordinates as mesh objects", - default=False, - ) - use_apply_matrix: BoolProperty( - name="Apply Matrix", - description="Use matrix to transform the objects", - default=False, - ) - - def execute(self, context): - from . 
import import_max - keywords = self.as_keywords(ignore=("axis_forward", "axis_up", "filter_glob")) - global_matrix = axis_conversion(from_forward=self.axis_forward, from_up=self.axis_up,).to_4x4() - keywords["global_matrix"] = global_matrix - - return import_max.load(self, context, **keywords) - - def draw(self, context): - pass - - -class MAX_PT_import_include(bpy.types.Panel): - bl_space_type = 'FILE_BROWSER' - bl_region_type = 'TOOL_PROPS' - bl_label = "Include" - bl_parent_id = "FILE_PT_operator" - - @classmethod - def poll(cls, context): - sfile = context.space_data - operator = sfile.active_operator - - return operator.bl_idname == "IMPORT_AUTODESK_OT_max" - - def draw(self, context): - layout = self.layout - layout.use_property_split = True - layout.use_property_decorate = False - - sfile = context.space_data - operator = sfile.active_operator - - layrow = layout.row(align=True) - layrow.prop(operator, "use_material") - layrow.label(text="", icon='MATERIAL' if operator.use_material else 'SHADING_TEXTURE') - layrow = layout.row(align=True) - layrow.prop(operator, "use_uv_mesh") - layrow.label(text="", icon='UV' if operator.use_uv_mesh else 'GROUP_UVS') - - -class MAX_PT_import_transform(bpy.types.Panel): - bl_space_type = 'FILE_BROWSER' - bl_region_type = 'TOOL_PROPS' - bl_label = "Transform" - bl_parent_id = "FILE_PT_operator" - - @classmethod - def poll(cls, context): - sfile = context.space_data - operator = sfile.active_operator - - return operator.bl_idname == "IMPORT_AUTODESK_OT_max" - - def draw(self, context): - layout = self.layout - layout.use_property_split = True - layout.use_property_decorate = False - - sfile = context.space_data - operator = sfile.active_operator - - layout.prop(operator, "scale_objects") - layrow = layout.row(align=True) - layrow.prop(operator, "use_apply_matrix") - layrow.label(text="", icon='VIEW_ORTHO' if operator.use_apply_matrix else 'MESH_GRID') - layout.prop(operator, "axis_forward") - layout.prop(operator, "axis_up") - 
- -def menu_func(self, context): - self.layout.operator(Import_max.bl_idname, text="Autodesk MAX (.max)") - - -def register(): - bpy.utils.register_class(Import_max) - bpy.utils.register_class(MAX_PT_import_include) - bpy.utils.register_class(MAX_PT_import_transform) - bpy.types.TOPBAR_MT_file_import.append(menu_func) - - -def unregister(): - bpy.types.TOPBAR_MT_file_import.remove(menu_func) - bpy.utils.unregister_class(MAX_PT_import_transform) - bpy.utils.unregister_class(MAX_PT_import_include) - bpy.utils.unregister_class(Import_max) - - -if __name__ == "__main__": - register() -- 2.30.2 From 56aef6b3ab2b3044f9b881db335275e89e5fdd79 Mon Sep 17 00:00:00 2001 From: Sebastian Sille Date: Thu, 7 Dec 2023 16:49:04 +0100 Subject: [PATCH 38/38] =?UTF-8?q?import=5Fmax.py=20gel=C3=B6scht?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- import_max.py | 1422 ------------------------------------------------- 1 file changed, 1422 deletions(-) delete mode 100644 import_max.py diff --git a/import_max.py b/import_max.py deleted file mode 100644 index 0370228..0000000 --- a/import_max.py +++ /dev/null @@ -1,1422 +0,0 @@ -# SPDX-FileCopyrightText: 2023 Sebastian Schrand -# -# SPDX-License-Identifier: GPL-2.0-or-later -# Import is based on using information from olefile IO sourcecode -# and the FreeCAD Autodesk 3DS Max importer ImportMAX -# -# olefile (formerly OleFileIO_PL) is copyright (c) 2005-2018 Philippe Lagadec -# (https://www.decalage.info) -# -# ImportMAX is copyright (c) 2017-2022 Jens M. 
Plonka -# (https://www.github.com/jmplonka/Importer3D) - -import io -import os -import re -import sys -import bpy -import math -import zlib -import array -import struct -import mathutils -from bpy_extras.node_shader_utils import PrincipledBSDFWrapper - - -################### -# DATA STRUCTURES # -################### - -MAGIC = b'\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1' -WORD_CLSID = "00020900-0000-0000-C000-000000000046" - -MIN_FILE_SIZE = 1536 -UNKNOWN_SIZE = 0x7FFFFFFF -MAXFILE_SIZE = 0x7FFFFFFFFFFFFFFF -MAXREGSECT = 0xFFFFFFFA # (-6) maximum SECT -DIFSECT = 0xFFFFFFFC # (-4) denotes a DIFAT sector in a FAT -FATSECT = 0xFFFFFFFD # (-3) denotes a FAT sector in a FAT -ENDOFCHAIN = 0xFFFFFFFE # (-2) end of a virtual stream chain -FREESECT = 0xFFFFFFFF # (-1) unallocated sector -MAX_STREAM = 2 # element is a stream object -ROOT_STORE = 5 # element is a root storage - -TYP_NAME = 0x0962 -INVALID_NAME = re.compile('^[0-9].*') -UNPACK_BOX_DATA = struct.Struct('= MIN_FILE_SIZE: - header = filename[:len(MAGIC)] - else: - with open(filename, 'rb') as fp: - header = fp.read(len(MAGIC)) - if header == MAGIC: - return True - else: - return False - - -class MaxStream(io.BytesIO): - """Returns an instance of the BytesIO class as read-only file object.""" - def __init__(self, fp, sect, size, offset, sectorsize, fat, filesize): - if size == UNKNOWN_SIZE: - size = len(fat) * sectorsize - nb_sectors = (size + (sectorsize-1)) // sectorsize - - data = [] - for i in range(nb_sectors): - try: - fp.seek(offset + sectorsize * sect) - except: - break - sector_data = fp.read(sectorsize) - data.append(sector_data) - try: - sect = fat[sect] & FREESECT - except IndexError: - break - data = b"".join(data) - if len(data) >= size: - data = data[:size] - self.size = size - else: - self.size = len(data) - io.BytesIO.__init__(self, data) - - -class MaxFileDirEntry: - """Directory Entry for a stream or storage.""" - STRUCT_DIRENTRY = '<64sHBBIII16sIQQIII' - DIRENTRY_SIZE = 128 - assert 
struct.calcsize(STRUCT_DIRENTRY) == DIRENTRY_SIZE - - def __init__(self, entry, sid, maxfile): - self.sid = sid - self.maxfile = maxfile - self.kids = [] - self.kids_dict = {} - self.used = False - ( - self.name_raw, - self.namelength, - self.entry_type, - self.color, - self.sid_left, - self.sid_right, - self.sid_child, - clsid, - self.dwUserFlags, - self.createTime, - self.modifyTime, - self.isectStart, - self.sizeLow, - self.sizeHigh - ) = struct.unpack(MaxFileDirEntry.STRUCT_DIRENTRY, entry) - - if self.namelength > 64: - self.namelength = 64 - self.name_utf16 = self.name_raw[:(self.namelength - 2)] - self.name = maxfile._decode_utf16_str(self.name_utf16) - # print('DirEntry SID=%d: %s' % (self.sid, repr(self.name))) - if maxfile.sectorsize == 512: - self.size = self.sizeLow - else: - self.size = self.sizeLow + (int(self.sizeHigh) << 32) - self.clsid = _clsid(clsid) - self.is_minifat = False - if self.entry_type in (ROOT_STORE, MAX_STREAM) and self.size > 0: - if self.size < maxfile.minisectorcutoff \ - and self.entry_type == MAX_STREAM: # only streams can be in MiniFAT - self.is_minifat = True - else: - self.is_minifat = False - maxfile._check_duplicate_stream(self.isectStart, self.is_minifat) - self.sect_chain = None - - def build_sect_chain(self, maxfile): - if self.sect_chain: - return - if self.entry_type not in (ROOT_STORE, MAX_STREAM) or self.size == 0: - return - self.sect_chain = list() - if self.is_minifat and not maxfile.minifat: - maxfile.loadminifat() - next_sect = self.isectStart - while next_sect != ENDOFCHAIN: - self.sect_chain.append(next_sect) - if self.is_minifat: - next_sect = maxfile.minifat[next_sect] - else: - next_sect = maxfile.fat[next_sect] - - def build_storage_tree(self): - if self.sid_child != FREESECT: - self.append_kids(self.sid_child) - self.kids.sort() - - def append_kids(self, child_sid): - if child_sid == FREESECT: - return - else: - child = self.maxfile._load_direntry(child_sid) - if child.used: - return - child.used = True - 
self.append_kids(child.sid_left) - name_lower = child.name.lower() - self.kids.append(child) - self.kids_dict[name_lower] = child - self.append_kids(child.sid_right) - child.build_storage_tree() - - def __eq__(self, other): - return self.name == other.name - - def __lt__(self, other): - return self.name < other.name - - def __ne__(self, other): - return not self.__eq__(other) - - def __le__(self, other): - return self.__eq__(other) or self.__lt__(other) - - -class ImportMaxFile: - """Representing an interface for importing .max files.""" - def __init__(self, filename=None): - self._filesize = None - self.byte_order = None - self.directory_fp = None - self.direntries = None - self.dll_version = None - self.fat = None - self.first_difat_sector = None - self.first_dir_sector = None - self.first_mini_fat_sector = None - self.fp = None - self.header_clsid = None - self.header_signature = None - self.mini_sector_shift = None - self.mini_sector_size = None - self.mini_stream_cutoff_size = None - self.minifat = None - self.minifatsect = None - self.minisectorcutoff = None - self.minisectorsize = None - self.ministream = None - self.minor_version = None - self.nb_sect = None - self.num_difat_sectors = None - self.num_dir_sectors = None - self.num_fat_sectors = None - self.num_mini_fat_sectors = None - self.reserved1 = None - self.reserved2 = None - self.root = None - self.sector_shift = None - self.sector_size = None - self.transaction_signature_number = None - if filename: - self.open(filename) - - def __enter__(self): - return self - - def __exit__(self, *args): - self.close() - - def _decode_utf16_str(self, utf16_str, errors='replace'): - unicode_str = utf16_str.decode('UTF-16LE', errors) - return unicode_str - - def open(self, filename): - if hasattr(filename, 'read'): - self.fp = filename - elif isinstance(filename, bytes) and len(filename) >= MIN_FILE_SIZE: - self.fp = io.BytesIO(filename) - else: - self.fp = open(filename, 'rb') - filesize = 0 - self.fp.seek(0, 
os.SEEK_END) - try: - filesize = self.fp.tell() - finally: - self.fp.seek(0) - self._filesize = filesize - self._used_streams_fat = [] - self._used_streams_minifat = [] - header = self.fp.read(512) - fmt_header = '<8s16sHHHHHHLLLLLLLLLL' - header_size = struct.calcsize(fmt_header) - header1 = header[:header_size] - ( - self.header_signature, - self.header_clsid, - self.minor_version, - self.dll_version, - self.byte_order, - self.sector_shift, - self.mini_sector_shift, - self.reserved1, - self.reserved2, - self.num_dir_sectors, - self.num_fat_sectors, - self.first_dir_sector, - self.transaction_signature_number, - self.mini_stream_cutoff_size, - self.first_mini_fat_sector, - self.num_mini_fat_sectors, - self.first_difat_sector, - self.num_difat_sectors - ) = struct.unpack(fmt_header, header1) - - self.sector_size = 2**self.sector_shift - self.mini_sector_size = 2**self.mini_sector_shift - if self.mini_stream_cutoff_size != 0x1000: - self.mini_stream_cutoff_size = 0x1000 - self.nb_sect = ((filesize + self.sector_size-1) // self.sector_size) - 1 - - # file clsid - self.header_clsid = _clsid(header[8:24]) - self.sectorsize = self.sector_size # i16(header, 30) - self.minisectorsize = self.mini_sector_size # i16(header, 32) - self.minisectorcutoff = self.mini_stream_cutoff_size # i32(header, 56) - self._check_duplicate_stream(self.first_dir_sector) - if self.num_mini_fat_sectors: - self._check_duplicate_stream(self.first_mini_fat_sector) - if self.num_difat_sectors: - self._check_duplicate_stream(self.first_difat_sector) - - # Load file allocation tables - self.loadfat(header) - self.loaddirectory(self.first_dir_sector) - self.minifatsect = self.first_mini_fat_sector - - def close(self): - self.fp.close() - - def _check_duplicate_stream(self, first_sect, minifat=False): - if minifat: - used_streams = self._used_streams_minifat - else: - if first_sect in (DIFSECT, FATSECT, ENDOFCHAIN, FREESECT): - return - used_streams = self._used_streams_fat - if first_sect in 
used_streams: - pass - else: - used_streams.append(first_sect) - - def sector_array(self, sect): - ary = array.array('I', sect) - if sys.byteorder == 'big': - ary.byteswap() - return ary - - def loadfat_sect(self, sect): - if isinstance(sect, array.array): - fat1 = sect - else: - fat1 = self.sector_array(sect) - isect = None - for isect in fat1: - isect = isect & FREESECT - if isect == ENDOFCHAIN or isect == FREESECT: - break - sector = self.getsect(isect) - nextfat = self.sector_array(sector) - self.fat = self.fat + nextfat - return isect - - def loadfat(self, header): - sect = header[76:512] - self.fat = array.array('I') - self.loadfat_sect(sect) - if self.num_difat_sectors != 0: - nb_difat_sectors = (self.sectorsize // 4) - 1 - nb_difat = (self.num_fat_sectors - 109 + nb_difat_sectors - 1) // nb_difat_sectors - isect_difat = self.first_difat_sector - for i in range(nb_difat): - sector_difat = self.getsect(isect_difat) - difat = self.sector_array(sector_difat) - self.loadfat_sect(difat[:nb_difat_sectors]) - isect_difat = difat[nb_difat_sectors] - if len(self.fat) > self.nb_sect: - self.fat = self.fat[:self.nb_sect] - - def loadminifat(self): - stream_size = self.num_mini_fat_sectors * self.sector_size - nb_minisectors = (self.root.size + self.mini_sector_size - 1) // self.mini_sector_size - used_size = nb_minisectors * 4 - sect = self._open(self.minifatsect, stream_size, force_FAT=True).read() - self.minifat = self.sector_array(sect) - self.minifat = self.minifat[:nb_minisectors] - - def getsect(self, sect): - try: - self.fp.seek(self.sectorsize * (sect + 1)) - except: - print('IndexError: Sector index out of range') - sector = self.fp.read(self.sectorsize) - return sector - - def loaddirectory(self, sect): - self.directory_fp = self._open(sect, force_FAT=True) - max_entries = self.directory_fp.size // 128 - self.direntries = [None] * max_entries - root_entry = self._load_direntry(0) - self.root = self.direntries[0] - self.root.build_storage_tree() - - def 
_load_direntry(self, sid): - if self.direntries[sid] is not None: - return self.direntries[sid] - self.directory_fp.seek(sid * 128) - entry = self.directory_fp.read(128) - self.direntries[sid] = MaxFileDirEntry(entry, sid, self) - return self.direntries[sid] - - def _open(self, start, size=UNKNOWN_SIZE, force_FAT=False): - if size < self.minisectorcutoff and not force_FAT: - if not self.ministream: - self.loadminifat() - size_ministream = self.root.size - self.ministream = self._open(self.root.isectStart, - size_ministream, force_FAT=True) - return MaxStream(fp=self.ministream, sect=start, size=size, - offset=0, sectorsize=self.minisectorsize, - fat=self.minifat, filesize=self.ministream.size) - else: - return MaxStream(fp=self.fp, sect=start, size=size, - offset=self.sectorsize, sectorsize=self.sectorsize, - fat=self.fat, filesize=self._filesize) - - def _find(self, filename): - if isinstance(filename, str): - filename = filename.split('/') - node = self.root - for name in filename: - for kid in node.kids: - if kid.name.lower() == name.lower(): - break - node = kid - return node.sid - - def openstream(self, filename): - sid = self._find(filename) - entry = self.direntries[sid] - return self._open(entry.isectStart, entry.size) - - -################### -# DATA PROCESSING # -################### - -class MaxChunk(): - """Representing a chunk of a .max file.""" - def __init__(self, types, size, level, number): - self.number = number - self.types = types - self.level = level - self.parent = None - self.previous = None - self.next = None - self.size = size - self.unknown = True - self.format = None - self.data = None - - def __str__(self): - if (self.unknown): - return "%s[%4x]%04X:%s" % ("" * self.level, self.number, self.types, - ":".join("%02x" % (c) for c in self.data)) - return "%s[%4x]%04X:%s=%s" % ("" * self.level, self.number, self.types, - self.format, self.data) - - -class ByteArrayChunk(MaxChunk): - """A byte array of a .max chunk.""" - def __init__(self, 
types, data, level, number): - MaxChunk.__init__(self, types, data, level, number) - - def set(self, data, name, fmt, start, end): - try: - self.data = struct.unpack(fmt, data[start:end]) - self.format = name - self.unknown = False - except Exception as exc: - self.data = data - # print('StructError:', exc, name) - - def set_string(self, data): - try: - self.data = data.decode('UTF-16LE') - self.format = "Str16" - self.unknown = False - except: - self.data = data - - def set_data(self, data): - if (self.types in [0x0340, 0x4001, 0x0456, 0x0962]): - self.set_string(data) - elif (self.types in [0x2034, 0x2035]): - self.set(data, "ints", '<' + 'I' * int(len(data) / 4), 0, len(data)) - elif (self.types in [0x2501, 0x2503, 0x2504, 0x2505, 0x2511]): - self.set(data, "floats", '<' + 'f' * int(len(data) / 4), 0, len(data)) - elif (self.types == 0x2510): - self.set(data, "struct", '<' + 'f' * int(len(data) / 4 - 1) + 'I', 0, len(data)) - elif (self.types == 0x0100): - self.set(data, "float", ' 3): - return get_rotation(refs[0]) - elif (uid == MATRIX_ROT): # Rotation Wire - return get_rotation(get_references(pos)[0]) - if (rotation): - mtx = mathutils.Matrix.Rotation(rotation.angle, 4, rotation.axis) - return mtx - - -def get_scale(pos): - mtx = mathutils.Matrix.Identity(4) - if (pos): - uid = get_guid(pos) - if (uid == 0x2010): # Bezier Scale - scale = pos.get_first(0x2501) - if (scale is None): - scale = pos.get_first(0x2505) - pos = scale.data - elif (uid == 0x442315): # TCB Zoom - scale = pos.get_first(0x2501) - if (scale is None): - scale = pos.get_first(0x2505) - pos = scale.data - elif (uid == MATRIX_SCL): # ScaleXYZ - pos = get_point_3d(pos, 1.0) - else: - return mtx - mtx = mathutils.Matrix.Diagonal(pos[:3]).to_4x4() - return mtx - - -def create_matrix(prc): - mtx = mathutils.Matrix.Identity(4) - pos = rot = scl = None - uid = get_guid(prc) - if (uid == 0x2005): # Position/Rotation/Scale - pos = get_position(get_references(prc)[0]) - rot = 
get_rotation(get_references(prc)[1]) - scl = get_scale(get_references(prc)[2]) - elif (uid == 0x9154): # BipSlave Control - biped_sub_anim = get_references(prc)[2] - refs = get_references(biped_sub_anim) - scl = get_scale(get_references(refs[1])[0]) - rot = get_rotation(get_references(refs[2])[0]) - pos = get_position(get_references(refs[3])[0]) - if (pos is not None): - mtx = pos @ mtx - if (rot is not None): - mtx = rot @ mtx - if (scl is not None): - mtx = scl @ mtx - return mtx - - -def get_matrix_mesh_material(node): - refs = get_reference(node) - if (refs): - prs = refs.get(0, None) - msh = refs.get(1, None) - mat = refs.get(3, None) - lyr = refs.get(6, None) - else: - refs = get_references(node) - prs = refs[0] - msh = refs[1] - mat = refs[3] - lyr = None - if (len(refs) > 6): - lyr = refs[6] - return prs, msh, mat, lyr - - -def get_property(properties, idx): - for child in properties.children: - if (child.types & 0x100E): - if (get_short(child.data, 0)[0] == idx): - return child - return None - - -def get_color(colors, idx): - prop = get_property(colors, idx) - if (prop is not None): - siz = len(prop.data) - 12 - col, offset = get_floats(prop.data, siz, 3) - return (col[0], col[1], col[2]) - return None - - -def get_value(colors, idx): - prop = get_property(colors, idx) - if (prop is not None): - siz = len(prop.data) - 4 - val, offset = get_float(prop.data, siz) - return val - return None - - -def get_parameter(colors, fmt): - if (fmt == 0x1): - siz = len(colors.data) - 12 - para, offset = get_floats(colors.data, siz, 3) - else: - siz = len(colors.data) - 4 - para, offset = get_float(colors.data, siz) - return para - - -def get_standard_material(refs): - material = None - try: - if (len(refs) > 2): - colors = refs[2] - parameters = get_references(colors)[0] - material = Material() - material.set('ambient', get_color(parameters, 0x00)) - material.set('diffuse', get_color(parameters, 0x01)) - material.set('specular', get_color(parameters, 0x02)) - 
material.set('emissive', get_color(parameters, 0x08)) - material.set('shinines', get_value(parameters, 0x0B)) - parablock = refs[4] # ParameterBlock2 - material.set('glossines', get_value(parablock, 0x02)) - material.set('metallic', get_value(parablock, 0x05)) - except: - pass - return material - - -def get_vray_material(vry): - material = Material() - try: - material.set('diffuse', get_color(vry, 0x01)) - material.set('specular', get_color(vry, 0x02)) - material.set('shinines', get_value(vry, 0x03)) - material.set('refraction', get_value(vry, 0x09)) - material.set('emissive', get_color(vry, 0x17)) - material.set('glossines', get_value(vry, 0x18)) - material.set('metallic', get_value(vry, 0x19)) - except: - pass - return material - - -def get_corona_material(mtl): - material = Material() - try: - cor = mtl.children - material.set('diffuse', get_parameter(cor[3], 0x1)) - material.set('specular', get_parameter(cor[4], 0x1)) - material.set('emissive', get_parameter(cor[8], 0x1)) - material.set('glossines', get_parameter(cor[9], 0x2)) - except: - pass - return material - - -def get_arch_material(ad): - material = Material() - try: - material.set('diffuse', get_color(ad, 0x1A)) - material.set('specular', get_color(ad, 0x05)) - material.set('shinines', get_value(ad, 0x0B)) - except: - pass - return material - - -def adjust_material(obj, mat): - material = None - if (mat is not None): - uid = get_guid(mat) - if (uid == 0x0002): # Standard - refs = get_references(mat) - material = get_standard_material(refs) - elif (uid == 0x0200): # Multi/Sub-Object - refs = get_references(mat) - material = adjust_material(obj, refs[-1]) - elif (uid == VRAY_MTL): # VRayMtl - refs = get_reference(mat) - material = get_vray_material(refs[1]) - elif (uid == CORO_MTL): # CoronaMtl - refs = get_references(mat) - material = get_corona_material(refs[0]) - elif (uid == ARCH_MTL): # Arch - refs = get_references(mat) - material = get_arch_material(refs[0]) - if (obj is not None) and (material is 
not None): - objMaterial = bpy.data.materials.new(get_cls_name(mat)) - obj.data.materials.append(objMaterial) - matShader = PrincipledBSDFWrapper(objMaterial, is_readonly=False, use_nodes=True) - matShader.base_color = objMaterial.diffuse_color[:3] = material.get('diffuse', (0.8, 0.8, 0.8)) - matShader.specular_tint = objMaterial.specular_color[:3] = material.get('specular', (1, 1, 1)) - matShader.specular = objMaterial.specular_intensity = material.get('glossines', 0.5) - matShader.roughness = objMaterial.roughness = 1.0 - material.get('shinines', 0.6) - matShader.metallic = objMaterial.metallic = material.get('metallic', 0) - matShader.emission_color = material.get('emissive', (0, 0, 0)) - matShader.ior = material.get('refraction', 1.45) - - -def adjust_matrix(obj, node): - mtx = create_matrix(node).flatten() - plc = mathutils.Matrix(*mtx) - obj.matrix_world = plc - return plc - - -def create_shape(context, pts, indices, node, key, mtx, mat, umt): - name = node.get_first(TYP_NAME).data - shape = bpy.data.meshes.new(name) - if (key is not None): - name = "%s_%d" % (name, key) - data = [] - if (pts): - loopstart = [] - looplines = loop = 0 - nb_faces = len(indices) - for fid in range(nb_faces): - polyface = indices[fid] - looplines += len(polyface) - shape.vertices.add(len(pts) // 3) - shape.loops.add(looplines) - shape.polygons.add(nb_faces) - shape.vertices.foreach_set("co", pts) - for vtx in indices: - loopstart.append(loop) - data.extend(vtx) - loop += len(vtx) - shape.polygons.foreach_set("loop_start", loopstart) - shape.loops.foreach_set("vertex_index", data) - - if (len(data) > 0): - shape.validate() - shape.update() - obj = bpy.data.objects.new(name, shape) - context.view_layer.active_layer_collection.collection.objects.link(obj) - obj.matrix_world = mtx - if (umt): - adjust_material(obj, mat) - return True - return True - - -def calc_point(data): - points = [] - long, offset = get_long(data, 0) - while (offset < len(data)): - val, offset = get_long(data, 
offset) - flt, offset = get_floats(data, offset, 3) - points.extend(flt) - return points - - -def calc_point_float(data): - points = [] - long, offset = get_long(data, 0) - while (offset < len(data)): - flt, offset = get_floats(data, offset, 3) - points.extend(flt) - return points - - -def calc_point_3d(chunk): - data = chunk.data - count, offset = get_long(data, 0) - pointlist = [] - try: - while (offset < len(data)): - pt = Point3d() - long, offset = get_long(data, offset) - pt.points, offset = get_longs(data, offset, long) - pt.flags, offset = get_short(data, offset) - if ((pt.flags & 0x01) != 0): - pt.f1, offset = get_long(data, offset) - if ((pt.flags & 0x08) != 0): - pt.fH, offset = get_short(data, offset) - if ((pt.flags & 0x10) != 0): - pt.f2, offset = get_long(data, offset) - if ((pt.flags & 0x20) != 0): - pt.fA, offset = get_longs(data, offset, 2 * (long - 3)) - if (len(pt.points) > 0): - pointlist.append(pt) - except Exception as exc: - print('ArrayError:\n', "%s: offset = %d\n" % (exc, offset)) - return pointlist - - -def get_point_array(values): - verts = [] - if len(values) >= 4: - count, offset = get_long(values, 0) - while (count > 0): - floats, offset = get_floats(values, offset, 3) - verts.extend(floats) - count -= 1 - return verts - - -def get_poly_4p(points): - vertex = {} - for point in points: - ngon = point.points - key = point.fH - if (key not in vertex): - vertex[key] = [] - vertex[key].append(ngon) - return vertex - - -def get_poly_5p(data): - count, offset = get_long(data, 0) - ngons = [] - while count > 0: - pt, offset = get_longs(data, offset, 3) - offset += 8 - ngons.append(pt) - count -= 1 - return ngons - - -def get_poly_6p(data): - count, offset = get_long(data, 0) - polylist = [] - while (offset < len(data)): - long, offset = get_longs(data, offset, 6) - i = 5 - while ((i > 3) and (long[i] < 0)): - i -= 1 - if (i > 2): - polylist.append(long[1:i]) - return polylist - - -def get_poly_data(chunk): - offset = 0 - polylist = [] - data 
= chunk.data - while (offset < len(data)): - count, offset = get_long(data, offset) - points, offset = get_longs(data, offset, count) - polylist.append(points) - return polylist - - -def create_editable_poly(context, node, msh, mat, mtx, umt, uvm): - coords = point4i = point6i = pointNi = None - poly = msh.get_first(0x08FE) - created = False - lidx = [] - lcrd = [] - lply = [] - if (poly): - for child in poly.children: - if (child.types == 0x0100): - coords = calc_point(child.data) - elif (child.types == 0x0108): - point6i = child.data - elif (child.types == 0x011A): - point4i = calc_point_3d(child) - elif (child.types == 0x0310): - pointNi = child.data - elif (child.types == 0x0124): - lidx.append(get_long(child.data, 0)[0]) - elif (child.types == 0x0128): - lcrd.append(calc_point_float(child.data)) - elif (child.types == 0x012B): - lply.append(get_poly_data(child)) - if (point4i is not None): - vertex = get_poly_4p(point4i) - if (len(vertex) > 0): - for key, ngons in vertex.items(): - created |= create_shape(context, coords, ngons, - node, key, mtx, mat, umt) - else: - created = True - elif (point6i is not None): - ngons = get_poly_6p(point6i) - created = create_shape(context, coords, ngons, node, - None, mtx, mat, umt) - elif (pointNi is not None): - ngons = get_poly_5p(pointNi) - created = create_shape(context, coords, ngons, node, - None, mtx, mat, umt) - if (uvm and len(lidx) > 0): - for i in range(len(lidx)): - created |= create_shape(context, lcrd[i], lply[i], - node, lidx[i], mtx, mat, umt) - return created - - -def create_editable_mesh(context, node, msh, mat, mtx, umt): - poly = msh.get_first(0x08FE) - created = False - if (poly): - vertex_chunk = poly.get_first(0x0914) - clsid_chunk = poly.get_first(0x0912) - coords = get_point_array(vertex_chunk.data) - ngons = get_poly_5p(clsid_chunk.data) - created = create_shape(context, coords, ngons, node, None, mtx, mat, umt) - return created - - -def create_shell(context, node, shell, mat, mtx, umt, uvm): - refs 
= get_references(shell) - msh = refs[-1] - if (get_cls_name(msh) == "'Editable Poly'"): - created = create_editable_poly(context, node, msh, mat, mtx, umt, uvm) - else: - created = create_editable_mesh(context, node, msh, mat, mtx, umt) - return created - - -def create_skipable(context, node, skip): - name = node.get_first(TYP_NAME).data - print(" skipping %s '%s'... " % (skip, name)) - return True - - -def create_mesh(context, node, msh, mtx, mat, umt, uvm): - created = False - uid = get_guid(msh) - msh.geometry = None - if (uid == EDIT_MESH): - created = create_editable_mesh(context, node, msh, mat, mtx, umt) - elif (uid == EDIT_POLY): - created = create_editable_poly(context, node, msh, mat, mtx, umt, uvm) - elif (uid in {0x2032, 0x2033}): - created = create_shell(context, node, msh, mat, mtx, umt, uvm) - else: - skip = SKIPPABLE.get(uid) - if (skip is not None): - created = create_skipable(context, node, skip) - return created, uid - - -def create_object(context, node, mscale, usemat, uvmesh, transform): - parent = get_node_parent(node) - node.parent = parent - prs, msh, mat, lyr = get_matrix_mesh_material(node) - while ((parent is not None) and (get_guid(parent) != 0x02)): - parent_mtx = parent.matrix - if (parent_mtx): - prs = prs.dot(parent_mtx) - parent = get_node_parent(parent) - if (transform): - mtx = create_matrix(prs) @ mscale - else: - mtx = mscale - created, uid = create_mesh(context, node, msh, mtx, mat, usemat, uvmesh) - - -def make_scene(context, mscale, usemat, uvmesh, transform, parent): - for chunk in parent.children: - if (isinstance(chunk, SceneChunk)): - if ((get_guid(chunk) == 0x01) and (get_super_id(chunk) == 0x01)): - try: - create_object(context, chunk, mscale, usemat, uvmesh, transform) - except Exception as exc: - print('ImportError:', exc, chunk) - - -def read_scene(context, maxfile, filename, mscale, usemat, uvmesh, transform): - global SCENE_LIST - SCENE_LIST = read_chunks(maxfile, 'Scene', filename+'.Scn.bin', conReader=SceneChunk) 
- make_scene(context, mscale, usemat, uvmesh, transform, SCENE_LIST[0]) - - -def read(context, filename, mscale, usemat, uvmesh, transform): - if (is_maxfile(filename)): - maxfile = ImportMaxFile(filename) - read_class_data(maxfile, filename) - read_config(maxfile, filename) - read_directory(maxfile, filename) - read_class_directory(maxfile, filename) - read_video_postqueue(maxfile, filename) - read_scene(context, maxfile, filename, mscale, usemat, uvmesh, transform) - else: - print("File seems to be no 3D Studio Max file!") - - -def load(operator, context, filepath="", scale_objects=1.0, use_material=True, - use_uv_mesh=False, use_apply_matrix=False, global_matrix=None): - context.window.cursor_set('WAIT') - mscale = mathutils.Matrix.Scale(scale_objects, 4) - if global_matrix is not None: - mscale = global_matrix @ mscale - - read(context, filepath, mscale, usemat=use_material, uvmesh=use_uv_mesh, transform=use_apply_matrix) - context.window.cursor_set('DEFAULT') - - return {'FINISHED'} -- 2.30.2