From f3412f42971a49169f62f5d25f0f3771e8d14e4d Mon Sep 17 00:00:00 2001 From: Soslan Guchmazov Date: Tue, 30 May 2023 14:02:45 +0200 Subject: [PATCH] New addon - HxA import/export --- io_scene_hxa/__init__.py | 60 +++++ io_scene_hxa/export_hxa_py.py | 389 +++++++++++++++++++++++++++++++ io_scene_hxa/hxapy_header.py | 88 +++++++ io_scene_hxa/hxapy_read_write.py | 292 +++++++++++++++++++++++ io_scene_hxa/hxapy_util.py | 48 ++++ io_scene_hxa/hxapy_validate.py | 144 ++++++++++++ io_scene_hxa/import_hxa_py.py | 305 ++++++++++++++++++++++++ 7 files changed, 1326 insertions(+) create mode 100644 io_scene_hxa/__init__.py create mode 100644 io_scene_hxa/export_hxa_py.py create mode 100644 io_scene_hxa/hxapy_header.py create mode 100644 io_scene_hxa/hxapy_read_write.py create mode 100644 io_scene_hxa/hxapy_util.py create mode 100644 io_scene_hxa/hxapy_validate.py create mode 100644 io_scene_hxa/import_hxa_py.py diff --git a/io_scene_hxa/__init__.py b/io_scene_hxa/__init__.py new file mode 100644 index 0000000..c4d1c63 --- /dev/null +++ b/io_scene_hxa/__init__.py @@ -0,0 +1,60 @@ +bl_info = { + "name": "HxA asset format", + "description": "Import-Export HxA", + "author": "SoslanGM (Soslan Guchmazov)", + "version": (0, 1), + "blender": (3, 0, 0), + "location": "File > Import-Export", + "warning": "", + "doc_url": "https://github.com/SoslanGM/HxApy_Blender_import-export", + "tracker_url": "", + "support": "TESTING", + "category": "Import-Export", +} + +if "bpy" in locals(): + import importlib + + if "import_hxa_py" in locals(): + importlib.reload(import_hxa_py) + if "export_hxa_py" in locals(): + importlib.reload(export_hxa_py) + + +import bpy +from . import import_hxa_py +from . import export_hxa_py + + +def menu_func_import(self, context): + self.layout.operator(import_hxa_py.ImportHXA.bl_idname, text="HxA (.hxa)") + + +def menu_func_export(self, context): + self.layout.operator(export_hxa_py.ExportHXA.bl_idname, text="HxA (.hxa)") + + +classes = ( + import_hxa_py.ImportHXA, + export_hxa_py.ExportHXA, +) + + +def register(): + for cls in classes: + bpy.utils.register_class(cls) + + bpy.types.TOPBAR_MT_file_import.append(menu_func_import) + bpy.types.TOPBAR_MT_file_export.append(menu_func_export) + + +def unregister(): + bpy.types.TOPBAR_MT_file_import.remove(menu_func_import) + bpy.types.TOPBAR_MT_file_export.remove(menu_func_export) + + for cls in classes: + bpy.utils.unregister_class(cls) + + +if __name__ == "__main__": + register() diff --git a/io_scene_hxa/export_hxa_py.py b/io_scene_hxa/export_hxa_py.py new file mode 100644 index 0000000..5a5f0f7 --- /dev/null +++ b/io_scene_hxa/export_hxa_py.py @@ -0,0 +1,389 @@ +import bpy +import bmesh + + +from . import hxapy_header as hxa +from . import hxapy_util as hxa_util +from . import hxapy_read_write as hxa_rw +from . 
import hxapy_validate as hxa_valid + +from bpy.props import StringProperty +from bpy_extras.io_utils import ExportHelper + +import logging + +log = logging.getLogger(__name__) + + +class ExportHXA(bpy.types.Operator, ExportHelper): + """Export a mesh as a HxA file""" + + bl_idname = "export_model.hxa" + bl_label = "Export HxA" + bl_options = {"REGISTER"} + + filename_ext = ".hxa" + filter_glob: StringProperty(default="*.hxa", options={"HIDDEN"}) + + def execute(self, context): + if bpy.ops.object.mode_set.poll(): + bpy.ops.object.mode_set(mode="OBJECT") + + hxa_dict = export_payload() + if not hxa_valid.hxa_util_validate(hxa_dict): + log.info(f"{self.filepath} couldn't pass validation") + self.report({"ERROR"}, f"{self.filepath} couldn't pass validation") + return {"CANCELLED"} + + try: + f = open(self.filepath, "wb") + except OSError: + log.info(f"HXA Error: File {self.filepath} could not be open for writing\n") + self.report( + {"ERROR"}, + f"HXA Error: File {self.filepath} could not be open for writing\n", + ) + return {"CANCELLED"} + + hxa_rw.write_hxa(f, hxa_dict) + f.close() + + return {"FINISHED"} + + +def hxa_meta(name, typ, data): + m = {"name": name, "type": typ, "data": data} + return m + + +def meta__armature_data(arm_ob, arm): + """ + Packs all the armature(bones) data into HxA meta fields. + """ + arm_location = arm_ob.location[:] + arm_scale = arm_ob.scale[:] + bone_count = len(arm.bones) + + bpy.context.view_layer.objects.active = arm_ob + bpy.ops.object.mode_set(mode="EDIT") + heads = [list(x.head) for x in arm.edit_bones] + tails = [list(x.tail) for x in arm.edit_bones] + bpy.ops.object.mode_set(mode="OBJECT") + + log.debug("Edit bone heads") + for h in heads: + log.debug(h) + log.debug("Edit bone tails") + for t in tails: + log.debug(t) + + heads = hxa_util.flatten_list(heads) + tails = hxa_util.flatten_list(tails) + names = [x.name for x in arm.bones] + parents = [x.parent.name if x.parent else "" for x in arm.bones] + + meta_armature_data_entries = [] + meta_armature_data_entries.append( + hxa_meta( + "meta armature location", hxa.HXAMetaDataType.HXA_MDT_DOUBLE, arm_location + ) + ) + meta_armature_data_entries.append( + hxa_meta("meta armature scale", hxa.HXAMetaDataType.HXA_MDT_DOUBLE, arm_scale) + ) + meta_armature_data_entries.append( + hxa_meta("meta bones heads", hxa.HXAMetaDataType.HXA_MDT_DOUBLE, heads) + ) + meta_armature_data_entries.append( + hxa_meta("meta bones tails", hxa.HXAMetaDataType.HXA_MDT_DOUBLE, tails) + ) + + bone_names_entries = [ + hxa_meta("", hxa.HXAMetaDataType.HXA_MDT_TEXT, names[i]) + for i in range(bone_count) + ] + meta_armature_data_entries.append( + hxa_meta( + "meta bones names", hxa.HXAMetaDataType.HXA_MDT_META, bone_names_entries + ) + ) + + bone_parents_entries = [ + hxa_meta("", hxa.HXAMetaDataType.HXA_MDT_TEXT, parents[i]) + for i in range(bone_count) + ] + meta_armature_data_entries.append( + hxa_meta( + "meta bones parents", hxa.HXAMetaDataType.HXA_MDT_META, bone_parents_entries + ) + ) + + meta_armature_data = hxa_meta( + "meta armature data", + hxa.HXAMetaDataType.HXA_MDT_META, + meta_armature_data_entries, + ) + + return meta_armature_data + + +def extract_weights(ob): + vgroups = ob.vertex_groups + + indexes_biglist = [[] for _ in vgroups] + weights_biglist = [[] for _ in vgroups] + for vi, vert in enumerate(ob.data.vertices): + for g in vert.groups: + indexes_biglist[g.group].append(vi) + weights_biglist[g.group].append(g.weight) + + return (indexes_biglist, weights_biglist) + + +def hxapy_type_meta(typ): + """Which HxA 
meta type will we use to write this type into the export file?""" + if typ == int: + return hxa.HXAMetaDataType.HXA_MDT_INT64 + elif typ == float: + return hxa.HXAMetaDataType.HXA_MDT_DOUBLE + elif typ == str: + return hxa.HXAMetaDataType.HXA_MDT_TEXT + + +# def ExportPayload(context, filepath): +def export_payload(): + """ + The overarching function to produce our dictionary representation of a HxA file, + before we write it to disk. + """ + bm = bmesh.new() + ob_mesh = bpy.context.object + me = ob_mesh.data + bm.from_mesh(me) + + vert_count = len(bm.verts) + face_count = len(bm.faces) + verts = [[c for c in v.co] for v in bm.verts] + faces = [f for f in bm.faces] + references = [[v.index for v in f.verts] for f in faces] + references = [ + [-x - 1 if _ref.index(x) == len(_ref) - 1 else x for x in _ref] + for _ref in references + ] + + verts = hxa_util.flatten_list(verts) + references = hxa_util.flatten_list(references) + log.debug(verts) + log.debug(references) + + bm.free() + + hxa_dict = {} + hxa_dict["version"] = hxa.HXA_VERSION_FORMAT + hxa_dict["node_count"] = 1 + + # *** Meta data + meta_data = [] + + # ** Mesh(meta) data + meta_meshdata_entries = [] + meta_meshdata_entries.append( + hxa_meta("meta objectname", hxa.HXAMetaDataType.HXA_MDT_TEXT, ob_mesh.name) + ) + meta_meshdata_entries.append( + hxa_meta("meta meshname", hxa.HXAMetaDataType.HXA_MDT_TEXT, me.name) + ) + meta_meshdata_entries.append( + hxa_meta( + "meta location", hxa.HXAMetaDataType.HXA_MDT_DOUBLE, ob_mesh.location[:] + ) + ) + meta_meshdata_entries.append( + hxa_meta("meta scale", hxa.HXAMetaDataType.HXA_MDT_DOUBLE, ob_mesh.scale[:]) + ) + + meta_data.append( + hxa_meta( + "meta mesh data", hxa.HXAMetaDataType.HXA_MDT_META, meta_meshdata_entries + ) + ) + + # ** Shapekeys + if ob_mesh.data.shape_keys: + object_shapekeys = ob_mesh.data.shape_keys.key_blocks + shapekey_count = len(object_shapekeys) + + meta_shapekeys_data = [] + for i in range(shapekey_count): + name = object_shapekeys[i].name + shapekey_values = [] + for x in object_shapekeys[i].data.values(): + shapekey_values += [y for y in x.co] + + meta_shapekeys_data.append( + hxa_meta(name, hxa.HXAMetaDataType.HXA_MDT_DOUBLE, shapekey_values) + ) + + meta_data.append( + hxa_meta( + "meta shapekeys", hxa.HXAMetaDataType.HXA_MDT_META, meta_shapekeys_data + ) + ) + + # ** Armature + if ob_mesh.parent: + if (ob_mesh.parent.type == "ARMATURE") & ( + type(ob_mesh.parent.data) == bpy.types.Armature + ): + ob_arm = ob_mesh.parent + arm = ob_arm.data + + meta_armaturedata = meta__armature_data(ob_arm, arm) + meta_data.append(meta_armaturedata) + + # ** Vertex weights + if len(meta_data) > 0: + indexes_list, weights_list = extract_weights(ob_mesh) + + vgroup_count = len(ob_mesh.vertex_groups) + if vgroup_count: + # vertex indexes + meta_weightindexes_data = [ + hxa_meta("", hxa.HXAMetaDataType.HXA_MDT_INT64, indexes_list[i]) + for i in range(vgroup_count) + ] + meta_data.append( + hxa_meta( + "meta weight indexes", + hxa.HXAMetaDataType.HXA_MDT_META, + meta_weightindexes_data, + ) + ) + + # vertex weights + meta_vertexweights_data = [ + hxa_meta("", hxa.HXAMetaDataType.HXA_MDT_DOUBLE, weights_list[i]) + for i in range(vgroup_count) + ] + meta_data.append( + hxa_meta( + "meta vertex weights", + hxa.HXAMetaDataType.HXA_MDT_META, + meta_vertexweights_data, + ) + ) + + # ** creases + creases = [x.crease for x in me.edges] + edges = [list(e.vertices) for e in me.edges] + crease_tuples = [] + for i in range(len(edges)): + crease_tuples.append((edges[i], creases[i])) + + 
crease_tuples = sorted(crease_tuples, key=lambda t: (t[0][0], t[0][1])) + + log.debug(f"> {edges}") + # sorted_edges = sorted(edges, key = lambda x: (x[0], x[1])) + sorted_edges, sorted_creases = zip(*crease_tuples) + edge_verts = hxa_util.flatten_list(sorted_edges) + log.debug(f"Edge verts: {edge_verts}") + + # check for !=0 creases + creases_present = len([x != 0 for x in creases]) > 0 + if creases_present: + meta_creases_data_entries = [] + meta_creases_data_entries.append( + hxa_meta("", hxa.HXAMetaDataType.HXA_MDT_INT64, edge_verts) + ) + meta_creases_data_entries.append( + hxa_meta("", hxa.HXAMetaDataType.HXA_MDT_DOUBLE, sorted_creases) + ) + + meta_data.append( + hxa_meta( + "meta creases", + hxa.HXAMetaDataType.HXA_MDT_META, + meta_creases_data_entries, + ) + ) + + # ** custom props + custom_props = list(ob_mesh.keys()) + if len(custom_props) > 0: + meta_customprops_data = [] + for cp in custom_props: + customprop = ob_mesh[cp] + import idprop + + if type(customprop) == idprop.types.IDPropertyArray: + al = len(customprop) + mtype = hxa.HXAMetaDataType(type(customprop[0])) + data = list(customprop) + else: + mtype = hxa.HXAMetaDataType(type(customprop)) + data = customprop + if mtype == hxa.HXA_MDT_TEXT: + al = len(data) + else: + al = 1 + + meta_cp_name = cp + meta_cp = { + "name_length": len(meta_cp_name), + "name": meta_cp_name, + "type": hxa.HXAMetaDataType(mtype).value, + "array_length": al, + "data": data, + } + meta_customprops_data.append(meta_cp) + # - I'll do this later. Might not be as straightforward. + # meta_customprops_data.append(hxa_meta(meta_cp_name, hxa.HXAMetaType(mtype).value, data)) + + meta_data.append( + hxa_meta( + "meta custom properties", + hxa.HXAMetaDataType.HXA_MDT_META, + meta_customprops_data, + ) + ) + + # *** Mesh(geometry) data + vertex_layer = { + "name_length": len(hxa.HXA_CONVENTION_HARD_BASE_VERTEX_LAYER_NAME), + "name": hxa.HXA_CONVENTION_HARD_BASE_VERTEX_LAYER_NAME, + "components": hxa.HXA_CONVENTION_HARD_BASE_VERTEX_LAYER_COMPONENTS, + "type": hxa.HXALayerDataType.HXA_LDT_FLOAT, + "data": verts, + } + vert_stack = {"layer_count": 1, "layers": [vertex_layer]} + + reference_layer = { + "name_length": len(hxa.HXA_CONVENTION_HARD_BASE_CORNER_LAYER_NAME), + "name": hxa.HXA_CONVENTION_HARD_BASE_CORNER_LAYER_NAME, + "components": hxa.HXA_CONVENTION_HARD_BASE_CORNER_LAYER_COMPONENTS, + "type": hxa.HXA_CONVENTION_HARD_BASE_CORNER_LAYER_TYPE, + "data": references, + } + corner_stack = {"layer_count": 1, "layers": [reference_layer]} + + edge_stack = {"layer_count": 0, "layers": []} + + face_stack = {"layer_count": 0, "layers": []} + + content = { + "vertex_count": vert_count, + "vertex_stack": vert_stack, + "edge_corner_count": len(references), + "corner_stack": corner_stack, + "edge_stack": edge_stack, + "face_count": face_count, + "face_stack": face_stack, + } + node = { + "type": hxa.HXANodeType.HXA_NT_GEOMETRY, + "meta_data_count": len(meta_data), + "meta_data": meta_data, + "content": content, + } + hxa_dict["nodes"] = [node] + return hxa_dict diff --git a/io_scene_hxa/hxapy_header.py b/io_scene_hxa/hxapy_header.py new file mode 100644 index 0000000..ecbb1f7 --- /dev/null +++ b/io_scene_hxa/hxapy_header.py @@ -0,0 +1,88 @@ +# This is a Python version of the original HxA header, written by Soslan Guchmazov (@SoslanGM). +# The original HxA header, as well as the HxA format itself, both for C programming language, +# was developed by Eskil Steenberg (@quelsolaar). 
+# You can find the original HxA readme at https://github.com/quelsolaar/HxA#readme, +# and the repository with this header(as well as source code for my Import/Export addon for Blender) +# is at https://github.com/SoslanGM/HxApy_Blender_import-export + +# Big thanks to @Scurest for help with the feedback, suggestions and fixes. + + +from enum import IntEnum + +HXA_VERSION_API = "0.3" +HXA_VERSION_FORMAT = 3 + + +HXA_NAME_MAX_LENGTH = 255 + + +class HXANodeType(IntEnum): + HXA_NT_META_ONLY = 0 + HXA_NT_GEOMETRY = 1 + HXA_NT_IMAGE = 2 + + +class HXAImageType(IntEnum): + HXA_IT_CUBE_IMAGE = 0 + HXA_IT_1D_IMAGE = 1 + HXA_IT_2D_IMAGE = 2 + HXA_IT_3D_IMAGE = 3 + + +class HXAMetaDataType(IntEnum): + HXA_MDT_INT64 = 0 + HXA_MDT_DOUBLE = 1 + HXA_MDT_NODE = 2 + HXA_MDT_TEXT = 3 + HXA_MDT_BINARY = 4 + HXA_MDT_META = 5 + + +class HXALayerDataType(IntEnum): + HXA_LDT_UINT8 = 0 + HXA_LDT_INT32 = 1 + HXA_LDT_FLOAT = 2 + HXA_LDT_DOUBLE = 3 + + +# - Hard conventions + +HXA_CONVENTION_HARD_BASE_VERTEX_LAYER_NAME = "vertex" +HXA_CONVENTION_HARD_BASE_VERTEX_LAYER_ID = 0 +HXA_CONVENTION_HARD_BASE_VERTEX_LAYER_COMPONENTS = 3 +HXA_CONVENTION_HARD_BASE_CORNER_LAYER_NAME = "reference" +HXA_CONVENTION_HARD_BASE_CORNER_LAYER_ID = 0 +HXA_CONVENTION_HARD_BASE_CORNER_LAYER_COMPONENTS = 1 +HXA_CONVENTION_HARD_BASE_CORNER_LAYER_TYPE = HXALayerDataType["HXA_LDT_INT32"].value +HXA_CONVENTION_HARD_EDGE_NEIGHBOUR_LAYER_NAME = "neighbour" +HXA_CONVENTION_HARD_EDGE_NEIGHBOUR_LAYER_TYPE = HXALayerDataType["HXA_LDT_INT32"].value + + +# - Soft conventions +# geometry layers +HXA_CONVENTION_SOFT_LAYER_SEQUENCE0 = "sequence" +HXA_CONVENTION_SOFT_LAYER_UV0 = "uv" +HXA_CONVENTION_SOFT_LAYER_NORMALS = "normal" +HXA_CONVENTION_SOFT_LAYER_BINORMAL = "binormal" +HXA_CONVENTION_SOFT_LAYER_TANGENT = "tangent" +HXA_CONVENTION_SOFT_LAYER_COLOR = "color" +HXA_CONVENTION_SOFT_LAYER_CREASES = "creases" +HXA_CONVENTION_SOFT_LAYER_SELECTION = "select" +HXA_CONVENTION_SOFT_LAYER_SKIN_WEIGHT = "skining_weight" +HXA_CONVENTION_SOFT_LAYER_SKIN_REFERENCE = "skining_reference" +HXA_CONVENTION_SOFT_LAYER_BLENDSHAPE = "blendshape" +HXA_CONVENTION_SOFT_LAYER_ADD_BLENDSHAPE = "addblendshape" +HXA_CONVENTION_SOFT_LAYER_MATERIAL_ID = "material" +HXA_CONVENTION_SOFT_LAYER_GROUP_ID = "group" + +# Image layers +HXA_CONVENTION_SOFT_ALBEDO = "albedo" +HXA_CONVENTION_SOFT_LIGHT = "light" +HXA_CONVENTION_SOFT_DISPLACEMENT = "displacement" +HXA_CONVENTION_SOFT_DISTORTION = "distortion" +HXA_CONVENTION_SOFT_AMBIENT_OCCLUSION = "ambient_occlusion" + +# tags layers +HXA_CONVENTION_SOFT_NAME = "name" +HXA_CONVENTION_SOFT_TRANSFORM = "transform" diff --git a/io_scene_hxa/hxapy_read_write.py b/io_scene_hxa/hxapy_read_write.py new file mode 100644 index 0000000..3df3242 --- /dev/null +++ b/io_scene_hxa/hxapy_read_write.py @@ -0,0 +1,292 @@ +from . 
import hxapy_header as hxa + +import logging + +log = logging.getLogger(__name__) + +# *** Logging functions (start) + + +def log_meta(meta): + log.debug("Meta:") + log.debug(f" - name: {meta['name']}") + log.debug(f" - type: {hxa.HXAMetaDataType(meta['type']).name}") + log.debug(f" - array_length: {len(meta['data'])}") + log.debug(f" - data: {meta['data']}") + + +def log_layer(layer): + log.debug(f" - name: {layer['name']}") + log.debug(f" - components: {layer['components']}") + log.debug(f" - data_type: {hxa.HXALayerDataType(layer['type']).name}") + log.debug(f" - data: {layer['data']}\n") + + +# *** Logging functions (end) + + +# *** Read functions (start) + +import struct +import array + + +def read_u8(f): + return f.read(1)[0] + + +def read_u32(f): + return struct.unpack(" 0: + return f"Done in: {h}h{m}m{s}s" + elif m > 0: + return f"Done in: {m}m{s}s" + elif s > 0: + return f"Done in: {s}s" + + +def flatten_list(_list): + res = [] + for inner in _list: + for el in inner: + res.append(el) + return res + + +def restore_faces(references): + faces = [] + tupl = [] + for r in references: + if r < 0: + r = -r - 1 + tupl.append(r) + faces.append(tuple(tupl)) + tupl = [] + else: + tupl.append(r) + return faces + + +def break_list_up(data, length, step): + return [tuple(data[x : x + step]) for x in list(range(0, length, step))] diff --git a/io_scene_hxa/hxapy_validate.py b/io_scene_hxa/hxapy_validate.py new file mode 100644 index 0000000..2036ba1 --- /dev/null +++ b/io_scene_hxa/hxapy_validate.py @@ -0,0 +1,144 @@ +from . import hxapy_header as hxa +from . import hxapy_read_write as hxa_rw + +import logging + +log = logging.getLogger(__name__) + + +def hxa_util_validate_meta(meta, node, count): + if meta["type"] == hxa.HXAMetaDataType.HXA_MDT_NODE: + for al in range(len(meta["data"])): + if meta["data"][al] >= count: + log.info( + f"HxA Verify Error: Node {node} has meta data {meta['name']} that is referencing a non \ + existent node ({meta['data'][al]} out of {count})\n" + ) + return False + if meta["type"] == hxa.HXAMetaDataType.HXA_MDT_META: + for al in range(len(meta["data"])): + hxa_util_validate_meta( + meta["data"][al], + node, + count, + ) + + +def hxa_util_validate(hxa_file): + for nc in range(hxa_file["node_count"]): + node = hxa_file["nodes"][nc] + for mc in range(node["meta_data_count"]): + hxa_util_validate_meta( + node["meta_data"][mc], + mc, + hxa_file["node_count"], + ) + + node_type = hxa.HXANodeType(node["type"]).value + if node_type == hxa.HXANodeType.HXA_NT_GEOMETRY: + if node["content"]["vertex_stack"]["layer_count"] == 0: + log.info(f"HxA Verify Error: Node {nc} has no vertex layer\n") + return False + components = node["content"]["vertex_stack"]["layers"][0]["components"] + if components != 3: + log.info( + f"HxA Verify Error: Node {mc} vertex layer vertex layer has {components} components. 
\ + Must be HXA_CONVENTION_HARD_BASE_VERTEX_LAYER_COMPONENTS \ + {hxa.HXA_CONVENTION_HARD_BASE_VERTEX_LAYER_COMPONENTS}.\n" + ) + return False + + layer_type = hxa.HXALayerDataType( + node["content"]["vertex_stack"]["layers"][0]["type"] + ).value + if (layer_type != hxa.HXALayerDataType.HXA_LDT_FLOAT) and ( + layer_type != hxa.HXALayerDataType.HXA_LDT_DOUBLE + ): + log.info( + f"HxA Verify Error: Node {nc} first vertex layer is {hxa.HXALayerDataType(layer_type).name}, \ + must be HXA_LDT_FLOAT or HXA_LDT_DOUBLE\n" + ) + return False + + name = node["content"]["vertex_stack"]["layers"][0]["name"] + if name != hxa.HXA_CONVENTION_HARD_BASE_VERTEX_LAYER_NAME: + log.info( + f'HxA Verify Error: Node {nc} vertex layer is named {name}. \ + Must be HXA_CONVENTION_HARD_BASE_VERTEX_LAYER_NAME " \ + {hxa.HXA_CONVENTION_HARD_BASE_VERTEX_LAYER_NAME}".\n' + ) + return False + + if node["content"]["corner_stack"]["layer_count"] != 0: + components = node["content"]["corner_stack"]["layers"][0]["components"] + if components != 1: + log.info( + f"HxA Verify Error: Node {nc} reference layer has {components} components. Must be 1.\n" + ) + return False + + layer_type = hxa.HXALayerDataType( + node["content"]["corner_stack"]["layers"][0]["type"] + ).value + if layer_type != hxa.HXALayerDataType.HXA_LDT_INT32: + log.info( + f"HxA Verify Error: Node {nc} reference layer is of type {hxa.HXALayerDataType(layer_type).value} \ + must be HXA_LDT_INT32\n" + ) + return False + + name = node["content"]["corner_stack"]["layers"][0]["name"] + if name != hxa.HXA_CONVENTION_HARD_BASE_CORNER_LAYER_NAME: + log.info( + f'HxA Verify Error: Node {nc} reference layer is named {name}. Must be \ + HXA_CONVENTION_HARD_BASE_CORNER_LAYER_NAME " \ + {hxa.HXA_CONVENTION_HARD_BASE_CORNER_LAYER_NAME}".\n' + ) + return False + + references = node["content"]["corner_stack"]["layers"][0]["data"] + poly_count = 0 + reference = 0 + # Q: what if edge_corner_count is 0?? + for cc in range(node["content"]["edge_corner_count"]): + if references[cc] < 0: + reference = -references[cc] - 1 + poly_count += 1 + else: + reference = references[cc] + + if reference >= node["content"]["vertex_count"]: + log.info( + f"HxA Verify Error: Node {nc} has a reference value referencing a non existing \ + vertex ({reference}).\n" + ) + return False + + face_count = node["content"]["face_count"] + if face_count != poly_count: + log.info( + f"HxA Verify Error: Node {nc} claims to have {face_count} faces but the reference data \ + has {poly_count} faces.\n" + ) + return False + + return True + + +if __name__ == "__main__": + argc = len(argv) + if argc == 1: + log.info("Add a filename (ex: py hxapy_validate.py cube.hxa)") + exit() + + # for now, we have a single filename after the script: + # - py hxapy_validate.py filename + filename = argv[-1] + with open(filename, "wb") as f: + hxafile = hxa_rw.read_hxa(f) + + valid = hxa_util_validate(hxafile) + if not valid: + log.info(f"{filename} could not pass validation") + if valid: + log.info(f"{filename} validated") diff --git a/io_scene_hxa/import_hxa_py.py b/io_scene_hxa/import_hxa_py.py new file mode 100644 index 0000000..d8e9425 --- /dev/null +++ b/io_scene_hxa/import_hxa_py.py @@ -0,0 +1,305 @@ +import bpy +from bpy.props import StringProperty +from bpy_extras.io_utils import ImportHelper + +from . import hxapy_read_write as hxa_rw +from . import hxapy_util as hxa_util +from . 
import hxapy_validate as hxa_valid + +import logging + +log = logging.getLogger(__name__) + + +class ImportHXA(bpy.types.Operator, ImportHelper): + """Import a HxA file as a mesh""" + + bl_idname = "import_model.hxa" + bl_label = "Import HxA" + bl_options = {"REGISTER"} + + filename_ext = ".hxa" + filter_glob: StringProperty(default="*.hxa", options={"HIDDEN"}) + + def execute(self, context): + try: + f = open(self.filepath, "rb") + except OSError: + self.report( + {"ERROR"}, + f"HXA Error: File {self.filepath} could not be open for reading\n", + ) + log.info(f"HXA Error: File {self.filepath} could not be open for reading\n") + return {"CANCELLED"} + + hxa_dict = hxa_rw.read_hxa(f) + f.close() + + if not hxa_valid.hxa_util_validate(hxa_dict): + self.report({"ERROR"}, f"{self.filepath} couldn't pass validation") + log.info(f"{self.filepath} couldn't pass validation") + return {"CANCELLED"} + + meta_shapekeys = None + meta_armaturedata = None + meta_weightindexes = None + meta_vertexweights = None + meta_customproperties = None + meta_creases = None + meta_objectname = None + meta_meshname = None + meta_location = None + meta_scale = None + meta_armature_location = None + meta_armature_scale = None + meta_bones_heads = None + meta_bones_tails = None + meta_bones_names = None + meta_bones_parents = None + meta_data_count = hxa_dict["nodes"][0]["meta_data_count"] + if meta_data_count > 0: + meta_data = hxa_dict["nodes"][0]["meta_data"] + + metas_present = {meta["name"]: meta for meta in meta_data} + meta_meshdata = metas_present["meta mesh data"] + + if "meta shapekeys" in metas_present.keys(): + meta_shapekeys = metas_present["meta shapekeys"] + + if "meta armature data" in metas_present.keys(): + meta_armaturedata = metas_present["meta armature data"] + + if "meta weight indexes" in metas_present.keys(): + meta_weightindexes = metas_present["meta weight indexes"] + + if "meta vertex weights" in metas_present.keys(): + meta_vertexweights = metas_present["meta vertex weights"] + + if "meta custom properties" in metas_present.keys(): + meta_customproperties = metas_present["meta custom properties"] + + if "meta creases" in metas_present.keys(): + meta_creases = metas_present["meta creases"] + + # ** mesh data + meta_meshdata_entries = meta_meshdata["data"] + metas_present = {meta["name"]: meta for meta in meta_meshdata_entries} + + if "meta objectname" in metas_present.keys(): + meta_objectname = metas_present["meta objectname"] + log.debug(meta_objectname["data"]) + + if "meta meshname" in metas_present.keys(): + meta_meshname = metas_present["meta meshname"] + log.debug(meta_meshname["data"]) + + if "meta location" in metas_present.keys(): + meta_location = metas_present["meta location"] + log.debug(meta_location["data"]) + + if "meta scale" in metas_present.keys(): + meta_scale = metas_present["meta scale"] + log.debug(meta_scale["data"]) + + # ** armature data + if meta_armaturedata: + meta_armaturedata_entries = meta_armaturedata["data"] + metas_present = { + meta["name"]: meta for meta in meta_armaturedata_entries + } + + if "meta armature location" in metas_present.keys(): + meta_armature_location = metas_present["meta armature location"] + + if "meta armature scale" in metas_present.keys(): + meta_armature_scale = metas_present["meta armature scale"] + + if "meta bones heads" in metas_present.keys(): + meta_bones_heads = metas_present["meta bones heads"] + + if "meta bones tails" in metas_present.keys(): + meta_bones_tails = metas_present["meta bones tails"] + + if "meta bones names" in 
metas_present.keys(): + meta_bones_names = metas_present["meta bones names"] + + if "meta bones parents" in metas_present.keys(): + meta_bones_parents = metas_present["meta bones parents"] + + vertex_count = hxa_dict["nodes"][0]["content"]["vertex_count"] + vert_data = hxa_dict["nodes"][0]["content"]["vertex_stack"]["layers"][0]["data"] + ref_data = hxa_dict["nodes"][0]["content"]["corner_stack"]["layers"][0]["data"] + + # - Add edge verts to the mesh, then write creases to mesh after picking out the edges? + verts = hxa_util.break_list_up(vert_data, vertex_count * 3, 3) + + if meta_creases: + edge_data = meta_creases["data"][0]["data"] + arrlen = len(meta_creases["data"][0]["data"]) + edges = hxa_util.break_list_up(edge_data, arrlen, 2) + crease_values = meta_creases["data"][1]["data"] + + crease_dict = {} + for i in range(len(edges)): + e = edges[i] + k = str(f"{e[0]} {e[1]}") + v = crease_values[i] + crease_dict[k] = v + else: + edges = [] # for now + + faces = hxa_util.restore_faces(ref_data) + + if meta_meshname: + me_name = meta_meshname["data"] + else: + me_name = "imported HxA mesh" + + if meta_objectname: + ob_name = meta_objectname["data"] + else: + ob_name = "imported HxA object" + + restore_mesh(verts, edges, faces, me_name, ob_name) + + mesh_object = bpy.context.object + + if meta_location: + x, y, z = meta_location["data"] + + mesh_object.location.x = x + mesh_object.location.y = y + mesh_object.location.z = z + + if meta_scale: + x, y, z = meta_scale["data"] + + mesh_object.scale.x = x + mesh_object.scale.y = y + mesh_object.scale.z = z + + if meta_armature_location: + armature_location = meta_armature_location["data"] + + if meta_armature_scale: + armature_scale = meta_armature_scale["data"] + + if meta_creases: + mesh_edges = mesh_object.data.edges + edge_verts = [list(e.vertices) for e in mesh_object.data.edges] + for i in range(len(crease_values)): + e = edge_verts[i] + k = str(f"{e[0]} {e[1]}") + mesh_edges[i].crease = crease_dict[k] + + if meta_shapekeys: + shapekeys_data = meta_shapekeys["data"] + + for i in range(len(shapekeys_data)): + shapekeys_values = hxa_util.break_list_up( + shapekeys_data[i]["data"], vertex_count * 3, 3 + ) + shapekey = mesh_object.shape_key_add( + name=shapekeys_data[i]["name"], from_mix=True + ) + for i in range(vertex_count): + shapekey.data[i].co = shapekeys_values[i] + + if meta_armaturedata: + bone_count = len(meta_bones_heads["data"]) / 3 + heads = hxa_util.break_list_up( + meta_bones_heads["data"], int(bone_count) * 3, 3 + ) + + tails = hxa_util.break_list_up( + meta_bones_tails["data"], int(bone_count) * 3, 3 + ) + names = [x["data"] for x in meta_bones_names["data"]] + parents = [x["data"] for x in meta_bones_parents["data"]] + + restore_armature( + armature_location, armature_scale, heads, tails, names, parents + ) + + # parent armature, apply location and scale + ob_arm = bpy.context.object + # arm = ob_arm.data + + bpy.ops.object.mode_set(mode="OBJECT") + bpy.ops.object.select_all(action="DESELECT") + mesh_object.select_set(True) + ob_arm.select_set(True) + bpy.context.view_layer.objects.active = ob_arm + bpy.ops.object.parent_set(type="ARMATURE_NAME") + + # - does this exist without armatures? 
(does this need to get indented into the armature block :) ) + # *** Vertex weights + if (meta_weightindexes != None) & (meta_vertexweights != None): + vindex_list = meta_weightindexes["data"] + vgroup_list = meta_vertexweights["data"] + + # ** write weights + for i in range(int(bone_count)): + indexes = vindex_list[i]["data"] + weights = vgroup_list[i]["data"] + vgroup_size = len(weights) + for j in range(vgroup_size): + mesh_object.vertex_groups[i].add( + [indexes[j]], weights[j], "REPLACE" + ) + + # *** Custom properties + # assumption: custom props are saved on the mesh object. It's fine, but something to think about. + if meta_customproperties: + customprop_entries = meta_customproperties["data"] + for customprop in customprop_entries: + mesh_object[customprop["name"]] = customprop["data"] + + bpy.ops.object.shade_flat() + + return {"FINISHED"} + + +def restore_mesh(verts, edges, faces, mesh_name="mesh", object_name="object"): + test_mesh = bpy.data.meshes.new(name=mesh_name) + test_mesh.from_pydata(verts, edges, faces) + + test_object = bpy.data.objects.new(name=object_name, object_data=test_mesh) + + bpy.context.view_layer.active_layer_collection.collection.objects.link(test_object) + bpy.ops.object.select_all(action="DESELECT") + test_object.select_set(True) + bpy.context.view_layer.objects.active = test_object + + +def restore_armature(location, scale, heads, tails, names, parents): + bpy.ops.object.armature_add(enter_editmode=True) + ob_arm = bpy.context.object + arm = ob_arm.data + ob_arm.location = location + ob_arm.scale = scale + + arm.edit_bones[-1].head = heads[0] + arm.edit_bones[-1].tail = tails[0] + arm.edit_bones[-1].name = names[0] + + for i in range(1, len(heads)): + ebone = arm.edit_bones.new(names[i]) + ebone.head = heads[i] + ebone.tail = tails[i] + + for i in range(len(parents)): + bpy.ops.armature.select_all(action="DESELECT") + child_name = names[i] + parent_name = parents[i] + if parent_name == "": + continue + + child = arm.edit_bones[child_name] + parent = arm.edit_bones[parent_name] + child.parent = parent + + ob_arm.show_in_front = True + bpy.ops.object.mode_set(mode="OBJECT") + bpy.ops.object.select_all(action="DESELECT") + ob_arm.select_set(True) -- 2.30.2
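For reference, a minimal self-contained sketch of the corner "reference" encoding that export_payload() writes into the HXA_CONVENTION_HARD_BASE_CORNER_LAYER_NAME layer and that hxapy_util.restore_faces() decodes: the last corner index of every polygon is stored negated as -index - 1. The helper names below are illustrative only and are not part of the addon.

def encode_references(faces):
    # Flatten polygons into one corner list; the last corner of each polygon
    # is stored as -index - 1 to mark where the face ends (see export_payload).
    refs = []
    for face in faces:
        refs.extend(face[:-1])
        refs.append(-face[-1] - 1)
    return refs


def decode_references(refs):
    # Mirror of hxapy_util.restore_faces(): a negative value closes the polygon.
    faces, current = [], []
    for r in refs:
        if r < 0:
            current.append(-r - 1)
            faces.append(tuple(current))
            current = []
        else:
            current.append(r)
    return faces


if __name__ == "__main__":
    quad_and_tri = [(0, 1, 2, 3), (2, 1, 4)]
    encoded = encode_references(quad_and_tri)   # [0, 1, 2, -4, 2, 1, -5]
    assert decode_references(encoded) == quad_and_tri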
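Likewise, a small illustrative sketch (hypothetical helper names, not part of the patch) of the "meta creases" layout: the exporter stores a flat list of edge vertex pairs, sorted by (v0, v1), as HXA_MDT_INT64 data alongside a matching list of HXA_MDT_DOUBLE crease values, and the importer rebuilds a lookup table keyed by the "v0 v1" string when assigning mesh_edges[i].crease.

def pack_creases(edges, creases):
    # edges: (v0, v1) vertex index pairs; creases: matching crease values.
    pairs = sorted(zip(edges, creases), key=lambda t: (t[0][0], t[0][1]))
    edge_verts = [v for edge, _ in pairs for v in edge]   # flat INT64 meta data
    values = [c for _, c in pairs]                        # flat DOUBLE meta data
    return edge_verts, values


def unpack_creases(edge_verts, values):
    # Rebuild the per-edge lookup table used when applying creases on import.
    table = {}
    for i in range(len(values)):
        v0, v1 = edge_verts[2 * i], edge_verts[2 * i + 1]
        table[f"{v0} {v1}"] = values[i]
    return table


if __name__ == "__main__":
    edge_verts, values = pack_creases([(1, 2), (0, 1)], [0.5, 1.0])
    assert unpack_creases(edge_verts, values)["0 1"] == 1.0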