Add io_mesh_3mf addon #104950
81  io_mesh_3mf/__init__.py  Normal file
@@ -0,0 +1,81 @@
# Blender add-on to import and export 3MF files.
# Copyright (C) 2020 Ghostkeeper
# This add-on is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any
# later version.
# This add-on is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
# details.
# You should have received a copy of the GNU Affero General Public License along with this plug-in. If not, see
# <https://gnu.org/licenses/>.

# <pep8 compliant>

bl_info = {
    "name": "3MF format",
    "author": "Ghostkeeper",
    "version": (1, 0, 0),
    "blender": (2, 80, 0),
    "location": "File > Import-Export",
    "description": "Import-Export 3MF files",
    "category": "Import-Export"
}

"""
Import and export 3MF files in Blender.
"""

# Reload functionality.
if "bpy" in locals():
    import importlib
    if "import_3mf" in locals():
        importlib.reload(import_3mf)
    if "export_3mf" in locals():
        importlib.reload(export_3mf)

import bpy.utils  # To (un)register the add-on.
import bpy.types  # To (un)register the add-on as an import/export function.

from .import_3mf import Import3MF  # Imports 3MF files.
from .export_3mf import Export3MF  # Exports 3MF files.


def menu_import(self, _):
    """
    Calls the 3MF import operator from the menu item.
    """
    self.layout.operator(Import3MF.bl_idname, text="3D Manufacturing Format (.3mf)")


def menu_export(self, _):
    """
    Calls the 3MF export operator from the menu item.
    """
    self.layout.operator(Export3MF.bl_idname, text="3D Manufacturing Format (.3mf)")


classes = (
    Import3MF,
    Export3MF
)


def register():
    for cls in classes:
        bpy.utils.register_class(cls)

    bpy.types.TOPBAR_MT_file_import.append(menu_import)
    bpy.types.TOPBAR_MT_file_export.append(menu_export)


def unregister():
    for cls in classes:
        bpy.utils.unregister_class(cls)

    bpy.types.TOPBAR_MT_file_import.remove(menu_import)
    bpy.types.TOPBAR_MT_file_export.remove(menu_export)


# Allow the add-on to be run directly without installation.
if __name__ == "__main__":
    register()
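Once registered, the two operators are also reachable from scripts; a minimal sketch of calling them (the file paths are placeholders, not part of this patch; `filepath` comes from Blender's ImportHelper/ExportHelper mixins):

    import bpy

    bpy.ops.import_mesh.threemf(filepath="/tmp/part.3mf")   # Uses Import3MF.bl_idname.
    bpy.ops.export_mesh.threemf(filepath="/tmp/scene.3mf")  # Uses Export3MF.bl_idname.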
326  io_mesh_3mf/annotations.py  Normal file
@@ -0,0 +1,326 @@
# Blender add-on to import and export 3MF files.
# Copyright (C) 2020 Ghostkeeper
# This add-on is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any
# later version.
# This add-on is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
# details.
# You should have received a copy of the GNU Affero General Public License along with this plug-in. If not, see
# <https://gnu.org/licenses/>.

# <pep8 compliant>

import bpy  # To store the annotations long-term in the Blender context.
import collections  # Namedtuple data structure for annotations, and Counter to write optimized content types.
import json  # To serialize the data for long-term storage in the Blender scene.
import logging  # Reporting parsing errors.
import os.path  # To parse target paths in relationships.
import urllib.parse  # To parse relative target paths in relationships.
import xml.etree.ElementTree  # To parse the relationships files.

from .constants import *


# These are the different types of annotations we can store.
Relationship = collections.namedtuple("Relationship", ["namespace", "source"])
ContentType = collections.namedtuple("ContentType", ["mime_type"])

# Flag object to denote that different 3MF archives give different content types to the same file in the archive.
ConflictingContentType = object()

ANNOTATION_FILE = ".3mf_annotations"  # File name to use to store the annotations in the Blender data.


class Annotations:
    """
    This is a collection of annotations for a 3MF document. It annotates the files in the archive with metadata
    information.

    The class contains serialization and deserialization functions in order to be able to load and save the
    annotations from/to a 3MF archive, and to load and save the annotations in the Blender scene.

    The annotations are stored in the `self.annotations` dictionary. The keys of this dictionary are the targets of
    the annotations, normally the files in this archive. However, they can be any URI, and the files don't
    necessarily need to exist.

    The values are sets of annotations. The annotations are named tuples as described at the beginning of this
    module. The set can contain any mixture of these named tuples. Duplicates get filtered out by the nature of the
    set data structure.
    """

    def __init__(self):
        """
        Creates an empty collection of annotations.
        """
        # All of the annotations so far. Keys are the target files of the annotations. Values are sets of annotation
        # objects.
        self.annotations = {}

    def add_rels(self, rels_file):
        """
        Add relationships to this collection from a file stream containing a .rels file from a 3MF archive.

        A relationship is treated as a file annotation, because it only contains a file that the relationship is
        targeting, and a meaningless namespace. The relationship also originates from a source, indicated by the path
        to the relationship file. This source also gets stored, so that it can be properly restored later.

        Duplicate relationships won't get stored.
        :param rels_file: A file stream containing a .rels file.
        """
        # Relationships are evaluated relative to the path that the _rels folder around the .rels file is on, if any.
        base_path = os.path.dirname(rels_file.name) + "/"
        if os.path.basename(os.path.dirname(base_path)) == RELS_FOLDER:
            base_path = os.path.dirname(os.path.dirname(base_path)) + "/"

        try:
            root = xml.etree.ElementTree.ElementTree(file=rels_file)
        except xml.etree.ElementTree.ParseError as e:
            logging.warning(
                f"Relationship file {rels_file.name} has malformed XML (position {e.position[0]}:{e.position[1]}).")
            return  # Skip this file.

        for relationship_node in root.iterfind(RELS_RELATIONSHIP_FIND, RELS_NAMESPACES):
            try:
                target = relationship_node.attrib["Target"]
                namespace = relationship_node.attrib["Type"]
            except KeyError as e:
                logging.warning(f"Relationship missing attribute: {str(e)}")
                continue  # Skip this relationship.
            if namespace == MODEL_REL:  # Don't store relationships that we will write ourselves.
                continue

            # Evaluate any relative URIs based on the path to this .rels file in the archive.
            target = urllib.parse.urljoin(base_path, target)

            if target != "" and target[0] == "/":
                # To coincide with the convention held by the zipfile package, paths in this archive will not start
                # with a slash.
                target = target[1:]

            if target not in self.annotations:
                self.annotations[target] = set()

            # Add to the annotations as a relationship (since it's a set, don't create duplicates).
            self.annotations[target].add(Relationship(namespace=namespace, source=base_path))

    def add_content_types(self, files_by_content_type):
        """
        Add annotations that signal the content types of the files in the archive.

        If a file already got a different content type from a different 3MF archive, the content type of the file now
        becomes unknown (and subsequently won't get stored in any exported 3MF archive).

        Content types for files known to this 3MF implementation will not get stored. This add-on will rewrite those
        files and may change the file location and such.
        :param files_by_content_type: The files in this archive, sorted by content type.
        """
        for content_type, file_set in files_by_content_type.items():
            if content_type == "":
                continue  # Don't store content type if the content type is unknown.
            if content_type in {RELS_MIMETYPE, MODEL_MIMETYPE}:
                continue  # Don't store content type if it's a file we'll rewrite with this add-on.
            for file in file_set:
                filename = file.name
                if filename not in self.annotations:
                    self.annotations[filename] = set()
                if ConflictingContentType in self.annotations[filename]:
                    # Content type was already conflicting through multiple previous files. It'll stay in conflict.
                    continue
                content_type_annotations = list(filter(lambda annotation: type(annotation) == ContentType,
                                                       self.annotations[filename]))
                if any(content_type_annotations) and content_type_annotations[0].mime_type != content_type:
                    # There was already a content type and it is different from this one.
                    # This file now has conflicting content types!
                    logging.warning(f"Found conflicting content types for file: {filename}")
                    for annotation in content_type_annotations:
                        self.annotations[filename].remove(annotation)
                    self.annotations[filename].add(ConflictingContentType)
                else:
                    # No content type yet, or the existing content type is the same.
                    # Adding it again wouldn't have any effect if it is the same.
                    self.annotations[filename].add(ContentType(content_type))

    def write_rels(self, archive):
        """
        Write the relationship annotations in this collection to an archive as .rels files.

        Multiple relationship files may be added to the archive, if relationships came from multiple sources in the
        original archives.
        :param archive: A zip archive to add the relationships to.
        """
        current_id = 0  # Have an incrementing ID number to make all relationship IDs unique across the whole archive.

        # First sort all relationships by their source, so that we know which relationship goes into which file.
        # We always want to create a .rels file for the archive root, with our default relationships.
        rels_by_source = {"/": set()}

        for target, annotations in self.annotations.items():
            for annotation in annotations:
                if type(annotation) is not Relationship:
                    continue
                if annotation.source not in rels_by_source:
                    rels_by_source[annotation.source] = set()
                rels_by_source[annotation.source].add((target, annotation.namespace))

        for source, annotations in rels_by_source.items():
            if source == "/":  # Writing to the archive root. Don't want to start zipfile paths with a slash.
                source = ""
            # Create an XML document containing all relationships for this source.
            root = xml.etree.ElementTree.Element(f"{{{RELS_NAMESPACE}}}Relationships")
            for target, namespace in annotations:
                xml.etree.ElementTree.SubElement(root, f"{{{RELS_NAMESPACE}}}Relationship", attrib={
                    f"{{{RELS_NAMESPACE}}}Id": "rel" + str(current_id),
                    f"{{{RELS_NAMESPACE}}}Target": "/" + target,
                    f"{{{RELS_NAMESPACE}}}Type": namespace
                })
                current_id += 1

            # Write relationships for files that we create.
            if source == "":
                xml.etree.ElementTree.SubElement(root, f"{{{RELS_NAMESPACE}}}Relationship", attrib={
                    f"{{{RELS_NAMESPACE}}}Id": "rel" + str(current_id),
                    f"{{{RELS_NAMESPACE}}}Target": "/" + MODEL_LOCATION,
                    f"{{{RELS_NAMESPACE}}}Type": MODEL_REL
                })
                current_id += 1

            document = xml.etree.ElementTree.ElementTree(root)

            # Write that XML document to a file.
            rels_file = source + RELS_FOLDER + "/.rels"  # _rels folder in the "source" folder.
            with archive.open(rels_file, 'w') as f:
                document.write(f, xml_declaration=True, encoding='UTF-8', default_namespace=RELS_NAMESPACE)

    def write_content_types(self, archive):
        """
        Write a [Content_Types].xml file to a 3MF archive, containing all of the content types that we have assigned.
        :param archive: A zip archive to add the content types to.
        """
        # First sort all of the content types by their extension, so that we can find out what the most common
        # content type is for each extension.
        content_types_by_extension = {}
        for target, annotations in self.annotations.items():
            for annotation in annotations:
                if type(annotation) is not ContentType:
                    continue
                extension = os.path.splitext(target)[1]
                if extension not in content_types_by_extension:
                    content_types_by_extension[extension] = []
                content_types_by_extension[extension].append(annotation.mime_type)

        # Then find out which is the most common content type to assign to that extension.
        most_common = {}
        for extension, mime_types in content_types_by_extension.items():
            counter = collections.Counter(mime_types)
            most_common[extension] = counter.most_common(1)[0][0]

        # Add the content types for files that this add-on creates by itself.
        most_common[".rels"] = RELS_MIMETYPE
        most_common[".model"] = MODEL_MIMETYPE

        # Write an XML file that contains the extension rules for the most common cases,
        # but specific overrides for the outliers.
        root = xml.etree.ElementTree.Element(f"{{{CONTENT_TYPES_NAMESPACE}}}Types")

        # First add all of the extension-based rules.
        for extension, mime_type in most_common.items():
            if not extension:  # Skip files without extension.
                continue
            xml.etree.ElementTree.SubElement(root, f"{{{CONTENT_TYPES_NAMESPACE}}}Default", attrib={
                f"{{{CONTENT_TYPES_NAMESPACE}}}Extension": extension[1:],  # Don't include the period.
                f"{{{CONTENT_TYPES_NAMESPACE}}}ContentType": mime_type
            })

        # Then write the overrides for files that don't share the most common content type for their extension.
        for target, annotations in self.annotations.items():
            for annotation in annotations:
                if type(annotation) is not ContentType:
                    continue
                extension = os.path.splitext(target)[1]
                if not extension or annotation.mime_type != most_common[extension]:
                    # This is an exceptional case that should be stored as an override.
                    xml.etree.ElementTree.SubElement(root, f"{{{CONTENT_TYPES_NAMESPACE}}}Override", attrib={
                        f"{{{CONTENT_TYPES_NAMESPACE}}}PartName": "/" + target,
                        f"{{{CONTENT_TYPES_NAMESPACE}}}ContentType": annotation.mime_type
                    })

        # Output all that to the [Content_Types].xml file.
        document = xml.etree.ElementTree.ElementTree(root)
        with archive.open(CONTENT_TYPES_LOCATION, 'w') as f:
            document.write(f, xml_declaration=True, encoding='UTF-8', default_namespace=CONTENT_TYPES_NAMESPACE)

    def store(self):
        """
        Stores this `Annotations` instance in the Blender scene.

        The instance will serialize itself and put that data in a hidden JSON file in the scene. This way the data
        can survive until it needs to be saved to a 3MF document again, even when shared through a Blend file.
        """
        # Generate a JSON document containing all annotations.
        document = {}
        for target, annotations in self.annotations.items():
            serialized_annotations = []
            for annotation in annotations:
                if type(annotation) == Relationship:
                    serialized_annotations.append({
                        "annotation": 'relationship',
                        "namespace": annotation.namespace,
                        "source": annotation.source
                    })
                elif type(annotation) == ContentType:
                    serialized_annotations.append({
                        "annotation": 'content_type',
                        "mime_type": annotation.mime_type
                    })
                elif annotation == ConflictingContentType:
                    serialized_annotations.append({
                        "annotation": 'content_type_conflict'
                    })
            document[target] = serialized_annotations

        # Store this in the Blender context.
        if ANNOTATION_FILE in bpy.data.texts:
            bpy.data.texts.remove(bpy.data.texts[ANNOTATION_FILE])
        text_file = bpy.data.texts.new(ANNOTATION_FILE)
        text_file.write(json.dumps(document))

    def retrieve(self):
        """
        Retrieves any existing annotations from the Blender scene.

        This looks for a serialized annotation file in the Blender data. If it exists, it parses that file and
        restores the annotation collection to the state it was in when that file was stored.
        """
        # If there's nothing stored in the current scene, this clears the state of the annotations.
        self.annotations.clear()

        if ANNOTATION_FILE not in bpy.data.texts:
            return  # Nothing to read. Done!
        try:
            annotation_data = json.loads(bpy.data.texts[ANNOTATION_FILE].as_string())
        except json.JSONDecodeError:
            logging.warning("Annotation file exists, but is not properly formatted.")
            return  # File was meddled with?

        for target, annotations in annotation_data.items():
            self.annotations[target] = set()
            try:
                for annotation in annotations:
                    if annotation['annotation'] == 'relationship':
                        self.annotations[target].add(
                            Relationship(namespace=annotation['namespace'], source=annotation['source']))
                    elif annotation['annotation'] == 'content_type':
                        self.annotations[target].add(ContentType(mime_type=annotation['mime_type']))
                    elif annotation['annotation'] == 'content_type_conflict':
                        self.annotations[target].add(ConflictingContentType)
                    else:
                        logging.warning(f"Unknown annotation type \"{annotation['annotation']}\" encountered.")
                        continue
            except TypeError:  # Raised when `annotations` is not iterable.
                logging.warning(f"Annotation for target \"{target}\" is not properly structured.")
            except KeyError as e:
                # Raised when missing the 'annotation' key or a required key belonging to that annotation.
                logging.warning(f"Annotation for target \"{target}\" missing key: {str(e)}")
            if not self.annotations[target]:  # Nothing was added in the end.
                del self.annotations[target]  # Don't store the empty target either then.
56  io_mesh_3mf/constants.py  Normal file
@@ -0,0 +1,56 @@
# Blender add-on to import and export 3MF files.
# Copyright (C) 2020 Ghostkeeper
# This add-on is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any
# later version.
# This add-on is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
# details.
# You should have received a copy of the GNU Affero General Public License along with this plug-in. If not, see
# <https://gnu.org/licenses/>.

# <pep8 compliant>

"""
This module defines some constants for 3MF's file structure.

These are the constants that are inherent to the 3MF file format.
"""

SUPPORTED_EXTENSIONS = set()  # Set of namespaces for 3MF extensions that we support.
# File contents to use when files must be preserved but there's a file with different content in a previous archive.
# Only for flagging. This will not be in the final 3MF archives.
conflicting_mustpreserve_contents = "<Conflicting MustPreserve file!>"

# Default storage locations.
MODEL_LOCATION = "3D/3dmodel.model"  # Conventional location for the 3D model data.
CONTENT_TYPES_LOCATION = "[Content_Types].xml"  # Location of the content types definition.
RELS_FOLDER = "_rels"  # Folder name to store relationships files in.

# Relationship types.
MODEL_REL = "http://schemas.microsoft.com/3dmanufacturing/2013/01/3dmodel"  # Relationship type of 3D models.
THUMBNAIL_REL = "http://schemas.openxmlformats.org/package/2006/relationships/metadata/thumbnail"

# MIME types of files in the archive.
RELS_MIMETYPE = "application/vnd.openxmlformats-package.relationships+xml"  # MIME type of .rels files.
MODEL_MIMETYPE = "application/vnd.ms-package.3dmanufacturing-3dmodel+xml"  # MIME type of .model files.

# Constants in the 3D model file.
MODEL_NAMESPACE = "http://schemas.microsoft.com/3dmanufacturing/core/2015/02"
MODEL_NAMESPACES = {
    "3mf": MODEL_NAMESPACE
}
MODEL_DEFAULT_UNIT = "millimeter"  # If the unit is missing, it will be this.

# Constants in the ContentTypes file.
CONTENT_TYPES_NAMESPACE = "http://schemas.openxmlformats.org/package/2006/content-types"
CONTENT_TYPES_NAMESPACES = {
    "ct": CONTENT_TYPES_NAMESPACE
}

# Constants in the .rels files.
RELS_NAMESPACE = "http://schemas.openxmlformats.org/package/2006/relationships"
RELS_NAMESPACES = {  # Namespaces used for the rels files.
    "rel": RELS_NAMESPACE
}
RELS_RELATIONSHIP_FIND = "rel:Relationship"
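For orientation, these constants describe where the add-on reads and writes parts inside the zip container. A minimal sketch (the archive path "example.3mf" is a placeholder, not part of this patch) that lists the parts a freshly exported file is expected to contain:

    import zipfile

    with zipfile.ZipFile("example.3mf") as archive:
        print(archive.namelist())
        # Expected to include CONTENT_TYPES_LOCATION ('[Content_Types].xml'),
        # RELS_FOLDER + '/.rels' ('_rels/.rels') and MODEL_LOCATION ('3D/3dmodel.model').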
527  io_mesh_3mf/export_3mf.py  Normal file
@@ -0,0 +1,527 @@
# Blender add-on to import and export 3MF files.
# Copyright (C) 2020 Ghostkeeper
# This add-on is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any
# later version.
# This add-on is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
# details.
# You should have received a copy of the GNU Affero General Public License along with this plug-in. If not, see
# <https://gnu.org/licenses/>.

# <pep8 compliant>

import base64  # To decode files that must be preserved.
import bpy  # The Blender API.
import bpy.props  # To define metadata properties for the operator.
import bpy.types  # This class is an operator in Blender, and to find meshes in the scene.
import bpy_extras.io_utils  # Helper functions to export meshes more easily.
import bpy_extras.node_shader_utils  # Converting material colors to sRGB.
import collections  # Counter, to find the most common material of an object.
import itertools
import logging  # To debug and log progress.
import mathutils  # For the transformation matrices.
import xml.etree.ElementTree  # To write XML documents with the 3D model data.
import zipfile  # To write zip archives, the shell of the 3MF file.

from .annotations import Annotations  # To store file annotations.
from .constants import *
from .metadata import Metadata  # To store metadata from the Blender scene into the 3MF file.
from .unit_conversions import blender_to_metre, threemf_to_metre

log = logging.getLogger(__name__)


class Export3MF(bpy.types.Operator, bpy_extras.io_utils.ExportHelper):
    """
    Operator that exports a 3MF file from Blender.
    """

    # Metadata.
    bl_idname = "export_mesh.threemf"
    bl_label = "Export 3MF"
    bl_description = "Save the current scene to 3MF"
    filename_ext = ".3mf"

    # Options for the user.
    filter_glob: bpy.props.StringProperty(
        default="*.3mf",
        options={'HIDDEN'})
    use_selection: bpy.props.BoolProperty(
        name="Selection Only",
        description="Export selected objects only.",
        default=False)
    global_scale: bpy.props.FloatProperty(
        name="Scale",
        default=1.0,
        soft_min=0.001,
        soft_max=1000.0,
        min=1e-6,
        max=1e6)
    use_mesh_modifiers: bpy.props.BoolProperty(
        name="Apply Modifiers",
        description="Apply the modifiers before saving.",
        default=True)
    coordinate_precision: bpy.props.IntProperty(
        name="Precision",
        description="The number of decimal digits to use in coordinates in the file.",
        default=4,
        min=0,
        max=12)

    def __init__(self):
        """
        Initialize some fields with defaults before starting.
        """
        super().__init__()
        self.next_resource_id = 1  # Which resource ID to generate for the next object.
        self.num_written = 0  # How many objects we've written to the file.
        self.material_resource_id = -1  # We write one group of materials. This is the resource ID of that group.
        self.material_name_to_index = {}  # For each material in Blender, the index in the 3MF materials group.

    def execute(self, context):
        """
        The main routine that writes the 3MF archive.

        This function serves as a high-level overview of the steps involved to write a 3MF file.
        :param context: The Blender context.
        :return: A set of status flags to indicate whether the write succeeded or not.
        """
        # Reset state.
        self.next_resource_id = 1  # Starts counting at 1 for some inscrutable reason.
        self.material_resource_id = -1
        self.num_written = 0

        archive = self.create_archive(self.filepath)
        if archive is None:
            return {'CANCELLED'}

        if self.use_selection:
            blender_objects = context.selected_objects
        else:
            blender_objects = context.scene.objects

        global_scale = self.unit_scale(context)

        # Due to an open bug in Python 3.7 (Blender's version) we need to prefix all elements with the namespace.
        # Bug: https://bugs.python.org/issue17088
        # Workaround: https://stackoverflow.com/questions/4997848/4999510#4999510
        root = xml.etree.ElementTree.Element(f"{{{MODEL_NAMESPACE}}}model")

        scene_metadata = Metadata()
        scene_metadata.retrieve(bpy.context.scene)
        self.write_metadata(root, scene_metadata)

        resources_element = xml.etree.ElementTree.SubElement(root, f"{{{MODEL_NAMESPACE}}}resources")
        self.material_name_to_index = self.write_materials(resources_element, blender_objects)
        self.write_objects(root, resources_element, blender_objects, global_scale)

        document = xml.etree.ElementTree.ElementTree(root)
        with archive.open(MODEL_LOCATION, 'w', force_zip64=True) as f:
            document.write(f, xml_declaration=True, encoding='UTF-8', default_namespace=MODEL_NAMESPACE)
        try:
            archive.close()
        except EnvironmentError as e:
            log.error(f"Unable to complete writing to 3MF archive: {e}")
            return {'CANCELLED'}

        log.info(f"Exported {self.num_written} objects to 3MF archive {self.filepath}.")
        return {'FINISHED'}

    # The rest of the functions are in order of when they are called.

    def create_archive(self, filepath):
        """
        Creates an empty 3MF archive.

        The archive is complete according to the 3MF specs except that the actual 3dmodel.model file is missing.
        :param filepath: The path to write the file to.
        :return: A zip archive that other functions can add things to.
        """
        try:
            archive = zipfile.ZipFile(filepath, 'w', compression=zipfile.ZIP_DEFLATED, compresslevel=9)

            # Take the file annotations we got from imported 3MF files and store them in the archive.
            annotations = Annotations()
            annotations.retrieve()
            annotations.write_rels(archive)
            annotations.write_content_types(archive)
            self.must_preserve(archive)
        except EnvironmentError as e:
            log.error(f"Unable to write 3MF archive to {filepath}: {e}")
            return None

        return archive

    def must_preserve(self, archive):
        """
        Write files that must be preserved to the archive.

        These files were stored in the Blender scene in a hidden location.
        :param archive: The archive to write files to.
        """
        for textfile in bpy.data.texts:
            filename = textfile.name
            if not filename.startswith(".3mf_preserved/"):
                continue  # Unrelated file. Not ours to read.
            contents = textfile.as_string()
            if contents == conflicting_mustpreserve_contents:
                continue  # This file was in conflict. Don't preserve any copy of it then.
            contents = base64.b85decode(contents.encode("UTF-8"))
            filename = filename[len(".3mf_preserved/"):]
            with archive.open(filename, 'w') as f:
                f.write(contents)

    def unit_scale(self, context):
        """
        Get the scaling factor we need to transform the document to millimetres.
        :param context: The Blender context to get the unit from.
        :return: Floating point value that we need to scale this model by. A small number (<1) means that we need to
        make the coordinates in the 3MF file smaller than the coordinates in Blender. A large number (>1) means we
        need to make the coordinates in the file larger than the coordinates in Blender.
        """
        scale = self.global_scale

        if context.scene.unit_settings.scale_length != 0:
            scale *= context.scene.unit_settings.scale_length  # Apply the global scale of the units in Blender.

        threemf_unit = MODEL_DEFAULT_UNIT
        blender_unit = context.scene.unit_settings.length_unit
        scale /= threemf_to_metre[threemf_unit]  # Convert 3MF units to metre.
        scale *= blender_to_metre[blender_unit]  # Convert metre to Blender's units.

        return scale

    def write_materials(self, resources_element, blender_objects):
        """
        Write the materials on the specified Blender objects to a 3MF document.

        We'll write all materials to a single <basematerials> tag in the resources.

        Aside from writing the materials to the document, this function also returns a mapping from the names of the
        materials in Blender (which must be unique) to the index in the <basematerials> material group. Using that
        mapping, the objects and triangles can write down an index referring to the list of <base> tags.

        Since the <base> material can only hold a color, we'll write the diffuse color of the material to the file.
        :param resources_element: A <resources> node from a 3MF document.
        :param blender_objects: A list of Blender objects that may have materials which we need to write to the
        document.
        :return: A mapping from material name to the index of that material in the <basematerials> tag.
        """
        name_to_index = {}  # The output list, mapping from material name to indexes in the <basematerials> tag.
        next_index = 0

        # Create an element lazily. We don't want to create an element if there are no materials to write.
        basematerials_element = None

        for blender_object in blender_objects:
            for material_slot in blender_object.material_slots:
                material = material_slot.material

                material_name = material.name
                if material_name in name_to_index:  # Already have this material through another object.
                    continue

                # Wrap this material into a principled render node, to convert its color to sRGB.
                principled = bpy_extras.node_shader_utils.PrincipledBSDFWrapper(material, is_readonly=True)
                color = principled.base_color
                red = min(255, round(color[0] * 255))
                green = min(255, round(color[1] * 255))
                blue = min(255, round(color[2] * 255))
                alpha = principled.alpha
                if alpha >= 1.0:  # Completely opaque. Leave out the alpha component.
                    color_hex = "#%0.2X%0.2X%0.2X" % (red, green, blue)
                else:
                    alpha = min(255, round(alpha * 255))
                    color_hex = "#%0.2X%0.2X%0.2X%0.2X" % (red, green, blue, alpha)

                if basematerials_element is None:
                    self.material_resource_id = str(self.next_resource_id)
                    self.next_resource_id += 1
                    basematerials_element = xml.etree.ElementTree.SubElement(
                        resources_element,
                        f"{{{MODEL_NAMESPACE}}}basematerials", attrib={
                            f"{{{MODEL_NAMESPACE}}}id": self.material_resource_id
                        })
                xml.etree.ElementTree.SubElement(basematerials_element, f"{{{MODEL_NAMESPACE}}}base", attrib={
                    f"{{{MODEL_NAMESPACE}}}name": material_name,
                    f"{{{MODEL_NAMESPACE}}}displaycolor": color_hex
                })
                name_to_index[material_name] = next_index
                next_index += 1

        return name_to_index

    def write_objects(self, root, resources_element, blender_objects, global_scale):
        """
        Writes a group of objects into the 3MF archive.
        :param root: An XML root element to write the objects into.
        :param resources_element: An XML element to write resources into.
        :param blender_objects: A list of Blender objects that need to be written to that XML element.
        :param global_scale: A scaling factor to apply to all objects to convert the units.
        """
        transformation = mathutils.Matrix.Scale(global_scale, 4)

        build_element = xml.etree.ElementTree.SubElement(root, f"{{{MODEL_NAMESPACE}}}build")
        for blender_object in blender_objects:
            if blender_object.parent is not None:
                continue  # Only write objects that have no parent, since we'll get the child objects recursively.
            if blender_object.type not in {'MESH', 'EMPTY'}:
                continue

            objectid, mesh_transformation = self.write_object_resource(resources_element, blender_object)

            item_element = xml.etree.ElementTree.SubElement(build_element, f"{{{MODEL_NAMESPACE}}}item")
            self.num_written += 1
            item_element.attrib[f"{{{MODEL_NAMESPACE}}}objectid"] = str(objectid)
            mesh_transformation = transformation @ mesh_transformation
            if mesh_transformation != mathutils.Matrix.Identity(4):
                item_element.attrib[f"{{{MODEL_NAMESPACE}}}transform"] =\
                    self.format_transformation(mesh_transformation)

            metadata = Metadata()
            metadata.retrieve(blender_object)
            if "3mf:partnumber" in metadata:
                item_element.attrib[f"{{{MODEL_NAMESPACE}}}partnumber"] = metadata["3mf:partnumber"].value
                del metadata["3mf:partnumber"]
            if metadata:
                metadatagroup_element = xml.etree.ElementTree.SubElement(
                    item_element,
                    f"{{{MODEL_NAMESPACE}}}metadatagroup")
                self.write_metadata(metadatagroup_element, metadata)

    def write_object_resource(self, resources_element, blender_object):
        """
        Write a single Blender object and all of its children to the resources of a 3MF document.

        If the object contains a mesh it'll get written to the document as an object with a mesh resource. If the
        object contains children it'll get written to the document as an object with components. If the object
        contains both, two objects will be written; one with the mesh and another with the components. The mesh then
        gets added as a component of the object with components.
        :param resources_element: The <resources> element of the 3MF document to write into.
        :param blender_object: A Blender object to write to that XML element.
        :return: A tuple, containing the object ID of the newly written resource and a transformation matrix that
        this resource must be saved with.
        """
        new_resource_id = self.next_resource_id
        self.next_resource_id += 1
        object_element = xml.etree.ElementTree.SubElement(resources_element, f"{{{MODEL_NAMESPACE}}}object")
        object_element.attrib[f"{{{MODEL_NAMESPACE}}}id"] = str(new_resource_id)

        metadata = Metadata()
        metadata.retrieve(blender_object)
        if "3mf:object_type" in metadata:
            object_type = metadata["3mf:object_type"].value
            if object_type != "model":  # Only write if not the default.
                object_element.attrib[f"{{{MODEL_NAMESPACE}}}type"] = object_type
            del metadata["3mf:object_type"]

        if blender_object.mode == 'EDIT':
            blender_object.update_from_editmode()  # Apply recent changes made to the model.
        mesh_transformation = blender_object.matrix_world

        child_objects = blender_object.children
        if child_objects:  # Only write the <components> tag if there are actually components.
            components_element = xml.etree.ElementTree.SubElement(
                object_element,
                f"{{{MODEL_NAMESPACE}}}components")
            for child in blender_object.children:
                if child.type != 'MESH':
                    continue
                # Recursively write children to the resources.
                child_id, child_transformation = self.write_object_resource(resources_element, child)
                # Use the pseudo-inverse for safety. Its epsilon doesn't matter, since it'll get multiplied by 0
                # later anyway if the matrix was degenerate.
                child_transformation = mesh_transformation.inverted_safe() @ child_transformation
                component_element = xml.etree.ElementTree.SubElement(
                    components_element,
                    f"{{{MODEL_NAMESPACE}}}component")
                self.num_written += 1
                component_element.attrib[f"{{{MODEL_NAMESPACE}}}objectid"] = str(child_id)
                if child_transformation != mathutils.Matrix.Identity(4):
                    component_element.attrib[f"{{{MODEL_NAMESPACE}}}transform"] =\
                        self.format_transformation(child_transformation)

        # In the tail recursion, get the vertex data.
        # This is necessary because we may need to apply the mesh modifiers, which causes these objects to lose
        # their children.
        if self.use_mesh_modifiers:
            dependency_graph = bpy.context.evaluated_depsgraph_get()
            blender_object = blender_object.evaluated_get(dependency_graph)

        try:
            mesh = blender_object.to_mesh()
        except RuntimeError:  # Object.to_mesh() is not guaranteed to return Optional[Mesh], apparently.
            return new_resource_id, mesh_transformation
        if mesh is None:
            return new_resource_id, mesh_transformation

        # Need to convert this to triangles-only, because 3MF doesn't support faces with more than 3 vertices.
        mesh.calc_loop_triangles()

        if len(mesh.vertices) > 0:  # Only write a <mesh> tag if there is mesh data.
            # If this object already contains components, we can't also store a mesh. So create a new object and use
            # that object as another component.
            if child_objects:
                mesh_id = self.next_resource_id
                self.next_resource_id += 1
                mesh_object_element = xml.etree.ElementTree.SubElement(
                    resources_element,
                    f"{{{MODEL_NAMESPACE}}}object")
                mesh_object_element.attrib[f"{{{MODEL_NAMESPACE}}}id"] = str(mesh_id)
                component_element = xml.etree.ElementTree.SubElement(
                    components_element,
                    f"{{{MODEL_NAMESPACE}}}component")
                self.num_written += 1
                component_element.attrib[f"{{{MODEL_NAMESPACE}}}objectid"] = str(mesh_id)
            else:  # No components, then we can write directly into this object resource.
                mesh_object_element = object_element
            mesh_element = xml.etree.ElementTree.SubElement(mesh_object_element, f"{{{MODEL_NAMESPACE}}}mesh")

            # Find the most common material for this mesh, for maximum compression.
            material_indices = [triangle.material_index for triangle in mesh.loop_triangles]
            # If there are no triangles, we provide 0 as the index, but then write_triangles won't read it either.
            most_common_material_list_index = 0

            if material_indices and blender_object.material_slots:
                counter = collections.Counter(material_indices)
                # most_common_material_object_index is an index from the MeshLoopTriangle, referring to the list of
                # materials attached to the Blender object.
                most_common_material_object_index = counter.most_common(1)[0][0]
                most_common_material = blender_object.material_slots[most_common_material_object_index].material
                # most_common_material_list_index is an index referring to our own list of materials that we put in
                # the resources.
                most_common_material_list_index = self.material_name_to_index[most_common_material.name]
                # We only ever write one group of materials. Its resource ID was determined when it was written.
                object_element.attrib[f"{{{MODEL_NAMESPACE}}}pid"] = str(self.material_resource_id)
                object_element.attrib[f"{{{MODEL_NAMESPACE}}}pindex"] = str(most_common_material_list_index)

            self.write_vertices(mesh_element, mesh.vertices)
            self.write_triangles(
                mesh_element,
                mesh.loop_triangles,
                most_common_material_list_index,
                blender_object.material_slots)

            # If the object has metadata, write that to a metadata object.
            if "3mf:partnumber" in metadata:
                mesh_object_element.attrib[f"{{{MODEL_NAMESPACE}}}partnumber"] =\
                    metadata["3mf:partnumber"].value
                del metadata["3mf:partnumber"]
            if "3mf:object_type" in metadata:
                object_type = metadata["3mf:object_type"].value
                if object_type != "model" and object_type != "other":
                    # Only write if not the default.
                    # Don't write "other" object types since we're not allowed to refer to them. Pretend they are
                    # normal models.
                    mesh_object_element.attrib[f"{{{MODEL_NAMESPACE}}}type"] = object_type
                del metadata["3mf:object_type"]
            if metadata:
                metadatagroup_element = xml.etree.ElementTree.SubElement(
                    object_element,
                    f"{{{MODEL_NAMESPACE}}}metadatagroup")
                self.write_metadata(metadatagroup_element, metadata)

        return new_resource_id, mesh_transformation

    def write_metadata(self, node, metadata):
        """
        Writes metadata from a metadata storage into an XML node.
        :param node: The node to add <metadata> tags to.
        :param metadata: The collection of metadata to write to that node.
        """
        for metadata_entry in metadata.values():
            metadata_node = xml.etree.ElementTree.SubElement(node, f"{{{MODEL_NAMESPACE}}}metadata")
            metadata_node.attrib[f"{{{MODEL_NAMESPACE}}}name"] = metadata_entry.name
            if metadata_entry.preserve:
                metadata_node.attrib[f"{{{MODEL_NAMESPACE}}}preserve"] = "1"
            if metadata_entry.datatype:
                metadata_node.attrib[f"{{{MODEL_NAMESPACE}}}type"] = metadata_entry.datatype
            metadata_node.text = metadata_entry.value

    def format_transformation(self, transformation):
        """
        Formats a transformation matrix in 3MF's formatting.

        This transformation matrix can then be written to an attribute.
        :param transformation: The transformation matrix to format.
        :return: A serialisation of the transformation matrix.
        """
        pieces = (row[:3] for row in transformation.transposed())  # Don't convert the constant bottom row.
        result = ""
        for cell in itertools.chain.from_iterable(pieces):
            if result != "":  # Don't put a space before the first number.
                result += " "
            result += self.format_number(cell, 6)  # Never use scientific notation!
        return result

    def write_vertices(self, mesh_element, vertices):
        """
        Writes a list of vertices into the specified mesh element.

        This then becomes a resource that can be used in a build.
        :param mesh_element: The <mesh> element of the 3MF document.
        :param vertices: A list of Blender vertices to add.
        """
        vertices_element = xml.etree.ElementTree.SubElement(mesh_element, f"{{{MODEL_NAMESPACE}}}vertices")

        # Precompute some names for better performance.
        vertex_name = f"{{{MODEL_NAMESPACE}}}vertex"
        x_name = f"{{{MODEL_NAMESPACE}}}x"
        y_name = f"{{{MODEL_NAMESPACE}}}y"
        z_name = f"{{{MODEL_NAMESPACE}}}z"

        for vertex in vertices:  # Create the <vertex> elements.
            vertex_element = xml.etree.ElementTree.SubElement(vertices_element, vertex_name)
            vertex_element.attrib[x_name] = self.format_number(vertex.co[0], self.coordinate_precision)
            vertex_element.attrib[y_name] = self.format_number(vertex.co[1], self.coordinate_precision)
            vertex_element.attrib[z_name] = self.format_number(vertex.co[2], self.coordinate_precision)

    def write_triangles(self, mesh_element, triangles, object_material_list_index, material_slots):
        """
        Writes a list of triangles into the specified mesh element.

        This then becomes a resource that can be used in a build.
        :param mesh_element: The <mesh> element of the 3MF document.
        :param triangles: A list of triangles. Each triangle is a list of indices into the list of vertices.
        :param object_material_list_index: The index of the material that the object was written with, to which these
        triangles belong. If a triangle has a different index, we need to write that index with the triangle.
        :param material_slots: List of materials belonging to the object for which we write triangles. These are
        necessary to interpret the material indices stored in the MeshLoopTriangles.
        """
        triangles_element = xml.etree.ElementTree.SubElement(mesh_element, f"{{{MODEL_NAMESPACE}}}triangles")

        # Precompute some names for better performance.
        triangle_name = f"{{{MODEL_NAMESPACE}}}triangle"
        v1_name = f"{{{MODEL_NAMESPACE}}}v1"
        v2_name = f"{{{MODEL_NAMESPACE}}}v2"
        v3_name = f"{{{MODEL_NAMESPACE}}}v3"
        p1_name = f"{{{MODEL_NAMESPACE}}}p1"

        for triangle in triangles:
            triangle_element = xml.etree.ElementTree.SubElement(triangles_element, triangle_name)
            triangle_element.attrib[v1_name] = str(triangle.vertices[0])
            triangle_element.attrib[v2_name] = str(triangle.vertices[1])
            triangle_element.attrib[v3_name] = str(triangle.vertices[2])

            if triangle.material_index < len(material_slots):
                # Convert to an index in our global list.
                material_index = self.material_name_to_index[material_slots[triangle.material_index].material.name]
                if material_index != object_material_list_index:
                    # Not equal to the index that our parent object was written with, so we must override it here.
                    triangle_element.attrib[p1_name] = str(material_index)

    def format_number(self, number, decimals):
        """
        Properly formats a floating point number to a certain precision.

        This format will never use scientific notation (no 3.14e-5 nonsense) and will have a fixed limit to the
        number of decimals. It will not have a limit to the length of the integer part. Any trailing zeros are
        stripped.
        :param number: A floating point number to format.
        :param decimals: The maximum number of digits to write after the decimal point.
        :return: A string representing that number.
        """
        formatted = ("{:." + str(decimals) + "f}").format(number).rstrip("0").rstrip(".")
        if formatted == "":
            return "0"
        return formatted
744  io_mesh_3mf/import_3mf.py  Normal file
@@ -0,0 +1,744 @@
# Blender add-on to import and export 3MF files.
# Copyright (C) 2020 Ghostkeeper
# This add-on is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option)
# any later version.
# This add-on is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
# details.
# You should have received a copy of the GNU Affero General Public License along with this plug-in. If not, see
# <https://gnu.org/licenses/>.

# <pep8 compliant>

import base64  # To encode MustPreserve files in the Blender scene.
import bpy  # The Blender API.
import bpy.ops  # To adjust the camera to fit models.
import bpy.props  # To define metadata properties for the operator.
import bpy.types  # This class is an operator in Blender.
import bpy_extras.io_utils  # Helper functions to import meshes more easily.
import bpy_extras.node_shader_utils  # Getting correct color spaces for materials.
import collections  # For namedtuple.
import logging  # To debug and log progress.
import mathutils  # For the transformation matrices.
import os.path  # To take file paths relative to the selected directory.
import re  # To find files in the archive based on the content types.
import xml.etree.ElementTree  # To parse the 3dmodel.model file.
import zipfile  # To read the 3MF files, which are secretly zip archives.

from .annotations import Annotations, ContentType, Relationship  # To use annotations to decide on what to import.
from .constants import *
from .metadata import MetadataEntry, Metadata  # To store and serialize metadata.
from .unit_conversions import blender_to_metre, threemf_to_metre  # To convert to Blender's units.

log = logging.getLogger(__name__)

ResourceObject = collections.namedtuple("ResourceObject", [
    "vertices",
    "triangles",
    "materials",
    "components",
    "metadata"])
Component = collections.namedtuple("Component", ["resource_object", "transformation"])
ResourceMaterial = collections.namedtuple("ResourceMaterial", ["name", "color"])


class Import3MF(bpy.types.Operator, bpy_extras.io_utils.ImportHelper):
    """
    Operator that imports a 3MF file into Blender.
    """

    # Metadata.
    bl_idname = "import_mesh.threemf"
    bl_label = "Import 3MF"
    bl_description = "Load a 3MF scene"
    bl_options = {'UNDO'}
    filename_ext = ".3mf"

    # Options for the user.
    filter_glob: bpy.props.StringProperty(default="*.3mf", options={'HIDDEN'})
    files: bpy.props.CollectionProperty(name="File Path", type=bpy.types.OperatorFileListElement)
    directory: bpy.props.StringProperty(subtype='DIR_PATH')
    global_scale: bpy.props.FloatProperty(name="Scale", default=1.0, soft_min=0.001, soft_max=1000.0, min=1e-6, max=1e6)

    def __init__(self):
        """
        Initializes the importer with empty fields.
        """
        super().__init__()
        self.resource_objects = {}  # Dictionary mapping resource IDs to ResourceObjects.

        # Dictionary mapping resource IDs to dictionaries mapping indexes to ResourceMaterial objects.
        self.resource_materials = {}

        # Which of our resource materials already exist in the Blender scene as a Blender material.
        self.resource_to_material = {}

        self.num_loaded = 0

    def execute(self, context):
        """
        The main routine that reads out the 3MF file.

        This function serves as a high-level overview of the steps involved to read the 3MF file.
        :param context: The Blender context.
        :return: A set of status flags to indicate whether the operation succeeded or not.
        """
        # Reset state.
        self.resource_objects = {}
        self.resource_materials = {}
        self.resource_to_material = {}
        self.num_loaded = 0
        scene_metadata = Metadata()
        # If there was already metadata in the scene, combine that with this file.
        scene_metadata.retrieve(bpy.context.scene)
        # Don't carry the title over from the old scene. If the imported 3MF has a title, use that.
        # Otherwise we won't override the scene title and it gets retained.
        del scene_metadata["Title"]
        annotations = Annotations()
        annotations.retrieve()  # If there were already annotations in the scene, combine those with this file.

        # Preparation of the input parameters.
        paths = [os.path.join(self.directory, name.name) for name in self.files]
        if not paths:
            paths.append(self.filepath)

        if bpy.ops.object.mode_set.poll():
            bpy.ops.object.mode_set(mode='OBJECT')  # Switch to object mode to view the new file.
        if bpy.ops.object.select_all.poll():
            bpy.ops.object.select_all(action='DESELECT')  # Deselect the other objects.

        for path in paths:
            files_by_content_type = self.read_archive(path)  # Get the files from the archive.

            # File metadata.
            for rels_file in files_by_content_type.get(RELS_MIMETYPE, []):
                annotations.add_rels(rels_file)
            annotations.add_content_types(files_by_content_type)
            self.must_preserve(files_by_content_type, annotations)

            # Read the model data.
            for model_file in files_by_content_type.get(MODEL_MIMETYPE, []):
                try:
                    document = xml.etree.ElementTree.ElementTree(file=model_file)
                except xml.etree.ElementTree.ParseError as e:
                    log.error(f"3MF document in {path} is malformed: {str(e)}")
                    continue
                if document is None:
                    # This file is corrupt or we can't read it. There is no error code to communicate this to
                    # Blender though.
                    continue  # Leave the scene empty / skip this file.
                root = document.getroot()
                if not self.is_supported(root.attrib.get("requiredextensions", "")):
                    log.warning(f"3MF document in {path} requires unknown extensions.")
                    # Still continue processing even though the spec says not to. Our aim is to retrieve whatever
                    # information we can.

                scale_unit = self.unit_scale(context, root)
                self.resource_objects = {}
                self.resource_materials = {}
                scene_metadata = self.read_metadata(root, scene_metadata)
                self.read_materials(root)
                self.read_objects(root)
                self.build_items(root, scale_unit)

        scene_metadata.store(bpy.context.scene)
        annotations.store()

        # Zoom the camera to view the imported objects.
        for area in bpy.context.screen.areas:
            if area.type == 'VIEW_3D':
                for region in area.regions:
                    if region.type == 'WINDOW':
                        override = {'area': area, 'region': region, 'edit_object': bpy.context.edit_object}
                        bpy.ops.view3d.view_selected(override)

        log.info(f"Imported {self.num_loaded} objects from 3MF files.")

        return {'FINISHED'}

    # The rest of the functions are in order of when they are called.

    def read_archive(self, path):
        """
        Creates file streams from all the files in the archive.

        The results are grouped by their content types, so that consumers of this data can pick the content types
        that they know from the file and process those.
        :param path: The path to the archive to read.
        :return: A dictionary with all of the resources in the archive by content type. The keys in this dictionary
        are the different content types available in the file. The values in this dictionary are lists of input
        streams referring to files in the archive.
        """
|
||||
result = {}
|
||||
try:
|
||||
archive = zipfile.ZipFile(path)
|
||||
content_types = self.read_content_types(archive)
|
||||
mime_types = self.assign_content_types(archive, content_types)
|
||||
for path, mime_type in mime_types.items():
|
||||
if mime_type not in result:
|
||||
result[mime_type] = []
|
||||
# Zipfile can open an infinite number of streams at the same time. Don't worry about it.
|
||||
result[mime_type].append(archive.open(path))
|
||||
except (zipfile.BadZipFile, EnvironmentError) as e:
|
||||
# File is corrupt, or the OS prevents us from reading it (doesn't exist, no permissions, etc.)
|
||||
log.error(f"Unable to read archive: {e}")
|
||||
return result
|
||||
return result
|
||||
|
||||
    def read_content_types(self, archive):
        """
        Read the content types from a 3MF archive.

        The output of this reading is a list of MIME types that are each mapped to a regular expression that matches
        on the file paths within the archive that could contain this content type. This encodes both types of
        descriptors for the content types that can occur in the content types document: extensions and full paths.

        The output is ordered in priority. Matches that should be evaluated first will be put in the front of the
        output list.
        :param archive: The 3MF archive to read the contents from.
        :return: A list of tuples, in order of importance, where the first element describes a regex of paths that
        match, and the second element is the MIME type string of the content type.
        """
        namespaces = {"ct": "http://schemas.openxmlformats.org/package/2006/content-types"}
        result = []

        try:
            with archive.open(CONTENT_TYPES_LOCATION) as f:
                try:
                    root = xml.etree.ElementTree.ElementTree(file=f)
                except xml.etree.ElementTree.ParseError as e:
                    log.warning(
                        f"{CONTENT_TYPES_LOCATION} has malformed XML "
                        f"(position {e.position[0]}:{e.position[1]}).")
                    root = None

                if root is not None:
                    # Overrides are more important than defaults, so put those in front.
                    for override_node in root.iterfind("ct:Override", namespaces):
                        if "PartName" not in override_node.attrib or "ContentType" not in override_node.attrib:
                            log.warning("[Content_Types].xml malformed: Override node without path or MIME type.")
                            continue  # Ignore the broken one.
                        match_regex = re.compile(re.escape(override_node.attrib["PartName"]))
                        result.append((match_regex, override_node.attrib["ContentType"]))

                    for default_node in root.iterfind("ct:Default", namespaces):
                        if "Extension" not in default_node.attrib or "ContentType" not in default_node.attrib:
                            log.warning("[Content_Types].xml malformed: Default node without extension or MIME type.")
                            continue  # Ignore the broken one.
                        match_regex = re.compile(r".*\." + re.escape(default_node.attrib["Extension"]))
                        result.append((match_regex, default_node.attrib["ContentType"]))
        except KeyError:  # ZipFile reports that the content types file doesn't exist.
            log.warning(f"{CONTENT_TYPES_LOCATION} file missing!")

        # This parser should be robust to slightly broken files and retrieve what we can.
        # In case the document is broken or missing, we append the default content types for 3MF here.
        # If the content types file was fine, these get the least priority so the actual data still wins.
        result.append((re.compile(r".*\.rels"), RELS_MIMETYPE))
        result.append((re.compile(r".*\.model"), MODEL_MIMETYPE))

        return result
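# --- Example sketch (not part of the add-on; illustration only) ---
# How a content types document turns into the prioritised (regex, MIME type) list described above. The XML literal
# below is a made-up minimal example, not taken from a real file.
import re
import xml.etree.ElementTree

example_xml = """<Types xmlns="http://schemas.openxmlformats.org/package/2006/content-types">
    <Default Extension="model" ContentType="application/vnd.ms-package.3dmanufacturing-3dmodel+xml"/>
    <Override PartName="/Metadata/thumbnail.png" ContentType="image/png"/>
</Types>"""
ct_namespaces = {"ct": "http://schemas.openxmlformats.org/package/2006/content-types"}
ct_root = xml.etree.ElementTree.fromstring(example_xml)

patterns = []  # Overrides first, so they win over the extension-based defaults.
for override in ct_root.iterfind("ct:Override", ct_namespaces):
    patterns.append((re.compile(re.escape(override.attrib["PartName"])), override.attrib["ContentType"]))
for default in ct_root.iterfind("ct:Default", ct_namespaces):
    patterns.append((re.compile(r".*\." + re.escape(default.attrib["Extension"])), default.attrib["ContentType"]))

print(patterns[0][1])  # image/png comes first because overrides take priority.
# --- End of example sketch ---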
    def assign_content_types(self, archive, content_types):
        """
        Assign a MIME type to each file in the archive.

        The MIME types are obtained through the content types file from the archive. This content types file itself
        is not in the result though.
        :param archive: A 3MF archive with files to assign content types to.
        :param content_types: The content types for files in that archive, in order of priority.
        :return: A dictionary mapping all file paths in the archive to a content type. If the content type for a file
        is unknown, the content type will be an empty string.
        """
        result = {}
        for file_info in archive.filelist:
            file_path = file_info.filename
            if file_path == CONTENT_TYPES_LOCATION:  # Don't index this one.
                continue
            for pattern, content_type in content_types:  # Process in the correct order!
                if pattern.fullmatch(file_path):
                    result[file_path] = content_type
                    break
            else:  # None of the patterns matched.
                result[file_path] = ""

        return result
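# --- Example sketch (not part of the add-on; illustration only) ---
# The for/else above implements "first pattern wins, empty string if nothing matches". A tiny standalone version
# with made-up patterns and paths:
import re

priority_patterns = [
    (re.compile(re.escape("/3D/special.model")), "application/x-special"),  # An override: exact path, checked first.
    (re.compile(r".*\.model"), "application/vnd.ms-package.3dmanufacturing-3dmodel+xml"),  # Extension default.
]
for candidate in ("/3D/special.model", "/3D/3dmodel.model", "/Metadata/notes.txt"):
    for pattern, mime in priority_patterns:
        if pattern.fullmatch(candidate):
            print(candidate, "->", mime)
            break
    else:
        print(candidate, "-> (unknown, empty string)")
# --- End of example sketch ---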
    def must_preserve(self, files_by_content_type, annotations):
        """
        Preserves files that are marked with the 'MustPreserve' relationship and PrintTickets.

        These files are saved in the Blender context as text files in a hidden folder. If the preserved files are in
        conflict with previously loaded 3MF archives (same file path, different content) then they will not be
        preserved.

        Archived files are stored in Base85 encoding to allow storing arbitrary files, even binary files. This sadly
        means that the file size will increase by about 25%, and that the files are not human-readable any more when
        opened in Blender, even if they were originally human-readable.
        :param files_by_content_type: The files in this 3MF archive, by content type. They must be provided by
        content type because that is how the ``read_archive`` function stores them, which is not ideal. But this
        function will sort that out.
        :param annotations: Collection of annotations gathered so far.
        """
        preserved_files = set()  # Find all files which must be preserved according to the annotations.
        for target, its_annotations in annotations.annotations.items():
            for annotation in its_annotations:
                if type(annotation) == Relationship:
                    if annotation.namespace in {
                        "http://schemas.openxmlformats.org/package/2006/relationships/mustpreserve",
                        "http://schemas.microsoft.com/3dmanufacturing/2013/01/printticket"
                    }:
                        preserved_files.add(target)
                elif type(annotation) == ContentType:
                    if annotation.mime_type == "application/vnd.ms-printing.printticket+xml":
                        preserved_files.add(target)

        for files in files_by_content_type.values():
            for file in files:
                if file.name in preserved_files:
                    filename = ".3mf_preserved/" + file.name
                    if filename in bpy.data.texts:
                        if bpy.data.texts[filename].as_string() == conflicting_mustpreserve_contents:
                            # This file was previously already in conflict. The new file will always be in conflict
                            # with one of the previous files.
                            continue
                    # Encode as Base85 so that the file can be saved in Blender's Text objects.
                    file_contents = base64.b85encode(file.read()).decode('UTF-8')
                    if filename in bpy.data.texts:
                        if bpy.data.texts[filename].as_string() == file_contents:
                            # File contents are EXACTLY the same, so the file is not in conflict.
                            continue  # But we also don't need to re-add the same file then.
                        else:  # Same file exists with different contents, so they are in conflict.
                            bpy.data.texts[filename].clear()
                            bpy.data.texts[filename].write(conflicting_mustpreserve_contents)
                            continue
                    else:  # File doesn't exist yet.
                        handle = bpy.data.texts.new(filename)
                        handle.write(file_contents)
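# --- Example sketch (not part of the add-on; illustration only) ---
# Why Base85 is used for preserved files: it can carry arbitrary binary data inside a Blender text block, at the
# cost of roughly 25% size overhead. A standalone round trip with made-up payload bytes:
import base64

payload = bytes(range(256))  # Arbitrary binary content, as a print ticket or thumbnail might contain.
encoded = base64.b85encode(payload).decode('UTF-8')  # Safe to store as text.
decoded = base64.b85decode(encoded)                  # What an exporter could recover later.

assert decoded == payload
print(f"{len(payload)} bytes grew to {len(encoded)} characters ({len(encoded) / len(payload):.2f}x).")
# --- End of example sketch ---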
    def is_supported(self, required_extensions):
        """
        Determines if a document is supported by this add-on.
        :param required_extensions: The value of the `requiredextensions` attribute of the root node of the XML
        document.
        :return: `True` if the document is supported, or `False` if it's not.
        """
        extensions = required_extensions.split(" ")
        extensions = set(filter(lambda x: x != "", extensions))
        return extensions <= SUPPORTED_EXTENSIONS
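# --- Example sketch (not part of the add-on; illustration only) ---
# The subset test above accepts a file only when every namespace it requires is known. The set below is a made-up
# stand-in for the add-on's SUPPORTED_EXTENSIONS constant, which is defined elsewhere in this module.
supported = {"http://schemas.example.com/3mf/materials"}  # Hypothetical value, for illustration only.

required = "http://schemas.example.com/3mf/materials http://schemas.example.com/3mf/beamlattice"
extensions = set(filter(None, required.split(" ")))

print(extensions <= supported)  # False: one required extension is unknown, so the importer logs a warning.
# --- End of example sketch ---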
    def unit_scale(self, context, root):
        """
        Get the scaling factor we need to use for this document, according to its unit.
        :param context: The Blender context.
        :param root: An ElementTree root element containing the entire 3MF file.
        :return: Floating point value that we need to scale this model by. A small number (<1) means that we need to
        make the coordinates in Blender smaller than the coordinates in the file. A large number (>1) means we need
        to make the coordinates in Blender larger than the coordinates in the file.
        """
        scale = self.global_scale

        if context.scene.unit_settings.scale_length != 0:
            scale /= context.scene.unit_settings.scale_length  # Apply the global scale of the units in Blender.

        threemf_unit = root.attrib.get("unit", MODEL_DEFAULT_UNIT)
        blender_unit = context.scene.unit_settings.length_unit
        scale *= threemf_to_metre[threemf_unit]  # Convert 3MF units to metre.
        scale /= blender_to_metre[blender_unit]  # Convert metre to Blender's units.

        return scale
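# --- Example sketch (not part of the add-on; illustration only) ---
# A worked example of the conversion above, using excerpts of the threemf_to_metre and blender_to_metre tables from
# io_mesh_3mf/unit_conversions.py (included further down in this patch). With a global scale of 1.0, a scene scale
# length of 1.0, a 3MF file in millimetres and Blender set to metres:
threemf_to_metre = {'millimeter': 0.001}   # Excerpt of the full table, so this runs standalone.
blender_to_metre = {'METERS': 1}

scale = 1.0                                # self.global_scale
scale /= 1.0                               # context.scene.unit_settings.scale_length
scale *= threemf_to_metre['millimeter']    # 3MF millimetres to metres.
scale /= blender_to_metre['METERS']        # Metres to Blender metres.

print(scale)  # 0.001: a 10 mm cube in the file becomes 0.01 Blender units wide.
# --- End of example sketch ---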
    def read_metadata(self, node, original_metadata=None):
        """
        Reads the metadata tags from a metadata group.
        :param node: A node in the 3MF document that contains <metadata> tags. This can be either a root node, or a
        <metadatagroup> node.
        :param original_metadata: If there was already metadata for this context from other documents, you can
        provide that metadata here. The metadata of those documents will be combined then.
        :return: A `Metadata` object.
        """
        if original_metadata is not None:
            metadata = original_metadata
        else:
            metadata = Metadata()  # Create a new Metadata object.

        for metadata_node in node.iterfind("./3mf:metadata", MODEL_NAMESPACES):
            if "name" not in metadata_node.attrib:
                log.warning("Metadata entry without name is discarded.")
                continue  # This attribute has no name, so there's no key by which I can save the metadata.
            name = metadata_node.attrib["name"]
            preserve_str = metadata_node.attrib.get("preserve", "0")
            # We don't use this ourselves since we always preserve, but the preserve attribute itself will also be
            # preserved.
            preserve = preserve_str != "0" and preserve_str.lower() != "false"
            datatype = metadata_node.attrib.get("type", "")
            value = metadata_node.text

            # Always store all metadata so that they are preserved.
            metadata[name] = MetadataEntry(name=name, preserve=preserve, datatype=datatype, value=value)

        return metadata
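# --- Example sketch (not part of the add-on; illustration only) ---
# What a single <metadata> element looks like and how its attributes map onto a MetadataEntry. The namespace mapping
# mirrors the MODEL_NAMESPACES constant defined elsewhere in this module; the XML literal and the "Benchy" value are
# made up.
import collections
import xml.etree.ElementTree

ExampleEntry = collections.namedtuple("ExampleEntry", ["name", "preserve", "datatype", "value"])
namespaces = {"3mf": "http://schemas.microsoft.com/3dmanufacturing/core/2015/02"}

snippet = """<model xmlns="http://schemas.microsoft.com/3dmanufacturing/core/2015/02">
    <metadata name="Title" preserve="1" type="xs:string">Benchy</metadata>
</model>"""
model_root = xml.etree.ElementTree.fromstring(snippet)

for node in model_root.iterfind("./3mf:metadata", namespaces):
    preserve_str = node.attrib.get("preserve", "0")
    entry = ExampleEntry(
        name=node.attrib["name"],
        preserve=preserve_str != "0" and preserve_str.lower() != "false",
        datatype=node.attrib.get("type", ""),
        value=node.text)
    print(entry)  # ExampleEntry(name='Title', preserve=True, datatype='xs:string', value='Benchy')
# --- End of example sketch ---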
    def read_materials(self, root):
        """
        Read out all of the material resources from the 3MF document.

        The materials will be stored in `self.resource_materials` until it gets used to build the items.
        :param root: The root of an XML document that may contain materials.
        """
        for basematerials_item in root.iterfind("./3mf:resources/3mf:basematerials", MODEL_NAMESPACES):
            try:
                material_id = basematerials_item.attrib["id"]
            except KeyError:
                log.warning("Encountered a basematerials item without resource ID.")
                continue  # Need to have an ID, or no item can reference to the materials. Skip this one.
            if material_id in self.resource_materials:
                log.warning(f"Duplicate material ID: {material_id}")
                continue

            # Use a dictionary mapping indices to resources, because some indices may be skipped due to being
            # invalid.
            self.resource_materials[material_id] = {}
            index = 0

            # "Base" must be the stupidest name for a material resource. Oh well.
            for base_item in basematerials_item.iterfind("./3mf:base", MODEL_NAMESPACES):
                name = base_item.attrib.get("name", "3MF Material")
                color = base_item.attrib.get("displaycolor")
                if color is not None:
                    # Parse the color. It's a hexadecimal number indicating RGB or RGBA.
                    color = color.lstrip("#")  # Should start with a #. We'll be lenient if it's not.
                    try:
                        color_int = int(color, 16)
                        # Separate out up to four bytes from this int, from right to left.
                        b1 = (color_int & 0x000000FF) / 255
                        b2 = ((color_int & 0x0000FF00) >> 8) / 255
                        b3 = ((color_int & 0x00FF0000) >> 16) / 255
                        b4 = ((color_int & 0xFF000000) >> 24) / 255
                        if len(color) == 6:  # RGB format.
                            color = (b3, b2, b1, 1.0)  # b1, b2 and b3 are B, G, R respectively. b4 is always 0.
                        else:  # RGBA format, or invalid.
                            color = (b4, b3, b2, b1)  # b1, b2, b3 and b4 are A, B, G, R respectively.
                    except ValueError:
                        log.warning(f"Invalid color for material {name} of resource {material_id}: {color}")
                        color = None  # Don't add a color for this material.

                # Input is valid. Create a resource.
                self.resource_materials[material_id][index] = ResourceMaterial(name=name, color=color)
                index += 1

            if len(self.resource_materials[material_id]) == 0:
                del self.resource_materials[material_id]  # Don't leave empty material sets hanging.
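# --- Example sketch (not part of the add-on; illustration only) ---
# The displaycolor parsing above, run standalone on two made-up attribute values. 3MF writes colours as #RRGGBB or
# #RRGGBBAA; the bit masks pull the bytes out from the right, which is why the tuple is reassembled in reverse.
def parse_displaycolor(attribute):
    digits = attribute.lstrip("#")
    value = int(digits, 16)
    b1 = (value & 0x000000FF) / 255
    b2 = ((value & 0x0000FF00) >> 8) / 255
    b3 = ((value & 0x00FF0000) >> 16) / 255
    b4 = ((value & 0xFF000000) >> 24) / 255
    if len(digits) == 6:
        return (b3, b2, b1, 1.0)  # R, G, B, with alpha defaulting to opaque.
    return (b4, b3, b2, b1)       # R, G, B, A for the 8-digit form.

print(parse_displaycolor("#336699"))    # (0.2, 0.4, 0.6, 1.0)
print(parse_displaycolor("#33669980"))  # Roughly (0.2, 0.4, 0.6, 0.502)
# --- End of example sketch ---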
    def read_objects(self, root):
        """
        Reads all repeatable build objects from the resources of an XML root node.

        This stores them in the resource_objects field.
        :param root: The root node of a 3dmodel.model XML file.
        """
        for object_node in root.iterfind("./3mf:resources/3mf:object", MODEL_NAMESPACES):
            try:
                objectid = object_node.attrib["id"]
            except KeyError:
                log.warning("Object resource without ID!")
                continue  # ID is required, otherwise the build can't refer to it.

            pid = object_node.attrib.get("pid")  # Material ID.
            pindex = object_node.attrib.get("pindex")  # Index within a collection of materials.
            material = None
            if pid is not None and pindex is not None:
                try:
                    index = int(pindex)
                    material = self.resource_materials[pid][index]
                except KeyError:
                    log.warning(
                        f"Object with ID {objectid} refers to material collection {pid} with index {pindex}, "
                        f"which doesn't exist.")
                except ValueError:
                    log.warning(f"Object with ID {objectid} specifies material index {pindex}, which is not an integer.")

            vertices = self.read_vertices(object_node)
            triangles, materials = self.read_triangles(object_node, material, pid)
            components = self.read_components(object_node)
            metadata = Metadata()
            for metadata_node in object_node.iterfind("./3mf:metadatagroup", MODEL_NAMESPACES):
                metadata = self.read_metadata(metadata_node, metadata)
            if "partnumber" in object_node.attrib:
                # Blender has no way to ensure that custom properties get preserved if a mesh is split up, but for
                # most operations this is retained properly.
                metadata["3mf:partnumber"] = MetadataEntry(
                    name="3mf:partnumber",
                    preserve=True,
                    datatype="xs:string",
                    value=object_node.attrib["partnumber"])
            metadata["3mf:object_type"] = MetadataEntry(
                name="3mf:object_type",
                preserve=True,
                datatype="xs:string",
                value=object_node.attrib.get("type", "model"))

            self.resource_objects[objectid] = ResourceObject(
                vertices=vertices,
                triangles=triangles,
                materials=materials,
                components=components,
                metadata=metadata)
    def read_vertices(self, object_node):
        """
        Reads out the vertices from an XML node of an object.

        If any vertex is corrupt, like with a coordinate missing or not proper floats, then the 0 coordinate will be
        used. This is to prevent messing up the list of indices.
        :param object_node: An <object> element from the 3dmodel.model file.
        :return: List of vertices in that object. Each vertex is a tuple of 3 floats for X, Y and Z.
        """
        result = []
        for vertex in object_node.iterfind("./3mf:mesh/3mf:vertices/3mf:vertex", MODEL_NAMESPACES):
            attrib = vertex.attrib
            try:
                x = float(attrib.get("x", 0))
            except ValueError:  # Not a float.
                log.warning("Vertex has an invalid X coordinate.")
                x = 0
            try:
                y = float(attrib.get("y", 0))
            except ValueError:
                log.warning("Vertex has an invalid Y coordinate.")
                y = 0
            try:
                z = float(attrib.get("z", 0))
            except ValueError:
                log.warning("Vertex has an invalid Z coordinate.")
                z = 0
            result.append((x, y, z))
        return result
    def read_triangles(self, object_node, default_material, material_pid):
        """
        Reads out the triangles from an XML node of an object.

        These triangles always consist of 3 vertices each. Each vertex is an index to the list of vertices read
        previously. The triangle also contains an associated material, or None if the triangle gets no material.
        :param object_node: An <object> element from the 3dmodel.model file.
        :param default_material: If the triangle specifies no material, it should get this material. May be `None`
        if the model specifies no material.
        :param material_pid: Triangles that specify a material index will get their material from this material
        group.
        :return: Two lists of equal length. The first lists the vertices of each triangle, which are 3-tuples of
        integers referring to the first, second and third vertex of the triangle. The second list contains a
        material for each triangle, or `None` if the triangle doesn't get a material.
        """
        vertices = []
        materials = []
        for triangle in object_node.iterfind("./3mf:mesh/3mf:triangles/3mf:triangle", MODEL_NAMESPACES):
            attrib = triangle.attrib
            try:
                v1 = int(attrib["v1"])
                v2 = int(attrib["v2"])
                v3 = int(attrib["v3"])
                if v1 < 0 or v2 < 0 or v3 < 0:  # Negative indices are not allowed.
                    log.warning("Triangle containing negative index to vertex list.")
                    continue

                pid = attrib.get("pid", material_pid)
                p1 = attrib.get("p1")
                if p1 is None:
                    material = default_material
                else:
                    try:
                        material = self.resource_materials[pid][int(p1)]
                    except KeyError as e:
                        # Sorry. It's hard to give an exception more specific than this.
                        log.warning(f"Material {e} is missing.")
                        material = default_material
                    except ValueError as e:
                        log.warning(f"Material index is not an integer: {e}")
                        material = default_material

                vertices.append((v1, v2, v3))
                materials.append(material)
            except KeyError as e:
                log.warning(f"Vertex {e} is missing.")
                continue
            except ValueError as e:
                log.warning(f"Vertex reference is not an integer: {e}")
                continue  # No fallback this time. Leave out the entire triangle.
        return vertices, materials
    def read_components(self, object_node):
        """
        Reads out the components from an XML node of an object.

        These components refer to other resource objects, with a transformation applied. They will eventually appear
        in the scene as sub-objects.
        :param object_node: An <object> element from the 3dmodel.model file.
        :return: List of components in this object node.
        """
        result = []
        for component_node in object_node.iterfind("./3mf:components/3mf:component", MODEL_NAMESPACES):
            try:
                objectid = component_node.attrib["objectid"]
            except KeyError:  # ID is required.
                continue  # Ignore this invalid component.
            transform = self.parse_transformation(component_node.attrib.get("transform", ""))

            result.append(Component(resource_object=objectid, transformation=transform))
        return result
    def parse_transformation(self, transformation_str):
        """
        Parses a transformation matrix as written in the 3MF files.

        Transformations in 3MF files are written in the form:
        `m00 m01 m02 m10 m11 m12 m20 m21 m22 m30 m31 m32`

        This would then result in a row-major matrix of the form:
        ```
        _                 _
        | m00 m01 m02 0.0 |
        | m10 m11 m12 0.0 |
        | m20 m21 m22 0.0 |
        | m30 m31 m32 1.0 |
        -                 -
        ```
        :param transformation_str: A transformation as represented in 3MF.
        :return: A `Matrix` object with the correct transformation.
        """
        components = transformation_str.split(" ")
        result = mathutils.Matrix.Identity(4)
        if transformation_str == "":  # Early-out if transformation is missing. This is not malformed.
            return result
        row = -1
        col = 0
        for component in components:
            row += 1
            if row > 2:
                col += 1
                row = 0
                if col > 3:
                    log.warning(f"Transformation matrix contains too many components: {transformation_str}")
                    break  # Too many components. Ignore the rest.
            try:
                component_float = float(component)
            except ValueError:  # Not a proper float. Skip this one.
                log.warning(f"Transformation matrix malformed: {transformation_str}")
                continue
            result[row][col] = component_float
        return result
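# --- Example sketch (not part of the add-on; illustration only) ---
# How the twelve 3MF transform values land in a 4x4 matrix. This standalone version uses plain nested lists instead
# of mathutils.Matrix (which ships with Blender); the fill order matches the loop above, which stores 3MF's
# row-vector matrix transposed so it can be used as a column-vector matrix_world in Blender. The transform string is
# made up.
transform_str = "1 0 0 0 1 0 0 0 1 10 20 30"  # Identity rotation/scale with a (10, 20, 30) translation.

matrix = [[1.0 if r == c else 0.0 for c in range(4)] for r in range(4)]  # Start from identity.
row, col = -1, 0
for component in transform_str.split(" "):
    row += 1
    if row > 2:
        col += 1
        row = 0
    matrix[row][col] = float(component)

for line in matrix:
    print(line)  # The translation ends up in the last column: [..., 10], [..., 20], [..., 30], [0, 0, 0, 1].
# --- End of example sketch ---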
    def build_items(self, root, scale_unit):
        """
        Builds the scene. This places objects with certain transformations in the scene. Each mesh gets transformed
        appropriately; the objects are linked into the scene directly rather than returned.
        :param root: The root node of the 3dmodel.model XML document.
        :param scale_unit: The scale to apply for the units of the model to be transformed to Blender's units, as a
        float ratio.
        """
        for build_item in root.iterfind("./3mf:build/3mf:item", MODEL_NAMESPACES):
            try:
                objectid = build_item.attrib["objectid"]
                resource_object = self.resource_objects[objectid]
            except KeyError:  # ID is required, and it must be in the available resource_objects.
                log.warning("Encountered build item without object ID.")
                continue  # Ignore this invalid item.

            metadata = Metadata()
            for metadata_node in build_item.iterfind("./3mf:metadatagroup", MODEL_NAMESPACES):
                metadata = self.read_metadata(metadata_node, metadata)
            if "partnumber" in build_item.attrib:
                metadata["3mf:partnumber"] = MetadataEntry(
                    name="3mf:partnumber",
                    preserve=True,
                    datatype="xs:string",
                    value=build_item.attrib["partnumber"])

            transform = mathutils.Matrix.Scale(scale_unit, 4)
            transform @= self.parse_transformation(build_item.attrib.get("transform", ""))

            self.build_object(resource_object, transform, metadata, [objectid])
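# --- Example sketch (not part of the add-on; illustration only) ---
# How the unit scale and a build item's own transform compose. This only runs where the mathutils module is
# available (it ships with Blender); the transform values are made up.
import mathutils

scale_unit = 0.001  # e.g. a millimetre file imported into a metre scene, as computed by unit_scale().
item_transform = mathutils.Matrix.Identity(4)
item_transform[0][3] = 40.0  # Pretend the build item translates the object 40 units along X.

transform = mathutils.Matrix.Scale(scale_unit, 4)
transform @= item_transform

print(transform[0][3])  # Approximately 0.04: the file's 40 mm offset becomes 0.04 Blender metres.
# --- End of example sketch ---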
    def build_object(self, resource_object, transformation, metadata, objectid_stack_trace, parent=None):
        """
        Converts a resource object into a Blender object.

        This resource object may refer to components that need to be built along. These components may again have
        subcomponents, and so on. These will be built recursively. A "stack trace" is kept in order to prevent going
        into an infinite recursion. The resulting Blender objects are linked into the current collection directly;
        child components become parented sub-objects.
        :param resource_object: The resource object that needs to be converted.
        :param transformation: A transformation matrix to apply to this resource object.
        :param metadata: A collection of metadata belonging to this build item.
        :param objectid_stack_trace: A list of all object IDs that have been processed so far, including the object
        ID we're processing now.
        :param parent: The resulting object must be marked as a child of this Blender object.
        """
        # Create a mesh if there is mesh data here.
        mesh = None
        if resource_object.triangles:
            mesh = bpy.data.meshes.new("3MF Mesh")
            mesh.from_pydata(resource_object.vertices, [], resource_object.triangles)
            mesh.update()
            resource_object.metadata.store(mesh)

            # Mapping resource materials to indices in the list of materials for this specific mesh.
            materials_to_index = {}
            for triangle_index, triangle_material in enumerate(resource_object.materials):
                if triangle_material is None:
                    continue

                # Create the material in Blender if it doesn't exist yet. Otherwise re-use the material created
                # earlier.
                if triangle_material not in self.resource_to_material:
                    material = bpy.data.materials.new(triangle_material.name)
                    material.use_nodes = True
                    principled = bpy_extras.node_shader_utils.PrincipledBSDFWrapper(material, is_readonly=False)
                    principled.base_color = triangle_material.color[:3]
                    principled.alpha = triangle_material.color[3]
                    self.resource_to_material[triangle_material] = material
                else:
                    material = self.resource_to_material[triangle_material]

                # Add the material to this mesh if it doesn't have it yet. Otherwise re-use the previous index.
                if triangle_material not in materials_to_index:
                    new_index = len(mesh.materials.items())
                    if new_index > 32767:
                        log.warning("Blender doesn't support more than 32768 different materials per mesh.")
                        continue
                    mesh.materials.append(material)
                    materials_to_index[triangle_material] = new_index

                # Assign the material to the correct triangle.
                mesh.polygons[triangle_index].material_index = materials_to_index[triangle_material]

        # Create an object.
        blender_object = bpy.data.objects.new("3MF Object", mesh)
        self.num_loaded += 1
        if parent is not None:
            blender_object.parent = parent
        blender_object.matrix_world = transformation
        bpy.context.collection.objects.link(blender_object)
        bpy.context.view_layer.objects.active = blender_object
        blender_object.select_set(True)
        metadata.store(blender_object)
        if "3mf:object_type" in resource_object.metadata\
                and resource_object.metadata["3mf:object_type"].value in {"solidsupport", "support"}:
            # Don't render support meshes.
            blender_object.hide_render = True

        # Recurse for all components.
        for component in resource_object.components:
            if component.resource_object in objectid_stack_trace:
                # These object IDs refer to each other in a loop. Don't go in there!
                log.warning(f"Recursive components in object ID: {component.resource_object}")
                continue
            try:
                child_object = self.resource_objects[component.resource_object]
            except KeyError:  # Invalid resource ID. Doesn't exist!
                log.warning(f"Build item with unknown resource ID: {component.resource_object}")
                continue
            transform = transformation @ component.transformation  # Apply the child's transformation and pass it on.
            objectid_stack_trace.append(component.resource_object)
            self.build_object(child_object, transform, metadata, objectid_stack_trace, parent=blender_object)
            objectid_stack_trace.pop()
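# --- Example sketch (not part of the add-on; illustration only) ---
# The objectid_stack_trace list above is what stops two components that reference each other from recursing forever.
# A stripped-down, Blender-free version of that guard, with a deliberately cyclic component graph made up for the
# demonstration:
components_of = {"1": ["2"], "2": ["1"]}  # Object 1 contains object 2, which (invalidly) contains object 1 again.

def build(objectid, stack_trace, depth=0):
    print("  " * depth + f"building object {objectid}")
    for child_id in components_of.get(objectid, []):
        if child_id in stack_trace:
            print("  " * depth + f"recursive component in object ID: {child_id} (skipped)")
            continue
        stack_trace.append(child_id)
        build(child_id, stack_trace, depth + 1)
        stack_trace.pop()

build("1", ["1"])  # Builds 1, then 2 as its child, then refuses to build 1 again.
# --- End of example sketch ---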
198
io_mesh_3mf/metadata.py
Normal file
198
io_mesh_3mf/metadata.py
Normal file
@ -0,0 +1,198 @@
# Blender add-on to import and export 3MF files.
# Copyright (C) 2020 Ghostkeeper
# This add-on is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any
# later version.
# This add-on is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
# details.
# You should have received a copy of the GNU Affero General Public License along with this plug-in. If not, see
# <https://gnu.org/licenses/>.

# <pep8 compliant>

import collections  # For named tuples.
import idprop.types  # To interpret property groups as metadata entries.

MetadataEntry = collections.namedtuple("MetadataEntry", ["name", "preserve", "datatype", "value"])


class Metadata:
    """
    This class tracks the metadata of a Blender object.

    You can use it to update the metadata when importing, or to get the scene's metadata when exporting. It has a
    routine to store the metadata in a Blender object and to retrieve it from that Blender object again.

    This class functions like a temporary data structure only. It is blissfully unaware of the intricacies of the 3MF
    file format specifically, save for knowing all of the properties of a metadata entry that can be specified.

    The class' signature is like a dictionary. The keys of the dictionary are the names of the metadata entries. The
    values of the dictionary are MetadataEntry named tuples, containing several properties of the metadata entries as
    can be specified in the 3MF format. However, the behaviour of the class is not entirely like a dictionary, since
    it will only store metadata that is consistent across all of the attempts to store metadata. If you store the
    same metadata entry multiple times, it will store only one copy, which is like a dictionary. However if you store
    an entry with the same name but a different value, it'll know that the metadata is inconsistent across the
    different files and thus will pretend that this metadata entry was not set. This way, if you load multiple 3MF
    files into one scene in Blender, you will only get the intersection of the matching metadata entries.
    """

    def __init__(self):
        """
        Create an empty storage of metadata.
        """
        self.metadata = {}

    def __setitem__(self, key, value):
        """
        Add a metadata entry to this storage.
        :param key: The name of the entry.
        :param value: A `MetadataEntry` object to store.
        """
        if key not in self.metadata:
            # Completely new value. We can just store this one, since it's always consistent with existing metadata.
            self.metadata[key] = value
            return

        if self.metadata[key] is None:
            # This entry was already in conflict with another entry and erased.
            # The new value will also be in conflict with at least one, so should also not be stored.
            return

        competing = self.metadata[key]
        if value.value != competing.value or value.datatype != competing.datatype:
            # These two are inconsistent. Erase both!
            self.metadata[key] = None
            return

        # The two are consistent. Usually no need to store anything, since it's already stored.
        # The "preserve" property may be different. Preserve if any of them says to preserve.
        if not competing.preserve and value.preserve:  # Prevent unnecessary construction of namedtuples.
            self.metadata[key] = MetadataEntry(
                name=key,
                preserve=True,
                datatype=competing.datatype,
                value=competing.value)

    def __getitem__(self, key):
        """
        Retrieves a metadata entry, if it exists and was not in conflict.
        :param key: The name of the metadata entry to get.
        :return: The `MetadataEntry` object stored there.
        :raises: `KeyError` if there is no metadata entry or it was in conflict.
        """
        if key not in self.metadata or self.metadata[key] is None:
            # Metadata entry doesn't exist, or its values are conflicting with each other across multiple files.
            raise KeyError(key)
        return self.metadata[key]

    def __contains__(self, item):
        """
        Tests if a metadata entry with a certain name is present and not in conflict.
        :param item: The name of the metadata entry to test for.
        :return: `True` if the metadata entry is present and not in conflict, or `False` if it's not present or in
        conflict with metadata values from multiple files.
        """
        return item in self.metadata and self.metadata[item] is not None

    def __bool__(self):
        """
        Checks if there is any content in this metadata storage.

        Conflicting metadata entries are not counted as content in this case.
        :return: `True` if there is metadata in this storage, or `False` if there isn't any.
        """
        return any(self.values())

    def __len__(self):
        """
        Returns the number of valid items in this metadata storage.

        An item is only valid if it's not in conflict, i.e. if it would be present in an iteration over the storage.
        :return: The number of valid metadata entries.
        """
        return sum(1 for _ in self.values())

    def __delitem__(self, key):
        """
        Completely delete all traces of a metadata entry from this storage.

        Even if there was no real entry but only the shadow of entries that were in conflict, that information will
        be removed, so that a new value can be stored.

        Contrary to the normal dictionary's version, this one does check for the key's existence, so you don't need
        to do that manually.
        """
        if key in self.metadata:
            del self.metadata[key]

    def __eq__(self, other):
        """
        Compares two metadata groups together.

        This is currently just used for the unit tests to see if the metadata is constructed correctly.
        :param other: The metadata object to compare to.
        :return: `True` if the two groups of metadata contain the same metadata (including which entries are in
        conflict), or `False` otherwise.
        """
        return self.metadata == other.metadata

    def store(self, blender_object):
        """
        Store this metadata in a Blender object.

        The metadata will be stored as Blender properties. In the case of properties known to Blender they will be
        translated appropriately.
        :param blender_object: The Blender object to store the metadata in.
        """
        for metadata_entry in self.values():
            name = metadata_entry.name
            value = metadata_entry.value
            if name == "Title":  # Has a built-in ID property for objects as well as scenes.
                blender_object.name = value
            elif name == "3mf:partnumber":
                # Special case: This is always a string and doesn't need the preserve attribute. We can simplify this
                # to make it easier to edit.
                blender_object[name] = value
            else:
                blender_object[name] = {
                    "datatype": metadata_entry.datatype,
                    "preserve": metadata_entry.preserve,
                    "value": value,
                }

    def retrieve(self, blender_object):
        """
        Retrieve metadata from a Blender object.

        The metadata will get stored in this existing instance.

        The metadata from the Blender object will get merged with the data that already exists in this instance. In
        case of conflicting metadata values, those metadata entries will be left out.
        :param blender_object: A Blender object to retrieve metadata from.
        """
        for key in blender_object.keys():
            entry = blender_object[key]
            if key == "3mf:partnumber":
                self[key] = MetadataEntry(name=key, preserve=True, datatype="xs:string", value=entry)
                continue
            if isinstance(entry, idprop.types.IDPropertyGroup)\
                    and "datatype" in entry.keys()\
                    and "preserve" in entry.keys()\
                    and "value" in entry.keys():  # Most likely a metadata entry from a previous 3MF file.
                self[key] = MetadataEntry(
                    name=key,
                    preserve=entry.get("preserve"),
                    datatype=entry.get("datatype"),
                    value=entry.get("value"))
            # Don't mess with metadata added by the user or their other Blender add-ons. Don't want to break their
            # behaviour.

        self["Title"] = MetadataEntry(name="Title", preserve=True, datatype="xs:string", value=blender_object.name)

    def values(self):
        """
        Return all metadata entries that are registered in this storage and not in conflict.
        :return: A generator of metadata entries.
        """
        yield from filter(lambda entry: entry is not None, self.metadata.values())
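# --- Example sketch (not part of the add-on; illustration only) ---
# The conflict rule implemented by __setitem__ above, shown on the class itself. This assumes it runs inside Blender
# with the add-on installed, since io_mesh_3mf.metadata imports Blender's idprop module. The "Designer" entry and
# its values are made up.
from io_mesh_3mf.metadata import Metadata, MetadataEntry

merged = Metadata()
merged["Designer"] = MetadataEntry(name="Designer", preserve=True, datatype="xs:string", value="Alice")
merged["Designer"] = MetadataEntry(name="Designer", preserve=True, datatype="xs:string", value="Alice")  # Consistent.
print("Designer" in merged)  # True: storing the same value twice keeps one copy.

merged["Designer"] = MetadataEntry(name="Designer", preserve=True, datatype="xs:string", value="Bob")  # Conflict.
print("Designer" in merged)  # False: conflicting values across files erase the entry.
# --- End of example sketch ---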
44
io_mesh_3mf/unit_conversions.py
Normal file
44
io_mesh_3mf/unit_conversions.py
Normal file
@ -0,0 +1,44 @@
# Blender add-on to import and export 3MF files.
# Copyright (C) 2020 Ghostkeeper
# This add-on is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any
# later version.
# This add-on is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
# details.
# You should have received a copy of the GNU Affero General Public License along with this plug-in. If not, see
# <https://gnu.org/licenses/>.

# <pep8 compliant>

"""
This file defines unit conversions between Blender's units and 3MF's units.
"""

blender_to_metre = {  # Scale of each of Blender's length units to a metre.
    'THOU': 0.0000254,
    'INCHES': 0.0254,
    'FEET': 0.3048,
    'YARDS': 0.9144,
    'CHAINS': 20.1168,
    'FURLONGS': 201.168,
    'MILES': 1609.344,
    'MICROMETERS': 0.000001,
    'MILLIMETERS': 0.001,
    'CENTIMETERS': 0.01,
    'DECIMETERS': 0.1,
    'METERS': 1,
    'ADAPTIVE': 1,
    'DEKAMETERS': 10,
    'HECTOMETERS': 100,
    'KILOMETERS': 1000
}

threemf_to_metre = {  # Scale of each of 3MF's length units to a metre.
    'micron': 0.000001,
    'millimeter': 0.001,
    'centimeter': 0.01,
    'inch': 0.0254,
    'foot': 0.3048,
    'meter': 1
}