added the addons to the scripts folder, alpha support for fbx and ply
This commit is contained in:
205
release/scripts/startup/io_mesh_ply_soc/__init__.py
Normal file
205
release/scripts/startup/io_mesh_ply_soc/__init__.py
Normal file
@@ -0,0 +1,205 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8-80 compliant>
|
||||
|
||||
# Add-on metadata read by Blender to register and display this
# importer/exporter in the add-on list.
bl_info = {
    "name": "Stanford PLY format-soc",
    "author": "Bruce Merry, Campbell Barton",
    "blender": (2, 74, 0),
    "location": "File > Import-Export",
    # FIX: typo "withs" -> "with" in the user-visible description.
    "description": "Import-Export PLY mesh data with UV's and vertex colors-soc",
    "warning": "",
    "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
                "Scripts/Import-Export/Stanford_PLY",
    "support": 'OFFICIAL',
    "category": "Import-Export"}
|
||||
|
||||
# Copyright (C) 2004, 2005: Bruce Merry, bmerry@cs.uct.ac.za
|
||||
# Contributors: Bruce Merry, Campbell Barton
|
||||
|
||||
# To support reload properly, try to access a package var,
# if it's there, reload everything
# (when the add-on is re-enabled, "bpy" is already bound in this module's
# namespace, so the submodules must be reloaded to pick up code changes).
if "bpy" in locals():
    import importlib
    if "export_ply_soc" in locals():
        importlib.reload(export_ply_soc)
    if "import_ply_soc" in locals():
        importlib.reload(import_ply_soc)
|
||||
|
||||
|
||||
import os
|
||||
import bpy
|
||||
from bpy.props import (
|
||||
CollectionProperty,
|
||||
StringProperty,
|
||||
BoolProperty,
|
||||
EnumProperty,
|
||||
FloatProperty,
|
||||
)
|
||||
from bpy_extras.io_utils import (
|
||||
ImportHelper,
|
||||
ExportHelper,
|
||||
orientation_helper_factory,
|
||||
axis_conversion,
|
||||
)
|
||||
|
||||
|
||||
# Mixin providing axis_forward/axis_up operator properties; PLY defaults
# match Blender's own convention (+Y forward, +Z up), i.e. no conversion.
IOPLYOrientationHelper = orientation_helper_factory("IOPLYOrientationHelper", axis_forward='Y', axis_up='Z')
|
||||
|
||||
|
||||
class SocImportPLY(bpy.types.Operator, ImportHelper):
    """Load a PLY geometry file"""
    bl_idname = "import_mesh_soc.ply"
    bl_label = "Import PLY soc"
    bl_options = {'UNDO'}

    # Filled by Blender's file browser when multiple files are selected.
    files = CollectionProperty(name="File Path",
                               description="File path used for importing "
                                           "the PLY file",
                               type=bpy.types.OperatorFileListElement)

    directory = StringProperty()

    filename_ext = ".ply"
    filter_glob = StringProperty(default="*.ply", options={'HIDDEN'})

    def execute(self, context):
        # Deferred import keeps add-on registration cheap.
        from . import import_ply_soc

        selected = [os.path.join(self.directory, entry.name)
                    for entry in self.files]
        if not selected:
            # No multi-selection: fall back to the single filepath field.
            selected = [self.filepath]

        for ply_path in selected:
            import_ply_soc.load(self, context, ply_path)

        return {'FINISHED'}
|
||||
|
||||
|
||||
class SocExportPLY(bpy.types.Operator, ExportHelper, IOPLYOrientationHelper):
    """Export a single object as a Stanford PLY with normals, """ \
    """colors and texture coordinates"""
    bl_idname = "export_mesh_soc.ply"
    bl_label = "Export PLY soc"

    filename_ext = ".ply"
    filter_glob = StringProperty(default="*.ply", options={'HIDDEN'})

    use_mesh_modifiers = BoolProperty(
            name="Apply Modifiers",
            description="Apply Modifiers to the exported mesh",
            default=True,
            )
    use_normals = BoolProperty(
            name="Normals",
            description="Export Normals for smooth and "
                        "hard shaded faces "
                        "(hard shaded faces will be exported "
                        "as individual faces)",
            default=True,
            )
    use_uv_coords = BoolProperty(
            name="UVs",
            description="Export the active UV layer",
            default=True,
            )
    use_colors = BoolProperty(
            name="Vertex Colors",
            description="Export the active vertex color layer",
            default=True,
            )

    global_scale = FloatProperty(
            name="Scale",
            min=0.01, max=1000.0,
            default=1.0,
            )

    @classmethod
    def poll(cls, context):
        # An active object is required to read mesh data from.
        return context.active_object is not None

    def execute(self, context):
        from . import export_ply_soc
        from mathutils import Matrix

        # Collect the operator's properties as keyword args for save().
        # Axis/scale settings are folded into global_matrix instead of
        # being passed through individually.
        keywords = self.as_keywords(ignore=("axis_forward",
                                            "axis_up",
                                            "global_scale",
                                            "check_existing",
                                            "filter_glob",
                                            ))
        global_matrix = axis_conversion(to_forward=self.axis_forward,
                                        to_up=self.axis_up,
                                        ).to_4x4() * Matrix.Scale(self.global_scale, 4)
        keywords["global_matrix"] = global_matrix

        # FIX: the original computed the extension-ensured path into a
        # local and then discarded it, so save() could receive a path
        # without the ".ply" extension.  Feed the ensured path through.
        keywords["filepath"] = bpy.path.ensure_ext(self.filepath,
                                                   self.filename_ext)

        return export_ply_soc.save(self, context, **keywords)

    def draw(self, context):
        layout = self.layout

        row = layout.row()
        row.prop(self, "use_mesh_modifiers")
        row.prop(self, "use_normals")
        row = layout.row()
        row.prop(self, "use_uv_coords")
        row.prop(self, "use_colors")

        layout.prop(self, "axis_forward")
        layout.prop(self, "axis_up")
        layout.prop(self, "global_scale")
|
||||
|
||||
|
||||
def menu_func_import(self, context):
    # Menu entry appended to File > Import by register().
    layout = self.layout
    layout.operator(SocImportPLY.bl_idname, text="Stanford (.ply)")
|
||||
|
||||
|
||||
def menu_func_export(self, context):
    # Menu entry appended to File > Export by register().
    layout = self.layout
    layout.operator(SocExportPLY.bl_idname, text="Stanford (.ply)")
|
||||
|
||||
|
||||
# Operator classes handled by register()/unregister() below.
classes = (
    SocImportPLY,
    SocExportPLY,
)
|
||||
|
||||
|
||||
def register():
    """Register the PLY operators and hook them into the file menus."""
    for operator_cls in classes:
        bpy.utils.register_class(operator_cls)

    bpy.types.INFO_MT_file_import.append(menu_func_import)
    bpy.types.INFO_MT_file_export.append(menu_func_export)
|
||||
|
||||
|
||||
def unregister():
    """Remove the file-menu entries and unregister the PLY operators."""
    bpy.types.INFO_MT_file_import.remove(menu_func_import)
    bpy.types.INFO_MT_file_export.remove(menu_func_export)

    for operator_cls in classes:
        bpy.utils.unregister_class(operator_cls)
|
||||
|
||||
# Allow running this file directly (e.g. from Blender's text editor).
if __name__ == "__main__":
    register()
|
228
release/scripts/startup/io_mesh_ply_soc/export_ply_soc.py
Normal file
228
release/scripts/startup/io_mesh_ply_soc/export_ply_soc.py
Normal file
@@ -0,0 +1,228 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8-80 compliant>
|
||||
|
||||
"""
|
||||
This script exports Stanford PLY files from Blender. It supports normals,
|
||||
colors, and texture coordinates per face or per vertex.
|
||||
Only one mesh can be exported at a time.
|
||||
"""
|
||||
|
||||
import bpy
|
||||
import os
|
||||
|
||||
|
||||
def save_mesh(filepath,
              mesh,
              use_normals=True,
              use_uv_coords=True,
              use_colors=True,
              ):
    """Write *mesh* to *filepath* as an ASCII Stanford PLY file.

    Vertices are split and deduplicated per (normal, uv, color)
    combination so flat-shaded faces can carry their own normals.
    Returns {'FINISHED'}.
    """

    def rvec3d(v):
        # Round so nearly-equal normals collapse onto one dedup key.
        return round(v[0], 6), round(v[1], 6), round(v[2], 6)

    def rvec2d(v):
        return round(v[0], 6), round(v[1], 6)

    # Be sure tessface & co are available!
    if not mesh.tessfaces and mesh.polygons:
        mesh.calc_tessface()

    has_uv = bool(mesh.tessface_uv_textures)
    has_vcol = bool(mesh.tessface_vertex_colors)

    # Drop layers the mesh does not have, then layers the caller disabled.
    if not has_uv:
        use_uv_coords = False
    if not has_vcol:
        use_colors = False

    if not use_uv_coords:
        has_uv = False
    if not use_colors:
        has_vcol = False

    if has_uv:
        active_uv_layer = mesh.tessface_uv_textures.active
        if not active_uv_layer:
            use_uv_coords = False
            has_uv = False
        else:
            active_uv_layer = active_uv_layer.data

    if has_vcol:
        active_col_layer = mesh.tessface_vertex_colors.active
        if not active_col_layer:
            use_colors = False
            has_vcol = False
        else:
            active_col_layer = active_col_layer.data

    # in case
    color = uvcoord = uvcoord_key = normal = normal_key = None

    mesh_verts = mesh.vertices  # save a lookup
    ply_verts = []  # list of (vidx, normal, uvcoord, color) tuples
    # per-source-vertex map: (normal, uv, color) key -> new PLY index
    vdict = [{} for i in range(len(mesh_verts))]
    ply_faces = [[] for f in range(len(mesh.tessfaces))]
    vert_count = 0
    for i, f in enumerate(mesh.tessfaces):

        smooth = not use_normals or f.use_smooth
        if not smooth:
            # Flat face: all corners share the face normal.
            normal = f.normal[:]
            normal_key = rvec3d(normal)

        if has_uv:
            uv = active_uv_layer[i]
            uv = uv.uv1, uv.uv2, uv.uv3, uv.uv4
        if has_vcol:
            col = active_col_layer[i]
            col = col.color1[:], col.color2[:], col.color3[:], col.color4[:]

        f_verts = f.vertices

        pf = ply_faces[i]
        for j, vidx in enumerate(f_verts):
            v = mesh_verts[vidx]

            if smooth:
                normal = v.normal[:]
                normal_key = rvec3d(normal)

            if has_uv:
                uvcoord = uv[j][0], uv[j][1]
                uvcoord_key = rvec2d(uvcoord)

            if has_vcol:
                color = col[j]
                color = (int(color[0] * 255.0),
                         int(color[1] * 255.0),
                         int(color[2] * 255.0),
                         int(color[3] * 255.0),
                         )
            key = normal_key, uvcoord_key, color

            vdict_local = vdict[vidx]
            pf_vidx = vdict_local.get(key)  # Will be None initially

            if pf_vidx is None:  # first time this combination is seen
                pf_vidx = vdict_local[key] = vert_count
                ply_verts.append((vidx, normal, uvcoord, color))
                vert_count += 1

            pf.append(pf_vidx)

    # FIX: write through a context manager so the file handle is closed
    # even if an exception occurs mid-write (the original used a bare
    # open()/close() pair and leaked the handle on error).
    with open(filepath, "w", encoding="utf8", newline="\n") as file:
        fw = file.write

        fw("ply\n")
        fw("format ascii 1.0\n")
        fw("comment Created by Blender %s - "
           "www.blender.org, source file: %r\n" %
           (bpy.app.version_string, os.path.basename(bpy.data.filepath)))

        fw("element vertex %d\n" % len(ply_verts))

        fw("property float x\n"
           "property float y\n"
           "property float z\n")

        if use_normals:
            fw("property float nx\n"
               "property float ny\n"
               "property float nz\n")
        if use_uv_coords:
            fw("property float s\n"
               "property float t\n")
        if use_colors:
            fw("property uchar red\n"
               "property uchar green\n"
               "property uchar blue\n"
               "property uchar alpha\n")

        fw("element face %d\n" % len(mesh.tessfaces))
        fw("property list uchar uint vertex_indices\n")
        fw("end_header\n")

        for i, v in enumerate(ply_verts):
            fw("%.6f %.6f %.6f" % mesh_verts[v[0]].co[:])  # co
            if use_normals:
                fw(" %.6f %.6f %.6f" % v[1])  # no
            if use_uv_coords:
                fw(" %.6f %.6f" % v[2])  # uv
            if use_colors:
                fw(" %u %u %u %u" % v[3])  # col
            fw("\n")

        for pf in ply_faces:
            if len(pf) == 3:
                fw("3 %d %d %d\n" % tuple(pf))
            else:
                fw("4 %d %d %d %d\n" % tuple(pf))

    print("writing %r done" % filepath)

    return {'FINISHED'}
|
||||
|
||||
|
||||
def save(operator,
         context,
         filepath="",
         use_mesh_modifiers=True,
         use_normals=True,
         use_uv_coords=True,
         use_colors=True,
         global_matrix=None
         ):
    """Export the active object to *filepath* as PLY.

    Builds a temporary, transformed copy of the object's mesh (with
    modifiers applied if requested), hands it to save_mesh(), and frees
    the temporary datablock afterwards.
    """
    scene = context.scene
    obj = context.active_object

    if global_matrix is None:
        from mathutils import Matrix
        global_matrix = Matrix()

    # Leave edit mode so mesh data is up to date.
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')

    if use_mesh_modifiers and obj.modifiers:
        mesh = obj.to_mesh(scene, True, 'PREVIEW')
    else:
        mesh = obj.data.copy()

    if not mesh:
        raise Exception("Error, could not get mesh data from active object")

    try:
        # Bake object transform (and axis conversion/scale) into the copy.
        mesh.transform(global_matrix * obj.matrix_world)
        if use_normals:
            mesh.calc_normals()

        ret = save_mesh(filepath, mesh,
                        use_normals=use_normals,
                        use_uv_coords=use_uv_coords,
                        use_colors=use_colors,
                        )
    finally:
        # FIX: both branches above create a throw-away mesh datablock, but
        # the original only removed it when use_mesh_modifiers was set,
        # leaking the obj.data.copy() otherwise (and leaking on exception).
        bpy.data.meshes.remove(mesh)

    return ret
|
394
release/scripts/startup/io_mesh_ply_soc/import_ply_soc.py
Normal file
394
release/scripts/startup/io_mesh_ply_soc/import_ply_soc.py
Normal file
@@ -0,0 +1,394 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import re
|
||||
import struct
|
||||
|
||||
|
||||
class element_spec(object):
    """One 'element' declaration from a PLY header: its name, the number
    of rows it has, and the ordered property specs making up a row."""

    __slots__ = ("name", "count", "properties")

    def __init__(self, name, count):
        self.name = name
        self.count = count
        self.properties = []

    def load(self, format, stream):
        # ASCII rows are one whitespace-separated line each; binary
        # formats consume bytes straight from the stream.
        if format == b'ascii':
            stream = stream.readline().split()
        return [prop.load(format, stream) for prop in self.properties]

    def index(self, name):
        # Position of the property called *name* in this element, or -1.
        return next((pos for pos, prop in enumerate(self.properties)
                     if prop.name == name), -1)
|
||||
|
||||
|
||||
class property_spec(object):
    """One 'property' declaration from a PLY header.

    A scalar property has list_type None; a list property stores the
    struct code of its count field in list_type.  numeric_type is the
    struct format code ('f', 'd', 'i', 'B', ..., or 's' for strings).
    """

    __slots__ = ("name",
                 "list_type",
                 "numeric_type",
                 )

    def __init__(self, name, list_type, numeric_type):
        self.name = name
        self.list_type = list_type
        self.numeric_type = numeric_type

    def read_format(self, format, count, num_type, stream):
        """Read *count* values of *num_type* from *stream*.

        For format b'ascii', *stream* is a list of tokens and consumed
        tokens are deleted in place; otherwise *format* is the struct
        byte-order prefix ('<' or '>') and *stream* is a binary file.
        Returns a list/tuple of values, or None on a malformed string.
        """
        if format == b'ascii':
            if num_type == 's':
                ans = []
                for i in range(count):
                    s = stream[i]
                    # Strings must be double-quoted; embedded whitespace
                    # is not supported because tokens were pre-split.
                    if len(s) < 2 or s[0] != '"' or s[-1] != '"':
                        print('Invalid string', s)
                        print('Note: ply_import.py does not handle whitespace in strings')
                        return None
                    ans.append(s[1:-1])
                stream[:count] = []
                return ans
            if num_type == 'f' or num_type == 'd':
                mapper = float
            else:
                mapper = int
            ans = [mapper(x) for x in stream[:count]]
            stream[:count] = []  # consume the tokens just read
            return ans
        else:
            if num_type == 's':
                ans = []
                for i in range(count):
                    # Binary strings: 32-bit length, then NUL-terminated data.
                    fmt = format + 'i'
                    data = stream.read(struct.calcsize(fmt))
                    length = struct.unpack(fmt, data)[0]
                    fmt = '%s%is' % (format, length)
                    data = stream.read(struct.calcsize(fmt))
                    s = struct.unpack(fmt, data)[0]
                    ans.append(s[:-1])  # strip the NULL
                return ans
            else:
                # One struct read for all *count* values at once.
                fmt = '%s%i%s' % (format, count, num_type)
                data = stream.read(struct.calcsize(fmt))
                return struct.unpack(fmt, data)

    def load(self, format, stream):
        """Read this property's value: a sequence for list properties,
        a single scalar otherwise."""
        if self.list_type is not None:
            count = int(self.read_format(format, 1, self.list_type, stream)[0])
            return self.read_format(format, count, self.numeric_type, stream)
        else:
            return self.read_format(format, 1, self.numeric_type, stream)[0]
|
||||
|
||||
|
||||
class object_spec(object):
    """Container for every element_spec parsed from a PLY header."""

    __slots__ = ("specs",)

    def __init__(self):
        # element_spec instances in header declaration order.
        self.specs = []

    def load(self, format, stream):
        # Read each element's rows in header order and group the loaded
        # rows by element name.
        loaded = {}
        for spec in self.specs:
            loaded[spec.name] = [spec.load(format, stream)
                                 for _ in range(spec.count)]
        return loaded
|
||||
|
||||
|
||||
def read(filepath):
    """Parse the PLY file at *filepath*.

    Returns (obj_spec, obj, texture) where obj_spec describes the header,
    obj maps element names to lists of loaded rows, and texture is the
    TextureFile comment value (b'' if none).  Returns (None, None, None)
    on any parse error.
    """
    format = b''
    texture = b''
    version = b'1.0'
    # Header format keyword -> struct byte-order prefix (or the ascii marker).
    format_specs = {b'binary_little_endian': '<',
                    b'binary_big_endian': '>',
                    b'ascii': b'ascii'}
    # PLY type names -> struct format codes.
    type_specs = {b'char': 'b',
                  b'uchar': 'B',
                  b'int8': 'b',
                  b'uint8': 'B',
                  b'int16': 'h',
                  b'uint16': 'H',
                  b'short': 'h',
                  b'ushort': 'H',
                  b'int': 'i',
                  b'int32': 'i',
                  b'uint': 'I',
                  b'uint32': 'I',
                  b'float': 'f',
                  b'float32': 'f',
                  b'float64': 'd',
                  b'double': 'd',
                  b'string': 's'}
    obj_spec = object_spec()
    invalid_ply = (None, None, None)

    with open(filepath, 'rb') as plyf:
        signature = plyf.readline()

        if not signature.startswith(b'ply'):
            print('Signature line was invalid')
            return invalid_ply

        valid_header = False
        # Header is line oriented; body parsing starts after 'end_header'.
        for line in plyf:
            tokens = re.split(br'[ \r\n]+', line)

            if len(tokens) == 0:
                continue
            if tokens[0] == b'end_header':
                valid_header = True
                break
            elif tokens[0] == b'comment':
                if len(tokens) < 2:
                    continue
                elif tokens[1] == b'TextureFile':
                    # Optional texture reference embedded as a comment.
                    if len(tokens) < 4:
                        print('Invalid texture line')
                    else:
                        texture = tokens[2]
                continue
            elif tokens[0] == b'obj_info':
                continue
            elif tokens[0] == b'format':
                if len(tokens) < 3:
                    print('Invalid format line')
                    return invalid_ply
                if tokens[1] not in format_specs:
                    print('Unknown format', tokens[1])
                    return invalid_ply
                if tokens[2] != version:
                    print('Unknown version', tokens[2])
                    return invalid_ply
                format = tokens[1]
            elif tokens[0] == b'element':
                if len(tokens) < 3:
                    print(b'Invalid element line')
                    return invalid_ply
                obj_spec.specs.append(element_spec(tokens[1], int(tokens[2])))
            elif tokens[0] == b'property':
                # Properties attach to the most recently declared element.
                if not len(obj_spec.specs):
                    print('Property without element')
                    return invalid_ply
                if tokens[1] == b'list':
                    # "property list <count-type> <item-type> <name>"
                    obj_spec.specs[-1].properties.append(property_spec(tokens[4], type_specs[tokens[2]], type_specs[tokens[3]]))
                else:
                    obj_spec.specs[-1].properties.append(property_spec(tokens[2], None, type_specs[tokens[1]]))
        if not valid_header:
            print("Invalid header ('end_header' line not found!)")
            return invalid_ply

        # Load the body using the element/property layout just parsed.
        obj = obj_spec.load(format_specs[format], plyf)

    return obj_spec, obj, texture
|
||||
|
||||
|
||||
import bpy
|
||||
|
||||
|
||||
def load_ply_mesh(filepath, ply_name):
    """Build and return a Blender mesh named *ply_name* from the PLY file
    at *filepath*, or None if the file cannot be parsed.

    Supports 'face', 'tristrips' and 'edge' elements, optional per-vertex
    UVs (s/t) and colors (red/green/blue), and an optional texture
    referenced from a TextureFile header comment.
    """
    from bpy_extras.io_utils import unpack_face_list
    # from bpy_extras.image_utils import load_image  # UNUSED

    obj_spec, obj, texture = read(filepath)
    if obj is None:
        print('Invalid file')
        return

    uvindices = colindices = None
    colmultiply = None

    # noindices = None  # Ignore normals

    # Locate the column indices of the properties we care about.
    for el in obj_spec.specs:
        if el.name == b'vertex':
            vindices_x, vindices_y, vindices_z = el.index(b'x'), el.index(b'y'), el.index(b'z')
            # noindices = (el.index('nx'), el.index('ny'), el.index('nz'))
            # if -1 in noindices: noindices = None
            uvindices = (el.index(b's'), el.index(b't'))
            if -1 in uvindices:
                uvindices = None
            colindices = el.index(b'red'), el.index(b'green'), el.index(b'blue')
            if -1 in colindices:
                colindices = None
            else:  # if not a float assume uchar
                colmultiply = [1.0 if el.properties[i].numeric_type in {'f', 'd'} else (1.0 / 255.0) for i in colindices]

        elif el.name == b'face':
            findex = el.index(b'vertex_indices')
        elif el.name == b'tristrips':
            trindex = el.index(b'vertex_indices')
        elif el.name == b'edge':
            eindex1, eindex2 = el.index(b'vertex1'), el.index(b'vertex2')

    mesh_faces = []
    mesh_uvs = []
    mesh_colors = []

    def add_face(vertices, indices, uvindices, colindices):
        # Record a face plus its per-corner UVs/colors (kept in parallel
        # lists indexed like mesh_faces).
        mesh_faces.append(indices)
        if uvindices:
            mesh_uvs.append([(vertices[index][uvindices[0]], vertices[index][uvindices[1]]) for index in indices])
        if colindices:
            mesh_colors.append([(vertices[index][colindices[0]] * colmultiply[0],
                                 vertices[index][colindices[1]] * colmultiply[1],
                                 vertices[index][colindices[2]] * colmultiply[2],
                                 ) for index in indices])

    if uvindices or colindices:
        # If we have Cols or UVs then we need to check the face order.
        add_face_simple = add_face

        # EVIL EEKADOODLE - face order annoyance: Blender tessfaces may
        # not have vertex index 0 in the last slot, so rotate the corner
        # order (and with it the parallel UV/color data) when they do.
        def add_face(vertices, indices, uvindices, colindices):
            if len(indices) == 4:
                if indices[2] == 0 or indices[3] == 0:
                    indices = indices[2], indices[3], indices[0], indices[1]
            elif len(indices) == 3:
                if indices[2] == 0:
                    indices = indices[1], indices[2], indices[0]

            add_face_simple(vertices, indices, uvindices, colindices)

    verts = obj[b'vertex']

    if b'face' in obj:
        for f in obj[b'face']:
            ind = f[findex]
            len_ind = len(ind)
            if len_ind <= 4:
                add_face(verts, ind, uvindices, colindices)
            else:
                # Fan fill the face (n-gons become triangle fans).
                for j in range(len_ind - 2):
                    add_face(verts, (ind[0], ind[j + 1], ind[j + 2]), uvindices, colindices)

    if b'tristrips' in obj:
        for t in obj[b'tristrips']:
            ind = t[trindex]
            len_ind = len(ind)
            for j in range(len_ind - 2):
                add_face(verts, (ind[j], ind[j + 1], ind[j + 2]), uvindices, colindices)

    mesh = bpy.data.meshes.new(name=ply_name)

    mesh.vertices.add(len(obj[b'vertex']))

    mesh.vertices.foreach_set("co", [a for v in obj[b'vertex'] for a in (v[vindices_x], v[vindices_y], v[vindices_z])])

    if b'edge' in obj:
        mesh.edges.add(len(obj[b'edge']))
        mesh.edges.foreach_set("vertices", [a for e in obj[b'edge'] for a in (e[eindex1], e[eindex2])])

    if mesh_faces:
        mesh.tessfaces.add(len(mesh_faces))
        mesh.tessfaces.foreach_set("vertices_raw", unpack_face_list(mesh_faces))

        if uvindices or colindices:
            if uvindices:
                uvlay = mesh.tessface_uv_textures.new()
            if colindices:
                vcol_lay = mesh.tessface_vertex_colors.new()

            if uvindices:
                for i, f in enumerate(uvlay.data):
                    ply_uv = mesh_uvs[i]
                    for j, uv in enumerate(f.uv):
                        uv[0], uv[1] = ply_uv[j]

            if colindices:
                for i, f in enumerate(vcol_lay.data):
                    # XXX, colors dont come in right, needs further investigation.
                    ply_col = mesh_colors[i]
                    if len(ply_col) == 4:
                        f_col = f.color1, f.color2, f.color3, f.color4
                    else:
                        f_col = f.color1, f.color2, f.color3

                    for j, col in enumerate(f_col):
                        col.r, col.g, col.b = ply_col[j]

    mesh.validate()
    mesh.update()

    # Optional texture setup: only meaningful when the mesh has UVs.
    if texture and uvindices:

        import os
        import sys
        from bpy_extras.image_utils import load_image

        # Texture path bytes come from the file; decode with the
        # filesystem encoding before handing to load_image.
        encoding = sys.getfilesystemencoding()
        encoded_texture = texture.decode(encoding=encoding)
        name = bpy.path.display_name_from_filepath(texture)
        image = load_image(encoded_texture, os.path.dirname(filepath), recursive=True, place_holder=True)

        if image:
            texture = bpy.data.textures.new(name=name, type='IMAGE')
            texture.image = image

            material = bpy.data.materials.new(name=name)
            material.use_shadeless = True

            mtex = material.texture_slots.add()
            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_color_diffuse = True

            mesh.materials.append(material)
            for face in mesh.uv_textures[0].data:
                face.image = image

    return mesh
|
||||
|
||||
|
||||
def load_ply(filepath):
    """Import *filepath* as a new mesh object linked into the scene."""
    import time

    start = time.time()
    display_name = bpy.path.display_name_from_filepath(filepath)

    new_mesh = load_ply_mesh(filepath, display_name)
    if not new_mesh:
        return {'CANCELLED'}

    scene = bpy.context.scene

    new_obj = bpy.data.objects.new(display_name, new_mesh)
    scene.objects.link(new_obj)
    scene.objects.active = new_obj
    new_obj.select = True

    print('\nSuccessfully imported %r in %.3f sec' % (filepath, time.time() - start))
    return {'FINISHED'}
|
||||
|
||||
|
||||
def load(operator, context, filepath=""):
    """Operator entry point; all of the work happens in load_ply()."""
    return load_ply(filepath)
|
640
release/scripts/startup/io_scene_fbx_soc/__init__.py
Normal file
640
release/scripts/startup/io_scene_fbx_soc/__init__.py
Normal file
@@ -0,0 +1,640 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Add-on metadata read by Blender to register and display this
# importer/exporter in the add-on list.
bl_info = {
    "name": "FBX format-soc",
    "author": "Campbell Barton, Bastien Montagne, Jens Restemeier",
    "version": (3, 7, 10),
    "blender": (2, 77, 0),
    "location": "File > Import-Export",
    "description": "FBX IO meshes, UV's, vertex colors, materials, textures, cameras, lamps and actions",
    "warning": "",
    "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Import-Export/Autodesk_FBX",
    "support": 'OFFICIAL',
    "category": "Import-Export",
}
|
||||
|
||||
|
||||
# Support add-on reload: if "bpy" is already bound, this module was
# imported before, so reload the submodules to pick up code changes.
if "bpy" in locals():
    import importlib
    if "import_fbx_soc" in locals():
        importlib.reload(import_fbx_soc)
    if "export_fbx_bin_soc" in locals():
        importlib.reload(export_fbx_bin_soc)
    if "export_fbx_soc" in locals():
        importlib.reload(export_fbx_soc)
|
||||
|
||||
|
||||
import bpy
|
||||
from bpy.props import (
|
||||
StringProperty,
|
||||
BoolProperty,
|
||||
FloatProperty,
|
||||
EnumProperty,
|
||||
)
|
||||
from bpy_extras.io_utils import (
|
||||
ImportHelper,
|
||||
ExportHelper,
|
||||
orientation_helper_factory,
|
||||
path_reference_mode,
|
||||
axis_conversion,
|
||||
)
|
||||
|
||||
|
||||
# Mixin providing axis_forward/axis_up operator properties; FBX defaults
# are -Z forward, +Y up.
IOFBXOrientationHelper = orientation_helper_factory("IOFBXOrientationHelper", axis_forward='-Z', axis_up='Y')
|
||||
|
||||
|
||||
class SocImportFBX(bpy.types.Operator, ImportHelper, IOFBXOrientationHelper):
|
||||
"""Load a FBX file"""
|
||||
bl_idname = "import_scene_soc.fbx"
|
||||
bl_label = "Import FBX Soc"
|
||||
bl_options = {'UNDO', 'PRESET'}
|
||||
|
||||
directory = StringProperty()
|
||||
|
||||
filename_ext = ".fbx"
|
||||
filter_glob = StringProperty(default="*.fbx", options={'HIDDEN'})
|
||||
|
||||
ui_tab = EnumProperty(
|
||||
items=(('MAIN', "Main", "Main basic settings"),
|
||||
('ARMATURE', "Armatures", "Armature-related settings"),
|
||||
),
|
||||
name="ui_tab",
|
||||
description="Import options categories",
|
||||
)
|
||||
|
||||
use_manual_orientation = BoolProperty(
|
||||
name="Manual Orientation",
|
||||
description="Specify orientation and scale, instead of using embedded data in FBX file",
|
||||
default=False,
|
||||
)
|
||||
global_scale = FloatProperty(
|
||||
name="Scale",
|
||||
min=0.001, max=1000.0,
|
||||
default=1.0,
|
||||
)
|
||||
bake_space_transform = BoolProperty(
|
||||
name="!EXPERIMENTAL! Apply Transform",
|
||||
description="Bake space transform into object data, avoids getting unwanted rotations to objects when "
|
||||
"target space is not aligned with Blender's space "
|
||||
"(WARNING! experimental option, use at own risks, known broken with armatures/animations)",
|
||||
default=False,
|
||||
)
|
||||
|
||||
use_custom_normals = BoolProperty(
|
||||
name="Import Normals",
|
||||
description="Import custom normals, if available (otherwise Blender will recompute them)",
|
||||
default=True,
|
||||
)
|
||||
|
||||
use_image_search = BoolProperty(
|
||||
name="Image Search",
|
||||
description="Search subdirs for any associated images (WARNING: may be slow)",
|
||||
default=True,
|
||||
)
|
||||
|
||||
use_alpha_decals = BoolProperty(
|
||||
name="Alpha Decals",
|
||||
description="Treat materials with alpha as decals (no shadow casting)",
|
||||
default=False,
|
||||
)
|
||||
decal_offset = FloatProperty(
|
||||
name="Decal Offset",
|
||||
description="Displace geometry of alpha meshes",
|
||||
min=0.0, max=1.0,
|
||||
default=0.0,
|
||||
)
|
||||
|
||||
use_anim = BoolProperty(
|
||||
name="Import Animation",
|
||||
description="Import FBX animation",
|
||||
default=True,
|
||||
)
|
||||
anim_offset = FloatProperty(
|
||||
name="Animation Offset",
|
||||
description="Offset to apply to animation during import, in frames",
|
||||
default=1.0,
|
||||
)
|
||||
|
||||
use_custom_props = BoolProperty(
|
||||
name="Import User Properties",
|
||||
description="Import user properties as custom properties",
|
||||
default=True,
|
||||
)
|
||||
use_custom_props_enum_as_string = BoolProperty(
|
||||
name="Import Enums As Strings",
|
||||
description="Store enumeration values as strings",
|
||||
default=True,
|
||||
)
|
||||
|
||||
ignore_leaf_bones = BoolProperty(
|
||||
name="Ignore Leaf Bones",
|
||||
description="Ignore the last bone at the end of each chain (used to mark the length of the previous bone)",
|
||||
default=False,
|
||||
)
|
||||
force_connect_children = BoolProperty(
|
||||
name="Force Connect Children",
|
||||
description="Force connection of children bones to their parent, even if their computed head/tail "
|
||||
"positions do not match (can be useful with pure-joints-type armatures)",
|
||||
default=False,
|
||||
)
|
||||
automatic_bone_orientation = BoolProperty(
|
||||
name="Automatic Bone Orientation",
|
||||
description="Try to align the major bone axis with the bone children",
|
||||
default=False,
|
||||
)
|
||||
primary_bone_axis = EnumProperty(
|
||||
name="Primary Bone Axis",
|
||||
items=(('X', "X Axis", ""),
|
||||
('Y', "Y Axis", ""),
|
||||
('Z', "Z Axis", ""),
|
||||
('-X', "-X Axis", ""),
|
||||
('-Y', "-Y Axis", ""),
|
||||
('-Z', "-Z Axis", ""),
|
||||
),
|
||||
default='Y',
|
||||
)
|
||||
secondary_bone_axis = EnumProperty(
|
||||
name="Secondary Bone Axis",
|
||||
items=(('X', "X Axis", ""),
|
||||
('Y', "Y Axis", ""),
|
||||
('Z', "Z Axis", ""),
|
||||
('-X', "-X Axis", ""),
|
||||
('-Y', "-Y Axis", ""),
|
||||
('-Z', "-Z Axis", ""),
|
||||
),
|
||||
default='X',
|
||||
)
|
||||
|
||||
use_prepost_rot = BoolProperty(
|
||||
name="Use Pre/Post Rotation",
|
||||
description="Use pre/post rotation from FBX transform (you may have to disable that in some cases)",
|
||||
default=True,
|
||||
)
|
||||
|
||||
def draw(self, context):
    """Draw the FBX import options, split over the Main/Armature tabs."""
    layout = self.layout

    layout.prop(self, "ui_tab", expand=True)
    if self.ui_tab == 'MAIN':
        # FIX: removed stray trailing commas after three prop() calls in this
        # method; they silently built and discarded 1-tuples on every redraw.
        layout.prop(self, "use_manual_orientation")
        sub = layout.column()
        # Axis pickers only make sense with manual orientation.
        sub.enabled = self.use_manual_orientation
        sub.prop(self, "axis_forward")
        sub.prop(self, "axis_up")
        layout.prop(self, "global_scale")
        layout.prop(self, "bake_space_transform")

        layout.prop(self, "use_custom_normals")

        layout.prop(self, "use_anim")
        layout.prop(self, "anim_offset")

        layout.prop(self, "use_custom_props")
        sub = layout.row()
        sub.enabled = self.use_custom_props
        sub.prop(self, "use_custom_props_enum_as_string")

        layout.prop(self, "use_image_search")
        # layout.prop(self, "use_alpha_decals")
        layout.prop(self, "decal_offset")

        layout.prop(self, "use_prepost_rot")
    elif self.ui_tab == 'ARMATURE':
        layout.prop(self, "ignore_leaf_bones")
        layout.prop(self, "force_connect_children")
        layout.prop(self, "automatic_bone_orientation")
        sub = layout.column()
        # Manual axes are disabled when automatic orientation is on.
        sub.enabled = not self.automatic_bone_orientation
        sub.prop(self, "primary_bone_axis")
        sub.prop(self, "secondary_bone_axis")
|
||||
|
||||
def execute(self, context):
    """Run the FBX importer with the collected operator settings."""
    from . import import_fbx_soc

    # Drop UI-only options; the loader does not understand them.
    load_kwargs = self.as_keywords(ignore=("filter_glob", "directory", "ui_tab"))
    load_kwargs["use_cycles"] = (context.scene.render.engine == 'CYCLES')

    return import_fbx_soc.load(self, context, **load_kwargs)
|
||||
|
||||
|
||||
class SocExportFBX(bpy.types.Operator, ExportHelper, IOFBXOrientationHelper):
    """Write a FBX file"""
    bl_idname = "export_scene_soc.fbx"
    bl_label = "Export FBX Soc"
    bl_options = {'UNDO', 'PRESET'}

    filename_ext = ".fbx"
    filter_glob = StringProperty(default="*.fbx", options={'HIDDEN'})

    # List of operator properties, the attributes will be assigned
    # to the class instance from the operator settings before calling.

    # Which exporter backend execute() dispatches to.
    version = EnumProperty(
        items=(('BIN7400', "FBX 7.4 binary", "Modern 7.4 binary version"),
               ('ASCII6100', "FBX 6.1 ASCII",
                "Legacy 6.1 ascii version - WARNING: Deprecated and no more maintained"),
               ),
        name="Version",
        description="Choose which version of the exporter to use",
    )

    # 7.4 only
    ui_tab = EnumProperty(
        items=(('MAIN', "Main", "Main basic settings"),
               ('GEOMETRY', "Geometries", "Geometry-related settings"),
               ('ARMATURE', "Armatures", "Armature-related settings"),
               ('ANIMATION', "Animation", "Animation-related settings"),
               ),
        name="ui_tab",
        description="Export options categories",
    )

    use_selection = BoolProperty(
        name="Selected Objects",
        description="Export selected objects on visible layers",
        default=False,
    )
    global_scale = FloatProperty(
        name="Scale",
        description="Scale all data (Some importers do not support scaled armatures!)",
        min=0.001, max=1000.0,
        soft_min=0.01, soft_max=1000.0,
        default=1.0,
    )
    # 7.4 only
    apply_unit_scale = BoolProperty(
        name="Apply Unit",
        description="Scale all data according to current Blender size, to match default FBX unit "
                    "(centimeter, some importers do not handle UnitScaleFactor properly)",
        default=True,
    )
    # 7.4 only
    bake_space_transform = BoolProperty(
        name="!EXPERIMENTAL! Apply Transform",
        description="Bake space transform into object data, avoids getting unwanted rotations to objects when "
                    "target space is not aligned with Blender's space "
                    "(WARNING! experimental option, use at own risks, known broken with armatures/animations)",
        default=False,
    )

    object_types = EnumProperty(
        name="Object Types",
        options={'ENUM_FLAG'},
        items=(('EMPTY', "Empty", ""),
               ('CAMERA', "Camera", ""),
               ('LAMP', "Lamp", ""),
               ('ARMATURE', "Armature", "WARNING: not supported in dupli/group instances"),
               ('MESH', "Mesh", ""),
               ('OTHER', "Other", "Other geometry types, like curve, metaball, etc. (converted to meshes)"),
               ),
        description="Which kind of object to export",
        default={'EMPTY', 'CAMERA', 'LAMP', 'ARMATURE', 'MESH', 'OTHER'},
    )

    # -- Geometry options ------------------------------------------------------
    use_mesh_modifiers = BoolProperty(
        name="Apply Modifiers",
        description="Apply modifiers to mesh objects (except Armature ones) - "
                    "WARNING: prevents exporting shape keys",
        default=True,
    )
    use_mesh_modifiers_render = BoolProperty(
        name="Use Modifiers Render Setting",
        description="Use render settings when applying modifiers to mesh objects",
        default=True,
    )
    mesh_smooth_type = EnumProperty(
        name="Smoothing",
        items=(('OFF', "Normals Only", "Export only normals instead of writing edge or face smoothing data"),
               ('FACE', "Face", "Write face smoothing"),
               ('EDGE', "Edge", "Write edge smoothing"),
               ),
        description="Export smoothing information "
                    "(prefer 'Normals Only' option if your target importer understand split normals)",
        default='OFF',
    )
    use_mesh_edges = BoolProperty(
        name="Loose Edges",
        description="Export loose edges (as two-vertices polygons)",
        default=False,
    )
    # 7.4 only
    use_tspace = BoolProperty(
        name="Tangent Space",
        description="Add binormal and tangent vectors, together with normal they form the tangent space "
                    "(will only work correctly with tris/quads only meshes!)",
        default=False,
    )
    # 7.4 only
    use_custom_props = BoolProperty(
        name="Custom Properties",
        description="Export custom properties",
        default=False,
    )

    # -- Armature options ------------------------------------------------------
    add_leaf_bones = BoolProperty(
        name="Add Leaf Bones",
        description="Append a final bone to the end of each chain to specify last bone length "
                    "(use this when you intend to edit the armature from exported data)",
        default=True  # False for commit!
    )
    primary_bone_axis = EnumProperty(
        name="Primary Bone Axis",
        items=(('X', "X Axis", ""),
               ('Y', "Y Axis", ""),
               ('Z', "Z Axis", ""),
               ('-X', "-X Axis", ""),
               ('-Y', "-Y Axis", ""),
               ('-Z', "-Z Axis", ""),
               ),
        default='Y',
    )
    secondary_bone_axis = EnumProperty(
        name="Secondary Bone Axis",
        items=(('X', "X Axis", ""),
               ('Y', "Y Axis", ""),
               ('Z', "Z Axis", ""),
               ('-X', "-X Axis", ""),
               ('-Y', "-Y Axis", ""),
               ('-Z', "-Z Axis", ""),
               ),
        default='X',
    )
    use_armature_deform_only = BoolProperty(
        name="Only Deform Bones",
        description="Only write deforming bones (and non-deforming ones when they have deforming children)",
        default=False,
    )
    armature_nodetype = EnumProperty(
        name="Armature FBXNode Type",
        items=(('NULL', "Null", "'Null' FBX node, similar to Blender's Empty (default)"),
               ('ROOT', "Root", "'Root' FBX node, supposed to be the root of chains of bones..."),
               ('LIMBNODE', "LimbNode", "'LimbNode' FBX node, a regular joint between two bones..."),
               ),
        description="FBX type of node (object) used to represent Blender's armatures "
                    "(use Null one unless you experience issues with other app, other choices may no import back "
                    "perfectly in Blender...)",
        default='NULL',
    )

    # Anim - 7.4
    bake_anim = BoolProperty(
        name="Baked Animation",
        description="Export baked keyframe animation",
        default=True,
    )
    bake_anim_use_all_bones = BoolProperty(
        name="Key All Bones",
        description="Force exporting at least one key of animation for all bones "
                    "(needed with some target applications, like UE4)",
        default=True,
    )
    bake_anim_use_nla_strips = BoolProperty(
        name="NLA Strips",
        description="Export each non-muted NLA strip as a separated FBX's AnimStack, if any, "
                    "instead of global scene animation",
        default=True,
    )
    bake_anim_use_all_actions = BoolProperty(
        name="All Actions",
        description="Export each action as a separated FBX's AnimStack, instead of global scene animation "
                    "(note that animated objects will get all actions compatible with them, "
                    "others will get no animation at all)",
        default=True,
    )
    bake_anim_force_startend_keying = BoolProperty(
        name="Force Start/End Keying",
        description="Always add a keyframe at start and end of actions for animated channels",
        default=True,
    )
    bake_anim_step = FloatProperty(
        name="Sampling Rate",
        description="How often to evaluate animated values (in frames)",
        min=0.01, max=100.0,
        soft_min=0.1, soft_max=10.0,
        default=1.0,
    )
    bake_anim_simplify_factor = FloatProperty(
        name="Simplify",
        description="How much to simplify baked values (0.0 to disable, the higher the more simplified)",
        min=0.0, max=100.0,  # No simplification to up to 10% of current magnitude tolerance.
        soft_min=0.0, soft_max=10.0,
        default=1.0,  # default: min slope: 0.005, max frame step: 10.
    )

    # Anim - 6.1
    use_anim = BoolProperty(
        name="Animation",
        description="Export keyframe animation",
        default=True,
    )
    use_anim_action_all = BoolProperty(
        name="All Actions",
        description=("Export all actions for armatures or just the currently selected action"),
        default=True,
    )
    use_default_take = BoolProperty(
        name="Default Take",
        description="Export currently assigned object and armature animations into a default take from the scene "
                    "start/end frames",
        default=True
    )
    use_anim_optimize = BoolProperty(
        name="Optimize Keyframes",
        description="Remove double keyframes",
        default=True,
    )
    anim_optimize_precision = FloatProperty(
        name="Precision",
        description="Tolerance for comparing double keyframes (higher for greater accuracy)",
        min=0.0, max=20.0,  # from 10^2 to 10^-18 frames precision.
        soft_min=1.0, soft_max=16.0,
        default=6.0,  # default: 10^-4 frames.
    )
    # End anim

    # Shared enum property for texture path handling (defined in the io_utils
    # helpers imported at module top).
    path_mode = path_reference_mode
    # 7.4 only
    embed_textures = BoolProperty(
        name="Embed Textures",
        description="Embed textures in FBX binary file (only for \"Copy\" path mode!)",
        default=False,
    )
    batch_mode = EnumProperty(
        name="Batch Mode",
        items=(('OFF', "Off", "Active scene to file"),
               ('SCENE', "Scene", "Each scene as a file"),
               ('GROUP', "Group", "Each group as a file"),
               ),
    )
    use_batch_own_dir = BoolProperty(
        name="Batch Own Dir",
        description="Create a dir for each exported file",
        default=True,
    )
    use_metadata = BoolProperty(
        name="Use Metadata",
        default=True,
        options={'HIDDEN'},
    )
|
||||
|
||||
def draw(self, context):
    """Draw the FBX export options.

    The 7.4 binary exporter gets a tabbed UI (Main/Geometry/Armature/
    Animation); the legacy 6.1 ASCII exporter gets one flat panel.
    """
    layout = self.layout

    layout.prop(self, "version")

    if self.version == 'BIN7400':
        layout.prop(self, "ui_tab", expand=True)
        if self.ui_tab == 'MAIN':
            layout.prop(self, "use_selection")
            row = layout.row(align=True)
            row.prop(self, "global_scale")
            sub = row.row(align=True)
            sub.prop(self, "apply_unit_scale", text="", icon='NDOF_TRANS')
            layout.prop(self, "axis_forward")
            layout.prop(self, "axis_up")

            layout.separator()
            layout.prop(self, "object_types")
            layout.prop(self, "bake_space_transform")
            layout.prop(self, "use_custom_props")

            layout.separator()
            row = layout.row(align=True)
            row.prop(self, "path_mode")
            sub = row.row(align=True)
            # Embedding textures only works together with "Copy" path mode.
            sub.enabled = (self.path_mode == 'COPY')
            sub.prop(self, "embed_textures", text="", icon='PACKAGE' if self.embed_textures else 'UGLYPACKAGE')
            row = layout.row(align=True)
            row.prop(self, "batch_mode")
            sub = row.row(align=True)
            sub.prop(self, "use_batch_own_dir", text="", icon='NEWFOLDER')
        elif self.ui_tab == 'GEOMETRY':
            layout.prop(self, "use_mesh_modifiers")
            sub = layout.row()
            sub.enabled = self.use_mesh_modifiers
            sub.prop(self, "use_mesh_modifiers_render")
            layout.prop(self, "mesh_smooth_type")
            layout.prop(self, "use_mesh_edges")
            sub = layout.row()
            #~ sub.enabled = self.mesh_smooth_type in {'OFF'}
            sub.prop(self, "use_tspace")
        elif self.ui_tab == 'ARMATURE':
            layout.prop(self, "use_armature_deform_only")
            layout.prop(self, "add_leaf_bones")
            layout.prop(self, "primary_bone_axis")
            layout.prop(self, "secondary_bone_axis")
            layout.prop(self, "armature_nodetype")
        elif self.ui_tab == 'ANIMATION':
            layout.prop(self, "bake_anim")
            col = layout.column()
            col.enabled = self.bake_anim
            col.prop(self, "bake_anim_use_all_bones")
            col.prop(self, "bake_anim_use_nla_strips")
            col.prop(self, "bake_anim_use_all_actions")
            col.prop(self, "bake_anim_force_startend_keying")
            col.prop(self, "bake_anim_step")
            col.prop(self, "bake_anim_simplify_factor")
    else:
        # Legacy 6.1 ASCII exporter: single flat panel.
        layout.prop(self, "use_selection")
        layout.prop(self, "global_scale")
        layout.prop(self, "axis_forward")
        layout.prop(self, "axis_up")

        layout.separator()
        layout.prop(self, "object_types")
        layout.prop(self, "use_mesh_modifiers")
        layout.prop(self, "mesh_smooth_type")
        layout.prop(self, "use_mesh_edges")
        sub = layout.row()
        #~ sub.enabled = self.mesh_smooth_type in {'OFF'}
        sub.prop(self, "use_tspace")
        layout.prop(self, "use_armature_deform_only")
        layout.prop(self, "use_anim")
        col = layout.column()
        col.enabled = self.use_anim
        col.prop(self, "use_anim_action_all")
        col.prop(self, "use_default_take")
        col.prop(self, "use_anim_optimize")
        col.prop(self, "anim_optimize_precision")

        layout.separator()
        layout.prop(self, "path_mode")

        layout.prop(self, "batch_mode")
        layout.prop(self, "use_batch_own_dir")


@property
def check_extension(self):
    # Only auto-manage the ".fbx" extension when exporting a single file;
    # in batch modes the file path is a directory.
    return self.batch_mode == 'OFF'
|
||||
|
||||
def execute(self, context):
    """Export the scene to FBX via the backend selected by ``version``."""
    from mathutils import Matrix

    if not self.filepath:
        raise Exception("filepath not set")

    # Combine the uniform scale with the axis-conversion rotation.
    scale_mat = Matrix.Scale(self.global_scale, 4)
    axis_mat = axis_conversion(to_forward=self.axis_forward,
                               to_up=self.axis_up,
                               ).to_4x4()
    global_matrix = scale_mat * axis_mat

    # Strip options the savers do not accept; the scale is already baked
    # into global_matrix.
    save_kwargs = self.as_keywords(ignore=("global_scale",
                                           "check_existing",
                                           "filter_glob",
                                           "ui_tab",
                                           ))
    save_kwargs["global_matrix"] = global_matrix

    if self.version == 'BIN7400':
        from . import export_fbx_bin_soc
        return export_fbx_bin_soc.save(self, context, **save_kwargs)

    from . import export_fbx_soc
    return export_fbx_soc.save(self, context, **save_kwargs)
|
||||
|
||||
|
||||
def menu_func_import(self, context):
    """File > Import menu entry for the soc FBX importer."""
    layout = self.layout
    layout.operator(SocImportFBX.bl_idname, text="FBX (.fbx)")


def menu_func_export(self, context):
    """File > Export menu entry for the soc FBX exporter."""
    layout = self.layout
    layout.operator(SocExportFBX.bl_idname, text="FBX (.fbx)")
|
||||
|
||||
|
||||
# Operator classes this add-on registers.
classes = (
    SocImportFBX,
    SocExportFBX,
)


def register():
    """Register the operators and add the File > Import/Export menu entries."""
    for operator_cls in classes:
        bpy.utils.register_class(operator_cls)

    bpy.types.INFO_MT_file_import.append(menu_func_import)
    bpy.types.INFO_MT_file_export.append(menu_func_export)


def unregister():
    """Remove the menu entries, then unregister the operators."""
    bpy.types.INFO_MT_file_import.remove(menu_func_import)
    bpy.types.INFO_MT_file_export.remove(menu_func_export)

    for operator_cls in classes:
        bpy.utils.unregister_class(operator_cls)


# Allow running the module directly (e.g. from Blender's text editor).
if __name__ == "__main__":
    register()
|
78
release/scripts/startup/io_scene_fbx_soc/data_types.py
Normal file
78
release/scripts/startup/io_scene_fbx_soc/data_types.py
Normal file
@@ -0,0 +1,78 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Script copyright (C) 2006-2012, assimp team
|
||||
# Script copyright (C) 2013 Blender Foundation
|
||||
|
||||
# FBX binary property type codes: one byte written in front of every
# property payload (uppercase = scalars/blobs, lowercase = arrays).
BOOL = b'C'[0]
INT16 = b'Y'[0]
INT32 = b'I'[0]
INT64 = b'L'[0]
FLOAT32 = b'F'[0]
FLOAT64 = b'D'[0]
BYTES = b'R'[0]
STRING = b'S'[0]
INT32_ARRAY = b'i'[0]
INT64_ARRAY = b'l'[0]
FLOAT32_ARRAY = b'f'[0]
FLOAT64_ARRAY = b'd'[0]
BOOL_ARRAY = b'b'[0]
BYTE_ARRAY = b'c'[0]

# Some other misc defines
# Known combinations so far - supposed meaning: A = animatable, A+ = animated, U = UserProp
# VALID_NUMBER_FLAGS = {b'A', b'A+', b'AU', b'A+U'}  # Not used...

# array types - actual length may vary (depending on underlying C implementation)!
import array
|
||||
|
||||
# For now, bytes and bool are assumed always 1byte.
ARRAY_BOOL = 'b'
ARRAY_BYTE = 'B'


def _typecode_for_size(candidates, byte_size):
    """Return the first ``array`` typecode in *candidates* whose C item
    size is *byte_size* bytes, or None when no candidate matches."""
    for code in candidates:
        if array.array(code).itemsize == byte_size:
            return code
    return None


# The C sizes behind 'i'/'l'/'q' and 'f'/'d' are platform dependent,
# so probe for typecodes with the exact widths the FBX format needs.
ARRAY_INT32 = _typecode_for_size('ilq', 4)
ARRAY_INT64 = _typecode_for_size('ilq', 8)
if not ARRAY_INT32:
    raise Exception("Impossible to get a 4-bytes integer type for array!")
if not ARRAY_INT64:
    raise Exception("Impossible to get an 8-bytes integer type for array!")

ARRAY_FLOAT32 = _typecode_for_size('fd', 4)
ARRAY_FLOAT64 = _typecode_for_size('fd', 8)
if not ARRAY_FLOAT32:
    raise Exception("Impossible to get a 4-bytes float type for array!")
if not ARRAY_FLOAT64:
    raise Exception("Impossible to get an 8-bytes float type for array!")
|
321
release/scripts/startup/io_scene_fbx_soc/encode_bin.py
Normal file
321
release/scripts/startup/io_scene_fbx_soc/encode_bin.py
Normal file
@@ -0,0 +1,321 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Script copyright (C) 2013 Campbell Barton
|
||||
|
||||
# Allow running both as part of the add-on package and as a standalone
# script: fall back to a plain import when the relative one is unavailable.
try:
    from . import data_types
except (ImportError, SystemError):
    # FIX: was a bare `except:`, which swallowed *every* exception raised
    # while importing data_types (including real bugs inside that module)
    # before blindly retrying the absolute import.
    import data_types
|
||||
|
||||
from struct import pack
|
||||
import array
|
||||
import zlib
|
||||
|
||||
# Binary FBX framing constants.
# Each nested scope is terminated by a 13-byte NUL sentinel record.
_BLOCK_SENTINEL_LENGTH = 13
_BLOCK_SENTINEL_DATA = (b'\0' * _BLOCK_SENTINEL_LENGTH)
# Payloads are little-endian on disk; arrays must be byteswapped here.
_IS_BIG_ENDIAN = (__import__("sys").byteorder != 'little')
_HEAD_MAGIC = b'Kaydara FBX Binary\x20\x20\x00\x1a\x00'

# fbx has very strict CRC rules, all based on file timestamp
# until we figure these out, write files at a fixed time. (workaround!)

# Assumes: CreationTime
_TIME_ID = b'1970-01-01 10:00:00:000'
_FILE_ID = b'\x28\xb3\x2a\xeb\xb6\x24\xcc\xc2\xbf\xc8\xb0\x2a\xa9\x2b\xfc\xf1'
_FOOT_ID = b'\xfa\xbc\xab\x09\xd0\xc8\xd4\x66\xb1\x76\xfb\x83\x1c\xf7\x26\x7e'

# Awful exceptions: those "classes" of elements seem to need block sentinel even when having no children and some props.
_ELEMS_ID_ALWAYS_BLOCK_SENTINEL = {b"AnimationStack", b"AnimationLayer"}
|
||||
|
||||
|
||||
class FBXElem:
    """One node of a binary FBX document tree.

    Holds the node id, the already-packed property payloads (with a
    parallel byte array of type codes), and the child elements.
    """
    __slots__ = (
        "id",
        "props",
        "props_type",
        "elems",

        "_props_length",  # combine length of props
        "_end_offset",  # byte offset from the start of the file.
    )

    def __init__(self, id):
        # The id is written with a single length byte in front of it.
        assert(len(id) < 256)  # length must fit in a uint8
        self.id = id
        self.props = []  # packed bytes, one entry per property
        self.props_type = bytearray()  # one type-code byte per props entry
        self.elems = []  # child FBXElem nodes
        self._end_offset = -1  # filled in by _calc_offsets()
        self._props_length = -1  # filled in by _calc_offsets()
|
||||
|
||||
def add_bool(self, data):
    """Append a boolean property (FBX type code 'C')."""
    assert(isinstance(data, bool))
    data = pack('?', data)

    self.props_type.append(data_types.BOOL)
    self.props.append(data)

def add_int16(self, data):
    """Append a little-endian 16-bit signed integer property ('Y')."""
    assert(isinstance(data, int))
    data = pack('<h', data)

    self.props_type.append(data_types.INT16)
    self.props.append(data)

def add_int32(self, data):
    """Append a little-endian 32-bit signed integer property ('I')."""
    assert(isinstance(data, int))
    data = pack('<i', data)

    self.props_type.append(data_types.INT32)
    self.props.append(data)

def add_int64(self, data):
    """Append a little-endian 64-bit signed integer property ('L')."""
    assert(isinstance(data, int))
    data = pack('<q', data)

    self.props_type.append(data_types.INT64)
    self.props.append(data)

def add_float32(self, data):
    """Append a little-endian 32-bit float property ('F')."""
    assert(isinstance(data, float))
    data = pack('<f', data)

    self.props_type.append(data_types.FLOAT32)
    self.props.append(data)

def add_float64(self, data):
    """Append a little-endian 64-bit float property ('D')."""
    assert(isinstance(data, float))
    data = pack('<d', data)

    self.props_type.append(data_types.FLOAT64)
    self.props.append(data)
||||
|
||||
def add_bytes(self, data):
    """Append a raw binary blob property ('R'), uint32-length-prefixed."""
    assert(isinstance(data, bytes))
    data = pack('<I', len(data)) + data

    self.props_type.append(data_types.BYTES)
    self.props.append(data)

def add_string(self, data):
    """Append a string property ('S') from already-encoded bytes."""
    assert(isinstance(data, bytes))
    data = pack('<I', len(data)) + data

    self.props_type.append(data_types.STRING)
    self.props.append(data)

def add_string_unicode(self, data):
    """Append a string property ('S'), encoding *data* as UTF-8 first."""
    assert(isinstance(data, str))
    data = data.encode('utf8')
    data = pack('<I', len(data)) + data

    self.props_type.append(data_types.STRING)
    self.props.append(data)
|
||||
|
||||
def _add_array_helper(self, data, array_type, prop_type):
    """Append an array property: (length, encoding, comp_len) header
    followed by the (optionally zlib-compressed) payload."""
    assert(isinstance(data, array.array))
    assert(data.typecode == array_type)

    length = len(data)

    # Disk format is little-endian; byteswap a copy on big-endian hosts.
    if _IS_BIG_ENDIAN:
        data = data[:]
        data.byteswap()
    data = data.tobytes()

    # mimic behavior of fbxconverter (also common sense)
    # we could make this configurable.
    # encoding: 0 = raw, 1 = zlib-deflated.
    encoding = 0 if len(data) <= 128 else 1
    if encoding == 0:
        pass
    elif encoding == 1:
        data = zlib.compress(data, 1)

    comp_len = len(data)

    data = pack('<3I', length, encoding, comp_len) + data

    self.props_type.append(prop_type)
    self.props.append(data)
|
||||
|
||||
def add_int32_array(self, data):
    """Append an int32 array property ('i'); converts non-arrays first."""
    if not isinstance(data, array.array):
        data = array.array(data_types.ARRAY_INT32, data)
    self._add_array_helper(data, data_types.ARRAY_INT32, data_types.INT32_ARRAY)

def add_int64_array(self, data):
    """Append an int64 array property ('l'); converts non-arrays first."""
    if not isinstance(data, array.array):
        data = array.array(data_types.ARRAY_INT64, data)
    self._add_array_helper(data, data_types.ARRAY_INT64, data_types.INT64_ARRAY)

def add_float32_array(self, data):
    """Append a float32 array property ('f'); converts non-arrays first."""
    if not isinstance(data, array.array):
        data = array.array(data_types.ARRAY_FLOAT32, data)
    self._add_array_helper(data, data_types.ARRAY_FLOAT32, data_types.FLOAT32_ARRAY)

def add_float64_array(self, data):
    """Append a float64 array property ('d'); converts non-arrays first."""
    if not isinstance(data, array.array):
        data = array.array(data_types.ARRAY_FLOAT64, data)
    self._add_array_helper(data, data_types.ARRAY_FLOAT64, data_types.FLOAT64_ARRAY)

def add_bool_array(self, data):
    """Append a bool array property ('b'); converts non-arrays first."""
    if not isinstance(data, array.array):
        data = array.array(data_types.ARRAY_BOOL, data)
    self._add_array_helper(data, data_types.ARRAY_BOOL, data_types.BOOL_ARRAY)

def add_byte_array(self, data):
    """Append a byte array property ('c'); converts non-arrays first."""
    if not isinstance(data, array.array):
        data = array.array(data_types.ARRAY_BYTE, data)
    self._add_array_helper(data, data_types.ARRAY_BYTE, data_types.BYTE_ARRAY)
|
||||
|
||||
# -------------------------
|
||||
# internal helper functions
|
||||
|
||||
def _calc_offsets(self, offset, is_last):
    """
    Call before writing, calculates fixed offsets.

    Returns the byte offset just past this element (recorded in
    self._end_offset, which _write() later emits and verifies).
    """
    # Must not have been computed already for this element.
    assert(self._end_offset == -1)
    assert(self._props_length == -1)

    offset += 12  # 3 uints
    offset += 1 + len(self.id)  # len + idname

    props_length = 0
    for data in self.props:
        # 1 byte for the prop type
        props_length += 1 + len(data)
    self._props_length = props_length
    offset += props_length

    offset = self._calc_offsets_children(offset, is_last)

    self._end_offset = offset
    return offset
|
||||
|
||||
def _calc_offsets_children(self, offset, is_last):
    """Accumulate offsets over children, mirroring exactly the sentinel
    rules _write_children() applies when actually writing."""
    if self.elems:
        elem_last = self.elems[-1]
        for elem in self.elems:
            offset = elem._calc_offsets(offset, (elem is elem_last))
        offset += _BLOCK_SENTINEL_LENGTH
    elif not self.props or self.id in _ELEMS_ID_ALWAYS_BLOCK_SENTINEL:
        # Childless scopes still need a sentinel, except for the very
        # last sibling.
        if not is_last:
            offset += _BLOCK_SENTINEL_LENGTH

    return offset
|
||||
|
||||
def _write(self, write, tell, is_last):
    """Serialize this element (header, id, properties, children) via the
    *write* callable; *tell* is used to verify the precomputed offset."""
    # _calc_offsets() must have run first.
    assert(self._end_offset != -1)
    assert(self._props_length != -1)

    # Node header: end offset, property count, total property byte length.
    write(pack('<3I', self._end_offset, len(self.props), self._props_length))

    write(bytes((len(self.id),)))
    write(self.id)

    for i, data in enumerate(self.props):
        write(bytes((self.props_type[i],)))
        write(data)

    self._write_children(write, tell, is_last)

    if tell() != self._end_offset:
        # FIX: the original message referenced the undefined name
        # `end_offset`, so a length mismatch raised NameError instead of
        # the intended, informative IOError.
        raise IOError("scope length not reached, "
                      "something is wrong (%d)" % (self._end_offset - tell()))
|
||||
|
||||
def _write_children(self, write, tell, is_last):
    """Write child elements, then the NUL block sentinel where the
    format requires one (must match _calc_offsets_children())."""
    if self.elems:
        elem_last = self.elems[-1]
        for elem in self.elems:
            # Only the root element may have an empty id.
            assert(elem.id != b'')
            elem._write(write, tell, (elem is elem_last))
        write(_BLOCK_SENTINEL_DATA)
    elif not self.props or self.id in _ELEMS_ID_ALWAYS_BLOCK_SENTINEL:
        if not is_last:
            write(_BLOCK_SENTINEL_DATA)
|
||||
|
||||
|
||||
def _write_timedate_hack(elem_root):
    """Replace FileId and CreationTime with fixed values.

    Workaround for FBX's timestamp-based CRC rules (see module header):
    files are written as if created at one fixed time.
    """
    # perform 2 changes
    # - set the FileID
    # - set the CreationTime

    ok = 0
    for elem in elem_root.elems:
        if elem.id == b'FileId':
            # Expect exactly one bytes ('R') property; replace it.
            assert(elem.props_type[0] == b'R'[0])
            assert(len(elem.props_type) == 1)
            elem.props.clear()
            elem.props_type.clear()

            elem.add_bytes(_FILE_ID)
            ok += 1
        elif elem.id == b'CreationTime':
            # Expect exactly one string ('S') property; replace it.
            assert(elem.props_type[0] == b'S'[0])
            assert(len(elem.props_type) == 1)
            elem.props.clear()
            elem.props_type.clear()

            elem.add_string(_TIME_ID)
            ok += 1

        if ok == 2:
            break

    if ok != 2:
        print("Missing fields!")
|
||||
|
||||
|
||||
def write(fn, elem_root, version):
    """Write *elem_root* (the nameless root element) to path *fn* as a
    binary FBX file of the given *version*."""
    assert(elem_root.id == b'')

    with open(fn, 'wb') as f:
        write = f.write
        tell = f.tell

        write(_HEAD_MAGIC)
        write(pack('<I', version))

        # hack since we don't decode time.
        # ideally we would _not_ modify this data.
        _write_timedate_hack(elem_root)

        # Precompute every node's end offset, then emit the tree.
        elem_root._calc_offsets_children(tell(), False)
        elem_root._write_children(write, tell, False)

        write(_FOOT_ID)
        write(b'\x00' * 4)

        # padding for alignment (values between 1 & 16 observed)
        # if already aligned to 16, add a full 16 bytes padding.
        ofs = tell()
        pad = ((ofs + 15) & ~15) - ofs
        if pad == 0:
            pad = 16

        write(b'\0' * pad)

        write(pack('<I', version))

        # unknown magic (always the same)
        write(b'\0' * 120)
        write(b'\xf8\x5a\x8c\x6a\xde\xf5\xd9\x7e\xec\xe9\x0c\xe3\x75\x8f\x29\x0b')
|
3149
release/scripts/startup/io_scene_fbx_soc/export_fbx_bin_soc.py
Normal file
3149
release/scripts/startup/io_scene_fbx_soc/export_fbx_bin_soc.py
Normal file
File diff suppressed because it is too large
Load Diff
2933
release/scripts/startup/io_scene_fbx_soc/export_fbx_soc.py
Normal file
2933
release/scripts/startup/io_scene_fbx_soc/export_fbx_soc.py
Normal file
File diff suppressed because it is too large
Load Diff
353
release/scripts/startup/io_scene_fbx_soc/fbx2json.py
Normal file
353
release/scripts/startup/io_scene_fbx_soc/fbx2json.py
Normal file
@@ -0,0 +1,353 @@
|
||||
#!/usr/bin/env python3
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Script copyright (C) 2006-2012, assimp team
|
||||
# Script copyright (C) 2013 Blender Foundation
|
||||
|
||||
"""
Usage
=====

   fbx2json [FILES]...

This script will write a JSON file for each FBX argument given.


Output
======

The JSON data is formatted into a list of nested lists of 4 items:

   ``[id, [data, ...], "data_types", [subtree, ...]]``

Where each list may be empty, and the items in
the subtree are formatted the same way.

data_types is a string, aligned with data that specifies a type
for each property.

The types are as follows:

* 'Y': - INT16
* 'C': - BOOL
* 'I': - INT32
* 'F': - FLOAT32
* 'D': - FLOAT64
* 'L': - INT64
* 'R': - BYTES
* 'S': - STRING
* 'f': - FLOAT32_ARRAY
* 'i': - INT32_ARRAY
* 'd': - FLOAT64_ARRAY
* 'l': - INT64_ARRAY
* 'b': - BOOL ARRAY
* 'c': - BYTE ARRAY

Note that key:value pairs aren't used since the id's are not
ensured to be unique.
"""
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# FBX Binary Parser
|
||||
|
||||
from struct import unpack
|
||||
import array
|
||||
import zlib
|
||||
|
||||
# at the end of each nested block, there is a NUL record to indicate
# that the sub-scope exists (i.e. to distinguish between P: and P : {})
_BLOCK_SENTINEL_LENGTH = ...  # placeholder; set by init_version() (13 or 25 bytes)
_BLOCK_SENTINEL_DATA = ...    # placeholder; set by init_version() (all-zero bytes)
read_fbx_elem_uint = ...      # placeholder; set by init_version() (read_uint or read_uint64)
_IS_BIG_ENDIAN = (__import__("sys").byteorder != 'little')
# Magic byte sequence at the start of every binary FBX file.
_HEAD_MAGIC = b'Kaydara FBX Binary\x20\x20\x00\x1a\x00'
from collections import namedtuple
# One parsed FBX record: element name, property values, property type codes,
# and the list of child elements.
FBXElem = namedtuple("FBXElem", ("id", "props", "props_type", "elems"))
del namedtuple  # keep the module namespace clean
|
||||
|
||||
|
||||
def read_uint(read):
    """Consume 4 bytes via *read* and return a little-endian unsigned 32-bit int."""
    (value,) = unpack(b'<I', read(4))
    return value
|
||||
|
||||
|
||||
def read_uint64(read):
    """Consume 8 bytes via *read* and return a little-endian unsigned 64-bit int."""
    (value,) = unpack(b'<Q', read(8))
    return value
|
||||
|
||||
|
||||
def read_ubyte(read):
    """Consume one byte via *read* and return it as an unsigned int (0-255)."""
    (value,) = unpack(b'B', read(1))
    return value
|
||||
|
||||
|
||||
def read_string_ubyte(read):
    """Read a short string (one length byte followed by that many bytes).

    Returns the raw bytes, undecoded.
    """
    return read(read_ubyte(read))
|
||||
|
||||
|
||||
def unpack_array(read, array_type, array_stride, array_byteswap):
    """Read one FBX array property and return it as an ``array.array``.

    *array_type* is the ``array`` typecode, *array_stride* the per-element
    byte size, and *array_byteswap* whether the (little-endian) payload must
    be swapped on big-endian hosts.
    """
    # Array header: element count, encoding flag, payload byte length.
    length = read_uint(read)
    encoding = read_uint(read)
    comp_len = read_uint(read)

    raw = read(comp_len)

    # Encoding 1 means the payload is zlib-deflated; 0 means raw bytes.
    if encoding == 1:
        raw = zlib.decompress(raw)

    assert(length * array_stride == len(raw))

    values = array.array(array_type, raw)
    if array_byteswap and _IS_BIG_ENDIAN:
        values.byteswap()
    return values
|
||||
|
||||
|
||||
# Dispatch table mapping an FBX property type-code byte (e.g. ``b'Y'[0]``)
# to a reader that consumes exactly that property's payload from ``read``.
read_data_dict = {
    b'Y'[0]: lambda read: unpack(b'<h', read(2))[0],  # 16 bit int
    b'C'[0]: lambda read: unpack(b'?', read(1))[0],   # 1 bit bool (yes/no)
    b'I'[0]: lambda read: unpack(b'<i', read(4))[0],  # 32 bit int
    b'F'[0]: lambda read: unpack(b'<f', read(4))[0],  # 32 bit float
    b'D'[0]: lambda read: unpack(b'<d', read(8))[0],  # 64 bit float
    b'L'[0]: lambda read: unpack(b'<q', read(8))[0],  # 64 bit int
    b'R'[0]: lambda read: read(read_uint(read)),      # binary data
    b'S'[0]: lambda read: read(read_uint(read)),      # string data
    b'f'[0]: lambda read: unpack_array(read, 'f', 4, False),  # array (float)
    b'i'[0]: lambda read: unpack_array(read, 'i', 4, True),   # array (int)
    b'd'[0]: lambda read: unpack_array(read, 'd', 8, False),  # array (double)
    b'l'[0]: lambda read: unpack_array(read, 'q', 8, True),   # array (long)
    b'b'[0]: lambda read: unpack_array(read, 'b', 1, False),  # array (bool)
    b'c'[0]: lambda read: unpack_array(read, 'B', 1, False),  # array (ubyte)
    }
|
||||
|
||||
|
||||
# FBX 7500 (aka FBX2016) introduces incompatible changes at binary level:
|
||||
# * The NULL block marking end of nested stuff switches from 13 bytes long to 25 bytes long.
|
||||
# * The FBX element metadata (end_offset, prop_count and prop_length) switch from uint32 to uint64.
|
||||
def init_version(fbx_version):
    """Configure the module-level parsing state for *fbx_version*.

    Sets ``_BLOCK_SENTINEL_LENGTH`` / ``_BLOCK_SENTINEL_DATA`` (the all-zero
    end-of-scope record) and ``read_fbx_elem_uint`` (uint32 reader before FBX
    7500, uint64 reader from 7500 on).
    """
    global _BLOCK_SENTINEL_LENGTH, _BLOCK_SENTINEL_DATA, read_fbx_elem_uint

    # Reset rather than assert the "unset" sentinel: the original
    # ``assert(_BLOCK_SENTINEL_LENGTH == ...)`` meant a second call (i.e. a
    # second parse() in the same process) raised AssertionError.  The
    # parse_fbx.py copy of this function already does it this way.
    _BLOCK_SENTINEL_LENGTH = ...
    _BLOCK_SENTINEL_DATA = ...
    read_fbx_elem_uint = ...

    if fbx_version < 7500:
        _BLOCK_SENTINEL_LENGTH = 13
        read_fbx_elem_uint = read_uint
    else:
        _BLOCK_SENTINEL_LENGTH = 25
        read_fbx_elem_uint = read_uint64
    _BLOCK_SENTINEL_DATA = (b'\0' * _BLOCK_SENTINEL_LENGTH)
|
||||
|
||||
|
||||
def read_elem(read, tell, use_namedtuple):
    """Read one FBX element (and, recursively, its subtree).

    Returns an ``FBXElem`` (or a plain 4-tuple when *use_namedtuple* is
    False), or None when the NUL end-of-scope record is reached.
    Raises IOError if the stream is inconsistent with the declared sizes.
    """
    # Record header (uint32 or uint64 depending on FBX version):
    # [0] the offset at which this block ends
    # [1] the number of properties in the scope
    # [2] the length of the property list
    end_offset = read_fbx_elem_uint(read)
    if end_offset == 0:
        # Zero end-offset marks the NUL sentinel record closing the scope.
        return None

    prop_count = read_fbx_elem_uint(read)
    prop_length = read_fbx_elem_uint(read)  # total property byte size (not used here)

    elem_id = read_string_ubyte(read)        # elem name of the scope/key
    elem_props_type = bytearray(prop_count)  # elem property types
    elem_props_data = [None] * prop_count    # elem properties (if any)
    elem_subtree = []                        # elem children (if any)

    # Each property is a one-byte type code followed by its payload.
    for i in range(prop_count):
        data_type = read(1)[0]
        elem_props_data[i] = read_data_dict[data_type](read)
        elem_props_type[i] = data_type

    # Anything left before end_offset is a nested scope plus its sentinel.
    if tell() < end_offset:
        while tell() < (end_offset - _BLOCK_SENTINEL_LENGTH):
            elem_subtree.append(read_elem(read, tell, use_namedtuple))

        if read(_BLOCK_SENTINEL_LENGTH) != _BLOCK_SENTINEL_DATA:
            raise IOError("failed to read nested block sentinel, "
                          "expected all bytes to be 0")

    if tell() != end_offset:
        raise IOError("scope length not reached, something is wrong")

    args = (elem_id, elem_props_data, elem_props_type, elem_subtree)
    return FBXElem(*args) if use_namedtuple else args
|
||||
|
||||
|
||||
def parse_version(fn):
    """Return the FBX version of file *fn*, or 0 if it is not a binary FBX."""
    with open(fn, 'rb') as f:
        # The version uint32 directly follows the magic bytes.
        if f.read(len(_HEAD_MAGIC)) != _HEAD_MAGIC:
            return 0
        return read_uint(f.read)
|
||||
|
||||
|
||||
def parse(fn, use_namedtuple=True):
    """Parse the binary FBX file *fn*.

    Returns ``(root_elem, fbx_version)`` where *root_elem* is a synthetic
    root whose ``elems`` are the file's top-level elements.
    Raises IOError if the file lacks the binary-FBX magic header.
    """
    root_elems = []

    with open(fn, 'rb') as f:
        read = f.read
        tell = f.tell

        if read(len(_HEAD_MAGIC)) != _HEAD_MAGIC:
            raise IOError("Invalid header")

        # The version selects 32 vs 64 bit record headers and the sentinel
        # size; init_version() configures the module globals accordingly.
        fbx_version = read_uint(read)
        init_version(fbx_version)

        # Top-level scope: elements follow each other until the NUL record.
        while True:
            elem = read_elem(read, tell, use_namedtuple)
            if elem is None:
                break
            root_elems.append(elem)

    args = (b'', [], bytearray(0), root_elems)
    return FBXElem(*args) if use_namedtuple else args, fbx_version
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Inline Modules
|
||||
|
||||
# pyfbx.data_types
# Inline stand-in for the ``pyfbx.data_types`` module so this script stays a
# single file.  Each constant is the integer value of the one-byte FBX
# property type code it names.
data_types = type(array)("data_types")
data_types.__dict__.update(
        dict(
            INT16 = b'Y'[0],
            BOOL = b'C'[0],
            INT32 = b'I'[0],
            FLOAT32 = b'F'[0],
            FLOAT64 = b'D'[0],
            INT64 = b'L'[0],
            BYTES = b'R'[0],
            STRING = b'S'[0],
            FLOAT32_ARRAY = b'f'[0],
            INT32_ARRAY = b'i'[0],
            FLOAT64_ARRAY = b'd'[0],
            INT64_ARRAY = b'l'[0],
            BOOL_ARRAY = b'b'[0],
            BYTE_ARRAY = b'c'[0],
            ))
|
||||
|
||||
# pyfbx.parse_bin
# Inline stand-in for the ``pyfbx.parse_bin`` module, exposing parse() under
# the same attribute name the packaged version would provide.
parse_bin = type(array)("parse_bin")
parse_bin.__dict__.update(
        dict(
            parse = parse
            ))
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# JSON Converter
|
||||
# from pyfbx import parse_bin, data_types
|
||||
import json
|
||||
import array
|
||||
|
||||
|
||||
def fbx2json_property_as_string(prop, prop_type):
    """Return one FBX property serialized as a JSON value string."""
    if prop_type == data_types.STRING:
        # FBX strings embed a 0x00 0x01 pair as a "::" namespace separator.
        text = prop.decode('utf-8').replace('\x00\x01', '::')
        return json.dumps(text)

    kind = type(prop)
    if kind == bytes:
        # Strip the ``b'`` prefix and the closing quote from the bytes repr.
        return json.dumps(repr(prop)[2:-1])
    if kind == bool:
        return json.dumps(prop)
    if kind == array.array:
        return repr(list(prop))

    return repr(prop)
|
||||
|
||||
|
||||
def fbx2json_properties_as_string(fbx_elem):
    """Serialize all properties of *fbx_elem* as one comma-separated string."""
    prop_items = zip(fbx_elem.props, fbx_elem.props_type)
    return ", ".join(fbx2json_property_as_string(prop, prop_type)
                     for prop, prop_type in prop_items)
|
||||
|
||||
|
||||
def fbx2json_recurse(fw, fbx_elem, ident, is_last):
    """Write *fbx_elem* and its subtree as a JSON list via the *fw* callable.

    Emits ``[id, [props...], "prop_types", [children...]]``; *ident* is the
    current indentation prefix and *is_last* suppresses the trailing comma.
    """
    fbx_elem_id = fbx_elem.id.decode('utf-8')
    fw('%s["%s", ' % (ident, fbx_elem_id))
    fw('[%s], ' % fbx2json_properties_as_string(fbx_elem))
    fw('"%s", ' % (fbx_elem.props_type.decode('ascii')))

    fw('[')
    if fbx_elem.elems:
        fw('\n')
        ident_sub = ident + "    "
        for fbx_elem_sub in fbx_elem.elems:
            fbx2json_recurse(fw, fbx_elem_sub, ident_sub,
                             fbx_elem_sub is fbx_elem.elems[-1])
    fw(']')

    # Only the last sibling omits the comma, keeping the output valid JSON.
    fw(']%s' % ('' if is_last else ',\n'))
|
||||
|
||||
|
||||
def fbx2json(fn):
    """Convert the binary FBX file *fn* to ``<fn>.json`` alongside it."""
    import os

    fn_json = "%s.json" % os.path.splitext(fn)[0]
    print("Writing: %r " % fn_json, end="")
    fbx_root_elem, fbx_version = parse(fn, use_namedtuple=True)
    print("(Version %d) ..." % fbx_version)

    # xmlcharrefreplace keeps the file pure ASCII even when string
    # properties contain non-ASCII text.
    with open(fn_json, 'w', encoding="ascii", errors='xmlcharrefreplace') as f:
        fw = f.write
        fw('[\n')
        ident_sub = "    "
        for fbx_elem_sub in fbx_root_elem.elems:
            fbx2json_recurse(f.write, fbx_elem_sub, ident_sub,
                             fbx_elem_sub is fbx_root_elem.elems[-1])
        fw(']\n')
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Command Line
|
||||
|
||||
def main():
    """Convert every FBX file named on the command line to JSON.

    ``--help`` prints the module docstring.  A failure on one file is
    reported (with traceback) and conversion continues with the next file.
    """
    import sys

    if "--help" in sys.argv:
        print(__doc__)
        return

    for arg in sys.argv[1:]:
        try:
            fbx2json(arg)
        except Exception:
            # Narrowed from a bare ``except:``, which also swallowed
            # KeyboardInterrupt/SystemExit and made Ctrl-C unreliable.
            print("Failed to convert %r, error:" % arg)

            import traceback
            traceback.print_exc()
|
||||
|
||||
|
||||
# Run the converter only when executed as a script, so the file can also be
# imported for its parser without side effects.
if __name__ == "__main__":
    main()
|
1231
release/scripts/startup/io_scene_fbx_soc/fbx_utils.py
Normal file
1231
release/scripts/startup/io_scene_fbx_soc/fbx_utils.py
Normal file
File diff suppressed because it is too large
Load Diff
3123
release/scripts/startup/io_scene_fbx_soc/import_fbx_soc.py
Normal file
3123
release/scripts/startup/io_scene_fbx_soc/import_fbx_soc.py
Normal file
File diff suppressed because it is too large
Load Diff
176
release/scripts/startup/io_scene_fbx_soc/json2fbx.py
Normal file
176
release/scripts/startup/io_scene_fbx_soc/json2fbx.py
Normal file
@@ -0,0 +1,176 @@
|
||||
#!/usr/bin/env python3
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Script copyright (C) 2014 Blender Foundation
|
||||
|
||||
"""
|
||||
Usage
|
||||
=====
|
||||
|
||||
json2fbx [FILES]...
|
||||
|
||||
This script will write a binary FBX file for each JSON argument given.
|
||||
|
||||
|
||||
Input
|
||||
======
|
||||
|
||||
The JSON data is formatted into a list of nested lists of 4 items:
|
||||
|
||||
``[id, [data, ...], "data_types", [subtree, ...]]``
|
||||
|
||||
Where each list may be empty, and the items in
|
||||
the subtree are formatted the same way.
|
||||
|
||||
data_types is a string, aligned with data that specifies a type
|
||||
for each property.
|
||||
|
||||
The types are as follows:
|
||||
|
||||
* 'Y': - INT16
|
||||
* 'C': - BOOL
|
||||
* 'I': - INT32
|
||||
* 'F': - FLOAT32
|
||||
* 'D': - FLOAT64
|
||||
* 'L': - INT64
|
||||
* 'R': - BYTES
|
||||
* 'S': - STRING
|
||||
* 'f': - FLOAT32_ARRAY
|
||||
* 'i': - INT32_ARRAY
|
||||
* 'd': - FLOAT64_ARRAY
|
||||
* 'l': - INT64_ARRAY
|
||||
* 'b': - BOOL ARRAY
|
||||
* 'c': - BYTE ARRAY
|
||||
|
||||
Note that key:value pairs aren't used since the id's are not
|
||||
ensured to be unique.
|
||||
"""
|
||||
|
||||
|
||||
def elem_empty(elem, name):
    """Create an empty FBXElem called *name*, appended to *elem* (unless None)."""
    import encode_bin
    new_elem = encode_bin.FBXElem(name)
    if elem is not None:
        elem.elems.append(new_elem)
    return new_elem
|
||||
|
||||
|
||||
def parse_json_rec(fbx_root, json_node):
    """Rebuild one JSON node (and its children) as FBX elements under *fbx_root*.

    *json_node* is ``[name, [data...], "data_types", [children...]]`` as
    written by fbx2json.  Returns the FBX version if an ``FBXVersion``
    element was found in this subtree, otherwise 0.
    """
    name, data, data_types, children = json_node
    ver = 0

    assert(len(data_types) == len(data))

    e = elem_empty(fbx_root, name.encode())
    for d, dt in zip(data, data_types):
        if dt == "C":
            e.add_bool(d)
        elif dt == "Y":
            e.add_int16(d)
        elif dt == "I":
            e.add_int32(d)
        elif dt == "L":
            e.add_int64(d)
        elif dt == "F":
            e.add_float32(d)
        elif dt == "D":
            e.add_float64(d)
        elif dt == "R":
            # SECURITY: eval() on file content — only run this tool on JSON
            # you trust (fbx2json output); a crafted "R" property could
            # execute arbitrary code here.
            d = eval('b"""' + d + '"""')
            e.add_bytes(d)
        elif dt == "S":
            d = d.encode().replace(b"::", b"\x00\x01")
            e.add_string(d)
        elif dt == "i":
            e.add_int32_array(d)
        elif dt == "l":
            e.add_int64_array(d)
        elif dt == "f":
            e.add_float32_array(d)
        elif dt == "d":
            e.add_float64_array(d)
        elif dt == "b":
            # BUGFIX: 'b' is a BOOL array and 'c' a BYTE array (see the
            # module docstring and fbx2json's type table); the original had
            # the two handlers swapped, corrupting round-tripped files.
            e.add_bool_array(d)
        elif dt == "c":
            e.add_byte_array(d)

    if name == "FBXVersion":
        assert(data_types == "I")
        ver = int(data[0])

    for child in children:
        _ver = parse_json_rec(e, child)
        if _ver:
            ver = _ver

    return ver
|
||||
|
||||
|
||||
def parse_json(json_root):
    """Build the root FBXElem tree from *json_root*; return ``(root, version)``."""
    root = elem_empty(None, b"")
    ver = 0

    # The version is discovered somewhere in the tree (FBXVersion element);
    # keep the last non-zero value reported by the recursion.
    for node in json_root:
        node_ver = parse_json_rec(root, node)
        if node_ver:
            ver = node_ver

    return root, ver
|
||||
|
||||
|
||||
def json2fbx(fn):
    """Convert the JSON file *fn* (as written by fbx2json) to ``<fn>.fbx``."""
    import os
    import json

    import encode_bin

    fn_fbx = "%s.fbx" % os.path.splitext(fn)[0]
    print("Writing: %r " % fn_fbx, end="")
    json_root = []
    with open(fn) as f_json:
        json_root = json.load(f_json)
    fbx_root, fbx_version = parse_json(json_root)
    print("(Version %d) ..." % fbx_version)
    encode_bin.write(fn_fbx, fbx_root, fbx_version)
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Command Line
|
||||
|
||||
def main():
    """Convert every JSON file named on the command line back to binary FBX.

    ``--help`` prints the module docstring.  A failure on one file is
    reported (with traceback) and conversion continues with the next file.
    """
    import sys

    if "--help" in sys.argv:
        print(__doc__)
        return

    for arg in sys.argv[1:]:
        try:
            json2fbx(arg)
        except Exception:
            # Narrowed from a bare ``except:``, which also swallowed
            # KeyboardInterrupt/SystemExit and made Ctrl-C unreliable.
            print("Failed to convert %r, error:" % arg)

            import traceback
            traceback.print_exc()
|
||||
|
||||
|
||||
# Run the converter only when executed as a script, so the file can also be
# imported without side effects.
if __name__ == "__main__":
    main()
|
194
release/scripts/startup/io_scene_fbx_soc/parse_fbx.py
Normal file
194
release/scripts/startup/io_scene_fbx_soc/parse_fbx.py
Normal file
@@ -0,0 +1,194 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Script copyright (C) 2006-2012, assimp team
|
||||
# Script copyright (C) 2013 Blender Foundation
|
||||
|
||||
# Public API of this module; ``data_types`` is re-exported so callers get the
# FBX type-code constants from the same place as the parser.
__all__ = (
    "parse",
    "data_types",
    "parse_version",
    "FBXElem",
    )
|
||||
|
||||
from struct import unpack
|
||||
import array
|
||||
import zlib
|
||||
|
||||
from . import data_types
|
||||
|
||||
# at the end of each nested block, there is a NUL record to indicate
# that the sub-scope exists (i.e. to distinguish between P: and P : {})
_BLOCK_SENTINEL_LENGTH = ...  # placeholder; set by init_version() (13 or 25 bytes)
_BLOCK_SENTINEL_DATA = ...    # placeholder; set by init_version() (all-zero bytes)
read_fbx_elem_uint = ...      # placeholder; set by init_version() (read_uint or read_uint64)
_IS_BIG_ENDIAN = (__import__("sys").byteorder != 'little')
# Magic byte sequence at the start of every binary FBX file.
_HEAD_MAGIC = b'Kaydara FBX Binary\x20\x20\x00\x1a\x00'
from collections import namedtuple
# One parsed FBX record: element name, property values, property type codes,
# and the list of child elements.
FBXElem = namedtuple("FBXElem", ("id", "props", "props_type", "elems"))
del namedtuple  # keep the module namespace clean
|
||||
|
||||
|
||||
def read_uint(read):
    """Read 4 bytes from *read* as a little-endian unsigned 32-bit int."""
    buf = read(4)
    return unpack(b'<I', buf)[0]
|
||||
|
||||
|
||||
def read_uint64(read):
    """Read 8 bytes from *read* as a little-endian unsigned 64-bit int."""
    buf = read(8)
    return unpack(b'<Q', buf)[0]
|
||||
|
||||
|
||||
def read_ubyte(read):
    """Read one byte from *read* as an unsigned int (0-255)."""
    buf = read(1)
    return unpack(b'B', buf)[0]
|
||||
|
||||
|
||||
def read_string_ubyte(read):
    """Read a short string: one length byte, then that many raw bytes."""
    size = read_ubyte(read)
    return read(size)
|
||||
|
||||
|
||||
def unpack_array(read, array_type, array_stride, array_byteswap):
    """Read one FBX array property, returning an ``array.array``.

    *array_type* is the ``array`` typecode, *array_stride* the per-element
    byte size, *array_byteswap* whether the little-endian payload needs a
    swap on big-endian hosts.
    """
    # Header: element count, encoding flag, payload byte length.
    length = read_uint(read)
    encoding = read_uint(read)
    comp_len = read_uint(read)

    payload = read(comp_len)

    # An encoding of 1 means zlib-deflated; 0 means raw bytes.
    if encoding == 1:
        payload = zlib.decompress(payload)

    assert(length * array_stride == len(payload))

    result = array.array(array_type, payload)
    if array_byteswap and _IS_BIG_ENDIAN:
        result.byteswap()
    return result
|
||||
|
||||
|
||||
# Dispatch table mapping an FBX property type-code byte (e.g. ``b'Y'[0]``)
# to a reader that consumes exactly that property's payload from ``read``.
read_data_dict = {
    b'Y'[0]: lambda read: unpack(b'<h', read(2))[0],  # 16 bit int
    b'C'[0]: lambda read: unpack(b'?', read(1))[0],   # 1 bit bool (yes/no)
    b'I'[0]: lambda read: unpack(b'<i', read(4))[0],  # 32 bit int
    b'F'[0]: lambda read: unpack(b'<f', read(4))[0],  # 32 bit float
    b'D'[0]: lambda read: unpack(b'<d', read(8))[0],  # 64 bit float
    b'L'[0]: lambda read: unpack(b'<q', read(8))[0],  # 64 bit int
    b'R'[0]: lambda read: read(read_uint(read)),      # binary data
    b'S'[0]: lambda read: read(read_uint(read)),      # string data
    b'f'[0]: lambda read: unpack_array(read, data_types.ARRAY_FLOAT32, 4, False),  # array (float)
    b'i'[0]: lambda read: unpack_array(read, data_types.ARRAY_INT32, 4, True),     # array (int)
    b'd'[0]: lambda read: unpack_array(read, data_types.ARRAY_FLOAT64, 8, False),  # array (double)
    b'l'[0]: lambda read: unpack_array(read, data_types.ARRAY_INT64, 8, True),     # array (long)
    b'b'[0]: lambda read: unpack_array(read, data_types.ARRAY_BOOL, 1, False),     # array (bool)
    b'c'[0]: lambda read: unpack_array(read, data_types.ARRAY_BYTE, 1, False),     # array (ubyte)
    }
|
||||
|
||||
|
||||
# FBX 7500 (aka FBX2016) introduces incompatible changes at binary level:
|
||||
# * The NULL block marking end of nested stuff switches from 13 bytes long to 25 bytes long.
|
||||
# * The FBX element metadata (end_offset, prop_count and prop_length) switch from uint32 to uint64.
|
||||
def init_version(fbx_version):
    """Configure the module-level parsing state for *fbx_version*.

    Sets ``_BLOCK_SENTINEL_LENGTH`` / ``_BLOCK_SENTINEL_DATA`` (the all-zero
    end-of-scope record) and ``read_fbx_elem_uint`` (uint32 reader before
    FBX 7500, uint64 reader from 7500 on).
    """
    global _BLOCK_SENTINEL_LENGTH, _BLOCK_SENTINEL_DATA, read_fbx_elem_uint

    # Reset first so a later parse() of a different-version file
    # reconfigures cleanly.
    _BLOCK_SENTINEL_LENGTH = ...
    _BLOCK_SENTINEL_DATA = ...
    read_fbx_elem_uint = ...

    if fbx_version < 7500:
        _BLOCK_SENTINEL_LENGTH = 13
        read_fbx_elem_uint = read_uint
    else:
        _BLOCK_SENTINEL_LENGTH = 25
        read_fbx_elem_uint = read_uint64
    _BLOCK_SENTINEL_DATA = (b'\0' * _BLOCK_SENTINEL_LENGTH)
|
||||
|
||||
|
||||
def read_elem(read, tell, use_namedtuple):
    """Read one FBX element (and, recursively, its subtree).

    Returns an ``FBXElem`` (or a plain 4-tuple when *use_namedtuple* is
    False), or None when the NUL end-of-scope record is reached.
    Raises IOError if the stream is inconsistent with the declared sizes.
    """
    # Record header (uint32 or uint64 depending on FBX version):
    # [0] the offset at which this block ends
    # [1] the number of properties in the scope
    # [2] the length of the property list
    end_offset = read_fbx_elem_uint(read)
    if end_offset == 0:
        # Zero end-offset marks the NUL sentinel record closing the scope.
        return None

    prop_count = read_fbx_elem_uint(read)
    prop_length = read_fbx_elem_uint(read)  # total property byte size (not used here)

    elem_id = read_string_ubyte(read)        # elem name of the scope/key
    elem_props_type = bytearray(prop_count)  # elem property types
    elem_props_data = [None] * prop_count    # elem properties (if any)
    elem_subtree = []                        # elem children (if any)

    # Each property is a one-byte type code followed by its payload.
    for i in range(prop_count):
        data_type = read(1)[0]
        elem_props_data[i] = read_data_dict[data_type](read)
        elem_props_type[i] = data_type

    # Anything left before end_offset is a nested scope plus its sentinel.
    if tell() < end_offset:
        while tell() < (end_offset - _BLOCK_SENTINEL_LENGTH):
            elem_subtree.append(read_elem(read, tell, use_namedtuple))

        if read(_BLOCK_SENTINEL_LENGTH) != _BLOCK_SENTINEL_DATA:
            raise IOError("failed to read nested block sentinel, "
                          "expected all bytes to be 0")

    if tell() != end_offset:
        raise IOError("scope length not reached, something is wrong")

    args = (elem_id, elem_props_data, elem_props_type, elem_subtree)
    return FBXElem(*args) if use_namedtuple else args
|
||||
|
||||
|
||||
def parse_version(fn):
    """Return the FBX version of *fn*, or 0 when it is not a binary FBX file."""
    with open(fn, 'rb') as f:
        read = f.read
        # The version uint32 directly follows the magic bytes.
        if read(len(_HEAD_MAGIC)) != _HEAD_MAGIC:
            return 0
        return read_uint(read)
|
||||
|
||||
|
||||
def parse(fn, use_namedtuple=True):
    """Parse the binary FBX file *fn*.

    Returns ``(root_elem, fbx_version)`` where *root_elem* is a synthetic
    root whose ``elems`` are the file's top-level elements.
    Raises IOError if the file lacks the binary-FBX magic header.
    """
    root_elems = []

    with open(fn, 'rb') as f:
        read = f.read
        tell = f.tell

        if read(len(_HEAD_MAGIC)) != _HEAD_MAGIC:
            raise IOError("Invalid header")

        # The version selects 32 vs 64 bit record headers and the sentinel
        # size; init_version() configures the module globals accordingly.
        fbx_version = read_uint(read)
        init_version(fbx_version)

        # Top-level scope: elements follow each other until the NUL record.
        while True:
            elem = read_elem(read, tell, use_namedtuple)
            if elem is None:
                break
            root_elems.append(elem)

    args = (b'', [], bytearray(0), root_elems)
    return FBXElem(*args) if use_namedtuple else args, fbx_version
|
@@ -659,6 +659,7 @@ static void rna_MeshLoopColor_color_get(PointerRNA *ptr, float *values)
|
||||
values[0] = (&mcol->r)[0] / 255.0f;
|
||||
values[1] = (&mcol->r)[1] / 255.0f;
|
||||
values[2] = (&mcol->r)[2] / 255.0f;
|
||||
values[3] = (&mcol->r)[3] / 255.0f;
|
||||
}
|
||||
|
||||
static void rna_MeshLoopColor_color_set(PointerRNA *ptr, const float *values)
|
||||
@@ -668,6 +669,7 @@ static void rna_MeshLoopColor_color_set(PointerRNA *ptr, const float *values)
|
||||
(&mcol->r)[0] = (char)(CLAMPIS(values[0] * 255.0f, 0, 255));
|
||||
(&mcol->r)[1] = (char)(CLAMPIS(values[1] * 255.0f, 0, 255));
|
||||
(&mcol->r)[2] = (char)(CLAMPIS(values[2] * 255.0f, 0, 255));
|
||||
(&mcol->r)[3] = (char)(CLAMPIS(values[3] * 255.0f, 0, 255));
|
||||
}
|
||||
|
||||
static int rna_Mesh_texspace_editable(PointerRNA *ptr, const char **UNUSED(r_info))
|
||||
@@ -2582,7 +2584,7 @@ static void rna_def_mloopcol(BlenderRNA *brna)
|
||||
RNA_def_struct_path_func(srna, "rna_MeshColor_path");
|
||||
|
||||
prop = RNA_def_property(srna, "color", PROP_FLOAT, PROP_COLOR);
|
||||
RNA_def_property_array(prop, 3);
|
||||
RNA_def_property_array(prop, 4);
|
||||
RNA_def_property_range(prop, 0.0f, 1.0f);
|
||||
RNA_def_property_float_funcs(prop, "rna_MeshLoopColor_color_get", "rna_MeshLoopColor_color_set", NULL);
|
||||
RNA_def_property_ui_text(prop, "Color", "");
|
||||
|
Reference in New Issue
Block a user