#!/usr/bin/env python3

# ***** BEGIN GPL LICENSE BLOCK *****
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# ***** END GPL LICENCE BLOCK *****

VERBOSE = False
TIMEIT = True


class FilePath:
    """
    Tiny filepath class to hide blendfile.
    """
    __slots__ = (
        "block",
        "path",
        # path may be relative to basepath
        "basedir",
        # library link level
        "level",
        )

    def __init__(self, block, path, basedir, level):
        self.block = block
        self.path = path
        self.basedir = basedir
        self.level = level

    # --------
    # filepath
    #
    @property
    def filepath(self):
        return self.block[self.path]

    @filepath.setter
    def filepath(self, filepath):
        self.block[self.path] = filepath

    # ------------------------------------------------------------------------
    # Main function to visit paths
    @staticmethod
    def visit_from_blend(
            filepath,

            # never modify the blend
            readonly=True,
            # callback that creates a temp file and returns its path.
            temp_remap_cb=None,

            # recursive options
            recursive=False,
            # list of ID block names we want to load, or None to load all
            block_codes=None,
            # root when we're loading libs indirectly
            rootdir=None,
            level=0,
            # dict of id's used so we don't follow these links again
            # prevents cyclic references too!
            # {lib_path: set([block id's ...])}
            lib_visit=None,
            ):

        import os

        if VERBOSE:
            indent_str = " " * level
            print(indent_str + "Opening:", filepath)
            print(indent_str + "... blocks:", block_codes)

        basedir = os.path.dirname(os.path.abspath(filepath))
        if rootdir is None:
            rootdir = basedir

        if recursive and (level > 0) and (block_codes is not None):
            # prevent expanding the same datablock more than once
            expand_codes = set()

            def expand_codes_add_test(block):
                # we could investigate a better way...
                # we shouldn't be accessing ID blocks at this point, but it's harmless
                if block.code == b'ID':
                    return False
                len_prev = len(expand_codes)
                expand_codes.add(block[b'id.name'])
                return (len_prev != len(expand_codes))

            def block_expand(block, code):
                if expand_codes_add_test(block):
                    yield block

                    fn = ExpandID.expand_funcs.get(code)
                    if fn is not None:
                        for sub_block in fn(block):
                            if sub_block is not None:
                                yield from block_expand(sub_block, sub_block.code)
                else:
                    yield block
        else:
            expand_codes = None

            def block_expand(block, code):
                yield block

        if block_codes is None:
            def iter_blocks_id(code):
                return blend.find_blocks_from_code(code)
        else:
            def iter_blocks_id(code):
                for block in blend.find_blocks_from_code(code):
                    if block[b'id.name'] in block_codes:
                        yield from block_expand(block, code)

        if expand_codes is None:
            iter_blocks_lib = lambda: blend.find_blocks_from_code(b'ID')
        else:
            iter_blocks_lib = lambda: (block
                                       for block in blend.find_blocks_from_code(b'ID')
                                       if block[b'name'] in expand_codes)

        if temp_remap_cb is not None:
            filepath_tmp = temp_remap_cb(filepath, level)
        else:
            filepath_tmp = filepath

        import blendfile
        blend = blendfile.open_blend(filepath_tmp, "rb" if readonly else "r+b")

        for code in blend.code_index.keys():
            # handle library blocks as a special case
            if ((len(code) != 2) or
                (code in {
                    # libraries handled below
                    b'LI',
                    b'ID',
                    # unneeded
                    b'WM',
                    b'SN',  # bScreen
                    })):
                continue

            if VERBOSE:
                print(" Scanning", code)

            for block in iter_blocks_id(code):
                yield from FilePath.from_block(block, basedir, rootdir, level)

        if recursive:
            # look into libraries
            lib_all = {}

            for block in iter_blocks_lib():
                lib_id = block[b'lib']
                lib = blend.find_block_from_offset(lib_id)
                lib_path = lib[b'name']

                # import IPython; IPython.embed()

                # get all data needed to read the blend files here (it will be freed!)
                # lib is an address at the moment, we only use it as a way to group
                lib_all.setdefault(lib_path, set()).add(block[b'name'])

            # do this after, in case we mangle names above
            for block in iter_blocks_id(b'LI'):
                yield from FilePath.from_block(block, basedir, rootdir, level)

        blend.close()

        # ----------------
        # Handle Recursive
        if recursive:
            # now we've closed the file, loop over the other files
            for lib_path, lib_block_codes in lib_all.items():
                lib_path_abs = utils.compatpath(utils.abspath(lib_path, basedir))

                # if we visited this before,
                # check we don't follow the same links more than once
                lib_block_codes_existing = lib_visit.setdefault(lib_path_abs, set())
                lib_block_codes -= lib_block_codes_existing
                # don't touch them again
                lib_block_codes_existing.update(lib_block_codes)

                # import IPython; IPython.embed()
                if VERBOSE:
                    print((indent_str + " "), "Library: ", filepath, " -> ", lib_path_abs, sep="")
                    print((indent_str + " "), lib_block_codes)

                yield from FilePath.visit_from_blend(
                        lib_path_abs,
                        readonly=readonly,
                        temp_remap_cb=temp_remap_cb,
                        recursive=True,
                        block_codes=lib_block_codes,
                        rootdir=rootdir,
                        level=level + 1,
                        )
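
    # A minimal usage sketch (hypothetical path; assumes the 'blendfile'
    # module imported above is available):
    #
    #   for fp, rootdir in FilePath.visit_from_blend(b'/tmp/scene.blend'):
    #       print(fp.filepath)
    #
    # Each yielded item is a (FilePath, rootdir) pair; with readonly=False,
    # assigning to fp.filepath rewrites the path stored in the opened blend
    # (or in its temp copy when temp_remap_cb is used).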

    # ------------------------------------------------------------------------
    # Direct filepaths from Blocks
    #
    # (no expanding or following references)

    @staticmethod
    def from_block(block, basedir, rootdir, level):
        assert(block.code != b'DATA')
        fn = FilePath._from_block_dict.get(block.code)
        if fn is not None:
            yield from fn(block, basedir, rootdir, level)

    def _from_block_IM(block, basedir, rootdir, level):
        # (IMA_SRC_FILE, IMA_SRC_SEQUENCE, IMA_SRC_MOVIE)
        if block[b'source'] not in {1, 2, 3}:
            return
        if block[b'packedfile']:
            return

        yield FilePath(block, b'name', basedir, level), rootdir

    def _from_block_LI(block, basedir, rootdir, level):
        if block[b'packedfile']:
            return

        yield FilePath(block, b'name', basedir, level), rootdir

    _from_block_dict = {
        b'IM': _from_block_IM,
        b'LI': _from_block_LI,
        }


class bf_utils:
    @staticmethod
    def iter_ListBase(block):
        while block:
            yield block
            block = block.file.find_block_from_offset(block[b'next'])

    @staticmethod
    def iter_array(block, length=-1):
        assert(block.code == b'DATA')
        import blendfile
        import os
        handle = block.file.handle
        header = block.file.header

        for i in range(length):
            block.file.handle.seek(block.file_offset + (header.pointer_size * i), os.SEEK_SET)
            offset = blendfile.DNA_IO.read_pointer(handle, header)
            sub_block = block.file.find_block_from_offset(offset)
            yield sub_block


# -----------------------------------------------------------------------------
# ID Expand

class ExpandID:
    # fake module
    #
    # TODO:
    #
    # Array lookups here are _WAY_ too complicated,
    # we need some nicer way to represent pointer indirection (easy like in C!)
    # but for now, use what we have.
    #
    __slots__ = ()

    def __new__(cls, *args, **kwargs):
        raise RuntimeError("%s should not be instantiated" % cls)

    @staticmethod
    def _expand_generic_material(block):
        array_len = block.get(b'totcol')
        if array_len != 0:
            array = block.get_pointer(b'mat')
            for sub_block in bf_utils.iter_array(array, array_len):
                yield sub_block

    @staticmethod
    def _expand_generic_mtex(block):
        field = block.dna_type.field_from_name[b'mtex']
        array_len = field.dna_size // block.file.header.pointer_size

        for i in range(array_len):
            path = ('mtex[%d]' % i).encode('ascii')
            item = block.get_pointer(path)
            if item:
                tex = item.get_pointer(b'tex')
                yield tex

    @staticmethod
    def expand_OB(block):
        yield block.get_pointer(b'data')

    @staticmethod
    def expand_ME(block):
        yield from ExpandID._expand_generic_material(block)

    @staticmethod
    def expand_CU(block):
        yield from ExpandID._expand_generic_material(block)

    @staticmethod
    def expand_MB(block):
        yield from ExpandID._expand_generic_material(block)

    @staticmethod
    def expand_MA(block):
        yield from ExpandID._expand_generic_mtex(block)

    @staticmethod
    def expand_TE(block):
        yield block.get_pointer(b'ima')

    @staticmethod
    def expand_GR(block):
        sdna_index_GroupObject = block.file.sdna_index_from_id[b'GroupObject']
        for gobj in bf_utils.iter_ListBase(block.get_pointer(b'gobject.first')):
            yield gobj.get_pointer(b'ob', sdna_index_refine=sdna_index_GroupObject)

    expand_funcs = {
        b'OB': expand_OB.__func__,
        b'ME': expand_ME.__func__,
        b'CU': expand_CU.__func__,
        b'MB': expand_MB.__func__,
        b'MA': expand_MA.__func__,
        b'TE': expand_TE.__func__,
        b'GR': expand_GR.__func__,
        }
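
    # These expanders are used indirectly, via block_expand() inside
    # FilePath.visit_from_blend(): each one yields the datablocks an ID
    # references, so a recursive visit only follows the blocks actually
    # used by the requested IDs into linked libraries.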


# -----------------------------------------------------------------------------
# Packing Utility


class utils:
    # fake module
    __slots__ = ()

    def __new__(cls, *args, **kwargs):
        raise RuntimeError("%s should not be instantiated" % cls)

    @staticmethod
    def abspath(path, start, library=None):
        import os
        if path.startswith(b'//'):
            # if library:
            #     start = os.path.dirname(abspath(library.filepath))
            return os.path.join(start, path[2:])
        return path

    if __import__("os").sep == '/':
        @staticmethod
        def compatpath(path):
            return path.replace(b'\\', b'/')
    else:
        @staticmethod
        def compatpath(path):
            # keep '//'
            return path[:2] + path[2:].replace(b'/', b'\\')
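
    # Illustrative example (hypothetical paths): a blend-relative path such
    # as b'//textures/wood.png', resolved against b'/project/scenes', becomes
    # b'/project/scenes/textures/wood.png':
    #
    #   utils.abspath(b'//textures/wood.png', b'/project/scenes')
    #
    # Absolute paths are returned unchanged; compatpath() only normalizes the
    # slash style for the host OS, keeping a leading '//' intact.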


def pack(blendfile_src, blendfile_dst):

    # Internal details:
    # - we copy to a temp path before operating on the blend file,
    #   so we can modify it in-place.
    # - temp files are only created once (if we never touched them before);
    #   this way, for linked libraries, a single blend file may be used
    #   multiple times and each access applies new edits on top of the old ones.
    # - we track which libs we have touched (using the 'lib_visit' arg),
    #   which means the same libs won't be touched many times to modify the same data,
    #   and also prevents cyclic loops from crashing.
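    #
    # For instance (hypothetical paths): packing b'/src/scene.blend' to
    # b'/dst/scene.blend' first writes a temp copy b'/dst/scene.blend@'
    # (note the trailing '@'), rewrites the paths inside that copy, and only
    # renames it to its final name after every library has been visited.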

    import os
    import shutil

    path_temp_files = set()
    path_copy_files = set()

    SUBDIR = b'data'

    if TIMEIT:
        import time
        t = time.time()

    def temp_remap_cb(filepath, level):
        """
        Create temp files in the destination path.
        """
        filepath = utils.compatpath(filepath)

        if level == 0:
            filepath_tmp = os.path.join(base_dir_dst, os.path.basename(filepath)) + b'@'
        else:
            filepath_tmp = os.path.join(base_dir_dst, SUBDIR, os.path.basename(filepath)) + b'@'

        # only overwrite once (so repeated visits keep editing the same copy)
        if filepath_tmp not in path_temp_files:
            shutil.copy(filepath, filepath_tmp)
            path_temp_files.add(filepath_tmp)
        return filepath_tmp

    base_dir_src = os.path.dirname(blendfile_src)
    base_dir_dst = os.path.dirname(blendfile_dst)

    base_dir_dst_subdir = os.path.join(base_dir_dst, SUBDIR)
    if not os.path.exists(base_dir_dst_subdir):
        os.makedirs(base_dir_dst_subdir)

    lib_visit = {}

    for fp, rootdir in FilePath.visit_from_blend(
            blendfile_src,
            readonly=False,
            temp_remap_cb=temp_remap_cb,
            recursive=True,
            lib_visit=lib_visit):

        # assume the path might be relative
        path_rel = utils.compatpath(fp.filepath)
        path_base = path_rel.split(os.sep.encode('ascii'))[-1]
        path_src = utils.abspath(path_rel, fp.basedir)

        # rename in the blend
        path_dst = os.path.join(base_dir_dst_subdir, path_base)
        if fp.level == 0:
            fp.filepath = b"//" + os.path.join(SUBDIR, path_base)
        else:
            fp.filepath = b'//' + path_base

        # add to copylist
        path_copy_files.add((path_src, path_dst))

    del lib_visit

    # handle the main blend file itself
    blendfile_dst_tmp = temp_remap_cb(blendfile_src, 0)
    shutil.move(blendfile_dst_tmp, blendfile_dst)
    path_temp_files.remove(blendfile_dst_tmp)

    for fn in path_temp_files:
        # strip '@'
        shutil.move(fn, fn[:-1])

    for src, dst in path_copy_files:
        if not os.path.exists(src):
            print(" Source missing! %r" % src)
        else:
            print(" Copying %r -> %r" % (src, dst))
            shutil.copy(src, dst)

    print(" Written:", blendfile_dst)

    if TIMEIT:
        print(" Time: %.4f" % (time.time() - t))


def create_argparse():
    import os
    import argparse

    usage_text = (
        "Run this script to pack blend-file(s) to a destination path: " +
        os.path.basename(__file__) +
        " --input=FILE --output=FILE [options]")

    parser = argparse.ArgumentParser(description=usage_text)

    parser.add_argument(
            "-i", "--input", dest="path_src", metavar='FILE', required=True,
            help="Input path(s) or a wildcard to glob many files")
    parser.add_argument(
            "-o", "--output", dest="path_dst", metavar='FILE', required=True,
            help="Output file or a directory when multiple inputs are passed")

    return parser


def main():
    import sys

    parser = create_argparse()
    args = parser.parse_args(sys.argv[1:])

    encoding = sys.getfilesystemencoding()

    pack(args.path_src.encode(encoding),
         args.path_dst.encode(encoding))
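
# Example invocation (script name and paths are hypothetical):
#
#   ./packer.py --input=/path/to/scene.blend --output=/tmp/packed/scene.blend
#
# The packed copy is written to the --output path, with every external
# dependency copied into a 'data/' subdirectory beside it and the paths
# inside the blend rewritten to point at those copies.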


if __name__ == "__main__":
    main()