test local replay of edits, needed for local cache

2015-01-23 20:30:19 +11:00
parent 27a0f7471b
commit 98f67c5d7f
5 changed files with 152 additions and 10 deletions

View File

@@ -350,12 +350,21 @@ class BlendFileBlock:
assert(type(dna_type_id) is bytes)
self.refine_type_from_index(self.file.sdna_index_from_id[dna_type_id])
def get(self, path,
def get_file_offset(self, path,
default=...,
sdna_index_refine=None,
use_nil=True, use_str=True,
base_index=0,
):
"""
Return (offset, length)
"""
assert(type(path) is bytes)
ofs = self.file_offset
if base_index != 0:
assert(base_index < self.count)
ofs += (self.size // self.count) * base_index
self.file.handle.seek(ofs, os.SEEK_SET)
if sdna_index_refine is None:
sdna_index_refine = self.sdna_index
@@ -363,13 +372,30 @@ class BlendFileBlock:
self.file.ensure_subtype_smaller(self.sdna_index, sdna_index_refine)
dna_struct = self.file.structs[sdna_index_refine]
ofs = self.file_offset
field = dna_struct.field_from_path(
self.file.header, self.file.handle, path)
return (self.file.handle.tell(), field.dna_name.array_size)
def get(self, path,
default=...,
sdna_index_refine=None,
use_nil=True, use_str=True,
base_index=0,
):
ofs = self.file_offset
if base_index != 0:
assert(base_index < self.count)
ofs += (self.size // self.count) * base_index
self.file.handle.seek(ofs, os.SEEK_SET)
if sdna_index_refine is None:
sdna_index_refine = self.sdna_index
else:
self.file.ensure_subtype_smaller(self.sdna_index, sdna_index_refine)
dna_struct = self.file.structs[sdna_index_refine]
return dna_struct.field_get(
self.file.header, self.file.handle, path,
default=default,
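For context on the hunk above: get_file_offset() is the piece that makes replayable edits possible, since a caller can ask where a field's bytes live in the .blend file (and how long the field is) instead of writing a value through get(). A minimal sketch of collecting one edit this way, not part of this commit; it assumes this module's open_blend()/close() API, and the file and library paths are hypothetical:

    # collect a replayable (offset, data) edit for a library path
    from bam.blend import blendfile

    edits = []
    blend = blendfile.open_blend("example.blend")  # hypothetical file
    try:
        for block in blend.blocks:
            if block.code == b'LI':  # linked-library block, holds a 'name' path
                ofs, length = block.get_file_offset(b'name')
                new_path = b'//remapped/library.blend'  # hypothetical remap
                # clip to the field size, keeping room for the trailing \0
                edits.append((ofs, new_path[:length - 1] + b'\0'))
    finally:
        blend.close()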

View File

@@ -122,6 +122,13 @@ def pack(
# Read variations from json files.
use_variations=True,
# do _everything_ except write the paths.
# useful if we want to calculate deps to remap but postpone applying them.
readonly=False,
# dict of binary_edits:
# {file: [(ofs, bytes), ...], ...}
binary_edits=None,
):
"""
:param deps_remap: Store path deps_remap info as follows.
@@ -135,7 +142,7 @@ def pack(
# so we can modify in-place.
# - temp files are only created once (if we never touched them before),
# this way, for linked libraries - a single blend file may be used
# multiple times, each access will apply new edits ontop of the old ones.
# multiple times, each access will apply new edits on top of the old ones.
# - we track which libs we have touched (using 'lib_visit' arg),
# this means that the same libs won't be touched many times to modify the same data,
# it also prevents cyclic loops from crashing.
@@ -275,7 +282,7 @@ def pack(
for fp, (rootdir, fp_blend_basename) in blendfile_path_walker.FilePath.visit_from_blend(
blendfile_src,
readonly=False,
readonly=readonly,
temp_remap_cb=temp_remap_cb,
recursive=True,
recursive_all=all_deps,
@@ -293,6 +300,14 @@ def pack(
yield report(" %s: %s\n" % (colorize("blend", color='blue'), fp_blend))
fp_blend_basename_last = fp_blend_basename
if binary_edits is not None:
# TODO, temp_remap_cb makes paths, this isn't ideal,
# in this case we only want to remap!
tmp = temp_remap_cb(fp_blend, base_dir_src)
tmp = os.path.relpath(tmp[:-1], base_dir_dst_temp)
binary_edits_curr = binary_edits.setdefault(tmp, [])
del tmp
# assume the path might be relative
path_src_orig = fp.filepath
path_rel = blendfile_path_walker.utils.compatpath(path_src_orig)
@@ -323,7 +338,13 @@ def pack(
path_dst = os.path.join(base_dir_dst, path_dst)
path_dst_final = b'//' + path_dst_final
# Assign directly, or add to the edit-list (to apply later)
if not readonly:
fp.filepath = path_dst_final
if binary_edits is not None:
fp.filepath_assign_edits(path_dst_final, binary_edits_curr)
# add to copy-list
# never copy libs (handled separately)
if not isinstance(fp, blendfile_path_walker.FPElem_block_path) or fp.userdata[0].code != b'LI':
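Taken together, readonly=True plus a binary_edits dict let pack() walk all dependencies and compute remapped paths without rewriting the temp .blend files, accumulating the writes for later replay instead. A sketch of the structure it fills, matching the {file: [(ofs, bytes), ...], ...} comment in the hunk above; keys are bytes paths relative to the temp pack dir, and every concrete offset and path here is hypothetical:

    binary_edits = {
        b'sh01.blend': [
            (0x1A40, b'//maps/rock_diffuse.png\x00'),
            (0x2C88, b'//libs/chars.blend\x00'),
        ],
        b'libs/chars.blend': [],  # visited, but nothing needed remapping
    }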

View File

@@ -106,6 +106,24 @@ class FPElem:
else:
return utils.compatpath(filepath)
def filepath_assign_edits(self, filepath, binary_edits):
self._set_cb_edits(filepath, binary_edits)
@staticmethod
def _filepath_assign_edits(block, path, filepath, binary_edits):
"""
Record the write as a separate entry (in a binary edit list);
this lets us replay the edits later
(so we can replay them onto the client's local cache without a file transfer).
"""
import struct
assert(type(filepath) is bytes)
assert(type(path) is bytes)
ofs, size = block.get_file_offset(path)
# ensure we don't write past the field size & allow for \0
filepath = filepath[:size - 1]
binary_edits.append((ofs, filepath + b'\0'))
@property
def filepath(self):
return self._get_cb()
@@ -134,11 +152,15 @@ class FPElem_block_path(FPElem):
block, path = self.userdata
block[path] = filepath
def _set_cb_edits(self, filepath, binary_edits):
block, path = self.userdata
self._filepath_assign_edits(block, path, filepath, binary_edits)
class FPElem_sequence_single(FPElem):
"""
Movie sequence
userdata = (block, path)
userdata = (block, path, sub_block, sub_path)
"""
__slots__ = ()
@@ -148,16 +170,23 @@ class FPElem_sequence_single(FPElem):
def _set_cb(self, filepath):
block, path, sub_block, sub_path = self.userdata
head, sep, tail = utils.splitpath(filepath)
block[path] = head + sep
sub_block[sub_path] = tail
def _set_cb_edits(self, filepath, binary_edits):
block, path, sub_block, sub_path = self.userdata
head, sep, tail = utils.splitpath(filepath)
self._filepath_assign_edits(block, path, head + sep, binary_edits)
self._filepath_assign_edits(sub_block, sub_path, tail, binary_edits)
class FPElem_sequence_image_seq(FPElem_sequence_single):
"""
Image sequence
userdata = (block, path)
userdata = (block, path, sub_block, sub_path)
"""
__slots__ = ()
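The truncate-and-terminate rule in _filepath_assign_edits() above is worth spelling out, since it is what keeps a replayed write from spilling past its DNA field. A standalone sketch of just that rule (not part of this commit; offset and field size are hypothetical):

    def make_edit(ofs, size, filepath):
        assert type(filepath) is bytes
        filepath = filepath[:size - 1]  # leave room for the \0 terminator
        return (ofs, filepath + b'\0')

    edit = make_edit(4096, 16, b'//very/long/path/to/texture.png')
    assert edit == (4096, b'//very/long/pat\x00')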

View File

@@ -454,6 +454,62 @@ class bam_commands:
sys.stdout.write("\nwritten: %r\n" % session_rootdir)
# -------------------
# replay binary edits
#
# We've downloaded the files pristine from their repo.
# This means we can use the local cache and avoid re-downloading.
#
# But for the files to work locally, we have to apply the binary edits given to us by the server.
sys.stdout.write("replaying edits...\n")
with open(os.path.join(session_rootdir, ".bam_paths_edit.data"), 'rb') as fh:
import pickle
binary_edits_all = pickle.load(fh)
paths_uuid_update = {}
for blendfile, binary_edits in binary_edits_all.items():
if binary_edits:
sys.stdout.write(" operating on: %r\n" % blendfile)
sys.stdout.flush()
blendfile_abs = os.path.join(session_rootdir, blendfile.decode('utf-8'))
# we don't want to read, just edit what's there.
with open(blendfile_abs, 'rb+') as fh_blend:
for ofs, data in binary_edits:
# sys.stdout.write("\n%r\n" % data)
sys.stdout.flush()
# ensure we're writing to the correct location.
# fh_blend.seek(ofs)
# sys.stdout.write(repr(b'existing data: ' + fh_blend.read(len(data) + 1)))
fh_blend.seek(ofs)
fh_blend.write(data)
sys.stdout.write("\n")
sys.stdout.flush()
# update hash!
# we could do this later, but the file is fresh in the cache, so do it now
from bam.utils.system import uuid_from_file
paths_uuid_update[blendfile.decode('utf-8')] = uuid_from_file(blendfile_abs)
del uuid_from_file
del pickle
del binary_edits_all
if paths_uuid_update:
# freshen the UUIDs based on the replayed binary_edits
from bam.utils.system import write_json_to_file
paths_uuid = bam_session.load_paths_uuid(session_rootdir)
assert(set(paths_uuid_update.keys()).issubset(set(paths_uuid.keys())))
paths_uuid.update(paths_uuid_update)
write_json_to_file(os.path.join(session_rootdir, ".bam_paths_uuid.json"), paths_uuid)
del write_json_to_file
del paths_uuid
del paths_uuid_update
# TODO: we will need to keep these later
os.remove(os.path.join(session_rootdir, ".bam_paths_edit.data"))
# done with binary edits
# ----------------------
@staticmethod
def update(paths):
# Load project configuration
@@ -874,6 +930,7 @@ class bam_commands:
with open(filepath_remap, 'wb') as fh:
import pickle
pickle.dump(remap_data, fh, pickle.HIGHEST_PROTOCOL)
del pickle
@staticmethod
def remap_finish(
@@ -896,6 +953,7 @@ class bam_commands:
with open(filepath_remap, 'rb') as fh:
import pickle
remap_data = pickle.load(fh)
del pickle
from bam.blend import blendfile_path_remap
blendfile_path_remap.finish(
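The checkout code above is the consumer side of the edit lists: the client keeps the pristine downloads (so they stay cache-friendly) and patches each .blend in place. Condensed to its essentials, and with a hypothetical session_rootdir, the replay step is:

    import os
    import pickle

    session_rootdir = "/tmp/session"  # hypothetical

    with open(os.path.join(session_rootdir, ".bam_paths_edit.data"), 'rb') as fh:
        binary_edits_all = pickle.load(fh)

    for blendfile, binary_edits in binary_edits_all.items():
        blendfile_abs = os.path.join(session_rootdir, blendfile.decode('utf-8'))
        # 'rb+' patches bytes in place without truncating the file
        with open(blendfile_abs, 'rb+') as fh_blend:
            for ofs, data in binary_edits:
                fh_blend.seek(ofs)
                fh_blend.write(data)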

View File

@@ -394,6 +394,8 @@ class FileAPI(Resource):
paths_remap = {}
paths_uuid = {}
binary_edits = {}
if filepath.endswith(".blend"):
# find the path relative to the project's root
@@ -407,6 +409,8 @@ class FileAPI(Resource):
all_deps=all_deps,
report=report,
blendfile_src_dir_fakeroot=blendfile_src_dir_fakeroot.encode('utf-8'),
readonly=True,
binary_edits=binary_edits,
)
except:
log.exception("Error packing the blend file")
@@ -453,7 +457,11 @@ class FileAPI(Resource):
write_dict_as_json(".bam_paths_remap.json", paths_remap)
write_dict_as_json(".bam_paths_uuid.json", paths_uuid)
import pickle
zip_handle.writestr(".bam_paths_edit.data", pickle.dumps(binary_edits, pickle.HIGHEST_PROTOCOL))
del write_dict_as_json
del binary_edits
# done writing json!
@staticmethod
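To close the loop: the server pickles the edit map straight into the checkout zip (as above), and the client unpickles it before replaying. A self-contained sketch of that round trip using only the stdlib; 'checkout.zip' and the edit values are hypothetical:

    import pickle
    import zipfile

    binary_edits = {b'sh01.blend': [(0x1A40, b'//libs/chars.blend\x00')]}

    # server side: embed the pickled edits in the zip it streams back
    with zipfile.ZipFile("checkout.zip", 'w') as zip_handle:
        zip_handle.writestr(".bam_paths_edit.data",
                            pickle.dumps(binary_edits, pickle.HIGHEST_PROTOCOL))

    # client side: read the entry back out and load it before replaying
    with zipfile.ZipFile("checkout.zip") as zip_handle:
        with zip_handle.open(".bam_paths_edit.data") as fh:
            assert pickle.load(fh) == binary_edits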