add test for path remapping

This commit is contained in:
2014-12-13 14:24:40 +01:00
parent 2dd2fd9c8d
commit eb1a43df10
3 changed files with 182 additions and 37 deletions

View File

@@ -816,6 +816,7 @@ class bam_commands:
@staticmethod
def remap_start(
paths,
use_json=False,
):
filepath_remap = "bam_remap.data"
@@ -831,6 +832,7 @@ class bam_commands:
import blendfile_path_remap
remap_data = blendfile_path_remap.start(
paths,
use_json=use_json,
)
with open(filepath_remap, 'wb') as fh:
@@ -842,6 +844,7 @@ class bam_commands:
paths,
force_relative=False,
dry_run=False,
use_json=False,
):
filepath_remap = "bam_remap.data"
@@ -864,6 +867,7 @@ class bam_commands:
paths, remap_data,
force_relative=force_relative,
dry_run=dry_run,
use_json=use_json,
)
if not dry_run:
@@ -871,6 +875,7 @@ class bam_commands:
@staticmethod
def remap_reset(
use_json=False,
):
filepath_remap = "bam_remap.data"
if os.path.exists(filepath_remap):
@@ -1030,7 +1035,8 @@ def create_argparse_list(subparsers):
func=lambda args:
bam_commands.list_dir(
args.paths or ["."],
use_json=args.json),
use_json=args.json,
),
)
@@ -1078,10 +1084,13 @@ def create_argparse_remap(subparsers):
dest="paths", nargs="*",
help="Path(s) to operate on",
)
init_argparse_common(sub_subparse, use_json=True)
sub_subparse.set_defaults(
func=lambda args:
bam_commands.remap_start(
args.paths or ["."],
use_json=args.json,
),
)
@@ -1101,12 +1110,15 @@ def create_argparse_remap(subparsers):
"-d", "--dry-run", dest="dry_run", action='store_true',
help="Just print output as if the paths are being run",
)
init_argparse_common(sub_subparse, use_json=True)
sub_subparse.set_defaults(
func=lambda args:
bam_commands.remap_finish(
args.paths or ["."],
force_relative=args.force_relative,
dry_run=args.dry_run,
use_json=args.json,
),
)
@@ -1114,9 +1126,13 @@ def create_argparse_remap(subparsers):
"reset",
help="Cancel path remapping",
)
init_argparse_common(sub_subparse, use_json=True)
sub_subparse.set_defaults(
func=lambda args:
bam_commands.remap_reset(),
bam_commands.remap_reset(
use_json=args.json,
),
)

View File

@@ -32,10 +32,23 @@ def _is_blend(f):
return f.lower().endswith(b'.blend')
def _warn(msg):
def _warn__ascii(msg):
print(" warning: %s" % msg)
def _info__ascii(msg):
print(msg)
def _warn__json(msg):
import json
print(json.dumps(("warning", msg)), end=",\n")
def _info__json(msg):
import json
print(json.dumps(("info", msg)), end=",\n")
def _uuid_from_file(fn, block_size=1 << 20):
with open(fn, 'rb') as f:
# first get the size
@@ -55,14 +68,16 @@ def _uuid_from_file(fn, block_size=1 << 20):
def _iter_files(paths, check_ext=None):
# note, sorting isn't needed
# just gives predictable output
for p in paths:
p = os.path.abspath(p)
for dirpath, dirnames, filenames in os.walk(p):
for dirpath, dirnames, filenames in sorted(os.walk(p)):
# skip '.svn'
if dirpath.startswith(b'.') and dirpath != b'.':
continue
for filename in filenames:
for filename in sorted(filenames):
if check_ext is None or check_ext(filename):
filepath = os.path.join(dirpath, filename)
yield filepath
@@ -75,7 +90,19 @@ def start(
paths,
is_quiet=False,
dry_run=False,
use_json=False,
):
if use_json:
warn = _warn__json
info = _info__json
else:
warn = _warn__ascii
info = _info__ascii
if use_json:
print("[")
# {(sha1, length): "filepath"}
remap_uuid = {}
@@ -97,7 +124,7 @@ def start(
for blendfile_src in _iter_files(paths, check_ext=_is_blend):
if not is_quiet:
print("blend read: %r" % blendfile_src)
info("blend read: %r" % blendfile_src)
remap_lost[blendfile_src] = remap_lost_blendfile_src = set()
@@ -115,7 +142,7 @@ def start(
files_to_map.add(f_abs)
else:
if not is_quiet:
_warn("file %r not found!" % f_abs)
warn("file %r not found!" % f_abs)
# don't complain about this file being missing on remap
remap_lost_blendfile_src.add(fp.filepath)
@@ -134,7 +161,7 @@ def start(
f_match = remap_uuid.get(f_uuid)
if f_match is not None:
if not is_quiet:
_warn("duplicate file found! (%r, %r)" % (f_match, f))
warn("duplicate file found! (%r, %r)" % (f_match, f))
remap_uuid[f_uuid] = f
@@ -144,6 +171,16 @@ def start(
remap_lost,
)
if use_json:
if not remap_uuid:
print("\"nothing to remap!\"")
else:
print("\"complete\"")
print("]")
else:
if not remap_uuid:
print("Nothing to remap!")
return remap_data_args
@@ -152,8 +189,19 @@ def finish(
is_quiet=False,
force_relative=False,
dry_run=False,
use_json=False,
):
if use_json:
warn = _warn__json
info = _info__json
else:
warn = _warn__ascii
info = _info__ascii
if use_json:
print("[")
(remap_uuid,
remap_lost,
) = remap_data_args
@@ -171,19 +219,18 @@ def finish(
# now the fun begins, remap _all_ paths
import blendfile_path_walker
for blendfile_dst in _iter_files(paths, check_ext=_is_blend):
blendfile_src = remap_dst_to_src.get(blendfile_dst)
if blendfile_src is None:
if not is_quiet:
_warn("new blendfile added since beginning 'remap': %r" % blendfile_dst)
warn("new blendfile added since beginning 'remap': %r" % blendfile_dst)
continue
# not essential, just so we can give more meaningful errors
remap_lost_blendfile_src = remap_lost[blendfile_src]
if not is_quiet:
print("blend write: %r" % blendfile_src)
info("blend write: %r -> %r" % (blendfile_src, blendfile_dst))
blendfile_src_basedir = os.path.dirname(blendfile_src)
blendfile_dst_basedir = os.path.dirname(blendfile_dst)
@@ -195,36 +242,40 @@ def finish(
# TODO. warn when referencing files outside 'paths'
# so we can update the reference
f_src_rel = fp.filepath
f_src_orig = fp.filepath
if f_src_rel in remap_lost_blendfile_src:
if f_src_orig in remap_lost_blendfile_src:
# this file never existed, so we can't remap it
continue
is_relative = f_src_rel.startswith(b'//')
is_relative = f_src_orig.startswith(b'//')
if is_relative:
f_src_abs = fp.filepath_absolute_resolve(basedir=blendfile_src_basedir)
else:
f_src_abs = f_src_rel
f_src_abs = f_src_orig
f_src_abs = os.path.normpath(f_src_abs)
f_dst_abs = remap_src_to_dst.get(f_src_abs)
if f_dst_abs is None:
if not is_quiet:
_warn("file %r not found in map!" % f_src_abs)
warn("file %r not found in map!" % f_src_abs)
continue
# now remap!
if is_relative or force_relative:
f_dst_rel = b'//' + os.path.relpath(f_dst_abs, blendfile_dst_basedir)
f_dst_final = b'//' + os.path.relpath(f_dst_abs, blendfile_dst_basedir)
else:
f_dst_rel = f_dst_abs
f_dst_final = f_dst_abs
if f_dst_rel != f_src_rel:
if f_dst_final != f_src_orig:
if not dry_run:
fp.filepath = f_dst_abs
# print("remap %r -> %r" % (f_src_abs, fp.filepath))
fp.filepath = f_dst_final
if not is_quiet:
info("remap %r -> %r" % (f_src_abs, f_dst_abs))
del blendfile_path_walker
if use_json:
print("\"complete\"\n]")

View File

@@ -344,7 +344,7 @@ def file_quick_touch_blend(path, filepart=None, times=None):
os.utime(path, times)
def file_quick_image(path, filepart=None):
def file_quick_image(path, filepart=None, fill_color=b'\xff' * 4):
def write_png(buf, width, height):
""" buf: must be bytes or a bytearray in py3, a regular string in py2. formatted RGBARGBA... """
import zlib
@@ -369,7 +369,7 @@ def file_quick_image(path, filepart=None):
if filepart is not None:
path = os.path.join(path, filepart)
with open(path, 'wb') as f:
f.write(write_png(b'0000' * 4, 2, 2))
f.write(write_png(fill_color * 4, 2, 2))
def _dbg_dump_path(path):
@@ -439,14 +439,14 @@ def blendfile_template_create(blendfile, blendfile_root, create_id, create_data,
return True
def blendfile_template_create_from_files(proj_path, session_path, blendfile_pair, images):
def blendfile_template_create_from_files(proj_path, session_path, blendfile, images):
for f_proj, f_local in images:
for i, f_proj in enumerate(images):
f_abs = os.path.join(session_path, f_proj)
os.makedirs(os.path.dirname(f_abs))
file_quick_image(f_abs)
file_quick_image(f_abs, fill_color=bytes([i]))
blendfile_abs = os.path.join(session_path, blendfile_pair[0])
blendfile_abs = os.path.join(session_path, blendfile)
deps = []
if not blendfile_template_create(blendfile_abs, session_path, "create_from_files", None, deps):
return False
@@ -1046,7 +1046,9 @@ class BamRelativeAbsoluteTest(BamSessionTestCase):
proj_path, session_path = self.init_session(session_name)
# create the image files we need
blendfile_template_create_from_files(proj_path, session_path, blendfile_pair, images)
blendfile_template_create_from_files(
proj_path, session_path,
blendfile_pair[0], [f[0] for f in images])
# now commit the files
stdout, stderr = bam_run(["commit", "-m", "commit shot_01"], session_path)
@@ -1430,6 +1432,82 @@ class BamIgnoreTest(BamSessionTestCase):
# now check for status
self.assertRaises(RuntimeError, bam_run, ["status", ], session_path)
class BamRemapTest(BamSimpleTestCase):
    """Test remapping existing blend files via the 'bam remap' command.

    note: this doesn't need any bam-session, simply a directory to work in.
    """

    @staticmethod
    def remap_path_pair(base, src_dst):
        # Render a "src -> dst" pair the way the remap code reports it:
        # the bytes repr of each absolute path, joined by " -> ".
        src, dst = src_dst
        return "%s -> %s" % (
            os.path.join(base, src).encode('utf-8'),
            os.path.join(base, dst).encode('utf-8'),
        )

    def test_remap_empty(self):
        # An empty directory has no blend files, so 'remap start'
        # reports there is nothing to do.
        work_path = os.path.join(TEMP_LOCAL, "my_remap")
        os.makedirs(work_path)

        result = bam_run_as_json(["remap", "start", "--json"], work_path)
        self.assertEqual(["nothing to remap!"], result)

    def test_remap_simple(self):
        # Create a blend file referencing two images, 'remap start',
        # physically move every file to a new location, then check that
        # 'remap finish' rewrites the references to match.
        work_path = os.path.join(TEMP_LOCAL, "my_remap")
        work_path_sub = os.path.join(TEMP_LOCAL, "my_remap", "sub")
        os.makedirs(work_path_sub)

        # DUMMY VALUES (not actually needed here)
        proj_path = work_path
        session_path = work_path_sub

        # absolute path: (project relative) -->
        # checkout path: (relative to blend)
        blendfile_pair = ("shots/01/shot_01.blend", "new/deeply/nested/path/testme.blend")
        images = (
            ("maps/generic.png", "foobar/another.png"),
            ("shots/01/maps/special.png", "blah/image.png"),
        )

        blendfile_template_create_from_files(
            proj_path, session_path,
            blendfile_pair[0], [f[0] for f in images])

        blendfile_pair_abs = (
            os.path.join(session_path, blendfile_pair[0]),
            os.path.join(session_path, blendfile_pair[1]),
        )

        expected = [
            ['info', "blend read: %s" % blendfile_pair_abs[0].encode()],
            'complete',
        ]
        result = bam_run_as_json(["remap", "start", "--json"], session_path)
        self.assertEqual(expected, result)

        # Move the blend file and both images to their new locations.
        for pair in ((blendfile_pair,) + images):
            path_src = os.path.join(session_path, pair[0])
            path_dst = os.path.join(session_path, pair[1])
            os.makedirs(os.path.dirname(path_dst), exist_ok=True)
            shutil.move(path_src, path_dst)

        expected = [
            ["info", "blend write: %s" % BamRemapTest.remap_path_pair(session_path, blendfile_pair)],
            ["info", "remap %s" % BamRemapTest.remap_path_pair(session_path, images[0])],
            ["info", "remap %s" % BamRemapTest.remap_path_pair(session_path, images[1])],
            "complete",
        ]
        # A dry-run must report the same remaps as the real run,
        # without modifying anything.
        result = bam_run_as_json(["remap", "finish", "--json", "--dry-run"], session_path)
        self.assertEqual(expected, result)
        result = bam_run_as_json(["remap", "finish", "--json"], session_path)
        self.assertEqual(expected, result)

        # finally run deps to see the paths are as we expect
        result = bam_run_as_json(["deps", blendfile_pair_abs[1], "--json"], session_path)
        self.assertEqual(result[0][1], "//" + os.path.join("..", "..", "..", "..", images[0][1]))
        self.assertEqual(result[0][3], "OK")
        self.assertEqual(result[1][1], "//" + os.path.join("..", "..", "..", "..", images[1][1]))
        self.assertEqual(result[1][3], "OK")
if __name__ == '__main__':
data = global_setup()