Remap: Refactor scripts #177

Merged
Nick Alberelli merged 17 commits from :feature/refactor-remap-script into main 2023-12-07 16:17:27 +01:00
Showing only changes of commit 38e3737dd8


@@ -1,3 +1,4 @@
+import argparse
 import bpy
 from pathlib import Path
 import json
@@ -7,11 +8,13 @@ import contextlib
 file_updated = False

-json_file_path = "" # File Path to read/write JSON File to
-file_map_json = Path(json_file_path)
-file_map_data = open(file_map_json)
-file_map_dict = json.load(file_map_data)
+# Command line arguments.
+parser = argparse.ArgumentParser()
+parser.add_argument(
+    "path",
+    help="Path to JSON file containing map",
+    type=str,
+)

 @contextlib.contextmanager
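
Note on the new argparse usage: when this script is executed through Blender rather than as a standalone Python process, parse_args() will also see Blender's own command-line arguments. This commit does not show how that is handled; a minimal sketch, assuming the usual convention of passing script arguments after the "--" separator, would be:

    import sys

    # Sketch only: keep just the arguments Blender passes through after "--".
    argv = sys.argv[sys.argv.index("--") + 1:] if "--" in sys.argv else sys.argv[1:]
    args = parser.parse_args(argv)
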
@@ -55,7 +58,7 @@ def generate_checksum(filepath: str) -> str:
     return sha256.hexdigest()

-def find_new_from_old(old_path):
+def find_new_from_old(old_path: str, file_map_dict: dict) -> str:
     for _, value in file_map_dict.items():
         for old_json_path in value['old']:
             if old_json_path.endswith(old_path.split("/..")[-1]):
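
For context, find_new_from_old only assumes that each entry in the map exposes an 'old' list of previous paths and a single 'new' path; the keys themselves are ignored by the lookup. A purely illustrative map (all names and paths below are invented) would look like:

    file_map_dict = {
        "shot_010.mp4": {  # key is not used by the lookup
            "old": ["/old_project/footage/shot_010.mp4"],
            "new": "/new_project/footage/shot_010.mp4",
        },
    }
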
@@ -68,7 +71,7 @@ def find_new_from_old(old_path):
                 return value['new']

-def update_vse_references():
+def update_vse_references(file_map_dict: dict) -> None:
     global file_updated
     for scn in bpy.data.scenes:
         if not scn.sequence_editor:
@@ -77,7 +80,7 @@ def update_vse_references():
         for path in paths_for_vse_strip(strip):
             if path == "":
                 continue
-            new_path = find_new_from_old(path)
+            new_path = find_new_from_old(path, file_map_dict)
             if not new_path:
                 print(f"No new path for '{strip.name}' at '{path}' ")
                 continue
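
The matching logic itself is unchanged by this commit: the strip's path is reduced to everything after its last "/.." segment and compared against the tail of each entry under 'old'. With invented paths:

    old_path = "//../footage/shot_010.mp4"  # hypothetical Blender-relative strip path
    old_path.split("/..")[-1]
    # -> "/footage/shot_010.mp4"
    "/old_project/footage/shot_010.mp4".endswith("/footage/shot_010.mp4")
    # -> True
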
@@ -106,21 +109,21 @@ def update_vse_references():
             file_updated = True

-def update_referenced_images():
+def update_referenced_images(file_map_dict: dict) -> None:
     global file_updated
     for img in bpy.data.images:
         if img.filepath is not None and img.filepath != "":
-            new_path = find_new_from_old(img.filepath)
+            new_path = find_new_from_old(img.filepath, file_map_dict)
             if new_path:
                 print(f"Remapping Image Datablock {img.filepath }")
                 img.filepath = new_path
                 file_updated = True

-def update_libs():
+def update_libs(file_map_dict: dict) -> None:
     global file_updated
     for lib in bpy.data.libraries:
-        new_path = find_new_from_old(lib.filepath)
+        new_path = find_new_from_old(lib.filepath, file_map_dict)
         if new_path:
             lib.filepath = new_path
             print(f"Remapping {lib.filepath}")
@@ -129,9 +132,17 @@ def update_libs():
 def remap_all_blender_paths():
     start = time.time()
-    update_vse_references()
-    update_referenced_images()
-    update_libs()
+
+    args = parser.parse_args()
+    json_file_path = args.path # File Path to read/write JSON File to
+    file_map_json = Path(json_file_path)
+    file_map_data = open(file_map_json)
+    file_map_dict = json.load(file_map_data)
+
+    update_vse_references(file_map_dict)
+    update_referenced_images(file_map_dict)
+    update_libs(file_map_dict)
     bpy.ops.file.make_paths_relative()
     end = time.time()
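
How the refactored script is invoked is not part of this diff; assuming it is run through Blender in background mode, the JSON path would be supplied after the "--" separator, roughly along these lines (file names are placeholders):

    blender --background edit.blend --python remap_blender_paths.py -- /path/to/file_map.json
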