Remap: Refactor scripts #177
@@ -1,3 +1,4 @@
+import argparse
 import bpy
 from pathlib import Path
 import json
@@ -7,11 +8,13 @@ import contextlib

 file_updated = False

-json_file_path = "" # File Path to read/write JSON File to
-
-file_map_json = Path(json_file_path)
-file_map_data = open(file_map_json)
-file_map_dict = json.load(file_map_data)
+# Command line arguments.
+parser = argparse.ArgumentParser()
+parser.add_argument(
+    "path",
+    help="Path to JSON file containing map",
+    type=str,
+)


 @contextlib.contextmanager
@@ -55,7 +58,7 @@ def generate_checksum(filepath: str) -> str:
     return sha256.hexdigest()


-def find_new_from_old(old_path):
+def find_new_from_old(old_path: str, file_map_dict: dict) -> str:
     for _, value in file_map_dict.items():
         for old_json_path in value['old']:
             if old_json_path.endswith(old_path.split("/..")[-1]):
@@ -68,7 +71,7 @@ def find_new_from_old(old_path):
                 return value['new']


-def update_vse_references():
+def update_vse_references(file_map_dict: dict) -> None:
     global file_updated
     for scn in bpy.data.scenes:
         if not scn.sequence_editor:
@@ -77,7 +80,7 @@ def update_vse_references():
             for path in paths_for_vse_strip(strip):
                 if path == "":
                     continue
-                new_path = find_new_from_old(path)
+                new_path = find_new_from_old(path, file_map_dict)
                 if not new_path:
                     print(f"No new path for '{strip.name}' at '{path}' ")
                     continue
@@ -106,21 +109,21 @@ def update_vse_references():
                     file_updated = True


-def update_referenced_images():
+def update_referenced_images(file_map_dict: dict) -> None:
     global file_updated
     for img in bpy.data.images:
         if img.filepath is not None and img.filepath != "":
-            new_path = find_new_from_old(img.filepath)
+            new_path = find_new_from_old(img.filepath, file_map_dict)
            if new_path:
                print(f"Remapping Image Datablock {img.filepath }")
                img.filepath = new_path
                file_updated = True


-def update_libs():
+def update_libs(file_map_dict: dict) -> None:
     global file_updated
     for lib in bpy.data.libraries:
-        new_path = find_new_from_old(lib.filepath)
+        new_path = find_new_from_old(lib.filepath, file_map_dict)
         if new_path:
             lib.filepath = new_path
             print(f"Remapping {lib.filepath}")
@@ -129,9 +132,17 @@ def update_libs():

 def remap_all_blender_paths():
     start = time.time()
-    update_vse_references()
-    update_referenced_images()
-    update_libs()
+    args = parser.parse_args()
+    json_file_path = args.path # File Path to read/write JSON File to
+
+    file_map_json = Path(json_file_path)
+    file_map_data = open(file_map_json)
+    file_map_dict = json.load(file_map_data)
+
+    update_vse_references(file_map_dict)
+    update_referenced_images(file_map_dict)
+    update_libs(file_map_dict)
+
     bpy.ops.file.make_paths_relative()
     end = time.time()
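Since the script now parses its own command line, one way to drive it is through Blender in background mode, with the JSON path given after Blender's "--" separator. This is only a sketch of that pattern, not something added by this change, and "remap.py" / "some_file.blend" stand in for the real file names. When run this way, sys.argv still contains Blender's own arguments, so argparse is usually fed only what follows "--":

    blender -b some_file.blend --python remap.py -- /path/to/file_map.json

    # common Blender idiom inside the script (illustrative, not part of this diff):
    import sys
    argv = sys.argv
    argv = argv[argv.index("--") + 1:] if "--" in argv else []
    args = parser.parse_args(argv)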