Remap: Refactor scripts #177

Merged
Nick Alberelli merged 17 commits from :feature/refactor-remap-script into main 2023-12-07 16:17:27 +01:00
5 changed files with 251 additions and 123 deletions


@@ -3,8 +3,9 @@
This directory contains scripts that are useful for re-organizing production directories. The tools are intended to be used when some directories need to be changed, and references to these directories need to be updated in .blend files.
## Usage
1. Set the variable `json_file_path` to the same value in all script files. Set `folder_path` in both hash_map script files.
2. Run `hash_map_make.py` to create a JSON file listing every file in the directory by its hash, plus a list of the directories leading to that file (duplicate files included).
3. Re-organize/rename items in the directory you have made a map for.
4. Run `hash_map_update.py` to find the new locations of these files, using the hash to match them up. This will add a `new` path for each hash.
5. Using [`bbatch`](https://projects.blender.org/studio/blender-studio-pipeline/src/branch/main/scripts/bbatch/README.md), run the script `remap_blender_paths.py` to update references in .blend files from the old path to the new path.
1. Enter the remap directory: `cd blender-studio-pipeline/scripts/remap`
2. Run the remap tool via `python -m remap`. You will be prompted for a directory to map, and a location to store the map (outside of your remap directory).
3. Now you are ready to re-organize your mapped directory: move files into different folders, rename files, and remove duplicates.
4. Re-run the remap tool via `python -m remap` to update your map with the new file locations. The tool will print a bbatch command; copy it for use in step 6. (The structure of the map file is sketched just after this list.)
5. Enter the bbatch directory: `cd blender-studio-pipeline/scripts/bbatch`
6. Run the provided bbatch command, similar to `python -m bbatch {my_files}/ --nosave --recursive --script {path_to_script}/remap_blender_paths.py --args "{path_to_json}/dir_map.json"`, to update all references to the remapped directory contents in your .blend files.
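
The map file the tool maintains (`dir_map.json` in the examples above) is a JSON dictionary keyed by each file's sha256 checksum: `old` lists every location the file was found at during the first run, and `new` holds the location found when the map is updated. A minimal sketch of inspecting it, assuming a hypothetical map stored at `/tmp/dir_map.json`:

```python
import json

# Hypothetical location; use the directory you chose when prompted in step 2.
with open("/tmp/dir_map.json") as f:
    dir_map = json.load(f)

# Each entry: {"old": ["<path before re-organizing>", ...], "new": "<path after>"}
for checksum, entry in dir_map.items():
    for old_path in entry["old"]:
        print(f"{old_path} -> {entry['new'] or '(no new location found yet)'}")
```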


@@ -1,44 +0,0 @@
#!/usr/bin/env python3
import os
import hashlib
import json
json_file_path = "" # File Path to write JSON File to
folder_path = "" # Create Map for items in this folder recursively
def generate_checksum(filepath: str) -> str:
"""
Generate a checksum for a zip file
Arguments:
archive_path: String of the archive's file path
Returns:
sha256 checksum for the provided archive as string
"""
with open(filepath, "rb") as f:
digest = hashlib.file_digest(f, "sha256")
return digest.hexdigest()
def generate_json_for_directory(directory_path):
data = {}
for root, _, files in os.walk(directory_path):
for file_name in files:
file_path = os.path.join(root, file_name)
sha256 = generate_checksum(file_path)
if sha256 in data:
data[sha256]['old'].append(file_path)
else:
data[sha256] = {'old': [file_path], 'new': ''}
print(f"Making hash for {file_name}")
with open(json_file_path, 'w') as json_file:
json.dump(data, json_file, indent=4)
if __name__ == "__main__":
directory_path = folder_path
generate_json_for_directory(directory_path)


@@ -1,52 +0,0 @@
#!/usr/bin/env python3
import os
import hashlib
import json
from pathlib import Path
json_file_path = "" # File Path to read/write JSON File to
folder_path = "" # Create Map for items in this folder recursively
gold_file_map_json = Path(json_file_path)
gold_file_map_data = open(gold_file_map_json)
gold_file_map_dict = json.load(gold_file_map_data)
def generate_checksum(filepath: str) -> str:
"""
Generate a checksum for a zip file
Arguments:
archive_path: String of the archive's file path
Returns:
sha256 checksum for the provided archive as string
"""
with open(filepath, "rb") as f:
digest = hashlib.file_digest(f, "sha256")
return digest.hexdigest()
def generate_json_for_directory(directory_path):
data = gold_file_map_dict
for root, _, files in os.walk(directory_path):
for file_name in files:
file_path = os.path.join(root, file_name)
sha256 = generate_checksum(file_path)
if not data.get(sha256):
print(f"Cannot find file in dict {file_path}")
continue
if sha256 in data:
data[sha256]['new'] = file_path
print(f"Updating path for {file_path}")
with open(json_file_path, 'w') as json_file:
json.dump(data, json_file, indent=4)
if __name__ == "__main__":
directory_path = folder_path
generate_json_for_directory(directory_path)

scripts/remap/remap.py Normal file

@@ -0,0 +1,180 @@
#!/usr/bin/env python3
from pathlib import Path
import hashlib
import json
import os
JSON_FILE_KEY = 'json_file_path'
CRAWL_DIR_KEY = 'folder_path'
def get_current_dir():
return Path(__file__).parent.resolve()
def get_variable_file_path():
directory = get_current_dir()
variables_dir = directory.joinpath("var")
if not variables_dir.exists():
variables_dir.mkdir()
return variables_dir.joinpath("remap_variables.json")
def get_variable_file():
env_file = get_variable_file_path()
if env_file.exists():
return env_file
def remove_variable_file():
env_file = get_variable_file_path()
Path.unlink(env_file)
def get_variables():
var_file = Path(get_variable_file())
var_file_data = open(var_file)
var_file_dict = json.load(var_file_data)
return var_file_dict
def set_variable_file():
file_path = get_variable_file_path()
variables = {}
dir_to_map = get_dir_to_map()
    json_file = get_json_file_dir()
variables[JSON_FILE_KEY] = f"{json_file}"
variables[CRAWL_DIR_KEY] = f"{dir_to_map}"
with open(file_path, 'w') as json_file:
json.dump(variables, json_file, indent=4)
return variables
def get_dir_recursively(prompt: str, iterations: int = 0) -> str:
    # Keep prompting until a valid directory is entered, up to a retry limit.
    dir_to_map = input(prompt)
    if Path(dir_to_map).is_dir():
        return dir_to_map
    if iterations > 10:
        raise Exception('Provided path is not a directory')
    print('Provided path is not a directory')
    return get_dir_recursively(prompt, iterations + 1)
def get_dir_to_map() -> str:
return get_dir_recursively("Please enter directory to map: ")
def get_json_file_dir() -> str:
    json_dir = get_dir_recursively("Please enter directory to store JSON map: ")
    return str(Path(json_dir).joinpath("dir_map.json"))
def get_bbatch_command():
directory = get_current_dir()
source_file = directory.joinpath('remap_blender_paths.py')
variables = get_variables()
print(
"To update your .blend file references open the bbatch directory and run the following command"
)
print(
f'python -m bbatch {variables[CRAWL_DIR_KEY]} --nosave --recursive --script {source_file} --args "{variables[JSON_FILE_KEY]}"'
)
def generate_checksum(filepath: str) -> str:
    """
    Generate a SHA-256 checksum for a file

    Arguments:
        filepath: String of the file's path
    Returns:
        sha256 checksum for the provided file as string
    """
    with open(filepath, "rb") as f:
        # Note: hashlib.file_digest() requires Python 3.11 or newer.
        digest = hashlib.file_digest(f, "sha256")
    return digest.hexdigest()
def generate_json_for_directory(directory_path, json_file_path):
# TODO Centralize duplicate code from 'update_json_for_directory()'
data = {}
for root, _, files in os.walk(directory_path):
for file_name in files:
file_path = os.path.join(root, file_name)
sha256 = generate_checksum(file_path)
if sha256 in data:
data[sha256]['old'].append(file_path)
else:
data[sha256] = {'old': [file_path], 'new': ''}
print(f"Making hash for {file_name}")
with open(json_file_path, 'w') as json_file:
json.dump(data, json_file, indent=4)
def update_json_for_directory(directory_path, json_file_path):
file_map_json = Path(json_file_path)
file_map_data = open(file_map_json)
file_map_dict = json.load(file_map_data)
data = file_map_dict
for root, _, files in os.walk(directory_path):
for file_name in files:
file_path = os.path.join(root, file_name)
sha256 = generate_checksum(file_path)
if not data.get(sha256):
print(f"Cannot find file in dict {file_path}")
continue
if sha256 in data:
data[sha256]['new'] = file_path
print(f"Updating path for {file_path}")
with open(json_file_path, 'w') as json_file:
json.dump(data, json_file, indent=4)
def main():
if not get_variable_file():
print("Starting new remap session")
variables = set_variable_file()
print(f"Generating map for directory '{variables[CRAWL_DIR_KEY]}'")
generate_json_for_directory(variables[CRAWL_DIR_KEY], variables[JSON_FILE_KEY])
print(
f"Directory '{variables[CRAWL_DIR_KEY]}' can now be re-organized before re-running this tool to update it's map"
)
else:
variables = get_variables()
answer = input(
f"Continune with existing session to update map for dir '{variables[CRAWL_DIR_KEY]}'? yes or no: "
)
answer_lw = answer.lower()
if answer_lw == "yes" or answer_lw == 'y':
print(f"Updating map for directory '{variables[CRAWL_DIR_KEY]}'")
update_json_for_directory(
variables[CRAWL_DIR_KEY], variables[JSON_FILE_KEY]
)
print('Map update is complete')
get_bbatch_command()
elif answer_lw == "no" or answer_lw == 'n':
remove_variable_file()
main()
        else:
            print("Please enter yes or no.")
            main()
if __name__ == "__main__":
print("Welcome to 'remap', a tool to assist in a re-organization of folders")
main()
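
Note that `hashlib.file_digest()`, used in `generate_checksum()` above, is only available from Python 3.11 onward. A minimal sketch of an equivalent helper for older interpreters, assuming chunked reading is acceptable (the name `generate_checksum_compat` is illustrative, not part of the script):

```python
import hashlib


def generate_checksum_compat(filepath: str, chunk_size: int = 65536) -> str:
    """Return the sha256 hex digest of a file without hashlib.file_digest()."""
    sha256 = hashlib.sha256()
    with open(filepath, "rb") as f:
        # Read in fixed-size chunks so large production files
        # never need to fit into memory at once.
        for chunk in iter(lambda: f.read(chunk_size), b""):
            sha256.update(chunk)
    return sha256.hexdigest()
```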


@@ -1,18 +1,14 @@
import sys
import bpy
from pathlib import Path
import json
import hashlib
import time
import contextlib
from typing import List
file_updated = False
json_file_path = "" # File Path to read/write JSON File to
gold_file_map_json = Path(json_file_path)
gold_file_map_data = open(gold_file_map_json)
gold_file_map_dict = json.load(gold_file_map_data)
@contextlib.contextmanager
def override_save_version():
@@ -27,7 +23,16 @@ def override_save_version():
bpy.context.preferences.filepaths.save_version = save_version
def paths_for_vse_strip(strip):
def paths_for_vse_strip(strip: bpy.types.Sequence) -> List[str]:
"""Returns all paths related to Movie, Image and Sound strips
in Blender's Video Sequence Editor
Args:
strip (bpy.types.Sequence): Movie, Image or Sounds Strip
Returns:
List[str]: List of all paths related to strip
"""
if hasattr(strip, "filepath"):
return [strip.filepath]
if hasattr(strip, "directory"):
@@ -38,9 +43,9 @@ def paths_for_vse_strip(strip):
def generate_checksum(filepath: str) -> str:
"""
Generate a checksum for a zip file
Arguments:
"""Generate a checksum for a zip file
Args:
archive_path: String of the archive's file path
Returns:
sha256 checksum for the provided archive as string
@@ -55,20 +60,37 @@ def generate_checksum(filepath: str) -> str:
return sha256.hexdigest()
def find_new_from_old(old_path):
for _, value in gold_file_map_dict.items():
def dict_find_new_from_old(old_path: str, file_map_dict: dict) -> str:
"""Returns the matching 'new' filepath stored in file_map_dict
using the 'old' filepath.
Args:
old_path (str): 'old' filepath referencing a file from Blender
file_map_dict (dict): Dictionary of 'old' and 'new' paths
Returns:
str: 'new' filepath to replace the 'old' filepath
"""
for _, value in file_map_dict.items():
for old_json_path in value['old']:
# Match paths using the filepath stored in Blender
if old_json_path.endswith(old_path.split("/..")[-1]):
if value['new'] != old_json_path:
return value['new']
for _, value in gold_file_map_dict.items():
for _, value in file_map_dict.items():
for old_json_path in value['old']:
# Match paths using filename only
if old_json_path.endswith(old_path.split("/")[-1]):
if value['new'] != old_json_path:
return value['new']
def update_vse_references():
def update_vse_references(file_map_dict: dict) -> None:
"""Update file references for VSE strips
Args:
file_map_dict (dict): Dictionary of 'old' and 'new' paths
"""
global file_updated
for scn in bpy.data.scenes:
if not scn.sequence_editor:
@@ -77,7 +99,7 @@ def update_vse_references():
for path in paths_for_vse_strip(strip):
if path == "":
continue
new_path = find_new_from_old(path)
new_path = dict_find_new_from_old(path, file_map_dict)
if not new_path:
print(f"No new path for '{strip.name}' at '{path}' ")
continue
@@ -106,21 +128,31 @@ def update_vse_references():
file_updated = True
def update_referenced_images():
def update_referenced_images(file_map_dict: dict) -> None:
"""Update file references for Image data-blocks
Args:
file_map_dict (dict): Dictionary of 'old' and 'new' paths
"""
global file_updated
for img in bpy.data.images:
if img.filepath is not None and img.filepath != "":
new_path = find_new_from_old(img.filepath)
new_path = dict_find_new_from_old(img.filepath, file_map_dict)
if new_path:
print(f"Remapping Image Datablock {img.filepath }")
img.filepath = new_path
file_updated = True
def update_libs():
def update_libs(file_map_dict: dict) -> None:
"""Update file references for libraries (linked/appended data)
Args:
file_map_dict (dict): Dictionary of 'old' and 'new' paths
"""
global file_updated
for lib in bpy.data.libraries:
new_path = find_new_from_old(lib.filepath)
new_path = dict_find_new_from_old(lib.filepath, file_map_dict)
if new_path:
lib.filepath = new_path
print(f"Remapping {lib.filepath}")
@@ -128,10 +160,21 @@ def update_libs():
def remap_all_blender_paths():
"""Remap all references to files from blender via dictionary"""
start = time.time()
update_vse_references()
update_referenced_images()
update_libs()
import sys
argv = sys.argv
argv = argv[argv.index("--") + 1 :]
json_file_path = argv[0]
file_map_json = Path(json_file_path)
file_map_data = open(file_map_json)
file_map_dict = json.load(file_map_data)
update_vse_references(file_map_dict)
update_referenced_images(file_map_dict)
update_libs(file_map_dict)
bpy.ops.file.make_paths_relative()
end = time.time()
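
For reference, the matching in `dict_find_new_from_old()` is purely string based: it first tries to match the tail of Blender's stored path (after any relative `..` prefix), and only then falls back to matching on the filename alone. A small standalone sketch of that behaviour, using a hypothetical map entry shaped like the tool's `dir_map.json` (no Blender required):

```python
# Hypothetical map entry, shaped like dir_map.json.
file_map_dict = {
    "abc123": {
        "old": ["/projects/film/old_textures/rock.png"],
        "new": "/projects/film/assets/textures/rock.png",
    }
}


def find_new(old_path: str) -> str:
    # First pass: match the portion of Blender's (possibly relative) path after '..'.
    for value in file_map_dict.values():
        for old_json_path in value["old"]:
            if old_json_path.endswith(old_path.split("/..")[-1]) and value["new"] != old_json_path:
                return value["new"]
    # Second pass: match on the filename only.
    for value in file_map_dict.values():
        for old_json_path in value["old"]:
            if old_json_path.endswith(old_path.split("/")[-1]) and value["new"] != old_json_path:
                return value["new"]


print(find_new("//../old_textures/rock.png"))
# -> /projects/film/assets/textures/rock.png
```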