Remap: Refactor scripts #177

Merged
Nick Alberelli merged 17 commits from :feature/refactor-remap-script into main 2023-12-07 16:17:27 +01:00
3 changed files with 186 additions and 96 deletions
Showing only changes of commit 262a0e14e1 - Show all commits

View File

@ -1,44 +0,0 @@
#!/usr/bin/env python3
import os
import hashlib
import json
# Fill these two paths in before running; the script takes no CLI arguments.
json_file_path = ""  # File Path to write JSON File to
folder_path = ""  # Create Map for items in this folder recursively
def generate_checksum(filepath: str) -> str:
    """
    Generate a SHA-256 checksum for a file.

    Arguments:
        filepath: String of the file's path to hash
    Returns:
        sha256 checksum for the provided file as a hex string
    """
    # hashlib.file_digest reads the file in chunks (requires Python 3.11+).
    with open(filepath, "rb") as f:
        digest = hashlib.file_digest(f, "sha256")
    return digest.hexdigest()
def generate_json_for_directory(directory_path):
    """Hash every file under directory_path (recursively) and write a
    sha256 -> {'old': [paths], 'new': ''} map to the module-level
    json_file_path. Files with identical contents share one entry."""
    checksum_map = {}
    for root, _, file_names in os.walk(directory_path):
        for file_name in file_names:
            path = os.path.join(root, file_name)
            digest = generate_checksum(path)
            # Duplicate contents collapse onto one checksum entry.
            entry = checksum_map.setdefault(digest, {'old': [], 'new': ''})
            entry['old'].append(path)
            print(f"Making hash for {file_name}")
    with open(json_file_path, 'w') as json_file:
        json.dump(checksum_map, json_file, indent=4)


if __name__ == "__main__":
    generate_json_for_directory(folder_path)

View File

@ -1,52 +0,0 @@
#!/usr/bin/env python3
import os
import hashlib
import json
from pathlib import Path
json_file_path = ""  # File Path to read/write JSON File to
folder_path = ""  # Create Map for items in this folder recursively

# Load the previously generated checksum map once at import time.
gold_file_map_json = Path(json_file_path)
# Context manager closes the handle; the original open() call leaked it
# for the lifetime of the process.
with open(gold_file_map_json) as gold_file_map_data:
    gold_file_map_dict = json.load(gold_file_map_data)
def generate_checksum(filepath: str) -> str:
"""
Generate a checksum for a zip file
Arguments:
archive_path: String of the archive's file path
Returns:
sha256 checksum for the provided archive as string
"""
with open(filepath, "rb") as f:
digest = hashlib.file_digest(f, "sha256")
return digest.hexdigest()
def generate_json_for_directory(directory_path):
    """Re-walk directory_path after a re-organization and record each file's
    current location as the 'new' path in the global checksum map, then
    rewrite the JSON map file."""
    data = gold_file_map_dict
    for root, _, names in os.walk(directory_path):
        for name in names:
            current_path = os.path.join(root, name)
            digest = generate_checksum(current_path)
            if not data.get(digest):
                # Checksum not in the map: file appeared after mapping.
                print(f"Cannot find file in dict {current_path}")
                continue
            # The guard above guarantees digest is present in data.
            data[digest]['new'] = current_path
            print(f"Updating path for {current_path}")
    with open(json_file_path, 'w') as json_file:
        json.dump(data, json_file, indent=4)


if __name__ == "__main__":
    generate_json_for_directory(folder_path)

186
scripts/remap/remap.py Normal file
View File

@ -0,0 +1,186 @@
#!/usr/bin/env python3
from pathlib import Path
import hashlib
import json
import os
# Keys used both in the .env session file and in the in-memory variables dict.
JSON_FILE_KEY = 'json_file_path'
CRAWL_DIR_KEY = 'folder_path'
def get_current_dir():
    """Return the absolute directory that contains this script."""
    script = Path(__file__)
    return script.parent.resolve()
def get_variable_file_path():
    """Return the path of the .env session file stored next to this script."""
    return get_current_dir() / ".env"
def get_variable_file():
    """Return the .env session file path if it exists, otherwise None."""
    env_file = get_variable_file_path()
    return env_file if env_file.exists() else None
def remove_variable_file():
    """Delete the .env session file (raises FileNotFoundError if absent)."""
    get_variable_file_path().unlink()
def get_variables():
    """Load the saved session variables from the .env JSON file.

    Returns:
        dict containing the JSON_FILE_KEY and CRAWL_DIR_KEY entries written
        by set_variable_file().
    """
    # TODO FIX Saving JSON because os.environ.get() doesn't work when running this as a script
    var_file = Path(get_variable_file())
    # Context manager closes the handle; the original open() call leaked it.
    with open(var_file) as var_file_data:
        return json.load(var_file_data)
def set_variable_file():
    """Prompt the user for the session paths, persist them to the .env file
    next to this script, and return them.

    Returns:
        dict mapping JSON_FILE_KEY/CRAWL_DIR_KEY to the user-supplied paths.
    """
    file_path = get_variable_file_path()
    dir_to_map = get_dir_to_map()
    json_file = get_josn_file_dir()
    variables = {
        JSON_FILE_KEY: f"{json_file}",
        CRAWL_DIR_KEY: f"{dir_to_map}",
    }
    # 'out_file' avoids shadowing the json_file path string above — the
    # original reused the name 'json_file' for the open file handle.
    with open(file_path, 'w') as out_file:
        json.dump(variables, out_file, indent=4)
    return variables
def get_dir_to_map() -> str:
    """Prompt for the directory to map; raise if the input isn't a directory."""
    dir_to_map = input("Please enter directory to map: ")
    if Path(dir_to_map).is_dir():
        return dir_to_map
    raise Exception('Provided path is not a directory')
def get_josn_file_dir() -> str:
    """Prompt for the directory in which to store the JSON map and return the
    full path of a 'dir_map.json' file inside it.

    NOTE(review): the function name misspells 'json' but is kept unchanged
    because callers in this file use it.
    """
    json_dir = input("Please enter directory to store JSON map: ")
    if not Path(json_dir).is_dir():
        raise Exception('Provided path is not a directory')
    return str(Path(json_dir) / "dir_map.json")
def get_bbatch_script_dir() -> Path:
    """Prompt for the directory where the generated bbatch script is saved."""
    chosen = Path(input("Please select directory to save bbatch script to: "))
    if not chosen.is_dir():
        raise Exception('Provided path is not a directory')
    return chosen
def save_bbatch_script():
    """Copy remap_blender_paths.py (next to this script) into a user-chosen
    directory, baking the session's JSON map path into its 'json_file_path ='
    line, and print the bbatch command that will run it.
    """
    template = get_current_dir().joinpath('remap_blender_paths.py')
    variables = get_variables()
    target_dir = get_bbatch_script_dir()
    with open(template, 'r', encoding='utf-8') as src:
        lines = src.readlines()
    # enumerate() fixes two bugs in the original: data.index(line) returned
    # the FIRST matching index (wrong for duplicate lines), and 'file' was
    # shadowed (Path, then file handle).
    for index, line in enumerate(lines):
        if line.startswith('json_file_path ='):
            # Keep the trailing newline: the original dropped it, which
            # merged this line with the following one in the written script.
            lines[index] = f'json_file_path = "{variables[JSON_FILE_KEY]}"\n'
    mapped_dir_name = Path(variables[CRAWL_DIR_KEY]).name
    new_file = target_dir.joinpath(f'remap_blender_paths_for_{mapped_dir_name}.py')
    with open(new_file, 'w', encoding='utf-8') as dst:
        dst.writelines(lines)
    print(f"bbatch script has been saved to {new_file.__str__()}")
    print("This is your bbatch command")
    print(
        f'python -m bbatch {variables[CRAWL_DIR_KEY]} --script {new_file.__str__()} --nosave --recursive'
    )
def generate_checksum(filepath: str) -> str:
"""
Generate a checksum for a zip file
Arguments:
archive_path: String of the archive's file path
Returns:
sha256 checksum for the provided archive as string
"""
with open(filepath, "rb") as f:
digest = hashlib.file_digest(f, "sha256")
return digest.hexdigest()
def generate_json_for_directory(directory_path, json_file_path):
    """Hash every file under directory_path (recursively) and write a
    sha256 -> {'old': [paths], 'new': ''} map to json_file_path. Files with
    identical contents share one entry; 'new' is filled in later by
    update_json_for_directory()."""
    # TODO Centralize duplicate code from 'update_json_for_directory()'
    checksum_map = {}
    for root, _, names in os.walk(directory_path):
        for name in names:
            path = os.path.join(root, name)
            digest = generate_checksum(path)
            entry = checksum_map.setdefault(digest, {'old': [], 'new': ''})
            entry['old'].append(path)
            print(f"Making hash for {name}")
    with open(json_file_path, 'w') as json_file:
        json.dump(checksum_map, json_file, indent=4)
def update_json_for_directory(directory_path, json_file_path):
    """Re-walk directory_path after a re-organization and record each file's
    current location in the existing checksum map's 'new' field.

    Arguments:
        directory_path: root of the re-organized tree to crawl
        json_file_path: path of the JSON map written by
            generate_json_for_directory(); it is read, updated, rewritten.
    """
    # Context manager closes the read handle; the original open() leaked it.
    with open(Path(json_file_path)) as file_map_data:
        data = json.load(file_map_data)
    for root, _, files in os.walk(directory_path):
        for file_name in files:
            file_path = os.path.join(root, file_name)
            sha256 = generate_checksum(file_path)
            if not data.get(sha256):
                # Unknown checksum: file appeared after the map was made.
                print(f"Cannot find file in dict {file_path}")
                continue
            # The guard above guarantees sha256 is present, so the
            # original's extra 'if sha256 in data' check was redundant.
            data[sha256]['new'] = file_path
            print(f"Updating path for {file_path}")
    with open(json_file_path, 'w') as json_file:
        json.dump(data, json_file, indent=4)
def main():
    """Interactive entry point: either start a new mapping session, or
    update and finish an existing one saved in the .env file."""
    print("Welcome to 'remap' a tool to assist in a re-organization of folders")
    if get_variable_file():
        # Existing session: offer to update the map after re-organization.
        variables = get_variables()
        answer = input(f"Update map for dir '{variables[CRAWL_DIR_KEY]}' yes or no: ")
        choice = answer.lower()
        if choice in ("yes", "y"):
            print(f"Updating map for directory '{variables[CRAWL_DIR_KEY]}'")
            update_json_for_directory(
                variables[CRAWL_DIR_KEY], variables[JSON_FILE_KEY]
            )
            print('Map update is complete')
            save_bbatch_script()
        elif choice in ("no", "n"):
            # Discard the session and restart the prompt flow from scratch.
            remove_variable_file()
            main()
        else:
            print("Please enter yes or no.")
    else:
        # No session file yet: collect paths and build the initial map.
        print("Starting new remap session")
        variables = set_variable_file()
        print(f"Generating map for directory '{variables[CRAWL_DIR_KEY]}'")
        generate_json_for_directory(variables[CRAWL_DIR_KEY], variables[JSON_FILE_KEY])
        print(
            f"Directory '{variables[CRAWL_DIR_KEY]}' can now be re-organized before updating the map"
        )


if __name__ == "__main__":
    main()