Add project-tools #142
scripts/project-tools/consistency_check.py (Executable file, 87 lines)
@@ -0,0 +1,87 @@
#!/usr/bin/env python3

import os
import pathlib
import json


def create_path_dict(startpath, max_depth):
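    """Walk startpath and return a nested dict that mirrors the on-disk layout.

    Directories become nested dicts and files map to the string "file".
    Bails out if the tree is deeper than max_depth, which guards against
    symlink loops since os.walk is called with followlinks=True.
    """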
    path_structure_dict = {}
    start_folder_name = os.path.basename(startpath)
    for root, dirs, files in os.walk(startpath, followlinks=True):
        # We are only interested in the files and folders inside the start path.
        cur_path = root.replace(startpath, start_folder_name)
        level = cur_path.count(os.sep)
        # Sanity check. We don't expect the directory tree to be too deep.
        # Therefore, we will stop if we go too deep.
        # This avoids infinite loops that can happen when we follow symlinks.
        if level > max_depth:
            print("We have gone too deep in the file structure, stopping...")
            exit(1)

        # Insert the data into the dictionary
        nested_dict = path_structure_dict
        key_path = cur_path.split(os.sep)
        final_key = key_path[-1]
        for key in key_path[:-1]:
            nested_dict = nested_dict[key]

        files_dict = {}
        for f in files:
            files_dict[f] = "file"

        nested_dict[final_key] = files_dict

        # Print the file structure so we can see the traversed file tree
        indent = ' ' * 4 * level
        print('{}{}/'.format(indent, os.path.basename(root)))
        subindent = ' ' * 4 * (level + 1)

        for f in files:
            print('{}{}'.format(subindent, f))
    return path_structure_dict


def check_if_structure_is_consistent(cur_path, path_dict, error_list):
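    """Recursively verify that everything in path_dict exists on disk under cur_path.

    Problems are appended to error_list instead of failing fast, so the caller
    can report every inconsistency in one run.
    """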
    for path in path_dict:
        # Get next path to check for consistency
        next_path = (cur_path / path).resolve()
        print("Checking path: %s" % next_path)
        if next_path.exists():
            nested_item = path_dict[path]
            if type(nested_item) is not dict:
                if next_path.is_file():
                    continue
                else:
                    # This must be a file, warn if it is not and skip recursing into it
                    error_list += ["ERROR: %s is not a file, when it should be!" % next_path]
                    continue
            check_if_structure_is_consistent(next_path, nested_item, error_list)
        else:
            error_list += ["ERROR: %s doesn't exist!" % next_path]


current_file_folder = pathlib.Path(__file__).parent
start_search_path = current_file_folder.parent.parent.resolve()
# path_dict = create_path_dict(str(start_search_path), 5)

# path_dict pre-generated. This is the structure the consistency check will ensure is there
path_dict = {}
with open(current_file_folder / "folder_structure.json") as json_file:
    path_dict = json.load(json_file)

# TODO perhaps make a function to pretty print out the path_dict for easier inspection

error_list = []
check_if_structure_is_consistent(current_file_folder, path_dict, error_list)

print()
if len(error_list) == 0:
    print("Consistency check: PASSED")
    exit(0)
else:
    print("Consistency check: FAILED")
    print()
    for error in error_list:
        print(error)
    # Exit with error as we didn't pass the consistency check
    exit(1)
scripts/project-tools/folder_structure.json (Normal file, 23 lines)
@@ -0,0 +1,23 @@
{
    "../../": {
        "shared": {
            "artifacts": {}
        },
        "svn": {
            "tools": {
                "consistency_check.py": "file",
                "folder_structure.json": "file",
                "install_desktop_file.sh": "file",
                "rollback_blender.py": "file",
                "run_blender.py": "file",
                "update_addons.py": "file",
                "update_blender.py": "file"
            }
        },
        "local": {
            "blender": {},
            "scripts": {},
            "config": {}
        }
    }
}
scripts/project-tools/init_project_folder_structure.py (Executable file, 54 lines)
@@ -0,0 +1,54 @@
#!/usr/bin/env python3

import argparse
import os
import pathlib
import json
import shutil
import sys


def valid_dir_arg(value):
    """Determine if the value is a valid directory"""
    filepath = pathlib.Path(value)

    if not filepath.exists() or not filepath.is_dir():
        msg = f"Error! This is not a directory: {value}"
        raise argparse.ArgumentTypeError(msg)
    else:
        return filepath


def create_folder_structure(cur_path, path_dict, source_folder):
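    """Recursively create the folders described by path_dict under cur_path.

    Entries that map to nested dicts become folders; other entries are treated
    as files and copied from source_folder if they don't exist yet.
    """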
    for path in path_dict:
        # Get next path to check for consistency
        next_path = (cur_path / path).resolve()
        print("Checking path: %s" % next_path)
        nested_item = path_dict[path]
        if type(nested_item) is not dict:
            # This is a file we should copy over
            if next_path.exists():
                continue
            print(f"Copying over: {next_path.name}")
            shutil.copy(source_folder / next_path.name, next_path)
        else:
            print(f"Creating folder: {next_path}")
            os.makedirs(next_path, exist_ok=True)
            create_folder_structure(next_path, nested_item, source_folder)


def main(args):
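    """Parse arguments and generate the project structure.

    Usage: init_project_folder_structure.py [-t/--target DIR]
    Without --target, the structure is created two levels above the current
    working directory.
    """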
    parser = argparse.ArgumentParser(description="Generate project structure.")
    parser.add_argument("-t", "--target", type=valid_dir_arg)
    args = parser.parse_args(args)
    target_folder = args.target or pathlib.Path.cwd().parent.parent
    folder_structure = pathlib.Path(__file__).parent / "folder_structure.json"

    with open(folder_structure) as json_file:
        path_dict = json.load(json_file)
        create_folder_structure(target_folder, path_dict["../../"], folder_structure.parent)
        print("Done!")


if __name__ == "__main__":
    main(sys.argv[1:])
scripts/project-tools/install_desktop_file.sh (Executable file, 13 lines)
@@ -0,0 +1,13 @@
#!/bin/bash

# Make sure we are in this file's directory
cd "$(dirname "$0")"

PROJECT_NAME="Pets"
DESKTOP_FILE_DST="$HOME/.local/share/applications/blender_$PROJECT_NAME.desktop"
BLENDER_BIN_PATH=$(realpath ./run_blender.py)

cp ../../local/blender/linux/blender.desktop "$DESKTOP_FILE_DST"

# Update the .desktop file data
sed -i -e "s:Exec=blender:Exec=$BLENDER_BIN_PATH:" -e "s:Blender:Blender $PROJECT_NAME:" "$DESKTOP_FILE_DST"
scripts/project-tools/pyproject.toml (Normal file, 4 lines)
@@ -0,0 +1,4 @@
[tool.black]
line-length = 100
include = '\.pyi?$'
skip-string-normalization = true
scripts/project-tools/rollback_blender.py (Executable file, 62 lines)
@@ -0,0 +1,62 @@
#!/usr/bin/env python3
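# Roll back the Blender build in shared/artifacts/blender to one of the
# snapshots stored in shared/artifacts/blender/previous, chosen interactively.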

from pathlib import Path
import filecmp
import os
import shutil

# The project base path (where shared, local and svn are located)
PATH_BASE = Path(__file__).resolve().parent.parent.parent
PATH_ARTIFACTS = PATH_BASE / 'shared' / 'artifacts' / 'blender'
PATH_PREVIOUS = PATH_ARTIFACTS / 'previous'
BACKUP_DIR = PATH_PREVIOUS / '00'

if not BACKUP_DIR.exists():
    BACKUP_DIR.mkdir()
    # Backup the current files
    for file in PATH_ARTIFACTS.iterdir():
        if file.is_file():
            shutil.copy(file, BACKUP_DIR)

cur_date_file = PATH_ARTIFACTS / "download_date"

paths = sorted(Path(PATH_PREVIOUS).iterdir())

print("Available builds:\n")

for index, path in enumerate(paths):
    date_file = path / "download_date"
    if not date_file.exists():
        print("ERROR: The backup folder %s is missing a datefile, exiting!" % path)
        exit(1)

    with open(date_file, 'r') as file:
        date = file.read().rstrip()

    if filecmp.cmp(cur_date_file, date_file):
        print("\033[1mID:\033[0m\033[100m%3i (%s) <current>\033[0m" % (index, date))
    else:
        print("\033[1mID:\033[0m%3i (%s)" % (index, date))

input_error_mess = "Please select an index between 0 and " + str(len(paths) - 1)
selected_index = 0

while True:
    index_str = input("Select which Blender build number to switch to. (press ENTER to confirm): ")
    if not index_str.isnumeric():
        print(input_error_mess)
        continue
    index = int(index_str)
    if index >= 0 and index < len(paths):
        selected_index = index
        break
    print(input_error_mess)

# Remove current files and move the selected snapshot into current folder
for file in PATH_ARTIFACTS.iterdir():
    if file.is_file():
        os.remove(file)

for file in paths[selected_index].iterdir():
    # Everything in here should be a file, but check for sanity either way.
    if file.is_file():
        shutil.copy(file, PATH_ARTIFACTS)
scripts/project-tools/run_blender.py (Executable file, 254 lines)
@@ -0,0 +1,254 @@
#!/usr/bin/env python3

import filecmp
import glob
import logging
import os
import platform
import shutil
import subprocess
import sys
import tempfile
import zipfile

from pathlib import Path


# The project base path (where shared, local and svn are located)
PATH_BASE = Path(__file__).resolve().parent.parent.parent
PATH_ARTIFACTS = PATH_BASE / 'shared' / 'artifacts'
PATH_LOCAL = PATH_BASE / 'local'


def setup_logger():
    # Create a logger
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)

    # Create a StreamHandler that outputs log messages to stdout
    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setLevel(logging.DEBUG)

    # Create a formatter for the log messages
    formatter = logging.Formatter('%(levelname)s - %(message)s')

    # Set the formatter for the StreamHandler
    stream_handler.setFormatter(formatter)

    # Add the StreamHandler to the logger
    logger.addHandler(stream_handler)

    return logger


logger = setup_logger()


def extract_dmg(dmg_file: Path, internal_path, dst_path: Path):
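    """Copy internal_path out of a mounted dmg into dst_path (macOS only: uses hdiutil and ditto)."""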
    # Execute hdiutil to mount the dmg file
    mount_process = subprocess.run(
        ['hdiutil', 'attach', dmg_file, '-plist'], capture_output=True, text=True
    )
    mount_output = mount_process.stdout

    # Parse the mount_output to retrieve the mounted volume name
    import plistlib

    plist_data = plistlib.loads(mount_output.encode('utf-8'))
    mount_point = plist_data['system-entities'][0]['mount-point']

    # Ensure destination directory exists
    dst_path = dst_path / internal_path
    dst_path.mkdir(parents=True, exist_ok=True)

    # Extract the contents of the mounted dmg to the destination directory
    file_in_dmg = os.path.join(mount_point, internal_path)
    subprocess.run(['ditto', file_in_dmg, dst_path])

    # Unmount the dmg file
    subprocess.run(['hdiutil', 'detach', mount_point])


def extract_tar_xz(file_path: Path, dst_path: Path):
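    """Extract a .tar.xz archive into dst_path, stripping the archive's top-level folder."""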
    dst_path.mkdir(parents=True, exist_ok=True)
    subprocess.run(
        [
            'tar',
            'xf',
            file_path,
            '--directory',
            dst_path,
            '--strip-components=1',
            '--checkpoint=.1000',
        ]
    )


def extract_zip(file_path: Path, dst_path: Path):
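    """Extract a zip archive whose single top-level folder becomes dst_path."""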
    temp_dir = tempfile.mkdtemp()
    with zipfile.ZipFile(file_path, 'r') as zip_ref:
        zip_ref.extractall(temp_dir)

    try:
        src_path = [subdir for subdir in Path(temp_dir).iterdir()][0]
    except IndexError:
        logger.fatal("The archive %s does not contain any directory" % file_path.name)
        sys.exit(1)

    # Rename the archive's top-level folder to dst_path itself (dst_path must not exist yet)
    dst_path.parent.mkdir(parents=True, exist_ok=True)
    shutil.move(src_path, dst_path)

    shutil.rmtree(temp_dir)


def update_addon(addon_zip_name, path_in_zip_to_extract=''):
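    """Install or update one add-on bundle from shared artifacts into local/scripts/addons.

    The .sha256 file next to the archive decides whether an update is needed,
    and the .files record tracks what was installed so it can be cleaned up.
    """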
    addon_zip_sha = addon_zip_name + '.sha256'
    # This is the file that records all toplevel folders/files installed by this addon.
    # It is used to clean up old files and folders when updating or removing add-ons.
    addon_zip_files = addon_zip_name + '.files'

    # Check if we have the latest add-ons from shared
    addon_artifacts_folder = PATH_ARTIFACTS / 'addons'
    artifact_archive = addon_artifacts_folder / addon_zip_name
    artifact_checksum = addon_artifacts_folder / addon_zip_sha

    if not artifact_checksum.exists():
        logger.error("Missing file %s" % artifact_checksum)
        logger.error("Could not update add-ons")
        return

    local_checksum = PATH_LOCAL / addon_zip_sha

    if local_checksum.exists():
        if filecmp.cmp(local_checksum, artifact_checksum):
            logger.info("Already up to date")
            return

    if not artifact_archive.exists():
        logger.error("Shasum exists but the archive file %s does not!" % artifact_archive)
        logger.error("Could not update add-ons")
        return

    # Extract the archive in a temp location and move the add-ons content to local
    tmp_dir = Path(tempfile.mkdtemp())

    # Extract the zip file to the temporary directory
    with zipfile.ZipFile(artifact_archive, 'r') as zip_ref:
        zip_ref.extractall(tmp_dir)

    # Get the path of the folder to copy
    src_path_base = tmp_dir / path_in_zip_to_extract
    dst_path_base = PATH_LOCAL / 'scripts' / 'addons'

    # Remove all files previously installed by the archive
    local_installed_files = PATH_LOCAL / addon_zip_files
    if local_installed_files.exists():
        with open(local_installed_files) as file:
            lines = [line.rstrip() for line in file]
        for folder in lines:
            shutil.rmtree(dst_path_base / folder)

    # Get a list of directories inside the given directory
    addons = [subdir.name for subdir in src_path_base.iterdir() if subdir.is_dir()]

    with open(local_installed_files, 'w') as f:
        for addon_name in addons:
            f.write("%s\n" % addon_name)

    for addon_name in addons:
        logger.debug("Moving %s" % addon_name)
        src_dir_addon = src_path_base / addon_name
        dst_dir_addon = dst_path_base / addon_name
        shutil.move(src_dir_addon, dst_dir_addon)

    # Clean up the temporary directory
    shutil.rmtree(tmp_dir)

    # Update the sha256 file
    shutil.copy(artifact_checksum, local_checksum)


def update_blender():
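    """Replace the local Blender build with the one from shared artifacts if its checksum changed."""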
    system_name = platform.system().lower()
    architecture = platform.machine()

    # Check if we have the latest blender archive from shared
    artifacts_path = PATH_ARTIFACTS / 'blender'
    archive_name_pattern = "blender*" + system_name + "." + architecture + "*.sha256"

    # Look for the appropriate Blender archive for this system
    matched_archives = glob.glob(str(artifacts_path / archive_name_pattern))

    # Check if we found any files
    if len(matched_archives) != 1:
        if len(matched_archives) == 0:
            logger.error("No Blender archives found for this system!")
            logger.error("System is: %s %s" % (system_name, architecture))
            return
        else:
            logger.error(
                "More than one candidate archive was found for this system. Only one is allowed!"
            )
            logger.error("The following candidates were found: %s" % str(matched_archives))
            return

    blender_build_checksum = Path(matched_archives[0])
    blender_build_archive = blender_build_checksum.with_suffix('')

    if not blender_build_archive.exists():
        logger.error(
            "Shasum exists but the target Blender archive %s does not!" % blender_build_archive
        )
        logger.error("Could not update blender")
        return

    local_checksum = PATH_LOCAL / 'blender' / f"{system_name}.sha256"

    if local_checksum.exists():
        if filecmp.cmp(local_checksum, blender_build_checksum):
            logger.info("Already up to date")
            return

    src = artifacts_path / blender_build_archive
    dst = PATH_LOCAL / 'blender' / system_name
    if dst.exists():
        shutil.rmtree(dst)

    if system_name == 'linux':
        extract_tar_xz(src, dst)
    elif system_name == 'darwin':
        extract_dmg(src, 'Blender.app', dst)
    elif system_name == 'windows':
        extract_zip(src, dst)
    shutil.copy(blender_build_checksum, local_checksum)


def launch_blender():
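    """Launch the local Blender build with the project's own config and scripts directories."""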
    system_name = platform.system().lower()
    blender_path_base = PATH_LOCAL / 'blender' / system_name
    if system_name == 'linux':
        blender_path = blender_path_base / 'blender'
    elif system_name == 'darwin':
        blender_path = blender_path_base / 'Blender.app' / 'Contents' / 'MacOS' / 'Blender'
    elif system_name == 'windows':
        blender_path = blender_path_base / 'blender.exe'
    else:
        logger.fatal("Unsupported platform: %s" % system_name)
        sys.exit(1)

    os.environ['BLENDER_USER_CONFIG'] = str(PATH_LOCAL / 'config')
    os.environ['BLENDER_USER_SCRIPTS'] = str(PATH_LOCAL / 'scripts')
    subprocess.run([blender_path])


def update_addons():
    path_in_zip_to_extract = Path('blender-studio-pipeline/scripts-blender/addons')
    update_addon('blender-studio-pipeline-main.zip', path_in_zip_to_extract)


if __name__ == '__main__':
    logger.info('Updating Add-ons')
    update_addons()
    logger.info('Updating Blender')
    update_blender()
    logger.info('Launching Blender')
    launch_blender()
scripts/project-tools/setup.cfg (Normal file, 2 lines)
@@ -0,0 +1,2 @@
[pycodestyle]
max-line-length = 100
scripts/project-tools/update_addons.py (Executable file, 46 lines)
@@ -0,0 +1,46 @@
#!/usr/bin/env python3

import glob
import hashlib
import os
import pathlib
import requests


def download_file(url, out_folder, filename):
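    """Download url into out_folder/filename and write a matching .sha256 file next to it."""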
    print("Downloading: " + url)
    local_filename = out_folder / filename

    # TODO: we can't check any shasums before downloading, so always remove and redownload everything for now
    prev_downloaded_files = glob.glob(f"{local_filename}*")
    for file in prev_downloaded_files:
        os.remove(file)

    # NOTE the stream=True parameter below
    with requests.get(url, stream=True) as r:
        r.raise_for_status()
        with open(local_filename, 'wb') as f:
            for chunk in r.iter_content(chunk_size=None):
                if chunk:
                    f.write(chunk)

    local_hash_filename = local_filename.with_suffix(".zip.sha256")
    with open(local_filename, "rb") as f:
        # hashlib.file_digest requires Python 3.11+
        digest = hashlib.file_digest(f, "sha256")
    with open(local_hash_filename, "w") as hash_file:
        hash_file.write(digest.hexdigest())

    return local_filename


current_file_folder_path = pathlib.Path(__file__).parent
download_folder_path = (current_file_folder_path / "../../shared/artifacts/addons/").resolve()

# Ensure that the download directory exists
os.makedirs(download_folder_path, exist_ok=True)

download_file(
    "https://projects.blender.org/studio/blender-studio-pipeline/archive/main.zip",
    download_folder_path,
    "blender-studio-pipeline-main.zip",
)
scripts/project-tools/update_blender.py (Executable file, 134 lines)
@@ -0,0 +1,134 @@
#!/usr/bin/env python3

import email.utils
import glob
import hashlib
import os
import pathlib
import re
import requests
import shutil


HOMEPAGE = "https://builder.blender.org/download/"

BLENDER_BRANCH = "main"


def download_file(url, out_folder):
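    """Stream url into out_folder, using the last URL path segment as the file name."""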
    print("Downloading: " + url)
    local_filename = out_folder / url.split('/')[-1]
    # NOTE the stream=True parameter below
    with requests.get(url, stream=True) as r:
        r.raise_for_status()
        with open(local_filename, 'wb') as f:
            for chunk in r.iter_content(chunk_size=None):
                if chunk:
                    f.write(chunk)
    return local_filename


def shasum_matches(file, sha_sum):
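    """Return True if file's sha256 matches sha_sum (hashlib.file_digest requires Python 3.11+)."""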
    with open(file, "rb") as f:
        digest = hashlib.file_digest(f, "sha256")
        return digest.hexdigest() == sha_sum


current_file_folder_path = pathlib.Path(__file__).parent
download_folder_path = (current_file_folder_path / "../../shared/artifacts/blender").resolve()
backup_folder_path = download_folder_path / "previous/current_snapshot"

os.makedirs(download_folder_path, exist_ok=True)

# Backup the old files
os.makedirs(backup_folder_path, exist_ok=True)

for f in os.listdir(download_folder_path):
    path_to_file = download_folder_path / f
    if path_to_file.is_file():
        shutil.copy(path_to_file, backup_folder_path)

# Get all urls for the blender builds
platforms_dict = {
    "windows": "zip",
    "darwin.x86_64": "dmg",
    "darwin.arm64": "dmg",
    "linux": "tar.xz",
}

download_info = []
branch_string = "+" + BLENDER_BRANCH
reqs = requests.get(HOMEPAGE)
for match in re.findall('<a href=[' "'" '"][^"' "'" ']*[' "'" '"]', reqs.text):
    if branch_string in match:
        # Strip href and quotes around the url
        download_url = match[9:-1]
        for platform in platforms_dict:
            file_extension = platforms_dict[platform]
            if re.search(platform + ".*" + file_extension + "$", download_url):
                download_info.append((platform, download_url))

updated_current_files = False
new_files_downloaded = False
# Download new builds if the shasums don't match
for info in download_info:
    platform = info[0]
    file_extension = platforms_dict[platform]
    url = info[1]
    url_sha = url + ".sha256"
    sha = requests.get(url_sha).text.strip().lower()

    current_platform_file = glob.glob(f"{download_folder_path}/*{platform}*{file_extension}")
    if len(current_platform_file) > 1:
        print(
            f"Platform {platform} has multiple downloaded files in the artifacts directory, exiting!"
        )
        exit(1)
    # Check if we need to download the file by looking at the shasum of the currently downloaded file (if any)
    if len(current_platform_file) == 1:
        current_file = current_platform_file[0]
        if shasum_matches(current_file, sha):
            # We already have the current file
            continue
        else:
            updated_current_files = True
            os.remove(current_file)
            os.remove(current_file + ".sha256")

    download_file(url_sha, download_folder_path)
    downloaded_file = download_file(url, download_folder_path)
    # Check that the file we downloaded is not corrupt
    if not shasum_matches(downloaded_file, sha):
        print(f"Downloaded file {downloaded_file} does not match its shasum, exiting!")
        exit(1)
    new_files_downloaded = True

if new_files_downloaded:
    # Save download date for use in the rollback script
    with open(download_folder_path / "download_date", "w") as date_file:
        date_file.write(email.utils.formatdate(localtime=True))
    print("Updated to the latest files")

if updated_current_files:
    backup_path = download_folder_path / "previous"
    # Put the current backup first in the directory listing
    os.rename(backup_folder_path, backup_path / "00")
    backup_dirs = os.listdir(backup_path)
    backup_dirs.sort(reverse=True)

    # Remove older backup folders if there are more than 10
    folders_to_remove = len(backup_dirs) - 10
    if folders_to_remove > 0:
        for dir in backup_dirs[:folders_to_remove]:
            shutil.rmtree(backup_path / dir)
        backup_dirs = backup_dirs[folders_to_remove:]

    # Bump all folder names.
    # Assign a number to each folder; process in reverse order so no folder gets overwritten.
    folder_number = len(backup_dirs)
    for dir in backup_dirs:
        os.rename(backup_path / dir, backup_path / str(folder_number).zfill(2))
        folder_number -= 1
else:
    shutil.rmtree(backup_folder_path)
    print("Nothing downloaded, everything was up to date")