Add project-tools #142

Merged
Francesco Siddi merged 26 commits from ZedDB/blender-studio-pipeline:project-helper-tools into main 2023-08-31 20:33:04 +02:00
6 changed files with 536 additions and 0 deletions
Showing only changes of commit c13209a639 - Show all commits

View File

@ -0,0 +1,81 @@
#!/usr/bin/python3
import os
import pathlib
def create_path_dict(startpath, max_depth):
    """Recursively scan 'startpath' and return its layout as a nested dict.

    Directories become nested dicts keyed by name; each file maps to the
    marker string "file".  The walk follows symlinks, so 'max_depth' acts as
    a safety net against symlink loops.  The tree is also printed while it is
    traversed so the scanned structure can be inspected.
    """
    path_structure_dict = {}
    # Bug fix: this previously read the module-level 'start_search_path'
    # instead of the 'startpath' parameter, so any other argument was ignored.
    start_folder_name = os.path.basename(startpath)
    for root, dirs, files in os.walk(startpath, followlinks=True):
        # We are only interested in the files and folders inside the start path.
        cur_path = root.replace(startpath, start_folder_name)
        level = cur_path.count(os.sep)
        # Sanity check. We don't expect the directory tree to be too deep.
        # Therefore, we will stop if we go too deep.
        # This avoids infinite loops that can happen when we follow symlinks.
        if level > max_depth:
            print("We have gone unexpectedly deep in the file structure, stopping...")
            exit(1)
        # Walk down the already-built part of the dict to the parent of
        # 'root', then insert a new dict for 'root' containing its files.
        # (os.walk is top-down, so the parent entries always exist already.)
        nested_dict = path_structure_dict
        key_path = cur_path.split(os.sep)
        final_key = key_path[-1]
        for key in key_path[:-1]:
            nested_dict = nested_dict[key]
        nested_dict[final_key] = {f: "file" for f in files}
        # Print the file structure so we can see the traversed file tree.
        indent = ' ' * 4 * level
        print('{}{}/'.format(indent, os.path.basename(root)))
        subindent = ' ' * 4 * (level + 1)
        for f in files:
            print('{}{}'.format(subindent, f))
    return path_structure_dict
def check_if_structure_is_consistent(start_path, path_dict, error_list):
    """Verify that the directory tree under 'start_path' matches 'path_dict'.

    A dict value means the key must exist as a directory (checked
    recursively); any non-dict value (the marker string "file") means the key
    must be a regular file.  Problems are appended to 'error_list' as strings.
    """
    for key in path_dict:
        cur_path = str(start_path) + os.sep + key
        print("Checking path: " + cur_path)
        if not os.path.exists(cur_path):
            error_list += ["ERROR: " + cur_path + " doesn't exist!"]
            continue
        nested_item = path_dict[key]
        if type(nested_item) is not dict:
            # This must be a file, warn if it is not
            if not os.path.isfile(cur_path):
                error_list += ["ERROR: " + cur_path + " is not a file, when it should be!"]
            # Bug fix: the code previously fell through and recursed into
            # non-dict entries as well, iterating the "file" marker string
            # character by character and producing bogus error entries.
            continue
        check_if_structure_is_consistent(cur_path, nested_item, error_list)
# Locate the project root.  This script is assumed to live three levels
# below it (e.g. <project>/svn/tools/consistency_check.py).
current_file_path = pathlib.Path(__file__)
start_search_path = current_file_path.parent.parent.parent.resolve()
#path_dict = create_path_dict(str(start_search_path), 5)
# path_dict pre-generated. This is the structure the consistency check will ensure is there
path_dict = {'pets': {'shared': {'pets': {'artifacts': {}}}, 'svn': {'tools': {'consistency_check.py': 'file'}}, 'local': {'blender': {}, 'scripts': {}, 'config': {}}}}
# TODO perhaps make a function to pretty print out the path_dict for easier inspection
error_list = []
check_if_structure_is_consistent(start_search_path.parent, path_dict, error_list)
print()
# Report the outcome: any collected error means the check failed.
if error_list:
    print("Consistency check: FAILED")
    print()
    for error in error_list:
        print(error)
    # Exit with error as we didn't pass the consistency check
    exit(1)
print("Consistency check: PASSED")
exit(0)

View File

@ -0,0 +1,72 @@
#!/bin/bash
# Interactively roll back the locally cached Blender build to one of the
# archived previous builds.
# Get all archived sha hashes to use as ID numbers
ARCHIVE_DIR=../../shared/pets/artifacts/blender/previous/
# Bug fix: abort if the archive directory is missing.  The "rm ../*$OS*"
# below would otherwise run relative to whatever directory we started in.
cd "$ARCHIVE_DIR" || exit 1
# TODO only rollbacks for one OS version
OS=linux
# Create an array with the available builds/files (newest first)
available_builds=($(ls -t *$OS*.sha256))
installed_build=$(cd ../ && ls *$OS*.sha256)
echo $installed_build
echo -e "Available builds:\n"
valid_ids=()
ITER=0
for file in ${available_builds[@]}; do
    file_date=$(stat -c '%y' $file)
    # Cut the date string down to only "date hours:min"
    file_date=${file_date:0:19}
    if [ $file == $installed_build ]; then
        # Highlight the entry that is currently installed
        printf "\e[1mID:\e[0m \e[100m%3s " $ITER
        printf "(%s)" "$file_date"
        printf " <current>\e[0m"
    else
        printf "\e[1mID:\e[0m %3s " $ITER
        printf "(%s)" "$file_date"
    fi
    valid_ids+=($ITER)
    echo
    # Shell arithmetic instead of the external "expr" utility
    ITER=$((ITER + 1))
done
echo -e "\n"
choosen_blend_id=-1
prompt="Select which Blender build number to switch to. (press ENTER to confirm): "
while read -rp "$prompt" num && [[ "$num" ]]; do
    # Check if "num" is a valid number.
    [[ "$num" != *[[:digit:]]* ]] &&
        { echo "You need to choose a number!"; continue; }
    if [[ ! " ${valid_ids[*]} " =~ " ${num} " ]]; then
        echo "$num is not an available ID!"
        continue
    fi
    choosen_blend_id=$num
    break
done
# Plain ENTER (no selection): leave the installed build untouched.
((choosen_blend_id < 0)) && exit 0
# Strip the trailing ".sha256" (7 characters) to get the archive base name.
choose_file=${available_builds[$choosen_blend_id]::-7}
# Remove the build we are replacing
rm ../*$OS*
# Copy over the chosen build
cp $choose_file* ../

View File

@ -0,0 +1,219 @@
#!/usr/bin/env python3
import hashlib
import logging
import os
import platform
import shutil
import subprocess
import sys
import tempfile
import zipfile
from dataclasses import dataclass
from pathlib import Path
# The project base path (where shared, local and svn are located)
PATH_BASE = Path(__file__).resolve().parent.parent.parent
# Downloaded build/add-on artifacts on shared storage (read from here).
PATH_ARTIFACTS = PATH_BASE / 'shared' / 'artifacts'
# Per-machine state: installed Blender builds, scripts, config (written here).
PATH_LOCAL = PATH_BASE / 'local'
def setup_logger():
    """Configure the root logger to echo DEBUG+ messages to stdout and return it."""
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)
    # Send everything to stdout with a simple "LEVEL - message" format.
    handler = logging.StreamHandler(sys.stdout)
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter('%(levelname)s - %(message)s'))
    root.addHandler(handler)
    return root
logger = setup_logger()
@dataclass
class BlenderBuild:
    """A Blender build artifact pair found in the shared artifacts directory.

    Both fields default to None so an "empty" instance can be created before
    a matching artifact is found (the previous annotations were the value
    None, which is not a type, and the fields had no defaults).
    """
    # Path of the build archive (the checksum filename minus '.sha256').
    archive: "Path | None" = None
    # Filename of the .sha256 side-car file.
    checksum: "str | None" = None
def extract_dmg(dmg_file: Path, internal_pah, dst_path: Path):
    """Copy 'internal_pah' (e.g. "Blender.app") out of a dmg disk image into
    dst_path / internal_pah.  macOS only: relies on hdiutil and ditto."""
    import plistlib
    # Mount the image; '-plist' makes hdiutil describe the mount on stdout.
    attach = subprocess.run(
        ['hdiutil', 'attach', dmg_file, '-plist'], capture_output=True, text=True
    )
    # The mount point is reported in the plist's system entities.
    # NOTE(review): assumes the first entity carries 'mount-point' — confirm
    # for multi-partition images.
    entities = plistlib.loads(attach.stdout.encode('utf-8'))['system-entities']
    mount_point = entities[0]['mount-point']
    # Ensure the destination directory exists before copying into it.
    target = dst_path / internal_pah
    target.mkdir(parents=True, exist_ok=True)
    # ditto preserves macOS metadata while copying the app bundle.
    subprocess.run(['ditto', os.path.join(mount_point, internal_pah), target])
    # Always unmount the image again.
    subprocess.run(['hdiutil', 'detach', mount_point])
def extract_tar_xz(file_path: Path, dst_path: Path):
    """Extract a .tar.xz archive into dst_path, stripping the archive's
    single top-level directory so the contents land directly in dst_path.

    Raises subprocess.CalledProcessError if tar fails; previously a failed
    extraction was silently ignored, after which the caller would still copy
    the checksum file and believe the build was installed.
    """
    dst_path.mkdir(parents=True, exist_ok=True)
    subprocess.run(
        ['tar', 'xf', file_path, '--directory', dst_path, '--strip-components=1'],
        check=True,
    )
def extract_zip(file_path: Path, dst_path: Path):
    """Extract a zip archive into dst_path, stripping the archive's single
    top-level directory (mirrors the --strip-components=1 behavior of
    extract_tar_xz).

    Exits the process if the archive is empty.
    """
    temp_dir = tempfile.mkdtemp()
    with zipfile.ZipFile(file_path, 'r') as zip_ref:
        zip_ref.extractall(temp_dir)
    try:
        src_path = [subdir for subdir in Path(temp_dir).iterdir()][0]
    except IndexError:
        logger.fatal("The archive %s does not contain any directory" % file_path.name)
        sys.exit(1)
    dst_path.mkdir(parents=True, exist_ok=True)
    # Bug fix: move the *contents* of the top-level directory, not the
    # directory itself.  shutil.move(src_path, dst_path) nested the archive's
    # top folder inside dst_path, so e.g. blender.exe ended up one level
    # deeper than launch_blender() expects.
    for item in src_path.iterdir():
        shutil.move(str(item), str(dst_path / item.name))
    shutil.rmtree(temp_dir)
def compare_checksum(file1, file2):
    """Return True when the two files have identical SHA-256 digests."""
    digests = []
    for path in (file1, file2):
        with open(path, 'rb') as handle:
            digests.append(hashlib.sha256(handle.read()).hexdigest())
    return digests[0] == digests[1]
def update_addons():
    """Sync the studio pipeline add-ons from shared storage into the local
    scripts directory, skipping the work when the local copy already matches
    the shared main.zip sha256 side-car file."""
    studio_pipeline_artifacts = PATH_ARTIFACTS / 'blender-studio-pipeline'
    artifact_checksum = studio_pipeline_artifacts / 'main.zip.sha256'
    if not artifact_checksum.exists():
        logger.error("Missing file %s" % artifact_checksum)
        logger.error("Could not update add-ons")
        return
    local_checksum = PATH_LOCAL / 'main.zip.sha256'
    if local_checksum.exists() and compare_checksum(local_checksum, artifact_checksum):
        logger.info("Already up to date")
        return
    # Unpack the archive into a scratch directory, then move each add-on
    # into place, replacing any previously installed copy.
    tmp_dir = Path(tempfile.mkdtemp())
    with zipfile.ZipFile(studio_pipeline_artifacts / 'main.zip', 'r') as zip_ref:
        zip_ref.extractall(tmp_dir)
    src_path_base = tmp_dir / 'blender-studio-pipeline' / 'scripts-blender' / 'addons'
    dst_path_base = PATH_LOCAL / 'scripts' / 'addons'
    for src_dir_addon in (d for d in src_path_base.iterdir() if d.is_dir()):
        logger.debug("Moving %s" % src_dir_addon.name)
        dst_dir_addon = dst_path_base / src_dir_addon.name
        if dst_dir_addon.exists():
            shutil.rmtree(dst_dir_addon)
        shutil.move(src_dir_addon, dst_dir_addon)
    # Clean up the temporary directory
    shutil.rmtree(tmp_dir)
    # Remember which archive version is now installed locally.
    shutil.copy(artifact_checksum, local_checksum)
def update_blender():
    """Install/refresh the local Blender build from the shared artifacts.

    Picks the artifact matching this machine's OS and CPU architecture,
    compares sha256 side-car files to skip redundant work, and extracts the
    archive with the platform-appropriate helper.
    """
    system_name = platform.system().lower()
    architecture = platform.machine()
    artifacts_path = PATH_ARTIFACTS / 'blender'
    # Bug fix: this previously assigned the BlenderBuild *class* (missing
    # parentheses) and then mutated class attributes; use a real instance.
    blender_build = BlenderBuild(None, None)
    # Find the checksum file for this platform; the archive shares its name
    # minus the trailing '.sha256' suffix.
    for file_path in artifacts_path.iterdir():
        if file_path.is_file() \
        and system_name in file_path.name \
        and architecture in file_path.name \
        and file_path.name.endswith('sha256'):
            blender_build.checksum = file_path.name
            blender_build.archive = file_path.with_suffix('')
            break
    # Bug fix: with no matching artifact the attribute access below used to
    # raise AttributeError; report the problem and bail out instead.
    if blender_build.checksum is None:
        logger.error("No Blender build found for %s/%s in %s" % (system_name, architecture, artifacts_path))
        logger.error("Could not update Blender")
        return
    artifact_checksum = artifacts_path / blender_build.checksum
    if not artifact_checksum.exists():
        logger.error("Missing file %s" % artifact_checksum)
        # Bug fix: message previously said "add-ons" (copy-paste from
        # update_addons).
        logger.error("Could not update Blender")
        return
    local_checksum = PATH_LOCAL / 'blender' / f"{system_name}.sha256"
    if local_checksum.exists():
        if compare_checksum(local_checksum, artifact_checksum):
            logger.info("Already up to date")
            return
    src = artifacts_path / blender_build.archive
    dst = PATH_LOCAL / 'blender' / system_name
    # Dispatch to the platform-specific extraction helper.
    if system_name == 'linux':
        extract_tar_xz(src, dst)
    elif system_name == 'darwin':
        extract_dmg(src, 'Blender.app', dst)
    elif system_name == 'windows':
        extract_zip(src, dst)
    # Remember which build is now installed locally.
    shutil.copy(artifact_checksum, local_checksum)
def launch_blender():
    """Start the locally installed Blender build for this platform.

    Points BLENDER_USER_SCRIPTS at the local scripts directory so the synced
    add-ons are picked up; exits with status 1 on an unsupported platform.
    """
    system_name = platform.system().lower()
    blender_path_base = PATH_LOCAL / 'blender' / system_name
    # Per-platform location of the Blender executable inside the build dir.
    executables = {
        'linux': ('blender',),
        'darwin': ('Blender.app', 'Contents', 'MacOS', 'Blender'),
        'windows': ('blender.exe',),
    }
    if system_name not in executables:
        sys.exit(1)
    blender_path = blender_path_base.joinpath(*executables[system_name])
    # os.environ['BLENDER_USER_CONFIG'] = str(PATH_LOCAL / 'config')
    os.environ['BLENDER_USER_SCRIPTS'] = str(PATH_LOCAL / 'scripts')
    subprocess.run([blender_path])
if __name__ == '__main__':
    # Entry point: refresh the add-ons and the Blender build from shared
    # storage, then start Blender itself (blocks until Blender exits).
    logger.info('Update Add-ons')
    update_addons()
    logger.info('Update Blender')
    update_blender()
    logger.info('Launch Blender')
    launch_blender()

View File

@ -0,0 +1,66 @@
#!/bin/bash
# Update the local add-ons and Blender build from shared storage, then run
# the locally installed Blender.
# TODO error out if blender is not installed locally already and there is no blender to download on shared
cur_dir=$(pwd)
# Bug fix: everything below (including "rm -fr ./scripts/*") assumes we are
# inside the project's "local" directory, so abort if the cd fails.
cd ../../local || exit 1
DOWNLOAD_DIR=../shared/pets
update_addons() {
    zip_name="main.zip"
    # Check if we have the latest addons from shared
    if [ -f $zip_name.sha256 ]; then
        shasum=$(cat $zip_name.sha256)
        # Bug fix: if the download dir is unreachable, keep the currently
        # installed add-ons instead of wiping ./scripts below.
        pushd $DOWNLOAD_DIR/addons || return 1
        echo $shasum | sha256sum --check - && echo addons already at the latest version && popd && return 0
        popd
    fi
    rm -fr ./scripts/*
    extract_folder="blender-studio-pipeline/scripts-blender/addons"
    # Record the extracted content
    # Remove the first 5 lines, remove the last two lines, and extract the fourth column
    unzip -l $DOWNLOAD_DIR/addons/$zip_name "$extract_folder/*" | tail -n +5 | head -n -2 | awk '{ print $4 }' > $zip_name.contents
    unzip $DOWNLOAD_DIR/addons/$zip_name "$extract_folder/*" -d "./scripts"
    # Flatten the unzip output so the add-ons end up in ./scripts/addons
    mv ./scripts/$extract_folder ./scripts/
    rm -fr ./scripts/blender-studio-pipeline
    cp $DOWNLOAD_DIR/addons/$zip_name.sha256 .
}
update_blender() {
    os=linux
    # Ensure the os folder exists
    mkdir -p blender/$os
    if [ -f blender/$os.sha256 ]; then
        shasum=$(cat blender/$os.sha256)
        # Bug fix: keep the installed build if the download dir is missing.
        pushd $DOWNLOAD_DIR/artifacts/blender || return 1
        echo $shasum *$os*.tar.xz | sha256sum --check - && echo blender already at the latest version && popd && return 0
        popd
    fi
    rm -fr blender/$os/*
    echo Extracting Blender
    tar xf $DOWNLOAD_DIR/artifacts/blender/*$os*.tar.xz --directory blender/$os/ --strip-components=1 --checkpoint=.1000
    cp $DOWNLOAD_DIR/artifacts/blender/*$os*.sha256 blender/$os.sha256
}
update_addons
update_blender
os=linux
cd blender/$os || exit 1
export BLENDER_USER_CONFIG=../../config
export BLENDER_USER_SCRIPTS=../../scripts
# Actually launch Blender
./blender

View File

@ -0,0 +1,29 @@
#!/bin/bash
# Download add-on archives into the shared artifacts storage.
DOWNLOAD_DIR=../../shared/pets/artifacts/addons/
download_addon() {
    URL=$1
    OUT_NAME=$2
    # Ensure that the download directory exists
    mkdir -p $DOWNLOAD_DIR
    # Switch to the download directory; bug fix: abort rather than download
    # into the current directory if it is unreachable.
    pushd $DOWNLOAD_DIR || exit 1
    # TODO Can't check any shasums before downloading so always remove and redownload everything for now
    # -f: don't print an error when there is nothing to remove yet (first run)
    rm -f $OUT_NAME*
    # Download the addons repo
    wget $URL -O $OUT_NAME
    # NOTE(review): this hashes every .zip in the directory, not just
    # $OUT_NAME — fine while each download dir holds a single archive, but
    # verify before sharing a directory between add-ons.
    sha256sum *.zip > $OUT_NAME.sha256
    popd
}
# download_addon <url to addon zip> <output name zip>
# Special download dir for monorepo with addons
DOWNLOAD_DIR=../../shared/pets/artifacts/blender-studio-pipeline/
download_addon https://projects.blender.org/studio/blender-studio-pipeline/archive/main.zip blender-studio-pipeline-main.zip

View File

@ -0,0 +1,69 @@
#!/bin/bash
# Download the latest Blender builds from builder.blender.org into shared
# artifact storage, keeping up to 10 previous builds as rollback targets.
# Stop execution on error
set -e
HOMEPAGE="https://builder.blender.org/download/"
BLENDER_BRANCH=main
DOWNLOAD_DIR=../../shared/pets/artifacts/blender
# Ensure that the download directory exists
mkdir -p $DOWNLOAD_DIR
# Switch to the download directory
pushd $DOWNLOAD_DIR
# .zip == Windows
# .dmg == Mac
# .tar.xz == Linux
# Create a dictionary with the above information
declare -A extensions
extensions=( ["windows"]="zip" ["darwin.x86_64"]="dmg" ["darwin.arm64"]="dmg" ["linux"]="tar.xz" )
# Scrape all download links for the requested branch from the builder page.
latest_urls=$(wget --quiet -O - "$HOMEPAGE" | \
grep -io '<a href=['"'"'"][^"'"'"']*['"'"'"]' | \
sed -e 's/^<a href=["'"'"']//i' -e 's/["'"'"']$//i' | \
grep +$BLENDER_BRANCH)
# NOTE(review): darwin.x86_64 has an extension entry but is not in this
# list, so Intel-Mac builds are never downloaded — confirm this is intended.
for os in windows darwin.arm64 linux
do
    file_extension="${extensions[$os]}"
    latest=$(echo "$latest_urls" | grep $os | grep $file_extension$ )
    # Strip carriage returns from the sha sum as the windows one has them
    shasum=$(curl -s "$latest".sha256 | tr -d '\r')
    echo sum: "$shasum" *"$file_extension"
    # Check if we need to download the file by looking at the shasum of the currently downloaded file (if any)
    if [ ! -f *$file_extension ]; then
        # Make sure that we don't have any lingering sha files
        rm *$file_extension* || true # "|| true" is here to ensure that we catch the case were there are no files to remove for "set -e"
    else
        # Skip downloading if shasum matches the current file on disk
        echo $shasum *$file_extension | sha256sum --check - && echo $os: already at latest version && continue
        mkdir -p previous
        mv *$file_extension* previous/
        pushd previous
        # Remove older backups if there are more than 10
        num_old_files=$(ls -1 *$file_extension | wc -l)
        if [ "$num_old_files" -gt 10 ]; then
            # Bug fix: the old "tail $((10-$num_old_files))" passed a
            # negative (or zero) count without "-n"; "tail -n +11" instead
            # keeps the 10 newest files of the newest-first "ls -t" listing.
            files_to_remove=$(ls -t *$file_extension | tail -n +11)
            # Add the shasums as well.  Bug fix: the sed expression was
            # unterminated ('s/$/.sha256') and the unquoted echo collapsed
            # the file list onto a single line.
            shasums_to_remove=$(echo "$files_to_remove" | sed -e 's/$/.sha256/')
            rm $files_to_remove $shasums_to_remove
        fi
        popd
    fi
    wget -c "$latest"
    wget -c "$latest".sha256
    echo $shasum *$file_extension | sha256sum --check - || (echo Downloaded file does not match its shasum, exiting! && exit 1)
done
popd