Add project-tools #142
@@ -2,6 +2,7 @@
 import os
 import pathlib
+import json


 def create_path_dict(startpath, max_depth):
@@ -59,22 +60,19 @@ def check_if_structure_is_consistent(cur_path, path_dict, error_list):
             error_list += ["ERROR: %s doesn't exist!" % next_path]


-current_file_path = pathlib.Path(__file__).parent
-start_search_path = current_file_path.parent.parent.resolve()
+current_file_folder = pathlib.Path(__file__).parent
+start_search_path = current_file_folder.parent.parent.resolve()

 # path_dict = create_path_dict(str(start_search_path), 5)

 # path_dict is pre-generated. This is the structure the consistency check will ensure is there.
-path_dict = {
-    '../../': {
-        'shared': {'artifacts': {}},
-        'svn': {'tools': {'consistency_check.py': 'file'}},
-        'local': {'blender': {}, 'scripts': {}, 'config': {}},
-    }
-}
+path_dict = {}
+with open(current_file_folder / "folder_structure.json") as json_file:
+    path_dict = json.load(json_file)

 # TODO perhaps make a function to pretty print out the path_dict for easier inspection

 error_list = []
-check_if_structure_is_consistent(current_file_path, path_dict, error_list)
+check_if_structure_is_consistent(current_file_folder, path_dict, error_list)

 print()
 if len(error_list) == 0:
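Review note: the TODO above asks for a pretty printer for path_dict. A minimal sketch of such a helper (hypothetical, not part of this PR):

def print_path_dict(path_dict, indent=0):
    # Print one entry per line, indented by nesting depth.
    # Leaf values such as "file" are shown next to their name.
    for name, nested in path_dict.items():
        if isinstance(nested, dict):
            print("    " * indent + name + "/")
            print_path_dict(nested, indent + 1)
        else:
            print("    " * indent + f"{name} ({nested})")

Called as print_path_dict(path_dict) right after the json.load, it would render the same tree that folder_structure.json encodes.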

scripts/project-helper-tools/folder_structure.json (new file, 23 lines)
@@ -0,0 +1,23 @@
+{
+    "../../": {
+        "shared": {
+            "artifacts": {}
+        },
+        "svn": {
+            "tools": {
+                "consistency_check.py": "file",
+                "folder_structure.json": "file",
+                "install_desktop_file.sh": "file",
+                "rollback_blender.py": "file",
+                "run_blender.py": "file",
+                "update_addons.py": "file",
+                "update_blender.py": "file"
+            }
+        },
+        "local": {
+            "blender": {},
+            "scripts": {},
+            "config": {}
+        }
+    }
+}
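Note: consistency_check.py keeps a commented-out create_path_dict call, which suggests this JSON was generated from an existing tree. A sketch of regenerating it, assuming create_path_dict (defined in consistency_check.py) returns the same nested-dict shape and start_search_path is the value computed there:

import json

# Hypothetical regeneration step; depth 5 matches the commented-out call.
with open("folder_structure.json", "w") as json_file:
    json.dump(create_path_dict(str(start_search_path), 5), json_file, indent=4)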

scripts/project-helper-tools/init_project_folder_structure.py (new executable file, 33 lines)
@@ -0,0 +1,33 @@
+#!/usr/bin/env python3
+
+import os
+import pathlib
+import json
+import shutil
+
+
+def create_folder_structure(cur_path, path_dict, source_folder):
+    for path in path_dict:
+        # Get the next path to create or copy
+        next_path = (cur_path / path).resolve()
+        print(f"Checking path: {next_path}")
+        nested_item = path_dict[path]
+        if type(nested_item) is not dict:
+            # This is a file we should copy over
+            if next_path.exists():
+                continue
+            print(f"Copying over: {next_path.name}")
+            shutil.copy(source_folder / next_path.name, next_path)
+        else:
+            print(f"Creating folder: {next_path}")
+            os.makedirs(next_path, exist_ok=True)  # exist_ok makes re-runs safe
+            create_folder_structure(next_path, nested_item, source_folder)
+
+
+current_file_folder = pathlib.Path(__file__).parent
+with open(current_file_folder / "folder_structure.json") as json_file:
+    path_dict = json.load(json_file)
+target_folder = pathlib.Path("/tmp/pets")
+create_folder_structure(target_folder, path_dict["../../"], current_file_folder)
+print()
+print("Done!")
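Review note: the target folder is hard-coded to /tmp/pets, which reads like a debugging leftover. A hedged sketch of taking it from the command line instead (hypothetical flag name, not part of this PR):

import argparse
import pathlib

# Hypothetical CLI wrapper; --target would replace the hard-coded /tmp/pets.
parser = argparse.ArgumentParser(description="Initialize the project folder structure.")
parser.add_argument("--target", type=pathlib.Path, required=True,
                    help="folder in which the structure is created")
args = parser.parse_args()
target_folder = args.target.resolve()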

@@ -211,6 +211,7 @@ def update_blender():

     src = artifacts_path / blender_build_archive
     dst = PATH_LOCAL / 'blender' / system_name
-    shutil.rmtree(dst)
+    if dst.exists():
+        shutil.rmtree(dst)

     if system_name == 'linux':
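A note on the guard above: shutil.rmtree raises FileNotFoundError when the path does not exist, so checking dst.exists() first avoids a crash on a fresh install. An equivalent, if terser, alternative:

import pathlib
import shutil

dst = pathlib.Path("/tmp/example")  # placeholder for PATH_LOCAL / 'blender' / system_name
# ignore_errors=True also suppresses other OSErrors, so the explicit
# exists() check in the hunk is the more conservative choice.
shutil.rmtree(dst, ignore_errors=True)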

(deleted file, 26 lines)
@@ -1,26 +0,0 @@
-#!/bin/bash
-
-DOWNLOAD_DIR=../../shared/artifacts/addons/
-
-download_addon() {
-    URL=$1
-    OUT_NAME=$2
-
-    # Ensure that the download directory exists
-    mkdir -p $DOWNLOAD_DIR
-
-    # Switch to the download directory
-    pushd $DOWNLOAD_DIR
-
-    # TODO Can't check any shasums before downloading so always remove and redownload everything for now
-    rm $OUT_NAME*
-
-    # Download the addons repo
-    wget $URL -O $OUT_NAME
-    sha256sum $OUT_NAME > $OUT_NAME.sha256
-
-    popd
-}
-
-# download_addon <url to addon zip> <output name zip>
-download_addon https://projects.blender.org/studio/blender-studio-pipeline/archive/main.zip blender-studio-pipeline-main.zip
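Review note: the TODO in this deleted script names its main weakness: with no checksum known before downloading, it always removes and re-fetches everything. A sketch of a digest-aware download in Python (hypothetical helpers; the update_addons.py listed in folder_structure.json presumably replaces this script, but its contents are not part of this diff):

import hashlib
import pathlib
import urllib.request

def file_matches_sha256(path: pathlib.Path, expected: str) -> bool:
    # Hash the file on disk and compare against the published digest.
    if not path.exists():
        return False
    return hashlib.sha256(path.read_bytes()).hexdigest() == expected

def download_if_changed(url: str, out_path: pathlib.Path, expected_sha256: str) -> None:
    # Skip the download when the local copy already matches the digest.
    if file_matches_sha256(out_path, expected_sha256):
        print(f"{out_path.name}: already at latest version")
        return
    urllib.request.urlretrieve(url, out_path)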

(deleted file, 89 lines)
@@ -1,89 +0,0 @@
-#!/bin/bash
-
-# Stop execution on error
-set -e
-
-HOMEPAGE="https://builder.blender.org/download/"
-
-BLENDER_BRANCH=main
-DOWNLOAD_DIR=../../shared/artifacts/blender
-
-# Ensure that the download directory exists
-mkdir -p $DOWNLOAD_DIR
-
-# Switch to the download directory
-cd $DOWNLOAD_DIR
-
-# .zip == Windows
-# .dmg == Mac
-# .tar.xz == Linux
-
-# Create a dictionary with the above information
-declare -A extensions
-extensions=( ["windows"]="zip" ["darwin.x86_64"]="dmg" ["darwin.arm64"]="dmg" ["linux"]="tar.xz" )
-
-latest_urls=$(wget --quiet -O - "$HOMEPAGE" | \
-    grep -io '<a href=['"'"'"][^"'"'"']*['"'"'"]' | \
-    sed -e 's/^<a href=["'"'"']//i' -e 's/["'"'"']$//i' | \
-    grep +$BLENDER_BRANCH)
-
-# Create a snapshot of the existing files
-mkdir -p previous/current_snapshot
-# Back up the old files, if there are any
-cp * previous/current_snapshot || true
-
-updated_downloads=false
-downloaded_files=false
-
-for os in windows darwin.x86_64 darwin.arm64 linux
-do
-    file_extention="${extensions[$os]}"
-
-    latest=$(echo "$latest_urls" | grep $os | grep $file_extention$ )
-
-    # Strip carriage returns from the sha sum as the Windows ones have them
-    shasum=$(curl -s "$latest".sha256 | tr -d '\r')
-
-    current_os_file=*$os*$file_extention
-    # Check if we need to download the file by looking at the shasum of the currently downloaded file (if any)
-    if [ -f $current_os_file ]; then
-        # Skip downloading if the shasum matches the current file on disk
-        echo $shasum $current_os_file | sha256sum --check - && echo $os: already at latest version && continue
-
-        updated_downloads=true
-    fi
-    # Make sure that we don't have any lingering files from this os
-    rm $current_os_file* || true # "|| true" ensures we catch the case where there are no files to remove for "set -e"
-
-    wget -c "$latest"
-    wget -c "$latest".sha256
-
-    echo $shasum *$os*$file_extention | sha256sum --check - || (echo Downloaded file does not match its shasum, exiting! && exit 1)
-    downloaded_files=true
-done
-
-if [ "$downloaded_files" = true ] ; then
-    # Save the download date for use in the rollback script
-    date -R > download_date
-fi
-
-if [ "$updated_downloads" = true ] ; then
-    pushd previous
-
-    # Put the current backup first in the directory listing
-    mv current_snapshot 00
-
-    # Bump all folder names
-    # Assign a number to each file, reversing the processing order so no files are overwritten.
-    ls -v | cat -n | tac | while read n f; do mv -n "$f" "$(printf "%02d" $n)"; done
-
-    # Remove older backup folders if there are more than 10
-    num_old_files=$(ls -1 | wc -l)
-    if [ "$num_old_files" -ge 10 ]; then
-        files_to_remove=$(ls | tail $((10-$num_old_files)))
-        rm -fr $files_to_remove
-    fi
-    popd
-else
-    rm -fr previous/current_snapshot
-fi
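The backup-rotation one-liner above (ls -v | cat -n | tac | while read ...) is dense. A sketch of the same renumbering in Python, assuming the backups live in previous/ as zero-padded folder names:

import pathlib

def rotate_backups(previous: pathlib.Path) -> None:
    # Equivalent of: mv current_snapshot 00, then bump every numbered
    # folder by one, processing in reverse so no rename clobbers a
    # folder that has not been moved yet.
    (previous / "current_snapshot").rename(previous / "00")
    entries = sorted(previous.iterdir())
    for number, entry in reversed(list(enumerate(entries, start=1))):
        target = previous / f"{number:02d}"
        if not target.exists():  # mirror mv -n: never overwrite
            entry.rename(target)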