Add project-tools #142
@@ -65,9 +65,10 @@ start_search_path = current_file_path.parent.parent.parent.resolve()
 # path_dict = create_path_dict(str(start_search_path), 5)

 # path_dict pre-generated. This is the structure the consistency check will ensure is there
+# TODO don't record or check the project's name.
 path_dict = {
     'pets': {
-        'shared': {'pets': {'artifacts': {}}},
+        'shared': {'artifacts': {}},
         'svn': {'tools': {'consistency_check.py': 'file'}},
         'local': {'blender': {}, 'scripts': {}, 'config': {}},
     }
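Side note on the pre-generated path_dict above: a minimal sketch of how such a nested dict could be checked against the filesystem, using a hypothetical ensure_structure helper and pathlib.Path (the actual consistency_check.py may work differently):

def ensure_structure(root: Path, tree: dict):
    # Walk the expected tree: 'file' entries must exist, dict entries are directories.
    for name, value in tree.items():
        target = root / name
        if value == 'file':
            if not target.is_file():
                print("Missing expected file: %s" % target)
        else:
            target.mkdir(parents=True, exist_ok=True)
            ensure_structure(target, value)

# e.g. ensure_structure(project_root, path_dict['pets'])  # project_root is hypothetical here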
@@ -1,5 +1,6 @@
 #!/usr/bin/env python3

+import glob
 import hashlib
 import logging
 import os
@@ -10,7 +11,6 @@ import sys
 import tempfile
 import zipfile

-from dataclasses import dataclass
 from pathlib import Path

@@ -44,12 +44,6 @@ def setup_logger():
 logger = setup_logger()


-@dataclass
-class BlenderBuild:
-    archive: None
-    checksum: None
-
-
 def extract_dmg(dmg_file: Path, internal_pah, dst_path: Path):
     # Execute hdiutil to mount the dmg file
     mount_process = subprocess.run(
@@ -77,7 +71,17 @@ def extract_dmg(dmg_file: Path, internal_pah, dst_path: Path):

 def extract_tar_xz(file_path: Path, dst_path: Path):
     dst_path.mkdir(parents=True, exist_ok=True)
-    subprocess.run(['tar', 'xf', file_path, '--directory', dst_path, '--strip-components=1'])
+    subprocess.run(
+        [
+            'tar',
+            'xf',
+            file_path,
+            '--directory',
+            dst_path,
+            '--strip-components=1',
+            '--checkpoint=.1000',
+        ]
+    )


 def extract_zip(file_path: Path, dst_path: Path):
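extract_zip's body is outside this hunk; for context, a minimal sketch using the zipfile module imported at the top could look like this (an assumption, not necessarily the real implementation):

def extract_zip(file_path: Path, dst_path: Path):
    dst_path.mkdir(parents=True, exist_ok=True)
    # Unlike the tar call above, extractall does not strip the top-level folder,
    # so the real function may need to move the extracted contents up one level.
    with zipfile.ZipFile(file_path) as archive:
        archive.extractall(dst_path)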
@@ -109,6 +113,7 @@ def update_addons():
     # Check if we have the latest add-ons from shared
     studio_pipeline_artifacts = PATH_ARTIFACTS / 'blender-studio-pipeline'
     artifact_checksum = studio_pipeline_artifacts / 'main.zip.sha256'
+    artifact_archive = artifact_checksum.with_suffix('')

     if not artifact_checksum.exists():
         logger.error("Missing file %s" % artifact_checksum)
@@ -122,6 +127,11 @@ def update_addons():
             logger.info("Already up to date")
             return

+    if not artifact_archive.exists():
+        logger.error("Shasum exists but the archive file %s does not!" % artifact_archive)
+        logger.error("Could not update add-ons")
+        return
+
     # Extract the archive in a temp location and move the addons content to local
     tmp_dir = Path(tempfile.mkdtemp())
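compare_checksum is called here and in update_blender but is not part of this diff; a plausible sketch, assuming both arguments are .sha256 files whose first whitespace-separated field is the hex digest:

def compare_checksum(checksum_file_a: Path, checksum_file_b: Path) -> bool:
    # Compare the stored digests; hashing the archive itself with hashlib would also work.
    digest_a = checksum_file_a.read_text().split()[0]
    digest_b = checksum_file_b.read_text().split()[0]
    return digest_a == digest_b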
@@ -133,15 +143,25 @@ def update_addons():
     src_path_base = tmp_dir / 'blender-studio-pipeline' / 'scripts-blender' / 'addons'
     dst_path_base = PATH_LOCAL / 'scripts' / 'addons'

+    # Remove all files previously installed by the archive
+    local_installed_files = PATH_LOCAL / 'main.zip.files'
+    if local_installed_files.exists():
+        with open(local_installed_files) as file:
+            lines = [line.rstrip() for line in file]
+        for folder in lines:
+            shutil.rmtree(PATH_LOCAL / folder)
+
     # Get a list of directories inside the given directory
     addons = [subdir.name for subdir in src_path_base.iterdir() if subdir.is_dir()]

+    with open(local_installed_files, 'w') as f:
+        for addon_name in addons:
+            f.write("%s\n" % addon_name)
+
     for addon_name in addons:
         logger.debug("Moving %s" % addon_name)
         src_dir_addon = src_path_base / addon_name
         dst_dir_addon = dst_path_base / addon_name
-        if dst_dir_addon.exists():
-            shutil.rmtree(dst_dir_addon)
         shutil.move(src_dir_addon, dst_dir_addon)

     # Clean up the temporary directory
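One note on the removal loop above: shutil.rmtree raises FileNotFoundError if a recorded folder was already deleted by hand; a tolerant variant (sketch only) would be:

for folder in lines:
    target = PATH_LOCAL / folder
    if target.exists():
        shutil.rmtree(target)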
@@ -155,39 +175,45 @@ def update_blender():
     system_name = platform.system().lower()
     architecture = platform.machine()

-    # Check if we have the latest add-ons from shared
+    # Check if we have the latest blender archive from shared
     artifacts_path = PATH_ARTIFACTS / 'blender'

-    blender_build = BlenderBuild
+    # Look for the appropriate Blender archive for this system
+    matched_archives = glob.glob("blender*" + system_name + "." + architecture + "*.sha256")
+
-    # Iterate over the files in the source directory
-    for file_path in artifacts_path.iterdir():
-        if (
-            file_path.is_file()
-            and system_name in file_path.name
-            and architecture in file_path.name
-            and file_path.name.endswith('sha256')
-        ):
-            blender_build.checksum = file_path.name
-            blender_build.archive = file_path.with_suffix('')
-            break
+    # Check if we found any files
+    if len(matched_archives) != 1:
+        if len(matched_archives) == 0:
+            logger.error("No Blender archives found for this system!")
+            logger.error("System is: %s %s" % (system_name, architecture))
+            return
+        else:
+            logger.error(
+                "More than one candidate archive was found for this system. Only one is allowed!"
+            )
+            logger.error("The following candidates were found: %s" % str(matched_archives))
+            return

-    artifact_checksum = artifacts_path / blender_build.checksum
+    blender_build_checksum = Path(matched_archives[0])
+    blender_build_archive = blender_build_checksum.with_suffix('')

-    if not artifact_checksum.exists():
-        logger.error("Missing file %s" % artifact_checksum)
-        logger.error("Could not update add-ons")
+    if not blender_build_archive.exists():
+        logger.error(
+            "Shasum exists but the target Blender archive %s does not!" % blender_build_archive
+        )
+        logger.error("Could not update blender")
         return

     local_checksum = PATH_LOCAL / 'blender' / f"{system_name}.sha256"

     if local_checksum.exists():
-        if compare_checksum(local_checksum, artifact_checksum):
+        if compare_checksum(local_checksum, blender_build_checksum):
             logger.info("Already up to date")
             return

-    src = artifacts_path / blender_build.archive
+    src = artifacts_path / blender_build_archive
     dst = PATH_LOCAL / 'blender' / system_name
     shutil.rmtree(dst)

     if system_name == 'linux':
         extract_tar_xz(src, dst)
@@ -195,7 +221,7 @@ def update_blender():
         extract_dmg(src, 'Blender.app', dst)
     elif system_name == 'windows':
         extract_zip(src, dst)
-    shutil.copy(artifact_checksum, local_checksum)
+    shutil.copy(blender_build_checksum, local_checksum)


 def launch_blender():
@@ -210,7 +236,7 @@ def launch_blender():
     else:
         sys.exit(1)

-    # os.environ['BLENDER_USER_CONFIG'] = str(PATH_LOCAL / 'config')
+    os.environ['BLENDER_USER_CONFIG'] = str(PATH_LOCAL / 'config')
     os.environ['BLENDER_USER_SCRIPTS'] = str(PATH_LOCAL / 'scripts')
     subprocess.run([blender_path])
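The two environment variables point Blender at the project-local config and scripts directories. If mutating os.environ for the whole launcher process is undesirable, the variables could instead be scoped to the Blender subprocess only (a sketch, not what this patch does):

blender_env = dict(os.environ)
blender_env['BLENDER_USER_CONFIG'] = str(PATH_LOCAL / 'config')
blender_env['BLENDER_USER_SCRIPTS'] = str(PATH_LOCAL / 'scripts')
subprocess.run([blender_path], env=blender_env)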