Pipeline Release: Use Add-On Bundle to Update Add-Ons #269

Merged
Showing only changes of commit 791663670d - Show all commits

View File

@@ -1,11 +1,10 @@
#!/usr/bin/env python3
import glob
import hashlib
import os
import pathlib
from pathlib import Path
import requests
from urllib.request import urlretrieve
import tempfile
import sys
import zipfile
@@ -13,69 +12,88 @@ import shutil
def main():
    """Download the latest Blender Studio add-ons bundle and install it.

    Downloads the release zip and its .sha256 companion file into a fresh
    temporary directory, verifies the archive against the published
    checksum, then extracts the bundled add-ons into the project's
    ``shared/artifacts/addons/`` directory (resolved relative to this
    script). Exits with status 1 on checksum mismatch or when the script
    is not run from inside a Project Tools checkout.
    """
    download_path = Path(tempfile.mkdtemp(prefix="blender_studio_addons_download" + "_"))
    zip_filepath = download_path.joinpath("blender_studio_add-ons_latest.zip")
    checksum_path = download_path.joinpath("blender_studio_add-ons_latest.zip.sha256")
    # Scratch directory the archive is unpacked into before the files are
    # moved to their final destination.
    extract_path = download_path.joinpath("extract")
    extract_path.mkdir()

    print(f"Downloading {zip_filepath.name}......", end="")
    urlretrieve(
        "https://projects.blender.org/studio/blender-studio-pipeline/releases/download/latest/blender_studio_add-ons_latest.zip",
        str(zip_filepath),
    )
    print("Complete")

    print(f"Downloading {checksum_path.name}......", end="")
    urlretrieve(
        "https://projects.blender.org/studio/blender-studio-pipeline/releases/download/latest/blender_studio_add-ons_latest.zip.sha256",
        str(checksum_path),
    )
    print("Complete")

    # Verify the download: the .sha256 file's first token is the expected
    # hex digest of the zip.
    checksum = generate_checksum(zip_filepath)
    with open(checksum_path, "r") as f:
        lines = f.readlines()
    if lines[0].split(" ")[0].strip() != checksum:
        print("Download Error: checksum is invalid, please try again")
        sys.exit(1)

    # Resolve the shared add-ons directory relative to this script so the
    # tool works from any checkout location.
    current_file_folder_path = pathlib.Path(__file__).parent
    download_folder_path = (current_file_folder_path / "../../shared/artifacts/addons/").resolve()
    if not download_folder_path.exists():
        print(
            f"Ensure script is run out of Project Tools directory {str(download_folder_path)} does not exist"
        )
        sys.exit(1)

    extract_release_zip(zip_filepath, extract_path, download_folder_path)
    # Clean up the temporary download directory (zip, checksum, scratch dir).
    shutil.rmtree(download_path)
    print("Blender Studio Add-Ons Successfully Updated for Current Project")
    print(
        "These Add-Ons will be copied to your local directory next time you launch Blender via Project Tools"
    )
def extract_release_zip(file_path: Path, extract_path: Path, dst_path: Path):
    """Extract a release zip and move its contents into the destination.

    The archive is expected to contain a single top-level directory whose
    files are the add-on artifacts; each extracted file replaces any
    same-named file already present in ``dst_path``. The scratch
    ``extract_path`` directory is removed when done. Exits with status 1
    if the archive contains no top-level directory.

    Args:
        file_path (Path): Path to the zip file to extract.
        extract_path (Path): Scratch directory to extract the archive into
            (deleted on completion).
        dst_path (Path): Destination directory the extracted files are
            moved to.
    """
    with zipfile.ZipFile(file_path, 'r') as zip_ref:
        zip_ref.extractall(extract_path)
    try:
        # The release zip wraps all artifacts in one top-level directory.
        src_path = [subdir for subdir in Path(extract_path).iterdir()][0]
    except IndexError:
        print("The archive %s does not contain any directory" % file_path.name)
        sys.exit(1)
    for file in src_path.iterdir():
        # TODO use checksum to skip files that are already updated?
        print(f"Extracting: {file.name}")
        # Remove any stale copy first so shutil.move never collides.
        original_file = dst_path / file.name
        if original_file.exists():
            os.remove(original_file)
        shutil.move(file, dst_path)
    shutil.rmtree(extract_path)
# def download_file(url, out_folder, filename):
# print("Downloading: " + url)
# local_filename = out_folder / filename
def generate_checksum(archive_path: Path) -> str:
"""Generate checksum for archive file.
# # TODO Can't check any shasums before downloading so always remove and redownload everything for now
# prev_downloaded_files = glob.glob(f"{local_filename}*")
# for file in prev_downloaded_files:
# os.remove(file)
Args:
archive_path (Path): Path to archive file to generate checksum for.
# # NOTE the stream=True parameter below
# with requests.get(url, stream=True) as r:
# r.raise_for_status()
# with open(local_filename, 'wb') as f:
# for chunk in r.iter_content(chunk_size=None):
# if chunk:
# f.write(chunk)
Returns:
str: Hex digest string of checksum.
"""
# local_hash_filename = local_filename.with_suffix(".zip.sha256")
# with open(local_filename, "rb") as f:
# digest = hashlib.file_digest(f, "sha256")
# with open(local_hash_filename, "w") as hash_file:
# hash_file.write(digest.hexdigest())
with open(archive_path, 'rb') as file:
digest = hashlib.file_digest(file, "sha256")
return digest.hexdigest()
# return local_filename
if __name__ == "__main__":
main()