Pipeline Release: Use Add-On Bundle to Update Add-Ons #269

Merged
Showing only changes of commit 791663670d - Show all commits

View File

@@ -1,11 +1,10 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import glob
import hashlib import hashlib
import os import os
import pathlib import pathlib
from pathlib import Path from pathlib import Path
import requests from urllib.request import urlretrieve
import tempfile import tempfile
import sys import sys
import zipfile import zipfile
@@ -13,69 +12,88 @@ import shutil
def main():
    """Download the latest Blender Studio add-ons bundle, verify its
    SHA-256 checksum, and install it into the project's shared add-ons
    directory.

    Exits with status 1 when the downloaded archive fails checksum
    verification or when the script is not run from inside a Project
    Tools checkout (the expected shared/artifacts/addons folder missing).
    """
    # Work inside a fresh temporary directory so a failed or partial
    # download never pollutes the project tree.
    download_path = Path(tempfile.mkdtemp(prefix="blender_studio_addons_download" + "_"))
    zip_filepath = download_path.joinpath("blender_studio_add-ons_latest.zip")
    checksum_path = download_path.joinpath("blender_studio_add-ons_latest.zip.sha256")
    extract_path = download_path.joinpath("extract")
    extract_path.mkdir()

    print(f"Downloading {zip_filepath.name}......", end="")
    urlretrieve(
        "https://projects.blender.org/studio/blender-studio-pipeline/releases/download/latest/blender_studio_add-ons_latest.zip",
        str(zip_filepath),
    )
    print("Complete")
    print(f"Downloading {checksum_path.name}......", end="")
    urlretrieve(
        "https://projects.blender.org/studio/blender-studio-pipeline/releases/download/latest/blender_studio_add-ons_latest.zip.sha256",
        str(checksum_path),
    )
    print("Complete")

    # Verify the archive against the published checksum before installing.
    # The .sha256 file follows the `sha256sum` format: "<hex digest>  <name>".
    checksum = generate_checksum(zip_filepath)
    with open(checksum_path, "r") as f:
        lines = f.readlines()
    if lines[0].split(" ")[0].strip() != checksum:
        print("Download Error: checksum is invalid, please try again")
        sys.exit(1)

    # Resolve the install target relative to this script so the updater
    # only works from within a Project Tools checkout.
    current_file_folder_path = pathlib.Path(__file__).parent
    download_folder_path = (current_file_folder_path / "../../shared/artifacts/addons/").resolve()
    if not download_folder_path.exists():
        print(
            f"Ensure script is run out of Project Tools directory {str(download_folder_path)} does not exist"
        )
        sys.exit(1)

    extract_release_zip(zip_filepath, extract_path, download_folder_path)
    # All temporary artifacts (zip, checksum, extraction dir) live under
    # download_path, so one rmtree cleans everything up.
    shutil.rmtree(download_path)
    print("Blender Studio Add-Ons Successfully Updated for Current Project")
    # Fixed user-facing typo: was "Projet Tools".
    print(
        "These Add-Ons will be copied to your local directory next time you launch Blender via Project Tools"
    )
def extract_release_zip(file_path: Path, extract_path: Path, dst_path: Path):
    """Extract a release zip and install its contents.

    The archive is expected to contain a single top-level directory; every
    entry inside that directory is moved into ``dst_path``, replacing any
    existing file of the same name. The scratch ``extract_path`` directory
    is removed once the move is complete.

    Args:
        file_path (Path): Path to the zip file to extract.
        extract_path (Path): Scratch directory to extract the archive into.
        dst_path (Path): Destination directory for the extracted files.
    """
    with zipfile.ZipFile(file_path, 'r') as zip_ref:
        zip_ref.extractall(extract_path)

    try:
        # Releases wrap their payload in exactly one top-level folder;
        # take the first entry rather than materializing the whole listing.
        src_path = next(iter(Path(extract_path).iterdir()))
    except StopIteration:
        print("The archive %s does not contain any directory" % file_path.name)
        sys.exit(1)

    for file in src_path.iterdir():
        # Fixed user-facing typo: was "Extacting:".
        print(f"Extracting: {file.name}")
        original_file = dst_path / file.name
        # Remove any previous copy first so shutil.move() cannot fail on
        # an existing destination.
        if original_file.exists():
            os.remove(original_file)
        shutil.move(file, dst_path)
    shutil.rmtree(extract_path)
def generate_checksum(archive_path: Path) -> str:
    """Generate a SHA-256 checksum for an archive file.

    Args:
        archive_path (Path): Path to the archive file to hash.

    Returns:
        str: Hex digest string of the checksum.
    """
    # hashlib.file_digest() requires Python 3.11+; feeding the hash in
    # fixed-size chunks produces the identical digest on any supported
    # interpreter and keeps memory use constant for large archives.
    digest = hashlib.sha256()
    with open(archive_path, 'rb') as file:
        while chunk := file.read(65536):
            digest.update(chunk)
    return digest.hexdigest()
# Run the updater only when executed as a script (not on import).
if __name__ == "__main__":
    main()