Pipeline Release: Use Add-On Bundle to Update Add-Ons #269
@ -4,45 +4,78 @@ import glob
|
|||||||
import hashlib
|
import hashlib
|
||||||
import os
|
import os
|
||||||
import pathlib
|
import pathlib
|
||||||
|
from pathlib import Path
|
||||||
import requests
|
import requests
|
||||||
|
import tempfile
|
||||||
|
import sys
|
||||||
|
import zipfile
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
|
||||||
def download_file(url, out_folder, filename):
|
def main():
|
||||||
print("Downloading: " + url)
|
# TODO Replace download path with actual download from studio.blender.org
|
||||||
local_filename = out_folder / filename
|
downloads_path = pathlib.Path(__file__).parent.joinpath("downloads")
|
||||||
|
release_zip = downloads_path.joinpath("blender_studio_pipeline_release.zip")
|
||||||
|
release_checksum = downloads_path.joinpath("blender_studio_pipeline_release.zip.sha256")
|
||||||
|
|
||||||
# TODO Can't check any shasums before downloading so always remove and redownload everything for now
|
# TODO make script work from SVN directory, maybe add warning if not in corect directory
|
||||||
prev_downloaded_files = glob.glob(f"{local_filename}*")
|
# current_file_folder_path = pathlib.Path(__file__).parent
|
||||||
for file in prev_downloaded_files:
|
# download_folder_path = (current_file_folder_path / "../../shared/artifacts/addons/").resolve()
|
||||||
os.remove(file)
|
download_folder_path = Path(
|
||||||
|
"/data/my_project/shared/artifacts/addons/"
|
||||||
# NOTE the stream=True parameter below
|
) # TEMP while developing script
|
||||||
with requests.get(url, stream=True) as r:
|
|
||||||
r.raise_for_status()
|
|
||||||
with open(local_filename, 'wb') as f:
|
|
||||||
for chunk in r.iter_content(chunk_size=None):
|
|
||||||
if chunk:
|
|
||||||
f.write(chunk)
|
|
||||||
|
|
||||||
local_hash_filename = local_filename.with_suffix(".zip.sha256")
|
|
||||||
with open(local_filename, "rb") as f:
|
|
||||||
digest = hashlib.file_digest(f, "sha256")
|
|
||||||
with open(local_hash_filename, "w") as hash_file:
|
|
||||||
hash_file.write(digest.hexdigest())
|
|
||||||
|
|
||||||
return local_filename
|
|
||||||
|
|
||||||
|
|
||||||
current_file_folder_path = pathlib.Path(__file__).parent
|
|
||||||
download_folder_path = (current_file_folder_path / "../../shared/artifacts/addons/").resolve()
|
|
||||||
|
|
||||||
# Ensure that the download directory exists
|
# Ensure that the download directory exists
|
||||||
os.makedirs(download_folder_path, exist_ok=True)
|
os.makedirs(download_folder_path, exist_ok=True)
|
||||||
|
|
||||||
print("This script currently does nothing. If you want to update the 'studio-pipeline' addons, run the 'package_local.py' script in the studio-pipline repo.")
|
extract_release_zip(release_zip, download_folder_path)
|
||||||
|
|
||||||
#download_file(
|
|
||||||
# "https://projects.blender.org/studio/blender-studio-pipeline/archive/main.zip",
|
def extract_release_zip(file_path: Path, dst_path: Path):
|
||||||
# download_folder_path,
|
temp_dir = tempfile.mkdtemp()
|
||||||
# "blender-studio-pipeline-main.zip",
|
with zipfile.ZipFile(file_path, 'r') as zip_ref:
|
||||||
#)
|
zip_ref.extractall(temp_dir)
|
||||||
|
|
||||||
|
try:
|
||||||
|
src_path = [subdir for subdir in Path(temp_dir).iterdir()][0]
|
||||||
|
except IndexError:
|
||||||
|
print("The archive %s does not contain any directory" % file_path.name)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
for file in src_path.iterdir():
|
||||||
|
# TODO use checksum to skip files that are already updated?
|
||||||
|
original_file = dst_path / file.name
|
||||||
|
if original_file.exists():
|
||||||
|
os.remove(original_file)
|
||||||
|
shutil.move(file, dst_path)
|
||||||
|
|
||||||
|
shutil.rmtree(temp_dir)
|
||||||
|
|
# Old implementation, kept for reference until the download from
# studio.blender.org is wired up (see TODO in main()).
# def download_file(url, out_folder, filename):
#     print("Downloading: " + url)
#     local_filename = out_folder / filename
#
#     # TODO Can't check any shasums before downloading so always remove and redownload everything for now
#     prev_downloaded_files = glob.glob(f"{local_filename}*")
#     for file in prev_downloaded_files:
#         os.remove(file)
#
#     # NOTE the stream=True parameter below
#     with requests.get(url, stream=True) as r:
#         r.raise_for_status()
#         with open(local_filename, 'wb') as f:
#             for chunk in r.iter_content(chunk_size=None):
#                 if chunk:
#                     f.write(chunk)
#
#     local_hash_filename = local_filename.with_suffix(".zip.sha256")
#     with open(local_filename, "rb") as f:
#         digest = hashlib.file_digest(f, "sha256")
#     with open(local_hash_filename, "w") as hash_file:
#         hash_file.write(digest.hexdigest())
#
#     return local_filename
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
Loading…
Reference in New Issue
Block a user