2017-07-10 01:44:54 +02:00
|
|
|
"""
|
|
|
|
All the stuff that needs to run in a subprocess.
|
|
|
|
"""
|
|
|
|
|
|
|
|
import logging
|
|
|
|
import pathlib
|
2017-07-12 20:07:20 -07:00
|
|
|
import shutil
|
2017-07-13 16:33:14 -07:00
|
|
|
import json
|
2017-07-22 20:14:09 -07:00
|
|
|
from .bpkg import utils
|
2017-07-21 00:27:16 -07:00
|
|
|
from .bpkg import Package, Repository
|
|
|
|
from .messages import *
|
|
|
|
from .bpkg.exceptions import *
|
2017-07-10 01:44:54 +02:00
|
|
|
|
2017-07-21 00:27:16 -07:00
|
|
|
#TODO: move actual downloading code into bpkg
|
|
|
|
#functions here should only contain glue code for facilitating subprocessing of bpkg functionality
|
2017-07-10 01:44:54 +02:00
|
|
|
def _download(pipe_to_blender, package_url: str, download_dir: pathlib.Path) -> pathlib.Path:
    """Downloads the given package

    :param pipe_to_blender: multiprocessing pipe used to report Progress and
        receive Abort messages from Blender.
    :param package_url: URL of the package file to download.
    :param download_dir: directory in which the file is stored.
    :returns: path to the downloaded file, or None in case of error.
    """

    import requests

    log = logging.getLogger('%s.download' % __name__)
    log.info('Going to download %s to %s', package_url, download_dir)
    pipe_to_blender.send(Progress(0.0))

    log.info('Downloading %s', package_url)
    try:
        resp = requests.get(package_url, stream=True, verify=True)
    except requests.exceptions.RequestException as err:
        pipe_to_blender.send(DownloadError(1, err))
        raise

    try:
        resp.raise_for_status()
    except requests.HTTPError as ex:
        log.error('Error downloading %s: %s', package_url, ex)
        pipe_to_blender.send(DownloadError(resp.status_code, str(ex)))
        return None

    try:
        # Use float so that we can also use infinity
        content_length = float(resp.headers['content-length'])
    except KeyError:
        log.warning('Server did not send content length, cannot report progress.')
        content_length = float('inf')

    # TODO: check if there's enough disk space.

    # TODO: get filename from Content-Disposition header, if available.
    # TODO: use urllib.parse to parse the URL.
    local_filename = package_url.split('/')[-1] or 'download.tmp'
    local_fpath = download_dir / local_filename

    downloaded_length = 0
    aborted = False
    with local_fpath.open('wb') as outfile:
        for chunk in resp.iter_content(chunk_size=1024 ** 2):
            # Handle abort messages from Blender
            while pipe_to_blender.poll():
                recvd = pipe_to_blender.recv()
                if isinstance(recvd, Abort):
                    log.warning('Aborting download of %s by request', package_url)
                    aborted = True
                    break
                log.warning('Unknown message %s received, ignoring', recvd)
            if aborted:
                break

            if not chunk:  # filter out keep-alive new chunks
                continue

            outfile.write(chunk)
            downloaded_length += len(chunk)

            # TODO: use multiplier for progress, so that we can count up to 70% and
            # leave 30% "progress" for installation of the package.
            pipe_to_blender.send(Progress(downloaded_length / content_length))

    # Release the connection back to the pool; with stream=True it is not
    # released automatically unless the body is fully consumed.
    resp.close()

    if aborted:
        # Don't leave a truncated file behind in the download cache, where a
        # later install attempt could pick it up as a complete package.
        try:
            local_fpath.unlink()
        except OSError:
            log.warning('Could not remove partial download %s', local_fpath)
        pipe_to_blender.send(Aborted())
        return None

    return local_fpath
|
|
|
|
|
2017-07-18 15:10:11 -07:00
|
|
|
def _add_to_installed(storage_path: pathlib.Path, pkg: Package):
    """Record `pkg` in the on-disk repository of locally installed packages."""
    local_repo_file = storage_path / 'local.json'
    # Load the existing local repository, or start a fresh one on first install.
    local_repo = Repository.from_file(local_repo_file) if local_repo_file.exists() else Repository()
    local_repo.packages.append(pkg)
    local_repo.to_file(local_repo_file)
|
|
|
|
|
|
|
|
def _remove_from_installed(storage_path: pathlib.Path, pkg: Package):
    """Remove pkg from the local repository of installed packages.

    :raises ValueError: if `pkg` is not found in the local repository.
    """
    repo_path = storage_path / 'local.json'
    repo = Repository.from_file(repo_path)
    #TODO: this won't work, compare by name? (watch out for conflicts though)
    repo.packages.remove(pkg)
    # Persist the change; previously the mutated repository was never written
    # back, so the removal was silently lost (contrast _add_to_installed,
    # which saves after mutating).
    repo.to_file(repo_path)
|
|
|
|
|
2017-07-12 20:07:20 -07:00
|
|
|
def _install(pipe_to_blender, pkgpath: pathlib.Path, dest: pathlib.Path, searchpaths: list):
    """Extracts/moves package at `pkgpath` to `dest`

    :param pipe_to_blender: pipe used to report Progress/warnings to Blender.
    :param pkgpath: path of the downloaded package file (.zip or single .py).
    :param dest: directory to install the package into.
    :param searchpaths: addon search paths (currently unused here).
    :raises InstallException: when the package cannot be installed.
    """
    import zipfile

    log = logging.getLogger('%s.install' % __name__)
    log.debug("Starting installation")
    pipe_to_blender.send(Progress(0.0))

    if not pkgpath.is_file():
        raise InstallException("Package isn't a file")

    if not dest.is_dir():
        raise InstallException("Destination is not a directory")

    # TODO: check to make sure addon/package isn't already installed elsewhere

    # The following is adapted from `addon_install` in bl_operators/wm.py

    # check to see if the file is in compressed format (.zip)
    if zipfile.is_zipfile(str(pkgpath)):
        log.debug("Package is zipfile")
        try:
            file_to_extract = zipfile.ZipFile(str(pkgpath), 'r')
        except Exception as err:
            raise InstallException("Failed to read zip file: %s" % err) from err

        def root_files(filelist: list) -> list:
            """Some string parsing to get a list of the root contents of a zip from its namelist"""
            rootlist = []
            for f in filelist:
                # Get all names which have no path separators (root level files)
                # or have a single path separator at the end (root level directories).
                if len(f.rstrip('/').split('/')) == 1:
                    rootlist.append(f)
            return rootlist

        # Back up anything at the destination that the zip would overwrite,
        # so a failed extraction can be rolled back.
        conflicts = [dest / f for f in root_files(file_to_extract.namelist()) if (dest / f).exists()]
        backups = []
        for conflict in conflicts:
            log.debug("Creating backup of conflict %s", conflict)
            backups.append(utils.InplaceBackup(conflict))

        try:
            file_to_extract.extractall(str(dest))
        except Exception as err:
            for backup in backups:
                backup.restore()
            raise InstallException("Failed to extract zip file to '%s': %s" % (dest, err)) from err
        finally:
            # Don't leak the open zip file handle (previously never closed).
            file_to_extract.close()

        for backup in backups:
            backup.remove()

    else:
        log.debug("Package is pyfile")
        dest_file = (dest / pkgpath.name)

        # `backup` stays None when there is nothing to overwrite; the previous
        # code referenced an undefined name in that case, turning a copy
        # failure into a NameError instead of an InstallException.
        backup = None
        if dest_file.exists():
            backup = utils.InplaceBackup(dest_file)

        try:
            shutil.copyfile(str(pkgpath), str(dest_file))
        except Exception as err:
            if backup is not None:
                backup.restore()
            raise InstallException("Failed to copy file to '%s': %s" % (dest, err)) from err

    # Best-effort cleanup of the cached download; installation already
    # succeeded, so failure here is only a warning, not an error.
    try:
        pkgpath.unlink()
        log.debug("Removed cached package: %s", pkgpath)
    except Exception as err:
        pipe_to_blender.send(SubprocWarning("Install succeeded, but failed to remove package from cache: %s" % err))
        log.warning("Failed to remove package from cache: %s", err)

    pipe_to_blender.send(Progress(1.0))
    return
|
|
|
|
|
|
|
|
|
2017-07-20 19:22:45 -07:00
|
|
|
def download_and_install(pipe_to_blender, package_url: str, install_path: pathlib.Path, search_paths: list):
    """Downloads and installs the given package."""

    from . import cache

    log = logging.getLogger('%s.download_and_install' % __name__)

    # Downloads land in the shared cache; _install removes the file afterwards.
    pkg_file = _download(pipe_to_blender, package_url, cache.cache_directory('downloads'))
    if not pkg_file:
        log.debug('Download failed/aborted, not going to install anything.')
        return

    try:
        _install(pipe_to_blender, pkg_file, install_path, search_paths)
    except InstallException as err:
        log.exception("Failed to install package: %s", err)
        pipe_to_blender.send(InstallError(err))
    else:
        pipe_to_blender.send(Success())
|
|
|
|
|
2017-07-20 19:22:45 -07:00
|
|
|
def uninstall(pipe_to_blender, package: Package, install_path: pathlib.Path):
    """Deletes the given package's files from the install directory"""

    #TODO: move package to cache and present an "undo" button to user, to give nicer UX on misclicks

    #TODO: move this to a shared utility function
    # Duplicated code with InplaceBackup class
    def _rm(path: pathlib.Path):
        """Just delete whatever is specified by `path`"""
        if path.is_dir():
            shutil.rmtree(str(path))
        else:
            path.unlink()

    owned_paths = [install_path / pathlib.Path(p) for p in package.files]

    # Verify every owned file exists before touching anything, so we never
    # leave a half-uninstalled package behind.
    for owned in owned_paths:
        if not owned.exists():
            pipe_to_blender.send(UninstallError("Could not find file owned by package: '%s'. Refusing to uninstall." % owned))
            return None

    for owned in owned_paths:
        _rm(owned)

    pipe_to_blender.send(Success())
|
|
|
|
|
|
|
|
|
2017-07-15 02:18:18 -07:00
|
|
|
def _load_repo(storage_path: pathlib.Path) -> Repository:
    """Reads the stored repositories"""
    return Repository.from_file(storage_path / 'repo.json')
|
|
|
|
|
2017-07-13 16:33:14 -07:00
|
|
|
def refresh(pipe_to_blender, storage_path: pathlib.Path, repository_url: str):
    """Retrieves and stores the given repository"""

    log = logging.getLogger(__name__ + '.refresh')

    repo_path = storage_path / 'repo.json'
    repo = Repository.from_file(repo_path) if repo_path.exists() else None
    if repo is None or repo.url != repository_url:
        # Nothing stored yet, or we're getting a new repository.
        repo = Repository(repository_url)

    try:
        repo.refresh()
    except DownloadException as err:
        pipe_to_blender.send(SubprocError(err))
        return

    repo.to_file(repo_path)  # TODO: this always writes even if repo wasn't changed
    pipe_to_blender.send(RepositoryResult(repo))
    pipe_to_blender.send(Success())
|
2017-07-14 21:15:00 -07:00
|
|
|
|
|
|
|
def load(pipe_to_blender, storage_path: pathlib.Path):
    """Reads the stored repository and sends the result to blender

    :returns: the loaded Repository, or None if it could not be read.
    """

    # NOTE(review): this sends the repository as a dict while refresh() sends
    # the Repository object itself — confirm both receivers handle their form.
    try:
        repository = _load_repo(storage_path)
        pipe_to_blender.send(RepositoryResult(repository.to_dict(sort=True, ids=True)))
        pipe_to_blender.send(Success())
        return repository
    except BadRepository as ex:
        pipe_to_blender.send(SubprocError("Failed to read repository: %s" % ex))
|
|
|
|
|
2017-07-18 15:10:11 -07:00
|
|
|
# def load_local(pipe_to_blender
|
|
|
|
|
2017-07-10 01:44:54 +02:00
|
|
|
|
|
|
|
def debug_hang():
    """Hangs for an hour. For testing purposes only."""

    import time

    # One hour, expressed as minutes * seconds.
    time.sleep(60 * 60)
|