From ee0bbca8e5fad78ab421719589f1226ff0f95f1d Mon Sep 17 00:00:00 2001
From: Ellwood Zwovic
Date: Tue, 29 Aug 2017 04:30:49 -0700
Subject: [PATCH] Move package manager to blender branch

Moved the package manager out of an addon. It now lives here:
https://developer.blender.org/diffusion/B/browse/soc-2017-package_manager/

This repository still contains the repo generation script; the readme has
been updated to reflect this.
---
 README.md                          |  124 +---
 package_manager/__init__.py        | 1101 ----------------------------
 package_manager/appdirs.py         |  552 --------------
 package_manager/bpkg/__init__.py   |   19 -
 package_manager/bpkg/exceptions.py |   14 -
 package_manager/bpkg/types.py      |  501 -------------
 package_manager/bpkg/utils.py      |  218 ------
 package_manager/cache.py           |   49 --
 package_manager/messages.py        |   73 --
 package_manager/subproc.py         |   80 --
 package_manager/utils.py           |   29 -
 setup.py                           |  114 ---
 tests/test_make_repo.py            |   27 +-
 tests/test_refresh_repos.py        |   72 --
 tests/test_repo_io.py              |   65 --
 15 files changed, 30 insertions(+), 3008 deletions(-)
 delete mode 100644 package_manager/__init__.py
 delete mode 100644 package_manager/appdirs.py
 delete mode 100644 package_manager/bpkg/__init__.py
 delete mode 100644 package_manager/bpkg/exceptions.py
 delete mode 100644 package_manager/bpkg/types.py
 delete mode 100644 package_manager/bpkg/utils.py
 delete mode 100644 package_manager/cache.py
 delete mode 100644 package_manager/messages.py
 delete mode 100644 package_manager/subproc.py
 delete mode 100644 package_manager/utils.py
 delete mode 100755 setup.py
 delete mode 100644 tests/test_refresh_repos.py
 delete mode 100755 tests/test_repo_io.py

diff --git a/README.md b/README.md
index 9ac7cbe..c5e59f5 100644
--- a/README.md
+++ b/README.md
@@ -1,36 +1,9 @@
-# BlenderPackage, the Blender Package Manager (wip)
+# Repository generator for the blender package manager
+For the package manager itself, see the `soc-2017-package_manager` branch here:
+https://developer.blender.org/diffusion/B/browse/soc-2017-package_manager/
 
-This is work-in-progress documentation for the work-in-progress package manager
-(the name is also a work-in-progress) As such, everything here is subject to
-change.
-
-# Installation and Testing
-
-1. Clone blender and checkout the [`soc-2017-package_manager` branch](https://developer.blender.org/diffusion/B/browse/soc-2017-package_manager/).
-
-        git clone git://git.blender.org/blender.git
-        git checkout soc-2017-package_manager
-
-2. [Compile](https://wiki.blender.org/index.php/Dev:Doc/Building_Blender).
-2. You may want to build without addons (or delete them from `/bin//scripts/addons*` afterwards).
-   This is because system addons typically can't be altered by the user (permissions), so the package manager won't be able to uninstall/update them.
-   Plus, the test repo only contains official addons, so if left as is, all addons will be installed already.
-3. Clone the [package manager addon repository](https://developer.blender.org/diffusion/BPMA/repository):
-
-        git clone git://git.blender.org/blender-package-manager-addon.git
-
-4. Install the addon. Copy (or symlink) the `package_manager` directory
-   contained *within* the cloned repository into
-   `/path/to/blender/build/bin/2.78/scripts/addons/`
-5. Start blender and enable the addon (it's in the "Testing" support level")
-6. Add a repository in *User Preferences > Packages > Repositories* by clicking the "plus" icon. You can use a local repository (see below), or the one I set up for testing: `http://blendermonkey.com/bpkg`.
- Currently only one repository is allowed, but this will change. - - -### Repository creation - -A local repository can be generated with the `generate_repository` script found -in the addon repository. Example usage: +This repository contains a script (`generate_repository`) for generating +repositories of blender packages. Example usage: ./generate_repository /path/to/packages --baseurl 'http://localhost/' @@ -38,82 +11,21 @@ This will produce a `repo.json` file in the current directory, which can then be copied to the server. The baseurl is prepended to the filename of each package to form the package's url (so for example, `http://localhost/node_wrangler.py`). +For an explanation of the other options see `generate_repository --help`: -# Known limitations + usage: generate_repository [-h] [-v] [-u BASEURL] [-n NAME] [-o OUTPUT] path -Things which are known to be bad, but are planned to get better + Generate a blender package repository from a directory of addons -* No progress display -* Asynchronous operators can be run multiple times at once -* Not more than one repository can be used at once -* Only the latest version of a package can be installed and uninstalled + positional arguments: + path Path to addon directory -# Notes - -My intention is to eventually make uninstalls undo-able until blender is -restarted by moving the uninstalled files to a cache directory which is flushed -on startup and/or exit. - -Packages are identified by their name. This could of course cause issues if two -different packages end up with the same name. As it seems 2.8 will break many -addons anyway, perhaps we can add a required metadata field that allows for -more reliable unique identification? - -# Terminology - -## Package - -A _package_ consists of a single file, or a zip archive containing files to be installed. - -Note: -I think it would be good to always store `bl_info` metadata with the package, -but how best to do this while being compatible with existing addons and future -non-addons remains an open question (perhaps we can always include an -`__init__.py` even in non-addon packages?) - - -## Repository - -A _repository_ consists of a directory containing a "repo.json" file. This -repo.json file contains metadata describing each package (`bl_info`) and where -it may be retrieved from. - -A repo.json file may currently be generated from a directory of addons by -running `blenderpack.py `. - - -## Client - -Clients "use" a repository by including its repo.json in packages listed in -Clients can be configured to use multiple repositories at once. - -In addition, the client maintains it's own "local repository", which is a -repo.json containing installed packages. - -Clients can take the following actions: - -### Install - -_Installing_ means downloading a single package, adding it to the local -repository, and extracting/copying the package's file(s) to their -destination(s). - -### Uninstall - -_Uninstalling_ means deleting a single package's files, then removing it from -the local repository. - -Note: -If some packages store user-created data (e.g. preferences), we may want to -preserve that somehow. - -### Upgrade - -_Upgrading_ means looking for and installing newer addons with the same names as installed -addons. - -### Refresh - -_Refreshing_ means checking for modifications to the `repo.json`s of the enabled -repositories, and new packages which may have appeared on disk. 
+ optional arguments: + -h, --help show this help message and exit + -v, --verbose Increase verbosity (can be used multiple times) + -u BASEURL, --baseurl BASEURL + Component of URL leading up to the package filename. + -n NAME, --name NAME Name of repo (defaults to basename of 'path') + -o OUTPUT, --output OUTPUT + Directory in which to write repo.json file diff --git a/package_manager/__init__.py b/package_manager/__init__.py deleted file mode 100644 index 0d9cca2..0000000 --- a/package_manager/__init__.py +++ /dev/null @@ -1,1101 +0,0 @@ -""" -Blender Package manager -""" - -bl_info = { - 'name': 'Package Manager', - 'author': 'Ellwood Zwovic (gandalf3), Sybren A. Stüvel, Mitchell Stokes (Moguri)', - 'version': (0, 1, 0), - 'blender': (2, 79, 0), - 'location': 'User Preferences > Packages', - 'description': 'A tool for installing, updating, and otherwise managing, addons and packages.', - 'category': 'System', - 'support': 'TESTING', -} -import logging -log = logging.getLogger(__name__) - -# HACK: -# due to lack of fork() on windows, multiprocessing will re-execute this module -# in a new process. In such cases we only need subproc, everything else is only -# used to spawn the subprocess in the first place. -try: - import bpy -except ImportError: - from . import subproc -else: - if 'bpkg' in locals(): - from importlib import reload - - def recursive_reload(mod): - """Reloads the given module and all its submodules""" - log.debug("Reloading %s", mod) - from types import ModuleType - reloaded_mod = reload(mod) - for attr in [getattr(mod, attr_name) for attr_name in dir(mod)]: - if type(attr) is ModuleType and attr.__name__.startswith(mod.__name__): - recursive_reload(attr) - return reloaded_mod - - subproc = recursive_reload(subproc) - messages = recursive_reload(messages) - utils = recursive_reload(utils) - bpkg = recursive_reload(bpkg) - Package = bpkg.types.Package - - from . import subproc - from . import messages - from . import bpkg - from . import utils - from .bpkg.types import ( - Package, - ConsolidatedPackage, - ) - from pathlib import Path - from collections import OrderedDict - import multiprocessing - mp_context = multiprocessing.get_context() - mp_context.set_executable(bpy.app.binary_path_python) - - # global list of all known packages, indexed by name - _packages = OrderedDict() - - # used for lazy loading - _main_has_run = False - - class SubprocMixin: - """Mix-in class for things that need to be run in a subprocess.""" - - log = logging.getLogger(__name__ + '.SubprocMixin') - _state = 'INITIALIZING' - _abort_timeout = 0 # time at which we stop waiting for an abort response and just terminate the process - - # Mapping from message type (see bpkg_manager.subproc) to handler function. - # Should be constructed before modal() gets called. - msg_handlers = {} - - def execute(self, context): - return self.invoke(context, None) - - def quit(self): - """Signals the state machine to stop this operator from running.""" - self._state = 'QUIT' - - def invoke(self, context, event): - self.pipe_blender, self.pipe_subproc = multiprocessing.Pipe() - - # The subprocess should just be terminated when Blender quits. Without this, - # Blender would hang while closing, until the subprocess terminates itself. - # TODO: Perhaps it would be better to fork when blender exits? 
- self.process = self.create_subprocess() - self.process.daemon = True - self.process.start() - - self._state = 'RUNNING' - - wm = context.window_manager - wm.modal_handler_add(self) - self.timer = wm.event_timer_add(0.1, context.window) - - return {'RUNNING_MODAL'} - - def modal(self, context, event): - import time - - if event.type == 'ESC': - self.log.warning('Escape pressed, sending abort signal to subprocess') - self.abort() - return {'PASS_THROUGH'} - - if event.type != 'TIMER': - return {'PASS_THROUGH'} - - if self._state == 'ABORTING' and time.time() > self._abort_timeout: - self.log.error('No response from subprocess to abort request, terminating it.') - self.report({'ERROR'}, 'No response from subprocess to abort request, terminating it.') - self.process.terminate() - self._finish(context) - return {'CANCELLED'} - - while self.pipe_blender.poll(): - self.handle_received_data() - - if self._state == 'QUIT': - self._finish(context) - return {'FINISHED'} - - if not self.process.is_alive(): - self.report_process_died() - self._finish(context) - return {'CANCELLED'} - - return {'RUNNING_MODAL'} - - def abort(self): - import time - - # Allow the subprocess 10 seconds to repsond to our abort message. - self._abort_timeout = time.time() + 10 - self._state = 'ABORTING' - - self.pipe_blender.send(messages.Abort()) - - def _finish(self, context): - try: - self.cancel(context) - except AttributeError: - pass - - global bpkg_operation_running - - context.window_manager.event_timer_remove(self.timer) - bpkg_operation_running = False - - if self.process and self.process.is_alive(): - self.log.debug('Waiting for subprocess to quit') - try: - self.process.join(timeout=10) - except multiprocessing.TimeoutError: - self.log.warning('Subprocess is hanging, terminating it forcefully.') - self.process.terminate() - else: - self.log.debug('Subprocess stopped with exit code %i', self.process.exitcode) - - def handle_received_data(self): - recvd = self.pipe_blender.recv() - - self.log.debug('Received message from subprocess: %s', recvd) - try: - handler = self.msg_handlers[type(recvd)] - except KeyError: - self.log.error('Unable to handle received message %s', recvd) - # Maybe we shouldn't show this to the user? - self.report({'WARNING'}, 'Unable to handle received message %s' % recvd) - return - - handler(recvd) - - def create_subprocess(self): - """Implement this in a subclass. - - :rtype: multiprocessing.Process - """ - raise NotImplementedError() - - def report_process_died(self): - """Provides the user with sensible information when the process has died. - - Implement this in a subclass. - """ - raise NotImplementedError() - - - class PACKAGE_OT_install(SubprocMixin, bpy.types.Operator): - bl_idname = 'package.install' - bl_label = 'Install package' - bl_description = 'Downloads and installs a Blender add-on package' - bl_options = {'REGISTER'} - - package_name = bpy.props.StringProperty( - name='package_name', - description='The name of the package to install' - ) - - log = logging.getLogger(__name__ + '.PACKAGE_OT_install') - - def invoke(self, context, event): - if not self.package_name: - self.report({'ERROR'}, 'Package name not given') - return {'CANCELLED'} - - return super().invoke(context, event) - - def create_subprocess(self): - """Starts the download process. - - Also registers the message handlers. 
- - :rtype: multiprocessing.Process - """ - - self.msg_handlers = { - messages.Progress: self._subproc_progress, - messages.DownloadError: self._subproc_download_error, - messages.InstallError: self._subproc_install_error, - messages.Success: self._subproc_success, - messages.Aborted: self._subproc_aborted, - } - - global _packages - package = _packages[self.package_name].get_latest_version() - - import pathlib - - # TODO: We need other paths besides this one on subprocess end, so it might be better to pass them all at once. - # For now, just pass this one. - install_path = pathlib.Path(bpy.utils.user_resource('SCRIPTS', 'addons', create=True)) - self.log.debug("Using %s as install path", install_path) - - import addon_utils - proc = mp_context.Process(target=subproc.download_and_install_package, - args=(self.pipe_subproc, package, install_path)) - return proc - - def _subproc_progress(self, progress: messages.Progress): - self.log.info('Task progress at %i%%', progress.progress * 100) - - def _subproc_download_error(self, error: messages.DownloadError): - self.report({'ERROR'}, 'Unable to download package: %s' % error.message) - self.quit() - - def _subproc_install_error(self, error: messages.InstallError): - self.report({'ERROR'}, 'Unable to install package: %s' % error.message) - self.quit() - - def _subproc_success(self, success: messages.Success): - self.report({'INFO'}, 'Package installed successfully') - global _packages - _packages = build_packagelist() - self.quit() - - def _subproc_aborted(self, aborted: messages.Aborted): - self.report({'ERROR'}, 'Package installation aborted per your request') - self.quit() - - def report_process_died(self): - if self.process.exitcode: - self.log.error('Process died without telling us! Exit code was %i', self.process.exitcode) - self.report({'ERROR'}, 'Error downloading package, exit code %i' % self.process.exitcode) - else: - self.log.error('Process died without telling us! Exit code was 0 though') - self.report({'WARNING'}, 'Error downloading package, but process finished OK. This is weird.') - - class PACKAGE_OT_uninstall(SubprocMixin, bpy.types.Operator): - bl_idname = 'package.uninstall' - bl_label = 'Install package' - bl_description = "Remove installed package files from filesystem" - bl_options = {'REGISTER'} - - package_name = bpy.props.StringProperty(name='package_name', description='The name of the package to uninstall') - - log = logging.getLogger(__name__ + '.PACKAGE_OT_uninstall') - - def invoke(self, context, event): - if not self.package_name: - self.report({'ERROR'}, 'Package name not given') - return {'CANCELLED'} - - return super().invoke(context, event) - - def create_subprocess(self): - """Starts the uninstall process and registers the message handlers. 
- :rtype: multiprocessing.Process - """ - - self.msg_handlers = { - messages.UninstallError: self._subproc_uninstall_error, - messages.Success: self._subproc_success, - } - - import pathlib - install_path = pathlib.Path(bpy.utils.user_resource('SCRIPTS', 'addons', create=True)) - - global _packages - package = _packages[self.package_name].get_latest_version() - - proc = mp_context.Process(target=subproc.uninstall_package, - args=(self.pipe_subproc, package, install_path)) - return proc - - - def _subproc_uninstall_error(self, error: messages.InstallError): - self.report({'ERROR'}, error.message) - self.quit() - - def _subproc_success(self, success: messages.Success): - self.report({'INFO'}, 'Package uninstalled successfully') - bpy.ops.package.refresh() - self.quit() - - def report_process_died(self): - if self.process.exitcode: - self.log.error('Process died without telling us! Exit code was %i', self.process.exitcode) - self.report({'ERROR'}, 'Error downloading package, exit code %i' % self.process.exitcode) - else: - self.log.error('Process died without telling us! Exit code was 0 though') - self.report({'WARNING'}, 'Error downloading package, but process finished OK. This is weird.') - - - def get_installed_packages(refresh=False) -> list: - """Get list of packages installed on disk""" - import addon_utils - installed_pkgs = [] - for mod in addon_utils.modules(refresh=refresh): - pkg = Package.from_module(mod) - pkg.installed = True - installed_pkgs.append(pkg) - return installed_pkgs - - def get_repo_storage_path() -> Path: - return Path(bpy.utils.user_resource('CONFIG', 'repositories')) - - def get_repositories() -> list: - """ - Get list of downloaded repositories and update wm.package_repositories - """ - log = logging.getLogger(__name__ + ".get_repositories") - storage_path = get_repo_storage_path() - repos = bpkg.load_repositories(storage_path) - log.debug("repos: %s", repos) - - return repos - - # class PACKAGE_OT_refresh_packages(bpy.types.Operator): - # bl_idname = "package.refresh_packages" - # bl_label = "Refresh Packages" - # bl_description = "Scan for packages on disk" - # - # log = logging.getLogger(__name__ + ".PACKAGE_OT_refresh_packages") - # - # def execute(self, context): - # global _packages - # installed_packages = get_packages_from_disk(refresh=True) - # available_packages = get_packages_from_repo() - # _packages = build_composite_packagelist(installed_packages, available_packages) - # context.area.tag_redraw() - # - # return {'FINISHED'} - - class PACKAGE_OT_refresh(SubprocMixin, bpy.types.Operator): - bl_idname = "package.refresh" - bl_label = "Refresh" - bl_description = 'Check repositories for new and updated packages' - bl_options = {'REGISTER'} - - log = logging.getLogger(__name__ + ".PACKAGE_OT_refresh") - _running = False - - def invoke(self, context, event): - wm = context.window_manager - self.repositories = wm.package_repositories - if len(self.repositories) == 0: - self.report({'ERROR'}, "No repositories to refresh") - return {'CANCELLED'} - - PACKAGE_OT_refresh._running = True - return super().invoke(context, event) - - @classmethod - def poll(cls, context): - return not cls._running - - def cancel(self, context): - PACKAGE_OT_refresh._running = False - context.area.tag_redraw() - - def create_subprocess(self): - """Starts the download process. - - Also registers the message handlers. 
- - :rtype: multiprocessing.Process - """ - - #TODO: make sure all possible messages are handled - self.msg_handlers = { - messages.Progress: self._subproc_progress, - messages.SubprocError: self._subproc_error, - messages.DownloadError: self._subproc_download_error, - messages.Success: self._subproc_success, - # messages.RepositoryResult: self._subproc_repository_result, - messages.BadRepositoryError: self._subproc_repository_error, - messages.Aborted: self._subproc_aborted, - } - - import pathlib - - storage_path = pathlib.Path(bpy.utils.user_resource('CONFIG', 'repositories', create=True)) - repository_urls = [repo.url for repo in self.repositories] - self.log.debug("Repository urls %s", repository_urls) - - proc = mp_context.Process(target=subproc.refresh_repositories, - args=(self.pipe_subproc, storage_path, repository_urls)) - return proc - - def _subproc_progress(self, progress: messages.Progress): - self.log.info('Task progress at %i%%', progress.progress * 100) - - def _subproc_error(self, error: messages.SubprocError): - self.report({'ERROR'}, 'Unable to refresh package list: %s' % error.message) - self.quit() - - def _subproc_download_error(self, error: messages.DownloadError): - self.report({'ERROR'}, 'Unable to download package list: %s' % error.message) - self.quit() - - def _subproc_repository_error(self, error: messages.BadRepositoryError): - self.report({'ERROR'}, str(error.message)) - self.quit() - - def _subproc_success(self, success: messages.Success): - self.report({'INFO'}, 'Finished refreshing lists') - global _packages - _packages = build_packagelist() - self.quit() - - def _subproc_aborted(self, aborted: messages.Aborted): - self.report({'ERROR'}, 'Package list retrieval aborted per your request') - self.quit() - - def report_process_died(self): - if self.process.exitcode: - self.log.error('Refresh process died without telling us! Exit code was %i', self.process.exitcode) - self.report({'ERROR'}, 'Error refreshing package lists, exit code %i' % self.process.exitcode) - else: - self.log.error('Refresh process died without telling us! Exit code was 0 though') - self.report({'WARNING'}, 'Error refreshing package lists, but process finished OK. 
This is weird.') - - class RepositoryProperty(bpy.types.PropertyGroup): - name = bpy.props.StringProperty(name="Name") - url = bpy.props.StringProperty(name="URL") - status = bpy.props.EnumProperty(name="Status", items=[ - ("OK", "Okay", "FILE_TICK"), - ("NOTFOUND", "Not found", "ERROR"), - ("NOCONNECT", "Could not connect", "QUESTION"), - ]) - enabled = bpy.props.BoolProperty(name="Enabled") - - class PACKAGE_UL_repositories(bpy.types.UIList): - def draw_item(self, context, layout, data, item, icon, active_data, active_propname): - layout.alignment='LEFT' - layout.prop(item, "enabled", text="") - if len(item.name) == 0: - layout.label(item['url']) - else: - layout.label(item.name) - - class PACKAGE_OT_add_repository(bpy.types.Operator): - bl_idname = "package.add_repository" - bl_label = "Add Repository" - - url = bpy.props.StringProperty(name="Repository URL") - - def invoke(self, context, event): - wm = context.window_manager - return wm.invoke_props_dialog(self) - - def execute(self, context): - wm = context.window_manager - - if len(self.url) == 0: - self.report({'ERROR'}, "Repository URL not specified") - return {'CANCELLED'} - - repo = wm.package_repositories.add() - repo.url = utils.sanitize_repository_url(self.url) - - bpy.ops.package.refresh() - - context.area.tag_redraw() - return {'FINISHED'} - - class PACKAGE_OT_remove_repository(bpy.types.Operator): - bl_idname = "package.remove_repository" - bl_label = "Remove Repository" - - def execute(self, context): - wm = context.window_manager - try: - repo = wm['package_repositories'][wm.package_active_repository] - except IndexError: - return {'CANCELLED'} - - filename = bpkg.utils.format_filename(repo.name) + ".json" - path = (get_repo_storage_path() / filename) - if path.exists(): - path.unlink() - - wm.package_repositories.remove(wm.package_active_repository) - - return {'FINISHED'} - - class USERPREF_PT_packages(bpy.types.Panel): - bl_label = "Package Management" - bl_space_type = 'USER_PREFERENCES' - bl_region_type = 'WINDOW' - bl_options = {'HIDE_HEADER'} - - log = logging.getLogger(__name__ + '.USERPREF_PT_packages') - - displayed_packages = [] - expanded_packages = [] - preference_package = None - - redraw = True - - @classmethod - def poll(cls, context): - userpref = context.user_preferences - return (userpref.active_section == 'PACKAGES') - - def draw(self, context): - layout = self.layout - wm = context.window_manager - - mainrow = layout.row() - spl = mainrow.split(.2) - sidebar = spl.column(align=True) - pkgzone = spl.column() - - sidebar.label("Repositories") - row = sidebar.row() - row.template_list("PACKAGE_UL_repositories", "", wm, "package_repositories", wm, "package_active_repository") - col = row.column(align=True) - col.operator(PACKAGE_OT_add_repository.bl_idname, text="", icon='ZOOMIN') - col.operator(PACKAGE_OT_remove_repository.bl_idname, text="", icon='ZOOMOUT') - sidebar.separator() - sidebar.operator(PACKAGE_OT_refresh.bl_idname, text="Check for updates") - - sidebar.separator() - sidebar.label("Category") - sidebar.prop(wm, "addon_filter", text="") - - sidebar.separator() - sidebar.label("Support level") - sidebar.prop(wm, "addon_support") - - top = pkgzone.row() - spl = top.split(.6) - spl.prop(wm, "package_search", text="", icon='VIEWZOOM') - spl_r = spl.row() - spl_r.prop(wm, "package_install_filter", expand=True) - - def filtered_packages(filters: dict, packages: OrderedDict) -> list:# {{{ - """Returns filtered and sorted list of names of packages which match filters""" - - #TODO: using lower() for 
case-insensitive comparison doesn't work in some languages - def match_contains(blinfo) -> bool: - if blinfo['name'].lower().__contains__(filters['search'].lower()): - return True - return False - - def match_startswith(blinfo) -> bool: - if blinfo['name'].lower().startswith(filters['search'].lower()): - return True - return False - - def match_support(blinfo) -> bool: - if 'support' in blinfo: - if set((blinfo['support'],)).issubset(filters['support']): - return True - else: - if {'COMMUNITY'}.issubset(filters['support']): - return True - return False - - def match_installstate(metapkg: ConsolidatedPackage) -> bool: - if filters['installstate'] == 'AVAILABLE': - return True - - if filters['installstate'] == 'INSTALLED': - if metapkg.installed: - return True - - if filters['installstate'] == 'UPDATES': - if metapkg.installed: - if metapkg.get_latest_installed_version().version < metapkg.get_latest_version().version: - return True - return False - - def match_repositories(metapkg) -> bool: - pkg = metapkg.get_display_version() - for repo in pkg.repositories: - if repo.name in filters['repository']: - return True - return False - - def match_category(blinfo) -> bool: - if filters['category'].lower() == 'all': - return True - if 'category' not in blinfo: - return False - if blinfo['category'].lower() == filters['category'].lower(): - return True - return False - - - # use two lists as a simple way of putting "matches from the beginning" on top - contains = [] - startswith = [] - - for pkgname, metapkg in packages.items(): - blinfo = metapkg.versions[0].bl_info - if match_repositories(metapkg)\ - and match_category(blinfo)\ - and match_support(blinfo)\ - and match_installstate(metapkg): - if len(filters['search']) == 0: - startswith.append(pkgname) - continue - if match_startswith(blinfo): - startswith.append(pkgname) - continue - if match_contains(blinfo): - contains.append(pkgname) - continue - - return startswith + contains# }}} - - def draw_package(metapkg: ConsolidatedPackage, layout: bpy.types.UILayout): #{{{ - """Draws the given package""" - pkg = metapkg.get_display_version() - - def draw_operators(metapkg, layout): # {{{ - """ - Draws install, uninstall, update, enable, disable, and preferences - buttons as applicable for the given package - """ - pkg = metapkg.get_display_version() - - if metapkg.installed: - if metapkg.updateable: - layout.operator( - PACKAGE_OT_install.bl_idname, - text="Update to {}".format(utils.fmt_version(metapkg.get_latest_version().version)), - ).package_name=metapkg.name - layout.separator() - - #TODO: only show preferences button if addon has preferences to show - if pkg.enabled: - layout.operator( - WM_OT_package_toggle_preferences.bl_idname, - text="Preferences", - ).package_name=metapkg.name - layout.operator( - PACKAGE_OT_uninstall.bl_idname, - text="Uninstall", - ).package_name=metapkg.name - else: - layout.operator( - PACKAGE_OT_install.bl_idname, - text="Install", - ).package_name=metapkg.name - # }}} - - def draw_preferences(pkg: Package, layout: bpy.types.UILayout): - """Draw the package's preferences in the given layout""" - addon_preferences = context.user_preferences.addons[pkg.module_name].preferences - if addon_preferences is not None: - draw = getattr(addon_preferences, "draw", None) - if draw is not None: - addon_preferences_class = type(addon_preferences) - box_prefs = layout.box() - box_prefs.label("Preferences:") - addon_preferences_class.layout = box_prefs - try: - draw(context) - except: - import traceback - traceback.print_exc() - 
box_prefs.label(text="Error (see console)", icon='ERROR') - del addon_preferences_class.layout - - def collapsed(metapkg, layout):# {{{ - """Draw collapsed version of package layout""" - pkg = metapkg.get_display_version() - - # Only 'install' button is shown when package isn't installed, - # so allow more space for title/description. - spl = layout.split(.5 if pkg.installed else .8) - - metacol = spl.column(align=True) - - buttonrow = spl.row(align=True) - buttonrow.alignment = 'RIGHT' - - l1 = metacol.row() - l2 = metacol.row() - - draw_operators(metapkg, buttonrow) - - if pkg.installed: - metacol.active = pkg.enabled - l1.operator(PACKAGE_OT_toggle_enabled.bl_idname, - icon='CHECKBOX_HLT' if pkg.enabled else 'CHECKBOX_DEHLT', - text="", - emboss=False, - ).package_name = metapkg.name - - if pkg.name: - l1.label(text=pkg.name) - if pkg.description: - l2.label(text=pkg.description) - l2.enabled = False #Give name more visual weight - # }}} - - - def expanded(metapkg, layout, layoutbox):# {{{ - """Draw expanded version of package layout""" - - pkg = metapkg.get_display_version() - - metacol = layoutbox.column(align=True) - row1 = layout.row(align=True) - row1.operator(PACKAGE_OT_toggle_enabled.bl_idname, - icon='CHECKBOX_HLT' if pkg.enabled else 'CHECKBOX_DEHLT', - text="", - emboss=False, - ).package_name = metapkg.name - row1.label(pkg.name) - - if metapkg.installed: - metacol.active = pkg.enabled - row1.active = pkg.enabled - - if pkg.description: - row = metacol.row() - row.label(pkg.description) - - def draw_metadatum(label: str, value: str, layout: bpy.types.UILayout): - """Draw the given key value pair in a new row in given layout container""" - row = layout.row() - row.scale_y = .8 - spl = row.split(.15) - spl.label("{}:".format(label)) - spl.label(value) - - # don't compare against None here; we don't want to display empty arrays/strings either - if pkg.location: - draw_metadatum("Location", pkg.location, metacol) - if pkg.version: - draw_metadatum("Version", utils.fmt_version(pkg.version), metacol) - if pkg.blender: - draw_metadatum("Blender version", utils.fmt_version(pkg.blender), metacol) - if pkg.category: - draw_metadatum("Category", pkg.category, metacol) - if pkg.author: - draw_metadatum("Author", pkg.author, metacol) - if pkg.support: - draw_metadatum("Support level", pkg.support.title(), metacol) - if pkg.warning: - draw_metadatum("Warning", pkg.warning, metacol) - - metacol.separator() - - spl = layoutbox.row().split(.35) - urlrow = spl.row() - buttonrow = spl.row(align=True) - - urlrow.alignment = 'LEFT' - if pkg.wiki_url: - urlrow.operator("wm.url_open", text="Documentation", icon='HELP').url=pkg.wiki_url - if pkg.tracker_url: - urlrow.operator("wm.url_open", text="Report a Bug", icon='URL').url=pkg.tracker_url - - buttonrow.alignment = 'RIGHT' - draw_operators(metapkg, buttonrow) - - def draw_version(layout: bpy.types.UILayout, pkg: Package): - """Draw version of package""" - spl = layout.split(.9) - left = spl.column() - right = spl.column() - right.alignment = 'RIGHT' - - left.label(text=utils.fmt_version(pkg.version)) - - for repo in pkg.repositories: - draw_metadatum("Repository", repo.name, left) - - if pkg.installed: - right.label(text="Installed") - - draw_metadatum("Installed to", str(pkg.installed_location), left) - - if len(metapkg.versions) > 1: - row = pkgbox.row() - row.label(text="There are multiple versions of this package:") - for version in metapkg.versions: - subvbox = pkgbox.box() - draw_version(subvbox, version) - - # }}} - - is_expanded = 
(metapkg.name in self.expanded_packages) - - pkgbox = layout.box() - row = pkgbox.row(align=True) - row.operator( - WM_OT_package_toggle_expand.bl_idname, - icon='TRIA_DOWN' if is_expanded else 'TRIA_RIGHT', - emboss=False, - ).package_name=metapkg.name - - if is_expanded: - expanded(metapkg, row, pkgbox) - else: - collapsed(metapkg, row)# }}} - - if pkg.installed and pkg.enabled and pkg.name == USERPREF_PT_packages.preference_package: - draw_preferences(pkg, pkgbox) - - - def center_message(layout, msg: str): - """draw a label in the center of an extra-tall row""" - row = layout.row() - row.label(text=msg) - row.alignment='CENTER' - row.scale_y = 10 - - global _main_has_run - if not _main_has_run: - # TODO: read repository and installed packages synchronously for now; - # can't run an operator from draw code to do async monitoring - main() - - global _packages - if len(_packages) == 0: - center_message(pkgzone, "No packages found") - return - - wm = bpy.context.window_manager - filters = { - 'category': wm.addon_filter, - 'search': wm.package_search, - 'support': wm.addon_support, - 'repository': set([repo.name for repo in wm.package_repositories if repo.enabled]), - 'installstate': wm.package_install_filter, - } - USERPREF_PT_packages.displayed_packages = filtered_packages(filters, _packages) - - for pkgname in USERPREF_PT_packages.displayed_packages: - row = pkgzone.row() - draw_package(_packages[pkgname], row) - - - class WM_OT_package_toggle_expand(bpy.types.Operator):# {{{ - bl_idname = "wm.package_toggle_expand" - bl_label = "" - bl_description = "Toggle display of extended information for given package (hold shift to collapse all other packages)" - bl_options = {'INTERNAL'} - - log = logging.getLogger(__name__ + ".WM_OT_package_toggle_expand") - - package_name = bpy.props.StringProperty( - name="Package Name", - description="Name of package to expand/collapse", - ) - - def invoke(self, context, event): - if event.shift: - USERPREF_PT_packages.expanded_packages = [] - if self.package_name in USERPREF_PT_packages.expanded_packages: - USERPREF_PT_packages.expanded_packages.remove(self.package_name) - else: - USERPREF_PT_packages.expanded_packages.append(self.package_name) - - return {'FINISHED'}# }}} - - class WM_OT_package_toggle_preferences(bpy.types.Operator):# {{{ - bl_idname = "wm.package_toggle_preferences" - bl_label = "" - bl_description = "Toggle display of package preferences" - bl_options = {'INTERNAL'} - - package_name = bpy.props.StringProperty( - name="Package Name", - description="Name of package whos preferences to display", - ) - - def invoke(self, context, event): - if USERPREF_PT_packages.preference_package == self.package_name: - USERPREF_PT_packages.preference_package = None - else: - USERPREF_PT_packages.preference_package = self.package_name - return {'FINISHED'}# }}} - - class PACKAGE_OT_toggle_enabled(bpy.types.Operator):# {{{ - bl_idname = "package.toggle_enabled" - bl_label = "" - bl_description = "Enable given package if it's disabled, and vice versa if it's enabled" - - log = logging.getLogger(__name__ + ".PACKAGE_OT_toggle_enabled") - - package_name = bpy.props.StringProperty( - name="Package Name", - description="Name of package to enable", - ) - - def execute(self, context): - import addon_utils - global _packages - metapkg = _packages[self.package_name] - - - if not metapkg.installed: - self.report({'ERROR'}, "Can't enable package which isn't installed") - return {'CANCELLED'} - - # enable/disable all installed versions, just in case there are more 
than one - for pkg in metapkg.versions: - if not pkg.installed: - continue - if not pkg.module_name: - self.log.warning("Can't enable package `%s` without a module name", pkg.name) - continue - - if pkg.enabled: - addon_utils.disable(pkg.module_name, default_set=True) - pkg.enabled = False - self.log.debug("Disabling") - else: - self.log.debug("Enabling") - addon_utils.enable(pkg.module_name, default_set=True) - pkg.enabled = True - - return {'FINISHED'}# }}} - - class PACKAGE_OT_disable(bpy.types.Operator):# {{{ - bl_idname = "package.disable" - bl_label = "" - bl_description = "Disable given package" - - log = logging.getLogger(__name__ + ".PACKAGE_OT_disable") - - package_name = bpy.props.StringProperty( - name="Package Name", - description="Name of package to disable", - ) - - def execute(self, context): - global _packages - package = _packages[self.package_name].get_display_version() - - if not package.module_name: - self.log.error("Can't disable package without a module name") - return {'CANCELLED'} - - ret = bpy.ops.wm.addon_disable(package.module_name) - if ret == {'FINISHED'}: - _packages[self.package_name].enabled = False - return ret# }}} - - # class PackageManagerPreferences(bpy.types.AddonPreferences): - # bl_idname = __package__ - # - # repositories = bpy.props.CollectionProperty( - # type=RepositoryProperty, - # name="Repositories", - # ) - # active_repository = bpy.props.IntProperty() - - def build_packagelist() -> OrderedDict:# {{{ - """Make an OrderedDict of ConsolidatedPackages from known repositories + installed packages, keyed by package name""" - - log = logging.getLogger(__name__ + ".build_composite_packagelist") - masterlist = {} - installed_packages = get_installed_packages(refresh=True) - known_repositories = get_repositories() - - for repo in known_repositories: - for pkg in repo.packages: - pkg.repositories.add(repo) - if pkg.name is None: - return OrderedDict() - if pkg.name in masterlist: - masterlist[pkg.name].add_version(pkg) - else: - masterlist[pkg.name] = ConsolidatedPackage(pkg) - - for pkg in installed_packages: - if pkg.name in masterlist: - masterlist[pkg.name].add_version(pkg) - else: - masterlist[pkg.name] = ConsolidatedPackage(pkg) - - # log.debug(masterlist[None].__dict__) - return OrderedDict(sorted(masterlist.items()))# }}} - - def main(): - """Entry point; performs initial loading of repositories and installed packages""" - global _packages - global _main_has_run - - _packages = build_packagelist() - - # load repositories from disk - repos = get_repositories() - wm = bpy.context.window_manager - wm.package_repositories.clear() - - #TODO: store repository props in .blend so enabled/disabled state can be remembered - for repo in repos: - repo_prop = wm.package_repositories.add() - repo_prop.name = repo.name - repo_prop.enabled = True - repo_prop.url = repo.url - - # needed for lazy loading - _main_has_run = True - - - def register(): - bpy.utils.register_class(PACKAGE_OT_install) - bpy.utils.register_class(PACKAGE_OT_uninstall) - bpy.utils.register_class(PACKAGE_OT_toggle_enabled) - # bpy.utils.register_class(PACKAGE_OT_disable) - # bpy.utils.register_class(PACKAGE_OT_refresh_repositories) - # bpy.utils.register_class(PACKAGE_OT_refresh_packages) - bpy.utils.register_class(PACKAGE_OT_refresh) - bpy.utils.register_class(USERPREF_PT_packages) - bpy.utils.register_class(WM_OT_package_toggle_expand) - bpy.utils.register_class(WM_OT_package_toggle_preferences) - bpy.types.WindowManager.package_search = bpy.props.StringProperty( - name="Search", - 
description="Filter packages by name", - options={'TEXTEDIT_UPDATE'} - ) - bpy.types.WindowManager.package_install_filter = bpy.props.EnumProperty( - items=[('AVAILABLE', "Available", "All packages in selected repositories"), - ('INSTALLED', "Installed", "All installed packages"), - ('UPDATES', "Updates", "All installed packages for which there is a newer version availabe") - ], - name="Install filter", - default='AVAILABLE', - ) - - bpy.utils.register_class(RepositoryProperty) - bpy.types.WindowManager.package_repositories = bpy.props.CollectionProperty( - type=RepositoryProperty, - name="Repositories", - ) - bpy.types.WindowManager.package_active_repository = bpy.props.IntProperty() - bpy.utils.register_class(PACKAGE_OT_add_repository) - bpy.utils.register_class(PACKAGE_OT_remove_repository) - bpy.utils.register_class(PACKAGE_UL_repositories) - - # bpy.utils.register_class(PackageManagerPreferences) - - - def unregister(): - bpy.utils.unregister_class(PACKAGE_OT_install) - bpy.utils.unregister_class(PACKAGE_OT_uninstall) - bpy.utils.unregister_class(PACKAGE_OT_toggle_enabled) - # bpy.utils.unregister_class(PACKAGE_OT_disable) - # bpy.utils.unregister_class(PACKAGE_OT_refresh_repositories) - # bpy.utils.unregister_class(PACKAGE_OT_refresh_packages) - bpy.utils.unregister_class(PACKAGE_OT_refresh) - bpy.utils.unregister_class(USERPREF_PT_packages) - bpy.utils.unregister_class(WM_OT_package_toggle_expand) - bpy.utils.unregister_class(WM_OT_package_toggle_preferences) - del bpy.types.WindowManager.package_search - del bpy.types.WindowManager.package_install_filter - - bpy.utils.unregister_class(RepositoryProperty) - del bpy.types.WindowManager.package_repositories - del bpy.types.WindowManager.package_active_repository - bpy.utils.unregister_class(PACKAGE_OT_add_repository) - bpy.utils.unregister_class(PACKAGE_OT_remove_repository) - bpy.utils.unregister_class(PACKAGE_UL_repositories) - - # bpy.utils.unregister_class(PackageManagerPreferences) diff --git a/package_manager/appdirs.py b/package_manager/appdirs.py deleted file mode 100644 index 13485be..0000000 --- a/package_manager/appdirs.py +++ /dev/null @@ -1,552 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2005-2010 ActiveState Software Inc. -# Copyright (c) 2013 Eddy Petrișor - -"""Utilities for determining application-specific dirs. - -See for details and usage. -""" -# Dev Notes: -# - MSDN on where to store app data files: -# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 -# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html -# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html - -__version_info__ = (1, 4, 0) -__version__ = '.'.join(map(str, __version_info__)) - - -import sys -import os - -PY3 = sys.version_info[0] == 3 - -if PY3: - unicode = str - -if sys.platform.startswith('java'): - import platform - os_name = platform.java_ver()[3][0] - if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc. - system = 'win32' - elif os_name.startswith('Mac'): # "Mac OS X", etc. - system = 'darwin' - else: # "Linux", "SunOS", "FreeBSD", etc. - # Setting this to "linux2" is not ideal, but only Windows or Mac - # are actually checked for and the rest of the module expects - # *sys.platform* style strings. 
- system = 'linux2' -else: - system = sys.platform - - - -def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): - r"""Return full path to the user-specific data dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "roaming" (boolean, default False) can be set True to use the Windows - roaming appdata directory. That means that for users on a Windows - network setup for roaming profiles, this user data will be - sync'd on login. See - - for a discussion of issues. - - Typical user data directories are: - Mac OS X: ~/Library/Application Support/ - Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined - Win XP (not roaming): C:\Documents and Settings\\Application Data\\ - Win XP (roaming): C:\Documents and Settings\\Local Settings\Application Data\\ - Win 7 (not roaming): C:\Users\\AppData\Local\\ - Win 7 (roaming): C:\Users\\AppData\Roaming\\ - - For Unix, we follow the XDG spec and support $XDG_DATA_HOME. - That means, by default "~/.local/share/". - """ - if system == "win32": - if appauthor is None: - appauthor = appname - const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" - path = os.path.normpath(_get_win_folder(const)) - if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - elif system == 'darwin': - path = os.path.expanduser('~/Library/Application Support/') - if appname: - path = os.path.join(path, appname) - else: - path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) - if appname: - path = os.path.join(path, appname) - if appname and version: - path = os.path.join(path, version) - return path - - -def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): - """Return full path to the user-shared data dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "multipath" is an optional parameter only applicable to *nix - which indicates that the entire list of data dirs should be - returned. By default, the first item from XDG_DATA_DIRS is - returned, or '/usr/local/share/', - if XDG_DATA_DIRS is not set - - Typical user data directories are: - Mac OS X: /Library/Application Support/ - Unix: /usr/local/share/ or /usr/share/ - Win XP: C:\Documents and Settings\All Users\Application Data\\ - Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) - Win 7: C:\ProgramData\\ # Hidden, but writeable on Win 7. 
- - For Unix, this is using the $XDG_DATA_DIRS[0] default. - - WARNING: Do not use this on Windows. See the Vista-Fail note above for why. - """ - if system == "win32": - if appauthor is None: - appauthor = appname - path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) - if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - elif system == 'darwin': - path = os.path.expanduser('/Library/Application Support') - if appname: - path = os.path.join(path, appname) - else: - # XDG default for $XDG_DATA_DIRS - # only first, if multipath is False - path = os.getenv('XDG_DATA_DIRS', - os.pathsep.join(['/usr/local/share', '/usr/share'])) - pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] - if appname: - if version: - appname = os.path.join(appname, version) - pathlist = [os.sep.join([x, appname]) for x in pathlist] - - if multipath: - path = os.pathsep.join(pathlist) - else: - path = pathlist[0] - return path - - if appname and version: - path = os.path.join(path, version) - return path - - -def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): - r"""Return full path to the user-specific config dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "roaming" (boolean, default False) can be set True to use the Windows - roaming appdata directory. That means that for users on a Windows - network setup for roaming profiles, this user data will be - sync'd on login. See - - for a discussion of issues. - - Typical user data directories are: - Mac OS X: same as user_data_dir - Unix: ~/.config/ # or in $XDG_CONFIG_HOME, if defined - Win *: same as user_data_dir - - For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. - That means, by deafult "~/.config/". - """ - if system in ["win32", "darwin"]: - path = user_data_dir(appname, appauthor, None, roaming) - else: - path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) - if appname: - path = os.path.join(path, appname) - if appname and version: - path = os.path.join(path, version) - return path - - -def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): - """Return full path to the user-shared data dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "multipath" is an optional parameter only applicable to *nix - which indicates that the entire list of config dirs should be - returned. 
By default, the first item from XDG_CONFIG_DIRS is - returned, or '/etc/xdg/', if XDG_CONFIG_DIRS is not set - - Typical user data directories are: - Mac OS X: same as site_data_dir - Unix: /etc/xdg/ or $XDG_CONFIG_DIRS[i]/ for each value in - $XDG_CONFIG_DIRS - Win *: same as site_data_dir - Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) - - For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False - - WARNING: Do not use this on Windows. See the Vista-Fail note above for why. - """ - if system in ["win32", "darwin"]: - path = site_data_dir(appname, appauthor) - if appname and version: - path = os.path.join(path, version) - else: - # XDG default for $XDG_CONFIG_DIRS - # only first, if multipath is False - path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') - pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] - if appname: - if version: - appname = os.path.join(appname, version) - pathlist = [os.sep.join([x, appname]) for x in pathlist] - - if multipath: - path = os.pathsep.join(pathlist) - else: - path = pathlist[0] - return path - - -def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): - r"""Return full path to the user-specific cache dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "opinion" (boolean) can be False to disable the appending of - "Cache" to the base app data dir for Windows. See - discussion below. - - Typical user cache directories are: - Mac OS X: ~/Library/Caches/ - Unix: ~/.cache/ (XDG default) - Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Cache - Vista: C:\Users\\AppData\Local\\\Cache - - On Windows the only suggestion in the MSDN docs is that local settings go in - the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming - app data dir (the default returned by `user_data_dir` above). Apps typically - put cache data somewhere *under* the given dir here. Some examples: - ...\Mozilla\Firefox\Profiles\\Cache - ...\Acme\SuperApp\Cache\1.0 - OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. - This can be disabled with the `opinion=False` option. - """ - if system == "win32": - if appauthor is None: - appauthor = appname - path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) - if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - if opinion: - path = os.path.join(path, "Cache") - elif system == 'darwin': - path = os.path.expanduser('~/Library/Caches') - if appname: - path = os.path.join(path, appname) - else: - path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) - if appname: - path = os.path.join(path, appname.lower().replace(' ', '-')) - if appname and version: - path = os.path.join(path, version) - return path - - -def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): - r"""Return full path to the user-specific log dir for this application. 
- - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "opinion" (boolean) can be False to disable the appending of - "Logs" to the base app data dir for Windows, and "log" to the - base cache dir for Unix. See discussion below. - - Typical user cache directories are: - Mac OS X: ~/Library/Logs/ - Unix: ~/.cache//log # or under $XDG_CACHE_HOME if defined - Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Logs - Vista: C:\Users\\AppData\Local\\\Logs - - On Windows the only suggestion in the MSDN docs is that local settings - go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in - examples of what some windows apps use for a logs dir.) - - OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` - value for Windows and appends "log" to the user cache dir for Unix. - This can be disabled with the `opinion=False` option. - """ - if system == "darwin": - path = os.path.join( - os.path.expanduser('~/Library/Logs'), - appname) - elif system == "win32": - path = user_data_dir(appname, appauthor, version) - version = False - if opinion: - path = os.path.join(path, "Logs") - else: - path = user_cache_dir(appname, appauthor, version) - version = False - if opinion: - path = os.path.join(path, "log") - if appname and version: - path = os.path.join(path, version) - return path - - -class AppDirs(object): - """Convenience wrapper for getting application dirs.""" - def __init__(self, appname, appauthor=None, version=None, roaming=False, - multipath=False): - self.appname = appname - self.appauthor = appauthor - self.version = version - self.roaming = roaming - self.multipath = multipath - - @property - def user_data_dir(self): - return user_data_dir(self.appname, self.appauthor, - version=self.version, roaming=self.roaming) - - @property - def site_data_dir(self): - return site_data_dir(self.appname, self.appauthor, - version=self.version, multipath=self.multipath) - - @property - def user_config_dir(self): - return user_config_dir(self.appname, self.appauthor, - version=self.version, roaming=self.roaming) - - @property - def site_config_dir(self): - return site_config_dir(self.appname, self.appauthor, - version=self.version, multipath=self.multipath) - - @property - def user_cache_dir(self): - return user_cache_dir(self.appname, self.appauthor, - version=self.version) - - @property - def user_log_dir(self): - return user_log_dir(self.appname, self.appauthor, - version=self.version) - - -#---- internal support stuff - -def _get_win_folder_from_registry(csidl_name): - """This is a fallback technique at best. I'm not sure if using the - registry for this guarantees us the correct answer for all CSIDL_* - names. 
- """ - import _winreg - - shell_folder_name = { - "CSIDL_APPDATA": "AppData", - "CSIDL_COMMON_APPDATA": "Common AppData", - "CSIDL_LOCAL_APPDATA": "Local AppData", - }[csidl_name] - - key = _winreg.OpenKey( - _winreg.HKEY_CURRENT_USER, - r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" - ) - dir, type = _winreg.QueryValueEx(key, shell_folder_name) - return dir - - -def _get_win_folder_with_pywin32(csidl_name): - from win32com.shell import shellcon, shell - dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) - # Try to make this a unicode path because SHGetFolderPath does - # not return unicode strings when there is unicode data in the - # path. - try: - dir = unicode(dir) - - # Downgrade to short path name if have highbit chars. See - # . - has_high_char = False - for c in dir: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - try: - import win32api - dir = win32api.GetShortPathName(dir) - except ImportError: - pass - except UnicodeError: - pass - return dir - - -def _get_win_folder_with_ctypes(csidl_name): - import ctypes - - csidl_const = { - "CSIDL_APPDATA": 26, - "CSIDL_COMMON_APPDATA": 35, - "CSIDL_LOCAL_APPDATA": 28, - }[csidl_name] - - buf = ctypes.create_unicode_buffer(1024) - ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) - - # Downgrade to short path name if have highbit chars. See - # . - has_high_char = False - for c in buf: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - buf2 = ctypes.create_unicode_buffer(1024) - if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): - buf = buf2 - - return buf.value - -def _get_win_folder_with_jna(csidl_name): - import array - from com.sun import jna - from com.sun.jna.platform import win32 - - buf_size = win32.WinDef.MAX_PATH * 2 - buf = array.zeros('c', buf_size) - shell = win32.Shell32.INSTANCE - shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) - dir = jna.Native.toString(buf.tostring()).rstrip("\0") - - # Downgrade to short path name if have highbit chars. See - # . 
- has_high_char = False - for c in dir: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - buf = array.zeros('c', buf_size) - kernel = win32.Kernel32.INSTANCE - if kernal.GetShortPathName(dir, buf, buf_size): - dir = jna.Native.toString(buf.tostring()).rstrip("\0") - - return dir - -if system == "win32": - try: - import win32com.shell - _get_win_folder = _get_win_folder_with_pywin32 - except ImportError: - try: - from ctypes import windll - _get_win_folder = _get_win_folder_with_ctypes - except ImportError: - try: - import com.sun.jna - _get_win_folder = _get_win_folder_with_jna - except ImportError: - _get_win_folder = _get_win_folder_from_registry - - -#---- self test code - -if __name__ == "__main__": - appname = "MyApp" - appauthor = "MyCompany" - - props = ("user_data_dir", "site_data_dir", - "user_config_dir", "site_config_dir", - "user_cache_dir", "user_log_dir") - - print("-- app dirs (with optional 'version')") - dirs = AppDirs(appname, appauthor, version="1.0") - for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) - - print("\n-- app dirs (without optional 'version')") - dirs = AppDirs(appname, appauthor) - for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) - - print("\n-- app dirs (without optional 'appauthor')") - dirs = AppDirs(appname) - for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) - - print("\n-- app dirs (with disabled 'appauthor')") - dirs = AppDirs(appname, appauthor=False) - for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) diff --git a/package_manager/bpkg/__init__.py b/package_manager/bpkg/__init__.py deleted file mode 100644 index 556932b..0000000 --- a/package_manager/bpkg/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -__all__ = ( - "exceptions", - "types", -) - -from . types import ( - Package, - Repository, - ) -from pathlib import Path - -def load_repositories(repo_storage_path: Path) -> list: - repositories = [] - for repofile in repo_storage_path.glob('*.json'): - # try - repo = Repository.from_file(repofile) - # except - repositories.append(repo) - return repositories diff --git a/package_manager/bpkg/exceptions.py b/package_manager/bpkg/exceptions.py deleted file mode 100644 index 0e9d339..0000000 --- a/package_manager/bpkg/exceptions.py +++ /dev/null @@ -1,14 +0,0 @@ -class BpkgException(Exception): - """Superclass for all package manager exceptions""" - -class InstallException(BpkgException): - """Raised when there is an error during installation""" - -class DownloadException(BpkgException): - """Raised when there is an error downloading something""" - -class BadRepositoryException(BpkgException): - """Raised when there is an error while reading or manipulating a repository""" - -class PackageException(BpkgException): - """Raised when there is an error while manipulating a package""" diff --git a/package_manager/bpkg/types.py b/package_manager/bpkg/types.py deleted file mode 100644 index 7ed99b2..0000000 --- a/package_manager/bpkg/types.py +++ /dev/null @@ -1,501 +0,0 @@ -import logging -import json -from pathlib import Path -from . import exceptions -from . 
import utils - -class Package: - """ - Stores package methods and metadata - """ - - log = logging.getLogger(__name__ + ".Package") - - def __init__(self, package_dict:dict = None): - self.bl_info = {} - self.url = "" - self.files = [] - - self.repositories = set() - self.installed_location = None - self.module_name = None - - self.installed = False - self.is_user = False - self.enabled = False - - self.set_from_dict(package_dict) - - def test_is_user(self) -> bool: - """Return true if package's install location is in user or preferences scripts path""" - import bpy - user_script_path = bpy.utils.script_path_user() - prefs_script_path = bpy.utils.script_path_pref() - - if user_script_path is not None: - in_user = Path(user_script_path) in Path(self.installed_location).parents - else: - in_user = False - - if prefs_script_path is not None: - in_prefs = Path(prefs_script_path) in Path(self.installed_location).parents - else: - in_prefs = False - - return in_user or in_prefs - - def test_enabled(self) -> bool: - """Return true if package is enabled""" - import bpy - if self.module_name is not None: - return (self.module_name in bpy.context.user_preferences.addons) - else: - return False - - def test_installed(self) -> bool: - """Return true if package is installed""" - import addon_utils - return len([Package.from_module(mod) for mod in addon_utils.modules(refresh=False) if - addon_utils.module_bl_info(mod)['name'] == self.name and - addon_utils.module_bl_info(mod)['version'] == self.version]) > 0 - - def set_installed_metadata(self, installed_pkg): - """Sets metadata specific to installed packages from the Package given as `installed_pkg`""" - self.installed = installed_pkg.test_installed() - self.enabled = installed_pkg.test_enabled() - self.is_user = installed_pkg.test_is_user() - self.module_name = installed_pkg.module_name - self.installed_location = installed_pkg.installed_location - - def to_dict(self) -> dict: - """ - Return a dict representation of the package - """ - return { - 'bl_info': self.bl_info, - 'url': self.url, - 'files': self.files, - } - - def set_from_dict(self, package_dict: dict): - """ - Get attributes from a dict such as produced by `to_dict` - """ - if package_dict is None: - package_dict = {} - - for attr in ('files', 'url', 'bl_info'): - if package_dict.get(attr) is not None: - setattr(self, attr, package_dict[attr]) - - # bl_info convenience getters {{{ - # required fields - @property - def name(self) -> str: - """Get name from bl_info""" - return self.bl_info.get('name') - - @property - def version(self) -> tuple: - """Get version from bl_info""" - return tuple(self.bl_info.get('version')) - - @property - def blender(self) -> tuple: - """Get blender from bl_info""" - return self.bl_info.get('blender') - - # optional fields - @property - def description(self) -> str: - """Get description from bl_info""" - return self.bl_info.get('description') - - @property - def author(self) -> str: - """Get author from bl_info""" - return self.bl_info.get('author') - - @property - def category(self) -> str: - """Get category from bl_info""" - return self.bl_info.get('category') - - @property - def location(self) -> str: - """Get location from bl_info""" - return self.bl_info.get('location') - - @property - def support(self) -> str: - """Get support from bl_info""" - return self.bl_info.get('support') - - @property - def warning(self) -> str: - """Get warning from bl_info""" - return self.bl_info.get('warning') - - @property - def wiki_url(self) -> str: - """Get wiki_url from 
bl_info""" - return self.bl_info.get('wiki_url') - - @property - def tracker_url(self) -> str: - """Get tracker_url from bl_info""" - return self.bl_info.get('tracker_url') - # }}} - - # @classmethod - # def from_dict(cls, package_dict: dict): - # """ - # Return a Package with values from dict - # """ - # pkg = cls() - # pkg.set_from_dict(package_dict) - - @classmethod - def from_blinfo(cls, blinfo: dict): - """ - Return a Package with bl_info filled in - """ - return cls({'bl_info': blinfo}) - - @classmethod - def from_module(cls, module): - """ - Return a Package object from an addon module - """ - from pathlib import Path - filepath = Path(module.__file__) - if filepath.name == '__init__.py': - filepath = filepath.parent - - pkg = cls() - pkg.files = [filepath.name] - pkg.installed_location = str(filepath) - pkg.module_name = module.__name__ - - try: - pkg.bl_info = module.bl_info - except AttributeError as err: - raise exceptions.BadAddon("Module does not appear to be an addon; no bl_info attribute") from err - return pkg - - def download(self, dest: Path, progress_callback=None) -> Path: - """Downloads package to `dest`""" - - if not self.url: - raise ValueError("Cannot download package without a URL") - - return utils.download(self.url, dest, progress_callback) - - def install(self, dest_dir: Path, cache_dir: Path, progress_callback=None): - """Downloads package to `cache_dir`, then extracts/moves package to `dest_dir`""" - - log = logging.getLogger('%s.install' % __name__) - - downloaded = self.download(cache_dir, progress_callback) - - if not downloaded: - log.debug('Download returned None, not going to install anything.') - return - - utils.install(downloaded, dest_dir) - # utils.rm(downloaded) - - def __eq__(self, other): - return self.name == other.name and self.version == other.version - - def __lt__(self, other): - return self.version < other.version - - def __hash__(self): - return hash((self.name, self.version)) - - def __repr__(self) -> str: - # return self.name - return "Package('name': {}, 'version': {})".format(self.name, self.version) - -class ConsolidatedPackage: - """ - Stores a grouping of different versions of the same package - """ - - log = logging.getLogger(__name__ + ".ConsolidatedPackage") - - def __init__(self, pkg=None): - self.versions = [] - self.updateable = False - - if pkg is not None: - self.add_version(pkg) - - @property - def installed(self) -> bool: - """Return true if any version of this package is installed""" - for pkg in self.versions: - if pkg.installed: - return True - return False - - @property - def name(self) -> str: - """ - Return name of this package. All package versions in a - ConsolidatedPackage should have the same name by definition - - Returns None if there are no versions - """ - try: - return self.versions[0].name - except IndexError: - return None - - def get_latest_installed_version(self) -> Package: - """ - Return the installed package with the highest version number. - If no packages are installed, return None. 
- """ - #self.versions is always sorted newer -> older, so we can just grab the first we find - for pkg in self.versions: - if pkg.installed: - return pkg - return None - - def get_latest_version(self) -> Package: - """Return package with highest version number, returns None if there are no versions""" - try: - return self.versions[0] # this is always sorted with the highest on top - except IndexError: - return None - - def get_display_version(self) -> Package: - """ - Return installed package with highest version number. - If no version is installed, return highest uninstalled version. - """ - pkg = self.get_latest_installed_version() - if pkg is None: - pkg = self.get_latest_version() - return pkg - - def add_version(self, newpkg: Package): - """Adds a package to the collection of versions""" - - if self.name and newpkg.name != self.name: - raise exceptions.PackageException("Name mismatch, refusing to add %s to %s" % (newpkg, self)) - - for pkg in self: - if pkg == newpkg: - pkg.repositories.union(newpkg.repositories) - if newpkg.installed: - pkg.set_installed_metadata(newpkg) - return - - self.versions.append(newpkg) - self.versions.sort(key=lambda v: v.version, reverse=True) - - - def __iter__(self): - return (pkg for pkg in self.versions) - - def __repr__(self): - return ("ConsolidatedPackage".format(self.name)) - -class Repository: - """ - Stores repository metadata (including packages) - """ - - log = logging.getLogger(__name__ + ".Repository") - - def __init__(self, url=None): - if url is None: - url = "" - self.set_from_dict({'url': url}) - - # def cleanse_packagelist(self): - # """Remove empty packages (no bl_info), packages with no name""" - - def refresh(self, storage_path: Path, progress_callback=None): - """ - Requests repo.json from URL and embeds etag/last-modification headers - """ - import requests - - if progress_callback is None: - progress_callback = lambda x: None - - progress_callback(0.0) - - if self.url is None: - raise ValueError("Cannot refresh repository without a URL") - - url = utils.add_repojson_to_url(self.url) - - self.log.debug("Refreshing repository from %s", self.url) - - req_headers = {} - # Do things this way to avoid adding empty objects/None to the req_headers dict - try: - req_headers['If-None-Match'] = self._headers['etag'] - except KeyError: - pass - try: - req_headers['If-Modified-Since'] = self._headers['last-modified'] - except KeyError: - pass - - try: - resp = requests.get(url, headers=req_headers, timeout=60) - except requests.exceptions.InvalidSchema as err: - raise exceptions.DownloadException("Invalid schema. Did you mean to use http://?") from err - except requests.exceptions.ConnectionError as err: - raise exceptions.DownloadException("Failed to connect. Are you sure '%s' is the correct URL?" 
% url) from err - except requests.exceptions.RequestException as err: - raise exceptions.DownloadException(err) from err - - try: - resp.raise_for_status() - except requests.HTTPError as err: - self.log.error('Error downloading %s: %s', url, err) - raise exceptions.DownloadException(resp.status_code, resp.reason) from err - - if resp.status_code == requests.codes.not_modified: - self.log.debug("Packagelist not modified") - return - - resp_headers = {} - try: - resp_headers['etag'] = resp.headers['etag'] - except KeyError: - pass - try: - resp_headers['last-modified'] = resp.headers['last-modified'] - except KeyError: - pass - - self.log.debug("Found headers: %s", resp_headers) - - progress_callback(0.7) - - try: - repodict = resp.json() - except json.decoder.JSONDecodeError: - self.log.exception("Failed to parse downloaded repository") - raise exceptions.BadRepositoryException( - "Could not parse repository downloaded from '%s'. Are you sure this is the correct URL?" % url - ) - repodict['_headers'] = resp_headers - repodict['url'] = self.url - - self.set_from_dict(repodict) - self.to_file(storage_path / utils.format_filename(self.name, ".json")) - - progress_callback(1.0) - - - def to_dict(self, sort=False, ids=False) -> dict: - """ - Return a dict representation of the repository - """ - packages = [p.to_dict() for p in self.packages] - - if sort: - packages.sort(key=lambda p: p['bl_info']['name'].lower()) - - if ids: - for pkg in packages: - # hash may be too big for a C int - pkg['id'] = str(hash(pkg['url'] + pkg['bl_info']['name'] + self.name + self.url)) - - return { - 'name': self.name, - 'packages': packages, - 'url': self.url, - '_headers': self._headers, - } - - def set_from_dict(self, repodict: dict): - """ - Get repository attributes from a dict such as produced by `to_dict` - """ - - # def initialize(item, value): - # if item is None: - # return value - # else: - # return item - - #Be certain to initialize everything; downloaded packagelist might contain null values - # url = initialize(repodict.get('url'), "") - # packages = initialize(repodict.get('packages'), []) - # headers = initialize(repodict.get('_headers'), {}) - name = repodict.get('name', "") - url = repodict.get('url', "") - packages = repodict.get('packages', []) - headers = repodict.get('_headers', {}) - - self.name = name - self.url = url - self.packages = [Package(pkg) for pkg in packages] - self._headers = headers - - @classmethod - def from_dict(cls, repodict: dict): - """ - Like `set_from_dict`, but immutable - """ - repo = cls() - repo.set_from_dict(repodict) - return repo - - def to_file(self, path: Path): - """ - Dump repository to a json file at `path`. - """ - if self.packages is None: - self.log.warning("Writing an empty repository") - - self.log.debug("URL is %s", self.url) - - with path.open('w', encoding='utf-8') as repo_file: - json.dump(self.to_dict(), repo_file, indent=4, sort_keys=True) - self.log.debug("Repository written to %s" % path) - - # def set_from_file(self, path: Path): - # """ - # Set the current instance's attributes from a json file - # """ - # repo_file = path.open('r', encoding='utf-8') - # - # with repo_file: - # try: - # self.set_from_dict(json.load(repo_file)) - # except Exception as err: - # raise BadRepository from err - # - # self.log.debug("Repository read from %s", path) - - @classmethod - def from_file(cls, path: Path): - """ - Read repository from a json file at `path`. 
- """ - repo_file = path.open('r', encoding='utf-8') - - with repo_file: - try: - repo = cls.from_dict(json.load(repo_file)) - except json.JSONDecodeError as err: - raise exceptions.BadRepositoryException(err) from err - if repo.url is None or len(repo.url) == 0: - raise exceptions.BadRepositoryException("Repository missing URL") - - cls.log.debug("Repository read from %s", path) - return repo - - def __repr__(self): - return "Repository({}, {})".format(self.name, self.url) diff --git a/package_manager/bpkg/utils.py b/package_manager/bpkg/utils.py deleted file mode 100644 index d9c7cac..0000000 --- a/package_manager/bpkg/utils.py +++ /dev/null @@ -1,218 +0,0 @@ -from pathlib import Path -from . import exceptions -import shutil -import logging - - -def format_filename(s: str, ext=None) -> str: - """Take a string and turn it into a reasonable filename""" - import string - if ext is None: - ext = "" - valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits) - filename = ''.join(char for char in s if char in valid_chars) - filename = filename.replace(' ','_') - filename.lower() - filename += ext - return filename - -def download(url: str, destination: Path, progress_callback=None) -> Path: - """ - Downloads file at the given url, and if progress_callback is specified, - repeatedly calls progress_callback with an argument between 0 and 1, or infinity. - Raises DownloadException if an error occurs with the download. - - :returns: path to the downloaded file, or None if not modified - """ - - import requests - log = logging.getLogger('%s.download' % __name__) - - if progress_callback is None: - # assign to do-nothing function - progress_callback = lambda x: None - - progress_callback(0) - - # derive filename from url if `destination` is an existing directory, otherwise use `destination` directly - if destination.is_dir(): - # TODO: get filename from Content-Disposition header, if available. - from urllib.parse import urlsplit, urlunsplit - parsed_url = urlsplit(url) - local_filename = Path(parsed_url.path).name or 'download.tmp' - local_fpath = destination / local_filename - else: - local_fpath = destination - - log.info('Downloading %s -> %s', url, local_fpath) - - # try: - resp = requests.get(url, stream=True, verify=True) - # except requests.exceptions.RequestException as err: - # raise exceptions.DownloadException(err) from err - - try: - resp.raise_for_status() - except requests.HTTPError as err: - raise exceptions.DownloadException(resp.status_code, str(err)) from err - - if resp.status_code == requests.codes.not_modified: - log.info("Server responded 'Not Modified', not downloading") - return None - - try: - # Use float so that we can also use infinity - content_length = float(resp.headers['content-length']) - except KeyError: - log.warning('Server did not send content length, cannot report progress.') - content_length = float('inf') - - # TODO: check if there's enough disk space. 
- - - downloaded_length = 0 - with local_fpath.open('wb') as outfile: - for chunk in resp.iter_content(chunk_size=1024 ** 2): - if not chunk: # filter out keep-alive new chunks - continue - - outfile.write(chunk) - downloaded_length += len(chunk) - progress_callback(downloaded_length / content_length) - - return local_fpath - - -def rm(path: Path): - """Delete whatever is specified by `path`""" - if path.is_dir(): - shutil.rmtree(str(path)) - else: - path.unlink() - -class InplaceBackup: - """Utility class for moving a file out of the way by appending a '~'""" - - log = logging.getLogger('%s.inplace-backup' % __name__) - - def __init__(self, path: Path): - self.path = path - self.backup() - - def backup(self): - """Move 'path' to 'path~'""" - if not self.path.exists(): - raise FileNotFoundError("Can't backup path which doesn't exist") - - self.backup_path = Path(str(self.path) + '~') - if self.backup_path.exists(): - self.log.warning("Overwriting existing backup '{}'".format(self.backup_path)) - rm(self.backup_path) - - shutil.move(str(self.path), str(self.backup_path)) - - def restore(self): - """Move 'path~' to 'path'""" - try: - getattr(self, 'backup_path') - except AttributeError as err: - raise RuntimeError("Can't restore file before backing it up") from err - - if not self.backup_path.exists(): - raise FileNotFoundError("Can't restore backup which doesn't exist") - - if self.path.exists(): - self.log.warning("Overwriting '{0}' with backup file".format(self.path)) - rm(self.path) - - shutil.move(str(self.backup_path), str(self.path)) - - def remove(self): - """Remove 'path~'""" - rm(self.backup_path) - - -def install(src_file: Path, dest_dir: Path): - """Extracts/moves package at `src_file` to `dest_dir`""" - - import zipfile - - log = logging.getLogger('%s.install' % __name__) - log.debug("Starting installation") - - if not src_file.is_file(): - raise exceptions.InstallException("Package isn't a file") - - if not dest_dir.is_dir(): - raise exceptions.InstallException("Destination is not a directory") - - # TODO: check to make sure addon/package isn't already installed elsewhere - - # The following is adapted from `addon_install` in bl_operators/wm.py - - # check to see if the file is in compressed format (.zip) - if zipfile.is_zipfile(str(src_file)): - log.debug("Package is zipfile") - try: - file_to_extract = zipfile.ZipFile(str(src_file), 'r') - except Exception as err: - raise exceptions.InstallException("Failed to read zip file: %s" % err) from err - - def root_files(filelist: list) -> list: - """Some string parsing to get a list of the root contents of a zip from its namelist""" - rootlist = [] - for f in filelist: - # Get all names which have no path separators (root level files) - # or have a single path separator at the end (root level directories). 
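                # e.g. 'node_wrangler.py' and 'my_addon/' are root entries,
                # while 'my_addon/__init__.py' is not.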
- if len(f.rstrip('/').split('/')) == 1: - rootlist.append(f) - return rootlist - - conflicts = [dest_dir / f for f in root_files(file_to_extract.namelist()) if (dest_dir / f).exists()] - backups = [] - for conflict in conflicts: - log.debug("Creating backup of conflict %s", conflict) - backups.append(InplaceBackup(conflict)) - - try: - file_to_extract.extractall(str(dest_dir)) - except Exception as err: - for backup in backups: - backup.restore() - raise exceptions.InstallException("Failed to extract zip file to '%s': %s" % (dest_dir, err)) from err - - for backup in backups: - backup.remove() - - else: - log.debug("Package is pyfile") - dest_file = (dest_dir / src_file.name) - - if dest_file.exists(): - backup = InplaceBackup(dest_file) - - try: - shutil.copyfile(str(src_file), str(dest_file)) - except Exception as err: - backup.restore() - raise exceptions.InstallException("Failed to copy file to '%s': %s" % (dest_dir, err)) from err - - log.debug("Installation succeeded") - - -# def load_repository(repo_storage_path: Path, repo_name: str) -> Repository: -# """Loads .json from """ -# pass -# -# def download_repository(repo_storage_path: Path, repo_name: str): -# """Loads .json from """ -# pass -# this is done in Repository - - -def add_repojson_to_url(url: str) -> str: - """Add `repo.json` to the path component of a url""" - from urllib.parse import urlsplit, urlunsplit - parsed_url = urlsplit(url) - new_path = parsed_url.path + "/repo.json" - return urlunsplit((parsed_url.scheme, parsed_url.netloc, new_path, parsed_url.query, parsed_url.fragment)) diff --git a/package_manager/cache.py b/package_manager/cache.py deleted file mode 100644 index 881478b..0000000 --- a/package_manager/cache.py +++ /dev/null @@ -1,49 +0,0 @@ -# ##### BEGIN GPL LICENSE BLOCK ##### -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -# -# ##### END GPL LICENSE BLOCK ##### - - -import os -import logging -import pathlib - -from . import appdirs - -log = logging.getLogger(__name__) - - -def cache_directory(*subdirs) -> pathlib.Path: - """Returns an OS-specifc cache location, and ensures it exists. - - Should be replaced with a call to bpy.utils.user_resource('CACHE', ...) - once https://developer.blender.org/T47684 is finished. - - :param subdirs: extra subdirectories inside the cache directory. - - >>> cache_directory() - '.../blender_cloud/your_username' - >>> cache_directory('sub1', 'sub2') - '.../blender_cloud/your_username/sub1/sub2' - """ - - # TODO: use bpy.utils.user_resource('CACHE', ...) - # once https://developer.blender.org/T47684 is finished. 
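    # A speculative sketch of what that could look like once a 'CACHE' resource
    # type exists (the 'CACHE' argument is hypothetical until T47684 is done):
    #
    #     import bpy
    #     cache_dir = pathlib.Path(bpy.utils.user_resource('CACHE', 'blender_package_manager', create=True))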
- user_cache_dir = appdirs.user_cache_dir(appname='Blender', appauthor=False) - cache_dir = pathlib.Path(user_cache_dir) / 'blender_package_manager' / pathlib.Path(*subdirs) - cache_dir.mkdir(mode=0o700, parents=True, exist_ok=True) - - return cache_dir diff --git a/package_manager/messages.py b/package_manager/messages.py deleted file mode 100644 index 8e28b3e..0000000 --- a/package_manager/messages.py +++ /dev/null @@ -1,73 +0,0 @@ -from .bpkg.types import Repository - -class Message: - """Superclass for all message sent over pipes.""" - - -# Blender messages - -class BlenderMessage(Message): - """Superclass for all messages sent from Blender to the subprocess.""" - -class Abort(BlenderMessage): - """Sent when the user requests abortion of a task.""" - - -# Subproc messages - -class SubprocMessage(Message): - """Superclass for all messages sent from the subprocess to Blender.""" - -class Progress(SubprocMessage): - """Send from subprocess to Blender to report progress. - - :ivar progress: the progress percentage, from 0-1. - """ - - def __init__(self, progress: float): - self.progress = progress - -class Success(SubprocMessage): - """Sent when an operation finished sucessfully.""" - -class RepositoryResult(SubprocMessage): - """Sent when an operation returns a repository to be used on the parent process.""" - - def __init__(self, repository_name: str): - self.repository = repository - -class Aborted(SubprocMessage): - """Sent as response to Abort message.""" - -# subproc warnings - -class SubprocWarning(SubprocMessage): - """Superclass for all non-fatal warning messages sent from the subprocess.""" - - def __init__(self, message: str): - self.message = message - -# subproc errors - -class SubprocError(SubprocMessage): - """Superclass for all fatal error messages sent from the subprocess.""" - - def __init__(self, message: str): - self.message = message - -class InstallError(SubprocError): - """Sent when there was an error installing something.""" - -class UninstallError(SubprocError): - """Sent when there was an error uninstalling something.""" - -class BadRepositoryError(SubprocError): - """Sent when a repository can't be used for some reason""" - -class DownloadError(SubprocMessage): - """Sent when there was an error downloading something.""" - - def __init__(self, message: str, status_code: int = None): - self.status_code = status_code - self.message = message - diff --git a/package_manager/subproc.py b/package_manager/subproc.py deleted file mode 100644 index 5051918..0000000 --- a/package_manager/subproc.py +++ /dev/null @@ -1,80 +0,0 @@ -""" -All the stuff that needs to run in a subprocess. -""" - -from pathlib import Path -from . import bpkg -from . import messages -from .bpkg import exceptions as bpkg_exs -from .bpkg.types import (Package, Repository) -import logging - -def download_and_install_package(pipe_to_blender, package: Package, install_path: Path): - """Downloads and installs the given package.""" - - log = logging.getLogger(__name__ + '.download_and_install') - - from . 
import cache - cache_dir = cache.cache_directory('downloads') - - try: - package.install(install_path, cache_dir) - except bpkg_exs.DownloadException as err: - pipe_to_blender.send(messages.DownloadError(err)) - log.exception(err) - except bpkg_exs.InstallException as err: - pipe_to_blender.send(messages.InstallError(err)) - log.exception(err) - - pipe_to_blender.send(messages.Success()) - - -def uninstall_package(pipe_to_blender, package: Package, install_path: Path): - """Deletes the given package's files from the install directory""" - #TODO: move package to cache and present an "undo" button to user, to give nicer UX on misclicks - - for pkgfile in [install_path / Path(p) for p in package.files]: - if not pkgfile.exists(): - pipe_to_blender.send(messages.UninstallError("Could not find file owned by package: '%s'. Refusing to uninstall." % pkgfile)) - return None - - for pkgfile in [install_path / Path(p) for p in package.files]: - bpkg.utils.rm(pkgfile) - - pipe_to_blender.send(messages.Success()) - - -def refresh_repositories(pipe_to_blender, repo_storage_path: Path, repository_urls: str, progress_callback=None): - """Downloads and stores the given repository""" - - log = logging.getLogger(__name__ + '.refresh_repository') - - if progress_callback is None: - progress_callback = lambda x: None - progress_callback(0.0) - - repos = bpkg.load_repositories(repo_storage_path) - - def prog(progress: float): - progress_callback(progress/len(repos)) - - known_repo_urls = [repo.url for repo in repos] - for repo_url in repository_urls: - if repo_url not in known_repo_urls: - repos.append(Repository(repo_url)) - - for repo in repos: - log.debug("repo name: %s, url: %s", repo.name, repo.url) - for repo in repos: - try: - repo.refresh(repo_storage_path, progress_callback=prog) - except bpkg_exs.DownloadException as err: - pipe_to_blender.send(messages.DownloadError(err)) - log.exception("Download error") - except bpkg_exs.BadRepositoryException as err: - pipe_to_blender.send(messages.BadRepositoryError(err)) - log.exception("Bad repository") - - progress_callback(1.0) - pipe_to_blender.send(messages.Success()) - diff --git a/package_manager/utils.py b/package_manager/utils.py deleted file mode 100644 index 86d1765..0000000 --- a/package_manager/utils.py +++ /dev/null @@ -1,29 +0,0 @@ -import bpy -from . import bpkg -from pathlib import Path -import logging - -from collections import OrderedDict - -def fmt_version(version_number: tuple) -> str: - """Take version number as a tuple and format it as a string""" - vstr = str(version_number[0]) - for component in version_number[1:]: - vstr += "." 
+ str(component) - return vstr - -def sanitize_repository_url(url: str) -> str: - """Sanitize repository url""" - from urllib.parse import urlsplit, urlunsplit - parsed_url = urlsplit(url) - # new_path = parsed_url.path.rstrip("repo.json") - new_path = parsed_url.path - return urlunsplit((parsed_url.scheme, parsed_url.netloc, new_path, parsed_url.query, parsed_url.fragment)) - -def add_repojson_to_url(url: str) -> str: - """Add `repo.json` to the path component of a url""" - from urllib.parse import urlsplit, urlunsplit - parsed_url = urlsplit(url) - new_path = str(Path(parsed_url.path) / "repo.json") - return urlunsplit((parsed_url.scheme, parsed_url.netloc, new_path, parsed_url.query, parsed_url.fragment)) - diff --git a/setup.py b/setup.py deleted file mode 100755 index b73ca9a..0000000 --- a/setup.py +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/env python3 -# ##### BEGIN GPL LICENSE BLOCK ##### -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -# -# ##### END GPL LICENSE BLOCK ##### - -# setup.py adapted from blender_cloud addon - -import glob -import sys -import zipfile - -from distutils import log -from distutils.core import Command -from distutils.command.bdist import bdist -from distutils.command.install import install -from distutils.command.install_egg_info import install_egg_info -from setuptools import setup, find_packages - -# sys.dont_write_bytecode = True - -# noinspection PyAttributeOutsideInit -class BlenderAddonBdist(bdist): - """Ensures that 'python setup.py bdist' creates a zip file.""" - - def initialize_options(self): - super().initialize_options() - self.formats = ['zip'] - self.plat_name = 'addon' # use this instead of 'linux-x86_64' or similar. 
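    # Intended usage (illustrative; the archive name depends on the name/version
    # passed to setup() below). Note that run() below calls a 'wheels' command
    # which is not registered in cmdclass, so 'bdist' would presumably need that
    # call removed, or a wheels command added, to run as-is:
    #
    #     python3 setup.py bdist                 # -> dist/bpkg-0.0.1.addon.zip
    #     python3 setup.py fdist --dest-path=/path/to/blender/scripts/addons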
- - def run(self): - self.run_command('wheels') - super().run() - - -# noinspection PyAttributeOutsideInit -class BlenderAddonFdist(BlenderAddonBdist): - """Ensures that 'python setup.py fdist' creates a plain folder structure.""" - - user_options = [ - ('dest-path=', None, 'addon installation path'), - ] - - def initialize_options(self): - super().initialize_options() - self.dest_path = None # path that will contain the addon - - def run(self): - super().run() - - # dist_files is a list of tuples ('bdist', 'any', 'filepath') - filepath = self.distribution.dist_files[0][2] - - # if dest_path is not specified use the filename as the dest_path (minus the .zip) - assert filepath.endswith('.zip') - target_folder = self.dest_path or filepath[:-4] - - print('Unzipping the package on {}.'.format(target_folder)) - - with zipfile.ZipFile(filepath, 'r') as zip_ref: - zip_ref.extractall(target_folder) - - -# noinspection PyAttributeOutsideInit -class BlenderAddonInstall(install): - """Ensures the module is placed at the root of the zip file.""" - - def initialize_options(self): - super().initialize_options() - self.prefix = '' - self.install_lib = '' - - -class AvoidEggInfo(install_egg_info): - """Makes sure the egg-info directory is NOT created. - - If we skip this, the user's addon directory will be polluted by egg-info - directories, which Blender doesn't use anyway. - """ - - def run(self): - pass - - -setup( - cmdclass={'bdist': BlenderAddonBdist, - 'fdist': BlenderAddonFdist, - 'install': BlenderAddonInstall, - 'install_egg_info': AvoidEggInfo}, - name='bpkg', - description='Integrated package manager for Blender', - version='0.0.1', - author='Ellwood Zwovic', - author_email='gandalf3@blendermonkey.com', - packages=['bpkg'], - scripts=['generate_repository'], - url='https://developer.blender.org/diffusion/BPMA/', - platforms='', - zip_safe=False, -) - diff --git a/tests/test_make_repo.py b/tests/test_make_repo.py index e987903..9a4ce6b 100644 --- a/tests/test_make_repo.py +++ b/tests/test_make_repo.py @@ -5,7 +5,12 @@ from pathlib import Path import logging import ast import json -import bpkg-repogen + +import types +import importlib.machinery +loader = importlib.machinery.SourceFileLoader('generate_repository', 'generate_repository') +generate_repository = types.ModuleType(loader.name) +loader.exec_module(generate_repository) logging.basicConfig(level=logging.ERROR, format='%(levelname)8s: %(message)s') @@ -18,19 +23,11 @@ class TestRepoGeneration(unittest.TestCase): def test_extract_blinfo_from_nonexistent(self): test_file = 'file_that_doesnt_exist' with self.assertRaises(FileNotFoundError): - bpkg-repogen.extract_blinfo(self.addon_path / test_file) + generate_repository.extract_blinfo(self.addon_path / test_file) - def test_package_quantity(self): - repo = bpkg-repogen.bpkg-repogen(self.addon_path, "name of the repo") - acceptible_addons = [ - f for f in self.addon_path.iterdir() - if not f.match('*nonaddon*') - ] - self.assertEqual(len(repo.packages), len(acceptible_addons)) - - def test_bpkg-repogen_from_nonexistent(self): + def test_generate_repository_from_nonexistent(self): with self.assertRaises(FileNotFoundError): - bpkg-repogen.bpkg-repogen(Path('in_a_galaxy_far_far_away'), "somename") + generate_repository.make_repo(Path('in_a_galaxy_far_far_away'), "somename", "someurl") # addons which should contain bl_infos yes_blinfo = [ @@ -45,7 +42,7 @@ no_blinfo = [ def generate_good_blinfo_test(test_file: Path): def test(self): - reality = bpkg-repogen.extract_blinfo(test_file) + reality = 
generate_repository.extract_blinfo(test_file) with (self.helper_path / 'expected_blinfo').open("r") as f: expectation = ast.literal_eval(f.read()) self.assertEqual(expectation, reality) @@ -53,8 +50,8 @@ def generate_good_blinfo_test(test_file: Path): def generate_bad_blinfo_test(test_file: Path): def test(self): - with self.assertRaises(bpkg-repogen.BadAddon): - bpkg-repogen.extract_blinfo(test_file) + with self.assertRaises(generate_repository.BadAddon): + generate_repository.extract_blinfo(test_file) return test # Add test method retur diff --git a/tests/test_refresh_repos.py b/tests/test_refresh_repos.py deleted file mode 100644 index 802b200..0000000 --- a/tests/test_refresh_repos.py +++ /dev/null @@ -1,72 +0,0 @@ -import requests -import unittest -from unittest import mock -# from blenderpack import Repositories, fetch_repo -from datetime import datetime -import json - -# based on https://stackoverflow.com/a/28507806/2730823 - -# This method will be used by the mock to replace requests.get -def mocked_requests_get(*args, **kwargs): - cidict = requests.structures.CaseInsensitiveDict - req_headers = cidict(kwargs.get('headers')) - t_fmt = '%a, %m %b %Y %X %Z' - - class MockResponse: - def __init__(self, headers: cidict, status_code: int): - self.headers = headers - self.status_code = status_code - - def json(self): - return json.dumps({'url': 'http://someurl.tld/repo.json'}) - - if args[0] == 'http://someurl.tld/repo.json': - resp_headers = cidict({ - "ETag": '"2a0094b-b74-55326ced274f3"', - "Last-Modified": 'Sun, 13 Mar 2011 13:38:53 GMT', - }) - - if req_headers == {}: - resp_code = 200 - else: - req_headers = cidict(req_headers) - resp_code = 304 if req_headers.get('if-none-match', '') == resp_headers['etag']\ - or datetime.strptime(req_headers.get('if-modified-since', ''), t_fmt) < \ - datetime.strptime(resp_headers['last-modified'], t_fmt) \ - else 200 - return MockResponse(resp_headers, resp_code) - - return MockResponse(None, 404) - -class MockRepositories: - storage = {} - - def load(self, *args, **kwargs): - if args[0] not in self.storage: - self.storage[args[0]] = {'url': args[0]} - - return self.storage[args[0]] - - def write(self, *args, **kwargs): - self.storage[args[0]['url']] = args[0] - - -class fetch_url_twice(unittest.TestCase): - - @mock.patch('requests.get', side_effect=mocked_requests_get) - def test_fetch(self, mock_get): - self.fail('unfinished test') - repos = MockRepositories() - fetch_repo('http://someurl.tld/repo.json', repos) - mock_get.assert_called_with('http://someurl.tld/repo.json', headers={}) - - fetch_repo('http://someurl.tld/repo.json', repos) - mock_get.assert_called_with('http://someurl.tld/repo.json', headers={ - 'If-None-Match': '"2a0094b-b74-55326ced274f3"', - 'If-Modified-Since': 'Sun, 13 Mar 2011 13:38:53 GMT' - }) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_repo_io.py b/tests/test_repo_io.py deleted file mode 100755 index 5035c2b..0000000 --- a/tests/test_repo_io.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python3 - -import unittest -from pathlib import Path -import logging -import json -import bpackage as BP - -logging.basicConfig(level=logging.DEBUG, - format='%(levelname)8s: %(message)s') - -class TestRepoInstantiation(unittest.TestCase): - """ - Tests of the creation of a Repository object - """ - - # helper_path = Path('tests/test_helpers') - # repos = blenderpack.Repositories(helper_path / 'repo.json') - - # def test_load(self): - # repo = self.repos.load('http://someurl.tld/repo.json') - - repo_dict 
= { - 'name': 'The Best Repo Ever', - 'url': 'http://someurl.tld/repo.json', - 'packages': [ - {'name': 'pkg1'}, - {'name': 'pkg2'}, - ], - } - - def test_create_from_dict(self): - """ - Instantiate repository repository with a dict and check - if all the items are carried over - """ - repodict = self.repo_dict - repo = BP.Repository(repodict) - for key, val in repodict.items(): - self.assertEqual(getattr(repo, key), val) - - def test_create_from_none(self): - """ - Instantiate repository repository from none and check that - the new repository's properties are set to none - """ - repodict = self.repo_dict - repo = BP.Repository(None) - for key, val in repodict.items(): - self.assertEqual(getattr(repo, key), None) - - def test_create_from_incomplete(self): - """ - Instantiate repository repository from a partial dict - and check that all properties are set, either to None or to the - value from the dict - """ - repodict = { - 'name': 'The Best Repo Ever', - } - repo = BP.Repository(repodict) - for key, val in repodict.items(): - self.assertEqual(getattr(repo, key), val) - self.assertIs(repo.url, None) -