Compare commits
67 commits
builtin-si... → soc-2017-p...

SHA1:
c24bb59a44, b35e5240f6, 0a5353e853, 74a19feba8, 0f17f1937f, 280b80349a, 8e815f4ce2, e881a0e2a4,
03a4bd6132, 1ab4d5fec4, 2be8b4de2a, 1d6bcff706, 7fbb720265, b259a8597c, 2cccd9341b, c2ed145322,
a2301ec260, 688cb2d6e0, ce0396c878, 01a4ea98c5, 6c3786713e, 90ea56b2fb, b62b62d51b, 16da5d84c0,
6d382129f9, b9338dde5a, b42bee90f9, 552545d6a4, 705695bf4d, 0c4fd02c27, ba99e26e7f, d5daa2705a,
6be55223dc, da7efd96f7, 1bc66c6397, 6058b5cbaa, 233d8f2faf, 0415d04617, abd9c4ed11, 08ce79dd85,
3907c90c53, ab139c834d, f226dd3030, 94c48c8f92, 684d870dc4, 480060696d, bfd4b73707, 88756edd2a,
c9ec8ba88f, a17a32e6ef, 65a44d64c1, f5428b3213, 6debd2134a, 9cec3f7212, 956d8e790c, 47c68a54ad,
dbe6331a3b, 339c51ab17, 9f34503f2f, fca155031f, ed3eb48729, b0d9153dd3, 6a21120023, e34888b80d,
0ff4627729, d81cabbaa8, ab1c980914
release/scripts/modules/bpkg/__init__.py (new file, 86 lines added)

```python
from . import utils
from . import types
from . import display
from . import exceptions
from pathlib import Path
import logging

# Global package dict, keyed by package name. Use refresh_packages() to update it
packages = {}


def get_repo_storage_path() -> Path:
    """Return Path to the directory in which downloaded repository indices are
    stored"""
    import bpy
    return Path(bpy.utils.user_resource('CONFIG', 'repositories'))


def get_repositories() -> list:
    """
    Get list of downloaded repositories and update wm.package_repositories
    """
    storage_path = get_repo_storage_path()
    repos = utils.load_repositories(storage_path)
    return repos


def refresh_repository_props():
    """Create RepositoryProperty collection from repository files"""
    # TODO: store repository props in .blend so enabled/disabled state can be remembered
    import bpy
    wm = bpy.context.window_manager
    repos = get_repositories()
    wm.package_repositories.clear()
    for repo in repos:
        repo_prop = wm.package_repositories.add()
        repo_prop.name = repo.name
        repo_prop.enabled = True
        repo_prop.url = repo.url
        repo_prop.filepath = str(repo.filepath)


def get_installed_packages(refresh=False) -> list:
    """Get list of packages installed on disk. If refresh == True, re-scan for new packages"""
    import addon_utils
    installed_pkgs = []
    # TODO: Do recursive search for packages to allow for user-customized addon
    # layout on filesystem. Just use addon_utils for now
    for mod in addon_utils.modules(refresh=refresh):
        try:
            pkg = types.Package.from_module(mod)
        except exceptions.PackageException as err:
            msg = "Error parsing package \"{}\" ({}): {}".format(
                mod.__name__, mod.__file__, err)
            display.pkg_errors.append(msg)
        else:
            pkg.installed = True
            installed_pkgs.append(pkg)
    return installed_pkgs


def refresh_packages():
    """Update bpkg.packages, a dict of ConsolidatedPackages from known repositories and
    installed packages, keyed by package name"""

    global packages
    masterlist = {}
    display.pkg_errors.clear()
    installed_packages = get_installed_packages(refresh=True)
    known_repositories = get_repositories()

    for repo in known_repositories:
        for pkg in repo.packages:
            pkg.repositories.add(repo)
            if pkg.name in masterlist:
                masterlist[pkg.name].add_version(pkg)
            else:
                masterlist[pkg.name] = types.ConsolidatedPackage(pkg)

    for pkg in installed_packages:
        if pkg.name in masterlist:
            masterlist[pkg.name].add_version(pkg)
        else:
            masterlist[pkg.name] = types.ConsolidatedPackage(pkg)

    packages = masterlist
```
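The refresh flow above merges repository indices and on-disk add-ons into a single dict of `ConsolidatedPackage` objects. A minimal usage sketch, assuming it runs inside a Blender build with this branch applied (so `bpy` is available and `bpkg` is importable from `release/scripts/modules`); nothing below is part of the patch itself:

```python
# Sketch only: rebuild and inspect the consolidated package list.
import bpkg

bpkg.refresh_packages()  # rebuilds bpkg.packages from repositories + installed add-ons

for name, consolidated in sorted(bpkg.packages.items()):
    # ConsolidatedPackage groups every known version of one package under its name
    print(name, "known versions:", len(consolidated.versions))
```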
release/scripts/modules/bpkg/actions.py (new file, 158 lines added)

```python
from pathlib import Path
from . import exceptions
from . import utils
import shutil
import logging


def download(url: str, destination: Path, progress_callback=None) -> Path:
    """
    Downloads file at the given url, and if progress_callback is specified,
    repeatedly calls progress_callback with an argument between 0 and 1, or
    infinity if progress cannot be determined. Raises DownloadException if an
    error occurs with the download.

    :returns: path to the downloaded file, or None if not modified
    """

    import requests
    log = logging.getLogger('%s.download' % __name__)

    if progress_callback is None:
        # assign to do-nothing function
        def progress_callback(x): return None

    progress_callback(0)

    log.info('Downloading %s', url)

    resp = requests.get(url, stream=True, verify=True)

    try:
        resp.raise_for_status()
    except requests.HTTPError as err:
        raise exceptions.DownloadException(resp.status_code, str(err))

    if resp.status_code == requests.codes.not_modified:
        log.info("Server responded 'Not Modified', not downloading")
        progress_callback(1)
        return None

    # determine destination filename from url, but only after we've determined it works as a real url:
    # derive filename from url if given `destination` is an existing directory,
    # otherwise use `destination` directly
    if destination.is_dir():
        # TODO: get filename from Content-Disposition header, if available.
        from urllib.parse import urlsplit
        parsed_url = urlsplit(url)
        local_filename = Path(parsed_url.path).name or 'download.tmp'
        local_fpath = destination / local_filename
    else:
        local_fpath = destination

    try:
        content_length = int(resp.headers['content-length'])
    except KeyError:
        log.warning(
            'Server did not send content length, cannot report progress.')
        content_length = 0

    try:
        downloaded_length = 0
        with local_fpath.open('wb') as outfile:
            for chunk in resp.iter_content(chunk_size=1024 ** 2):
                if not chunk:  # filter out keep-alive new chunks
                    continue

                outfile.write(chunk)
                downloaded_length += len(chunk)
                try:
                    progress_callback(downloaded_length / content_length)
                except ZeroDivisionError:
                    pass
    except PermissionError as err:
        # PermissionError is a subclass of OSError, so it must be caught first
        raise exceptions.DownloadException(
            "No permissions to write to '%s'" % local_fpath) from err
    except OSError as err:
        raise exceptions.DownloadException(
            "Encountered an error while writing file to '%s', are you sure there's enough space?" % local_fpath) from err

    progress_callback(1)

    return local_fpath


def install(src_file: Path, dest_dir: Path):
    """Extracts/moves package at `src_file` to `dest_dir`"""

    import zipfile

    log = logging.getLogger('%s.install' % __name__)
    log.debug("Starting installation")

    if not src_file.is_file():
        raise exceptions.InstallException("Package isn't a file")

    if not dest_dir.is_dir():
        raise exceptions.InstallException("Destination is not a directory")

    # TODO: check to make sure addon/package isn't already installed elsewhere

    def install_zip(src_zip, dest_dir):
        """Extract src_zip to dest_dir"""
        try:
            file_to_extract = zipfile.ZipFile(str(src_zip), 'r')
        except Exception as err:
            raise exceptions.InstallException(
                "Failed to read zip file: %s" % err) from err

        def root_files(filelist: list) -> list:
            """Some string parsing to get a list of the root contents of a zip from its namelist"""
            rootlist = []
            for f in filelist:
                # Get all names which have no path separators (root level files)
                # or have a single path separator at the end (root level directories).
                if len(f.rstrip('/').split('/')) == 1:
                    rootlist.append(f)
            return rootlist

        conflicts = [
            dest_dir / f for f in root_files(file_to_extract.namelist()) if (dest_dir / f).exists()]
        backups = []
        for conflict in conflicts:
            log.debug("Creating backup of conflict %s", conflict)
            backups.append(utils.InplaceBackup(conflict))

        try:
            file_to_extract.extractall(str(dest_dir))
        except Exception as err:
            for backup in backups:
                backup.restore()
            raise exceptions.InstallException(
                "Failed to extract zip file to '%s': %s" % (dest_dir, err)) from err

        for backup in backups:
            backup.remove()

    def install_py(src_file, dest_dir):
        """Move src_file to dest_dir"""
        dest_file = dest_dir / src_file.name
        backup = None

        if dest_file.exists():
            backup = utils.InplaceBackup(dest_file)

        try:
            shutil.copyfile(str(src_file), str(dest_file))
        except Exception as err:
            if backup:
                backup.restore()
            raise exceptions.InstallException(
                "Failed to copy file to '%s': %s" % (dest_dir, err)) from err

        if backup:
            backup.remove()

    if zipfile.is_zipfile(str(src_file)):
        install_zip(src_file, dest_dir)
    else:
        install_py(src_file, dest_dir)

    log.debug("Installation succeeded")
```
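`download()` and `install()` have no Blender dependency, so they can be exercised outside Blender as long as `requests` is installed and the `bpkg` package is importable. A hedged sketch; the URL and directories below are placeholders, not anything from the patch:

```python
# Sketch: fetch an add-on archive and install it, printing progress.
from pathlib import Path
from bpkg import actions, exceptions


def show_progress(fraction):
    # download() may report values slightly over 1 if content-length is off
    print("progress: {:.0%}".format(min(fraction, 1.0)))


try:
    archive = actions.download("https://example.org/my_addon.zip",
                               Path("/tmp"), progress_callback=show_progress)
    if archive is not None:  # None means the server answered "Not Modified"
        actions.install(archive, Path("/tmp/addons"))
except exceptions.DownloadException as err:
    print("download failed:", err)
except exceptions.InstallException as err:
    print("install failed:", err)
```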
release/scripts/modules/bpkg/appdirs.py (new file, 552 lines added; vendored copy of appdirs 1.4.0)

```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2005-2010 ActiveState Software Inc.
# Copyright (c) 2013 Eddy Petrișor

"""Utilities for determining application-specific dirs.

See <http://github.com/ActiveState/appdirs> for details and usage.
"""
# Dev Notes:
# - MSDN on where to store app data files:
#   http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html

__version_info__ = (1, 4, 0)
__version__ = '.'.join(map(str, __version_info__))


import sys
import os

PY3 = sys.version_info[0] == 3

if PY3:
    unicode = str

if sys.platform.startswith('java'):
    import platform
    os_name = platform.java_ver()[3][0]
    if os_name.startswith('Windows'):  # "Windows XP", "Windows 7", etc.
        system = 'win32'
    elif os_name.startswith('Mac'):  # "Mac OS X", etc.
        system = 'darwin'
    else:  # "Linux", "SunOS", "FreeBSD", etc.
        # Setting this to "linux2" is not ideal, but only Windows or Mac
        # are actually checked for and the rest of the module expects
        # *sys.platform* style strings.
        system = 'linux2'
else:
    system = sys.platform


def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        Mac OS X:               ~/Library/Application Support/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
        path = os.path.normpath(_get_win_folder(const))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Application Support/')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of data dirs should be
            returned. By default, the first item from XDG_DATA_DIRS is
            returned, or '/usr/local/share/<AppName>',
            if XDG_DATA_DIRS is not set

    Typical user data directories are:
        Mac OS X:   /Library/Application Support/<AppName>
        Unix:       /usr/local/share/<AppName> or /usr/share/<AppName>
        Win XP:     C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
        Win 7:      C:\ProgramData\<AppAuthor>\<AppName>   # Hidden, but writeable on Win 7.

    For Unix, this is using the $XDG_DATA_DIRS[0] default.

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('/Library/Application Support')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG default for $XDG_DATA_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_DATA_DIRS',
                         os.pathsep.join(['/usr/local/share', '/usr/share']))
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
        return path

    if appname and version:
        path = os.path.join(path, version)
    return path


def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific config dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        Mac OS X:   same as user_data_dir
        Unix:       ~/.config/<AppName>     # or in $XDG_CONFIG_HOME, if defined
        Win *:      same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
    """
    if system in ["win32", "darwin"]:
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return full path to the user-shared config dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of config dirs should be
            returned. By default, the first item from XDG_CONFIG_DIRS is
            returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set

    Typical user data directories are:
        Mac OS X:   same as site_data_dir
        Unix:       /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
                    $XDG_CONFIG_DIRS
        Win *:      same as site_data_dir
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)

    For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system in ["win32", "darwin"]:
        path = site_data_dir(appname, appauthor)
        if appname and version:
            path = os.path.join(path, version)
    else:
        # XDG default for $XDG_CONFIG_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
    return path


def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific cache dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Cache" to the base app data dir for Windows. See
            discussion below.

    Typical user cache directories are:
        Mac OS X:   ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go in
    the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
    app data dir (the default returned by `user_data_dir` above). Apps typically
    put cache data somewhere *under* the given dir here. Some examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0
    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    This can be disabled with the `opinion=False` option.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
            if opinion:
                path = os.path.join(path, "Cache")
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Caches')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
        if appname:
            path = os.path.join(path, appname.lower().replace(' ', '-'))
    if appname and version:
        path = os.path.join(path, version)
    return path


def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific log dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Logs" to the base app data dir for Windows, and "log" to the
            base cache dir for Unix. See discussion below.

    Typical user cache directories are:
        Mac OS X:   ~/Library/Logs/<AppName>
        Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME if defined
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs

    On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
    examples of what some windows apps use for a logs dir.)

    OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
    value for Windows and appends "log" to the user cache dir for Unix.
    This can be disabled with the `opinion=False` option.
    """
    if system == "darwin":
        path = os.path.join(
            os.path.expanduser('~/Library/Logs'),
            appname)
    elif system == "win32":
        path = user_data_dir(appname, appauthor, version)
        version = False
        if opinion:
            path = os.path.join(path, "Logs")
    else:
        path = user_cache_dir(appname, appauthor, version)
        version = False
        if opinion:
            path = os.path.join(path, "log")
    if appname and version:
        path = os.path.join(path, version)
    return path


class AppDirs(object):
    """Convenience wrapper for getting application dirs."""

    def __init__(self, appname, appauthor=None, version=None, roaming=False,
                 multipath=False):
        self.appname = appname
        self.appauthor = appauthor
        self.version = version
        self.roaming = roaming
        self.multipath = multipath

    @property
    def user_data_dir(self):
        return user_data_dir(self.appname, self.appauthor,
                             version=self.version, roaming=self.roaming)

    @property
    def site_data_dir(self):
        return site_data_dir(self.appname, self.appauthor,
                             version=self.version, multipath=self.multipath)

    @property
    def user_config_dir(self):
        return user_config_dir(self.appname, self.appauthor,
                               version=self.version, roaming=self.roaming)

    @property
    def site_config_dir(self):
        return site_config_dir(self.appname, self.appauthor,
                               version=self.version, multipath=self.multipath)

    @property
    def user_cache_dir(self):
        return user_cache_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_log_dir(self):
        return user_log_dir(self.appname, self.appauthor,
                            version=self.version)


#---- internal support stuff

def _get_win_folder_from_registry(csidl_name):
    """This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    import _winreg

    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    dir, type = _winreg.QueryValueEx(key, shell_folder_name)
    return dir


def _get_win_folder_with_pywin32(csidl_name):
    from win32com.shell import shellcon, shell
    dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
    # Try to make this a unicode path because SHGetFolderPath does
    # not return unicode strings when there is unicode data in the
    # path.
    try:
        dir = unicode(dir)

        # Downgrade to short path name if have highbit chars. See
        # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
        has_high_char = False
        for c in dir:
            if ord(c) > 255:
                has_high_char = True
                break
        if has_high_char:
            try:
                import win32api
                dir = win32api.GetShortPathName(dir)
            except ImportError:
                pass
    except UnicodeError:
        pass
    return dir


def _get_win_folder_with_ctypes(csidl_name):
    import ctypes

    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in buf:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value


def _get_win_folder_with_jna(csidl_name):
    import array
    from com.sun import jna
    from com.sun.jna.platform import win32

    buf_size = win32.WinDef.MAX_PATH * 2
    buf = array.zeros('c', buf_size)
    shell = win32.Shell32.INSTANCE
    shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
    dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in dir:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf = array.zeros('c', buf_size)
        kernel = win32.Kernel32.INSTANCE
        if kernel.GetShortPathName(dir, buf, buf_size):
            dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    return dir


if system == "win32":
    try:
        import win32com.shell
        _get_win_folder = _get_win_folder_with_pywin32
    except ImportError:
        try:
            from ctypes import windll
            _get_win_folder = _get_win_folder_with_ctypes
        except ImportError:
            try:
                import com.sun.jna
                _get_win_folder = _get_win_folder_with_jna
            except ImportError:
                _get_win_folder = _get_win_folder_from_registry


#---- self test code

if __name__ == "__main__":
    appname = "MyApp"
    appauthor = "MyCompany"

    props = ("user_data_dir", "site_data_dir",
             "user_config_dir", "site_config_dir",
             "user_cache_dir", "user_log_dir")

    print("-- app dirs (with optional 'version')")
    dirs = AppDirs(appname, appauthor, version="1.0")
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'version')")
    dirs = AppDirs(appname, appauthor)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'appauthor')")
    dirs = AppDirs(appname)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (with disabled 'appauthor')")
    dirs = AppDirs(appname, appauthor=False)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))
```
release/scripts/modules/bpkg/cache.py (new file, 49 lines added)

```python
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####


import os
import logging
import pathlib

from . import appdirs

log = logging.getLogger(__name__)


def cache_directory(*subdirs) -> pathlib.Path:
    """Returns an OS-specific cache location, and ensures it exists.

    Should be replaced with a call to bpy.utils.user_resource('CACHE', ...)
    once https://developer.blender.org/T47684 is finished.

    :param subdirs: extra subdirectories inside the cache directory.

    >>> cache_directory()
    '.../blender_cloud/your_username'
    >>> cache_directory('sub1', 'sub2')
    '.../blender_cloud/your_username/sub1/sub2'
    """

    # TODO: use bpy.utils.user_resource('CACHE', ...)
    # once https://developer.blender.org/T47684 is finished.
    user_cache_dir = appdirs.user_cache_dir(appname='Blender', appauthor=False)
    cache_dir = pathlib.Path(user_cache_dir) / 'blender_package_manager' / pathlib.Path(*subdirs)
    cache_dir.mkdir(mode=0o700, parents=True, exist_ok=True)

    return cache_dir
```
release/scripts/modules/bpkg/display.py (new file, 30 lines added)

```python
"""A global storage space for display related stuff which needs to be accessible to operators"""

# list of names of packages currently displayed (matching filters)
displayed_packages = []
# list of names of packages currently expanded
expanded_packages = []
# name of package whose preferences are shown
preference_package = None


def repository_items(self, context) -> list:
    """Return displayed repository enum items"""
    import bpy
    try:
        repos = context.window_manager['package_repositories']
    except KeyError:
        return []
    repolist = []
    for repo in repos:
        try:
            repolist.append((repo['name'], repo['name'],
                             "{} ({})".format(repo['name'], repo['url'])))
        except KeyError:  # name may not be set before refresh() finishes execution, in which case leave it out
            pass
    return repolist


# List of error messages from errors encountered while handling packages
# Used to display such errors in the UI
pkg_errors = []
```
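`repository_items` has the `(self, context)` signature of an `EnumProperty` items callback; the actual property wiring lives in files not shown in this compare view, but it would presumably look something like this hypothetical sketch (the property name `package_active_repository` is illustrative only):

```python
# Sketch: an EnumProperty that uses repository_items as its items callback.
import bpy
from bpkg import display

bpy.types.WindowManager.package_active_repository = bpy.props.EnumProperty(
    name="Repository",
    description="Repository to browse",
    items=display.repository_items,  # re-evaluated each time the UI asks for items
)
```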
release/scripts/modules/bpkg/exceptions.py (new file, 14 lines added)

```python
class BpkgException(Exception):
    """Superclass for all package manager exceptions"""

class InstallException(BpkgException):
    """Raised when there is an error during installation"""

class DownloadException(BpkgException):
    """Raised when there is an error downloading something"""

class BadRepositoryException(BpkgException):
    """Raised when there is an error while reading or manipulating a repository"""

class PackageException(BpkgException):
    """Raised when there is an error while manipulating a package"""
```
release/scripts/modules/bpkg/messages.py (new file, 75 lines added)

```python
from .types import Repository


class Message:
    """Superclass for all messages sent over pipes."""


# Blender messages

class BlenderMessage(Message):
    """Superclass for all messages sent from Blender to the subprocess."""


class Abort(BlenderMessage):
    """Sent when the user requests abortion of a task."""


# Subproc messages

class SubprocMessage(Message):
    """Superclass for all messages sent from the subprocess to Blender."""


class Progress(SubprocMessage):
    """Sent from subprocess to Blender to report progress.

    :ivar progress: the progress percentage, from 0-1.
    """

    def __init__(self, progress: float):
        self.progress = progress


class Success(SubprocMessage):
    """Sent when an operation finished successfully."""


class RepositoryResult(SubprocMessage):
    """Sent when an operation returns a repository to be used on the parent process."""

    def __init__(self, repository_name: str):
        self.repository_name = repository_name


class Aborted(SubprocMessage):
    """Sent as response to Abort message."""


# subproc errors

class SubprocError(SubprocMessage):
    """Superclass for all fatal error messages sent from the subprocess."""

    def __init__(self, message: str):
        self.message = message


class InstallError(SubprocError):
    """Sent when there was an error installing something."""


class UninstallError(SubprocError):
    """Sent when there was an error uninstalling something."""


class BadRepositoryError(SubprocError):
    """Sent when a repository can't be used for some reason"""


class DownloadError(SubprocError):
    """Sent when there was an error downloading something."""

    def __init__(self, message: str, status_code: int = None):
        self.status_code = status_code
        self.message = message
```
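These message classes are plain picklable objects, so they can travel over a `multiprocessing.Pipe` between the worker process and Blender. On the Blender side, a modal operator would poll the connection and dispatch on message type; the polling loop below is an illustrative sketch, not part of the patch:

```python
# Sketch: draining subprocess messages on the parent (Blender) side.
# `conn` is the parent end of a multiprocessing.Pipe().
from bpkg import messages


def handle_messages(conn):
    """Process all messages currently queued on the connection."""
    while conn.poll():
        msg = conn.recv()
        if isinstance(msg, messages.Progress):
            print("progress:", msg.progress)
        elif isinstance(msg, messages.SubprocError):
            print("error:", msg.message)
        elif isinstance(msg, messages.Success):
            print("done")
```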
release/scripts/modules/bpkg/subproc.py (new file, 96 lines added)

```python
"""Functions to be executed in a subprocess"""

from pathlib import Path
from . import (
    messages,
    exceptions,
    utils,
)
from .types import (
    Package,
    Repository,
)
import logging


def download_and_install_package(pipe_to_blender, package: Package, install_path: Path):
    """Downloads and installs the given package."""

    log = logging.getLogger(__name__ + '.download_and_install')

    def prog(p: float) -> float:
        pipe_to_blender.send(messages.Progress(p))

    from . import cache
    cache_dir = cache.cache_directory('downloads')

    try:
        package.install(install_path, cache_dir, progress_callback=prog)
    except exceptions.DownloadException as err:
        pipe_to_blender.send(messages.DownloadError(err))
        log.exception(err)
        return
    except exceptions.InstallException as err:
        pipe_to_blender.send(messages.InstallError(err))
        log.exception(err)
        return

    pipe_to_blender.send(messages.Success())


def uninstall_package(pipe_to_blender, package: Package, install_path: Path):
    """Deletes the given package's files from the install directory"""
    # TODO: move package to cache and present an "undo" button to user, to give nicer UX on misclicks

    log = logging.getLogger(__name__ + ".uninstall_package")
    files_to_remove = [install_path / Path(p) for p in package.files]

    for pkgfile in files_to_remove:
        if not pkgfile.exists():
            pipe_to_blender.send(messages.UninstallError(
                "Could not find file owned by package: '%s'. Refusing to uninstall." % pkgfile))
            return

    try:
        for pkgfile in files_to_remove:
            utils.rm(pkgfile)
    except Exception as err:
        msg = "Failed to remove file '%s', see console for details" % pkgfile
        pipe_to_blender.send(messages.UninstallError(msg))
        log.exception(err)
        return

    pipe_to_blender.send(messages.Success())


def refresh_repositories(pipe_to_blender, repo_storage_path: Path, repository_urls: str):
    """Downloads and stores the given repository"""

    log = logging.getLogger(__name__ + '.refresh_repository')

    def progress_callback(p: float) -> float:
        progress_callback._progress += p
        pipe_to_blender.send(messages.Progress(progress_callback._progress))
    progress_callback._progress = 0.0

    repos = utils.load_repositories(repo_storage_path)

    def prog(p: float):
        progress_callback(p / len(repos))

    known_repo_urls = [repo.url for repo in repos]
    for repo_url in repository_urls:
        if repo_url not in known_repo_urls:
            repos.append(Repository(repo_url))

    for repo in repos:
        try:
            repo.refresh(repo_storage_path, progress_callback=prog)
        except exceptions.DownloadException as err:
            pipe_to_blender.send(messages.DownloadError(err))
            log.exception("Download error")
        except exceptions.BadRepositoryException as err:
            pipe_to_blender.send(messages.BadRepositoryError(err))
            log.exception("Bad repository")

    pipe_to_blender.send(messages.Success())
```
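Each of these functions takes the pipe as its first argument, so the parent process is expected to spawn it with `multiprocessing` and keep the other end of the pipe for progress reporting. A hedged sketch of that launch, assuming the `Package` instance is picklable and with a placeholder install path (Blender would derive the real one from the user's scripts/addons directory):

```python
# Sketch: running download_and_install_package in a worker process.
import multiprocessing
from pathlib import Path
from bpkg import subproc


def start_install(package, install_path: Path):
    parent_conn, child_conn = multiprocessing.Pipe()
    worker = multiprocessing.Process(
        target=subproc.download_and_install_package,
        args=(child_conn, package, install_path),
    )
    worker.start()
    # Poll parent_conn (e.g. from a modal operator timer) for Progress,
    # Success, or error messages while the worker runs.
    return parent_conn, worker
```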
655
release/scripts/modules/bpkg/types.py
Normal file
655
release/scripts/modules/bpkg/types.py
Normal file
@@ -0,0 +1,655 @@
|
|||||||
|
import logging
|
||||||
|
import json
|
||||||
|
from pathlib import Path
|
||||||
|
from . import exceptions
|
||||||
|
from . import utils
|
||||||
|
from . import actions
|
||||||
|
from . import display
|
||||||
|
|
||||||
|
|
||||||
|
class Package:
|
||||||
|
"""
|
||||||
|
Stores package methods and metadata
|
||||||
|
"""
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__ + ".Package")
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self._bl_info = dict()
|
||||||
|
|
||||||
|
## bl_infos ##
|
||||||
|
# required fields
|
||||||
|
self.name = str()
|
||||||
|
self.version = tuple()
|
||||||
|
self.blender = tuple()
|
||||||
|
# optional fields
|
||||||
|
self.description = str()
|
||||||
|
self.author = str()
|
||||||
|
self.category = str()
|
||||||
|
self.location = str()
|
||||||
|
self.support = 'COMMUNITY'
|
||||||
|
self.warning = str()
|
||||||
|
self.wiki_url = str()
|
||||||
|
self.tracker_url = str()
|
||||||
|
|
||||||
|
## package stuff ##
|
||||||
|
self.url = str()
|
||||||
|
self.files = list()
|
||||||
|
|
||||||
|
## package stuff which is not stored in repo ##
|
||||||
|
self.installed = False
|
||||||
|
# contains Path() when not None
|
||||||
|
self.installed_location = None
|
||||||
|
self.is_user = False
|
||||||
|
self.enabled = False
|
||||||
|
self.repositories = set()
|
||||||
|
|
||||||
|
## other ##
|
||||||
|
# contains str() when not None
|
||||||
|
self.module_name = None
|
||||||
|
|
||||||
|
def set_from_dict(self, package_dict: dict):
|
||||||
|
"""
|
||||||
|
Get attributes from a dict such as produced by `to_dict`
|
||||||
|
"""
|
||||||
|
if package_dict is None:
|
||||||
|
raise PackageException("Can't set package from None")
|
||||||
|
|
||||||
|
self.files = package_dict['files']
|
||||||
|
self.url = package_dict['url']
|
||||||
|
self.bl_info = package_dict['bl_info']
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_dict(cls, package_dict: dict):
|
||||||
|
"""
|
||||||
|
Return a Package with values from dict
|
||||||
|
Used to read the package from json format
|
||||||
|
"""
|
||||||
|
pkg = cls()
|
||||||
|
pkg.set_from_dict(package_dict)
|
||||||
|
return pkg
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_blinfo(cls, blinfo: dict):
|
||||||
|
"""
|
||||||
|
Return a Package with bl_info filled in
|
||||||
|
"""
|
||||||
|
return cls.from_dict({'bl_info': blinfo})
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_module(cls, module):
|
||||||
|
"""
|
||||||
|
Return a Package object from an addon module
|
||||||
|
"""
|
||||||
|
from pathlib import Path
|
||||||
|
filepath = Path(module.__file__)
|
||||||
|
if filepath.name == '__init__.py':
|
||||||
|
filepath = filepath.parent
|
||||||
|
|
||||||
|
pkg = cls()
|
||||||
|
pkg.files = [filepath.name]
|
||||||
|
pkg.installed_location = str(filepath)
|
||||||
|
pkg.module_name = module.__name__
|
||||||
|
|
||||||
|
try:
|
||||||
|
pkg.bl_info = module.bl_info
|
||||||
|
except AttributeError as err:
|
||||||
|
raise exceptions.PackageException(
|
||||||
|
"Module does not appear to be an addon; no bl_info attribute") from err
|
||||||
|
return pkg
|
||||||
|
|
||||||
|
def to_dict(self) -> dict:
|
||||||
|
"""
|
||||||
|
Return a dict representation of the package
|
||||||
|
Used to store the package in json format
|
||||||
|
"""
|
||||||
|
return {
|
||||||
|
'bl_info': self.bl_info,
|
||||||
|
'url': self.url,
|
||||||
|
'files': self.files,
|
||||||
|
}
|
||||||
|
|
||||||
|
import typing
|
||||||
|
# bl_info properties
|
||||||
|
# required fields
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self) -> typing.Optional[str]:
|
||||||
|
"""Get name from bl_info"""
|
||||||
|
return self._bl_info.get('name')
|
||||||
|
|
||||||
|
@name.setter
|
||||||
|
def name(self, name: str) -> typing.Optional[str]:
|
||||||
|
if not isinstance(name, str):
|
||||||
|
raise exceptions.PackageException(
|
||||||
|
"refusing to set name to non str %r" % name)
|
||||||
|
self._bl_info['name'] = name
|
||||||
|
|
||||||
|
@property
|
||||||
|
def version(self) -> typing.Optional[tuple]:
|
||||||
|
"""Get version from bl_info"""
|
||||||
|
return tuple(self._bl_info.get('version'))
|
||||||
|
|
||||||
|
@version.setter
|
||||||
|
def version(self, version: tuple) -> typing.Optional[tuple]:
|
||||||
|
if isinstance(version, str):
|
||||||
|
raise exceptions.PackageException(
|
||||||
|
"Refusing to set version to non tuple %r" % version)
|
||||||
|
self._bl_info['version'] = version
|
||||||
|
|
||||||
|
@property
|
||||||
|
def blender(self) -> typing.Optional[tuple]:
|
||||||
|
"""Get blender from bl_info"""
|
||||||
|
return self._bl_info.get('blender')
|
||||||
|
|
||||||
|
@blender.setter
|
||||||
|
def blender(self, blender: tuple):
|
||||||
|
if isinstance(blender, str):
|
||||||
|
raise exceptions.PackageException(
|
||||||
|
"Refusing to set blender to non tuple %r" % blender)
|
||||||
|
self._bl_info['blender'] = blender
|
||||||
|
|
||||||
|
# optional fields
|
||||||
|
@property
|
||||||
|
def description(self) -> typing.Optional[str]:
|
||||||
|
"""Get description from bl_info"""
|
||||||
|
return self._bl_info.get('description')
|
||||||
|
|
||||||
|
@description.setter
|
||||||
|
def description(self, description: str):
|
||||||
|
self._bl_info['description'] = description
|
||||||
|
|
||||||
|
@property
|
||||||
|
def author(self) -> typing.Optional[str]:
|
||||||
|
"""Get author from bl_info"""
|
||||||
|
return self._bl_info.get('author')
|
||||||
|
|
||||||
|
@author.setter
|
||||||
|
def author(self, author: str):
|
||||||
|
self._bl_info['author'] = author
|
||||||
|
|
||||||
|
@property
|
||||||
|
def category(self) -> typing.Optional[str]:
|
||||||
|
"""Get category from bl_info"""
|
||||||
|
return self._bl_info.get('category')
|
||||||
|
|
||||||
|
@category.setter
|
||||||
|
def category(self, category: str):
|
||||||
|
self._bl_info['category'] = category
|
||||||
|
|
||||||
|
@property
|
||||||
|
def location(self) -> typing.Optional[str]:
|
||||||
|
"""Get location from bl_info"""
|
||||||
|
return self._bl_info.get('location')
|
||||||
|
|
||||||
|
@location.setter
|
||||||
|
def location(self, location: str):
|
||||||
|
self._bl_info['location'] = location
|
||||||
|
|
||||||
|
@property
|
||||||
|
def support(self) -> typing.Optional[str]:
|
||||||
|
"""Get support from bl_info"""
|
||||||
|
return self._bl_info.get('support')
|
||||||
|
|
||||||
|
@support.setter
|
||||||
|
def support(self, support: str):
|
||||||
|
self._bl_info['support'] = support
|
||||||
|
|
||||||
|
@property
|
||||||
|
def warning(self) -> typing.Optional[str]:
|
||||||
|
"""Get warning from bl_info"""
|
||||||
|
return self._bl_info.get('warning')
|
||||||
|
|
||||||
|
@warning.setter
|
||||||
|
def warning(self, warning: str):
|
||||||
|
self._bl_info['warning'] = warning
|
||||||
|
|
||||||
|
@property
|
||||||
|
def wiki_url(self) -> typing.Optional[str]:
|
||||||
|
"""Get wiki_url from bl_info"""
|
||||||
|
return self._bl_info.get('wiki_url')
|
||||||
|
|
||||||
|
@wiki_url.setter
|
||||||
|
def wiki_url(self, wiki_url: str):
|
||||||
|
self._bl_info['wiki_url'] = wiki_url
|
||||||
|
|
||||||
|
@property
|
||||||
|
def tracker_url(self) -> typing.Optional[str]:
|
||||||
|
"""Get tracker_url from bl_info"""
|
||||||
|
return self._bl_info.get('tracker_url')
|
||||||
|
|
||||||
|
@tracker_url.setter
|
||||||
|
def tracker_url(self, tracker_url: str):
|
||||||
|
self._bl_info['tracker_url'] = tracker_url
|
||||||
|
|
||||||
|
# useful for handling whole bl_info at once
|
||||||
|
@property
|
||||||
|
def bl_info(self) -> dict:
|
||||||
|
"""bl_info dict of package"""
|
||||||
|
return {
|
||||||
|
"name": self.name,
|
||||||
|
"version": self.version,
|
||||||
|
"blender": self.blender,
|
||||||
|
"description": self.description,
|
||||||
|
"author": self.author,
|
||||||
|
"category": self.category,
|
||||||
|
"location": self.location,
|
||||||
|
"support": self.support,
|
||||||
|
"warning": self.warning,
|
||||||
|
"wiki_url": self.wiki_url,
|
||||||
|
"tracker_url": self.tracker_url,
|
||||||
|
}
|
||||||
|
|
||||||
|
@bl_info.setter
|
||||||
|
def bl_info(self, blinfo: dict):
|
||||||
|
self.name = blinfo["name"]
|
||||||
|
self.version = blinfo["version"]
|
||||||
|
self.blender = blinfo["blender"]
|
||||||
|
|
||||||
|
self.description = blinfo.get("description", self.description)
|
||||||
|
self.author = blinfo.get("author", self.author)
|
||||||
|
self.category = blinfo.get("category", self.category)
|
||||||
|
self.location = blinfo.get("location", self.location)
|
||||||
|
self.support = blinfo.get("support", self.support)
|
||||||
|
self.warning = blinfo.get("warning", self.warning)
|
||||||
|
self.wiki_url = blinfo.get("wiki_url", self.wiki_url)
|
||||||
|
self.tracker_url = blinfo.get("tracker_url", self.tracker_url)
|
||||||
|
|
||||||
|
def test_is_user(self) -> bool:
|
||||||
|
"""Return true if package's install location is in user or preferences scripts path"""
|
||||||
|
import bpy
|
||||||
|
user_script_path = bpy.utils.script_path_user()
|
||||||
|
prefs_script_path = bpy.utils.script_path_pref()
|
||||||
|
|
||||||
|
if user_script_path is not None:
|
||||||
|
in_user = Path(user_script_path) in Path(
|
||||||
|
self.installed_location).parents
|
||||||
|
else:
|
||||||
|
in_user = False
|
||||||
|
|
||||||
|
if prefs_script_path is not None:
|
||||||
|
in_prefs = Path(prefs_script_path) in Path(
|
||||||
|
self.installed_location).parents
|
||||||
|
else:
|
||||||
|
in_prefs = False
|
||||||
|
|
||||||
|
return in_user or in_prefs

    def test_enabled(self) -> bool:
        """Return true if package is enabled"""
        import bpy
        if self.module_name is not None:
            return (self.module_name in bpy.context.user_preferences.addons)
        else:
            return False

    def enable(self):
        """Enable package"""
        # TODO: just use addon_utils for now
        if not self.module_name:
            raise PackageException(
                "Cannot enable package with unset module_name")
        import addon_utils
        addon_utils.enable(self.module_name, default_set=True)
        self.enabled = True

    def disable(self):
        """Disable package"""
        if not self.module_name:
            raise PackageException(
                "Cannot disable package with unset module_name")
        import addon_utils
        addon_utils.disable(self.module_name, default_set=True)
        self.enabled = False

    def test_installed(self) -> bool:
        """Return true if package is installed"""
        import addon_utils
        return len([Package.from_module(mod) for mod in addon_utils.modules(refresh=False) if
                    addon_utils.module_bl_info(mod)['name'] == self.name and
                    addon_utils.module_bl_info(mod)['version'] == self.version]) > 0

    def set_installed_metadata(self, installed_pkg):
        """Sets metadata specific to installed packages from the Package given as `installed_pkg`"""
        self.installed = installed_pkg.test_installed()
        self.enabled = installed_pkg.test_enabled()
        self.is_user = installed_pkg.test_is_user()
        self.module_name = installed_pkg.module_name
        self.installed_location = installed_pkg.installed_location

    def download(self, dest: Path, progress_callback=None) -> Path:
        """Downloads package to `dest`"""

        if not self.url:
            raise ValueError("Cannot download package without a URL")

        return actions.download(self.url, dest, progress_callback)

    def install(self, dest_dir: Path, cache_dir: Path, progress_callback=None):
        """Downloads package to `cache_dir`, then extracts/moves package to `dest_dir`"""

        log = logging.getLogger('%s.install' % __name__)

        downloaded = self.download(cache_dir, progress_callback)

        if not downloaded:
            log.debug('Download returned None, not going to install anything.')
            return

        actions.install(downloaded, dest_dir)
        utils.rm(downloaded)

    def __eq__(self, other):
        return self.name == other.name and self.version == other.version

    def __lt__(self, other):
        return self.version < other.version

    def __hash__(self):
        return hash((self.name, self.version))

    def __repr__(self) -> str:
        # return self.name
        return "Package('name': {}, 'version': {})".format(self.name, self.version)


class ConsolidatedPackage:
    """
    Stores a grouping of different versions of the same package
    """

    log = logging.getLogger(__name__ + ".ConsolidatedPackage")

    def __init__(self, pkg=None):
        self.versions = []
        # self.updateable = False

        if pkg is not None:
            self.add_version(pkg)

    @property
    def installed(self) -> bool:
        """Return true if any version of this package is installed"""
        for pkg in self.versions:
            if pkg.installed:
                return True
        return False

    @property
    def name(self) -> str:
        """
        Return name of this package. All package versions in a
        ConsolidatedPackage should have the same name by definition

        Returns None if there are no versions
        """
        try:
            return self.versions[0].name
        except IndexError:
            return None

    def get_latest_installed_version(self) -> Package:
        """
        Return the installed package with the highest version number.
        If no packages are installed, return None.
        """
        # self.versions is always sorted newer -> older, so we can just grab the first we find
        for pkg in self.versions:
            if pkg.installed:
                return pkg
        return None

    def get_latest_version(self) -> Package:
        """Return package with highest version number, returns None if there are no versions"""
        try:
            # this is always sorted with the highest on top
            return self.versions[0]
        except IndexError:
            return None

    def get_display_version(self) -> Package:
        """
        Return installed package with highest version number.
        If no version is installed, return highest uninstalled version.
        """
        pkg = self.get_latest_installed_version()
        if pkg is None:
            pkg = self.get_latest_version()
        return pkg

    def test_updateable(self) -> bool:
        """Return true if latest installed version of package is older than latest known version"""
        latest = self.get_latest_version()
        latest_installed = self.get_latest_installed_version()
        if latest is None or latest_installed is None:
            return False
        return latest_installed.version < latest.version

    def add_version(self, newpkg: Package):
        """Adds a package to the collection of versions"""

        if self.name and newpkg.name != self.name:
            raise exceptions.PackageException(
                "Name mismatch, refusing to add %s to %s" % (newpkg, self))

        for pkg in self:
            if pkg == newpkg:
                # set.union() returns a new set, so merge the repositories in place instead
                pkg.repositories.update(newpkg.repositories)
                if newpkg.installed:
                    pkg.set_installed_metadata(newpkg)
                return

        self.versions.append(newpkg)
        self.versions.sort(key=lambda v: v.version, reverse=True)
        # self.updateable = self.test_updateable()

    def __iter__(self):
        return (pkg for pkg in self.versions)

    def __repr__(self):
        return "ConsolidatedPackage<name={}>".format(self.name)
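
# Illustrative sketch (not part of the original module): how ConsolidatedPackage
# groups several versions of one package. The bl_info values below are made up,
# and Package.from_dict() is assumed to accept the same {'bl_info': ..., 'url': ...}
# dicts that Repository.set_from_dict() feeds it.
#
#   >>> old = Package.from_dict({'url': "http://example.com/thing-1.0.zip",
#   ...                          'bl_info': {'name': "Thing", 'version': (1, 0, 0), 'blender': (2, 78, 0)}})
#   >>> new = Package.from_dict({'url': "http://example.com/thing-1.1.zip",
#   ...                          'bl_info': {'name': "Thing", 'version': (1, 1, 0), 'blender': (2, 78, 0)}})
#   >>> meta = ConsolidatedPackage(old)
#   >>> meta.add_version(new)
#   >>> meta.get_latest_version().version   # versions are kept sorted newest-first
#   (1, 1, 0)
#   >>> meta.test_updateable()              # False until some version reports itself installed
#   False
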

class Repository:
    """
    Stores repository metadata (including packages)
    """

    log = logging.getLogger(__name__ + ".Repository")

    def __init__(self, url=None):
        self.name = str()
        self.url = url if url is not None else str()
        self.packages = list()
        self.filepath = Path()
        self._headers = dict()

    def refresh(self, storage_path: Path, progress_callback=None):
        """
        Requests repo.json from URL and embeds etag/last-modified headers
        """
        import requests

        if progress_callback is None:
            def progress_callback(x): return None

        progress_callback(0.0)

        if self.url is None:
            raise ValueError("Cannot refresh repository without a URL")

        url = utils.add_repojson_to_url(self.url)

        self.log.debug("Refreshing repository from %s", self.url)

        req_headers = {}
        # Do things this way to avoid adding empty objects/None to the req_headers dict
        try:
            req_headers['If-None-Match'] = self._headers['etag']
        except KeyError:
            pass
        try:
            req_headers['If-Modified-Since'] = self._headers['last-modified']
        except KeyError:
            pass

        try:
            resp = requests.get(url, headers=req_headers, timeout=60)
        except requests.exceptions.InvalidSchema as err:
            raise exceptions.DownloadException(
                "Invalid schema. Did you mean to use http://?") from err
        except requests.exceptions.ConnectionError as err:
            raise exceptions.DownloadException(
                "Failed to connect. Are you sure '%s' is the correct URL?" % url) from err
        except requests.exceptions.RequestException as err:
            raise exceptions.DownloadException(err) from err

        try:
            resp.raise_for_status()
        except requests.HTTPError as err:
            self.log.error('Error downloading %s: %s', url, err)
            raise exceptions.DownloadException(
                resp.status_code, resp.reason) from err

        if resp.status_code == requests.codes.not_modified:
            self.log.debug("Packagelist not modified")
            progress_callback(1.0)
            return

        resp_headers = {}
        try:
            resp_headers['etag'] = resp.headers['etag']
        except KeyError:
            pass
        try:
            resp_headers['last-modified'] = resp.headers['last-modified']
        except KeyError:
            pass

        self.log.debug("Found headers: %s", resp_headers)

        progress_callback(0.7)

        try:
            repodict = resp.json()
        except json.decoder.JSONDecodeError:
            self.log.exception("Failed to parse downloaded repository")
            raise exceptions.BadRepositoryException(
                "Could not parse repository downloaded from '%s'. Are you sure this is the correct URL?" % url
            )
        repodict['_headers'] = resp_headers
        repodict['url'] = self.url

        self.set_from_dict(repodict)
        self.to_file(storage_path / utils.format_filename(self.name, ".json"))

        progress_callback(1.0)

    def to_dict(self, sort=False, ids=False) -> dict:
        """
        Return a dict representation of the repository
        Used to store the repository in json format
        """
        packages = [p.to_dict() for p in self.packages]

        if sort:
            packages.sort(key=lambda p: p['bl_info']['name'].lower())

        if ids:
            for pkg in packages:
                # hash may be too big for a C int
                pkg['id'] = str(hash(pkg['url'] + pkg['bl_info']['name'] + self.name + self.url))

        return {
            'name': self.name,
            'packages': packages,
            'url': self.url,
            '_headers': self._headers,
        }

    def set_from_dict(self, repodict: dict):
        """
        Get repository attributes from a dict such as produced by `to_dict`
        Used to read the repository from json format
        """

        try:
            name = repodict['name']
        except KeyError as err:
            raise exceptions.BadRepositoryException(
                "Cannot set repository from dict; missing name") from err
        try:
            url = repodict['url']
        except KeyError as err:
            raise exceptions.BadRepositoryException(
                "Cannot set repository from dict; missing url") from err
        try:
            pkg_dicts = repodict['packages']
        except KeyError as err:
            raise exceptions.BadRepositoryException(
                "Cannot set repository from dict; missing packages") from err
        headers = repodict.get('_headers', {})

        self.name = name
        self.url = url
        for pkg_dict in pkg_dicts:
            try:
                pkg = Package.from_dict(pkg_dict)
            except exceptions.PackageException as err:
                msg = "Error parsing package {} in repository {}: {}".format(
                    pkg_dict['bl_info'].get('name'), self.name, err)
                display.pkg_errors.append(msg)
            else:
                self.add_package(pkg)
        self._headers = headers

    @classmethod
    def from_dict(cls, repodict: dict):
        """
        Like `set_from_dict`, but constructs and returns a new Repository
        instead of modifying an existing one
        """
        repo = cls()
        repo.set_from_dict(repodict)
        return repo

    def to_file(self, path: Path):
        """
        Dump repository to a json file at `path`.
        """
        if len(self.packages) <= 0:
            self.log.warning("Writing an empty repository")

        self.log.debug("URL is %s", self.url)

        with path.open('w', encoding='utf-8') as repo_file:
            json.dump(self.to_dict(), repo_file, indent=4, sort_keys=True)
        self.log.debug("Repository written to %s", path)

    @classmethod
    def from_file(cls, path: Path):
        """
        Read repository from a json file at `path`.
        """
        repo_file = path.open('r', encoding='utf-8')

        with repo_file:
            try:
                repo = cls.from_dict(json.load(repo_file))
            except json.JSONDecodeError as err:
                raise exceptions.BadRepositoryException(err) from err
        if repo.url is None or len(repo.url) == 0:
            raise exceptions.BadRepositoryException(
                "Repository missing URL")

        repo.filepath = path
        cls.log.debug("Repository read from %s", path)
        return repo

    def add_package(self, pkg: Package):
        """Add package to repository instance"""
        # TODO: check if package exists
        self.packages.append(pkg)

    def __repr__(self):
        return "Repository({}, {})".format(self.name, self.url)
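
A typical refresh cycle for a single repository looks roughly like the sketch below. It is illustrative only: the URL and storage directory are hypothetical, and it simply chains the methods defined above (`refresh()` performs the conditional GET with ETag/Last-Modified, `to_file()`/`from_file()` round-trip the result through JSON):

    from pathlib import Path

    storage = Path("/tmp/repositories")                 # hypothetical, must already exist
    repo = Repository("http://example.com/blender-addons")
    repo.refresh(storage)                               # downloads repo.json and writes it to storage
    again = Repository.from_file(storage / utils.format_filename(repo.name, ".json"))
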
104
release/scripts/modules/bpkg/utils.py
Normal file
@@ -0,0 +1,104 @@
from pathlib import Path
import shutil
import logging


def fmt_version(version_number: tuple) -> str:
    """Take version number as a tuple and format it as a string"""
    vstr = str(version_number[0])
    for component in version_number[1:]:
        vstr += "." + str(component)
    return vstr


def format_filename(s: str, ext=None) -> str:
    """Take a string and turn it into a reasonable filename"""
    import string
    if ext is None:
        ext = ""
    valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
    filename = ''.join(char for char in s if char in valid_chars)
    filename = filename.replace(' ', '_')
    filename = filename.lower()
    filename += ext
    return filename


def sanitize_repository_url(url: str) -> str:
    """Sanitize repository url by removing a trailing `repo.json` path component"""
    from urllib.parse import urlsplit, urlunsplit
    parsed_url = urlsplit(url)
    # str.rstrip() strips a set of characters, not a suffix, so remove the suffix explicitly
    new_path = parsed_url.path
    if new_path.endswith("repo.json"):
        new_path = new_path[:-len("repo.json")]
    return urlunsplit((parsed_url.scheme, parsed_url.netloc, new_path, parsed_url.query, parsed_url.fragment))


def add_repojson_to_url(url: str) -> str:
    """Add `repo.json` to the path component of a url"""
    from urllib.parse import urlsplit, urlunsplit
    parsed_url = urlsplit(url)
    new_path = parsed_url.path.rstrip('/')
    new_path += "/repo.json"
    return urlunsplit((parsed_url.scheme, parsed_url.netloc, new_path, parsed_url.query, parsed_url.fragment))
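
# Illustrative behaviour of the two URL helpers above (the example URL is made up):
#
#   >>> add_repojson_to_url("http://example.com/addons/")
#   'http://example.com/addons/repo.json'
#   >>> sanitize_repository_url("http://example.com/addons/repo.json")
#   'http://example.com/addons/'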


def load_repositories(repo_storage_path: Path) -> list:
    """Load all json files in repo storage path"""
    repositories = []
    from .types import Repository
    for repofile in repo_storage_path.glob('*.json'):
        repo = Repository.from_file(repofile)
        repositories.append(repo)
    return repositories


def rm(path: Path):
    """Delete whatever is specified by `path`"""
    if path.is_dir():
        shutil.rmtree(str(path))
    else:
        path.unlink()


class InplaceBackup:
    """Utility class for moving a file out of the way by appending a '~'"""

    log = logging.getLogger('%s.inplace-backup' % __name__)

    def __init__(self, path: Path):
        self.path = path
        # contains Path() when not None
        self.backup_path = None
        self.backup()

    def backup(self):
        """Move 'path' to 'path~'"""
        if not self.path.exists():
            raise FileNotFoundError("Can't backup path which doesn't exist")

        self.backup_path = self.path.with_name(self.path.name + '~')
        if self.backup_path.exists():
            self.log.warning("Overwriting existing backup '{}'".format(self.backup_path))
            rm(self.backup_path)

        shutil.move(str(self.path), str(self.backup_path))

    def restore(self):
        """Move 'path~' to 'path'"""
        if not self.backup_path:
            raise RuntimeError("Can't restore file before backing it up")

        if not self.backup_path.exists():
            raise FileNotFoundError("Can't restore backup which doesn't exist")

        if self.path.exists():
            self.log.warning("Overwriting '{0}' with backup file".format(self.path))
            rm(self.path)

        shutil.move(str(self.backup_path), str(self.path))

    def remove(self):
        """Remove 'path~'"""
        rm(self.backup_path)
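
The backup/restore pair above is meant to wrap a risky file operation, roughly as in this sketch (illustrative only; the path is hypothetical and `install_new_version` is a placeholder for the real work):

    target = Path("/tmp/addons/old_addon.py")   # hypothetical file about to be replaced
    backup = InplaceBackup(target)              # moves it to old_addon.py~
    try:
        install_new_version(target)             # placeholder; whatever might fail goes here
    except Exception:
        backup.restore()                        # put the original back on failure
        raise
    else:
        backup.remove()                         # discard the backup on success
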
@@ -38,6 +38,7 @@ _modules = [
     "object",
     "object_randomize_transform",
     "object_quick_effects",
+    "package",
     "presets",
     "rigidbody",
     "screen_play_rendered_anim",
612
release/scripts/startup/bl_operators/package.py
Normal file
@@ -0,0 +1,612 @@
# HACK:
# due to lack of fork() on windows, multiprocessing will re-execute this module
# in a new process and `import bpy` will fail. In such cases we only need
# subproc, everything else is only used to spawn the subprocess in the first
# place.
try:
    import bpy
    from bpy.types import Operator
except ImportError:
    from bpkg import subproc
else:
    import logging
    import bpkg
    from bpkg import (
        subproc,
        messages,
    )
    from bpkg.types import (
        Package,
        ConsolidatedPackage,
    )
    from pathlib import Path
    from collections import OrderedDict
    import multiprocessing

    # Under windows, multiprocessing must start a new process entirely. It
    # expects sys.executable to point to python, but in blender sys.executable
    # points to blender's executable. We can override this with set_executable,
    # but this acts globally unless we make a special context.
    # Also see:
    # https://docs.python.org/3.6/library/multiprocessing.html#multiprocessing.set_executable
    mp_context = multiprocessing.get_context()
    mp_context.set_executable(bpy.app.binary_path_python)

    class SubprocMixin:
        """Mix-in class for things that need to be run in a subprocess."""

        log = logging.getLogger(__name__ + '.SubprocMixin')
        _state = 'INITIALIZING'
        # time at which we stop waiting for an abort response and just terminate the process
        _abort_timeout = 0
        # how long to wait (in seconds) to forcibly terminate subprocess after quit
        _abort_wait = 10

        # Mapping from message type (see bpkg.messages) to handler function.
        # Should be constructed before modal() gets called.
        msg_handlers = {}

        def execute(self, context):
            return self.invoke(context, None)

        def quit(self):
            """Signals the state machine to stop this operator from running."""
            self._state = 'QUIT'

        def invoke(self, context, event):
            self.pipe_blender, self.pipe_subproc = multiprocessing.Pipe()

            # The subprocess should just be terminated when Blender quits. Without this,
            # Blender would hang while closing, until the subprocess terminates itself.
            # TODO: Perhaps it would be better to fork when blender exits?
            self.process = self.create_subprocess()
            self.process.daemon = True
            self.process.start()

            self._state = 'RUNNING'

            wm = context.window_manager
            wm.modal_handler_add(self)
            self.timer = wm.event_timer_add(0.1, context.window)

            return {'RUNNING_MODAL'}

        def modal(self, context, event):
            import time

            if event.type != 'TIMER':
                return {'PASS_THROUGH'}

            if self._state == 'ABORTING' and time.time() > self._abort_timeout:
                self.log.error(
                    'No response from subprocess to abort request, terminating it.')
                self.report(
                    {'ERROR'}, 'No response from subprocess to abort request, terminating it.')
                self.process.terminate()
                self._finish(context)
                return {'CANCELLED'}

            while self.pipe_blender.poll():
                self.handle_received_data()

            if self._state == 'QUIT':
                self._finish(context)
                return {'FINISHED'}

            if not self.process.is_alive():
                self.report_process_died()
                self._finish(context)
                return {'CANCELLED'}

            return {'RUNNING_MODAL'}

        def abort(self):
            import time

            # Allow the subprocess 10 seconds to respond to our abort message.
            self._abort_timeout = time.time() + self._abort_wait
            self._state = 'ABORTING'

            self.pipe_blender.send(messages.Abort())

        def _finish(self, context):
            try:
                self.cancel(context)
            except AttributeError:
                pass

            global bpkg_operation_running

            context.window_manager.event_timer_remove(self.timer)
            bpkg_operation_running = False

            if self.process and self.process.is_alive():
                self.log.debug('Waiting for subprocess to quit')
                try:
                    self.process.join(timeout=self._abort_wait)
                except multiprocessing.TimeoutError:
                    self.log.warning(
                        'Subprocess is hanging, terminating it forcefully.')
                    self.process.terminate()
                else:
                    self.log.debug(
                        'Subprocess stopped with exit code %i', self.process.exitcode)

        def handle_received_data(self):
            recvd = self.pipe_blender.recv()

            self.log.debug('Received message from subprocess: %s', recvd)
            try:
                handler = self.msg_handlers[type(recvd)]
            except KeyError:
                self.log.error('Unable to handle received message %s', recvd)
                # Maybe we shouldn't show this to the user?
                self.report(
                    {'WARNING'}, 'Unable to handle received message %s' % recvd)
                return

            handler(recvd)

        def create_subprocess(self):
            """Implement this in a subclass.

            :rtype: multiprocessing.Process
            """
            raise NotImplementedError()

        def report_process_died(self):
            """Provides the user with sensible information when the process has died.

            Implement this in a subclass.
            """
            raise NotImplementedError()

    class PACKAGE_OT_install(SubprocMixin, Operator):
        bl_idname = 'package.install'
        bl_label = 'Install package'
        bl_description = 'Downloads and installs a Blender add-on package'
        bl_options = {'REGISTER'}

        package_name = bpy.props.StringProperty(
            name='package_name',
            description='The name of the package to install'
        )

        log = logging.getLogger(__name__ + '.PACKAGE_OT_install')

        def invoke(self, context, event):
            if not self.package_name:
                self.report({'ERROR'}, 'Package name not given')
                return {'CANCELLED'}

            return super().invoke(context, event)

        def create_subprocess(self):
            """Starts the download process.

            Also registers the message handlers.

            :rtype: multiprocessing.Process
            """

            self.msg_handlers = {
                messages.Progress: self._subproc_progress,
                messages.DownloadError: self._subproc_download_error,
                messages.InstallError: self._subproc_install_error,
                messages.Success: self._subproc_success,
                messages.Aborted: self._subproc_aborted,
            }

            wm = bpy.context.window_manager
            wm.progress_begin(0, 1)

            package = bpkg.packages[self.package_name].get_latest_version()

            import pathlib

            # TODO: We need other paths besides this one on subprocess end, so it might be better to pass them all at once.
            # For now, just pass this one.
            install_path = pathlib.Path(
                bpy.utils.user_resource('SCRIPTS', 'addons', create=True))
            self.log.debug("Using %s as install path", install_path)

            import addon_utils
            proc = mp_context.Process(target=subproc.download_and_install_package,
                                      args=(self.pipe_subproc, package, install_path))
            return proc

        def _subproc_progress(self, progmsg: messages.Progress):
            wm = bpy.context.window_manager
            wm.progress_update(progmsg.progress)

        def _subproc_download_error(self, error: messages.DownloadError):
            self.report({'ERROR'}, 'Unable to download package: %s' % error.message)
            self.quit()

        def _subproc_install_error(self, error: messages.InstallError):
            self.report({'ERROR'}, 'Unable to install package: %s' % error.message)
            self.quit()

        def _subproc_success(self, success: messages.Success):
            self.report({'INFO'}, 'Package installed successfully')
            bpkg.refresh_packages()
            bpy.context.area.tag_redraw()
            wm = bpy.context.window_manager
            wm.progress_end()
            self.quit()

        def _subproc_aborted(self, aborted: messages.Aborted):
            self.report(
                {'ERROR'}, 'Package installation aborted per your request')
            self.quit()

        def report_process_died(self):
            if self.process.exitcode:
                self.log.error(
                    'Process died without telling us! Exit code was %i', self.process.exitcode)
                self.report(
                    {'ERROR'}, 'Error downloading package, exit code %i' % self.process.exitcode)
            else:
                self.log.error(
                    'Process died without telling us! Exit code was 0 though')
                self.report(
                    {'WARNING'}, 'Error downloading package, but process finished OK. This is weird.')

    class PACKAGE_OT_uninstall(SubprocMixin, Operator):
        bl_idname = 'package.uninstall'
        bl_label = 'Uninstall package'
        bl_description = "Remove installed package files from filesystem"
        bl_options = {'REGISTER'}

        package_name = bpy.props.StringProperty(
            name='package_name', description='The name of the package to uninstall')

        log = logging.getLogger(__name__ + '.PACKAGE_OT_uninstall')

        def invoke(self, context, event):
            if not self.package_name:
                self.report({'ERROR'}, 'Package name not given')
                return {'CANCELLED'}

            return super().invoke(context, event)

        def create_subprocess(self):
            """Starts the uninstall process and registers the message handlers.

            :rtype: multiprocessing.Process
            """

            self.msg_handlers = {
                messages.UninstallError: self._subproc_uninstall_error,
                messages.Success: self._subproc_success,
            }

            import pathlib
            install_path = pathlib.Path(
                bpy.utils.user_resource('SCRIPTS', 'addons', create=True))

            package = bpkg.packages[self.package_name].get_latest_version()

            proc = mp_context.Process(target=subproc.uninstall_package,
                                      args=(self.pipe_subproc, package, install_path))
            return proc

        def _subproc_uninstall_error(self, error: messages.InstallError):
            self.report({'ERROR'}, error.message)
            self.quit()

        def _subproc_success(self, success: messages.Success):
            self.report({'INFO'}, 'Package uninstalled successfully')
            bpkg.refresh_packages()
            bpy.context.area.tag_redraw()
            self.quit()

        def report_process_died(self):
            if self.process.exitcode:
                self.log.error(
                    'Process died without telling us! Exit code was %i', self.process.exitcode)
                self.report(
                    {'ERROR'}, 'Error uninstalling package, exit code %i' % self.process.exitcode)
            else:
                self.log.error(
                    'Process died without telling us! Exit code was 0 though')
                self.report(
                    {'WARNING'}, 'Error uninstalling package, but process finished OK. This is weird.')

    class PACKAGE_OT_refresh(SubprocMixin, Operator):
        bl_idname = "package.refresh"
        bl_label = "Refresh"
        bl_description = 'Check repositories for new and updated packages'
        bl_options = {'REGISTER'}

        log = logging.getLogger(__name__ + ".PACKAGE_OT_refresh")
        _running = False

        def invoke(self, context, event):
            wm = context.window_manager
            self.repositories = wm.package_repositories
            if not self.repositories:
                bpkg.refresh_packages()
                return {'FINISHED'}

            PACKAGE_OT_refresh._running = True
            return super().invoke(context, event)

        @classmethod
        def poll(cls, context):
            return not cls._running

        def _finish(self, context):
            super()._finish(context)
            PACKAGE_OT_refresh._running = False
            context.area.tag_redraw()

        def create_subprocess(self):
            """Starts the download process.

            Also registers the message handlers.

            :rtype: multiprocessing.Process
            """

            wm = bpy.context.window_manager
            wm.progress_begin(0, 1)

            self.msg_handlers = {
                messages.Progress: self._subproc_progress,
                messages.SubprocError: self._subproc_error,
                messages.DownloadError: self._subproc_download_error,
                messages.Success: self._subproc_success,
                messages.BadRepositoryError: self._subproc_repository_error,
                messages.Aborted: self._subproc_aborted,
            }

            import pathlib

            storage_path = pathlib.Path(bpy.utils.user_resource(
                'CONFIG', 'repositories', create=True))
            repository_urls = [repo.url for repo in self.repositories]
            self.log.debug("Repository urls %s", repository_urls)

            proc = mp_context.Process(target=subproc.refresh_repositories,
                                      args=(self.pipe_subproc, storage_path, repository_urls))
            return proc

        def _subproc_progress(self, progmsg: messages.Progress):
            wm = bpy.context.window_manager
            wm.progress_update(progmsg.progress)

        def _subproc_error(self, error: messages.SubprocError):
            self.report(
                {'ERROR'}, 'Unable to refresh package list: %s' % error.message)
            self.quit()

        def _subproc_download_error(self, error: messages.DownloadError):
            self.report(
                {'ERROR'}, 'Unable to download package list: %s' % error.message)
            self.quit()

        def _subproc_repository_error(self, error: messages.BadRepositoryError):
            self.report({'ERROR'}, str(error.message))
            self.quit()

        def _subproc_success(self, success: messages.Success):
            self.report({'INFO'}, 'Finished refreshing lists')
            bpkg.refresh_repository_props()
            bpkg.refresh_packages()
            wm = bpy.context.window_manager
            wm.progress_end()
            self.quit()

        def _subproc_aborted(self, aborted: messages.Aborted):
            self.report(
                {'ERROR'}, 'Package list retrieval aborted per your request')
            self.quit()

        def report_process_died(self):
            if self.process.exitcode:
                self.log.error(
                    'Refresh process died without telling us! Exit code was %i', self.process.exitcode)
                self.report(
                    {'ERROR'}, 'Error refreshing package lists, exit code %i' % self.process.exitcode)
            else:
                self.log.error(
                    'Refresh process died without telling us! Exit code was 0 though')
                self.report(
                    {'WARNING'}, 'Error refreshing package lists, but process finished OK. This is weird.')

    class PACKAGE_UL_repositories(bpy.types.UIList):
        def draw_item(self, context, layout, data, item, icon, active_data, active_propname):
            layout.alignment = 'LEFT'
            layout.prop(item, "enabled", text="")
            if len(item.name) == 0:
                layout.label(item['url'])
            else:
                layout.label(item.name)

    class PACKAGE_OT_add_repository(Operator):
        bl_idname = "package.add_repository"
        bl_label = "Add Repository"

        url = bpy.props.StringProperty(name="Repository URL")

        def invoke(self, context, event):
            wm = context.window_manager
            return wm.invoke_props_dialog(self)

        def execute(self, context):
            wm = context.window_manager

            if not self.url:
                self.report({'ERROR'}, "Repository URL not specified")
                return {'CANCELLED'}

            for repo in wm.package_repositories:
                if repo['url'] == self.url:
                    self.report({'ERROR'}, "Repository already added")
                    return {'CANCELLED'}

            repo = wm.package_repositories.add()
            repo.url = bpkg.utils.sanitize_repository_url(self.url)

            context.area.tag_redraw()
            return {'FINISHED'}

    class PACKAGE_OT_remove_repository(Operator):
        bl_idname = "package.remove_repository"
        bl_label = "Remove Repository"

        def execute(self, context):
            wm = context.window_manager
            try:
                repo = wm.package_repositories[wm.package_active_repository]
            except AttributeError:
                return {'CANCELLED'}

            try:
                filepath = Path(repo['filepath'])
            except KeyError:
                pass
            else:
                if not filepath.exists():
                    raise ValueError("Failed to find repository file")
                filepath.unlink()

            wm.package_repositories.remove(wm.package_active_repository)
            context.area.tag_redraw()
            return {'FINISHED'}

    class PACKAGE_OT_edit_repositories(Operator):
        bl_idname = "package.edit_repositories"
        bl_label = "Edit Repositories"

        def check(self, context):
            # TODO: always refresh settings for now
            return True

        def execute(self, context):
            bpy.ops.package.refresh()
            return {'FINISHED'}

        def invoke(self, context, event):
            wm = context.window_manager
            return wm.invoke_props_dialog(self, width=500, height=300)

        def draw(self, context):
            layout = self.layout
            wm = context.window_manager

            row = layout.row()
            row.template_list("PACKAGE_UL_repositories", "", wm,
                              "package_repositories", wm, "package_active_repository")
            col = row.column(align=True)
            col.operator("package.add_repository", text="", icon='ZOOMIN')
            col.operator("package.remove_repository", text="", icon='ZOOMOUT')

    class WM_OT_package_toggle_expand(Operator):
        bl_idname = "wm.package_toggle_expand"
        bl_label = ""
        bl_description = "Toggle display of extended information for given package (hold shift to collapse all other packages)"
        bl_options = {'INTERNAL'}

        log = logging.getLogger(__name__ + ".WM_OT_package_toggle_expand")

        package_name = bpy.props.StringProperty(
            name="Package Name",
            description="Name of package to expand/collapse",
        )

        def invoke(self, context, event):
            if event.shift:
                bpkg.display.expanded_packages.clear()
            if self.package_name in bpkg.display.expanded_packages:
                bpkg.display.expanded_packages.remove(self.package_name)
            else:
                bpkg.display.expanded_packages.append(self.package_name)

            return {'FINISHED'}

    class WM_OT_package_toggle_preferences(Operator):
        bl_idname = "wm.package_toggle_preferences"
        bl_label = ""
        bl_description = "Toggle display of package preferences"
        bl_options = {'INTERNAL'}

        package_name = bpy.props.StringProperty(
            name="Package Name",
            description="Name of package whose preferences to display",
        )

        def invoke(self, context, event):
            if bpkg.display.preference_package == self.package_name:
                bpkg.display.preference_package = None
            else:
                bpkg.display.preference_package = self.package_name
            return {'FINISHED'}

    class PACKAGE_OT_toggle_enabled(Operator):
        bl_idname = "package.toggle_enabled"
        bl_label = ""
        bl_description = "Enable given package if it's disabled, and vice versa if it's enabled"

        log = logging.getLogger(__name__ + ".PACKAGE_OT_toggle_enabled")

        package_name = bpy.props.StringProperty(
            name="Package Name",
            description="Name of package to enable",
        )

        def execute(self, context):
            import addon_utils
            metapkg = bpkg.packages[self.package_name]

            if not metapkg.installed:
                self.report(
                    {'ERROR'}, "Can't enable package which isn't installed")
                return {'CANCELLED'}

            pkg = metapkg.get_latest_installed_version()
            if pkg.enabled:
                pkg.disable()
            else:
                pkg.enable()

            return {'FINISHED'}

    class PACKAGE_OT_disable(Operator):
        bl_idname = "package.disable"
        bl_label = ""
        bl_description = "Disable given package"

        log = logging.getLogger(__name__ + ".PACKAGE_OT_disable")

        package_name = bpy.props.StringProperty(
            name="Package Name",
            description="Name of package to disable",
        )

        def execute(self, context):
            package = bpkg.packages[self.package_name].get_display_version()

            if not package.module_name:
                self.log.error("Can't disable package without a module name")
                return {'CANCELLED'}

            ret = bpy.ops.wm.addon_disable(module=package.module_name)
            if ret == {'FINISHED'}:
                bpkg.packages[self.package_name].enabled = False
            return ret

    classes = (
        PACKAGE_OT_install,
        PACKAGE_OT_uninstall,
        PACKAGE_OT_toggle_enabled,
        PACKAGE_OT_refresh,
        WM_OT_package_toggle_expand,
        WM_OT_package_toggle_preferences,
        PACKAGE_OT_add_repository,
        PACKAGE_OT_remove_repository,
        PACKAGE_OT_edit_repositories,
        PACKAGE_UL_repositories,
    )
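
If this module were registered by hand rather than through the startup `_modules` list, the conventional pattern over the `classes` tuple would look roughly like the sketch below (illustrative only, not part of the branch):

    def register():
        for cls in classes:
            bpy.utils.register_class(cls)


    def unregister():
        for cls in reversed(classes):
            bpy.utils.unregister_class(cls)
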
@@ -46,6 +46,7 @@ _modules = [
     "properties_object",
     "properties_paint_common",
     "properties_grease_pencil_common",
+    "properties_package",
     "properties_particle",
     "properties_physics_cloth",
     "properties_physics_common",
@@ -99,7 +100,7 @@ def register():
         register_class(cls)

     # space_userprefs.py
-    from bpy.props import StringProperty, EnumProperty
+    from bpy.props import StringProperty, EnumProperty, CollectionProperty, IntProperty
     from bpy.types import WindowManager

     def addon_filter_items(self, context):
@@ -120,9 +121,9 @@ def register():
         items.extend([(cat, cat, "") for cat in sorted(items_unique)])
         return items

-    WindowManager.addon_search = StringProperty(
+    WindowManager.package_search = StringProperty(
         name="Search",
-        description="Search within the selected filter",
+        description="Filter packages by name",
         options={'TEXTEDIT_UPDATE'},
     )
     WindowManager.addon_filter = EnumProperty(
@@ -141,6 +142,31 @@ def register():
         default={'OFFICIAL', 'COMMUNITY'},
         options={'ENUM_FLAG'},
     )
+
+    WindowManager.package_state_filter = EnumProperty(
+        items=[('AVAILABLE', "Available", "All packages in selected repositories"),
+               ('INSTALLED', "Installed", "All installed packages"),
+               ('UPDATES', "Updates", "All installed packages for which there is a newer version available")
+               ],
+        name="Install filter",
+        default='AVAILABLE',
+    )
+
+    from bpkg import display
+    WindowManager.package_repository_filter = EnumProperty(
+        items=display.repository_items,
+        name="Repository filter",
+        options={'ENUM_FLAG'},
+    )
+
+    from .properties_package import RepositoryProperty
+    WindowManager.package_repositories = CollectionProperty(
+        type=RepositoryProperty,
+        name="Repositories",
+    )
+
+    WindowManager.package_active_repository = IntProperty()
+
     # done...
27
release/scripts/startup/bl_ui/properties_package.py
Normal file
@@ -0,0 +1,27 @@
import bpy
from bpy.types import UIList, PropertyGroup


class RepositoryProperty(PropertyGroup):
    name = bpy.props.StringProperty(name="Name")
    url = bpy.props.StringProperty(name="URL")
    filepath = bpy.props.StringProperty(name="Filepath")
    status = bpy.props.EnumProperty(name="Status", items=[
        ("OK", "Okay", "FILE_TICK"),
        ("NOTFOUND", "Not found", "ERROR"),
        ("NOCONNECT", "Could not connect", "QUESTION"),
    ])
    enabled = bpy.props.BoolProperty(name="Enabled")


class PACKAGE_UL_repositories(UIList):
    def draw_item(self, context, layout, data, item, icon, active_data, active_propname):
        try:
            layout.label(text=item['name'] + ":", icon='FILE_TICK')
        except KeyError:  # name not defined while still downloading
            layout.label(text="", icon='FILE_REFRESH')
        # TODO: for some reason unembossing the following causes blender to become unresponsive when ctrl clicking the url
        layout.prop(item, "url", text="")


classes = (
    RepositoryProperty,
    PACKAGE_UL_repositories,
)
@@ -1312,6 +1312,379 @@ class USERPREF_MT_addons_online_resources(Menu):
                         ).url = bpy.types.WM_OT_doc_view._prefix + "/info_quickstart.html"
         layout.operator("wm.url_open", text="Add-on Tutorial", icon='URL',
                         ).url = bpy.types.WM_OT_doc_view._prefix + "/info_tutorial_addon.html"

class USERPREF_PT_packages(Panel):
    bl_label = "Package Management"
    bl_space_type = 'USER_PREFERENCES'
    bl_region_type = 'WINDOW'
    bl_options = {'HIDE_HEADER'}

    _started = False

    @classmethod
    def poll(cls, context):
        userpref = context.user_preferences
        return (userpref.active_section == 'PACKAGES')

    def draw(self, context):
        import bpkg
        import bpkg.display
        from bpkg.types import (ConsolidatedPackage, Package)
        layout = self.layout
        wm = context.window_manager

        mainrow = layout.row()
        spl = mainrow.split(.2)
        sidebar = spl.column(align=True)
        pkgzone = spl.column()

        sidebar.operator("package.refresh", text="Check for updates")
        sidebar.label("Repositories")

        col = sidebar.column(align=True)
        col.prop(wm, "package_repository_filter")
        # row.template_list("PACKAGE_UL_repositories", "", wm, "package_repositories", wm, "package_active_repository")
        # col = row.column(align=True)
        # col.operator("package.add_repository", text="", icon='ZOOMIN')
        # col.operator("package.remove_repository", text="", icon='ZOOMOUT')
        # sidebar.separator()

        col.operator("package.edit_repositories")

        sidebar.separator()
        sidebar.label("Category")
        sidebar.prop(wm, "addon_filter", text="")

        sidebar.separator()
        sidebar.label("Support level")
        sidebar.prop(wm, "addon_support")

        top = pkgzone.row()
        spl = top.split(.6)
        spl.prop(wm, "package_search", text="", icon='VIEWZOOM')
        spl_r = spl.row()
        spl_r.prop(wm, "package_state_filter", expand=True)

        def filter_packages(filters: dict, packages: dict) -> list:  # {{{
            """Returns filtered and sorted list of names of packages which match filters"""

            # TODO: using lower() for case-insensitive comparison doesn't work for some languages
            def match_contains(pkg: Package) -> bool:
                return filters['search'].lower() in pkg.name.lower()

            def match_startswith(pkg: Package) -> bool:
                return pkg.name.lower().startswith(filters['search'].lower())

            def match_support(pkg: Package) -> bool:
                return set((pkg.support,)).issubset(filters['support'])

            def match_installstate(metapkg: ConsolidatedPackage) -> bool:
                if filters['installstate'] == 'AVAILABLE':
                    return True

                if filters['installstate'] == 'INSTALLED':
                    return metapkg.installed

                if filters['installstate'] == 'UPDATES':
                    return metapkg.installed and metapkg.test_updateable()

                return False

            def match_repositories(metapkg) -> bool:
                pkg = metapkg.get_display_version()
                if pkg.installed:
                    return True
                if len(pkg.repositories) == 0:
                    return True
                pkg_reponames = set(repo.name for repo in pkg.repositories)
                if len(pkg_reponames.intersection(filters['repository'])) > 0:
                    return True
                return False

            def match_category(pkg: Package) -> bool:
                filterstr = filters['category'].lower()

                # handle special categories
                if filterstr == 'all':
                    return True
                if filterstr == 'user':
                    return pkg.is_user
                if filterstr == 'enabled':
                    return pkg.enabled
                if filterstr == 'disabled':
                    return not pkg.enabled

                if not pkg.category:
                    return False
                if pkg.category.lower() == filterstr:
                    return True
                return False

            # use two lists as a simple way of putting "matches from the beginning" on top
            contains = []
            startswith = []

            for pkgname, metapkg in packages.items():
                pkg = metapkg.get_display_version()
                if match_repositories(metapkg)\
                        and match_category(pkg)\
                        and match_support(pkg)\
                        and match_installstate(metapkg):
                    if len(filters['search']) == 0:
                        startswith.append(pkgname)
                        continue
                    if match_startswith(pkg):
                        startswith.append(pkgname)
                        continue
                    if match_contains(pkg):
                        contains.append(pkgname)
                        continue

            return sorted(startswith) + sorted(contains)  # }}}
def draw_package(metapkg: ConsolidatedPackage, layout: bpy.types.UILayout): #{{{
|
||||||
|
"""Draws the given package"""
|
||||||
|
pkg = metapkg.get_display_version()
|
||||||
|
|
||||||
|
def draw_operators(metapkg, layout): # {{{
|
||||||
|
"""
|
||||||
|
Draws install, uninstall, update, enable, disable, and preferences
|
||||||
|
buttons as applicable for the given package
|
||||||
|
"""
|
||||||
|
pkg = metapkg.get_display_version()
|
||||||
|
|
||||||
|
if metapkg.installed:
|
||||||
|
if metapkg.test_updateable():
|
||||||
|
layout.operator(
|
||||||
|
"package.install",
|
||||||
|
text="Update to {}".format(bpkg.utils.fmt_version(metapkg.get_latest_version().version)),
|
||||||
|
).package_name=metapkg.name
|
||||||
|
layout.separator()
|
||||||
|
|
||||||
|
#TODO: only show preferences button if addon has preferences to show
|
||||||
|
if pkg.enabled:
|
||||||
|
layout.operator(
|
||||||
|
"wm.package_toggle_preferences",
|
||||||
|
text="Preferences",
|
||||||
|
).package_name=metapkg.name
|
||||||
|
row = layout.row()
|
||||||
|
row.alignment='RIGHT'
|
||||||
|
row.operator(
|
||||||
|
"package.uninstall",
|
||||||
|
text="Uninstall",
|
||||||
|
).package_name=metapkg.name
|
||||||
|
row.enabled = pkg.is_user
|
||||||
|
else:
|
||||||
|
layout.operator(
|
||||||
|
"package.install",
|
||||||
|
text="Install",
|
||||||
|
).package_name=metapkg.name
|
||||||
|
# }}}
|
||||||
|
|
||||||
|
def draw_preferences(pkg: Package, layout: bpy.types.UILayout):
|
||||||
|
"""Draw the package's preferences in the given layout"""
|
||||||
|
addon_preferences = context.user_preferences.addons[pkg.module_name].preferences
|
||||||
|
if addon_preferences is not None:
|
||||||
|
draw = getattr(addon_preferences, "draw", None)
|
||||||
|
if draw is not None:
|
||||||
|
addon_preferences_class = type(addon_preferences)
|
||||||
|
box_prefs = layout.box()
|
||||||
|
box_prefs.label("Preferences:")
|
||||||
|
addon_preferences_class.layout = box_prefs
|
||||||
|
try:
|
||||||
|
draw(context)
|
||||||
|
except:
|
||||||
|
import traceback
|
||||||
|
traceback.print_exc()
|
||||||
|
box_prefs.label(text="Error (see console)", icon='ERROR')
|
||||||
|
del addon_preferences_class.layout
|
||||||
|
|
||||||
|
def collapsed(metapkg, layout):# {{{
|
||||||
|
"""Draw collapsed version of package layout"""
|
||||||
|
pkg = metapkg.get_display_version()
|
||||||
|
|
||||||
|
# Only 'install' button is shown when package isn't installed,
|
||||||
|
# so allow more space for title/description.
|
||||||
|
spl = layout.split(.5 if pkg.installed else .8)
|
||||||
|
|
||||||
|
metacol = spl.column(align=True)
|
||||||
|
|
||||||
|
buttonrow = spl.row(align=True)
|
||||||
|
buttonrow.alignment = 'RIGHT'
|
||||||
|
|
||||||
|
l1 = metacol.row()
|
||||||
|
l2 = metacol.row()
|
||||||
|
|
||||||
|
draw_operators(metapkg, buttonrow)
|
||||||
|
|
||||||
|
if pkg.installed:
|
||||||
|
metacol.active = pkg.enabled
|
||||||
|
l1.operator("package.toggle_enabled",
|
||||||
|
icon='CHECKBOX_HLT' if pkg.enabled else 'CHECKBOX_DEHLT',
|
||||||
|
text="",
|
||||||
|
emboss=False,
|
||||||
|
).package_name = metapkg.name
|
||||||
|
|
||||||
|
if pkg.name:
|
||||||
|
l1.label(text=pkg.name)
|
||||||
|
if pkg.description:
|
||||||
|
l2.label(text=pkg.description)
|
||||||
|
l2.enabled = False #Give name more visual weight
|
||||||
|
# }}}
|
||||||
|
|
||||||
|
|
||||||
|
def expanded(metapkg, layout, layoutbox):# {{{
|
||||||
|
"""Draw expanded version of package layout"""
|
||||||
|
|
||||||
|
pkg = metapkg.get_display_version()
|
||||||
|
|
||||||
|
metacol = layoutbox.column(align=True)
|
||||||
|
row1 = layout.row(align=True)
|
||||||
|
if metapkg.installed:
|
||||||
|
row1.operator("package.toggle_enabled",
|
||||||
|
icon='CHECKBOX_HLT' if pkg.enabled else 'CHECKBOX_DEHLT',
|
||||||
|
text="",
|
||||||
|
emboss=False,
|
||||||
|
).package_name = metapkg.name
|
||||||
|
row1.label(pkg.name)
|
||||||
|
|
||||||
|
if metapkg.installed:
|
||||||
|
metacol.active = pkg.enabled
|
||||||
|
row1.active = pkg.enabled
|
||||||
|
|
||||||
|
if pkg.description:
|
||||||
|
row = metacol.row()
|
||||||
|
row.label(pkg.description)
|
||||||
|
|
||||||
|
def draw_metadatum(label: str, value: str, layout: bpy.types.UILayout):
|
||||||
|
"""Draw the given key value pair in a new row in given layout container"""
|
||||||
|
row = layout.row()
|
||||||
|
row.scale_y = .8
|
||||||
|
spl = row.split(.15)
|
||||||
|
spl.label("{}:".format(label))
|
||||||
|
spl.label(value)
|
||||||
|
|
||||||
|
        # don't compare against None here; we don't want to display empty arrays/strings either
        if pkg.location:
            draw_metadatum("Location", pkg.location, metacol)
        if pkg.version:
            draw_metadatum("Version", bpkg.utils.fmt_version(pkg.version), metacol)
        if pkg.blender:
            draw_metadatum("Blender version", bpkg.utils.fmt_version(pkg.blender), metacol)
        if pkg.category:
            draw_metadatum("Category", pkg.category, metacol)
        if pkg.author:
            draw_metadatum("Author", pkg.author, metacol)
        if pkg.support:
            draw_metadatum("Support level", pkg.support.title(), metacol)
        if pkg.warning:
            draw_metadatum("Warning", pkg.warning, metacol)

        metacol.separator()

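        # Bottom row: documentation / bug-tracker links on the left,
        # package action buttons on the right.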
        spl = layoutbox.row().split(.35)
        urlrow = spl.row()
        buttonrow = spl.row(align=True)

        urlrow.alignment = 'LEFT'
        if pkg.wiki_url:
            urlrow.operator("wm.url_open", text="Documentation", icon='HELP').url = pkg.wiki_url
        if pkg.tracker_url:
            urlrow.operator("wm.url_open", text="Report a Bug", icon='URL').url = pkg.tracker_url

        buttonrow.alignment = 'RIGHT'
        draw_operators(metapkg, buttonrow)

        def draw_version(layout: bpy.types.UILayout, pkg: Package):
            """Draw version of package"""
            spl = layout.split(.9)
            left = spl.column()
            right = spl.column()
            right.alignment = 'RIGHT'

            left.label(text=bpkg.utils.fmt_version(pkg.version))

            for repo in pkg.repositories:
                draw_metadatum("Repository", repo.name, left)

            if pkg.installed:
                right.label(text="Installed")

                draw_metadatum("Installed to", str(pkg.installed_location), left)

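        # If more than one version of this package is known, list each one
        # in its own sub-box below the metadata.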
        if len(metapkg.versions) > 1:
            row = pkgbox.row()
            row.label(text="There are multiple versions of this package:")
            for version in metapkg.versions:
                subvbox = pkgbox.box()
                draw_version(subvbox, version)
    # }}}

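    # Tail of the enclosing per-package draw function: pick the expanded or
    # collapsed layout for this package and, if requested, show its preferences.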
    is_expanded = (metapkg.name in bpkg.display.expanded_packages)

    pkgbox = layout.box()
    row = pkgbox.row(align=True)
    row.operator(
            "wm.package_toggle_expand",
            icon='TRIA_DOWN' if is_expanded else 'TRIA_RIGHT',
            emboss=False,
            ).package_name = metapkg.name

    if is_expanded:
        expanded(metapkg, row, pkgbox)
    else:
        collapsed(metapkg, row)  # }}}

    if pkg.installed and pkg.enabled and pkg.name == bpkg.display.preference_package:
        draw_preferences(pkg, pkgbox)

    def center_message(layout, msg: str):
        """draw a label in the center of an extra-tall row"""
        row = layout.row()
        row.label(text=msg)
        row.alignment = 'CENTER'
        row.scale_y = 10

    # Things which should only be run once: initialize repository props and
    # packages. TODO: keeping it here in draw() means it's lazily loaded;
    # it might be better to do this asynchronously on startup.
    if not USERPREF_PT_packages._started:
        USERPREF_PT_packages._started = True
        bpkg.refresh_repository_props()
        bpkg.refresh_packages()
        # Enable all repositories by default
        wm.package_repository_filter = set(repo['name'] for repo in wm.package_repositories)

    if len(bpkg.packages) == 0:
        center_message(pkgzone, "No packages found")
        return

    pkg_errors = bpkg.display.pkg_errors
    if len(pkg_errors) > 0:
        errbox = pkgzone.box()
        for err in pkg_errors:
            row = errbox.row()
            row.label(text=err, icon='ERROR')

    wm = bpy.context.window_manager
    filters = {
        'category': wm.addon_filter,
        'search': wm.package_search,
        'support': wm.addon_support,
        'repository': wm.package_repository_filter,
        'installstate': wm.package_state_filter,
    }
    bpkg.display.displayed_packages = filter_packages(filters, bpkg.packages)

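    # filter_packages() returns the names of the packages that match every
    # filter; each name indexes the global bpkg.packages dict below.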
    for pkgname in bpkg.display.displayed_packages:
        row = pkgzone.row()
        draw_package(bpkg.packages[pkgname], row)


class USERPREF_PT_addons(Panel):

@@ -1559,6 +1932,7 @@ classes = (
     USERPREF_MT_splash_footer,
     USERPREF_PT_interface,
     USERPREF_PT_edit,
+    USERPREF_PT_packages,
     USERPREF_PT_system,
     USERPREF_MT_interface_theme_presets,
     USERPREF_PT_theme,

@@ -594,7 +594,7 @@ typedef enum eUserPref_Section {
 	USER_SECTION_SYSTEM = 3,
 	USER_SECTION_THEME = 4,
 	USER_SECTION_INPUT = 5,
-	USER_SECTION_ADDONS = 6,
+	USER_SECTION_PACKAGES = 6,
 } eUserPref_Section;

 /* UserDef.flag */

@@ -678,6 +678,7 @@ extern StructRNA RNA_UnknownType;
 extern StructRNA RNA_UserPreferences;
 extern StructRNA RNA_UserPreferencesEdit;
 extern StructRNA RNA_UserPreferencesFilePaths;
+extern StructRNA RNA_UserPreferencesPackages;
 extern StructRNA RNA_UserPreferencesInput;
 extern StructRNA RNA_UserPreferencesSystem;
 extern StructRNA RNA_UserPreferencesView;

@@ -339,6 +339,11 @@ static PointerRNA rna_UserDef_filepaths_get(PointerRNA *ptr)
 	return rna_pointer_inherit_refine(ptr, &RNA_UserPreferencesFilePaths, ptr->data);
 }

+static PointerRNA rna_UserDef_packages_get(PointerRNA *ptr)
+{
+	return rna_pointer_inherit_refine(ptr, &RNA_UserPreferencesPackages, ptr->data);
+}
+
 static PointerRNA rna_UserDef_system_get(PointerRNA *ptr)
 {
 	return rna_pointer_inherit_refine(ptr, &RNA_UserPreferencesSystem, ptr->data);

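As with the neighbouring filepaths and system getters, the new packages getter simply refines the same UserDef pointer into the UserPreferencesPackages RNA type registered further down.
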
@@ -4595,6 +4600,18 @@ static void rna_def_userdef_filepaths(BlenderRNA *brna)
 	                         "Enables automatic saving of preview images in the .blend file");
 }

+static void rna_def_userdef_packages(BlenderRNA *brna)
+{
+	PropertyRNA *prop;
+	StructRNA *srna;
+
+	srna = RNA_def_struct(brna, "UserPreferencesPackages", NULL);
+	RNA_def_struct_sdna(srna, "UserDef");
+	RNA_def_struct_nested(brna, srna, "UserPreferences");
+	RNA_def_struct_clear_flag(srna, STRUCT_UNDO);
+	RNA_def_struct_ui_text(srna, "Packages", "Where packages are managed");
+}
+
 static void rna_def_userdef_addon_collection(BlenderRNA *brna, PropertyRNA *cprop)
 {
 	StructRNA *srna;

@@ -4656,7 +4673,7 @@ void RNA_def_userdef(BlenderRNA *brna)
 		{USER_SECTION_INTERFACE, "INTERFACE", 0, "Interface", ""},
 		{USER_SECTION_EDIT, "EDITING", 0, "Editing", ""},
 		{USER_SECTION_INPUT, "INPUT", 0, "Input", ""},
-		{USER_SECTION_ADDONS, "ADDONS", 0, "Add-ons", ""},
+		{USER_SECTION_PACKAGES, "PACKAGES", 0, "Packages", ""},
 		{USER_SECTION_THEME, "THEMES", 0, "Themes", ""},
 		{USER_SECTION_FILE, "FILES", 0, "File", ""},
 		{USER_SECTION_SYSTEM, "SYSTEM", 0, "System", ""},

@@ -4730,7 +4747,13 @@ void RNA_def_userdef(BlenderRNA *brna)
 	RNA_def_property_struct_type(prop, "UserPreferencesFilePaths");
 	RNA_def_property_pointer_funcs(prop, "rna_UserDef_filepaths_get", NULL, NULL, NULL);
 	RNA_def_property_ui_text(prop, "File Paths", "Default paths for external files");

+	prop = RNA_def_property(srna, "packages", PROP_POINTER, PROP_NONE);
+	RNA_def_property_flag(prop, PROP_NEVER_NULL);
+	RNA_def_property_struct_type(prop, "UserPreferencesPackages");
+	RNA_def_property_pointer_funcs(prop, "rna_UserDef_packages_get", NULL, NULL, NULL);
+	RNA_def_property_ui_text(prop, "Packages", "test");
+
 	prop = RNA_def_property(srna, "system", PROP_POINTER, PROP_NONE);
 	RNA_def_property_flag(prop, PROP_NEVER_NULL);
 	RNA_def_property_struct_type(prop, "UserPreferencesSystem");

@@ -4747,6 +4770,7 @@ void RNA_def_userdef(BlenderRNA *brna)
 	rna_def_userdef_edit(brna);
 	rna_def_userdef_input(brna);
 	rna_def_userdef_filepaths(brna);
+	rna_def_userdef_packages(brna);
 	rna_def_userdef_system(brna);
 	rna_def_userdef_addon(brna);
 	rna_def_userdef_addon_pref(brna);
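
For orientation, a minimal sketch (not part of the patch) of how the RNA additions above would surface in Blender's Python console once applied; `packages` and the 'PACKAGES' enum item are the names these hunks define, and `active_section` is the pre-existing UserPreferences property whose item list they change:

    import bpy

    prefs = bpy.context.user_preferences
    # Pointer added by rna_def_userdef_packages() / rna_UserDef_packages_get()
    print(prefs.packages)              # -> a bpy.types.UserPreferencesPackages instance
    # Section item that replaces USER_SECTION_ADDONS in the enum above
    prefs.active_section = 'PACKAGES'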