import asyncio
import json
import os
import functools
import logging
from contextlib import closing, contextmanager
import urllib.parse
import pathlib

import requests
import requests.structures
import pillarsdk
import pillarsdk.exceptions
import pillarsdk.utils
from pillarsdk.utils import sanitize_filename

from . import cache

SUBCLIENT_ID = 'PILLAR'

_pillar_api = {}  # will become a mapping from bool (cached/non-cached) to pillarsdk.Api objects.
log = logging.getLogger(__name__)
uncached_session = requests.session()
_testing_blender_id_profile = None  # Just for testing, overrides what is returned by blender_id_profile.
_downloaded_urls = set()  # URLs we've downloaded this Blender session.


class UserNotLoggedInError(RuntimeError):
    """Raised when the user should be logged in on Blender ID, but isn't.

    This is basically for every interaction with Pillar.
    """

    def __str__(self):
        return self.__class__.__name__


class CredentialsNotSyncedError(UserNotLoggedInError):
    """Raised when the user may be logged in on Blender ID, but has no Blender Cloud token."""


class NotSubscribedToCloudError(UserNotLoggedInError):
    """Raised when the user is logged in on Blender ID, but is not subscribed to the Cloud."""


class PillarError(RuntimeError):
    """Raised when there is some issue with the communication with Pillar.

    This is only raised for logical errors (for example nodes that should
    exist but still can't be found). HTTP communication errors are signalled
    with other exceptions.
    """


class CloudPath(pathlib.PurePosixPath):
    """Cloud path, in the form of /project uuid/node uuid/node uuid/...

    The components are:
        - the root '/'
        - the project UUID
        - zero or more node UUIDs.
    """

    @property
    def project_uuid(self) -> str:
        assert self.parts[0] == '/'
        if len(self.parts) <= 1:
            return None
        return self.parts[1]

    @property
    def node_uuids(self) -> tuple:
        assert self.parts[0] == '/'
        return self.parts[2:]

    @property
    def node_uuid(self) -> str:
        if len(self.parts) <= 2:
            return None
        return self.parts[-1]


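# A quick illustration with hypothetical UUIDs: CloudPath('/project-uuid/node-a/node-b')
# has project_uuid 'project-uuid', node_uuids ('node-a', 'node-b'), and node_uuid 'node-b'.

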
@contextmanager
def with_existing_dir(filename: str, open_mode: str, encoding=None):
    """Opens a file, ensuring its directory exists."""

    directory = os.path.dirname(filename)
    if not os.path.exists(directory):
        log.debug('Creating directory %s', directory)
        os.makedirs(directory, exist_ok=True)
    with open(filename, open_mode, encoding=encoding) as file_object:
        yield file_object


def save_as_json(pillar_resource, json_filename):
    with with_existing_dir(json_filename, 'w') as outfile:
        log.debug('Saving metadata to %r', json_filename)
        json.dump(pillar_resource, outfile, sort_keys=True, cls=pillarsdk.utils.PillarJSONEncoder)


def blender_id_profile() -> 'blender_id.BlenderIdProfile':
    """Returns the Blender ID profile of the currently logged in user."""

    # Allow overriding before we import the bpy module.
    if _testing_blender_id_profile is not None:
        return _testing_blender_id_profile

    import blender_id
    return blender_id.get_active_profile()


def blender_id_subclient() -> dict:
    """Returns the subclient dict, containing the 'subclient_user_id' and 'token' keys."""

    profile = blender_id_profile()
    if not profile:
        raise UserNotLoggedInError()

    subclient = profile.subclients.get(SUBCLIENT_ID)
    if not subclient:
        raise CredentialsNotSyncedError()

    return subclient


def pillar_api(pillar_endpoint: str = None, caching=True) -> pillarsdk.Api:
    """Returns the Pillar SDK API object for the current user.

    The user must be logged in.

    :param pillar_endpoint: URL of the Pillar server, for testing purposes. If not specified,
        it will use the addon preferences.
    :param caching: whether to return a caching or non-caching API
    """

    global _pillar_api

    # Only return the Pillar API object if the user is still logged in.
    subclient = blender_id_subclient()

    if not _pillar_api:
        # Allow overriding the endpoint before importing Blender-specific stuff.
        if pillar_endpoint is None:
            from . import blender
            pillar_endpoint = blender.preferences().pillar_server

        _caching_api = pillarsdk.Api(endpoint=pillar_endpoint,
                                     username=subclient['subclient_user_id'],
                                     password=SUBCLIENT_ID,
                                     token=subclient['token'])
        _caching_api.requests_session = cache.requests_session()

        _noncaching_api = pillarsdk.Api(endpoint=pillar_endpoint,
                                        username=subclient['subclient_user_id'],
                                        password=SUBCLIENT_ID,
                                        token=subclient['token'])
        _noncaching_api.requests_session = uncached_session

        _pillar_api = {
            True: _caching_api,
            False: _noncaching_api,
        }

    return _pillar_api[caching]


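# Both Api objects share the same credentials; only their requests session differs.
# For example, pillar_api(caching=False) returns the API object whose requests
# bypass the on-disk cache and go through uncached_session instead.

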
# No more than this many Pillar calls should be made simultaneously
pillar_semaphore = asyncio.Semaphore(3)


async def pillar_call(pillar_func, *args, caching=True, **kwargs):
    """Calls a Pillar SDK function on the event loop's executor, bounded by pillar_semaphore."""

    partial = functools.partial(pillar_func, *args, api=pillar_api(caching=caching), **kwargs)
    loop = asyncio.get_event_loop()

    async with pillar_semaphore:
        return await loop.run_in_executor(None, partial)


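# Example usage from a coroutine, as done elsewhere in this module:
#     project = await pillar_call(pillarsdk.Project.find_one, {'where': {'url': project_url}})

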
async def check_pillar_credentials():
    """Tries to obtain the user at Pillar using the user's credentials.

    :raises UserNotLoggedInError: when the user is not logged in on Blender ID.
    :raises CredentialsNotSyncedError: when the user is logged in on Blender ID but
        doesn't have a valid subclient token for Pillar.
    :raises NotSubscribedToCloudError: when the user has neither the 'subscriber' nor
        the 'demo' role at Pillar.
    :returns: the Pillar User ID of the current user.
    """

    profile = blender_id_profile()
    if not profile:
        raise UserNotLoggedInError()

    subclient = profile.subclients.get(SUBCLIENT_ID)
    if not subclient:
        raise CredentialsNotSyncedError()

    pillar_user_id = subclient['subclient_user_id']
    if not pillar_user_id:
        raise CredentialsNotSyncedError()

    try:
        db_user = await pillar_call(pillarsdk.User.me)
    except (pillarsdk.UnauthorizedAccess, pillarsdk.ResourceNotFound, pillarsdk.ForbiddenAccess):
        raise CredentialsNotSyncedError()

    roles = db_user.roles
    log.debug('User has roles %r', roles)
    if not roles or not {'subscriber', 'demo'}.intersection(set(roles)):
        # Delete the subclient info. This forces a re-check later, which can
        # then pick up on the user's new status.
        del profile.subclients[SUBCLIENT_ID]
        profile.save_json()
        raise NotSubscribedToCloudError()

    return pillar_user_id


async def refresh_pillar_credentials():
    """Refreshes the authentication token on Pillar.

    :raises CredentialsNotSyncedError: when Blender ID refuses to send a token to Pillar.
    :raises Exception: when the Pillar credential check fails.
    """

    global _pillar_api

    import blender_id

    from . import blender
    pillar_endpoint = blender.preferences().pillar_server.rstrip('/')

    # Create a subclient token and send it to Pillar.
    # May raise a blender_id.BlenderIdCommError, which is translated below.
    try:
        blender_id.create_subclient_token(SUBCLIENT_ID, pillar_endpoint)
    except blender_id.communication.BlenderIdCommError as ex:
        log.warning("Unable to create authentication token: %s", ex)
        raise CredentialsNotSyncedError()

    # Test the new token by clearing the cached API objects and re-checking.
    _pillar_api = {}
    return await check_pillar_credentials()


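# A typical caller-side flow (a sketch, not prescribed by this module): call
# check_pillar_credentials() first, and only fall back to refresh_pillar_credentials()
# when that raises CredentialsNotSyncedError.

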
async def get_project_uuid(project_url: str) -> str:
    """Returns the UUID for the project, given its '/p/<project_url>' string."""

    try:
        project = await pillar_call(pillarsdk.Project.find_one, {
            'where': {'url': project_url},
            'projection': {'permissions': 1},
        })
    except pillarsdk.exceptions.ResourceNotFound:
        log.error('Project with URL %r does not exist', project_url)
        return None

    log.info('Found project %r', project)
    return project['_id']


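# For example (hypothetical project URL), await get_project_uuid('textures') returns
# the '_id' of the project served at '/p/textures', or None if it doesn't exist.

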
async def get_nodes(project_uuid: str = None, parent_node_uuid: str = None,
                    node_type=None) -> list:
    """Gets nodes for either a project or given a parent node.

    @param project_uuid: the UUID of the project, or None if only querying by parent_node_uuid.
    @param parent_node_uuid: the UUID of the parent node. Can be the empty string if the
        node should be a top-level node in the project. Can also be None to query all nodes in a
        project. In both these cases the project UUID should be given.
    @param node_type: the node type to filter on; either a single type as a string,
        or an iterable of types.
    """

    if not project_uuid and not parent_node_uuid:
        raise ValueError('get_nodes(): either project_uuid or parent_node_uuid must be given.')

    where = {'properties.status': 'published'}

    # Build the parent node where-clause
    if parent_node_uuid == '':
        where['parent'] = {'$exists': False}
    elif parent_node_uuid is not None:
        where['parent'] = parent_node_uuid

    # Build the project where-clause
    if project_uuid:
        where['project'] = project_uuid

    if node_type:
        if isinstance(node_type, str):
            where['node_type'] = node_type
        else:
            where['node_type'] = {'$in': node_type}

    children = await pillar_call(pillarsdk.Node.all, {
        'projection': {'name': 1, 'parent': 1, 'node_type': 1,
                       'properties.order': 1, 'properties.status': 1,
                       'properties.files': 1,
                       'properties.content_type': 1, 'picture': 1},
        'where': where,
        'embed': ['parent']})

    return children['_items']


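# For example, get_nodes(project_uuid='<project-uuid>', parent_node_uuid='',
# node_type='texture') queries published top-level texture nodes with the where-clause
# {'properties.status': 'published', 'parent': {'$exists': False},
#  'project': '<project-uuid>', 'node_type': 'texture'}.

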
async def get_texture_projects() -> list:
    """Returns project dicts that contain textures."""

    try:
        children = await pillar_call(pillarsdk.Project.all_from_endpoint,
                                     '/bcloud/texture-libraries')
    except pillarsdk.ResourceNotFound as ex:
        log.warning('Unable to find texture projects: %s', ex)
        raise PillarError('Unable to find texture projects: %s' % ex)

    return children['_items']


async def download_to_file(url, filename, *,
                           header_store: str,
                           chunk_size=100 * 1024,
                           future: asyncio.Future = None):
    """Downloads a file via HTTP(S) directly to the filesystem."""

    stored_headers = {}
    if os.path.exists(filename) and os.path.exists(header_store):
        log.debug('Loading cached headers %r', header_store)
        try:
            with open(header_store, 'r') as infile:
                stored_headers = requests.structures.CaseInsensitiveDict(json.load(infile))

            # Check file length.
            expected_content_length = int(stored_headers['Content-Length'])
            statinfo = os.stat(filename)
            if expected_content_length == statinfo.st_size:
                # File exists, and is of the correct length. Don't bother downloading again
                # if we already downloaded it this session.
                if url in _downloaded_urls:
                    log.debug('Already downloaded %s this session, skipping this request.',
                              url)
                    return
            else:
                log.debug('File size should be %i but is %i; ignoring cache.',
                          expected_content_length, statinfo.st_size)
                stored_headers = {}
        except Exception as ex:
            log.warning('Unable to load headers from %r, ignoring cache: %s',
                        header_store, str(ex))

    loop = asyncio.get_event_loop()

    # Separated doing the GET and downloading the body of the GET, so that we can cancel
    # the download in between.

    def perform_get_request() -> requests.Response:
        headers = {}
        if stored_headers.get('Last-Modified'):
            headers['If-Modified-Since'] = stored_headers['Last-Modified']
        if stored_headers.get('ETag'):
            headers['If-None-Match'] = stored_headers['ETag']

        if is_cancelled(future):
            log.debug('Downloading was cancelled before doing the GET.')
            raise asyncio.CancelledError('Downloading was cancelled')
        log.debug('Performing GET request, waiting for response.')
        return uncached_session.get(url, headers=headers, stream=True, verify=True)

    # Download the file in a different thread.
    def download_loop():
        with with_existing_dir(filename, 'wb') as outfile:
            with closing(response):
                for block in response.iter_content(chunk_size=chunk_size):
                    if is_cancelled(future):
                        raise asyncio.CancelledError('Downloading was cancelled')
                    outfile.write(block)

    # Check for cancellation even before we start our GET request
    if is_cancelled(future):
        log.debug('Downloading was cancelled before doing the GET')
        raise asyncio.CancelledError('Downloading was cancelled')

    log.debug('Performing GET %s', url)
    response = await loop.run_in_executor(None, perform_get_request)
    log.debug('Status %i from GET %s', response.status_code, url)
    response.raise_for_status()

    if response.status_code == 304:
        # The file we have cached is still good, just use that instead.
        _downloaded_urls.add(url)
        return

    # After we performed the GET request, we should check whether we should start
    # the download at all.
    if is_cancelled(future):
        log.debug('Downloading was cancelled before downloading the GET response')
        raise asyncio.CancelledError('Downloading was cancelled')

    log.debug('Downloading response of GET %s', url)
    await loop.run_in_executor(None, download_loop)
    log.debug('Done downloading response of GET %s', url)

    # We're done downloading, now we have something cached we can use.
    log.debug('Saving header cache to %s', header_store)
    _downloaded_urls.add(url)

    with with_existing_dir(header_store, 'w') as outfile:
        json.dump({
            'ETag': str(response.headers.get('etag', '')),
            'Last-Modified': response.headers.get('Last-Modified'),
            'Content-Length': response.headers.get('Content-Length'),
        }, outfile, sort_keys=True)


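# The header store is a small JSON sidecar file; with hypothetical values it looks like:
#     {"Content-Length": "16384", "ETag": "\"abc123\"",
#      "Last-Modified": "Tue, 22 Mar 2016 10:41:35 GMT"}

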
async def fetch_thumbnail_info(file: pillarsdk.File, directory: str, desired_size: str):
    """Fetches thumbnail information from Pillar.

    @param file: the pillar File object that represents the image whose thumbnail to download.
    @param directory: the directory to save the file to.
    @param desired_size: thumbnail size
    @return: (url, path), where 'url' is the URL to download the thumbnail from, and 'path'
        is the absolute path where the thumbnail should be downloaded to. Returns (None, None)
        if the task was cancelled before downloading finished.
    """

    thumb_link = await pillar_call(file.thumbnail, desired_size)

    if thumb_link is None:
        raise ValueError("File {} has no thumbnail of size {}"
                         .format(file['_id'], desired_size))

    root, ext = os.path.splitext(file['file_path'])
    thumb_fname = sanitize_filename('{0}-{1}.jpg'.format(root, desired_size))
    thumb_path = os.path.abspath(os.path.join(directory, thumb_fname))

    return thumb_link, thumb_path


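# For example, a File with file_path 'wood.png' and desired_size 'm' results in the
# thumbnail filename sanitize_filename('wood-m.jpg'), resolved inside 'directory'.

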
async def fetch_texture_thumbs(parent_node_uuid: str, desired_size: str,
                               thumbnail_directory: str,
                               *,
                               thumbnail_loading: callable,
                               thumbnail_loaded: callable,
                               future: asyncio.Future = None):
    """Fetches all texture thumbnails in a certain parent node.

    @param parent_node_uuid: the UUID of the parent node. All sub-nodes will be downloaded.
    @param desired_size: size indicator, from 'sbtmlh'.
    @param thumbnail_directory: directory in which to store the downloaded thumbnails.
    @param thumbnail_loading: callback function that takes (pillarsdk.Node, pillarsdk.File)
        parameters, which is called before a thumbnail will be downloaded. This allows you to
        show a "downloading" indicator.
    @param thumbnail_loaded: callback function that takes (pillarsdk.Node, pillarsdk.File object,
        thumbnail path) parameters, which is called for every thumbnail after it's been
        downloaded.
    @param future: Future that's inspected; if it is not None and cancelled, texture downloading
        is aborted.
    """

    # Download all texture nodes in parallel.
    log.debug('Getting child nodes of node %r', parent_node_uuid)
    texture_nodes = await get_nodes(parent_node_uuid=parent_node_uuid,
                                    node_type='texture')

    if is_cancelled(future):
        log.warning('fetch_texture_thumbs: Texture downloading cancelled')
        return

    coros = (download_texture_thumbnail(texture_node, desired_size,
                                        thumbnail_directory,
                                        thumbnail_loading=thumbnail_loading,
                                        thumbnail_loaded=thumbnail_loaded,
                                        future=future)
             for texture_node in texture_nodes)

    # raises any exception from failed download_texture_thumbnail() calls.
    await asyncio.gather(*coros)

    log.info('fetch_texture_thumbs: Done downloading texture thumbnails')


async def download_texture_thumbnail(texture_node, desired_size: str,
                                     thumbnail_directory: str,
                                     *,
                                     thumbnail_loading: callable,
                                     thumbnail_loaded: callable,
                                     future: asyncio.Future = None):
    # Skip non-texture nodes, as we can't thumbnail them anyway.
    if texture_node['node_type'] != 'texture':
        return

    if is_cancelled(future):
        log.debug('fetch_texture_thumbs cancelled before finding File for texture %r',
                  texture_node['_id'])
        return

    loop = asyncio.get_event_loop()

    # Find out which file to use for the thumbnail picture.
    pic_uuid = texture_node.picture
    if not pic_uuid:
        # Fall back to the first texture file, if it exists.
        log.debug('Node %r does not have a picture, falling back to first file.',
                  texture_node['_id'])
        files = texture_node.properties and texture_node.properties.files
        if not files:
            log.info('Node %r does not have a picture nor files, skipping.', texture_node['_id'])
            return
        pic_uuid = files[0].file
        if not pic_uuid:
            log.info('Node %r does not have a picture nor files, skipping.', texture_node['_id'])
            return

    # Load the File that belongs to this texture node's picture. The File document
    # isn't known yet at this point, so the node is passed for both callback parameters.
    loop.call_soon_threadsafe(thumbnail_loading, texture_node, texture_node)
    file_desc = await pillar_call(pillarsdk.File.find, pic_uuid, params={
        'projection': {'filename': 1, 'variations': 1, 'width': 1, 'height': 1},
    })

    if file_desc is None:
        log.warning('Unable to find file for texture node %s', pic_uuid)
        thumb_path = None
    else:
        if is_cancelled(future):
            log.debug('fetch_texture_thumbs cancelled before downloading file %r',
                      file_desc['_id'])
            return

        # Get the thumbnail information from Pillar
        thumb_url, thumb_path = await fetch_thumbnail_info(file_desc, thumbnail_directory,
                                                           desired_size)
        if thumb_path is None:
            # The task got cancelled, we should abort too.
            log.debug('fetch_texture_thumbs cancelled while downloading file %r',
                      file_desc['_id'])
            return

        # Cached headers are stored next to thumbnails in sidecar files.
        header_store = '%s.headers' % thumb_path

        await download_to_file(thumb_url, thumb_path, header_store=header_store, future=future)

    loop.call_soon_threadsafe(thumbnail_loaded, texture_node, file_desc, thumb_path)


async def download_file_by_uuid(file_uuid,
                                target_directory: str,
                                metadata_directory: str,
                                *,
                                map_type: str = None,
                                file_loading: callable = None,
                                file_loaded: callable = None,
                                future: asyncio.Future):
    if is_cancelled(future):
        log.debug('download_file_by_uuid(%r) cancelled.', file_uuid)
        return

    loop = asyncio.get_event_loop()

    # Find the File document.
    file_desc = await pillar_call(pillarsdk.File.find, file_uuid, params={
        'projection': {'link': 1, 'filename': 1},
    })

    # Save the file document to disk
    metadata_file = os.path.join(metadata_directory, 'files', '%s.json' % file_uuid)
    save_as_json(file_desc, metadata_file)

    root, ext = os.path.splitext(file_desc['filename'])
    if map_type is None or root.endswith(map_type):
        target_filename = '%s%s' % (root, ext)
    else:
        target_filename = '%s-%s%s' % (root, map_type, ext)

    file_path = os.path.join(target_directory, sanitize_filename(target_filename))
    file_url = file_desc['link']
    # log.debug('Texture %r:\n%s', file_uuid, pprint.pformat(file_desc.to_dict()))
    if file_loading is not None:
        loop.call_soon_threadsafe(file_loading, file_path, file_desc)

    # Cached headers are stored in the project space
    header_store = os.path.join(metadata_directory, 'files',
                                sanitize_filename('%s.headers' % file_uuid))

    await download_to_file(file_url, file_path, header_store=header_store, future=future)

    if file_loaded is not None:
        loop.call_soon_threadsafe(file_loaded, file_path, file_desc)


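# Filename examples: a file 'wood.png' with map_type 'spec' is saved as 'wood-spec.png',
# while a file already named 'wood_spec.png' with map_type 'spec' keeps its name, since
# the filename root already ends in the map type.

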
async def download_texture(texture_node,
                           target_directory: str,
                           metadata_directory: str,
                           *,
                           texture_loading: callable,
                           texture_loaded: callable,
                           future: asyncio.Future):
    if texture_node['node_type'] != 'texture':
        raise TypeError("Node type should be 'texture', not %r" % texture_node['node_type'])

    # Download every file. Eve doesn't support embedding from a list-of-dicts.
    downloaders = (download_file_by_uuid(file_info['file'],
                                         target_directory,
                                         metadata_directory,
                                         map_type=file_info['map_type'],
                                         file_loading=texture_loading,
                                         file_loaded=texture_loaded,
                                         future=future)
                   for file_info in texture_node['properties']['files'])

    return await asyncio.gather(*downloaders, return_exceptions=True)


async def upload_file(project_id: str, file_path: pathlib.Path, *,
                      future: asyncio.Future) -> str:
    """Uploads a file to the Blender Cloud, returning a file document ID."""

    from .blender import PILLAR_SERVER_URL

    loop = asyncio.get_event_loop()
    url = urllib.parse.urljoin(PILLAR_SERVER_URL, '/storage/stream/%s' % project_id)

    # Upload the file in a different thread.
    def upload():
        auth_token = blender_id_subclient()['token']

        with file_path.open(mode='rb') as infile:
            return uncached_session.post(url,
                                         files={'file': infile},
                                         auth=(auth_token, SUBCLIENT_ID))

    # Check for cancellation even before we start our POST request
    if is_cancelled(future):
        log.debug('Uploading was cancelled before doing the POST')
        raise asyncio.CancelledError('Uploading was cancelled')

    log.debug('Performing POST %s', url)
    response = await loop.run_in_executor(None, upload)
    log.debug('Status %i from POST %s', response.status_code, url)
    response.raise_for_status()

    resp = response.json()
    log.debug('Upload response: %s', resp)

    try:
        file_id = resp['file_id']
    except KeyError:
        log.error('No file ID in upload response: %s', resp)
        raise PillarError('No file ID in upload response: %s' % resp)

    log.info('Uploaded %s to file ID %s', file_path, file_id)
    return file_id


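# Example usage from a coroutine (hypothetical project ID and path; passing
# future=None disables cancellation checks, see is_cancelled() below):
#     file_id = await upload_file('project-uuid', pathlib.Path('/tmp/image.png'),
#                                 future=None)

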
def is_cancelled(future: asyncio.Future) -> bool:
    # assert future is not None  # for debugging purposes.
    cancelled = future is not None and future.cancelled()
    return cancelled