2016-03-11 17:52:12 +01:00
|
|
|
import asyncio
|
2016-03-21 17:21:51 +01:00
|
|
|
import json
|
2016-03-08 16:22:20 +01:00
|
|
|
import os
|
2016-03-09 17:34:37 +01:00
|
|
|
import functools
|
2016-03-15 13:47:21 +01:00
|
|
|
import logging
|
2016-03-15 14:05:54 +01:00
|
|
|
from contextlib import closing
|
|
|
|
|
|
|
|
import requests
|
2016-03-21 17:21:51 +01:00
|
|
|
import requests.structures
|
2016-03-08 16:22:20 +01:00
|
|
|
import pillarsdk
|
2016-03-08 17:56:32 +01:00
|
|
|
import pillarsdk.exceptions
|
2016-03-11 17:52:12 +01:00
|
|
|
import pillarsdk.utils
|
2016-03-08 17:56:32 +01:00
|
|
|
|
2016-03-21 09:32:10 +01:00
|
|
|
from . import http_cache
|
2016-03-18 16:53:52 +01:00
|
|
|
|
2016-03-08 17:56:32 +01:00
|
|
|
# Module-level singleton; will become a pillarsdk.Api object on first pillar_api() call.
_pillar_api = None  # will become a pillarsdk.Api object.

log = logging.getLogger(__name__)

# Plain requests session used for direct downloads (download_to_file does its own
# header-based caching via sidecar files, so no HTTP-cache wrapper here).
uncached_session = requests.session()

_testing_blender_id_profile = None  # Just for testing, overrides what is returned by blender_id_profile.
|
2016-03-08 17:56:32 +01:00
|
|
|
|
|
|
|
|
|
|
|
class UserNotLoggedInError(RuntimeError):
    """Error raised when a Blender ID login is required but absent.

    Practically every interaction with Pillar requires a logged-in user,
    so most functions in this module can raise this.
    """
|
|
|
|
|
|
|
|
|
|
|
|
def blender_id_profile() -> dict:
    """Returns the Blender ID profile of the currently logged in user.

    Returns None when nobody is logged in. During testing the module-level
    _testing_blender_id_profile override is returned instead of asking Blender.
    """

    # Check the testing override first, so tests never have to import bpy.
    if _testing_blender_id_profile is not None:
        return _testing_blender_id_profile

    import bpy

    if not getattr(bpy.context.window_manager, 'blender_id_active_profile', None):
        return None

    import blender_id.profiles
    return blender_id.profiles.get_active_profile()
|
|
|
|
|
|
|
|
|
2016-03-21 11:46:47 +01:00
|
|
|
def pillar_api(pillar_endpoint: str = None) -> pillarsdk.Api:
    """Returns the Pillar SDK API object for the current user.

    The user must be logged in.

    :param pillar_endpoint: URL of the Pillar server, for testing purposes. If not specified,
        it will use the addon preferences.
    :raises UserNotLoggedInError: when there is no active Blender ID profile.
    """

    global _pillar_api

    # Refuse to hand out an API object when the user is no longer logged in.
    profile = blender_id_profile()
    if not profile:
        raise UserNotLoggedInError()

    # Re-use the cached singleton when we already have one.
    if _pillar_api is not None:
        return _pillar_api

    # Allow overriding the endpoint before importing Blender-specific stuff.
    if pillar_endpoint is None:
        from . import blender
        pillar_endpoint = blender.preferences().pillar_server

    # Install the caching requests session on the SDK before building the Api object.
    pillarsdk.Api.requests_session = http_cache.requests_session()

    _pillar_api = pillarsdk.Api(endpoint=pillar_endpoint,
                                username=profile['username'],
                                password=None,
                                token=profile['token'])
    return _pillar_api
|
|
|
|
|
|
|
|
|
2016-03-11 17:52:12 +01:00
|
|
|
async def get_project_uuid(project_url: str) -> str:
    """Returns the UUID for the project, given its '/p/<project_url>' string.

    Returns None when no project with that URL exists.
    """

    lookup_params = {
        'where': {'url': project_url},
        'projection': {'permissions': 1},
    }
    find_one = functools.partial(pillarsdk.Project.find_one, lookup_params,
                                 api=pillar_api())

    # The SDK call blocks on HTTP, so run it on the default executor.
    loop = asyncio.get_event_loop()
    try:
        project = await loop.run_in_executor(None, find_one)
    except pillarsdk.exceptions.ResourceNotFound:
        log.error('Project with URL %r does not exist', project_url)
        return None

    log.info('Found project %r', project)
    return project['_id']
|
|
|
|
|
|
|
|
|
2016-03-15 14:05:54 +01:00
|
|
|
async def get_nodes(project_uuid: str = None, parent_node_uuid: str = None,
                    node_type: str = None) -> list:
    """Gets nodes for either a project or given a parent node.

    @param project_uuid: the UUID of the project, or None if only querying by parent_node_uuid.
    @param parent_node_uuid: the UUID of the parent node. Can be the empty string if the
        node should be a top-level node in the project. Can also be None to query all nodes in a
        project. In both these cases the project UUID should be given.
    @param node_type: optional node type to filter on (e.g. 'texture').
    """

    if not project_uuid and not parent_node_uuid:
        raise ValueError('get_nodes(): either project_uuid or parent_node_uuid must be given.')

    # Only published nodes are ever returned.
    where = {'properties.status': 'published'}

    # Parent filter: '' means "top-level node" (no parent at all), a UUID selects
    # children of that node, and None leaves the parent unconstrained.
    if parent_node_uuid == '':
        where['parent'] = {'$exists': False}
    elif parent_node_uuid is not None:
        where['parent'] = parent_node_uuid

    if project_uuid:
        where['project'] = project_uuid
    if node_type:
        where['node_type'] = node_type

    query = {
        'projection': {'name': 1, 'parent': 1, 'node_type': 1,
                       'properties.order': 1, 'properties.status': 1,
                       'properties.files': 1,
                       'properties.content_type': 1, 'picture': 1},
        'where': where,
        'sort': 'properties.order',
        'embed': ['parent'],
    }
    fetch_nodes = functools.partial(pillarsdk.Node.all, query, api=pillar_api())

    # Run the blocking SDK call on the default executor.
    loop = asyncio.get_event_loop()
    result = await loop.run_in_executor(None, fetch_nodes)

    return result['_items']
|
2016-03-09 14:09:06 +01:00
|
|
|
|
|
|
|
|
2016-03-21 17:21:51 +01:00
|
|
|
async def download_to_file(url, filename, *,
                           header_store: str,
                           chunk_size=100 * 1024,
                           future: asyncio.Future = None):
    """Downloads a file via HTTP(S) directly to the filesystem.

    Keeps a JSON sidecar file (header_store) with the previous response's
    ETag / Last-Modified / Content-Length headers, and uses those to perform a
    conditional GET. On a 304 Not Modified response the already-downloaded
    file is kept as-is.

    :param url: the URL to download from.
    :param filename: path the response body is written to.
    :param header_store: path of the sidecar file holding cached headers.
    :param chunk_size: number of bytes written per loop iteration.
    :param future: when given and cancelled, downloading is aborted by
        raising asyncio.CancelledError.
    :raises requests.HTTPError: when the server responds with an error status.
    """

    stored_headers = {}
    if os.path.exists(filename) and os.path.exists(header_store):
        log.debug('Loading cached headers %r', header_store)
        try:
            with open(header_store, 'r') as infile:
                stored_headers = requests.structures.CaseInsensitiveDict(json.load(infile))

            # Check file length; a mismatch means the previous download was
            # incomplete, so the cached copy cannot be trusted.
            expected_content_length = int(stored_headers['Content-Length'])
            statinfo = os.stat(filename)
            if expected_content_length != statinfo.st_size:
                log.debug('File size should be %i but is %i; ignoring cache.',
                          expected_content_length, statinfo.st_size)
                stored_headers = {}
        except Exception as ex:
            # Best-effort cache: any problem reading/parsing the sidecar just
            # results in a full (non-conditional) download.
            log.warning('Unable to load headers from %r, ignoring cache: %s', header_store, str(ex))

    loop = asyncio.get_event_loop()

    # Separated doing the GET and downloading the body of the GET, so that we can cancel
    # the download in between.

    def perform_get_request() -> requests.Response:
        # FIX: was annotated '-> requests.Request'; Session.get() returns a Response.
        headers = {}
        try:
            if stored_headers['Last-Modified']:
                headers['If-Modified-Since'] = stored_headers['Last-Modified']
        except KeyError:
            pass
        try:
            if stored_headers['ETag']:
                headers['If-None-Match'] = stored_headers['ETag']
        except KeyError:
            pass

        return uncached_session.get(url, headers=headers, stream=True, verify=True)

    # Download the file in a different thread.
    def download_loop():
        os.makedirs(os.path.dirname(filename), exist_ok=True)

        with closing(response), open(filename, 'wb') as outfile:
            for block in response.iter_content(chunk_size=chunk_size):
                if is_cancelled(future):
                    raise asyncio.CancelledError('Downloading was cancelled')
                outfile.write(block)

    # Check for cancellation even before we start our GET request
    if is_cancelled(future):
        log.debug('Downloading was cancelled before doing the GET')
        raise asyncio.CancelledError('Downloading was cancelled')

    log.debug('Performing GET %s', url)
    response = await loop.run_in_executor(None, perform_get_request)
    log.debug('Status %i from GET %s', response.status_code, url)
    response.raise_for_status()

    if response.status_code == 304:
        # The file we have cached is still good, just use that instead.
        return

    # After we performed the GET request, we should check whether we should start
    # the download at all.
    if is_cancelled(future):
        log.debug('Downloading was cancelled before downloading the GET response')
        raise asyncio.CancelledError('Downloading was cancelled')

    log.debug('Downloading response of GET %s', url)
    await loop.run_in_executor(None, download_loop)
    log.debug('Done downloading response of GET %s', url)

    # We're done downloading, now we have something cached we can use.
    log.debug('Saving header cache to %s', header_store)
    with open(header_store, 'w') as outfile:
        json.dump({
            'ETag': str(response.headers.get('etag', '')),
            'Last-Modified': response.headers.get('Last-Modified'),
            'Content-Length': response.headers.get('Content-Length'),
        }, outfile, sort_keys=True)
|
|
|
|
|
2016-03-15 14:05:54 +01:00
|
|
|
|
2016-03-15 14:23:11 +01:00
|
|
|
async def fetch_thumbnail_info(file: pillarsdk.File, directory: str, desired_size: str, *,
                               future: asyncio.Future = None):
    """Fetches thumbnail information from Pillar.

    @param file: the pillar File object that represents the image whose thumbnail to download.
    @param directory: the directory to save the file to.
    @param desired_size: thumbnail size
    @param future: when given and cancelled, fetching is aborted early.
    @return: (url, path), where 'url' is the URL to download the thumbnail from, and 'path'
        is the absolute path where the thumbnail should be downloaded to. Returns None, None
        if the task was cancelled before downloading finished.
    """

    api = pillar_api()

    if is_cancelled(future):
        log.debug('stream_thumb_to_file(): cancelled before fetching thumbnail URL from Pillar')
        return None, None

    # The SDK call blocks on HTTP, so push it onto the default executor.
    find_thumbnail = functools.partial(file.thumbnail_file, desired_size, api=api)
    loop = asyncio.get_event_loop()
    thumb_link = await loop.run_in_executor(None, find_thumbnail)

    if thumb_link is None:
        raise ValueError("File {} has no thumbnail of size {}"
                         .format(file['_id'], desired_size))

    if is_cancelled(future):
        log.debug('stream_thumb_to_file(): cancelled before downloading file')
        return None, None

    # Local thumbnail name: original file path without extension, plus size tag.
    root = os.path.splitext(file['file_path'])[0]
    thumb_fname = "{0}-{1}.jpg".format(root, desired_size)
    thumb_path = os.path.abspath(os.path.join(directory, thumb_fname))

    return thumb_link, thumb_path
|
2016-03-11 17:52:12 +01:00
|
|
|
|
|
|
|
|
|
|
|
async def fetch_texture_thumbs(parent_node_uuid: str, desired_size: str,
                               thumbnail_directory: str,
                               *,
                               thumbnail_loading: callable,
                               thumbnail_loaded: callable,
                               future: asyncio.Future = None):
    """Fetches all texture thumbnails in a certain parent node.

    @param parent_node_uuid: the UUID of the parent node. All sub-nodes will be downloaded.
    @param desired_size: size indicator, from 'sbtmlh'.
    @param thumbnail_directory: directory in which to store the downloaded thumbnails.
    @param thumbnail_loading: callback function that takes (pillarsdk.Node, pillarsdk.File)
        parameters, which is called before a thumbnail will be downloaded. This allows you to
        show a "downloading" indicator.
    @param thumbnail_loaded: callback function that takes (pillarsdk.Node, pillarsdk.File object,
        thumbnail path) parameters, which is called for every thumbnail after it's been downloaded.
    @param future: Future that's inspected; if it is not None and cancelled, texture downloading
        is aborted.
    """

    # Download all texture nodes in parallel.
    log.debug('Getting child nodes of node %r', parent_node_uuid)
    texture_nodes = await get_nodes(parent_node_uuid=parent_node_uuid,
                                    node_type='texture')

    if is_cancelled(future):
        log.warning('fetch_texture_thumbs: Texture downloading cancelled')
        return

    # We don't want to gather too much in parallel, as it will make cancelling take more time.
    # This is caused by HTTP requests going out in parallel, and once the socket is open and
    # the GET request is sent, we can't cancel until the server starts streaming the response.
    chunk_size = 2
    for i in range(0, len(texture_nodes), chunk_size):
        chunk = texture_nodes[i:i + chunk_size]

        log.debug('fetch_texture_thumbs: Gathering texture[%i:%i] for parent node %r',
                  i, i + chunk_size, parent_node_uuid)
        # FIX: forward 'future' to the per-node downloads; previously it was not
        # passed, so an in-flight download_texture_thumbnail() could never see
        # the cancellation (its own 'future' parameter defaulted to None).
        coros = (download_texture_thumbnail(texture_node, desired_size,
                                            thumbnail_directory,
                                            thumbnail_loading=thumbnail_loading,
                                            thumbnail_loaded=thumbnail_loaded,
                                            future=future)
                 for texture_node in chunk)

        # raises any exception from failed handle_texture_node() calls.
        await asyncio.gather(*coros)

    log.info('fetch_texture_thumbs: Done downloading texture thumbnails')
|
|
|
|
|
|
|
|
|
2016-03-21 17:21:51 +01:00
|
|
|
async def download_texture_thumbnail(texture_node, desired_size: str,
                                     thumbnail_directory: str,
                                     *,
                                     thumbnail_loading: callable,
                                     thumbnail_loaded: callable,
                                     future: asyncio.Future = None):
    """Downloads the thumbnail of a single texture node.

    Finds the File document behind the node's 'picture', asks Pillar for the
    thumbnail of the desired size, downloads it next to a '.headers' sidecar
    cache file, and fires the thumbnail_loading/thumbnail_loaded callbacks on
    the event loop.

    :param texture_node: pillarsdk Node (dict-like) to thumbnail; non-'texture'
        nodes are silently skipped.
    :param desired_size: thumbnail size indicator passed on to Pillar.
    :param thumbnail_directory: directory the thumbnail file is written into.
    :param thumbnail_loading: callback invoked (threadsafe) before downloading.
    :param thumbnail_loaded: callback invoked (threadsafe) after downloading,
        with (node, file_desc, thumb_path); file_desc/thumb_path are None when
        the File could not be found.
    :param future: when given and cancelled, the download is aborted early.
    """
    # Skip non-texture nodes, as we can't thumbnail them anyway.
    if texture_node['node_type'] != 'texture':
        return

    if is_cancelled(future):
        log.debug('fetch_texture_thumbs cancelled before finding File for texture %r',
                  texture_node['_id'])
        return

    api = pillar_api()
    loop = asyncio.get_event_loop()

    # Only fetch the fields needed for thumbnailing, to keep the response small.
    file_find = functools.partial(pillarsdk.File.find, params={
        'projection': {'filename': 1, 'variations': 1, 'width': 1, 'height': 1},
    }, api=api)

    # Find the File that belongs to this texture node
    pic_uuid = texture_node['picture']
    # NOTE(review): texture_node is passed for BOTH callback parameters; the File
    # isn't known yet at this point. Confirm the callback tolerates a Node in the
    # file slot, or whether it should receive None instead.
    loop.call_soon_threadsafe(thumbnail_loading, texture_node, texture_node)
    file_desc = await loop.run_in_executor(None, file_find, pic_uuid)

    if file_desc is None:
        # No File found: fall through and still call thumbnail_loaded, with
        # file_desc=None and thumb_path=None, so the UI can show a placeholder.
        log.warning('Unable to find file for texture node %s', pic_uuid)
        thumb_path = None
    else:
        if is_cancelled(future):
            log.debug('fetch_texture_thumbs cancelled before downloading file %r',
                      file_desc['_id'])
            return

        # Get the thumbnail information from Pillar
        thumb_url, thumb_path = await fetch_thumbnail_info(file_desc, thumbnail_directory,
                                                           desired_size, future=future)
        if thumb_path is None:
            # The task got cancelled, we should abort too.
            log.debug('fetch_texture_thumbs cancelled while downloading file %r',
                      file_desc['_id'])
            return

        # Cached headers are stored next to thumbnails in sidecar files.
        header_store = '%s.headers' % thumb_path

        await download_to_file(thumb_url, thumb_path, header_store=header_store, future=future)

    loop.call_soon_threadsafe(thumbnail_loaded, texture_node, file_desc, thumb_path)
|
|
|
|
|
|
|
|
|
2016-03-15 14:05:54 +01:00
|
|
|
def is_cancelled(future: asyncio.Future) -> bool:
    """Returns whether the given future was cancelled; a None future never is."""

    if future is None:
        return False
    return future.cancelled()
|