Introducing Pillar Framework
Refactor of pillar-server and pillar-web into a single Python package. This simplifies the overall architecture of Pillar applications. Special thanks to @sybren and @venomgfx.
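With everything in one package, an application wires the whole API up through a single call. A minimal sketch, assuming an Eve-based application object that provides the register_api_blueprint() helper the sub-modules rely on (the application factory itself is not part of this commit):

    def configure_api(app):
        """Attach all Pillar API modules to an Eve-based app.

        `app` is assumed to be created elsewhere in the package and to expose
        register_api_blueprint(); see pillar/api/__init__.py below.
        """
        from pillar.api import setup_app
        setup_app(app)  # mounts /encoding, /blender_id, /p, /auth, /storage, ...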
15  pillar/api/__init__.py  Normal file
@@ -0,0 +1,15 @@
def setup_app(app):
    from . import encoding, blender_id, projects, local_auth, file_storage
    from . import users, nodes, latest, blender_cloud, service, activities

    encoding.setup_app(app, url_prefix='/encoding')
    blender_id.setup_app(app, url_prefix='/blender_id')
    projects.setup_app(app, api_prefix='/p')
    local_auth.setup_app(app, url_prefix='/auth')
    file_storage.setup_app(app, url_prefix='/storage')
    latest.setup_app(app, url_prefix='/latest')
    blender_cloud.setup_app(app, url_prefix='/bcloud')
    users.setup_app(app, api_prefix='/users')
    service.setup_app(app, api_prefix='/service')
    nodes.setup_app(app, url_prefix='/nodes')
    activities.setup_app(app)
168  pillar/api/activities.py  Normal file
@@ -0,0 +1,168 @@
from flask import g, request, current_app
from pillar.api.utils import gravatar


def notification_parse(notification):
    activities_collection = current_app.data.driver.db['activities']
    activities_subscriptions_collection = \
        current_app.data.driver.db['activities-subscriptions']
    users_collection = current_app.data.driver.db['users']
    nodes_collection = current_app.data.driver.db['nodes']
    activity = activities_collection.find_one({'_id': notification['activity']})

    if activity is None or activity['object_type'] != 'node':
        return
    node = nodes_collection.find_one({'_id': activity['object']})
    # Initial support only for node_type comments
    if node['node_type'] != 'comment':
        return
    node['parent'] = nodes_collection.find_one({'_id': node['parent']})
    object_type = 'comment'
    object_name = ''
    object_id = activity['object']

    if node['parent']['user'] == g.current_user['user_id']:
        owner = "your {0}".format(node['parent']['node_type'])
    else:
        parent_comment_user = users_collection.find_one(
            {'_id': node['parent']['user']})
        if parent_comment_user['_id'] == node['user']:
            user_name = 'their'
        else:
            user_name = "{0}'s".format(parent_comment_user['username'])
        owner = "{0} {1}".format(user_name, node['parent']['node_type'])

    context_object_type = node['parent']['node_type']
    context_object_name = owner
    context_object_id = activity['context_object']
    if activity['verb'] == 'replied':
        action = 'replied to'
    elif activity['verb'] == 'commented':
        action = 'left a comment on'
    else:
        action = activity['verb']

    lookup = {
        'user': g.current_user['user_id'],
        'context_object_type': 'node',
        'context_object': context_object_id,
    }
    subscription = activities_subscriptions_collection.find_one(lookup)
    if subscription and subscription['notifications']['web'] == True:
        is_subscribed = True
    else:
        is_subscribed = False

    # Parse user_actor
    actor = users_collection.find_one({'_id': activity['actor_user']})
    if actor:
        parsed_actor = {
            'username': actor['username'],
            'avatar': gravatar(actor['email'])}
    else:
        parsed_actor = None

    updates = dict(
        _id=notification['_id'],
        actor=parsed_actor,
        action=action,
        object_type=object_type,
        object_name=object_name,
        object_id=str(object_id),
        context_object_type=context_object_type,
        context_object_name=context_object_name,
        context_object_id=str(context_object_id),
        date=activity['_created'],
        is_read=('is_read' in notification and notification['is_read']),
        is_subscribed=is_subscribed,
        subscription=subscription['_id']
    )
    notification.update(updates)


def notification_get_subscriptions(context_object_type, context_object_id, actor_user_id):
    subscriptions_collection = current_app.data.driver.db['activities-subscriptions']
    lookup = {
        'user': {"$ne": actor_user_id},
        'context_object_type': context_object_type,
        'context_object': context_object_id,
        'is_subscribed': True,
    }
    return subscriptions_collection.find(lookup)


def activity_subscribe(user_id, context_object_type, context_object_id):
    """Subscribe a user to changes for a specific context. We create a subscription
    if none is found.

    :param user_id: id of the user we are going to subscribe
    :param context_object_type: hardcoded index, check the notifications/model.py
    :param context_object_id: object id, to be traced with context_object_type_id
    """
    subscriptions_collection = current_app.data.driver.db['activities-subscriptions']
    lookup = {
        'user': user_id,
        'context_object_type': context_object_type,
        'context_object': context_object_id
    }
    subscription = subscriptions_collection.find_one(lookup)

    # If no subscription exists, we create one
    if not subscription:
        current_app.post_internal('activities-subscriptions', lookup)


def activity_object_add(actor_user_id, verb, object_type, object_id,
                        context_object_type, context_object_id):
    """Adds a notification object and creates a notification for each user that
    - is not the original author of the post
    - is actively subscribed to the object

    This works using the following pattern:

    ACTOR -> VERB -> OBJECT -> CONTEXT

    :param actor_user_id: id of the user who is changing the object
    :param verb: the action on the object ('commented', 'replied')
    :param object_type: hardcoded name
    :param object_id: object id, to be traced with object_type_id
    """

    subscriptions = notification_get_subscriptions(
        context_object_type, context_object_id, actor_user_id)

    if subscriptions.count() > 0:
        activity = dict(
            actor_user=actor_user_id,
            verb=verb,
            object_type=object_type,
            object=object_id,
            context_object_type=context_object_type,
            context_object=context_object_id
        )

        activity = current_app.post_internal('activities', activity)
        if activity[3] != 201:
            # If creation failed for any reason, do not create any notification.
            return
        for subscription in subscriptions:
            notification = dict(
                user=subscription['user'],
                activity=activity[0]['_id'])
            current_app.post_internal('notifications', notification)


def before_returning_item_notifications(response):
    if request.args.get('parse'):
        notification_parse(response)


def before_returning_resource_notifications(response):
    for item in response['_items']:
        if request.args.get('parse'):
            notification_parse(item)


def setup_app(app):
    app.on_fetched_item_notifications += before_returning_item_notifications
    app.on_fetched_resource_notifications += before_returning_resource_notifications
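How the helpers above are meant to be combined is easiest to see from the caller's side. A minimal sketch, assuming ObjectId values for the ids involved (the actual comment-creation code lives outside this file):

    from pillar.api.activities import activity_subscribe, activity_object_add

    def on_comment_created(user_id, comment_id, parent_node_id):
        # The author follows the parent node, so later replies reach them.
        activity_subscribe(user_id, 'node', parent_node_id)
        # ACTOR (user) -> VERB ('commented') -> OBJECT (comment) -> CONTEXT (parent node).
        activity_object_add(user_id, 'commented', 'comment', comment_id,
                            'node', parent_node_id)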
30  pillar/api/blender_cloud/__init__.py  Normal file
@@ -0,0 +1,30 @@
from flask import request
from werkzeug import exceptions as wz_exceptions


def blender_cloud_addon_version():
    """Returns the version of the Blender Cloud Addon, or None if not given in the request.

    Uses the 'Blender-Cloud-Addon' HTTP header.

    :returns: the version of the addon, as tuple (major, minor, micro)
    :rtype: tuple or None
    :raises: werkzeug.exceptions.BadRequest if the header is malformed.
    """

    header = request.headers.get('Blender-Cloud-Addon')
    if not header:
        return None

    parts = header.split('.')
    try:
        return tuple(int(part) for part in parts)
    except ValueError:
        raise wz_exceptions.BadRequest('Invalid Blender-Cloud-Addon header')


def setup_app(app, url_prefix):
    from . import texture_libs, home_project

    texture_libs.setup_app(app, url_prefix=url_prefix)
    home_project.setup_app(app, url_prefix=url_prefix)
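The version tuple makes client capability checks a plain comparison. A small sketch of both sides, assuming the '/api/bcloud' prefix configured in pillar/api/__init__.py and eve_settings.py (the exact hostname is an example):

    import requests

    # Client side: the addon declares its version on every request.
    resp = requests.get('https://cloud.blender.org/api/bcloud/texture-libraries',
                        headers={'Blender-Cloud-Addon': '1.4.0'})

    # Server side: tuples compare element-wise, so (1, 4, 0) >= (1, 4, 0) is True
    # and an older (1, 3, 2) would not get HDRi libraries.
    version = (1, 4, 0)
    supports_hdri = version is not None and version >= (1, 4, 0)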
423  pillar/api/blender_cloud/home_project.py  Normal file
@@ -0,0 +1,423 @@
import copy
import logging

import datetime
from bson import ObjectId, tz_util
from eve.methods.get import get
from flask import Blueprint, g, current_app, request
from pillar.api import utils
from pillar.api.utils import authentication, authorization
from werkzeug import exceptions as wz_exceptions

from pillar.api.projects import utils as proj_utils

blueprint = Blueprint('blender_cloud.home_project', __name__)
log = logging.getLogger(__name__)

# Users with any of these roles will get a home project.
HOME_PROJECT_USERS = set()

# Users with any of these roles will get full write access to their home project.
HOME_PROJECT_WRITABLE_USERS = {u'subscriber', u'demo'}

HOME_PROJECT_DESCRIPTION = ('# Your home project\n\n'
                            'This is your home project. It allows synchronisation '
                            'of your Blender settings using the [Blender Cloud addon]'
                            '(https://cloud.blender.org/services#blender-addon).')
HOME_PROJECT_SUMMARY = 'This is your home project. Here you can sync your Blender settings!'
# HOME_PROJECT_DESCRIPTION = ('# Your home project\n\n'
#                             'This is your home project. It has functionality to act '
#                             'as a pastebin for text, images and other assets, and '
#                             'allows synchronisation of your Blender settings.')
# HOME_PROJECT_SUMMARY = 'This is your home project. Pastebin and Blender settings sync in one!'
SYNC_GROUP_NODE_NAME = u'Blender Sync'
SYNC_GROUP_NODE_DESC = ('The [Blender Cloud Addon](https://cloud.blender.org/services'
                        '#blender-addon) will synchronize your Blender settings here.')


def create_blender_sync_node(project_id, admin_group_id, user_id):
    """Creates a node for Blender Sync, with explicit write access for the admin group.

    Writes the node to the database.

    :param project_id: ID of the home project
    :type project_id: ObjectId
    :param admin_group_id: ID of the admin group of the project. This group will
        receive write access to the node.
    :type admin_group_id: ObjectId
    :param user_id: ID of the owner of the node.
    :type user_id: ObjectId

    :returns: The created node.
    :rtype: dict
    """

    log.debug('Creating sync node for project %s, user %s', project_id, user_id)

    node = {
        'project': ObjectId(project_id),
        'node_type': 'group',
        'name': SYNC_GROUP_NODE_NAME,
        'user': ObjectId(user_id),
        'description': SYNC_GROUP_NODE_DESC,
        'properties': {'status': 'published'},
        'permissions': {
            'users': [],
            'groups': [
                {'group': ObjectId(admin_group_id),
                 'methods': ['GET', 'PUT', 'POST', 'DELETE']}
            ],
            'world': [],
        }
    }

    r, _, _, status = current_app.post_internal('nodes', node)
    if status != 201:
        log.warning('Unable to create Blender Sync node for home project %s: %s',
                    project_id, r)
        raise wz_exceptions.InternalServerError('Unable to create Blender Sync node')

    node.update(r)
    return node


def create_home_project(user_id, write_access):
    """Creates a home project for the given user.

    :param user_id: the user ID of the owner
    :param write_access: whether the user has full write access to the home project.
    :type write_access: bool
    :returns: the project
    :rtype: dict
    """

    log.info('Creating home project for user %s', user_id)
    overrides = {
        'category': 'home',
        'url': 'home',
        'summary': HOME_PROJECT_SUMMARY,
        'description': HOME_PROJECT_DESCRIPTION
    }

    # Maybe the user has a deleted home project.
    proj_coll = current_app.data.driver.db['projects']
    deleted_proj = proj_coll.find_one({'user': user_id, 'category': 'home', '_deleted': True})
    if deleted_proj:
        log.info('User %s has a deleted project %s, restoring', user_id, deleted_proj['_id'])
        project = deleted_proj
    else:
        log.debug('User %s does not have a deleted project', user_id)
        project = proj_utils.create_new_project(project_name='Home',
                                                user_id=ObjectId(user_id),
                                                overrides=overrides)

    # Re-validate the authentication token, so that the put_internal call sees the
    # new group created for the project.
    authentication.validate_token()

    # There are a few things in the on_insert_projects hook we need to adjust.

    # Ensure that the project is private, even for admins.
    project['permissions']['world'] = []

    # Set up the correct node types. No need to set permissions for them,
    # as the inherited project permissions are fine.
    from pillar.api.node_types.group import node_type_group
    from pillar.api.node_types.asset import node_type_asset
    # from pillar.api.node_types.text import node_type_text
    from pillar.api.node_types.comment import node_type_comment

    # For non-subscribers: take away write access from the admin group,
    # and grant it to certain node types.
    project['permissions']['groups'][0]['methods'] = home_project_permissions(write_access)

    # Everybody should be able to comment on anything in this project.
    # This allows people to comment on shared images and see comments.
    node_type_comment = assign_permissions(
        node_type_comment,
        subscriber_methods=[u'GET', u'POST'],
        world_methods=[u'GET'])

    project['node_types'] = [
        node_type_group,
        node_type_asset,
        # node_type_text,
        node_type_comment,
    ]

    result, _, _, status = current_app.put_internal('projects', utils.remove_private_keys(project),
                                                    _id=project['_id'])
    if status != 200:
        log.error('Unable to update home project %s for user %s: %s',
                  project['_id'], user_id, result)
        raise wz_exceptions.InternalServerError('Unable to update home project')
    project.update(result)

    # Create the Blender Sync node, with explicit write permissions on the node itself.
    create_blender_sync_node(project['_id'],
                             project['permissions']['groups'][0]['group'],
                             user_id)

    return project


def assign_permissions(node_type, subscriber_methods, world_methods):
    """Assigns permissions to the node type object.

    :param node_type: a node type from pillar.api.node_types.
    :type node_type: dict
    :param subscriber_methods: allowed HTTP methods for users of role 'subscriber',
        'demo' and 'admin'.
    :type subscriber_methods: list
    :param world_methods: allowed HTTP methods for world
    :type world_methods: list
    :returns: a copy of the node type, with embedded permissions
    :rtype: dict
    """

    from pillar.api import service

    nt_with_perms = copy.deepcopy(node_type)

    perms = nt_with_perms.setdefault('permissions', {})
    perms['groups'] = [
        {'group': service.role_to_group_id['subscriber'],
         'methods': subscriber_methods[:]},
        {'group': service.role_to_group_id['demo'],
         'methods': subscriber_methods[:]},
        {'group': service.role_to_group_id['admin'],
         'methods': subscriber_methods[:]},
    ]
    perms['world'] = world_methods[:]

    return nt_with_perms


@blueprint.route('/home-project')
@authorization.require_login()
def home_project():
    """Fetches the home project, creating it if necessary.

    Eve projections are supported, but at least the following fields must be present:
    'permissions', 'category', 'user'
    """
    user_id = g.current_user['user_id']
    roles = g.current_user.get('roles', ())

    log.debug('Possibly creating home project for user %s with roles %s', user_id, roles)
    if HOME_PROJECT_USERS and not HOME_PROJECT_USERS.intersection(roles):
        log.debug('User %s is not a subscriber, not creating home project.', user_id)
        return 'No home project', 404

    # Create the home project before we do the Eve query. This costs an extra round-trip
    # to the database, but makes it easier to do projections correctly.
    if not has_home_project(user_id):
        write_access = write_access_with_roles(roles)
        create_home_project(user_id, write_access)

    resp, _, _, status, _ = get('projects', category=u'home', user=user_id)
    if status != 200:
        return utils.jsonify(resp), status

    if resp['_items']:
        project = resp['_items'][0]
    else:
        log.warning('Home project for user %s not found, while we just created it! Could be '
                    'due to projections and other arguments on the query string: %s',
                    user_id, request.query_string)
        return 'No home project', 404

    return utils.jsonify(project), status


def write_access_with_roles(roles):
    """Returns whether or not one of these roles grants write access to the home project.

    :rtype: bool
    """

    write_access = bool(not HOME_PROJECT_WRITABLE_USERS or
                        HOME_PROJECT_WRITABLE_USERS.intersection(roles))
    return write_access


def home_project_permissions(write_access):
    """Returns the project permissions, given the write access of the user.

    :rtype: list
    """

    if write_access:
        return [u'GET', u'PUT', u'POST', u'DELETE']
    return [u'GET']


def has_home_project(user_id):
    """Returns True iff the user has a home project."""

    proj_coll = current_app.data.driver.db['projects']
    return proj_coll.count({'user': user_id, 'category': 'home', '_deleted': False}) > 0


def get_home_project(user_id, projection=None):
    """Returns the home project"""

    proj_coll = current_app.data.driver.db['projects']
    return proj_coll.find_one({'user': user_id, 'category': 'home', '_deleted': False},
                              projection=projection)


def is_home_project(project_id, user_id):
    """Returns True iff the given project exists and is the user's home project."""

    proj_coll = current_app.data.driver.db['projects']
    return proj_coll.count({'_id': project_id,
                            'user': user_id,
                            'category': 'home',
                            '_deleted': False}) > 0


def mark_node_updated(node_id):
    """Uses pymongo to set the node's _updated to "now"."""

    now = datetime.datetime.now(tz=tz_util.utc)
    nodes_coll = current_app.data.driver.db['nodes']

    return nodes_coll.update_one({'_id': node_id},
                                 {'$set': {'_updated': now}})


def get_home_project_parent_node(node, projection, name_for_log):
    """Returns a partial parent node document, but only if the node is a home project node."""

    user_id = authentication.current_user_id()
    if not user_id:
        log.debug('%s: user not logged in.', name_for_log)
        return None

    parent_id = node.get('parent')
    if not parent_id:
        log.debug('%s: ignoring top-level node.', name_for_log)
        return None

    project_id = node.get('project')
    if not project_id:
        log.debug('%s: ignoring node without project ID', name_for_log)
        return None

    project_id = ObjectId(project_id)
    if not is_home_project(project_id, user_id):
        log.debug('%s: node not part of home project.', name_for_log)
        return None

    # Get the parent node for permission checking.
    parent_id = ObjectId(parent_id)

    nodes_coll = current_app.data.driver.db['nodes']
    projection['project'] = 1
    parent_node = nodes_coll.find_one(parent_id, projection=projection)

    if parent_node['project'] != project_id:
        log.warning('%s: User %s is trying to reference '
                    'parent node %s from different project %s, expected project %s.',
                    name_for_log, user_id, parent_id, parent_node['project'], project_id)
        raise wz_exceptions.BadRequest('Trying to create cross-project links.')

    return parent_node


def check_home_project_nodes_permissions(nodes):
    for node in nodes:
        check_home_project_node_permissions(node)


def check_home_project_node_permissions(node):
    """Grants POST access to the node when the user has POST access on its parent."""

    parent_node = get_home_project_parent_node(node,
                                               {'permissions': 1,
                                                'project': 1,
                                                'node_type': 1},
                                               'check_home_project_node_permissions')
    if parent_node is None or 'permissions' not in parent_node:
        return

    parent_id = parent_node['_id']

    has_access = authorization.has_permissions('nodes', parent_node, 'POST')
    if not has_access:
        log.debug('check_home_project_node_permissions: No POST access to parent node %s, '
                  'ignoring.', parent_id)
        return

    # Grant access!
    log.debug('check_home_project_node_permissions: POST access at parent node %s, '
              'so granting POST access to new child node.', parent_id)

    # Make sure the permissions of the parent node are copied to this node.
    node['permissions'] = copy.deepcopy(parent_node['permissions'])


def mark_parents_as_updated(nodes):
    for node in nodes:
        mark_parent_as_updated(node)


def mark_parent_as_updated(node, original=None):
    parent_node = get_home_project_parent_node(node,
                                               {'permissions': 1,
                                                'node_type': 1},
                                               'mark_parent_as_updated')
    if parent_node is None:
        return

    # Mark the parent node as 'updated' if this is an asset and the parent is a group.
    if node.get('node_type') == 'asset' and parent_node['node_type'] == 'group':
        log.debug('Node %s updated, marking parent=%s as updated too',
                  node['_id'], parent_node['_id'])
        mark_node_updated(parent_node['_id'])


def user_changed_role(sender, user):
    """Responds to the 'user changed' signal from the Badger service.

    Changes the permissions on the home project based on the 'subscriber' role.

    :returns: whether this function actually made changes.
    :rtype: bool
    """

    user_id = user['_id']
    if not has_home_project(user_id):
        log.debug('User %s does not have a home project, not changing access permissions', user_id)
        return

    proj_coll = current_app.data.driver.db['projects']
    proj = get_home_project(user_id, projection={'permissions': 1, '_id': 1})

    write_access = write_access_with_roles(user['roles'])
    target_permissions = home_project_permissions(write_access)

    current_perms = proj['permissions']['groups'][0]['methods']
    if set(current_perms) == set(target_permissions):
        return False

    project_id = proj['_id']
    log.info('Updating permissions on user %s home project %s from %s to %s',
             user_id, project_id, current_perms, target_permissions)
    proj_coll.update_one({'_id': project_id},
                         {'$set': {'permissions.groups.0.methods': list(target_permissions)}})

    return True


def setup_app(app, url_prefix):
    app.register_api_blueprint(blueprint, url_prefix=url_prefix)

    app.on_insert_nodes += check_home_project_nodes_permissions
    app.on_inserted_nodes += mark_parents_as_updated
    app.on_updated_nodes += mark_parent_as_updated
    app.on_replaced_nodes += mark_parent_as_updated

    from pillar.api import service
    service.signal_user_changed_role.connect(user_changed_role)
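The role-to-permission mapping above boils down to two pure helpers, which can be sanity-checked without an application context since they only touch module-level constants (assuming the pillar package is importable):

    from pillar.api.blender_cloud import home_project

    # Subscribers and demo users get full write access to their home project:
    assert home_project.write_access_with_roles([u'subscriber']) is True
    assert home_project.home_project_permissions(True) == [u'GET', u'PUT', u'POST', u'DELETE']
    # Everybody else only gets read access:
    assert home_project.write_access_with_roles([u'service']) is False
    assert home_project.home_project_permissions(False) == [u'GET']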
146  pillar/api/blender_cloud/texture_libs.py  Normal file
@@ -0,0 +1,146 @@
import functools
import logging

from eve.methods.get import get
from eve.utils import config as eve_config
from flask import Blueprint, request, current_app, g
from pillar.api import utils
from pillar.api.utils.authentication import current_user_id
from pillar.api.utils.authorization import require_login
from werkzeug.datastructures import MultiDict
from werkzeug.exceptions import InternalServerError

FIRST_ADDON_VERSION_WITH_HDRI = (1, 4, 0)
TL_PROJECTION = utils.dumps({'name': 1, 'url': 1, 'permissions': 1,})
TL_SORT = utils.dumps([('name', 1)])

TEXTURE_LIBRARY_QUERY_ARGS = {
    eve_config.QUERY_PROJECTION: TL_PROJECTION,
    eve_config.QUERY_SORT: TL_SORT,
    'max_results': 'null',  # this needs to be there, or we get a KeyError.
}

blueprint = Blueprint('blender_cloud.texture_libs', __name__)
log = logging.getLogger(__name__)


def keep_fetching_texture_libraries(proj_filter):
    groups = g.current_user['groups']
    user_id = g.current_user['user_id']

    page = 1
    max_page = float('inf')

    while page <= max_page:
        request.args.setlist(eve_config.QUERY_PAGE, [page])

        result, _, _, status, _ = get(
            'projects',
            {'$or': [
                {'user': user_id},
                {'permissions.groups.group': {'$in': groups}},
                {'permissions.world': 'GET'}
            ]})

        if status != 200:
            log.warning('Error fetching texture libraries: %s', result)
            raise InternalServerError('Error fetching texture libraries')

        for proj in result['_items']:
            if proj_filter(proj):
                yield proj

        # Compute the last page number we should query.
        meta = result['_meta']
        max_page = meta['total'] // meta['max_results']
        if meta['total'] % meta['max_results'] > 0:
            max_page += 1

        page += 1


@blueprint.route('/texture-libraries')
@require_login()
def texture_libraries():
    from . import blender_cloud_addon_version

    # Use Eve method so that we get filtering on permissions for free.
    # This gives all the projects that contain the required node types.
    request.args = MultiDict(request.args)  # allow changes; it's an ImmutableMultiDict by default.
    request.args.setlist(eve_config.QUERY_PROJECTION, [TL_PROJECTION])
    request.args.setlist(eve_config.QUERY_SORT, [TL_SORT])

    # Determine whether to return HDRi projects too, based on the version
    # of the Blender Cloud Addon. If the addon version is None, we're dealing
    # with a version of the BCA that's so old it doesn't send its version along.
    addon_version = blender_cloud_addon_version()
    return_hdri = addon_version >= FIRST_ADDON_VERSION_WITH_HDRI
    log.debug('User %s has Blender Cloud Addon version %s; return_hdri=%s',
              current_user_id(), addon_version, return_hdri)

    accept_as_library = functools.partial(has_texture_node, return_hdri=return_hdri)

    # Construct eve-like response.
    projects = list(keep_fetching_texture_libraries(accept_as_library))
    result = {'_items': projects,
              '_meta': {
                  'max_results': len(projects),
                  'page': 1,
                  'total': len(projects),
              }}

    return utils.jsonify(result)


def has_texture_node(proj, return_hdri=True):
    """Returns True iff the project has a top-level (group)texture node."""

    nodes_collection = current_app.data.driver.db['nodes']

    # See which types of nodes we support.
    node_types = ['group_texture']
    if return_hdri:
        node_types.append('group_hdri')

    count = nodes_collection.count(
        {'node_type': {'$in': node_types},
         'project': proj['_id'],
         'parent': None})
    return count > 0


def sort_by_image_width(node, original=None):
    """Sort the files in an HDRi node by image file size."""

    if node.get('node_type') != 'hdri':
        return

    if not node.get('properties', {}).get('files'):
        return

    # TODO: re-enable this once all current HDRis have been saved in correct order.
    # # Don't bother sorting when the files haven't changed.
    # if original is not None and \
    #         original.get('properties', {}).get('files') == node['properties']['files']:
    #     return

    log.info('Sorting HDRi node %s', node.get('_id', 'NO-ID'))
    files_coll = current_app.data.driver.db['files']

    def sort_key(file_ref):
        file_doc = files_coll.find_one(file_ref['file'], projection={'length': 1})
        return file_doc['length']

    node['properties']['files'].sort(key=sort_key)


def sort_nodes_by_image_width(nodes):
    for node in nodes:
        sort_by_image_width(node)


def setup_app(app, url_prefix):
    app.on_replace_nodes += sort_by_image_width
    app.on_insert_nodes += sort_nodes_by_image_width

    app.register_api_blueprint(blueprint, url_prefix=url_prefix)
240  pillar/api/blender_id.py  Normal file
@@ -0,0 +1,240 @@
"""Blender ID subclient endpoint.

Also contains functionality for other parts of Pillar to perform communication
with Blender ID.
"""

import logging

import datetime
import requests
from bson import tz_util
from flask import Blueprint, request, current_app, jsonify
from pillar.api.utils import authentication, remove_private_keys
from requests.adapters import HTTPAdapter
from werkzeug import exceptions as wz_exceptions

blender_id = Blueprint('blender_id', __name__)
log = logging.getLogger(__name__)


@blender_id.route('/store_scst', methods=['POST'])
def store_subclient_token():
    """Verifies & stores a user's subclient-specific token."""

    user_id = request.form['user_id']  # User ID at BlenderID
    subclient_id = request.form['subclient_id']
    scst = request.form['token']

    db_user, status = validate_create_user(user_id, scst, subclient_id)

    if db_user is None:
        log.warning('Unable to verify subclient token with Blender ID.')
        return jsonify({'status': 'fail',
                        'error': 'BLENDER ID ERROR'}), 403

    return jsonify({'status': 'success',
                    'subclient_user_id': str(db_user['_id'])}), status


def blender_id_endpoint():
    """Gets the endpoint for the authentication API. If the env variable
    is defined, it's possible to override the (default) production address.
    """
    return current_app.config['BLENDER_ID_ENDPOINT'].rstrip('/')


def validate_create_user(blender_id_user_id, token, oauth_subclient_id):
    """Validates a user against Blender ID, creating the user in our database.

    :param blender_id_user_id: the user ID at the BlenderID server.
    :param token: the OAuth access token.
    :param oauth_subclient_id: the subclient ID, or empty string if not a subclient.
    :returns: (user in MongoDB, HTTP status 200 or 201)
    """

    # Verify with Blender ID
    log.debug('Storing token for BlenderID user %s', blender_id_user_id)
    user_info, token_expiry = validate_token(blender_id_user_id, token, oauth_subclient_id)

    if user_info is None:
        log.debug('Unable to verify token with Blender ID.')
        return None, None

    # Blender ID can be queried without user ID, and will always include the
    # correct user ID in its response.
    log.debug('Obtained user info from Blender ID: %s', user_info)
    blender_id_user_id = user_info['id']

    # Store the user info in MongoDB.
    db_user = find_user_in_db(blender_id_user_id, user_info)
    db_id, status = upsert_user(db_user, blender_id_user_id)

    # Store the token in MongoDB.
    authentication.store_token(db_id, token, token_expiry, oauth_subclient_id)

    return db_user, status


def upsert_user(db_user, blender_id_user_id):
    """Inserts/updates the user in MongoDB.

    Retries a few times when there are uniqueness issues in the username.

    :returns: the user's database ID and the status of the PUT/POST.
        The status is 201 on insert, and 200 on update.
    :type: (ObjectId, int)
    """

    if u'subscriber' in db_user.get('groups', []):
        log.error('Non-ObjectID string found in user.groups: %s', db_user)
        raise wz_exceptions.InternalServerError('Non-ObjectID string found in user.groups: %s' % db_user)

    r = {}
    for retry in range(5):
        if '_id' in db_user:
            # Update the existing user
            attempted_eve_method = 'PUT'
            db_id = db_user['_id']
            r, _, _, status = current_app.put_internal('users', remove_private_keys(db_user),
                                                       _id=db_id)
            if status == 422:
                log.error('Status %i trying to PUT user %s with values %s, should not happen! %s',
                          status, db_id, remove_private_keys(db_user), r)
        else:
            # Create a new user, retry for non-unique usernames.
            attempted_eve_method = 'POST'
            r, _, _, status = current_app.post_internal('users', db_user)

            if status not in {200, 201}:
                log.error('Status %i trying to create user for BlenderID %s with values %s: %s',
                          status, blender_id_user_id, db_user, r)
                raise wz_exceptions.InternalServerError()

            db_id = r['_id']
            db_user.update(r)  # update with database/eve-generated fields.

        if status == 422:
            # Probably non-unique username, so retry a few times with different usernames.
            log.info('Error creating new user: %s', r)
            username_issue = r.get('_issues', {}).get(u'username', '')
            if u'not unique' in username_issue:
                # Retry
                db_user['username'] = authentication.make_unique_username(db_user['email'])
                continue

        # Saving was successful, or at least didn't break on a non-unique username.
        break
    else:
        log.error('Unable to create new user %s: %s', db_user, r)
        raise wz_exceptions.InternalServerError()

    if status not in (200, 201):
        log.error('internal response from %s to Eve: %r %r', attempted_eve_method, status, r)
        raise wz_exceptions.InternalServerError()

    return db_id, status


def validate_token(user_id, token, oauth_subclient_id):
    """Verifies a subclient token with Blender ID.

    :returns: (user info, token expiry) on success, or (None, None) on failure.
        The user information from Blender ID is returned as dict
        {'email': 'a@b', 'full_name': 'AB'}, token expiry as a datetime.datetime.
    :rtype: dict
    """

    our_subclient_id = current_app.config['BLENDER_ID_SUBCLIENT_ID']

    # Check that IF there is a subclient ID given, it is the correct one.
    if oauth_subclient_id and our_subclient_id != oauth_subclient_id:
        log.warning('validate_token(): BlenderID user %s is trying to use the wrong subclient '
                    'ID %r; treating as invalid login.', user_id, oauth_subclient_id)
        return None, None

    # Validate against BlenderID.
    log.debug('Validating subclient token for BlenderID user %r, subclient %r', user_id,
              oauth_subclient_id)
    payload = {'user_id': user_id,
               'token': token}
    if oauth_subclient_id:
        payload['subclient_id'] = oauth_subclient_id

    url = '{0}/u/validate_token'.format(blender_id_endpoint())
    log.debug('POSTing to %r', url)

    # Retry a few times when POSTing to BlenderID fails.
    # Source: http://stackoverflow.com/a/15431343/875379
    s = requests.Session()
    s.mount(blender_id_endpoint(), HTTPAdapter(max_retries=5))

    # POST to Blender ID, handling errors as negative verification results.
    try:
        r = s.post(url, data=payload, timeout=5)
    except requests.exceptions.ConnectionError as e:
        log.error('Connection error trying to POST to %s, handling as invalid token.', url)
        return None, None

    if r.status_code != 200:
        log.debug('Token %s invalid, HTTP status %i returned', token, r.status_code)
        return None, None

    resp = r.json()
    if resp['status'] != 'success':
        log.warning('Failed response from %s: %s', url, resp)
        return None, None

    expires = _compute_token_expiry(resp['token_expires'])

    return resp['user'], expires


def _compute_token_expiry(token_expires_string):
    """Computes token expiry based on current time and BlenderID expiry.

    Expires our side of the token when either the BlenderID token expires,
    or in one hour. The latter case is to ensure we periodically verify
    the token.
    """

    date_format = current_app.config['RFC1123_DATE_FORMAT']
    blid_expiry = datetime.datetime.strptime(token_expires_string, date_format)
    blid_expiry = blid_expiry.replace(tzinfo=tz_util.utc)
    our_expiry = datetime.datetime.now(tz=tz_util.utc) + datetime.timedelta(hours=1)

    return min(blid_expiry, our_expiry)


def find_user_in_db(blender_id_user_id, user_info):
    """Find the user in our database, creating/updating the returned document where needed.

    Does NOT update the user in the database.
    """

    users = current_app.data.driver.db['users']

    query = {'auth': {'$elemMatch': {'user_id': str(blender_id_user_id),
                                     'provider': 'blender-id'}}}
    log.debug('Querying: %s', query)
    db_user = users.find_one(query)

    if db_user:
        log.debug('User blender_id_user_id=%r already in our database, '
                  'updating with info from Blender ID.', blender_id_user_id)
        db_user['email'] = user_info['email']
    else:
        log.debug('User %r not yet in our database, create a new one.', blender_id_user_id)
        db_user = authentication.create_new_user_document(
            email=user_info['email'],
            user_id=blender_id_user_id,
            username=user_info['full_name'])
        db_user['username'] = authentication.make_unique_username(user_info['email'])
        if not db_user['full_name']:
            db_user['full_name'] = db_user['username']

    return db_user


def setup_app(app, url_prefix):
    app.register_api_blueprint(blender_id, url_prefix=url_prefix)
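The expiry clamp in _compute_token_expiry() is worth spelling out: whatever Blender ID reports, our copy of the token never lives longer than one hour, so it gets re-verified regularly. A small standalone illustration of the same rule (plain datetimes, no Flask config involved):

    import datetime

    def clamp_expiry(remote_expiry, now):
        # Same idea as _compute_token_expiry(): take the earlier of the remote
        # expiry and "one hour from now".
        return min(remote_expiry, now + datetime.timedelta(hours=1))

    now = datetime.datetime(2016, 8, 19, 12, 0, 0)
    # A far-away remote expiry is clamped to one hour:
    assert clamp_expiry(datetime.datetime(2016, 8, 20, 12, 0, 0), now) == now + datetime.timedelta(hours=1)
    # A nearer remote expiry wins:
    assert clamp_expiry(datetime.datetime(2016, 8, 19, 12, 30, 0), now) == datetime.datetime(2016, 8, 19, 12, 30, 0)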
82  pillar/api/custom_field_validation.py  Normal file
@@ -0,0 +1,82 @@
import logging

from bson import ObjectId
from datetime import datetime
from eve.io.mongo import Validator
from flask import current_app

log = logging.getLogger(__name__)


class ValidateCustomFields(Validator):
    def convert_properties(self, properties, node_schema):
        date_format = current_app.config['RFC1123_DATE_FORMAT']

        for prop in node_schema:
            if not prop in properties:
                continue
            schema_prop = node_schema[prop]
            prop_type = schema_prop['type']
            if prop_type == 'dict':
                properties[prop] = self.convert_properties(
                    properties[prop], schema_prop['schema'])
            if prop_type == 'list':
                if properties[prop] in ['', '[]']:
                    properties[prop] = []
                for k, val in enumerate(properties[prop]):
                    if not 'schema' in schema_prop:
                        continue
                    item_schema = {'item': schema_prop['schema']}
                    item_prop = {'item': properties[prop][k]}
                    properties[prop][k] = self.convert_properties(
                        item_prop, item_schema)['item']
            # Convert datetime string to RFC1123 datetime
            elif prop_type == 'datetime':
                prop_val = properties[prop]
                properties[prop] = datetime.strptime(prop_val, date_format)
            elif prop_type == 'objectid':
                prop_val = properties[prop]
                if prop_val:
                    properties[prop] = ObjectId(prop_val)
                else:
                    properties[prop] = None

        return properties

    def _validate_valid_properties(self, valid_properties, field, value):
        from pillar.api.utils import project_get_node_type

        projects_collection = current_app.data.driver.db['projects']
        lookup = {'_id': ObjectId(self.document['project'])}

        project = projects_collection.find_one(lookup, {
            'node_types.name': 1,
            'node_types.dyn_schema': 1,
        })
        if project is None:
            log.warning('Unknown project %s, declared by node %s',
                        lookup, self.document.get('_id'))
            self._error(field, 'Unknown project')
            return False

        node_type_name = self.document['node_type']
        node_type = project_get_node_type(project, node_type_name)
        if node_type is None:
            log.warning('Project %s has no node type %s, declared by node %s',
                        project, node_type_name, self.document.get('_id'))
            self._error(field, 'Unknown node type')
            return False

        try:
            value = self.convert_properties(value, node_type['dyn_schema'])
        except Exception as e:
            log.warning("Error converting form properties", exc_info=True)

        v = Validator(node_type['dyn_schema'])
        val = v.validate(value)

        if val:
            return True

        log.warning('Error validating properties for node %s: %s', self.document, v.errors)
        self._error(field, "Error validating properties")
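convert_properties() exists because Eve hands us node properties as plain strings, while the dynamic schema may declare objectid or datetime fields. A tiny illustration of the conversion it performs, assuming the RFC 1123 date format Eve uses by default ('%a, %d %b %Y %H:%M:%S GMT'); the field names are made up for the example:

    from datetime import datetime
    from bson import ObjectId

    # Input as it arrives from the client:
    props = {'file': '56a0f5fce30bc47dd5b1f23c',
             'deadline': 'Fri, 19 Aug 2016 12:00:00 GMT'}
    # After conversion the values are ready for MongoDB:
    converted = {'file': ObjectId(props['file']),
                 'deadline': datetime.strptime(props['deadline'],
                                               '%a, %d %b %Y %H:%M:%S GMT')}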
178  pillar/api/encoding.py  Normal file
@@ -0,0 +1,178 @@
import logging

import datetime
import os
from bson import ObjectId, tz_util
from flask import Blueprint
from flask import abort
from flask import current_app
from flask import request
from pillar.api import utils
from pillar.api.utils.gcs import GoogleCloudStorageBucket
from pillar.api.utils import skip_when_testing

encoding = Blueprint('encoding', __name__)
log = logging.getLogger(__name__)


def size_descriptor(width, height):
    """Returns the size descriptor (like '1080p') for the given width.

    >>> size_descriptor(720, 480)
    '576p'
    >>> size_descriptor(1920, 1080)
    '1080p'
    >>> size_descriptor(1920, 751)  # 23:9
    '1080p'
    """

    widths = {
        720: '576p',
        640: '480p',
        1280: '720p',
        1920: '1080p',
        2048: '2k',
        4096: '4k',
    }

    # If it is a known width, use it. Otherwise just return '{height}p'
    if width in widths:
        return widths[width]

    return '%ip' % height


@skip_when_testing
def rename_on_gcs(bucket_name, from_path, to_path):
    gcs = GoogleCloudStorageBucket(str(bucket_name))
    blob = gcs.bucket.blob(from_path)
    gcs.bucket.rename_blob(blob, to_path)


@encoding.route('/zencoder/notifications', methods=['POST'])
def zencoder_notifications():
    """

    See: https://app.zencoder.com/docs/guides/getting-started/notifications#api_version_2

    """
    if current_app.config['ENCODING_BACKEND'] != 'zencoder':
        log.warning('Received notification from Zencoder but app not configured for Zencoder.')
        return abort(403)

    if not current_app.config['DEBUG']:
        # If we are in production, look for the Zencoder header secret
        try:
            notification_secret_request = request.headers[
                'X-Zencoder-Notification-Secret']
        except KeyError:
            log.warning('Received Zencoder notification without secret.')
            return abort(401)
        # If the header is found, check it against the one in the config
        notification_secret = current_app.config['ZENCODER_NOTIFICATIONS_SECRET']
        if notification_secret_request != notification_secret:
            log.warning('Received Zencoder notification with incorrect secret.')
            return abort(401)

    # Cast request data into a dict
    data = request.get_json()

    if log.isEnabledFor(logging.DEBUG):
        from pprint import pformat
        log.debug('Zencoder job JSON: %s', pformat(data))

    files_collection = current_app.data.driver.db['files']
    # Find the file object based on processing backend and job_id
    zencoder_job_id = data['job']['id']
    lookup = {'processing.backend': 'zencoder',
              'processing.job_id': str(zencoder_job_id)}
    file_doc = files_collection.find_one(lookup)
    if not file_doc:
        log.warning('Unknown Zencoder job id %r', zencoder_job_id)
        # Return 200 OK when debugging, or Zencoder will keep trying and trying and trying...
        # which is what we want in production.
        return "Not found, but that's okay.", 200 if current_app.config['DEBUG'] else 404

    file_id = ObjectId(file_doc['_id'])
    # Remove internal keys (so that we can run put internal)
    file_doc = utils.remove_private_keys(file_doc)

    # Update processing status
    job_state = data['job']['state']
    file_doc['processing']['status'] = job_state

    if job_state == 'failed':
        log.warning('Zencoder job %i for file %s failed.', zencoder_job_id, file_id)
        # Log what Zencoder told us went wrong.
        for output in data['outputs']:
            if not any('error' in key for key in output):
                continue
            log.warning('Errors for output %s:', output['url'])
            for key in output:
                if 'error' in key:
                    log.info('  %s: %s', key, output[key])

        file_doc['status'] = 'failed'
        current_app.put_internal('files', file_doc, _id=file_id)
        return "You failed, but that's okay.", 200

    log.info('Zencoder job %s for file %s completed with status %s.', zencoder_job_id, file_id,
             job_state)

    # For every variation encoded, try to update the file object
    root, _ = os.path.splitext(file_doc['file_path'])

    for output in data['outputs']:
        video_format = output['format']
        # Change the zencoder 'mpeg4' format to 'mp4' used internally
        video_format = 'mp4' if video_format == 'mpeg4' else video_format

        # Find a variation matching format and resolution
        variation = next((v for v in file_doc['variations']
                          if v['format'] == video_format and v['width'] == output['width']), None)
        # Fall back to a variation matching just the format
        if variation is None:
            variation = next((v for v in file_doc['variations']
                              if v['format'] == video_format), None)
        if variation is None:
            log.warning('Unable to find variation for video format %s for file %s',
                        video_format, file_id)
            continue

        # Rename the file to include the now-known size descriptor.
        size = size_descriptor(output['width'], output['height'])
        new_fname = '{}-{}.{}'.format(root, size, video_format)

        # Rename on Google Cloud Storage
        try:
            rename_on_gcs(file_doc['project'],
                          '_/' + variation['file_path'],
                          '_/' + new_fname)
        except Exception:
            log.warning('Unable to rename GCS blob %r to %r. Keeping old name.',
                        variation['file_path'], new_fname, exc_info=True)
        else:
            variation['file_path'] = new_fname

        # TODO: calculate md5 on the storage
        variation.update({
            'height': output['height'],
            'width': output['width'],
            'length': output['file_size_in_bytes'],
            'duration': data['input']['duration_in_ms'] / 1000,
            'md5': output['md5_checksum'] or '',  # they don't do MD5 for GCS...
            'size': size,
        })

    file_doc['status'] = 'complete'

    # Force an update of the links on the next load of the file.
    file_doc['link_expires'] = datetime.datetime.now(tz=tz_util.utc) - datetime.timedelta(days=1)

    current_app.put_internal('files', file_doc, _id=file_id)

    return '', 204


def setup_app(app, url_prefix):
    app.register_api_blueprint(encoding, url_prefix=url_prefix)
779
pillar/api/eve_settings.py
Normal file
779
pillar/api/eve_settings.py
Normal file
@@ -0,0 +1,779 @@
|
||||
import os
|
||||
|
||||
URL_PREFIX = 'api'
|
||||
|
||||
# Enable reads (GET), inserts (POST) and DELETE for resources/collections
|
||||
# (if you omit this line, the API will default to ['GET'] and provide
|
||||
# read-only access to the endpoint).
|
||||
RESOURCE_METHODS = ['GET', 'POST', 'DELETE']
|
||||
|
||||
# Enable reads (GET), edits (PATCH), replacements (PUT) and deletes of
|
||||
# individual items (defaults to read-only item access).
|
||||
ITEM_METHODS = ['GET', 'PUT', 'DELETE']
|
||||
|
||||
PAGINATION_LIMIT = 250
|
||||
PAGINATION_DEFAULT = 250
|
||||
|
||||
_file_embedded_schema = {
|
||||
'type': 'objectid',
|
||||
'data_relation': {
|
||||
'resource': 'files',
|
||||
'field': '_id',
|
||||
'embeddable': True
|
||||
}
|
||||
}
|
||||
|
||||
_node_embedded_schema = {
|
||||
'type': 'objectid',
|
||||
'data_relation': {
|
||||
'resource': 'nodes',
|
||||
'field': '_id',
|
||||
'embeddable': True
|
||||
}
|
||||
}
|
||||
|
||||
_required_user_embedded_schema = {
|
||||
'type': 'objectid',
|
||||
'required': True,
|
||||
'data_relation': {
|
||||
'resource': 'users',
|
||||
'field': '_id',
|
||||
'embeddable': True
|
||||
},
|
||||
}
|
||||
|
||||
_activity_object_type = {
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
'allowed': [
|
||||
'project',
|
||||
'user',
|
||||
'node'
|
||||
],
|
||||
}
|
||||
|
||||
users_schema = {
|
||||
'full_name': {
|
||||
'type': 'string',
|
||||
'minlength': 1,
|
||||
'maxlength': 128,
|
||||
'required': True,
|
||||
},
|
||||
'username': {
|
||||
'type': 'string',
|
||||
'minlength': 3,
|
||||
'maxlength': 128,
|
||||
'required': True,
|
||||
'unique': True,
|
||||
},
|
||||
'email': {
|
||||
'type': 'string',
|
||||
'minlength': 5,
|
||||
'maxlength': 60,
|
||||
},
|
||||
'roles': {
|
||||
'type': 'list',
|
||||
'schema': {'type': 'string'}
|
||||
},
|
||||
'groups': {
|
||||
'type': 'list',
|
||||
'default': [],
|
||||
'schema': {
|
||||
'type': 'objectid',
|
||||
'data_relation': {
|
||||
'resource': 'groups',
|
||||
'field': '_id',
|
||||
'embeddable': True
|
||||
}
|
||||
}
|
||||
},
|
||||
'auth': {
|
||||
# Storage of authentication credentials (one will be able to auth with
|
||||
# multiple providers on the same account)
|
||||
'type': 'list',
|
||||
'required': True,
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'provider': {
|
||||
'type': 'string',
|
||||
'allowed': ["blender-id", "local"],
|
||||
},
|
||||
'user_id': {
|
||||
'type': 'string'
|
||||
},
|
||||
# A token is considered a "password" in case the provider is
|
||||
# "local".
|
||||
'token': {
|
||||
'type': 'string'
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'settings': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'email_communications': {
|
||||
'type': 'integer',
|
||||
'allowed': [0, 1]
|
||||
}
|
||||
}
|
||||
},
|
||||
'service': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'badger': {
|
||||
'type': 'list',
|
||||
'schema': {'type': 'string'}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
organizations_schema = {
|
||||
'name': {
|
||||
'type': 'string',
|
||||
'minlength': 1,
|
||||
'maxlength': 128,
|
||||
'required': True
|
||||
},
|
||||
'email': {
|
||||
'type': 'string'
|
||||
},
|
||||
'url': {
|
||||
'type': 'string',
|
||||
'minlength': 1,
|
||||
'maxlength': 128,
|
||||
'required': True
|
||||
},
|
||||
'description': {
|
||||
'type': 'string',
|
||||
'maxlength': 256,
|
||||
},
|
||||
'website': {
|
||||
'type': 'string',
|
||||
'maxlength': 256,
|
||||
},
|
||||
'location': {
|
||||
'type': 'string',
|
||||
'maxlength': 256,
|
||||
},
|
||||
'picture': dict(
|
||||
nullable=True,
|
||||
**_file_embedded_schema),
|
||||
'users': {
|
||||
'type': 'list',
|
||||
'default': [],
|
||||
'schema': {
|
||||
'type': 'objectid',
|
||||
'data_relation': {
|
||||
'resource': 'users',
|
||||
'field': '_id',
|
||||
'embeddable': True
|
||||
}
|
||||
}
|
||||
},
|
||||
'teams': {
|
||||
'type': 'list',
|
||||
'default': [],
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
# Team name
|
||||
'name': {
|
||||
'type': 'string',
|
||||
'minlength': 1,
|
||||
'maxlength': 128,
|
||||
'required': True
|
||||
},
|
||||
# List of user ids for the team
|
||||
'users': {
|
||||
'type': 'list',
|
||||
'default': [],
|
||||
'schema': {
|
||||
'type': 'objectid',
|
||||
'data_relation': {
|
||||
'resource': 'users',
|
||||
'field': '_id',
|
||||
}
|
||||
}
|
||||
},
|
||||
# List of groups assigned to the team (this will automatically
|
||||
# update the groups property of each user in the team)
|
||||
'groups': {
|
||||
'type': 'list',
|
||||
'default': [],
|
||||
'schema': {
|
||||
'type': 'objectid',
|
||||
'data_relation': {
|
||||
'resource': 'groups',
|
||||
'field': '_id',
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
permissions_embedded_schema = {
|
||||
'groups': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'group': {
|
||||
'type': 'objectid',
|
||||
'required': True,
|
||||
'data_relation': {
|
||||
'resource': 'groups',
|
||||
'field': '_id',
|
||||
'embeddable': True
|
||||
}
|
||||
},
|
||||
'methods': {
|
||||
'type': 'list',
|
||||
'required': True,
|
||||
'allowed': ['GET', 'PUT', 'POST', 'DELETE']
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
'users': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'user': {
|
||||
'type': 'objectid',
|
||||
'required': True,
|
||||
},
|
||||
'methods': {
|
||||
'type': 'list',
|
||||
'required': True,
|
||||
'allowed': ['GET', 'PUT', 'POST', 'DELETE']
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'world': {
|
||||
'type': 'list',
|
||||
# 'required': True,
|
||||
'allowed': ['GET', ]
|
||||
},
|
||||
'is_free': {
|
||||
'type': 'boolean',
|
||||
}
|
||||
}

nodes_schema = {
    'name': {
        'type': 'string',
        'minlength': 1,
        'maxlength': 128,
        'required': True,
    },
    'description': {
        'type': 'string',
    },
    'picture': _file_embedded_schema,
    'order': {
        'type': 'integer',
        'minlength': 0,
    },
    'revision': {
        'type': 'integer',
    },
    'parent': _node_embedded_schema,
    'project': {
        'type': 'objectid',
        'data_relation': {
            'resource': 'projects',
            'field': '_id',
            'embeddable': True
        },
    },
    'user': {
        'type': 'objectid',
        'data_relation': {
            'resource': 'users',
            'field': '_id',
            'embeddable': True
        },
    },
    'node_type': {
        'type': 'string',
        'required': True
    },
    'properties': {
        'type': 'dict',
        'valid_properties': True,
        'required': True,
    },
    'permissions': {
        'type': 'dict',
        'schema': permissions_embedded_schema
    },
    'short_code': {
        'type': 'string',
    },
}

tokens_schema = {
    'user': {
        'type': 'objectid',
        'required': True,
    },
    'token': {
        'type': 'string',
        'required': True,
    },
    'expire_time': {
        'type': 'datetime',
        'required': True,
    },
    'is_subclient_token': {
        'type': 'boolean',
        'required': False,
    }
}

files_schema = {
|
||||
# Name of the file after processing, possibly hashed.
|
||||
'name': {
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'description': {
|
||||
'type': 'string',
|
||||
},
|
||||
'content_type': { # MIME type image/png video/mp4
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
# Duration in seconds, only if it's a video
|
||||
'duration': {
|
||||
'type': 'integer',
|
||||
},
|
||||
'size': { # xs, s, b, 720p, 2K
|
||||
'type': 'string'
|
||||
},
|
||||
'format': { # human readable format, like mp4, HLS, webm, mov
|
||||
'type': 'string'
|
||||
},
|
||||
'width': { # valid for images and video content_type
|
||||
'type': 'integer'
|
||||
},
|
||||
'height': {
|
||||
'type': 'integer'
|
||||
},
|
||||
'user': {
|
||||
'type': 'objectid',
|
||||
'required': True,
|
||||
},
|
||||
'length': { # Size in bytes
|
||||
'type': 'integer',
|
||||
'required': True,
|
||||
},
|
||||
'length_aggregate_in_bytes': { # Size of file + all variations
|
||||
'type': 'integer',
|
||||
'required': False,
|
||||
# it's computed on the fly anyway, so clients don't need to provide it.
|
||||
},
|
||||
'md5': {
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
|
||||
# Original filename as given by the user, cleaned-up to make it safe.
|
||||
'filename': {
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'backend': {
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
'allowed': ["attract-web", "pillar", "cdnsun", "gcs", "unittest"]
|
||||
},
|
||||
|
||||
# Where the file is in the backend storage itself. In the case of GCS,
|
||||
# it is relative to the /_ folder. In the other cases, it is relative
|
||||
# to the root of that storage backend. required=False to allow creation
|
||||
# before uploading to a storage, in case the final path is determined
|
||||
# by that storage backend.
|
||||
'file_path': {
|
||||
'type': 'string',
|
||||
},
|
||||
'link': {
|
||||
'type': 'string',
|
||||
},
|
||||
'link_expires': {
|
||||
'type': 'datetime',
|
||||
},
|
||||
'project': {
|
||||
# The project node the file belongs to (does not matter if it is
|
||||
# attached to an asset or something else). We use the project id as
|
||||
# top level filtering, folder or bucket name. Later on we will be able
|
||||
# to join permissions from the project and verify user access.
|
||||
'type': 'objectid',
|
||||
'data_relation': {
|
||||
'resource': 'projects',
|
||||
'field': '_id',
|
||||
'embeddable': True
|
||||
},
|
||||
},
|
||||
'variations': { # File variations (used to be children, see above)
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'is_public': { # If True, the link will not be hashed or signed
|
||||
'type': 'boolean'
|
||||
},
|
||||
'content_type': { # MIME type image/png video/mp4
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'duration': {
|
||||
'type': 'integer',
|
||||
},
|
||||
'size': { # xs, s, b, 720p, 2K
|
||||
'type': 'string'
|
||||
},
|
||||
'format': { # human readable format, like mp4, HLS, webm, mov
|
||||
'type': 'string'
|
||||
},
|
||||
'width': { # valid for images and video content_type
|
||||
'type': 'integer'
|
||||
},
|
||||
'height': {
|
||||
'type': 'integer'
|
||||
},
|
||||
'length': { # Size in bytes
|
||||
'type': 'integer',
|
||||
'required': True,
|
||||
},
|
||||
'md5': {
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'file_path': {
|
||||
'type': 'string',
|
||||
},
|
||||
'link': {
|
||||
'type': 'string',
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'processing': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'job_id': {
|
||||
'type': 'string' # can be int, depending on the backend
|
||||
},
|
||||
'backend': {
|
||||
'type': 'string',
|
||||
'allowed': ["zencoder", "local"]
|
||||
},
|
||||
'status': {
|
||||
'type': 'string',
|
||||
'allowed': ["pending", "waiting", "processing", "finished",
|
||||
"failed", "cancelled"]
|
||||
},
|
||||
}
|
||||
},
|
||||
'status': {
|
||||
'type': 'string',
|
||||
'allowed': ['uploading', 'queued_for_processing', 'processing', 'complete', 'failed'],
|
||||
'required': False,
|
||||
'default': 'complete', # default value for backward compatibility.
|
||||
},
|
||||
}
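
# A minimal sketch, not part of the original commit: the smallest document that
# should pass files_schema validation, before Eve adds its own meta fields.
# All ObjectIds and values are made up for illustration.
from bson import ObjectId

_example_file_doc = {
    'name': '8a4f0c9f2e7b4d21a3c5e6f7.png',  # hypothetical hashed name
    'filename': 'render.png',
    'content_type': 'image/png',
    'length': 2048,
    'md5': 'd41d8cd98f00b204e9800998ecf8427e',
    'user': ObjectId('5872b5ef9837b2726b1c7b8b'),
    'project': ObjectId('5872b5ef9837b2726b1c7b8c'),
    'backend': 'gcs',
}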

groups_schema = {
    'name': {
        'type': 'string',
        'required': True
    }
}

projects_schema = {
|
||||
'name': {
|
||||
'type': 'string',
|
||||
'minlength': 1,
|
||||
'maxlength': 128,
|
||||
'required': True,
|
||||
},
|
||||
'description': {
|
||||
'type': 'string',
|
||||
},
|
||||
# Short summary for the project
|
||||
'summary': {
|
||||
'type': 'string',
|
||||
'maxlength': 128
|
||||
},
|
||||
# Logo
|
||||
'picture_square': _file_embedded_schema,
|
||||
# Header
|
||||
'picture_header': _file_embedded_schema,
|
||||
'header_node': dict(
|
||||
nullable=True,
|
||||
**_node_embedded_schema
|
||||
),
|
||||
'user': {
|
||||
'type': 'objectid',
|
||||
'required': True,
|
||||
'data_relation': {
|
||||
'resource': 'users',
|
||||
'field': '_id',
|
||||
'embeddable': True
|
||||
},
|
||||
},
|
||||
'category': {
|
||||
'type': 'string',
|
||||
'allowed': [
|
||||
'training',
|
||||
'film',
|
||||
'assets',
|
||||
'software',
|
||||
'game',
|
||||
'home',
|
||||
],
|
||||
'required': True,
|
||||
},
|
||||
'is_private': {
|
||||
'type': 'boolean',
|
||||
'default': True,
|
||||
},
|
||||
'url': {
|
||||
'type': 'string'
|
||||
},
|
||||
'organization': {
|
||||
'type': 'objectid',
|
||||
'nullable': True,
|
||||
'data_relation': {
|
||||
'resource': 'organizations',
|
||||
'field': '_id',
|
||||
'embeddable': True
|
||||
},
|
||||
},
|
||||
'status': {
|
||||
'type': 'string',
|
||||
'allowed': [
|
||||
'published',
|
||||
'pending',
|
||||
],
|
||||
},
|
||||
# Latest nodes being edited
|
||||
'nodes_latest': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'objectid',
|
||||
}
|
||||
},
|
||||
# Featured nodes, manually added
|
||||
'nodes_featured': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'objectid',
|
||||
}
|
||||
},
|
||||
# Latest blog posts, manually added
|
||||
'nodes_blog': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'objectid',
|
||||
}
|
||||
},
|
||||
# Where the node type schemas for every project are defined
|
||||
'node_types': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
# URL is the way we identify a node_type when calling it via
|
||||
# the helper methods in the Project API.
|
||||
'url': {'type': 'string'},
|
||||
'name': {'type': 'string'},
|
||||
'description': {'type': 'string'},
|
||||
# Allowed parents for the node_type
|
||||
'parent': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'string'
|
||||
}
|
||||
},
|
||||
'dyn_schema': {
|
||||
'type': 'dict',
|
||||
'allow_unknown': True
|
||||
},
|
||||
'form_schema': {
|
||||
'type': 'dict',
|
||||
'allow_unknown': True
|
||||
},
|
||||
'permissions': {
|
||||
'type': 'dict',
|
||||
'schema': permissions_embedded_schema
|
||||
}
|
||||
},
|
||||
|
||||
}
|
||||
},
|
||||
'permissions': {
|
||||
'type': 'dict',
|
||||
'schema': permissions_embedded_schema
|
||||
}
|
||||
}
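
# A minimal sketch, not part of the original commit: the required core of a
# project document under projects_schema. The ObjectId is made up, and a real
# project would normally carry a populated node_types list as well.
from bson import ObjectId

_example_project_doc = {
    'name': 'Example Project',
    'user': ObjectId('5872b5ef9837b2726b1c7b8d'),
    'category': 'assets',
    'is_private': True,
}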

activities_subscriptions_schema = {
    'user': _required_user_embedded_schema,
    'context_object_type': _activity_object_type,
    'context_object': {
        'type': 'objectid',
        'required': True
    },
    'notifications': {
        'type': 'dict',
        'schema': {
            'email': {
                'type': 'boolean',
            },
            'web': {
                'type': 'boolean',
                'default': True
            },
        }
    },
    'is_subscribed': {
        'type': 'boolean',
        'default': True
    }
}

activities_schema = {
    'actor_user': _required_user_embedded_schema,
    'verb': {
        'type': 'string',
        'required': True
    },
    'object_type': _activity_object_type,
    'object': {
        'type': 'objectid',
        'required': True
    },
    'context_object_type': _activity_object_type,
    'context_object': {
        'type': 'objectid',
        'required': True
    },
}

notifications_schema = {
    'user': _required_user_embedded_schema,
    'activity': {
        'type': 'objectid',
        'required': True,
    },
    'is_read': {
        'type': 'boolean',
    },
}

nodes = {
    'schema': nodes_schema,
    'public_methods': ['GET'],
    'public_item_methods': ['GET'],
    'soft_delete': True,
}

users = {
    'item_title': 'user',

    # We choose to override global cache-control directives for this resource.
    'cache_control': 'max-age=10,must-revalidate',
    'cache_expires': 10,

    'resource_methods': ['GET'],
    'item_methods': ['GET', 'PUT', 'PATCH'],
    'public_item_methods': ['GET'],

    # By default don't include the 'auth' field. It can still be obtained
    # using projections, though, so we block that in hooks.
    'datasource': {'projection': {u'auth': 0}},

    'schema': users_schema
}

tokens = {
    'resource_methods': ['GET', 'POST'],

    # Allow 'token' to be returned with POST responses
    # 'extra_response_fields': ['token'],

    'schema': tokens_schema
}

files = {
    'resource_methods': ['GET', 'POST'],
    'item_methods': ['GET', 'PATCH'],
    'public_methods': ['GET'],
    'public_item_methods': ['GET'],
    'schema': files_schema
}

groups = {
    'resource_methods': ['GET', 'POST'],
    'public_methods': ['GET'],
    'public_item_methods': ['GET'],
    'schema': groups_schema,
}

organizations = {
    'schema': organizations_schema,
    'public_item_methods': ['GET'],
    'public_methods': ['GET']
}

projects = {
    'schema': projects_schema,
    'public_item_methods': ['GET'],
    'public_methods': ['GET'],
    'soft_delete': True,
}

activities = {
    'schema': activities_schema,
}

activities_subscriptions = {
    'schema': activities_subscriptions_schema,
}

notifications = {
    'schema': notifications_schema,
}

DOMAIN = {
    'users': users,
    'nodes': nodes,
    'tokens': tokens,
    'files': files,
    'groups': groups,
    'organizations': organizations,
    'projects': projects,
    'activities': activities,
    'activities-subscriptions': activities_subscriptions,
    'notifications': notifications
}

MONGO_HOST = os.environ.get('PILLAR_MONGO_HOST', 'localhost')
MONGO_PORT = int(os.environ.get('PILLAR_MONGO_PORT', 27017))
MONGO_DBNAME = os.environ.get('PILLAR_MONGO_DBNAME', 'eve')
CACHE_EXPIRES = 60
HATEOAS = False
UPSERT_ON_PUT = False  # do not create a new document on PUT to a non-existent URL.
X_DOMAINS = '*'
X_ALLOW_CREDENTIALS = True
X_HEADERS = 'Authorization'
XML = False
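
# A minimal sketch, not part of the original commit: a bare Eve application can
# consume this settings module directly. Pillar itself feeds these values into
# its own application setup, so this block is illustrative only.
if __name__ == '__main__':
    from eve import Eve

    _illustrative_app = Eve(settings=__file__)
    _illustrative_app.run(port=5000, debug=True)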
815
pillar/api/file_storage.py
Normal file
@@ -0,0 +1,815 @@
|
||||
import io
|
||||
import logging
|
||||
import mimetypes
|
||||
import tempfile
|
||||
import uuid
|
||||
from hashlib import md5
|
||||
|
||||
import bson.tz_util
|
||||
import datetime
|
||||
import eve.utils
|
||||
import os
|
||||
import pymongo
|
||||
import werkzeug.exceptions as wz_exceptions
|
||||
from bson import ObjectId
|
||||
from flask import Blueprint
|
||||
from flask import current_app
|
||||
from flask import g
|
||||
from flask import jsonify
|
||||
from flask import request
|
||||
from flask import send_from_directory
|
||||
from flask import url_for, helpers
|
||||
from pillar.api import utils
|
||||
from pillar.api.utils.imaging import generate_local_thumbnails
|
||||
from pillar.api.utils import remove_private_keys, authentication
|
||||
from pillar.api.utils.authorization import require_login, user_has_role, \
|
||||
user_matches_roles
|
||||
from pillar.api.utils.cdn import hash_file_path
|
||||
from pillar.api.utils.encoding import Encoder
|
||||
from pillar.api.utils.gcs import GoogleCloudStorageBucket
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
file_storage = Blueprint('file_storage', __name__,
|
||||
template_folder='templates',
|
||||
static_folder='../../static/storage', )
|
||||
|
||||
# Add our own extensions to the mimetypes package
|
||||
mimetypes.add_type('application/x-blender', '.blend')
|
||||
mimetypes.add_type('application/x-radiance-hdr', '.hdr')
|
||||
|
||||
|
||||
@file_storage.route('/gcs/<bucket_name>/<subdir>/')
|
||||
@file_storage.route('/gcs/<bucket_name>/<subdir>/<path:file_path>')
|
||||
def browse_gcs(bucket_name, subdir, file_path=None):
|
||||
"""Browse the content of a Google Cloud Storage bucket"""
|
||||
|
||||
# Initialize storage client
|
||||
storage = GoogleCloudStorageBucket(bucket_name, subdir=subdir)
|
||||
if file_path:
|
||||
# If we provided a file_path, we try to fetch it
|
||||
file_object = storage.Get(file_path)
|
||||
if file_object:
|
||||
# If it exists, return file properties in a dictionary
|
||||
return jsonify(file_object)
|
||||
else:
|
||||
listing = storage.List(file_path)
|
||||
return jsonify(listing)
|
||||
# We always return an empty listing even if the directory does not
|
||||
# exist. This can be changed later.
|
||||
# return abort(404)
|
||||
|
||||
else:
|
||||
listing = storage.List('')
|
||||
return jsonify(listing)
|
||||
|
||||
|
||||
@file_storage.route('/file', methods=['POST'])
|
||||
@file_storage.route('/file/<path:file_name>', methods=['GET', 'POST'])
|
||||
def index(file_name=None):
|
||||
# GET file -> read it
|
||||
if request.method == 'GET':
|
||||
return send_from_directory(current_app.config['STORAGE_DIR'], file_name)
|
||||
|
||||
# POST file -> save it
|
||||
|
||||
# Sanitize the filename; source: http://stackoverflow.com/questions/7406102/
|
||||
file_name = request.form['name']
|
||||
keepcharacters = {' ', '.', '_'}
|
||||
file_name = ''.join(
|
||||
c for c in file_name if c.isalnum() or c in keepcharacters).strip()
|
||||
file_name = file_name.lstrip('.')
|
||||
|
||||
# Determine & create storage directory
|
||||
folder_name = file_name[:2]
|
||||
file_folder_path = helpers.safe_join(current_app.config['STORAGE_DIR'],
|
||||
folder_name)
|
||||
if not os.path.exists(file_folder_path):
|
||||
log.info('Creating folder path %r', file_folder_path)
|
||||
os.mkdir(file_folder_path)
|
||||
|
||||
# Save uploaded file
|
||||
file_path = helpers.safe_join(file_folder_path, file_name)
|
||||
log.info('Saving file %r', file_path)
|
||||
request.files['data'].save(file_path)
|
||||
|
||||
# TODO: possibly nicer to just return a redirect to the file's URL.
|
||||
return jsonify({'url': url_for('file_storage.index', file_name=file_name)})
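
# A minimal sketch, not part of the original commit: uploading to the local
# storage endpoint above with the third-party 'requests' library. The host,
# port and the '/storage' mount point are assumptions; the form fields
# ('name' and 'data') are the ones index() reads.
def _example_local_upload(path, base_url='http://localhost:5000'):
    import requests

    with open(path, 'rb') as fileobj:
        resp = requests.post('%s/storage/file' % base_url,
                             data={'name': os.path.basename(path)},
                             files={'data': fileobj})
    resp.raise_for_status()
    return resp.json()['url']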
|
||||
|
||||
|
||||
def _process_image(gcs, file_id, local_file, src_file):
|
||||
from PIL import Image
|
||||
|
||||
im = Image.open(local_file)
|
||||
res = im.size
|
||||
src_file['width'] = res[0]
|
||||
src_file['height'] = res[1]
|
||||
|
||||
# Generate previews
|
||||
log.info('Generating thumbnails for file %s', file_id)
|
||||
src_file['variations'] = generate_local_thumbnails(src_file['name'],
|
||||
local_file.name)
|
||||
|
||||
# Send those previews to Google Cloud Storage.
|
||||
log.info('Uploading %i thumbnails for file %s to Google Cloud Storage '
|
||||
'(GCS)', len(src_file['variations']), file_id)
|
||||
|
||||
# TODO: parallelize this at some point.
|
||||
for variation in src_file['variations']:
|
||||
fname = variation['file_path']
|
||||
if current_app.config['TESTING']:
|
||||
log.warning(' - NOT sending thumbnail %s to GCS', fname)
|
||||
else:
|
||||
log.debug(' - Sending thumbnail %s to GCS', fname)
|
||||
blob = gcs.bucket.blob('_/' + fname, chunk_size=256 * 1024 * 2)
|
||||
blob.upload_from_filename(variation['local_path'],
|
||||
content_type=variation['content_type'])
|
||||
|
||||
if variation.get('size') == 't':
|
||||
blob.make_public()
|
||||
|
||||
try:
|
||||
os.unlink(variation['local_path'])
|
||||
except OSError:
|
||||
log.warning('Unable to unlink %s, ignoring this but it will need '
|
||||
'cleanup later.', variation['local_path'])
|
||||
|
||||
del variation['local_path']
|
||||
|
||||
log.info('Done processing file %s', file_id)
|
||||
src_file['status'] = 'complete'
|
||||
|
||||
|
||||
def _process_video(gcs, file_id, local_file, src_file):
|
||||
"""Video is processed by Zencoder; the file isn't even stored locally."""
|
||||
|
||||
log.info('Processing video for file %s', file_id)
|
||||
|
||||
# Create variations
|
||||
root, _ = os.path.splitext(src_file['file_path'])
|
||||
src_file['variations'] = []
|
||||
|
||||
# Most of these properties will be available after encode.
|
||||
v = 'mp4'
|
||||
file_variation = dict(
|
||||
format=v,
|
||||
content_type='video/{}'.format(v),
|
||||
file_path='{}-{}.{}'.format(root, v, v),
|
||||
size='',
|
||||
duration=0,
|
||||
width=0,
|
||||
height=0,
|
||||
length=0,
|
||||
md5='',
|
||||
)
|
||||
# Append file variation. Originally mp4 and webm were the available options,
|
||||
# that's why we build a list.
|
||||
src_file['variations'].append(file_variation)
|
||||
|
||||
if current_app.config['TESTING']:
|
||||
log.warning('_process_video: NOT sending out encoding job due to '
|
||||
'TESTING=%r', current_app.config['TESTING'])
|
||||
j = type('EncoderJob', (), {'process_id': 'fake-process-id',
|
||||
'backend': 'fake'})
|
||||
else:
|
||||
j = Encoder.job_create(src_file)
|
||||
if j is None:
|
||||
log.warning('_process_video: unable to create encoder job for file '
|
||||
'%s.', file_id)
|
||||
return
|
||||
|
||||
log.info('Created asynchronous Zencoder job %s for file %s',
|
||||
j['process_id'], file_id)
|
||||
|
||||
# Add the processing status to the file object
|
||||
src_file['processing'] = {
|
||||
'status': 'pending',
|
||||
'job_id': str(j['process_id']),
|
||||
'backend': j['backend']}
|
||||
|
||||
|
||||
def process_file(gcs, file_id, local_file):
|
||||
"""Process the file by creating thumbnails, sending to Zencoder, etc.
|
||||
|
||||
:param file_id: '_id' key of the file
|
||||
:type file_id: ObjectId or str
|
||||
:param local_file: locally stored file, or None if no local processing is
|
||||
needed.
|
||||
:type local_file: file
|
||||
"""
|
||||
|
||||
file_id = ObjectId(file_id)
|
||||
|
||||
# Fetch the src_file document from MongoDB.
|
||||
files = current_app.data.driver.db['files']
|
||||
src_file = files.find_one(file_id)
|
||||
if not src_file:
|
||||
log.warning('process_file(%s): no such file document found, ignoring.', file_id)
|
||||
return
|
||||
src_file = utils.remove_private_keys(src_file)
|
||||
|
||||
# Update the 'format' field from the content type.
|
||||
# TODO: overrule the content type based on file extension & magic numbers.
|
||||
mime_category, src_file['format'] = src_file['content_type'].split('/', 1)
|
||||
|
||||
# Prevent video handling for non-admins.
|
||||
if not user_has_role(u'admin') and mime_category == 'video':
|
||||
if src_file['format'].startswith('x-'):
|
||||
xified = src_file['format']
|
||||
else:
|
||||
xified = 'x-' + src_file['format']
|
||||
|
||||
src_file['content_type'] = 'application/%s' % xified
|
||||
mime_category = 'application'
|
||||
log.info('Not processing video file %s for non-admin user', file_id)
|
||||
|
||||
# Run the required processor, based on the MIME category.
|
||||
processors = {
|
||||
'image': _process_image,
|
||||
'video': _process_video,
|
||||
}
|
||||
|
||||
try:
|
||||
processor = processors[mime_category]
|
||||
except KeyError:
|
||||
log.info("POSTed file %s was of type %r, which isn't "
|
||||
"thumbnailed/encoded.", file_id,
|
||||
mime_category)
|
||||
src_file['status'] = 'complete'
|
||||
else:
|
||||
log.debug('process_file(%s): marking file status as "processing"',
|
||||
file_id)
|
||||
src_file['status'] = 'processing'
|
||||
update_file_doc(file_id, status='processing')
|
||||
|
||||
try:
|
||||
processor(gcs, file_id, local_file, src_file)
|
||||
except Exception:
|
||||
log.warning('process_file(%s): error when processing file, '
|
||||
'resetting status to '
|
||||
'"queued_for_processing"', file_id, exc_info=True)
|
||||
update_file_doc(file_id, status='queued_for_processing')
|
||||
return
|
||||
|
||||
# Update the original file with additional info, e.g. image resolution
|
||||
r, _, _, status = current_app.put_internal('files', src_file, _id=file_id)
|
||||
if status not in (200, 201):
|
||||
log.warning('process_file(%s): status %i when saving processed file '
|
||||
'info to MongoDB: %s',
|
||||
file_id, status, r)
|
||||
|
||||
|
||||
def delete_file(file_item):
|
||||
def process_file_delete(file_item):
|
||||
"""Given a file item, delete the actual file from the storage backend.
|
||||
This function can be probably made self-calling."""
|
||||
if file_item['backend'] == 'gcs':
|
||||
storage = GoogleCloudStorageBucket(str(file_item['project']))
|
||||
storage.Delete(file_item['file_path'])
|
||||
# Delete any file variation found in the file_item document
|
||||
if 'variations' in file_item:
|
||||
for v in file_item['variations']:
|
||||
storage.Delete(v['file_path'])
|
||||
return True
|
||||
elif file_item['backend'] == 'pillar':
|
||||
pass
|
||||
elif file_item['backend'] == 'cdnsun':
|
||||
pass
|
||||
else:
|
||||
pass
|
||||
|
||||
files_collection = current_app.data.driver.db['files']
|
||||
# Collect children (variations) of the original file
|
||||
children = files_collection.find({'parent': file_item['_id']})
|
||||
for child in children:
|
||||
process_file_delete(child)
|
||||
# Finally remove the original file
|
||||
process_file_delete(file_item)
|
||||
|
||||
|
||||
def generate_link(backend, file_path, project_id=None, is_public=False):
|
||||
"""Hook to check the backend of a file resource, to build an appropriate link
|
||||
that can be used by the client to retrieve the actual file.
|
||||
"""
|
||||
|
||||
if backend == 'gcs':
|
||||
if current_app.config['TESTING']:
|
||||
log.info('Skipping GCS link generation, and returning a fake link '
|
||||
'instead.')
|
||||
return '/path/to/testing/gcs/%s' % file_path
|
||||
|
||||
storage = GoogleCloudStorageBucket(project_id)
|
||||
blob = storage.Get(file_path)
|
||||
if blob is None:
|
||||
return ''
|
||||
|
||||
if is_public:
|
||||
return blob['public_url']
|
||||
return blob['signed_url']
|
||||
|
||||
if backend == 'pillar':
|
||||
return url_for('file_storage.index', file_name=file_path,
|
||||
_external=True, _scheme=current_app.config['SCHEME'])
|
||||
if backend == 'cdnsun':
|
||||
return hash_file_path(file_path, None)
|
||||
if backend == 'unittest':
|
||||
return md5(file_path).hexdigest()
|
||||
|
||||
return ''
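
# A minimal sketch, not part of the original commit, of how generate_link() is
# typically driven from a file document; the field names follow files_schema.
def _example_link_for_file_doc(file_doc):
    project_id = str(file_doc['project']) if 'project' in file_doc else None
    return generate_link(file_doc['backend'], file_doc['file_path'],
                         project_id=project_id, is_public=False)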
|
||||
|
||||
|
||||
def before_returning_file(response):
|
||||
ensure_valid_link(response)
|
||||
|
||||
# Enable this call later, when we have implemented the is_public field on
|
||||
# files.
|
||||
# strip_link_and_variations(response)
|
||||
|
||||
|
||||
def strip_link_and_variations(response):
|
||||
# Check the access level of the user.
|
||||
if g.current_user is None:
|
||||
has_full_access = False
|
||||
else:
|
||||
user_roles = g.current_user['roles']
|
||||
access_roles = current_app.config['FULL_FILE_ACCESS_ROLES']
|
||||
has_full_access = bool(user_roles.intersection(access_roles))
|
||||
|
||||
# Strip all file variations (unless image) and link to the actual file.
|
||||
if not has_full_access:
|
||||
response.pop('link', None)
|
||||
response.pop('link_expires', None)
|
||||
|
||||
# Image files have public variations, other files don't.
|
||||
if not response.get('content_type', '').startswith('image/'):
|
||||
if response.get('variations') is not None:
|
||||
response['variations'] = []
|
||||
|
||||
|
||||
def before_returning_files(response):
|
||||
for item in response['_items']:
|
||||
ensure_valid_link(item)
|
||||
|
||||
|
||||
def ensure_valid_link(response):
|
||||
"""Ensures the file item has valid file links using generate_link(...)."""
|
||||
|
||||
# Log to function-specific logger, so we can easily turn it off.
|
||||
log_link = logging.getLogger('%s.ensure_valid_link' % __name__)
|
||||
# log.debug('Inspecting link for file %s', response['_id'])
|
||||
|
||||
# Check link expiry.
|
||||
now = datetime.datetime.now(tz=bson.tz_util.utc)
|
||||
if 'link_expires' in response:
|
||||
link_expires = response['link_expires']
|
||||
if now < link_expires:
|
||||
# Not expired yet, so don't bother regenerating anything.
|
||||
log_link.debug('Link expires at %s, which is in the future, so not '
|
||||
'generating new link', link_expires)
|
||||
return
|
||||
|
||||
log_link.debug('Link expired at %s, which is in the past; generating '
|
||||
'new link', link_expires)
|
||||
else:
|
||||
log_link.debug('No expiry date for link; generating new link')
|
||||
|
||||
_generate_all_links(response, now)
|
||||
|
||||
|
||||
def _generate_all_links(response, now):
|
||||
"""Generate a new link for the file and all its variations.
|
||||
|
||||
:param response: the file document that should be updated.
|
||||
:param now: datetime that reflects 'now', for consistent expiry generation.
|
||||
"""
|
||||
|
||||
project_id = str(
|
||||
response['project']) if 'project' in response else None
|
||||
# TODO: add project id to all files
|
||||
backend = response['backend']
|
||||
response['link'] = generate_link(backend, response['file_path'], project_id)
|
||||
|
||||
variations = response.get('variations')
|
||||
if variations:
|
||||
for variation in variations:
|
||||
variation['link'] = generate_link(backend, variation['file_path'],
|
||||
project_id)
|
||||
|
||||
# Construct the new expiry datetime.
|
||||
validity_secs = current_app.config['FILE_LINK_VALIDITY'][backend]
|
||||
response['link_expires'] = now + datetime.timedelta(seconds=validity_secs)
|
||||
|
||||
patch_info = remove_private_keys(response)
|
||||
file_id = ObjectId(response['_id'])
|
||||
(patch_resp, _, _, _) = current_app.patch_internal('files', patch_info,
|
||||
_id=file_id)
|
||||
if patch_resp.get('_status') == 'ERR':
|
||||
log.warning('Unable to save new links for file %s: %r',
|
||||
response['_id'], patch_resp)
|
||||
# TODO: raise a snag.
|
||||
response['_updated'] = now
|
||||
else:
|
||||
response['_updated'] = patch_resp['_updated']
|
||||
|
||||
# Be silly and re-fetch the etag ourselves. TODO: handle this better.
|
||||
etag_doc = current_app.data.driver.db['files'].find_one({'_id': file_id},
|
||||
{'_etag': 1})
|
||||
response['_etag'] = etag_doc['_etag']
|
||||
|
||||
|
||||
def before_deleting_file(item):
|
||||
delete_file(item)
|
||||
|
||||
|
||||
def on_pre_get_files(_, lookup):
|
||||
# Override the HTTP header, we always want to fetch the document from
|
||||
# MongoDB.
|
||||
parsed_req = eve.utils.parse_request('files')
|
||||
parsed_req.if_modified_since = None
|
||||
|
||||
# Only fetch it if the date got expired.
|
||||
now = datetime.datetime.now(tz=bson.tz_util.utc)
|
||||
lookup_expired = lookup.copy()
|
||||
lookup_expired['link_expires'] = {'$lte': now}
|
||||
|
||||
cursor = current_app.data.find('files', parsed_req, lookup_expired)
|
||||
for file_doc in cursor:
|
||||
# log.debug('Updating expired links for file %r.', file_doc['_id'])
|
||||
_generate_all_links(file_doc, now)
|
||||
|
||||
|
||||
def refresh_links_for_project(project_uuid, chunk_size, expiry_seconds):
|
||||
if chunk_size:
|
||||
log.info('Refreshing the first %i links for project %s',
|
||||
chunk_size, project_uuid)
|
||||
else:
|
||||
log.info('Refreshing all links for project %s', project_uuid)
|
||||
|
||||
# Retrieve expired links.
|
||||
files_collection = current_app.data.driver.db['files']
|
||||
|
||||
now = datetime.datetime.now(tz=bson.tz_util.utc)
|
||||
expire_before = now + datetime.timedelta(seconds=expiry_seconds)
|
||||
log.info('Limiting to links that expire before %s', expire_before)
|
||||
|
||||
to_refresh = files_collection.find(
|
||||
{'project': ObjectId(project_uuid),
|
||||
'link_expires': {'$lt': expire_before},
|
||||
}).sort([('link_expires', pymongo.ASCENDING)]).limit(chunk_size)
|
||||
|
||||
if to_refresh.count() == 0:
|
||||
log.info('No links to refresh.')
|
||||
return
|
||||
|
||||
for file_doc in to_refresh:
|
||||
log.debug('Refreshing links for file %s', file_doc['_id'])
|
||||
_generate_all_links(file_doc, now)
|
||||
|
||||
log.info('Refreshed %i links', min(chunk_size, to_refresh.count()))
|
||||
|
||||
|
||||
def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
|
||||
import gcloud.exceptions
|
||||
|
||||
# Retrieve expired links.
|
||||
files_collection = current_app.data.driver.db['files']
|
||||
proj_coll = current_app.data.driver.db['projects']
|
||||
|
||||
now = datetime.datetime.now(tz=bson.tz_util.utc)
|
||||
expire_before = now + datetime.timedelta(seconds=expiry_seconds)
|
||||
log.info('Limiting to links that expire before %s', expire_before)
|
||||
|
||||
to_refresh = files_collection.find(
|
||||
{'$or': [{'backend': backend_name, 'link_expires': None},
|
||||
{'backend': backend_name, 'link_expires': {
|
||||
'$lt': expire_before}},
|
||||
{'backend': backend_name, 'link': None}]
|
||||
}).sort([('link_expires', pymongo.ASCENDING)]).limit(
|
||||
chunk_size).batch_size(5)
|
||||
|
||||
if to_refresh.count() == 0:
|
||||
log.info('No links to refresh.')
|
||||
return
|
||||
|
||||
refreshed = 0
|
||||
for file_doc in to_refresh:
|
||||
try:
|
||||
file_id = file_doc['_id']
|
||||
project_id = file_doc.get('project')
|
||||
if project_id is None:
|
||||
log.debug('Skipping file %s, it has no project.', file_id)
|
||||
continue
|
||||
|
||||
count = proj_coll.count({'_id': project_id, '$or': [
|
||||
{'_deleted': {'$exists': False}},
|
||||
{'_deleted': False},
|
||||
]})
|
||||
|
||||
if count == 0:
|
||||
log.debug('Skipping file %s, project %s does not exist.',
|
||||
file_id, project_id)
|
||||
continue
|
||||
|
||||
if 'file_path' not in file_doc:
|
||||
log.warning("Skipping file %s, missing 'file_path' property.",
|
||||
file_id)
|
||||
continue
|
||||
|
||||
log.debug('Refreshing links for file %s', file_id)
|
||||
|
||||
try:
|
||||
_generate_all_links(file_doc, now)
|
||||
except gcloud.exceptions.Forbidden:
|
||||
log.warning('Skipping file %s, GCS forbids us access to '
|
||||
'project %s bucket.', file_id, project_id)
|
||||
continue
|
||||
refreshed += 1
|
||||
except KeyboardInterrupt:
|
||||
log.warning('Aborting due to KeyboardInterrupt after refreshing %i '
|
||||
'links', refreshed)
|
||||
return
|
||||
|
||||
log.info('Refreshed %i links', refreshed)
|
||||
|
||||
|
||||
@require_login()
|
||||
def create_file_doc(name, filename, content_type, length, project,
|
||||
backend='gcs', **extra_fields):
|
||||
"""Creates a minimal File document for storage in MongoDB.
|
||||
|
||||
Doesn't save it to MongoDB yet.
|
||||
"""
|
||||
|
||||
current_user = g.get('current_user')
|
||||
|
||||
file_doc = {'name': name,
|
||||
'filename': filename,
|
||||
'file_path': '',
|
||||
'user': current_user['user_id'],
|
||||
'backend': backend,
|
||||
'md5': '',
|
||||
'content_type': content_type,
|
||||
'length': length,
|
||||
'project': project}
|
||||
file_doc.update(extra_fields)
|
||||
|
||||
return file_doc
|
||||
|
||||
|
||||
def override_content_type(uploaded_file):
|
||||
"""Overrides the content type based on file extensions.
|
||||
|
||||
:param uploaded_file: file from request.files['form-key']
|
||||
:type uploaded_file: werkzeug.datastructures.FileStorage
|
||||
"""
|
||||
|
||||
# Possibly use the browser-provided mime type
|
||||
mimetype = uploaded_file.mimetype
|
||||
if '/' in mimetype:
|
||||
mimecat = mimetype.split('/')[0]
|
||||
if mimecat in {'video', 'audio', 'image'}:
|
||||
# The browser's mime type is probably ok, just use it.
|
||||
return
|
||||
|
||||
# And then use it to set the mime type.
|
||||
(mimetype, encoding) = mimetypes.guess_type(uploaded_file.filename)
|
||||
|
||||
# Only override the mime type if we can detect it, otherwise just
|
||||
# keep whatever the browser gave us.
|
||||
if mimetype:
|
||||
# content_type property can't be set directly
|
||||
uploaded_file.headers['content-type'] = mimetype
|
||||
|
||||
# It has this attribute because we used uploaded_file.mimetype earlier in this
|
||||
# function.
|
||||
del uploaded_file._parsed_content_type
|
||||
|
||||
|
||||
def assert_file_size_allowed(file_size):
|
||||
"""Asserts that the current user is allowed to upload a file of the given size.
|
||||
|
||||
:raises werkzeug.exceptions.RequestEntityTooLarge: if the size is not allowed.
|
||||
"""
|
||||
|
||||
roles = current_app.config['ROLES_FOR_UNLIMITED_UPLOADS']
|
||||
if user_matches_roles(require_roles=roles):
|
||||
return
|
||||
|
||||
filesize_limit = current_app.config['FILESIZE_LIMIT_BYTES_NONSUBS']
|
||||
if file_size < filesize_limit:
|
||||
return
|
||||
|
||||
filesize_limit_mb = filesize_limit / 2.0 ** 20
|
||||
log.info('User %s tried to upload a %.3f MiB file, but is only allowed '
|
||||
'%.3f MiB.',
|
||||
authentication.current_user_id(), file_size / 2.0 ** 20,
|
||||
filesize_limit_mb)
|
||||
raise wz_exceptions.RequestEntityTooLarge(
|
||||
'To upload files larger than %i MiB, subscribe to Blender Cloud' %
|
||||
filesize_limit_mb)
|
||||
|
||||
|
||||
@file_storage.route('/stream/<string:project_id>', methods=['POST', 'OPTIONS'])
|
||||
@require_login()
|
||||
def stream_to_gcs(project_id):
|
||||
project_oid = utils.str2id(project_id)
|
||||
|
||||
projects = current_app.data.driver.db['projects']
|
||||
project = projects.find_one(project_oid, projection={'_id': 1})
|
||||
|
||||
if not project:
|
||||
raise wz_exceptions.NotFound('Project %s does not exist' % project_id)
|
||||
|
||||
log.info('Streaming file to bucket for project=%s user_id=%s', project_id,
|
||||
authentication.current_user_id())
|
||||
log.info('request.headers[Origin] = %r', request.headers.get('Origin'))
|
||||
|
||||
uploaded_file = request.files['file']
|
||||
|
||||
# Not every upload has a Content-Length header. If it was passed, we might
|
||||
# as well check for its value before we require the user to upload the
|
||||
# entire file. (At least I hope that this part of the code is processed
|
||||
# before the body is read in its entirety)
|
||||
if uploaded_file.content_length:
|
||||
assert_file_size_allowed(uploaded_file.content_length)
|
||||
|
||||
override_content_type(uploaded_file)
|
||||
if not uploaded_file.content_type:
|
||||
log.warning('File uploaded to project %s without content type.', project_oid)
|
||||
raise wz_exceptions.BadRequest('Missing content type.')
|
||||
|
||||
if uploaded_file.content_type.startswith('image/'):
|
||||
# We need to do local thumbnailing, so we have to write the stream
|
||||
# both to Google Cloud Storage and to local storage.
|
||||
local_file = tempfile.NamedTemporaryFile(dir=current_app.config['STORAGE_DIR'])
|
||||
uploaded_file.save(local_file)
|
||||
local_file.seek(0) # Make sure that a re-read starts from the beginning.
|
||||
stream_for_gcs = local_file
|
||||
else:
|
||||
local_file = None
|
||||
stream_for_gcs = uploaded_file.stream
|
||||
|
||||
# Figure out the file size, as we need to pass this in explicitly to GCloud.
|
||||
# Otherwise it always uses os.fstat(file_obj.fileno()).st_size, which isn't
|
||||
# supported by a BytesIO object (even though it does have a fileno attribute).
|
||||
if isinstance(stream_for_gcs, io.BytesIO):
|
||||
file_size = len(stream_for_gcs.getvalue())
|
||||
else:
|
||||
file_size = os.fstat(stream_for_gcs.fileno()).st_size
|
||||
|
||||
# Check the file size again, now that we know its size for sure.
|
||||
assert_file_size_allowed(file_size)
|
||||
|
||||
# Create file document in MongoDB.
|
||||
file_id, internal_fname, status = create_file_doc_for_upload(project_oid, uploaded_file)
|
||||
|
||||
if current_app.config['TESTING']:
|
||||
log.warning('NOT streaming to GCS because TESTING=%r', current_app.config['TESTING'])
|
||||
# Fake a Blob object.
|
||||
gcs = None
|
||||
blob = type('Blob', (), {'size': file_size})
|
||||
else:
|
||||
# Upload the file to GCS.
|
||||
from gcloud.streaming import transfer
|
||||
# Files larger than this many bytes will be streamed directly from disk, smaller
|
||||
# ones will be read into memory and then uploaded.
|
||||
transfer.RESUMABLE_UPLOAD_THRESHOLD = 102400
|
||||
try:
|
||||
gcs = GoogleCloudStorageBucket(project_id)
|
||||
blob = gcs.bucket.blob('_/' + internal_fname, chunk_size=256 * 1024 * 2)
|
||||
blob.upload_from_file(stream_for_gcs, size=file_size,
|
||||
content_type=uploaded_file.mimetype)
|
||||
except Exception:
|
||||
log.exception('Error uploading file to Google Cloud Storage (GCS),'
|
||||
' aborting handling of uploaded file (id=%s).', file_id)
|
||||
update_file_doc(file_id, status='failed')
|
||||
raise wz_exceptions.InternalServerError('Unable to stream file to Google Cloud Storage')
|
||||
|
||||
if stream_for_gcs.closed:
|
||||
log.error('Eek, GCS closed its stream, Andy is not going to like this.')
|
||||
|
||||
# Reload the blob to get the file size according to Google.
|
||||
blob.reload()
|
||||
update_file_doc(file_id,
|
||||
status='queued_for_processing',
|
||||
file_path=internal_fname,
|
||||
length=blob.size,
|
||||
content_type=uploaded_file.mimetype)
|
||||
|
||||
process_file(gcs, file_id, local_file)
|
||||
|
||||
# Local processing is done, we can close the local file so it is removed.
|
||||
if local_file is not None:
|
||||
local_file.close()
|
||||
|
||||
log.debug('Handled uploaded file id=%s, fname=%s, size=%i', file_id, internal_fname, blob.size)
|
||||
|
||||
# Status is 200 if the file already existed, and 201 if it was newly created.
|
||||
# TODO: add a link to a thumbnail in the response.
|
||||
resp = jsonify(status='ok', file_id=str(file_id))
|
||||
resp.status_code = status
|
||||
add_access_control_headers(resp)
|
||||
return resp
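
# A minimal sketch, not part of the original commit: calling the streaming
# endpoint above from a client using the third-party 'requests' library. The
# host, port, '/storage' mount point and the way the authentication token is
# passed are assumptions; the file goes in the 'file' form field, as read by
# stream_to_gcs().
def _example_stream_upload(path, project_id, auth_token,
                           base_url='http://localhost:5000'):
    import requests

    with open(path, 'rb') as fileobj:
        resp = requests.post('%s/storage/stream/%s' % (base_url, project_id),
                             files={'file': fileobj},
                             auth=(auth_token, ''))  # token as Basic username
    resp.raise_for_status()
    return resp.json()['file_id']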
|
||||
|
||||
|
||||
def add_access_control_headers(resp):
|
||||
"""Allows cross-site requests from the configured domain."""
|
||||
|
||||
if 'Origin' not in request.headers:
|
||||
return resp
|
||||
|
||||
resp.headers['Access-Control-Allow-Origin'] = request.headers['Origin']
|
||||
resp.headers['Access-Control-Allow-Credentials'] = 'true'
|
||||
return resp
|
||||
|
||||
|
||||
def update_file_doc(file_id, **updates):
|
||||
files = current_app.data.driver.db['files']
|
||||
res = files.update_one({'_id': ObjectId(file_id)},
|
||||
{'$set': updates})
|
||||
log.debug('update_file_doc(%s, %s): %i matched, %i updated.',
|
||||
file_id, updates, res.matched_count, res.modified_count)
|
||||
return res
|
||||
|
||||
|
||||
def create_file_doc_for_upload(project_id, uploaded_file):
|
||||
"""Creates a secure filename and a document in MongoDB for the file.
|
||||
|
||||
The (project_id, filename) tuple should be unique. If such a document already
|
||||
exists, it is updated with the new file.
|
||||
|
||||
:param uploaded_file: file from request.files['form-key']
|
||||
:type uploaded_file: werkzeug.datastructures.FileStorage
|
||||
:returns: a tuple (file_id, filename, status), where 'filename' is the internal
|
||||
filename used on GCS.
|
||||
"""
|
||||
|
||||
project_id = ObjectId(project_id)
|
||||
|
||||
# Hash the filename with path info to get the internal name. This should
|
||||
# be unique for the project.
|
||||
# internal_filename = uploaded_file.filename
|
||||
_, ext = os.path.splitext(uploaded_file.filename)
|
||||
internal_filename = uuid.uuid4().hex + ext
|
||||
|
||||
# For now, we don't support overwriting files, and create a new one every time.
|
||||
# # See if we can find a pre-existing file doc.
|
||||
# files = current_app.data.driver.db['files']
|
||||
# file_doc = files.find_one({'project': project_id,
|
||||
# 'name': internal_filename})
|
||||
file_doc = None
|
||||
|
||||
# TODO: at some point do name-based and content-based content-type sniffing.
|
||||
new_props = {'filename': uploaded_file.filename,
|
||||
'content_type': uploaded_file.mimetype,
|
||||
'length': uploaded_file.content_length,
|
||||
'project': project_id,
|
||||
'status': 'uploading'}
|
||||
|
||||
if file_doc is None:
|
||||
# Create a file document on MongoDB for this file.
|
||||
file_doc = create_file_doc(name=internal_filename, **new_props)
|
||||
file_fields, _, _, status = current_app.post_internal('files', file_doc)
|
||||
else:
|
||||
file_doc.update(new_props)
|
||||
file_fields, _, _, status = current_app.put_internal('files', remove_private_keys(file_doc))
|
||||
|
||||
if status not in (200, 201):
|
||||
log.error('Unable to create new file document in MongoDB, status=%i: %s',
|
||||
status, file_fields)
|
||||
raise wz_exceptions.InternalServerError()
|
||||
|
||||
return file_fields['_id'], internal_filename, status
|
||||
|
||||
|
||||
def compute_aggregate_length(file_doc, original=None):
|
||||
"""Computes the total length (in bytes) of the file and all variations.
|
||||
|
||||
Stores the result in file_doc['length_aggregate_in_bytes']
|
||||
"""
|
||||
|
||||
# Compute total size of all variations.
|
||||
variations = file_doc.get('variations', ())
|
||||
var_length = sum(var.get('length', 0) for var in variations)
|
||||
|
||||
file_doc['length_aggregate_in_bytes'] = file_doc.get('length', 0) + var_length
|
||||
|
||||
|
||||
def compute_aggregate_length_items(file_docs):
|
||||
for file_doc in file_docs:
|
||||
compute_aggregate_length(file_doc)
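
# A minimal sketch, not part of the original commit: compute_aggregate_length()
# simply adds the original file's length to the lengths of all its variations.
def _example_aggregate_length():
    doc = {'length': 100, 'variations': [{'length': 40}, {'length': 10}]}
    compute_aggregate_length(doc)
    assert doc['length_aggregate_in_bytes'] == 150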
|
||||
|
||||
|
||||
def setup_app(app, url_prefix):
|
||||
app.on_pre_GET_files += on_pre_get_files
|
||||
|
||||
app.on_fetched_item_files += before_returning_file
|
||||
app.on_fetched_resource_files += before_returning_files
|
||||
|
||||
app.on_delete_item_files += before_deleting_file
|
||||
|
||||
app.on_update_files += compute_aggregate_length
|
||||
app.on_replace_files += compute_aggregate_length
|
||||
app.on_insert_files += compute_aggregate_length_items
|
||||
|
||||
app.register_api_blueprint(file_storage, url_prefix=url_prefix)
|
128
pillar/api/latest.py
Normal file
@@ -0,0 +1,128 @@
|
||||
import itertools
|
||||
|
||||
import pymongo
|
||||
from flask import Blueprint, current_app
|
||||
|
||||
from pillar.api.utils import jsonify
|
||||
|
||||
blueprint = Blueprint('latest', __name__)
|
||||
|
||||
|
||||
def keep_fetching(collection, db_filter, projection, sort, py_filter,
|
||||
batch_size=12):
|
||||
"""Yields results for which py_filter returns True"""
|
||||
|
||||
projection['_deleted'] = 1
|
||||
curs = collection.find(db_filter, projection).sort(sort)
|
||||
curs.batch_size(batch_size)
|
||||
|
||||
for doc in curs:
|
||||
if doc.get('_deleted'):
|
||||
continue
|
||||
doc.pop('_deleted', None)
|
||||
if py_filter(doc):
|
||||
yield doc
|
||||
|
||||
|
||||
def latest_nodes(db_filter, projection, py_filter, limit):
|
||||
nodes = current_app.data.driver.db['nodes']
|
||||
|
||||
proj = {
|
||||
'_created': 1,
|
||||
'_updated': 1,
|
||||
}
|
||||
proj.update(projection)
|
||||
|
||||
latest = keep_fetching(nodes, db_filter, proj,
|
||||
[('_created', pymongo.DESCENDING)],
|
||||
py_filter, limit)
|
||||
|
||||
result = list(itertools.islice(latest, limit))
|
||||
return result
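
# A minimal sketch, not part of the original commit, of how latest_nodes() is
# driven (compare latest_assets() below). The 'post' node type is assumed to
# exist; only published posts from public projects would be returned.
def _example_latest_posts(limit=5):
    return latest_nodes({'node_type': 'post',
                         'properties.status': 'published'},
                        {'name': 1, 'project': 1, 'user': 1},
                        has_public_project, limit)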
|
||||
|
||||
|
||||
def has_public_project(node_doc):
|
||||
"""Returns True iff the project the node belongs to is public."""
|
||||
|
||||
project_id = node_doc.get('project')
|
||||
return is_project_public(project_id)
|
||||
|
||||
|
||||
# TODO: cache result, for a limited amt. of time, or for this HTTP request.
|
||||
def is_project_public(project_id):
|
||||
"""Returns True iff the project is public."""
|
||||
|
||||
project = current_app.data.driver.db['projects'].find_one(project_id)
|
||||
if not project:
|
||||
return False
|
||||
|
||||
return not project.get('is_private')
|
||||
|
||||
|
||||
@blueprint.route('/assets')
|
||||
def latest_assets():
|
||||
latest = latest_nodes({'node_type': 'asset',
|
||||
'properties.status': 'published'},
|
||||
{'name': 1, 'project': 1, 'user': 1, 'node_type': 1,
|
||||
'parent': 1, 'picture': 1, 'properties.status': 1,
|
||||
'properties.content_type': 1,
|
||||
'permissions.world': 1},
|
||||
has_public_project, 12)
|
||||
|
||||
embed_user(latest)
|
||||
embed_project(latest)
|
||||
|
||||
return jsonify({'_items': latest})
|
||||
|
||||
|
||||
def embed_user(latest):
|
||||
users = current_app.data.driver.db['users']
|
||||
|
||||
for comment in latest:
|
||||
user_id = comment['user']
|
||||
comment['user'] = users.find_one(user_id, {
|
||||
'auth': 0, 'groups': 0, 'roles': 0, 'settings': 0, 'email': 0,
|
||||
'_created': 0, '_updated': 0, '_etag': 0})
|
||||
|
||||
|
||||
def embed_project(latest):
|
||||
projects = current_app.data.driver.db['projects']
|
||||
|
||||
for comment in latest:
|
||||
project_id = comment['project']
|
||||
comment['project'] = projects.find_one(project_id, {'_id': 1, 'name': 1,
|
||||
'url': 1})
|
||||
|
||||
|
||||
@blueprint.route('/comments')
|
||||
def latest_comments():
|
||||
latest = latest_nodes({'node_type': 'comment',
|
||||
'properties.status': 'published'},
|
||||
{'project': 1, 'parent': 1, 'user': 1,
|
||||
'properties.content': 1, 'node_type': 1,
|
||||
'properties.status': 1,
|
||||
'properties.is_reply': 1},
|
||||
has_public_project, 6)
|
||||
|
||||
# Embed the comments' parents.
|
||||
nodes = current_app.data.driver.db['nodes']
|
||||
parents = {}
|
||||
for comment in latest:
|
||||
parent_id = comment['parent']
|
||||
|
||||
if parent_id in parents:
|
||||
comment['parent'] = parents[parent_id]
|
||||
continue
|
||||
|
||||
parent = nodes.find_one(parent_id)
|
||||
parents[parent_id] = parent
|
||||
comment['parent'] = parent
|
||||
|
||||
embed_project(latest)
|
||||
embed_user(latest)
|
||||
|
||||
return jsonify({'_items': latest})
|
||||
|
||||
|
||||
def setup_app(app, url_prefix):
|
||||
app.register_api_blueprint(blueprint, url_prefix=url_prefix)
|
97
pillar/api/local_auth.py
Normal file
@@ -0,0 +1,97 @@
|
||||
import base64
|
||||
import hashlib
|
||||
import logging
|
||||
|
||||
import bcrypt
|
||||
import datetime
|
||||
import rsa.randnum
|
||||
from bson import tz_util
|
||||
from flask import abort, Blueprint, current_app, jsonify, request
|
||||
from pillar.api.utils.authentication import create_new_user_document
|
||||
from pillar.api.utils.authentication import make_unique_username
|
||||
from pillar.api.utils.authentication import store_token
|
||||
|
||||
blueprint = Blueprint('authentication', __name__)
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_auth_credentials(user, provider):
|
||||
return next((credentials for credentials in user['auth'] if 'provider'
|
||||
in credentials and credentials['provider'] == provider), None)
|
||||
|
||||
|
||||
def create_local_user(email, password):
|
||||
"""For internal user only. Given username and password, create a user."""
|
||||
# Hash the password
|
||||
hashed_password = hash_password(password, bcrypt.gensalt())
|
||||
db_user = create_new_user_document(email, '', email, provider='local',
|
||||
token=hashed_password)
|
||||
# Make username unique
|
||||
db_user['username'] = make_unique_username(email)
|
||||
# Create the user
|
||||
r, _, _, status = current_app.post_internal('users', db_user)
|
||||
if status != 201:
|
||||
log.error('internal response: %r %r', status, r)
|
||||
return abort(500)
|
||||
# Return user ID
|
||||
return r['_id']
|
||||
|
||||
|
||||
@blueprint.route('/make-token', methods=['POST'])
|
||||
def make_token():
|
||||
"""Direct login for a user, without OAuth, using local database. Generates
|
||||
a token that is passed back to Pillar Web and used in subsequent
|
||||
transactions.
|
||||
|
||||
:return: a token string
|
||||
"""
|
||||
username = request.form['username']
|
||||
password = request.form['password']
|
||||
|
||||
# Look up user in db
|
||||
users_collection = current_app.data.driver.db['users']
|
||||
user = users_collection.find_one({'username': username})
|
||||
if not user:
|
||||
return abort(403)
|
||||
# Check if user has "local" auth type
|
||||
credentials = get_auth_credentials(user, 'local')
|
||||
if not credentials:
|
||||
return abort(403)
|
||||
# Verify password
|
||||
salt = credentials['token']
|
||||
hashed_password = hash_password(password, salt)
|
||||
if hashed_password != credentials['token']:
|
||||
return abort(403)
|
||||
|
||||
token = generate_and_store_token(user['_id'])
|
||||
return jsonify(token=token['token'])
|
||||
|
||||
|
||||
def generate_and_store_token(user_id, days=15, prefix=''):
|
||||
"""Generates token based on random bits.
|
||||
|
||||
:param user_id: ObjectId of the owning user.
|
||||
:param days: token will expire in this many days.
|
||||
:param prefix: the token will be prefixed by this string, for easy identification.
|
||||
:return: the token document.
|
||||
"""
|
||||
|
||||
random_bits = rsa.randnum.read_random_bits(256)
|
||||
|
||||
# Use 'xy' as altchars to prevent + and / characters from appearing.
|
||||
# We never have to b64decode the string anyway.
|
||||
token = prefix + base64.b64encode(random_bits, altchars='xy').strip('=')
|
||||
|
||||
token_expiry = datetime.datetime.now(tz=tz_util.utc) + datetime.timedelta(days=days)
|
||||
return store_token(user_id, token, token_expiry)
|
||||
|
||||
|
||||
def hash_password(password, salt):
|
||||
if isinstance(salt, unicode):
|
||||
salt = salt.encode('utf-8')
|
||||
encoded_password = base64.b64encode(hashlib.sha256(password).digest())
|
||||
return bcrypt.hashpw(encoded_password, salt)
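
# A minimal sketch, not part of the original commit: how make_token() verifies
# a password. bcrypt embeds the salt in its own output, so hashing the supplied
# password again with the stored hash as the salt must reproduce that hash.
def _example_password_roundtrip(password='s3cret'):
    stored = hash_password(password, bcrypt.gensalt())
    assert hash_password(password, stored) == stored
    assert hash_password('not-the-password', stored) != stored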
|
||||
|
||||
|
||||
def setup_app(app, url_prefix):
|
||||
app.register_api_blueprint(blueprint, url_prefix=url_prefix)
|
8
pillar/api/node_types/__init__.py
Normal file
@@ -0,0 +1,8 @@
_file_embedded_schema = {
    'type': 'objectid',
    'data_relation': {
        'resource': 'files',
        'field': '_id',
        'embeddable': True
    }
}
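
# A minimal sketch, not part of the original commit: node type definitions
# reuse this fragment wherever a field should point at a document in the
# 'files' collection; the 'thumbnail' field name here is hypothetical.
_example_dyn_schema_fragment = {
    'thumbnail': _file_embedded_schema,
}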
5
pillar/api/node_types/act.py
Normal file
@@ -0,0 +1,5 @@
node_type_act = {
    'name': 'act',
    'description': 'Act node type',
    'parent': []
}
74
pillar/api/node_types/asset.py
Normal file
@@ -0,0 +1,74 @@
|
||||
from pillar.api.node_types import _file_embedded_schema
|
||||
|
||||
node_type_asset = {
|
||||
'name': 'asset',
|
||||
'description': 'Basic Asset Type',
|
||||
# This data type does not have parent limitations (can be child
|
||||
# of any node). An empty parent declaration is required.
|
||||
'parent': ['group', ],
|
||||
'dyn_schema': {
|
||||
'status': {
|
||||
'type': 'string',
|
||||
'allowed': [
|
||||
'published',
|
||||
'pending',
|
||||
'processing'
|
||||
],
|
||||
},
|
||||
# Used for sorting within the context of a group
|
||||
'order': {
|
||||
'type': 'integer'
|
||||
},
|
||||
# We expose the type of asset we point to. Usually image, video,
|
||||
# zipfile, etc.
|
||||
'content_type': {
|
||||
'type': 'string'
|
||||
},
|
||||
# We point to the original file (and use it to extract any relevant
|
||||
# variation useful for our scope).
|
||||
'file': _file_embedded_schema,
|
||||
'attachments': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'field': {'type': 'string'},
|
||||
'files': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'file': _file_embedded_schema,
|
||||
'slug': {'type': 'string', 'minlength': 1},
|
||||
'size': {'type': 'string'}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
# Tags for search
|
||||
'tags': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'string'
|
||||
}
|
||||
},
|
||||
# Simple string to represent hierarchical categories. Should follow
|
||||
# this schema: "Root > Nested Category > One More Nested Category"
|
||||
'categories': {
|
||||
'type': 'string'
|
||||
}
|
||||
},
|
||||
'form_schema': {
|
||||
'status': {},
|
||||
'content_type': {'visible': False},
|
||||
'file': {},
|
||||
'attachments': {'visible': False},
|
||||
'order': {'visible': False},
|
||||
'tags': {'visible': False},
|
||||
'categories': {'visible': False}
|
||||
},
|
||||
'permissions': {
|
||||
}
|
||||
}
|
29
pillar/api/node_types/blog.py
Normal file
@@ -0,0 +1,29 @@
|
||||
node_type_blog = {
|
||||
'name': 'blog',
|
||||
'description': 'Container for node_type post.',
|
||||
'dyn_schema': {
|
||||
# Path for a custom template to be used for rendering the posts
|
||||
'template': {
|
||||
'type': 'string',
|
||||
},
|
||||
'categories' : {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'string'
|
||||
}
|
||||
}
|
||||
},
|
||||
'form_schema': {
|
||||
'categories': {},
|
||||
'template': {},
|
||||
},
|
||||
'parent': ['project',],
|
||||
'permissions': {
|
||||
# 'groups': [{
|
||||
# 'group': app.config['ADMIN_USER_GROUP'],
|
||||
# 'methods': ['GET', 'PUT', 'POST']
|
||||
# }],
|
||||
# 'users': [],
|
||||
# 'world': ['GET']
|
||||
}
|
||||
}
|
65
pillar/api/node_types/comment.py
Normal file
@@ -0,0 +1,65 @@
|
||||
node_type_comment = {
|
||||
'name': 'comment',
|
||||
'description': 'Comments for asset nodes, pages, etc.',
|
||||
'dyn_schema': {
|
||||
# The actual comment content (initially Markdown format)
|
||||
'content': {
|
||||
'type': 'string',
|
||||
'minlength': 5,
|
||||
},
|
||||
'status': {
|
||||
'type': 'string',
|
||||
'allowed': [
|
||||
'published',
|
||||
'flagged',
|
||||
'edited'
|
||||
],
|
||||
},
|
||||
# Total count of positive ratings (updated at every rating action)
|
||||
'rating_positive': {
|
||||
'type': 'integer',
|
||||
},
|
||||
# Total count of negative ratings (updated at every rating action)
|
||||
'rating_negative': {
|
||||
'type': 'integer',
|
||||
},
|
||||
# Collection of ratings, keyed by user
|
||||
'ratings': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'user': {
|
||||
'type': 'objectid'
|
||||
},
|
||||
'is_positive': {
|
||||
'type': 'boolean'
|
||||
},
|
||||
# Weight of the rating based on user rep and the context.
|
||||
# Currently we have the following weights:
|
||||
# - 1 auto null
|
||||
# - 2 manual null
|
||||
# - 3 auto valid
|
||||
# - 4 manual valid
|
||||
'weight': {
|
||||
'type': 'integer'
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'confidence': {'type': 'float'},
|
||||
'is_reply': {'type': 'boolean'}
|
||||
},
|
||||
'form_schema': {
|
||||
'content': {},
|
||||
'status': {},
|
||||
'rating_positive': {},
|
||||
'rating_negative': {},
|
||||
'ratings': {},
|
||||
'confidence': {},
|
||||
'is_reply': {}
|
||||
},
|
||||
'parent': ['asset', 'comment'],
|
||||
'permissions': {
|
||||
}
|
||||
}
|
33
pillar/api/node_types/group.py
Normal file
@@ -0,0 +1,33 @@
|
||||
node_type_group = {
|
||||
'name': 'group',
|
||||
'description': 'Generic group node type edited',
|
||||
'parent': ['group', 'project'],
|
||||
'dyn_schema': {
|
||||
# Used for sorting within the context of a group
|
||||
'order': {
|
||||
'type': 'integer'
|
||||
},
|
||||
'url': {
|
||||
'type': 'string',
|
||||
},
|
||||
'status': {
|
||||
'type': 'string',
|
||||
'allowed': [
|
||||
'published',
|
||||
'pending',
|
||||
],
|
||||
},
|
||||
'notes': {
|
||||
'type': 'string',
|
||||
'maxlength': 256,
|
||||
},
|
||||
},
|
||||
'form_schema': {
|
||||
'url': {'visible': False},
|
||||
'status': {},
|
||||
'notes': {'visible': False},
|
||||
'order': {'visible': False}
|
||||
},
|
||||
'permissions': {
|
||||
}
|
||||
}
|
22
pillar/api/node_types/group_hdri.py
Normal file
@@ -0,0 +1,22 @@
|
||||
node_type_group_hdri = {
|
||||
'name': 'group_hdri',
|
||||
'description': 'Group for HDRi node type',
|
||||
'parent': ['group_hdri', 'project'],
|
||||
'dyn_schema': {
|
||||
# Used for sorting within the context of a group
|
||||
'order': {
|
||||
'type': 'integer'
|
||||
},
|
||||
'status': {
|
||||
'type': 'string',
|
||||
'allowed': [
|
||||
'published',
|
||||
'pending',
|
||||
],
|
||||
}
|
||||
},
|
||||
'form_schema': {
|
||||
'status': {},
|
||||
'order': {}
|
||||
}
|
||||
}
|
22
pillar/api/node_types/group_texture.py
Normal file
@@ -0,0 +1,22 @@
node_type_group_texture = {
    'name': 'group_texture',
    'description': 'Group for texture node type',
    'parent': ['group_texture', 'project'],
    'dyn_schema': {
        # Used for sorting within the context of a group
        'order': {
            'type': 'integer'
        },
        'status': {
            'type': 'string',
            'allowed': [
                'published',
                'pending',
            ],
        }
    },
    'form_schema': {
        'status': {},
        'order': {}
    }
}
66
pillar/api/node_types/hdri.py
Normal file
@@ -0,0 +1,66 @@
|
||||
from pillar.api.node_types import _file_embedded_schema
|
||||
|
||||
node_type_hdri = {
|
||||
# When adding this node type, make sure to enable CORS from * on the GCS
|
||||
# bucket (https://cloud.google.com/storage/docs/cross-origin)
|
||||
'name': 'hdri',
|
||||
'description': 'HDR Image',
|
||||
'parent': ['group_hdri'],
|
||||
'dyn_schema': {
|
||||
'status': {
|
||||
'type': 'string',
|
||||
'allowed': [
|
||||
'published',
|
||||
'pending',
|
||||
],
|
||||
},
|
||||
# Used for sorting within the context of a group
|
||||
'order': {'type': 'integer'},
|
||||
# We point to the file resolutions (and use it to extract any relevant
# variation useful for our scope).
|
||||
'files': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'file': _file_embedded_schema,
|
||||
'resolution': {
|
||||
'type': 'string',
|
||||
'required': True}
|
||||
}
|
||||
}
|
||||
},
|
||||
# Tags for search
|
||||
'tags': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'string'
|
||||
}
|
||||
},
|
||||
# Simple string to represent hierarchical categories. Should follow
|
||||
# this schema: "Root > Nested Category > One More Nested Category"
|
||||
'categories': {
|
||||
'type': 'string'
|
||||
},
|
||||
'license_type': {
|
||||
'default': 'cc-by',
|
||||
'type': 'string',
|
||||
'allowed': [
|
||||
'cc-by',
|
||||
'cc-0',
|
||||
'cc-by-sa',
|
||||
'cc-by-nd',
|
||||
'cc-by-nc',
|
||||
'copyright'
|
||||
]
|
||||
},
|
||||
'license_notes': {
|
||||
'type': 'string'
|
||||
},
|
||||
},
|
||||
'form_schema': {
|
||||
'content_type': {'visible': False},
|
||||
'tags': {'visible': False},
|
||||
'categories': {'visible': False},
|
||||
}
|
||||
}
|
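As a sketch of how the 'files' list above is meant to be used: each entry pairs an embedded file reference with the resolution it stands for. The ObjectIds and resolution labels below are hypothetical.

from bson import ObjectId

# One entry per resolution variant of the same HDR image (values are made up).
example_hdri_files = [
    {'file': ObjectId('56c35b34c379cf0007b31bd0'), 'resolution': '1k'},
    {'file': ObjectId('56c35b34c379cf0007b31bd1'), 'resolution': '4k'},
    {'file': ObjectId('56c35b34c379cf0007b31bd2'), 'resolution': '16k'},
]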
54
pillar/api/node_types/page.py
Normal file
@@ -0,0 +1,54 @@
|
||||
from pillar.api.node_types import _file_embedded_schema
|
||||
|
||||
node_type_page = {
|
||||
'name': 'page',
|
||||
'description': 'A single page',
|
||||
'dyn_schema': {
|
||||
# The page content (Markdown format)
|
||||
'content': {
|
||||
'type': 'string',
|
||||
'minlength': 5,
|
||||
'maxlength': 90000,
|
||||
'required': True
|
||||
},
|
||||
'status': {
|
||||
'type': 'string',
|
||||
'allowed': [
|
||||
'published',
|
||||
'pending'
|
||||
],
|
||||
'default': 'pending'
|
||||
},
|
||||
'url': {
|
||||
'type': 'string'
|
||||
},
|
||||
'attachments': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'field': {'type': 'string'},
|
||||
'files': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'file': _file_embedded_schema,
|
||||
'slug': {'type': 'string', 'minlength': 1},
|
||||
'size': {'type': 'string'}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'form_schema': {
|
||||
'content': {},
|
||||
'status': {},
|
||||
'url': {},
|
||||
'attachments': {'visible': False},
|
||||
},
|
||||
'parent': ['project', ],
|
||||
'permissions': {}
|
||||
}
|
59
pillar/api/node_types/post.py
Normal file
@@ -0,0 +1,59 @@
|
||||
from pillar.api.node_types import _file_embedded_schema
|
||||
|
||||
node_type_post = {
|
||||
'name': 'post',
|
||||
'description': 'A blog post, for any project',
|
||||
'dyn_schema': {
|
||||
# The blogpost content (Markdown format)
|
||||
'content': {
|
||||
'type': 'string',
|
||||
'minlength': 5,
|
||||
'maxlength': 90000,
|
||||
'required': True
|
||||
},
|
||||
'status': {
|
||||
'type': 'string',
|
||||
'allowed': [
|
||||
'published',
|
||||
'pending'
|
||||
],
|
||||
'default': 'pending'
|
||||
},
|
||||
# Global categories, will be enforced to be 1 word
|
||||
'category': {
|
||||
'type': 'string',
|
||||
},
|
||||
'url': {
|
||||
'type': 'string'
|
||||
},
|
||||
'attachments': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'field': {'type': 'string'},
|
||||
'files': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'file': _file_embedded_schema,
|
||||
'slug': {'type': 'string', 'minlength': 1},
|
||||
'size': {'type': 'string'}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'form_schema': {
|
||||
'content': {},
|
||||
'status': {},
|
||||
'category': {},
|
||||
'url': {},
|
||||
'attachments': {'visible': False},
|
||||
},
|
||||
'parent': ['blog', ],
|
||||
'permissions': {}
|
||||
}
|
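For illustration, the 'attachments' list ties a slug to a file; the matching '[slug]' tag inside the field named by 'field' is later replaced with the rendered link (see item_parse_attachments in pillar/api/nodes/__init__.py further down). All IDs and strings below are hypothetical.

from bson import ObjectId

example_post_properties = {
    'content': 'Week 12 production update.\n\n[header]\n\nMore news soon.',
    'status': 'published',
    'category': 'production',
    'url': 'week-12-update',
    'attachments': [
        {'field': 'properties.content',
         'files': [
             {'file': ObjectId('56c35b34c379cf0007b31be0'),
              'slug': 'header',
              'size': 'l'},
         ]},
    ],
}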
124
pillar/api/node_types/project.py
Normal file
@@ -0,0 +1,124 @@
|
||||
from pillar.api.node_types import _file_embedded_schema
|
||||
|
||||
node_type_project = {
|
||||
'name': 'project',
|
||||
'parent': {},
|
||||
'description': 'The official project type',
|
||||
'dyn_schema': {
|
||||
'category': {
|
||||
'type': 'string',
|
||||
'allowed': [
|
||||
'training',
|
||||
'film',
|
||||
'assets',
|
||||
'software',
|
||||
'game'
|
||||
],
|
||||
'required': True,
|
||||
},
|
||||
'is_private': {
|
||||
'type': 'boolean'
|
||||
},
|
||||
'url': {
|
||||
'type': 'string'
|
||||
},
|
||||
'organization': {
|
||||
'type': 'objectid',
|
||||
'nullable': True,
|
||||
'data_relation': {
|
||||
'resource': 'organizations',
|
||||
'field': '_id',
|
||||
'embeddable': True
|
||||
},
|
||||
},
|
||||
'owners': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'users': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'objectid',
|
||||
}
|
||||
},
|
||||
'groups': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'objectid',
|
||||
'data_relation': {
|
||||
'resource': 'groups',
|
||||
'field': '_id',
|
||||
'embeddable': True
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'status': {
|
||||
'type': 'string',
|
||||
'allowed': [
|
||||
'published',
|
||||
'pending',
|
||||
],
|
||||
},
|
||||
# Logo
|
||||
'picture_square': _file_embedded_schema,
|
||||
# Header
|
||||
'picture_header': _file_embedded_schema,
|
||||
# Short summary for the project
|
||||
'summary': {
|
||||
'type': 'string',
|
||||
'maxlength': 128
|
||||
},
|
||||
# Latest nodes being edited
|
||||
'nodes_latest': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'objectid',
|
||||
}
|
||||
},
|
||||
# Featured nodes, manually added
|
||||
'nodes_featured': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'objectid',
|
||||
}
|
||||
},
|
||||
# Latest blog posts, manually added
|
||||
'nodes_blog': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'objectid',
|
||||
}
|
||||
}
|
||||
},
|
||||
'form_schema': {
|
||||
'is_private': {},
|
||||
# TODO add group parsing
|
||||
'category': {},
|
||||
'url': {},
|
||||
'organization': {},
|
||||
'picture_square': {},
|
||||
'picture_header': {},
|
||||
'summary': {},
|
||||
'owners': {
|
||||
'schema': {
|
||||
'users': {},
|
||||
'groups': {
|
||||
'items': [('Group', 'name')],
|
||||
},
|
||||
}
|
||||
},
|
||||
'status': {},
|
||||
'nodes_featured': {},
|
||||
'nodes_latest': {},
|
||||
'nodes_blog': {}
|
||||
},
|
||||
'permissions': {
|
||||
# 'groups': [{
|
||||
# 'group': app.config['ADMIN_USER_GROUP'],
|
||||
# 'methods': ['GET', 'PUT', 'POST']
|
||||
# }],
|
||||
# 'users': [],
|
||||
# 'world': ['GET']
|
||||
}
|
||||
}
|
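A small, hypothetical example of the 'owners' value defined above: explicit user IDs plus group references (IDs are made up).

from bson import ObjectId

example_project_owners = {
    'users': [ObjectId('56c35b34c379cf0007b31bf0')],
    'groups': [ObjectId('56c35b34c379cf0007b31bf1')],
}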
5
pillar/api/node_types/scene.py
Normal file
@@ -0,0 +1,5 @@
node_type_scene = {
    'name': 'scene',
    'description': 'Scene node type',
    'parent': ['act'],
}
45
pillar/api/node_types/shot.py
Normal file
@@ -0,0 +1,45 @@
node_type_shot = {
    'name': 'shot',
    'description': 'Shot Node Type, for shots',
    'dyn_schema': {
        'url': {
            'type': 'string',
        },
        'cut_in': {
            'type': 'integer'
        },
        'cut_out': {
            'type': 'integer'
        },
        'status': {
            'type': 'string',
            'allowed': [
                'on_hold',
                'todo',
                'in_progress',
                'review',
                'final'
            ],
        },
        'notes': {
            'type': 'string',
            'maxlength': 256,
        },
        'shot_group': {
            'type': 'string',
            # 'data_relation': {
            #     'resource': 'nodes',
            #     'field': '_id',
            # },
        },
    },
    'form_schema': {
        'url': {},
        'cut_in': {},
        'cut_out': {},
        'status': {},
        'notes': {},
        'shot_group': {}
    },
    'parent': ['scene']
}
37
pillar/api/node_types/storage.py
Normal file
@@ -0,0 +1,37 @@
node_type_storage = {
    'name': 'storage',
    'description': 'Entrypoint to a remote or local storage solution',
    'dyn_schema': {
        # The project ID, used for lookups in the storage backend. For example
        # when using Google Cloud Storage, the project id will be the name
        # of the bucket.
        'project': {
            'type': 'objectid',
            'data_relation': {
                'resource': 'nodes',
                'field': '_id'
            },
        },
        # The entry point in a subdirectory of the main storage for the project
        'subdir': {
            'type': 'string',
        },
        # Which backend is used to store the files (gcs, pillar, bam, cdnsun)
        'backend': {
            'type': 'string',
        },
    },
    'form_schema': {
        'subdir': {},
        'project': {},
        'backend': {}
    },
    'parent': ['group', 'project'],
    'permissions': {
        # 'groups': [{
        #     'group': app.config['ADMIN_USER_GROUP'],
        #     'methods': ['GET', 'PUT', 'POST']
        # }],
        # 'users': [],
    }
}
107
pillar/api/node_types/task.py
Normal file
@@ -0,0 +1,107 @@
|
||||
node_type_task = {
|
||||
'name': 'task',
|
||||
'description': 'Task Node Type, for tasks',
|
||||
'dyn_schema': {
|
||||
'status': {
|
||||
'type': 'string',
|
||||
'allowed': [
|
||||
'todo',
|
||||
'in_progress',
|
||||
'on_hold',
|
||||
'approved',
|
||||
'cbb',
|
||||
'final',
|
||||
'review'
|
||||
],
|
||||
'required': True,
|
||||
},
|
||||
'filepath': {
|
||||
'type': 'string',
|
||||
},
|
||||
'revision': {
|
||||
'type': 'integer',
|
||||
},
|
||||
'owners': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'users': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'objectid',
|
||||
}
|
||||
},
|
||||
'groups': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'objectid',
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'time': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'start': {
|
||||
'type': 'datetime'
|
||||
},
|
||||
'duration': {
|
||||
'type': 'integer'
|
||||
},
|
||||
'chunks': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'start': {
|
||||
'type': 'datetime',
|
||||
},
|
||||
'duration': {
|
||||
'type': 'integer',
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
'is_conflicting' : {
|
||||
'type': 'boolean'
|
||||
},
|
||||
'is_processing' : {
|
||||
'type': 'boolean'
|
||||
},
|
||||
'is_open' : {
|
||||
'type': 'boolean'
|
||||
}
|
||||
|
||||
},
|
||||
'form_schema': {
|
||||
'status': {},
|
||||
'filepath': {},
|
||||
'revision': {},
|
||||
'owners': {
|
||||
'schema': {
|
||||
'users':{
|
||||
'items': [('User', 'first_name')],
|
||||
},
|
||||
'groups': {}
|
||||
}
|
||||
},
|
||||
'time': {
|
||||
'schema': {
|
||||
'start': {},
|
||||
'duration': {},
|
||||
'chunks': {
|
||||
'visible': False,
|
||||
'schema': {
|
||||
'start': {},
|
||||
'duration': {}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'is_conflicting': {},
|
||||
'is_open': {},
|
||||
'is_processing': {},
|
||||
},
|
||||
'parent': ['shot']
|
||||
}
|
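A sketch of the 'time' value above. The schema only fixes the field types; the duration unit (seconds here) and the reading of 'chunks' as separate work sessions are assumptions for illustration.

import datetime

example_task_time = {
    'start': datetime.datetime(2016, 8, 1, 9, 0, 0),
    'duration': 14400,  # assumed to be seconds; the schema only says 'integer'
    'chunks': [
        {'start': datetime.datetime(2016, 8, 1, 9, 0, 0), 'duration': 7200},
        {'start': datetime.datetime(2016, 8, 2, 9, 0, 0), 'duration': 7200},
    ],
}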
28
pillar/api/node_types/text.py
Normal file
@@ -0,0 +1,28 @@
node_type_text = {
    'name': 'text',
    'description': 'Text',
    'parent': ['group', 'project'],
    'dyn_schema': {
        'content': {
            'type': 'string',
            'required': True,
            'minlength': 3,
            'maxlength': 90000,
        },
        'shared_slug': {
            'type': 'string',
            'required': False,
        },
        'syntax': {  # for syntax highlighting
            'type': 'string',
            'required': False,
        },
        'node_expires': {
            'type': 'datetime',
            'required': False,
        },
    },
    'form_schema': {
        'shared_slug': {'visible': False},
    }
}
72
pillar/api/node_types/texture.py
Normal file
@@ -0,0 +1,72 @@
|
||||
from pillar.api.node_types import _file_embedded_schema
|
||||
|
||||
node_type_texture = {
|
||||
'name': 'texture',
|
||||
'description': 'Image Texture',
|
||||
# This data type does not have parent limitations (can be child
|
||||
# of any node). An empty parent declaration is required.
|
||||
'parent': ['group', ],
|
||||
'dyn_schema': {
|
||||
'status': {
|
||||
'type': 'string',
|
||||
'allowed': [
|
||||
'published',
|
||||
'pending',
|
||||
],
|
||||
},
|
||||
# Used for sorting within the context of a group
|
||||
'order': {'type': 'integer'},
|
||||
# We point to the file variations (and use it to extract any relevant
|
||||
# variation useful for our scope).
|
||||
'files': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'file': _file_embedded_schema,
|
||||
'map_type': {
|
||||
'type': 'string',
|
||||
'allowed': [
|
||||
'color',
|
||||
'specular',
|
||||
'bump',
|
||||
'normal',
|
||||
'translucency',
|
||||
'emission',
|
||||
'alpha'
|
||||
]}
|
||||
}
|
||||
}
|
||||
},
|
||||
# Properties of the texture files
|
||||
'is_tileable': {'type': 'boolean'},
|
||||
'is_landscape': {'type': 'boolean'},
|
||||
# Resolution in 'WIDTHxHEIGHT' format (e.g. 512x512)
|
||||
'resolution': {'type': 'string'},
|
||||
'aspect_ratio': {'type': 'float'},
|
||||
# Tags for search
|
||||
'tags': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'string'
|
||||
}
|
||||
},
|
||||
# Simple string to represent hierarchical categories. Should follow
|
||||
# this schema: "Root > Nested Category > One More Nested Category"
|
||||
'categories': {
|
||||
'type': 'string'
|
||||
}
|
||||
},
|
||||
'form_schema': {
|
||||
'status': {},
|
||||
'content_type': {'visible': False},
|
||||
'files': {},
|
||||
'is_tileable': {},
|
||||
'is_landscape': {},
|
||||
'resolution': {},
|
||||
'aspect_ratio': {},
|
||||
'order': {},
|
||||
'tags': {'visible': False},
|
||||
'categories': {'visible': False},
|
||||
}
|
||||
}
|
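For illustration, a 'files' list for a texture node pairs each embedded file with one of the allowed map types; the ObjectIds are hypothetical.

from bson import ObjectId

example_texture_files = [
    {'file': ObjectId('56c35b34c379cf0007b31c00'), 'map_type': 'color'},
    {'file': ObjectId('56c35b34c379cf0007b31c01'), 'map_type': 'normal'},
    {'file': ObjectId('56c35b34c379cf0007b31c02'), 'map_type': 'specular'},
]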
417
pillar/api/nodes/__init__.py
Normal file
@@ -0,0 +1,417 @@
|
||||
import base64
|
||||
import logging
|
||||
import urlparse
|
||||
|
||||
import pymongo.errors
|
||||
import rsa.randnum
|
||||
import werkzeug.exceptions as wz_exceptions
|
||||
from bson import ObjectId
|
||||
from flask import current_app, g, Blueprint, request
|
||||
from pillar.api import file_storage
|
||||
from pillar.api.activities import activity_subscribe, activity_object_add
|
||||
from pillar.api.utils.algolia import algolia_index_node_delete
|
||||
from pillar.api.utils.algolia import algolia_index_node_save
|
||||
from pillar.api.utils import str2id, jsonify
|
||||
from pillar.api.utils.authorization import check_permissions, require_login
|
||||
from pillar.api.utils.gcs import update_file_name
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
blueprint = Blueprint('nodes_api', __name__)
|
||||
ROLES_FOR_SHARING = {u'subscriber', u'demo'}
|
||||
|
||||
|
||||
@blueprint.route('/<node_id>/share', methods=['GET', 'POST'])
|
||||
@require_login(require_roles=ROLES_FOR_SHARING)
|
||||
def share_node(node_id):
|
||||
"""Shares a node, or returns sharing information."""
|
||||
|
||||
node_id = str2id(node_id)
|
||||
nodes_coll = current_app.data.driver.db['nodes']
|
||||
|
||||
node = nodes_coll.find_one({'_id': node_id},
|
||||
projection={
|
||||
'project': 1,
|
||||
'node_type': 1,
|
||||
'short_code': 1
|
||||
})
|
||||
|
||||
check_permissions('nodes', node, request.method)
|
||||
|
||||
log.info('Sharing node %s', node_id)
|
||||
|
||||
short_code = node.get('short_code')
|
||||
status = 200
|
||||
|
||||
if not short_code:
|
||||
if request.method == 'POST':
|
||||
short_code = generate_and_store_short_code(node)
|
||||
make_world_gettable(node)
|
||||
status = 201
|
||||
else:
|
||||
return '', 204
|
||||
|
||||
return jsonify(short_link_info(short_code), status=status)
|
||||
|
||||
|
||||
def generate_and_store_short_code(node):
|
||||
nodes_coll = current_app.data.driver.db['nodes']
|
||||
node_id = node['_id']
|
||||
|
||||
log.debug('Creating new short link for node %s', node_id)
|
||||
|
||||
max_attempts = 10
|
||||
for attempt in range(1, max_attempts):
|
||||
|
||||
# Generate a new short code
|
||||
short_code = create_short_code(node)
|
||||
log.debug('Created short code for node %s: %s', node_id, short_code)
|
||||
|
||||
node['short_code'] = short_code
|
||||
|
||||
# Store it in MongoDB
|
||||
try:
|
||||
result = nodes_coll.update_one({'_id': node_id},
|
||||
{'$set': {'short_code': short_code}})
|
||||
break
|
||||
except pymongo.errors.DuplicateKeyError:
|
||||
log.info('Duplicate key while creating short code, retrying (attempt %i/%i)',
|
||||
attempt, max_attempts)
|
||||
pass
|
||||
else:
|
||||
log.error('Unable to find unique short code for node %s after %i attempts, failing!',
|
||||
node_id, max_attempts)
|
||||
raise wz_exceptions.InternalServerError('Unable to create unique short code for node %s' %
|
||||
node_id)
|
||||
|
||||
# We were able to store a short code, now let's verify the result.
|
||||
if result.matched_count != 1:
|
||||
log.warning('Unable to update node %s with new short_links=%r', node_id, node['short_code'])
|
||||
raise wz_exceptions.InternalServerError('Unable to update node %s with new short links' %
|
||||
node_id)
|
||||
|
||||
return short_code
|
||||
|
||||
|
||||
def make_world_gettable(node):
|
||||
nodes_coll = current_app.data.driver.db['nodes']
|
||||
node_id = node['_id']
|
||||
|
||||
log.debug('Ensuring the world can read node %s', node_id)
|
||||
|
||||
world_perms = set(node.get('permissions', {}).get('world', []))
|
||||
world_perms.add(u'GET')
|
||||
world_perms = list(world_perms)
|
||||
|
||||
result = nodes_coll.update_one({'_id': node_id},
|
||||
{'$set': {'permissions.world': world_perms}})
|
||||
|
||||
if result.matched_count != 1:
|
||||
log.warning('Unable to update node %s with new permissions.world=%r', node_id, world_perms)
|
||||
raise wz_exceptions.InternalServerError('Unable to update node %s with new permissions' %
|
||||
node_id)
|
||||
|
||||
|
||||
def create_short_code(node):
|
||||
"""Generates a new 'short code' for the node."""
|
||||
|
||||
length = current_app.config['SHORT_CODE_LENGTH']
|
||||
bits = rsa.randnum.read_random_bits(32)
|
||||
short_code = base64.b64encode(bits, altchars='xy').rstrip('=')
|
||||
short_code = short_code[:length]
|
||||
|
||||
return short_code
|
||||
|
||||
|
||||
def short_link_info(short_code):
|
||||
"""Returns the short link info in a dict."""
|
||||
|
||||
short_link = urlparse.urljoin(current_app.config['SHORT_LINK_BASE_URL'], short_code)
|
||||
|
||||
return {
|
||||
'short_code': short_code,
|
||||
'short_link': short_link,
|
||||
}
|
||||
|
||||
|
||||
def item_parse_attachments(response):
|
||||
"""Before returning a response, check if the 'attachments' property is
|
||||
defined. If yes, load the file (for the moment only images) in the required
|
||||
variation, get the link and build a Markdown representation. Search in the
|
||||
'field' specified in the attachment and replace the 'slug' tag with the
|
||||
generated link.
|
||||
"""
|
||||
|
||||
attachments = response.get('properties', {}).get('attachments', None)
|
||||
if not attachments:
|
||||
return
|
||||
|
||||
files_collection = current_app.data.driver.db['files']
|
||||
for attachment in attachments:
|
||||
# Make a list from the property path
|
||||
field_name_path = attachment['field'].split('.')
|
||||
# This currently allows access only to properties nested inside
# the 'properties' property.
|
||||
if len(field_name_path) > 1:
|
||||
field_content = response[field_name_path[0]][field_name_path[1]]
|
||||
# This is for the "normal" first level property
|
||||
else:
|
||||
field_content = response[field_name_path[0]]
|
||||
for af in attachment['files']:
|
||||
slug = af['slug']
|
||||
slug_tag = "[{0}]".format(slug)
|
||||
f = files_collection.find_one({'_id': ObjectId(af['file'])})
|
||||
if f is None:
|
||||
af['file'] = None
|
||||
continue
|
||||
size = f['size'] if 'size' in f else 'l'
|
||||
|
||||
# Get the correct variation from the file
|
||||
file_storage.ensure_valid_link(f)
|
||||
thumbnail = next((item for item in f['variations'] if
|
||||
item['size'] == size), None)
|
||||
|
||||
# Build Markdown img string
|
||||
l = '![{0}]({1} "{2}")'.format(slug, thumbnail['link'], f['name'])
|
||||
# Parse the content of the file and replace the attachment
|
||||
# tag with the actual image link
|
||||
field_content = field_content.replace(slug_tag, l)
|
||||
|
||||
# Apply the parsed value back to the property. See above for
|
||||
# clarifications on how this is done.
|
||||
if len(field_name_path) > 1:
|
||||
response[field_name_path[0]][field_name_path[1]] = field_content
|
||||
else:
|
||||
response[field_name_path[0]] = field_content
|
||||
|
||||
|
||||
def resource_parse_attachments(response):
|
||||
for item in response['_items']:
|
||||
item_parse_attachments(item)
|
||||
|
||||
|
||||
def before_replacing_node(item, original):
|
||||
check_permissions('nodes', original, 'PUT')
|
||||
update_file_name(item)
|
||||
|
||||
|
||||
def after_replacing_node(item, original):
|
||||
"""Push an update to the Algolia index when a node item is updated. If the
|
||||
project is private, prevent public indexing.
|
||||
"""
|
||||
|
||||
projects_collection = current_app.data.driver.db['projects']
|
||||
project = projects_collection.find_one({'_id': item['project']})
|
||||
if project.get('is_private', False):
|
||||
# Skip index updating and return
|
||||
return
|
||||
|
||||
from algoliasearch.client import AlgoliaException
|
||||
status = item['properties'].get('status', 'unpublished')
|
||||
|
||||
if status == 'published':
|
||||
try:
|
||||
algolia_index_node_save(item)
|
||||
except AlgoliaException as ex:
|
||||
log.warning('Unable to push node info to Algolia for node %s; %s',
|
||||
item.get('_id'), ex)
|
||||
else:
|
||||
try:
|
||||
algolia_index_node_delete(item)
|
||||
except AlgoliaException as ex:
|
||||
log.warning('Unable to delete node info from Algolia for node %s; %s',
|
||||
item.get('_id'), ex)
|
||||
|
||||
|
||||
def before_inserting_nodes(items):
|
||||
"""Before inserting a node in the collection we check if the user is allowed
|
||||
and we append the project id to it.
|
||||
"""
|
||||
nodes_collection = current_app.data.driver.db['nodes']
|
||||
|
||||
def find_parent_project(node):
|
||||
"""Recursive function that finds the ultimate parent of a node."""
|
||||
if node and 'parent' in node:
|
||||
parent = nodes_collection.find_one({'_id': node['parent']})
|
||||
return find_parent_project(parent)
|
||||
if node:
|
||||
return node
|
||||
else:
|
||||
return None
|
||||
|
||||
for item in items:
|
||||
check_permissions('nodes', item, 'POST')
|
||||
if 'parent' in item and 'project' not in item:
|
||||
parent = nodes_collection.find_one({'_id': item['parent']})
|
||||
project = find_parent_project(parent)
|
||||
if project:
|
||||
item['project'] = project['_id']
|
||||
|
||||
# Default the 'user' property to the current user.
|
||||
item.setdefault('user', g.current_user['user_id'])
|
||||
|
||||
|
||||
def after_inserting_nodes(items):
|
||||
for item in items:
|
||||
# Skip subscriptions for first level items (since the context is not a
|
||||
# node, but a project).
|
||||
# TODO: support should be added for mixed context
|
||||
if 'parent' not in item:
|
||||
return
|
||||
context_object_id = item['parent']
|
||||
if item['node_type'] == 'comment':
|
||||
nodes_collection = current_app.data.driver.db['nodes']
|
||||
parent = nodes_collection.find_one({'_id': item['parent']})
|
||||
# Always subscribe to the parent node
|
||||
activity_subscribe(item['user'], 'node', item['parent'])
|
||||
if parent['node_type'] == 'comment':
|
||||
# If the parent is a comment, we provide its own parent as
|
||||
# context. We do this in order to point the user to an asset
|
||||
# or group when viewing the notification.
|
||||
verb = 'replied'
|
||||
context_object_id = parent['parent']
|
||||
# Subscribe to the parent of the parent comment (post or group)
|
||||
activity_subscribe(item['user'], 'node', parent['parent'])
|
||||
else:
|
||||
activity_subscribe(item['user'], 'node', item['_id'])
|
||||
verb = 'commented'
|
||||
else:
|
||||
verb = 'posted'
|
||||
activity_subscribe(item['user'], 'node', item['_id'])
|
||||
|
||||
activity_object_add(
|
||||
item['user'],
|
||||
verb,
|
||||
'node',
|
||||
item['_id'],
|
||||
'node',
|
||||
context_object_id
|
||||
)
|
||||
|
||||
|
||||
def deduct_content_type(node_doc, original=None):
|
||||
"""Deduct the content type from the attached file, if any."""
|
||||
|
||||
if node_doc['node_type'] != 'asset':
|
||||
log.debug('deduct_content_type: called on node type %r, ignoring', node_doc['node_type'])
|
||||
return
|
||||
|
||||
node_id = node_doc.get('_id')
|
||||
try:
|
||||
file_id = ObjectId(node_doc['properties']['file'])
|
||||
except KeyError:
|
||||
if node_id is None:
|
||||
# Creation of a file-less node is allowed, but updates aren't.
|
||||
return
|
||||
log.warning('deduct_content_type: Asset without properties.file, rejecting.')
|
||||
raise wz_exceptions.UnprocessableEntity('Missing file property for asset node')
|
||||
|
||||
files = current_app.data.driver.db['files']
|
||||
file_doc = files.find_one({'_id': file_id},
|
||||
{'content_type': 1})
|
||||
if not file_doc:
|
||||
log.warning('deduct_content_type: Node %s refers to non-existing file %s, rejecting.',
|
||||
node_id, file_id)
|
||||
raise wz_exceptions.UnprocessableEntity('File property refers to non-existing file')
|
||||
|
||||
# Guess the node content type from the file content type
|
||||
file_type = file_doc['content_type']
|
||||
if file_type.startswith('video/'):
|
||||
content_type = 'video'
|
||||
elif file_type.startswith('image/'):
|
||||
content_type = 'image'
|
||||
else:
|
||||
content_type = 'file'
|
||||
|
||||
node_doc['properties']['content_type'] = content_type
|
||||
|
||||
|
||||
def nodes_deduct_content_type(nodes):
|
||||
for node in nodes:
|
||||
deduct_content_type(node)
|
||||
|
||||
|
||||
def before_returning_node(node):
|
||||
# Run validation process, since GET on nodes entry point is public
|
||||
check_permissions('nodes', node, 'GET', append_allowed_methods=True)
|
||||
|
||||
# Embed short_link_info if the node has a short_code.
|
||||
short_code = node.get('short_code')
|
||||
if short_code:
|
||||
node['short_link'] = short_link_info(short_code)['short_link']
|
||||
|
||||
|
||||
def before_returning_nodes(nodes):
|
||||
for node in nodes['_items']:
|
||||
before_returning_node(node)
|
||||
|
||||
|
||||
def node_set_default_picture(node, original=None):
|
||||
"""Uses the image of an image asset or colour map of texture node as picture."""
|
||||
|
||||
if node.get('picture'):
|
||||
log.debug('Node %s already has a picture, not overriding', node.get('_id'))
|
||||
return
|
||||
|
||||
node_type = node.get('node_type')
|
||||
props = node.get('properties', {})
|
||||
content = props.get('content_type')
|
||||
|
||||
if node_type == 'asset' and content == 'image':
|
||||
image_file_id = props.get('file')
|
||||
elif node_type == 'texture':
|
||||
# Find the colour map, defaulting to the first image map available.
|
||||
image_file_id = None
|
||||
for image in props.get('files', []):
|
||||
if image_file_id is None or image.get('map_type') == u'color':
|
||||
image_file_id = image.get('file')
|
||||
else:
|
||||
log.debug('Not setting default picture on node type %s content type %s',
|
||||
node_type, content)
|
||||
return
|
||||
|
||||
if image_file_id is None:
|
||||
log.debug('Nothing to set the picture to.')
|
||||
return
|
||||
|
||||
log.debug('Setting default picture for node %s to %s', node.get('_id'), image_file_id)
|
||||
node['picture'] = image_file_id
|
||||
|
||||
|
||||
def nodes_set_default_picture(nodes):
|
||||
for node in nodes:
|
||||
node_set_default_picture(node)
|
||||
|
||||
|
||||
def after_deleting_node(item):
|
||||
from algoliasearch.client import AlgoliaException
|
||||
try:
|
||||
algolia_index_node_delete(item)
|
||||
except AlgoliaException as ex:
|
||||
log.warning('Unable to delete node info from Algolia for node %s; %s',
|
||||
item.get('_id'), ex)
|
||||
|
||||
|
||||
def setup_app(app, url_prefix):
|
||||
|
||||
from . import patch
|
||||
patch.setup_app(app, url_prefix=url_prefix)
|
||||
|
||||
app.on_fetched_item_nodes += before_returning_node
|
||||
app.on_fetched_resource_nodes += before_returning_nodes
|
||||
|
||||
app.on_fetched_item_nodes += item_parse_attachments
|
||||
app.on_fetched_resource_nodes += resource_parse_attachments
|
||||
|
||||
app.on_replace_nodes += before_replacing_node
|
||||
app.on_replace_nodes += deduct_content_type
|
||||
app.on_replace_nodes += node_set_default_picture
|
||||
app.on_replaced_nodes += after_replacing_node
|
||||
|
||||
app.on_insert_nodes += before_inserting_nodes
|
||||
app.on_insert_nodes += nodes_deduct_content_type
|
||||
app.on_insert_nodes += nodes_set_default_picture
|
||||
app.on_inserted_nodes += after_inserting_nodes
|
||||
|
||||
app.on_deleted_item_nodes += after_deleting_node
|
||||
|
||||
app.register_api_blueprint(blueprint, url_prefix=url_prefix)
|
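A rough client-side sketch of the share endpoint defined above. The base URL, the prefix under which the blueprint is mounted, and the token-as-username Basic auth are assumptions that depend on the deployment.

import requests

BASE_URL = 'https://example.com/api/nodes'  # assumed mount point


def share_node_via_api(node_id, token):
    """POSTs to /<node_id>/share and returns the short-link info dict."""
    resp = requests.post('%s/%s/share' % (BASE_URL, node_id),
                         auth=(token, ''))  # auth scheme is an assumption
    resp.raise_for_status()
    return resp.json()  # e.g. {'short_code': ..., 'short_link': ...}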
24
pillar/api/nodes/custom/__init__.py
Normal file
@@ -0,0 +1,24 @@
import logging

log = logging.getLogger(__name__)
patch_handlers = {}  # mapping from node type to callable.


def register_patch_handler(node_type):
    """Decorator, registers the decorated function as patch handler for the given node type."""

    def wrapper(func):
        if node_type in patch_handlers:
            raise ValueError('Node type %r already handled by %r' %
                             (node_type, patch_handlers[node_type]))

        log.debug('Registering %s as PATCH handler for node type %r',
                  func, node_type)
        patch_handlers[node_type] = func
        return func

    return wrapper


# Import sub-modules so they can register themselves.
from . import comment
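For illustration, this is how a handler for another node type would hook into the registry; the 'asset' handler below is entirely hypothetical, only the 'comment' handler exists in this commit.

from pillar.api.utils import jsonify

from . import register_patch_handler


@register_patch_handler(u'asset')
def patch_asset(node_id, patch):
    # A real handler would validate 'patch' and update the node here.
    return jsonify({'_status': 'OK', 'op': patch.get('op')})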
126
pillar/api/nodes/custom/comment.py
Normal file
@@ -0,0 +1,126 @@
|
||||
"""PATCH support for comment nodes."""
|
||||
import logging
|
||||
|
||||
import werkzeug.exceptions as wz_exceptions
|
||||
from flask import current_app
|
||||
from pillar.api.utils import authorization, authentication, jsonify
|
||||
|
||||
from . import register_patch_handler
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
ROLES_FOR_COMMENT_VOTING = {u'subscriber', u'demo'}
|
||||
VALID_COMMENT_OPERATIONS = {u'upvote', u'downvote', u'revoke'}
|
||||
|
||||
|
||||
@register_patch_handler(u'comment')
|
||||
def patch_comment(node_id, patch):
|
||||
assert_is_valid_patch(node_id, patch)
|
||||
user_id = authentication.current_user_id()
|
||||
|
||||
# Find the node
|
||||
nodes_coll = current_app.data.driver.db['nodes']
|
||||
node_query = {'_id': node_id,
|
||||
'$or': [{'properties.ratings.$.user': {'$exists': False}},
|
||||
{'properties.ratings.$.user': user_id}]}
|
||||
node = nodes_coll.find_one(node_query,
|
||||
projection={'properties': 1})
|
||||
if node is None:
|
||||
log.warning('How can the node not be found?')
|
||||
raise wz_exceptions.NotFound('Node %s not found' % node_id)
|
||||
|
||||
props = node['properties']
|
||||
|
||||
# Find the current rating (if any)
|
||||
rating = next((rating for rating in props.get('ratings', ())
|
||||
if rating.get('user') == user_id), None)
|
||||
|
||||
def revoke():
|
||||
if not rating:
|
||||
# No rating, this is a no-op.
|
||||
return
|
||||
|
||||
label = 'positive' if rating.get('is_positive') else 'negative'
|
||||
update = {'$pull': {'properties.ratings': rating},
|
||||
'$inc': {'properties.rating_%s' % label: -1}}
|
||||
return update
|
||||
|
||||
def upvote():
|
||||
if rating and rating.get('is_positive'):
|
||||
# There already was a positive rating, so this is a no-op.
|
||||
return
|
||||
|
||||
update = {'$inc': {'properties.rating_positive': 1}}
|
||||
if rating:
|
||||
update['$inc']['properties.rating_negative'] = -1
|
||||
update['$set'] = {'properties.ratings.$.is_positive': True}
|
||||
else:
|
||||
update['$push'] = {'properties.ratings': {
|
||||
'user': user_id, 'is_positive': True,
|
||||
}}
|
||||
return update
|
||||
|
||||
def downvote():
|
||||
if rating and not rating.get('is_positive'):
|
||||
# There already was a negative rating, so this is a no-op.
|
||||
return
|
||||
|
||||
update = {'$inc': {'properties.rating_negative': 1}}
|
||||
if rating:
|
||||
update['$inc']['properties.rating_positive'] = -1
|
||||
update['$set'] = {'properties.ratings.$.is_positive': False}
|
||||
else:
|
||||
update['$push'] = {'properties.ratings': {
|
||||
'user': user_id, 'is_positive': False,
|
||||
}}
|
||||
return update
|
||||
|
||||
actions = {
|
||||
u'upvote': upvote,
|
||||
u'downvote': downvote,
|
||||
u'revoke': revoke,
|
||||
}
|
||||
action = actions[patch['op']]
|
||||
mongo_update = action()
|
||||
|
||||
if mongo_update:
|
||||
log.info('Running %s', mongo_update)
|
||||
if rating:
|
||||
result = nodes_coll.update_one({'_id': node_id, 'properties.ratings.user': user_id},
|
||||
mongo_update)
|
||||
else:
|
||||
result = nodes_coll.update_one({'_id': node_id}, mongo_update)
|
||||
else:
|
||||
result = 'no-op'
|
||||
|
||||
# Fetch the new ratings, so the client can show these without querying again.
|
||||
node = nodes_coll.find_one(node_id,
|
||||
projection={'properties.rating_positive': 1,
|
||||
'properties.rating_negative': 1})
|
||||
|
||||
return jsonify({'_status': 'OK',
|
||||
'result': result,
|
||||
'properties': node['properties']
|
||||
})
|
||||
|
||||
|
||||
def assert_is_valid_patch(node_id, patch):
|
||||
"""Raises an exception when the patch isn't valid."""
|
||||
|
||||
try:
|
||||
op = patch['op']
|
||||
except KeyError:
|
||||
raise wz_exceptions.BadRequest("PATCH should have a key 'op' indicating the operation.")
|
||||
|
||||
if op not in VALID_COMMENT_OPERATIONS:
|
||||
raise wz_exceptions.BadRequest('Operation should be one of %s',
|
||||
', '.join(VALID_COMMENT_OPERATIONS))
|
||||
|
||||
# See whether the user is allowed to patch
|
||||
if authorization.user_matches_roles(ROLES_FOR_COMMENT_VOTING):
|
||||
log.debug('User is allowed to upvote/downvote comment')
|
||||
return
|
||||
|
||||
# Access denied.
|
||||
log.info('User %s wants to PATCH comment node %s, but is not allowed.',
|
||||
authentication.current_user_id(), node_id)
|
||||
raise wz_exceptions.Forbidden()
|
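A minimal client-side sketch of voting on a comment through this handler. The base URL and the token-as-username Basic auth are assumptions; 'downvote' and 'revoke' work the same way.

import requests


def upvote_comment(base_url, node_id, token):
    resp = requests.patch('%s/%s' % (base_url, node_id),
                          json={'op': u'upvote'},
                          auth=(token, ''))  # auth scheme is an assumption
    resp.raise_for_status()
    # Returns the updated rating_positive/rating_negative counters.
    return resp.json()['properties']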
51
pillar/api/nodes/patch.py
Normal file
@@ -0,0 +1,51 @@
"""Generic node patching support.

Depends on node_type-specific patch handlers in submodules.
"""

import logging

import werkzeug.exceptions as wz_exceptions
from flask import Blueprint, request
from pillar.api.utils import mongo
from pillar.api.utils import authorization, authentication
from pillar.api.utils import str2id

from . import custom

log = logging.getLogger(__name__)
blueprint = Blueprint('nodes.patch', __name__)


@blueprint.route('/<node_id>', methods=['PATCH'])
@authorization.require_login()
def patch_node(node_id):
    # Parse the request
    node_id = str2id(node_id)
    patch = request.get_json()

    # Find the node type.
    node = mongo.find_one_or_404('nodes', node_id,
                                 projection={'node_type': 1})
    try:
        node_type = node['node_type']
    except KeyError:
        msg = 'Node %s has no node_type property' % node_id
        log.warning(msg)
        raise wz_exceptions.InternalServerError(msg)
    log.debug('User %s wants to PATCH %s node %s',
              authentication.current_user_id(), node_type, node_id)

    # Find the PATCH handler for the node type.
    try:
        patch_handler = custom.patch_handlers[node_type]
    except KeyError:
        log.info('No patch handler for node type %r', node_type)
        raise wz_exceptions.MethodNotAllowed('PATCH on node type %r not allowed' % node_type)

    # Let the PATCH handler do its thing.
    return patch_handler(node_id, patch)


def setup_app(app, url_prefix):
    app.register_api_blueprint(blueprint, url_prefix=url_prefix)
22
pillar/api/projects/__init__.py
Normal file
@@ -0,0 +1,22 @@
from . import hooks
from .routes import blueprint_api


def setup_app(app, api_prefix):
    app.on_replace_projects += hooks.override_is_private_field
    app.on_replace_projects += hooks.before_edit_check_permissions
    app.on_replace_projects += hooks.protect_sensitive_fields
    app.on_update_projects += hooks.override_is_private_field
    app.on_update_projects += hooks.before_edit_check_permissions
    app.on_update_projects += hooks.protect_sensitive_fields
    app.on_delete_item_projects += hooks.before_delete_project
    app.on_insert_projects += hooks.before_inserting_override_is_private_field
    app.on_insert_projects += hooks.before_inserting_projects
    app.on_inserted_projects += hooks.after_inserting_projects

    app.on_fetched_item_projects += hooks.before_returning_project_permissions
    app.on_fetched_resource_projects += hooks.before_returning_project_resource_permissions
    app.on_fetched_item_projects += hooks.project_node_type_has_method
    app.on_fetched_resource_projects += hooks.projects_node_type_has_method

    app.register_api_blueprint(blueprint_api, url_prefix=api_prefix)
246
pillar/api/projects/hooks.py
Normal file
@@ -0,0 +1,246 @@
|
||||
import copy
|
||||
import logging
|
||||
|
||||
from flask import request, abort, current_app
|
||||
from gcloud import exceptions as gcs_exceptions
|
||||
from pillar.api.node_types.asset import node_type_asset
|
||||
from pillar.api.node_types.comment import node_type_comment
|
||||
from pillar.api.node_types.group import node_type_group
|
||||
from pillar.api.node_types.group_texture import node_type_group_texture
|
||||
from pillar.api.node_types.texture import node_type_texture
|
||||
from pillar.api.utils.gcs import GoogleCloudStorageBucket
|
||||
from pillar.api.utils import authorization, authentication
|
||||
from pillar.api.utils import remove_private_keys
|
||||
from pillar.api.utils.authorization import user_has_role, check_permissions
|
||||
from .utils import abort_with_error
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Default project permissions for the admin group.
|
||||
DEFAULT_ADMIN_GROUP_PERMISSIONS = ['GET', 'PUT', 'POST', 'DELETE']
|
||||
|
||||
|
||||
def before_inserting_projects(items):
|
||||
"""Strip unwanted properties, that will be assigned after creation. Also,
|
||||
verify permission to create a project (check quota, check role).
|
||||
|
||||
:param items: List of project docs that have been inserted (normally one)
|
||||
"""
|
||||
|
||||
# Allow admin users to do whatever they want.
|
||||
if user_has_role(u'admin'):
|
||||
return
|
||||
|
||||
for item in items:
|
||||
item.pop('url', None)
|
||||
|
||||
|
||||
def override_is_private_field(project, original):
|
||||
"""Override the 'is_private' property from the world permissions.
|
||||
|
||||
:param project: the project, which will be updated
|
||||
"""
|
||||
|
||||
# No permissions, no access.
|
||||
if 'permissions' not in project:
|
||||
project['is_private'] = True
|
||||
return
|
||||
|
||||
world_perms = project['permissions'].get('world', [])
|
||||
is_private = 'GET' not in world_perms
|
||||
project['is_private'] = is_private
|
||||
|
||||
|
||||
def before_inserting_override_is_private_field(projects):
|
||||
for project in projects:
|
||||
override_is_private_field(project, None)
|
||||
|
||||
|
||||
def before_edit_check_permissions(document, original):
|
||||
# Allow admin users to do whatever they want.
|
||||
# TODO: possibly move this into the check_permissions function.
|
||||
if user_has_role(u'admin'):
|
||||
return
|
||||
|
||||
check_permissions('projects', original, request.method)
|
||||
|
||||
|
||||
def before_delete_project(document):
|
||||
"""Checks permissions before we allow deletion"""
|
||||
|
||||
# Allow admin users to do whatever they want.
|
||||
# TODO: possibly move this into the check_permissions function.
|
||||
if user_has_role(u'admin'):
|
||||
return
|
||||
|
||||
check_permissions('projects', document, request.method)
|
||||
|
||||
|
||||
def protect_sensitive_fields(document, original):
|
||||
"""When not logged in as admin, prevents update to certain fields."""
|
||||
|
||||
# Allow admin users to do whatever they want.
|
||||
if user_has_role(u'admin'):
|
||||
return
|
||||
|
||||
def revert(name):
|
||||
if name not in original:
|
||||
try:
|
||||
del document[name]
|
||||
except KeyError:
|
||||
pass
|
||||
return
|
||||
document[name] = original[name]
|
||||
|
||||
revert('status')
|
||||
revert('category')
|
||||
revert('user')
|
||||
|
||||
if 'url' in original:
|
||||
revert('url')
|
||||
|
||||
|
||||
def after_inserting_projects(projects):
|
||||
"""After inserting a project in the collection we do some processing such as:
|
||||
- apply the right permissions
|
||||
- define basic node types
|
||||
- optionally generate a url
|
||||
- initialize storage space
|
||||
|
||||
:param projects: List of project docs that have been inserted (normally one)
|
||||
"""
|
||||
|
||||
users_collection = current_app.data.driver.db['users']
|
||||
for project in projects:
|
||||
owner_id = project.get('user', None)
|
||||
owner = users_collection.find_one(owner_id)
|
||||
after_inserting_project(project, owner)
|
||||
|
||||
|
||||
def after_inserting_project(project, db_user):
|
||||
project_id = project['_id']
|
||||
user_id = db_user['_id']
|
||||
|
||||
# Create a project-specific admin group (with name matching the project id)
|
||||
result, _, _, status = current_app.post_internal('groups', {'name': str(project_id)})
|
||||
if status != 201:
|
||||
log.error('Unable to create admin group for new project %s: %s',
|
||||
project_id, result)
|
||||
return abort_with_error(status)
|
||||
|
||||
admin_group_id = result['_id']
|
||||
log.debug('Created admin group %s for project %s', admin_group_id, project_id)
|
||||
|
||||
# Assign the current user to the group
|
||||
db_user.setdefault('groups', []).append(admin_group_id)
|
||||
|
||||
result, _, _, status = current_app.patch_internal('users', {'groups': db_user['groups']},
|
||||
_id=user_id)
|
||||
if status != 200:
|
||||
log.error('Unable to add user %s as member of admin group %s for new project %s: %s',
|
||||
user_id, admin_group_id, project_id, result)
|
||||
return abort_with_error(status)
|
||||
log.debug('Made user %s member of group %s', user_id, admin_group_id)
|
||||
|
||||
# Assign the group to the project with admin rights
|
||||
is_admin = authorization.is_admin(db_user)
|
||||
world_permissions = ['GET'] if is_admin else []
|
||||
permissions = {
|
||||
'world': world_permissions,
|
||||
'users': [],
|
||||
'groups': [
|
||||
{'group': admin_group_id,
|
||||
'methods': DEFAULT_ADMIN_GROUP_PERMISSIONS[:]},
|
||||
]
|
||||
}
|
||||
|
||||
def with_permissions(node_type):
|
||||
copied = copy.deepcopy(node_type)
|
||||
copied['permissions'] = permissions
|
||||
return copied
|
||||
|
||||
# Assign permissions to the project itself, as well as to the node_types
|
||||
project['permissions'] = permissions
|
||||
project['node_types'] = [
|
||||
with_permissions(node_type_group),
|
||||
with_permissions(node_type_asset),
|
||||
with_permissions(node_type_comment),
|
||||
with_permissions(node_type_texture),
|
||||
with_permissions(node_type_group_texture),
|
||||
]
|
||||
|
||||
# Allow admin users to use whatever url they want.
|
||||
if not is_admin or not project.get('url'):
|
||||
if project.get('category', '') == 'home':
|
||||
project['url'] = 'home'
|
||||
else:
|
||||
project['url'] = "p-{!s}".format(project_id)
|
||||
|
||||
# Initialize storage space (defaults to GCS)
|
||||
if current_app.config.get('TESTING'):
|
||||
log.warning('Not creating Google Cloud Storage bucket while running unit tests!')
|
||||
else:
|
||||
try:
|
||||
gcs_storage = GoogleCloudStorageBucket(str(project_id))
|
||||
if gcs_storage.bucket.exists():
|
||||
log.info('Created GCS instance for project %s', project_id)
|
||||
else:
|
||||
log.warning('Unable to create GCS instance for project %s', project_id)
|
||||
except gcs_exceptions.Forbidden as ex:
|
||||
log.warning('GCS forbids me to create GCS instance for project %s: %s', project_id, ex)
|
||||
|
||||
# Commit the changes directly to the MongoDB; a PUT is not allowed yet,
|
||||
# as the project doesn't have a valid permission structure.
|
||||
projects_collection = current_app.data.driver.db['projects']
|
||||
result = projects_collection.update_one({'_id': project_id},
|
||||
{'$set': remove_private_keys(project)})
|
||||
if result.matched_count != 1:
|
||||
log.warning('Unable to update project %s: %s', project_id, result.raw_result)
|
||||
abort_with_error(500)
|
||||
|
||||
|
||||
def before_returning_project_permissions(response):
|
||||
# Run validation process, since GET on nodes entry point is public
|
||||
check_permissions('projects', response, 'GET', append_allowed_methods=True)
|
||||
|
||||
|
||||
def before_returning_project_resource_permissions(response):
|
||||
# Return only those projects the user has access to.
|
||||
allow = []
|
||||
for project in response['_items']:
|
||||
if authorization.has_permissions('projects', project,
|
||||
'GET', append_allowed_methods=True):
|
||||
allow.append(project)
|
||||
else:
|
||||
log.debug('User %s requested project %s, but has no access to it; filtered out.',
|
||||
authentication.current_user_id(), project['_id'])
|
||||
|
||||
response['_items'] = allow
|
||||
|
||||
|
||||
def project_node_type_has_method(response):
|
||||
"""Check for a specific request arg, and check generate the allowed_methods
|
||||
list for the required node_type.
|
||||
"""
|
||||
|
||||
node_type_name = request.args.get('node_type', '')
|
||||
|
||||
# Proceed only if a node_type has been requested
|
||||
if not node_type_name:
|
||||
return
|
||||
|
||||
# Look up the node type in the project document
|
||||
if not any(node_type.get('name') == node_type_name
|
||||
for node_type in response['node_types']):
|
||||
return abort(404)
|
||||
|
||||
# Check permissions and append the allowed_methods to the node_type
|
||||
check_permissions('projects', response, 'GET', append_allowed_methods=True,
|
||||
check_node_type=node_type_name)
|
||||
|
||||
|
||||
def projects_node_type_has_method(response):
|
||||
for project in response['_items']:
|
||||
project_node_type_has_method(project)
|
||||
|
||||
|
138
pillar/api/projects/routes.py
Normal file
@@ -0,0 +1,138 @@
|
||||
import json
|
||||
import logging
|
||||
|
||||
from bson import ObjectId
|
||||
from flask import Blueprint, g, request, current_app, make_response, url_for
|
||||
from pillar.api.utils import authorization, jsonify, str2id
|
||||
from pillar.api.utils import mongo
|
||||
from pillar.api.utils.authorization import require_login, check_permissions
|
||||
from werkzeug import exceptions as wz_exceptions
|
||||
|
||||
from . import utils
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
blueprint_api = Blueprint('projects_api', __name__)
|
||||
|
||||
|
||||
@blueprint_api.route('/create', methods=['POST'])
|
||||
@authorization.require_login(require_roles={u'admin', u'subscriber', u'demo'})
|
||||
def create_project(overrides=None):
|
||||
"""Creates a new project."""
|
||||
|
||||
if request.mimetype == 'application/json':
|
||||
project_name = request.json['name']
|
||||
else:
|
||||
project_name = request.form['project_name']
|
||||
user_id = g.current_user['user_id']
|
||||
|
||||
project = utils.create_new_project(project_name, user_id, overrides)
|
||||
|
||||
# Return the project in the response.
|
||||
loc = url_for('projects|item_lookup', _id=project['_id'])
|
||||
return jsonify(project, status=201, headers={'Location': loc})
|
||||
|
||||
|
||||
@blueprint_api.route('/users', methods=['GET', 'POST'])
|
||||
@authorization.require_login()
|
||||
def project_manage_users():
|
||||
"""Manage users of a project. In this initial implementation, we handle
|
||||
addition and removal of a user to the admin group of a project.
|
||||
No changes are done on the project itself.
|
||||
"""
|
||||
|
||||
projects_collection = current_app.data.driver.db['projects']
|
||||
users_collection = current_app.data.driver.db['users']
|
||||
|
||||
# TODO: check if user is admin of the project before anything
|
||||
if request.method == 'GET':
|
||||
project_id = request.args['project_id']
|
||||
project = projects_collection.find_one({'_id': ObjectId(project_id)})
|
||||
admin_group_id = project['permissions']['groups'][0]['group']
|
||||
|
||||
users = users_collection.find(
|
||||
{'groups': {'$in': [admin_group_id]}},
|
||||
{'username': 1, 'email': 1, 'full_name': 1})
|
||||
return jsonify({'_status': 'OK', '_items': list(users)})
|
||||
|
||||
# The request is not a form, since it comes from the API sdk
|
||||
data = json.loads(request.data)
|
||||
project_id = ObjectId(data['project_id'])
|
||||
target_user_id = ObjectId(data['user_id'])
|
||||
action = data['action']
|
||||
current_user_id = g.current_user['user_id']
|
||||
|
||||
project = projects_collection.find_one({'_id': project_id})
|
||||
|
||||
# Check if the current_user is owner of the project, or removing themselves.
|
||||
remove_self = target_user_id == current_user_id and action == 'remove'
|
||||
if project['user'] != current_user_id and not remove_self:
|
||||
utils.abort_with_error(403)
|
||||
|
||||
admin_group = utils.get_admin_group(project)
|
||||
|
||||
# Get the user and add the admin group to it
|
||||
if action == 'add':
|
||||
operation = '$addToSet'
|
||||
log.info('project_manage_users: Adding user %s to admin group of project %s',
|
||||
target_user_id, project_id)
|
||||
elif action == 'remove':
|
||||
log.info('project_manage_users: Removing user %s from admin group of project %s',
|
||||
target_user_id, project_id)
|
||||
operation = '$pull'
|
||||
else:
|
||||
log.warning('project_manage_users: Unsupported action %r called by user %s',
|
||||
action, current_user_id)
|
||||
raise wz_exceptions.UnprocessableEntity()
|
||||
|
||||
users_collection.update({'_id': target_user_id},
|
||||
{operation: {'groups': admin_group['_id']}})
|
||||
|
||||
user = users_collection.find_one({'_id': target_user_id},
|
||||
{'username': 1, 'email': 1,
|
||||
'full_name': 1})
|
||||
|
||||
if not user:
|
||||
return jsonify({'_status': 'ERROR'}), 404
|
||||
|
||||
user['_status'] = 'OK'
|
||||
return jsonify(user)
|
||||
|
||||
|
||||
@blueprint_api.route('/<string:project_id>/quotas')
|
||||
@require_login()
|
||||
def project_quotas(project_id):
|
||||
"""Returns information about the project's limits."""
|
||||
|
||||
# Check that the user has GET permissions on the project itself.
|
||||
project = mongo.find_one_or_404('projects', project_id)
|
||||
check_permissions('projects', project, 'GET')
|
||||
|
||||
file_size_used = utils.project_total_file_size(project_id)
|
||||
|
||||
info = {
|
||||
'file_size_quota': None, # TODO: implement this later.
|
||||
'file_size_used': file_size_used,
|
||||
}
|
||||
|
||||
return jsonify(info)
|
||||
|
||||
|
||||
@blueprint_api.route('/<project_id>/<node_type>', methods=['OPTIONS', 'GET'])
|
||||
def get_allowed_methods(project_id=None, node_type=None):
|
||||
"""Returns allowed methods to create a node of a certain type.
|
||||
|
||||
Either project_id or parent_node_id must be given. If the latter is given,
|
||||
the former is deducted from it.
|
||||
"""
|
||||
|
||||
project = mongo.find_one_or_404('projects', str2id(project_id))
|
||||
proj_methods = authorization.compute_allowed_methods('projects', project, node_type)
|
||||
|
||||
resp = make_response()
|
||||
resp.headers['Allowed'] = ', '.join(sorted(proj_methods))
|
||||
resp.status_code = 204
|
||||
|
||||
return resp
|
||||
|
||||
|
92
pillar/api/projects/utils.py
Normal file
@@ -0,0 +1,92 @@
import logging

from bson import ObjectId
from flask import current_app
from werkzeug import exceptions as wz_exceptions
from werkzeug.exceptions import abort

log = logging.getLogger(__name__)


def project_total_file_size(project_id):
    """Returns the total number of bytes used by files of this project."""

    files = current_app.data.driver.db['files']
    file_size_used = files.aggregate([
        {'$match': {'project': ObjectId(project_id)}},
        {'$project': {'length_aggregate_in_bytes': 1}},
        {'$group': {'_id': None,
                    'all_files': {'$sum': '$length_aggregate_in_bytes'}}}
    ])

    # The aggregate function returns a cursor, not a document.
    try:
        return next(file_size_used)['all_files']
    except StopIteration:
        # No files used at all.
        return 0


def get_admin_group(project):
    """Returns the admin group for the project."""

    groups_collection = current_app.data.driver.db['groups']

    # TODO: search through all groups to find the one with the project ID as its name.
    admin_group_id = ObjectId(project['permissions']['groups'][0]['group'])
    group = groups_collection.find_one({'_id': admin_group_id})

    if group is None:
        raise ValueError('Unable to handle project without admin group.')

    if group['name'] != str(project['_id']):
        return abort_with_error(403)

    return group


def abort_with_error(status):
    """Aborts with the given status, or 500 if the status doesn't indicate an error.

    If the status is < 400, status 500 is used instead.
    """

    abort(status if status // 100 >= 4 else 500)
    raise wz_exceptions.InternalServerError('abort() should have aborted!')


def create_new_project(project_name, user_id, overrides):
    """Creates a new project owned by the given user."""

    log.info('Creating new project "%s" for user %s', project_name, user_id)

    # Create the project itself, the rest will be done by the after-insert hook.
    project = {'description': '',
               'name': project_name,
               'node_types': [],
               'status': 'published',
               'user': user_id,
               'is_private': True,
               'permissions': {},
               'url': '',
               'summary': '',
               'category': 'assets',  # TODO: allow the user to choose this.
               }
    if overrides is not None:
        project.update(overrides)

    result, _, _, status = current_app.post_internal('projects', project)
    if status != 201:
        log.error('Unable to create project "%s": %s', project_name, result)
        return abort_with_error(status)
    project.update(result)

    # Now re-fetch the project, as both the initial document and the returned
    # result do not contain the same etag as the database. This also updates
    # other fields set by hooks.
    document = current_app.data.driver.db['projects'].find_one(project['_id'])
    project.update(document)

    log.info('Created project %s for user %s', project['_id'], user_id)

    return project
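A minimal usage sketch of these helpers (hypothetical: `application` stands for a configured Pillar app and `some_user_id` for an existing user's ObjectId; neither is defined in this commit):

# Hypothetical sketch; 'application' and 'some_user_id' are placeholders.
from pillar.api.projects import utils as proj_utils

with application.app_context():
    project = proj_utils.create_new_project(u'My Project', user_id=some_user_id,
                                            overrides=None)
    used = proj_utils.project_total_file_size(project['_id'])
    print('Project %s uses %i bytes' % (project['_id'], used))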
198
pillar/api/service.py
Normal file
@@ -0,0 +1,198 @@
"""Service accounts."""

import logging

import blinker
from flask import Blueprint, current_app, request
from pillar.api import local_auth
from pillar.api.utils import mongo
from pillar.api.utils import authorization, authentication, str2id, jsonify
from werkzeug import exceptions as wz_exceptions

blueprint = Blueprint('service', __name__)
log = logging.getLogger(__name__)
signal_user_changed_role = blinker.NamedSignal('badger:user_changed_role')

ROLES_WITH_GROUPS = {u'admin', u'demo', u'subscriber'}

# Map of role name to group ID, for the above groups.
role_to_group_id = {}


@blueprint.before_app_first_request
def fetch_role_to_group_id_map():
    """Fills the role_to_group_id mapping upon application startup."""

    global role_to_group_id

    groups_coll = current_app.data.driver.db['groups']

    for role in ROLES_WITH_GROUPS:
        group = groups_coll.find_one({'name': role}, projection={'_id': 1})
        if group is None:
            log.warning('Group for role %r not found', role)
            continue
        role_to_group_id[role] = group['_id']

    log.debug('Group IDs for roles: %s', role_to_group_id)


@blueprint.route('/badger', methods=['POST'])
@authorization.require_login(require_roles={u'service', u'badger'}, require_all=True)
def badger():
    if request.mimetype != 'application/json':
        log.debug('Received %s instead of application/json', request.mimetype)
        raise wz_exceptions.BadRequest()

    # Parse the request
    args = request.json
    action = args.get('action', '')
    user_email = args.get('user_email', '')
    role = args.get('role', '')

    current_user_id = authentication.current_user_id()
    log.info('Service account %s %ss role %r to/from user %s',
             current_user_id, action, role, user_email)

    users_coll = current_app.data.driver.db['users']

    # Check that the user is allowed to grant this role.
    srv_user = users_coll.find_one(current_user_id,
                                   projection={'service.badger': 1})
    if srv_user is None:
        log.error('badger(%s, %s, %s): current user %s not found -- how did they log in?',
                  action, user_email, role, current_user_id)
        return 'User not found', 403

    allowed_roles = set(srv_user.get('service', {}).get('badger', []))
    if role not in allowed_roles:
        log.warning('badger(%s, %s, %s): service account not authorized to %s role %s',
                    action, user_email, role, action, role)
        return 'Role not allowed', 403

    return do_badger(action, user_email, role)


def do_badger(action, user_email, role):
    """Performs a badger action, returning a HTTP response."""

    if action not in {'grant', 'revoke'}:
        raise wz_exceptions.BadRequest('Action %r not supported' % action)

    if not user_email:
        raise wz_exceptions.BadRequest('User email not given')

    if not role:
        raise wz_exceptions.BadRequest('Role not given')

    users_coll = current_app.data.driver.db['users']

    # Fetch the user
    db_user = users_coll.find_one({'email': user_email}, projection={'roles': 1, 'groups': 1})
    if db_user is None:
        log.warning('badger(%s, %s, %s): user not found', action, user_email, role)
        return 'User not found', 404

    # Apply the action
    roles = set(db_user.get('roles') or [])
    if action == 'grant':
        roles.add(role)
    else:
        roles.discard(role)

    groups = manage_user_group_membership(db_user, role, action)

    updates = {'roles': list(roles)}
    if groups is not None:
        updates['groups'] = list(groups)

    users_coll.update_one({'_id': db_user['_id']},
                          {'$set': updates})

    # Let the rest of the world know this user was updated.
    db_user.update(updates)
    signal_user_changed_role.send(current_app, user=db_user)

    return '', 204


@blueprint.route('/urler/<project_id>', methods=['GET'])
@authorization.require_login(require_roles={u'service', u'urler'}, require_all=True)
def urler(project_id):
    """Returns the URL of any project."""

    project_id = str2id(project_id)
    project = mongo.find_one_or_404('projects', project_id,
                                    projection={'url': 1})
    return jsonify({
        '_id': project_id,
        'url': project['url']})


def manage_user_group_membership(db_user, role, action):
    """Some roles have associated groups; this function maintains group & role membership.

    Does NOT alter the given user, nor the database.

    :return: the new groups of the user, or None if the groups shouldn't be changed.
    :rtype: set
    """

    if action not in {'grant', 'revoke'}:
        raise ValueError('Action %r not supported' % action)

    # Currently only three roles have associated groups.
    if role not in ROLES_WITH_GROUPS:
        return

    # Find the group
    try:
        group_id = role_to_group_id[role]
    except KeyError:
        log.warning('Group for role %r cannot be found, unable to %s membership for user %s',
                    role, action, db_user['_id'])
        return

    user_groups = set(db_user.get('groups') or [])
    if action == 'grant':
        user_groups.add(group_id)
    else:
        user_groups.discard(group_id)

    return user_groups


def create_service_account(email, roles, service):
    """Creates a service account with the given roles + the role 'service'.

    :param email: email address associated with the account
    :type email: str
    :param roles: iterable of role names
    :param service: dict of the 'service' key in the user.
    :type service: dict
    :return: tuple (user doc, token doc)
    """

    # Create a user with the correct roles.
    roles = list(set(roles).union({u'service'}))
    user = {'username': email,
            'groups': [],
            'roles': roles,
            'settings': {'email_communications': 0},
            'auth': [],
            'full_name': email,
            'email': email,
            'service': service}
    result, _, _, status = current_app.post_internal('users', user)
    if status != 201:
        raise SystemExit('Error creating user {}: {}'.format(email, result))
    user.update(result)

    # Create an authentication token that won't expire for a long time.
    token = local_auth.generate_and_store_token(user['_id'], days=36500, prefix='SRV')

    return user, token


def setup_app(app, api_prefix):
    app.register_api_blueprint(blueprint, url_prefix=api_prefix)
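The /badger endpoint expects a small JSON document and service-account credentials. A hedged client-side sketch (the host, the '/api/service' mount point and the token value are illustrative; the token goes into the HTTP Basic username field, which is how pillar.api.utils.authentication reads it):

# Illustrative client call; host, URL prefix and token value are made up.
import requests

resp = requests.post(
    'http://localhost:5000/api/service/badger',
    json={'action': 'grant', 'role': 'subscriber', 'user_email': 'user@example.com'},
    auth=('SRV-some-service-token', ''))  # token as Basic-auth username
print(resp.status_code)  # 204 when the role was granted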
15
pillar/api/users/__init__.py
Normal file
@@ -0,0 +1,15 @@
from . import hooks
from .routes import blueprint_api


def setup_app(app, api_prefix):
    app.on_pre_GET_users += hooks.check_user_access
    app.on_post_GET_users += hooks.post_GET_user
    app.on_pre_PUT_users += hooks.check_put_access
    app.on_pre_PUT_users += hooks.before_replacing_user
    app.on_replaced_users += hooks.push_updated_user_to_algolia
    app.on_replaced_users += hooks.send_blinker_signal_roles_changed
    app.on_fetched_item_users += hooks.after_fetching_user
    app.on_fetched_resource_users += hooks.after_fetching_user_resource

    app.register_api_blueprint(blueprint_api, url_prefix=api_prefix)
123
pillar/api/users/hooks.py
Normal file
@@ -0,0 +1,123 @@
import copy
import json

from eve.utils import parse_request
from flask import current_app, g
from pillar.api.users.routes import log
from pillar.api.utils.authorization import user_has_role
from werkzeug.exceptions import Forbidden


def before_replacing_user(request, lookup):
    """Loads the auth field from the database, preventing any changes."""

    # Find the user that is being replaced
    req = parse_request('users')
    req.projection = json.dumps({'auth': 1})
    original = current_app.data.find_one('users', req, **lookup)

    # Make sure that the replacement has a valid auth field.
    updates = request.get_json()
    assert updates is request.get_json()  # We should get a ref to the cached JSON, and not a copy.

    if 'auth' in original:
        updates['auth'] = copy.deepcopy(original['auth'])
    else:
        updates.pop('auth', None)


def push_updated_user_to_algolia(user, original):
    """Push an update to the Algolia index when a user item is updated."""

    from algoliasearch.client import AlgoliaException
    from pillar.api.utils.algolia import algolia_index_user_save

    try:
        algolia_index_user_save(user)
    except AlgoliaException as ex:
        log.warning('Unable to push user info to Algolia for user "%s", id=%s; %s',
                    user.get('username'), user.get('_id'), ex)


def send_blinker_signal_roles_changed(user, original):
    """Sends a Blinker signal that the user roles were changed, so others can respond."""

    if user.get('roles') == original.get('roles'):
        return

    from pillar.api.service import signal_user_changed_role

    log.info('User %s changed roles to %s, sending Blinker signal',
             user.get('_id'), user.get('roles'))
    signal_user_changed_role.send(current_app, user=user)


def check_user_access(request, lookup):
    """Modifies the lookup dict to limit returned user info."""

    # No access when not logged in.
    current_user = g.get('current_user')
    current_user_id = current_user['user_id'] if current_user else None

    # Admins can do anything and get everything, except the 'auth' block.
    if user_has_role(u'admin'):
        return

    if not lookup and not current_user:
        raise Forbidden()

    # Add a filter to only return the current user.
    if '_id' not in lookup:
        lookup['_id'] = current_user['user_id']


def check_put_access(request, lookup):
    """Only allow PUT to the current user, or all users if admin."""

    if user_has_role(u'admin'):
        return

    current_user = g.get('current_user')
    if not current_user:
        raise Forbidden()

    if str(lookup['_id']) != str(current_user['user_id']):
        raise Forbidden()


def after_fetching_user(user):
    # Deny access to auth block; authentication stuff is managed by
    # custom end-points.
    user.pop('auth', None)

    current_user = g.get('current_user')
    current_user_id = current_user['user_id'] if current_user else None

    # Admins can do anything and get everything, except the 'auth' block.
    if user_has_role(u'admin'):
        return

    # Only allow full access to the current user.
    if str(user['_id']) == str(current_user_id):
        return

    # Remove all fields except public ones.
    public_fields = {'full_name', 'email'}
    for field in list(user.keys()):
        if field not in public_fields:
            del user[field]


def after_fetching_user_resource(response):
    for user in response['_items']:
        after_fetching_user(user)


def post_GET_user(request, payload):
    json_data = json.loads(payload.data)
    # Check if we are querying the users endpoint (instead of the single user)
    if json_data.get('_id') is None:
        return
    # json_data['computed_permissions'] = \
    #     compute_permissions(json_data['_id'], app.data.driver)
    payload.data = json.dumps(json_data)
19
pillar/api/users/routes.py
Normal file
@@ -0,0 +1,19 @@
import logging

from eve.methods.get import get
from flask import g, Blueprint
from pillar.api.utils import jsonify
from pillar.api.utils.authorization import require_login

log = logging.getLogger(__name__)
blueprint_api = Blueprint('users_api', __name__)


@blueprint_api.route('/me')
@require_login()
def my_info():
    eve_resp, _, _, status, _ = get('users', {'_id': g.current_user['user_id']})
    resp = jsonify(eve_resp['_items'][0], status=status)
    return resp
116
pillar/api/utils/__init__.py
Normal file
@@ -0,0 +1,116 @@
import copy
import hashlib
import json
import urllib

import datetime
import functools
import logging

import bson.objectid
from eve import RFC1123_DATE_FORMAT
from flask import current_app
from werkzeug import exceptions as wz_exceptions
import pymongo.results

__all__ = ('remove_private_keys', 'PillarJSONEncoder')
log = logging.getLogger(__name__)


def remove_private_keys(document):
    """Removes any key that starts with an underscore, returns result as new
    dictionary.
    """
    doc_copy = copy.deepcopy(document)
    for key in list(doc_copy.keys()):
        if key.startswith('_'):
            del doc_copy[key]

    try:
        del doc_copy['allowed_methods']
    except KeyError:
        pass

    return doc_copy


class PillarJSONEncoder(json.JSONEncoder):
    """JSON encoder with support for Pillar resources."""

    def default(self, obj):
        if isinstance(obj, datetime.datetime):
            return obj.strftime(RFC1123_DATE_FORMAT)

        if isinstance(obj, bson.ObjectId):
            return str(obj)

        if isinstance(obj, pymongo.results.UpdateResult):
            return obj.raw_result

        # Let the base class default method raise the TypeError
        return json.JSONEncoder.default(self, obj)


def dumps(mongo_doc, **kwargs):
    """json.dumps() for MongoDB documents."""
    return json.dumps(mongo_doc, cls=PillarJSONEncoder, **kwargs)


def jsonify(mongo_doc, status=200, headers=None):
    """JSonifies a Mongo document into a Flask response object."""

    return current_app.response_class(dumps(mongo_doc),
                                      mimetype='application/json',
                                      status=status,
                                      headers=headers)


def skip_when_testing(func):
    """Decorator, skips the decorated function when app.config['TESTING']"""

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if current_app.config['TESTING']:
            log.debug('Skipping call to %s(...) due to TESTING', func.func_name)
            return None

        return func(*args, **kwargs)
    return wrapper


def project_get_node_type(project_document, node_type_node_name):
    """Return a node_type subdocument for a project. If none is found, return
    None.
    """

    if project_document is None:
        return None

    return next((node_type for node_type in project_document['node_types']
                 if node_type['name'] == node_type_node_name), None)


def str2id(document_id):
    """Returns the document ID as ObjectID, or raises a BadRequest exception.

    :type document_id: str
    :rtype: bson.ObjectId
    :raises: wz_exceptions.BadRequest
    """

    if not document_id:
        log.debug('str2id(%r): Invalid Object ID', document_id)
        raise wz_exceptions.BadRequest('Invalid object ID %r' % document_id)

    try:
        return bson.ObjectId(document_id)
    except bson.objectid.InvalidId:
        log.debug('str2id(%r): Invalid Object ID', document_id)
        raise wz_exceptions.BadRequest('Invalid object ID %r' % document_id)


def gravatar(email, size=64):
    parameters = {'s': str(size), 'd': 'mm'}
    return "https://www.gravatar.com/avatar/" + \
           hashlib.md5(str(email)).hexdigest() + \
           "?" + urllib.urlencode(parameters)
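As a quick illustration of the JSON helpers above (the values and the printed output are made-up examples, not taken from this commit):

# Illustrative only: PillarJSONEncoder turns ObjectId and datetime into strings,
# and remove_private_keys() drops the underscore-prefixed Eve fields.
import datetime
import bson
from pillar.api.utils import dumps, remove_private_keys

doc = {'_id': bson.ObjectId(), '_etag': 'abc123',
       'name': u'Suzanne', 'created': datetime.datetime.utcnow()}
print(dumps(remove_private_keys(doc)))
# e.g. {"name": "Suzanne", "created": "Wed, 17 Aug 2016 10:00:00 GMT"}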
98
pillar/api/utils/algolia.py
Normal file
@@ -0,0 +1,98 @@
import logging

from bson import ObjectId
from flask import current_app

from pillar.api.file_storage import generate_link
from . import skip_when_testing

log = logging.getLogger(__name__)

INDEX_ALLOWED_USER_ROLES = {'admin', 'subscriber', 'demo'}
INDEX_ALLOWED_NODE_TYPES = {'asset', 'texture', 'group', 'hdri'}


@skip_when_testing
def algolia_index_user_save(user):
    if current_app.algolia_index_users is None:
        return
    # Strip unneeded roles
    if 'roles' in user:
        roles = set(user['roles']).intersection(INDEX_ALLOWED_USER_ROLES)
    else:
        roles = set()
    if current_app.algolia_index_users:
        # Create or update Algolia index for the user
        current_app.algolia_index_users.save_object({
            'objectID': user['_id'],
            'full_name': user['full_name'],
            'username': user['username'],
            'roles': list(roles),
            'groups': user['groups'],
            'email': user['email']
        })


@skip_when_testing
def algolia_index_node_save(node):
    if not current_app.algolia_index_nodes:
        return
    if node['node_type'] not in INDEX_ALLOWED_NODE_TYPES:
        return
    # If a node does not have status published, do not index
    if node['properties'].get('status') != 'published':
        return

    projects_collection = current_app.data.driver.db['projects']
    project = projects_collection.find_one({'_id': ObjectId(node['project'])})

    users_collection = current_app.data.driver.db['users']
    user = users_collection.find_one({'_id': ObjectId(node['user'])})

    node_ob = {
        'objectID': node['_id'],
        'name': node['name'],
        'project': {
            '_id': project['_id'],
            'name': project['name']
        },
        'created': node['_created'],
        'updated': node['_updated'],
        'node_type': node['node_type'],
        'user': {
            '_id': user['_id'],
            'full_name': user['full_name']
        },
    }
    if 'description' in node and node['description']:
        node_ob['description'] = node['description']
    if 'picture' in node and node['picture']:
        files_collection = current_app.data.driver.db['files']
        lookup = {'_id': ObjectId(node['picture'])}
        picture = files_collection.find_one(lookup)
        if picture['backend'] == 'gcs':
            variation_t = next((item for item in picture['variations']
                                if item['size'] == 't'), None)
            if variation_t:
                node_ob['picture'] = generate_link(picture['backend'],
                                                   variation_t['file_path'],
                                                   project_id=str(picture['project']),
                                                   is_public=True)
    # If the node has world permissions, compute the Free permission
    if 'permissions' in node and 'world' in node['permissions']:
        if 'GET' in node['permissions']['world']:
            node_ob['is_free'] = True
    # Append the media key if the node is of node_type 'asset'
    if node['node_type'] == 'asset':
        node_ob['media'] = node['properties']['content_type']
    # Add tags
    if 'tags' in node['properties']:
        node_ob['tags'] = node['properties']['tags']

    current_app.algolia_index_nodes.save_object(node_ob)


@skip_when_testing
def algolia_index_node_delete(node):
    if current_app.algolia_index_nodes is None:
        return
    current_app.algolia_index_nodes.delete_object(node['_id'])
224
pillar/api/utils/authentication.py
Normal file
@@ -0,0 +1,224 @@
"""Generic authentication.

Contains functionality to validate tokens, create users and tokens, and make
unique usernames from emails. Calls out to the pillar.api.blender_id
module for Blender ID communication.
"""

import logging
import datetime

from bson import tz_util
from flask import g
from flask import request
from flask import current_app

log = logging.getLogger(__name__)


def validate_token():
    """Validate the token provided in the request and populate the current_user
    flask.g object, so that permissions and access to a resource can be defined
    from it.

    When the token is successfully validated, sets `g.current_user` to contain
    the user information, otherwise it is set to None.

    @returns True iff the user is logged in with a valid Blender ID token.
    """

    if request.authorization:
        token = request.authorization.username
        oauth_subclient = request.authorization.password
    else:
        # Check the session, the user might be logged in through Flask-Login.
        from pillar import auth

        token = auth.get_blender_id_oauth_token()
        if token and isinstance(token, (tuple, list)):
            token = token[0]
        oauth_subclient = None

    if not token:
        # If no authorization headers are provided, we are getting a request
        # from a non logged in user. Proceed accordingly.
        log.debug('No authentication headers, so not logged in.')
        g.current_user = None
        return False

    return validate_this_token(token, oauth_subclient) is not None


def validate_this_token(token, oauth_subclient=None):
    """Validates a given token, and sets g.current_user.

    :returns: the user in MongoDB, or None if not a valid token.
    :rtype: dict
    """

    g.current_user = None
    _delete_expired_tokens()

    # Check the users to see if there is one with this Blender ID token.
    db_token = find_token(token, oauth_subclient)
    if not db_token:
        log.debug('Token %s not found in our local database.', token)

        # If no valid token is found in our local database, we issue a new
        # request to the Blender ID server to verify the validity of the token
        # passed via the HTTP header. We will get basic user info if the user
        # is authorized, and we will store the token in our local database.
        from pillar.api import blender_id

        db_user, status = blender_id.validate_create_user('', token, oauth_subclient)
    else:
        # log.debug("User is already in our database and token hasn't expired yet.")
        users = current_app.data.driver.db['users']
        db_user = users.find_one(db_token['user'])

    if db_user is None:
        log.debug('Validation failed, user not logged in')
        return None

    g.current_user = {'user_id': db_user['_id'],
                      'groups': db_user['groups'],
                      'roles': set(db_user.get('roles', []))}

    return db_user


def find_token(token, is_subclient_token=False, **extra_filters):
    """Returns the token document, or None if it doesn't exist (or is expired)."""

    tokens_collection = current_app.data.driver.db['tokens']

    # TODO: remove expired tokens from collection.
    lookup = {'token': token,
              'is_subclient_token': True if is_subclient_token else {'$in': [False, None]},
              'expire_time': {"$gt": datetime.datetime.now(tz=tz_util.utc)}}
    lookup.update(extra_filters)

    db_token = tokens_collection.find_one(lookup)
    return db_token


def store_token(user_id, token, token_expiry, oauth_subclient_id=False):
    """Stores an authentication token.

    :returns: the token document from MongoDB
    """

    assert isinstance(token, (str, unicode)), 'token must be string type, not %r' % type(token)

    token_data = {
        'user': user_id,
        'token': token,
        'expire_time': token_expiry,
    }
    if oauth_subclient_id:
        token_data['is_subclient_token'] = True

    r, _, _, status = current_app.post_internal('tokens', token_data)

    if status not in {200, 201}:
        log.error('Unable to store authentication token: %s', r)
        raise RuntimeError('Unable to store authentication token.')

    token_data.update(r)
    return token_data


def create_new_user(email, username, user_id):
    """Creates a new user in our local database.

    @param email: the user's email
    @param username: the username, which is also used as full name.
    @param user_id: the user ID from the Blender ID server.
    @returns: the user ID from our local database.
    """

    user_data = create_new_user_document(email, user_id, username)
    r = current_app.post_internal('users', user_data)
    user_id = r[0]['_id']
    return user_id


def create_new_user_document(email, user_id, username, provider='blender-id',
                             token=''):
    """Creates a new user document, without storing it in MongoDB. The token
    parameter is a password in case provider is "local".
    """

    user_data = {
        'full_name': username,
        'username': username,
        'email': email,
        'auth': [{
            'provider': provider,
            'user_id': str(user_id),
            'token': token}],
        'settings': {
            'email_communications': 1
        },
        'groups': [],
    }
    return user_data


def make_unique_username(email):
    """Creates a unique username from the email address.

    @param email: the email address
    @returns: the new username
    @rtype: str
    """

    username = email.split('@')[0]
    # Check for min length of username (otherwise validation fails)
    username = "___{0}".format(username) if len(username) < 3 else username

    users = current_app.data.driver.db['users']
    user_from_username = users.find_one({'username': username})

    if not user_from_username:
        return username

    # Username exists, make it unique by adding some number after it.
    suffix = 1
    while True:
        unique_name = '%s%i' % (username, suffix)
        user_from_username = users.find_one({'username': unique_name})
        if user_from_username is None:
            return unique_name
        suffix += 1


def _delete_expired_tokens():
    """Deletes tokens that have expired.

    For debugging, we keep expired tokens around for a few days, so that we
    can determine that a token was expired rather than not created in the
    first place. It also grants some leeway in clock synchronisation.
    """

    token_coll = current_app.data.driver.db['tokens']

    now = datetime.datetime.now(tz_util.utc)
    expiry_date = now - datetime.timedelta(days=7)

    result = token_coll.delete_many({'expire_time': {"$lt": expiry_date}})
    # log.debug('Deleted %i expired authentication tokens', result.deleted_count)


def current_user_id():
    """None-safe fetching of user ID. Can return None itself, though."""

    current_user = g.get('current_user') or {}
    return current_user.get('user_id')


def setup_app(app):
    @app.before_request
    def validate_token_at_each_request():
        validate_token()
        return None
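To make the token handling concrete, a rough sketch of storing and then finding a token (hypothetical: it runs inside an application context, and `user_id` stands for an existing user's ObjectId; the token value is made up):

# Hypothetical sketch; 'user_id' and the token value are placeholders.
import datetime
from bson import tz_util
from pillar.api.utils import authentication

expiry = datetime.datetime.now(tz=tz_util.utc) + datetime.timedelta(days=30)
authentication.store_token(user_id, 'my-secret-token', expiry)
assert authentication.find_token('my-secret-token') is not None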
372
pillar/api/utils/authorization.py
Normal file
@@ -0,0 +1,372 @@
import logging
import functools

from bson import ObjectId
from flask import g
from flask import abort
from flask import current_app
from werkzeug.exceptions import Forbidden

CHECK_PERMISSIONS_IMPLEMENTED_FOR = {'projects', 'nodes'}

log = logging.getLogger(__name__)


def check_permissions(collection_name, resource, method, append_allowed_methods=False,
                      check_node_type=None):
    """Check user permissions to access a node. We look up node permissions from
    world to groups to users and match them with the computed user permissions.
    If there is no match, we raise 403.

    :param collection_name: name of the collection the resource comes from.
    :param resource: resource from MongoDB
    :type resource: dict
    :param method: name of the requested HTTP method
    :param append_allowed_methods: whether to return the list of allowed methods
        in the resource. Only valid when method='GET'.
    :param check_node_type: node type to check. Only valid when collection_name='projects'.
    :type check_node_type: str
    """

    if not has_permissions(collection_name, resource, method, append_allowed_methods,
                           check_node_type):
        abort(403)


def compute_allowed_methods(collection_name, resource, check_node_type=None):
    """Computes the HTTP methods that are allowed on a given resource.

    :param collection_name: name of the collection the resource comes from.
    :param resource: resource from MongoDB
    :type resource: dict
    :param check_node_type: node type to check. Only valid when collection_name='projects'.
    :type check_node_type: str
    :returns: Set of allowed methods
    :rtype: set
    """

    # Check some input values.
    if collection_name not in CHECK_PERMISSIONS_IMPLEMENTED_FOR:
        raise ValueError('compute_allowed_methods only implemented for %s, not for %s'
                         % (CHECK_PERMISSIONS_IMPLEMENTED_FOR, collection_name))

    if check_node_type is not None and collection_name != 'projects':
        raise ValueError('check_node_type parameter is only valid for checking projects.')

    computed_permissions = compute_aggr_permissions(collection_name, resource, check_node_type)

    if not computed_permissions:
        log.info('No permissions available to compute for resource %r',
                 resource.get('node_type', resource))
        return set()

    # Accumulate allowed methods from the user, group and world level.
    allowed_methods = set()
    current_user = g.current_user
    if current_user:
        # If the user is authenticated, proceed to compare the group permissions
        for permission in computed_permissions.get('groups', ()):
            if permission['group'] in current_user['groups']:
                allowed_methods.update(permission['methods'])

        for permission in computed_permissions.get('users', ()):
            if current_user['user_id'] == permission['user']:
                allowed_methods.update(permission['methods'])

    # Check if the node is public or private. This must be set for non logged
    # in users to see the content. For most BI projects this is on by default,
    # while for private project this will not be set at all.
    if 'world' in computed_permissions:
        allowed_methods.update(computed_permissions['world'])

    return allowed_methods


def has_permissions(collection_name, resource, method, append_allowed_methods=False,
                    check_node_type=None):
    """Check user permissions to access a node. We look up node permissions from
    world to groups to users and match them with the computed user permissions.

    :param collection_name: name of the collection the resource comes from.
    :param resource: resource from MongoDB
    :type resource: dict
    :param method: name of the requested HTTP method
    :param append_allowed_methods: whether to return the list of allowed methods
        in the resource. Only valid when method='GET'.
    :param check_node_type: node type to check. Only valid when collection_name='projects'.
    :type check_node_type: str
    :returns: True if the user has access, False otherwise.
    :rtype: bool
    """

    # Check some input values.
    if append_allowed_methods and method != 'GET':
        raise ValueError("append_allowed_methods only allowed with 'GET' method")

    allowed_methods = compute_allowed_methods(collection_name, resource, check_node_type)

    permission_granted = method in allowed_methods
    if permission_granted:
        if append_allowed_methods:
            # TODO: rename this field _allowed_methods
            if check_node_type:
                node_type = next((node_type for node_type in resource['node_types']
                                  if node_type['name'] == check_node_type))
                assign_to = node_type
            else:
                assign_to = resource
            assign_to['allowed_methods'] = list(set(allowed_methods))
        return True
    else:
        log.debug('Permission denied, method %s not in allowed methods %s',
                  method, allowed_methods)
    return False


def compute_aggr_permissions(collection_name, resource, check_node_type=None):
    """Returns a permissions dict."""

    # We always need to know the project.
    if collection_name == 'projects':
        project = resource
        if check_node_type is None:
            return project['permissions']
        node_type_name = check_node_type
    else:
        # Not a project, so it's a node.
        assert 'project' in resource
        assert 'node_type' in resource

        node_type_name = resource['node_type']

        if isinstance(resource['project'], dict):
            # embedded project
            project = resource['project']
        else:
            project_id = resource['project']
            project = _find_project_node_type(project_id, node_type_name)

    # Every node should have a project.
    if project is None:
        log.warning('Resource %s from "%s" refers to a project that does not exist.',
                    resource['_id'], collection_name)
        raise Forbidden()

    project_permissions = project['permissions']

    # Find the node type from the project.
    node_type = next((node_type for node_type in project['node_types']
                      if node_type['name'] == node_type_name), None)
    if node_type is None:  # This node type is not known, so doesn't give permissions.
        node_type_permissions = {}
    else:
        node_type_permissions = node_type.get('permissions', {})

    # For projects or specific node types in projects, we're done now.
    if collection_name == 'projects':
        return merge_permissions(project_permissions, node_type_permissions)

    node_permissions = resource.get('permissions', {})
    return merge_permissions(project_permissions, node_type_permissions, node_permissions)


def _find_project_node_type(project_id, node_type_name):
    """Returns the project with just the one named node type."""

    # Cache result per request, as many nodes of the same project can be checked.
    cache = g.get('_find_project_node_type_cache')
    if cache is None:
        cache = g._find_project_node_type_cache = {}

    try:
        return cache[(project_id, node_type_name)]
    except KeyError:
        pass

    projects_collection = current_app.data.driver.db['projects']
    project = projects_collection.find_one(
        ObjectId(project_id),
        {'permissions': 1,
         'node_types': {'$elemMatch': {'name': node_type_name}},
         'node_types.name': 1,
         'node_types.permissions': 1})

    cache[(project_id, node_type_name)] = project

    return project


def merge_permissions(*args):
    """Merges all given permissions.

    :param args: list of {'user': ..., 'group': ..., 'world': ...} dicts.
    :returns: combined list of permissions.
    """

    if not args:
        return {}

    if len(args) == 1:
        return args[0]

    effective = {}

    # When testing we want stable results, and not be dependent on PYTHONHASH values etc.
    if current_app.config['TESTING']:
        maybe_sorted = sorted
    else:
        def maybe_sorted(arg):
            return arg

    def merge(field_name):
        plural_name = field_name + 's'

        from0 = args[0].get(plural_name, [])
        from1 = args[1].get(plural_name, [])

        asdict0 = {permission[field_name]: permission['methods'] for permission in from0}
        asdict1 = {permission[field_name]: permission['methods'] for permission in from1}

        keys = set(asdict0.keys() + asdict1.keys())
        for key in maybe_sorted(keys):
            methods0 = asdict0.get(key, [])
            methods1 = asdict1.get(key, [])
            methods = maybe_sorted(set(methods0).union(set(methods1)))
            effective.setdefault(plural_name, []).append({field_name: key, u'methods': methods})

    merge(u'user')
    merge(u'group')

    # Gather permissions for world
    world0 = args[0].get('world', [])
    world1 = args[1].get('world', [])
    world_methods = set(world0).union(set(world1))
    if world_methods:
        effective[u'world'] = maybe_sorted(world_methods)

    # Recurse for longer merges
    if len(args) > 2:
        return merge_permissions(effective, *args[2:])

    return effective
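To illustrate the merge semantics of merge_permissions(), a small worked example (illustrative only: run inside an application context with TESTING enabled so the method lists come out sorted; in real documents the group/user values are ObjectIds):

# Illustrative only; 'g1' and 'u1' stand in for ObjectIds.
project_perms = {'groups': [{'group': 'g1', 'methods': ['GET', 'POST']}],
                 'world': ['GET']}
node_perms = {'groups': [{'group': 'g1', 'methods': ['PUT']}],
              'users': [{'user': 'u1', 'methods': ['DELETE']}]}

merged = merge_permissions(project_perms, node_perms)
# roughly: {'groups': [{'group': 'g1', 'methods': ['GET', 'POST', 'PUT']}],
#           'users': [{'user': 'u1', 'methods': ['DELETE']}],
#           'world': ['GET']}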
def require_login(require_roles=set(),
                  require_all=False):
    """Decorator that enforces users to authenticate.

    Optionally only allows access to users with a certain role.

    :param require_roles: set of roles.
    :param require_all:
        When False (the default): if the user's roles have a
        non-empty intersection with the given roles, access is granted.
        When True: require the user to have all given roles before access is
        granted.
    """

    if not isinstance(require_roles, set):
        raise TypeError('require_roles param should be a set, but is a %r' % type(require_roles))

    if require_all and not require_roles:
        raise ValueError('require_login(require_all=True) cannot be used with empty require_roles.')

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            if not user_matches_roles(require_roles, require_all):
                if g.current_user is None:
                    # We don't need to log at a higher level, as this is very common.
                    # Many browsers first try to see whether authentication is needed
                    # at all, before sending the password.
                    log.debug('Unauthenticated access to %s attempted.', func)
                else:
                    log.warning('User %s is authenticated, but does not have required roles %s to '
                                'access %s', g.current_user['user_id'], require_roles, func)
                abort(403)

            return func(*args, **kwargs)

        return wrapper

    return decorator


def ab_testing(require_roles=set(),
               require_all=False):
    """Decorator that raises a 404 when the user doesn't match the roles.

    :param require_roles: set of roles.
    :param require_all:
        When False (the default): if the user's roles have a
        non-empty intersection with the given roles, access is granted.
        When True: require the user to have all given roles before access is
        granted.
    """

    if not isinstance(require_roles, set):
        raise TypeError('require_roles param should be a set, but is a %r' % type(require_roles))

    if require_all and not require_roles:
        raise ValueError('require_login(require_all=True) cannot be used with empty require_roles.')

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            if not user_matches_roles(require_roles, require_all):
                abort(404)

            return func(*args, **kwargs)

        return wrapper

    return decorator


def user_has_role(role, user=None):
    """Returns True iff the user is logged in and has the given role."""

    if user is None:
        user = g.get('current_user')

    if user is None:
        return False

    roles = user.get('roles') or ()
    return role in roles


def user_matches_roles(require_roles=set(),
                       require_all=False):
    """Returns True iff the user's roles match the query.

    :param require_roles: set of roles.
    :param require_all:
        When False (the default): if the user's roles have a
        non-empty intersection with the given roles, returns True.
        When True: require the user to have all given roles before
        returning True.
    """

    if not isinstance(require_roles, set):
        raise TypeError('require_roles param should be a set, but is a %r' % type(require_roles))

    if require_all and not require_roles:
        raise ValueError('require_login(require_all=True) cannot be used with empty require_roles.')

    current_user = g.get('current_user')

    if current_user is None:
        return False

    intersection = require_roles.intersection(current_user['roles'])
    if require_all:
        return len(intersection) == len(require_roles)

    return not bool(require_roles) or bool(intersection)


def is_admin(user):
    """Returns True iff the given user has the admin role."""

    return user_has_role(u'admin', user)
39
pillar/api/utils/cdn.py
Normal file
@@ -0,0 +1,39 @@
import datetime
from hashlib import md5
from flask import current_app


def hash_file_path(file_path, expiry_timestamp=None):
    if not file_path.startswith('/'):
        file_path = '/' + file_path
    service_domain = current_app.config['CDN_SERVICE_DOMAIN']
    domain_subfolder = current_app.config['CDN_CONTENT_SUBFOLDER']
    asset_url = current_app.config['CDN_SERVICE_DOMAIN_PROTOCOL'] + \
                '://' + \
                service_domain + \
                domain_subfolder + \
                file_path

    if current_app.config['CDN_USE_URL_SIGNING']:

        url_signing_key = current_app.config['CDN_URL_SIGNING_KEY']
        hash_string = domain_subfolder + file_path + url_signing_key

        if not expiry_timestamp:
            expiry_timestamp = datetime.datetime.now() + datetime.timedelta(hours=24)
            expiry_timestamp = expiry_timestamp.strftime('%s')

        hash_string = expiry_timestamp + hash_string

        expiry_timestamp = "," + str(expiry_timestamp)

        hashed_file_path = md5(hash_string).digest().encode('base64')[:-1]
        hashed_file_path = hashed_file_path.replace('+', '-')
        hashed_file_path = hashed_file_path.replace('/', '_')

        asset_url = asset_url + \
                    '?secure=' + \
                    hashed_file_path + \
                    expiry_timestamp

    return asset_url
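Assuming the CDN_* settings are configured, the signed URL comes out shaped roughly as sketched below (the domain, subfolder and signing key are made up for illustration):

# Illustrative sketch, inside an app context with CDN_USE_URL_SIGNING enabled.
# With CDN_SERVICE_DOMAIN='cdn.example.com' and CDN_CONTENT_SUBFOLDER='/assets':
url = hash_file_path('_/video.mp4')
# e.g. https://cdn.example.com/assets/_/video.mp4?secure=<url-safe-md5>,1471428000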
54
pillar/api/utils/encoding.py
Normal file
@@ -0,0 +1,54 @@
import logging
import os

from flask import current_app

log = logging.getLogger(__name__)


class Encoder:
    """Generic Encoder wrapper. Provides a consistent API, independent from
    the encoding backend enabled.
    """

    @staticmethod
    def job_create(src_file):
        """Create an encoding job. Return the backend used as well as an id.
        """
        if current_app.config['ENCODING_BACKEND'] != 'zencoder' or \
                current_app.encoding_service_client is None:
            log.error('I can only work with Zencoder, check the config file.')
            return None

        if src_file['backend'] != 'gcs':
            log.error('Unable to work with storage backend %r', src_file['backend'])
            return None

        # Build the specific GCS input url, assuming the file is stored
        # in the _ subdirectory
        storage_base = "gcs://{0}/_/".format(src_file['project'])
        file_input = os.path.join(storage_base, src_file['file_path'])
        options = dict(notifications=current_app.config['ZENCODER_NOTIFICATIONS_URL'])

        outputs = [{'format': v['format'],
                    'url': os.path.join(storage_base, v['file_path'])}
                   for v in src_file['variations']]
        r = current_app.encoding_service_client.job.create(file_input,
                                                           outputs=outputs,
                                                           options=options)
        if r.code != 201:
            log.error('Error %i creating Zencoder job: %s', r.code, r.body)
            return None

        return {'process_id': r.body['id'],
                'backend': 'zencoder'}

    @staticmethod
    def job_progress(job_id):
        from zencoder import Zencoder

        if isinstance(current_app.encoding_service_client, Zencoder):
            r = current_app.encoding_service_client.job.progress(int(job_id))
            return r.body
        else:
            return None
222
pillar/api/utils/gcs.py
Normal file
@@ -0,0 +1,222 @@
import os
import time
import datetime
import logging

from bson import ObjectId
from gcloud.storage.client import Client
from gcloud.exceptions import NotFound
from flask import current_app, g
from werkzeug.local import LocalProxy

log = logging.getLogger(__name__)


def get_client():
    """Stores the GCS client on the global Flask object.

    The GCS client is not user-specific anyway.
    """

    _gcs = getattr(g, '_gcs_client', None)
    if _gcs is None:
        _gcs = g._gcs_client = Client()
    return _gcs


# This hides the specifics of how/where we store the GCS client,
# and allows the rest of the code to use 'gcs' as a simple variable
# that does the right thing.
gcs = LocalProxy(get_client)


class GoogleCloudStorageBucket(object):
    """Cloud Storage bucket interface. We create a bucket for every project. In
    the bucket we create first level subdirs as follows:
    - '_' (will contain hashed assets, and stays on top of default listing)
    - 'svn' (svn checkout mirror)
    - 'shared' (any additional folder of static folder that is accessed via a
      node of 'storage' node_type)

    :type bucket_name: string
    :param bucket_name: Name of the bucket.

    :type subdir: string
    :param subdir: The local entry point to browse the bucket.

    """

    def __init__(self, bucket_name, subdir='_/'):
        try:
            self.bucket = gcs.get_bucket(bucket_name)
        except NotFound:
            self.bucket = gcs.bucket(bucket_name)
            # Hardcode the bucket location to EU
            self.bucket.location = 'EU'
            # Optionally enable CORS from * (currently only used for vrview)
            # self.bucket.cors = [
            #     {
            #       "origin": ["*"],
            #       "responseHeader": ["Content-Type"],
            #       "method": ["GET", "HEAD", "DELETE"],
            #       "maxAgeSeconds": 3600
            #     }
            # ]
            self.bucket.create()

        self.subdir = subdir

    def List(self, path=None):
        """Display the content of a subdir in the project bucket. If the path
        points to a file the listing is simply empty.

        :type path: string
        :param path: The relative path to the directory or asset.
        """
        if path and not path.endswith('/'):
            path += '/'
        prefix = os.path.join(self.subdir, path)

        fields_to_return = 'nextPageToken,items(name,size,contentType),prefixes'
        req = self.bucket.list_blobs(fields=fields_to_return, prefix=prefix,
                                     delimiter='/')

        files = []
        for f in req:
            filename = os.path.basename(f.name)
            if filename != '':  # Skip own folder name
                files.append(dict(
                    path=os.path.relpath(f.name, self.subdir),
                    text=filename,
                    type=f.content_type))

        directories = []
        for dir_path in req.prefixes:
            directory_name = os.path.basename(os.path.normpath(dir_path))
            directories.append(dict(
                text=directory_name,
                path=os.path.relpath(dir_path, self.subdir),
                type='group_storage',
                children=True))
        # print os.path.basename(os.path.normpath(path))

        list_dict = dict(
            name=os.path.basename(os.path.normpath(path)),
            type='group_storage',
            children=files + directories
        )

        return list_dict

    def blob_to_dict(self, blob):
        blob.reload()
        expiration = datetime.datetime.now() + datetime.timedelta(days=1)
        expiration = int(time.mktime(expiration.timetuple()))
        return dict(
            updated=blob.updated,
            name=os.path.basename(blob.name),
            size=blob.size,
            content_type=blob.content_type,
            signed_url=blob.generate_signed_url(expiration),
            public_url=blob.public_url)

    def Get(self, path, to_dict=True):
        """Get selected file info if the path matches.

        :type path: string
        :param path: The relative path to the file.
        :type to_dict: bool
        :param to_dict: Return the object as a dictionary.
        """
        path = os.path.join(self.subdir, path)
        blob = self.bucket.blob(path)
        if blob.exists():
            if to_dict:
                return self.blob_to_dict(blob)
            else:
                return blob
        else:
            return None

    def Post(self, full_path, path=None):
        """Create new blob and upload data to it.
        """
        path = path if path else os.path.join('_', os.path.basename(full_path))
        blob = self.bucket.blob(path)
        if blob.exists():
            return None
        blob.upload_from_filename(full_path)
        return blob
        # return self.blob_to_dict(blob)  # Has issues with threading

    def Delete(self, path):
        """Delete blob (when removing an asset or replacing a preview)"""

        # We want to get the actual blob to delete
        blob = self.Get(path, to_dict=False)
        try:
            blob.delete()
            return True
        except NotFound:
            return None

    def update_name(self, blob, name):
        """Set the ContentDisposition metadata so that when a file is downloaded
        it has a human-readable name.
        """
        blob.content_disposition = u'attachment; filename="{0}"'.format(name)
        blob.patch()


def update_file_name(node):
    """Assign to the GCS blob the same name as the asset node. This way when
    downloading an asset we get a human-readable name.
    """

    # Process only files that are not processing
    if node['properties'].get('status', '') == 'processing':
        return

    def _format_name(name, override_ext, size=None, map_type=u''):
        root, _ = os.path.splitext(name)
        size = u'-{}'.format(size) if size else u''
        map_type = u'-{}'.format(map_type) if map_type else u''
        return u'{}{}{}{}'.format(root, size, map_type, override_ext)

    def _update_name(file_id, file_props):
        files_collection = current_app.data.driver.db['files']
        file_doc = files_collection.find_one({'_id': ObjectId(file_id)})

        if file_doc is None or file_doc.get('backend') != 'gcs':
            return

        # For textures -- the map type should be part of the name.
        map_type = file_props.get('map_type', u'')

        storage = GoogleCloudStorageBucket(str(node['project']))
        blob = storage.Get(file_doc['file_path'], to_dict=False)
        # Pick file extension from original filename
        _, ext = os.path.splitext(file_doc['filename'])
        name = _format_name(node['name'], ext, map_type=map_type)
        storage.update_name(blob, name)

        # Assign the same name to variations
        for v in file_doc.get('variations', []):
            _, override_ext = os.path.splitext(v['file_path'])
            name = _format_name(node['name'], override_ext, v['size'], map_type=map_type)
            blob = storage.Get(v['file_path'], to_dict=False)
            if blob is None:
                log.info('Unable to find blob for file %s in project %s. This can happen if the '
                         'video encoding is still processing.', v['file_path'], node['project'])
                continue
            storage.update_name(blob, name)

    # Currently we search for 'file' and 'files' keys in the object properties.
    # This could become a bit more flexible and rely on a true reference of the
    # file object type from the schema.
    if 'file' in node['properties']:
        _update_name(node['properties']['file'], {})

    if 'files' in node['properties']:
        for file_props in node['properties']['files']:
            _update_name(file_props['file'], file_props)
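A rough usage sketch of the bucket wrapper (hypothetical: it needs Google Cloud credentials, a Flask app context, and a project ID to use as bucket name; the values below are made up):

# Hypothetical sketch; the bucket name/project ID and file path are placeholders.
import os
from pillar.api.utils.gcs import GoogleCloudStorageBucket

storage = GoogleCloudStorageBucket('5673541534134d2a16d0dc57')
blob = storage.Post('/tmp/render.png')           # uploads under the '_' subdir
info = storage.Get(os.path.basename(blob.name))  # dict with signed_url, size, ...
listing = storage.List('')                       # listing of the '_' subdir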
203
pillar/api/utils/imaging.py
Normal file
@@ -0,0 +1,203 @@
|
||||
import os
|
||||
import json
|
||||
import subprocess
|
||||
from PIL import Image
|
||||
from flask import current_app
|
||||
|
||||
|
||||
def generate_local_thumbnails(name_base, src):
|
||||
"""Given a source image, use Pillow to generate thumbnails according to the
|
||||
application settings.
|
||||
|
||||
:param name_base: the thumbnail will get a field 'name': '{basename}-{thumbsize}.jpg'
|
||||
:type name_base: str
|
||||
:param src: the path of the image to be thumbnailed
|
||||
:type src: str
|
||||
"""
|
||||
|
||||
thumbnail_settings = current_app.config['UPLOADS_LOCAL_STORAGE_THUMBNAILS']
|
||||
thumbnails = []
|
||||
|
||||
save_to_base, _ = os.path.splitext(src)
|
||||
name_base, _ = os.path.splitext(name_base)
|
||||
|
||||
for size, settings in thumbnail_settings.iteritems():
|
||||
dst = '{0}-{1}{2}'.format(save_to_base, size, '.jpg')
|
||||
name = '{0}-{1}{2}'.format(name_base, size, '.jpg')
|
||||
|
||||
if settings['crop']:
|
||||
resize_and_crop(src, dst, settings['size'])
|
||||
width, height = settings['size']
|
||||
else:
|
||||
im = Image.open(src).convert('RGB')
|
||||
im.thumbnail(settings['size'])
|
||||
im.save(dst, "JPEG")
|
||||
width, height = im.size
|
||||
|
||||
thumb_info = {'size': size,
|
||||
'file_path': name,
|
||||
'local_path': dst,
|
||||
'length': os.stat(dst).st_size,
|
||||
'width': width,
|
||||
'height': height,
|
||||
'md5': '',
|
||||
'content_type': 'image/jpeg'}
|
||||
|
||||
if size == 't':
|
||||
thumb_info['is_public'] = True
|
||||
|
||||
thumbnails.append(thumb_info)
|
||||
|
||||
return thumbnails
|
||||


def resize_and_crop(img_path, modified_path, size, crop_type='middle'):
    """
    Resize and crop an image to fit the specified size. Thanks to:
    https://gist.github.com/sigilioso/2957026

    args:
        img_path: path for the image to resize.
        modified_path: path to store the modified image.
        size: `(width, height)` tuple.
        crop_type: can be 'top', 'middle' or 'bottom', depending on this
            value, the image will be cropped keeping the 'top/left', 'middle'
            or 'bottom/right' of the image to fit the size.
    raises:
        Exception: if the file in img_path cannot be opened, or there are
            problems saving the image.
        ValueError: if an invalid `crop_type` is provided.

    """
    # If height is higher we resize vertically, if not we resize horizontally
    img = Image.open(img_path).convert('RGB')
    # Get current and desired ratio for the images
    img_ratio = img.size[0] / float(img.size[1])
    ratio = size[0] / float(size[1])
    # The image is scaled/cropped vertically or horizontally depending on the ratio
    if ratio > img_ratio:
        img = img.resize((size[0], int(round(size[0] * img.size[1] / img.size[0]))),
                         Image.ANTIALIAS)
        # Crop in the top, middle or bottom
        if crop_type == 'top':
            box = (0, 0, img.size[0], size[1])
        elif crop_type == 'middle':
            box = (0, int(round((img.size[1] - size[1]) / 2)), img.size[0],
                   int(round((img.size[1] + size[1]) / 2)))
        elif crop_type == 'bottom':
            box = (0, img.size[1] - size[1], img.size[0], img.size[1])
        else:
            raise ValueError('ERROR: invalid value for crop_type')
        img = img.crop(box)
    elif ratio < img_ratio:
        img = img.resize((int(round(size[1] * img.size[0] / img.size[1])), size[1]),
                         Image.ANTIALIAS)
        # Crop in the top, middle or bottom
        if crop_type == 'top':
            box = (0, 0, size[0], img.size[1])
        elif crop_type == 'middle':
            box = (int(round((img.size[0] - size[0]) / 2)), 0,
                   int(round((img.size[0] + size[0]) / 2)), img.size[1])
        elif crop_type == 'bottom':
            box = (img.size[0] - size[0], 0, img.size[0], img.size[1])
        else:
            raise ValueError('ERROR: invalid value for crop_type')
        img = img.crop(box)
    else:
        img = img.resize((size[0], size[1]),
                         Image.ANTIALIAS)
        # If the scale is the same, we do not need to crop
    img.save(modified_path, "JPEG")
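A standalone usage sketch of the helper (file paths are hypothetical):

# Scale down and centre-crop a photo to exactly 320x240 pixels.
resize_and_crop('/tmp/uploads/photo.jpg', '/tmp/uploads/photo-320.jpg',
                (320, 240), crop_type='middle')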


def get_video_data(filepath):
    """Return video duration and resolution given an input file path"""
    outdata = None
    ffprobe_inspect = [
        current_app.config['BIN_FFPROBE'],
        '-loglevel',
        'error',
        '-show_streams',
        filepath,
        '-print_format',
        'json']

    ffprobe_output = json.loads(subprocess.check_output(ffprobe_inspect))

    video_stream = None
    # Loop through audio and video streams searching for the video
    for stream in ffprobe_output['streams']:
        if stream['codec_type'] == 'video':
            video_stream = stream

    if video_stream:
        # If video is webm we can't get the duration (seems to be an ffprobe
        # issue)
        if video_stream['codec_name'] == 'vp8':
            duration = None
        else:
            duration = int(float(video_stream['duration']))
        outdata = dict(
            duration=duration,
            res_x=video_stream['width'],
            res_y=video_stream['height'],
        )
        if video_stream['sample_aspect_ratio'] != '1:1':
            print '[warning] Pixel aspect ratio is not square!'

    return outdata
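A usage sketch, assuming BIN_FFPROBE points at a working ffprobe binary and the path is a readable video file (path and values are illustrative):

data = get_video_data('/tmp/uploads/turntable.mp4')
if data:
    # e.g. {'duration': 42, 'res_x': 1920, 'res_y': 1080}
    print('{0}x{1}, {2}s'.format(data['res_x'], data['res_y'], data['duration']))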


def ffmpeg_encode(src, format, res_y=720):
    # The specific FFMpeg command, called multiple times
    args = []
    args.append("-i")
    args.append(src)

    if format == 'mp4':
        # Example mp4 encoding
        # ffmpeg -i INPUT -vcodec libx264 -pix_fmt yuv420p -preset fast -crf 20
        # -acodec libfdk_aac -ab 112k -ar 44100 -movflags +faststart OUTPUT
        args.extend([
            '-threads', '1',
            '-vf', 'scale=-2:{0}'.format(res_y),
            '-vcodec', 'libx264',
            '-pix_fmt', 'yuv420p',
            '-preset', 'fast',
            '-crf', '20',
            '-acodec', 'libfdk_aac', '-ab', '112k', '-ar', '44100',
            '-movflags', '+faststart'])
    elif format == 'webm':
        # Example webm encoding
        # ffmpeg -i INPUT -vcodec libvpx -g 120 -lag-in-frames 16 -deadline good
        # -cpu-used 0 -vprofile 0 -qmax 51 -qmin 11 -slices 4 -b:v 2M -f webm

        args.extend([
            '-vf', 'scale=-2:{0}'.format(res_y),
            '-vcodec', 'libvpx',
            '-g', '120',
            '-lag-in-frames', '16',
            '-deadline', 'good',
            '-cpu-used', '0',
            '-vprofile', '0',
            '-qmax', '51', '-qmin', '11', '-slices', '4', '-b:v', '2M',
            # '-acodec', 'libmp3lame', '-ab', '112k', '-ar', '44100',
            '-f', 'webm'])

    if not os.environ.get('VERBOSE'):
        args.extend(['-loglevel', 'quiet'])

    dst = os.path.splitext(src)
    dst = "{0}-{1}p.{2}".format(dst[0], res_y, format)
    args.append(dst)
    print "Encoding {0} to {1}".format(src, format)
    returncode = subprocess.call([current_app.config['BIN_FFMPEG']] + args)
    if returncode == 0:
        print "Successfully encoded {0}".format(dst)
    else:
        print "Error during encode"
        print "Code: {0}".format(returncode)
        print "Command: {0}".format(current_app.config['BIN_FFMPEG'] + " " + " ".join(args))
        dst = None
    # return path of the encoded video
    return dst
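A usage sketch, assuming an application context with BIN_FFMPEG configured; the output path follows the '{basename}-{res_y}p.{format}' pattern built above (the source path is hypothetical):

# Encode 720p MP4 and WebM variations next to the source file.
mp4_path = ffmpeg_encode('/tmp/uploads/turntable.mov', 'mp4', res_y=720)
webm_path = ffmpeg_encode('/tmp/uploads/turntable.mov', 'webm', res_y=720)
# e.g. '/tmp/uploads/turntable-720p.mp4', or None if the encode failed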
29
pillar/api/utils/mongo.py
Normal file
@@ -0,0 +1,29 @@
"""Utility functions for MongoDB stuff."""

from bson import ObjectId
from flask import current_app
from werkzeug.exceptions import NotFound


def find_one_or_404(collection_name, object_id,
                    projection=None):
    """Returns the found object from the collection, or raises a NotFound exception.

    :param collection_name: name of the collection, such as 'users' or 'files'
    :type collection_name: str
    :param object_id: ID of the object to find.
    :type object_id: str or bson.ObjectId
    :returns: the found object
    :rtype: dict

    :raises: werkzeug.exceptions.NotFound
    """

    collection = current_app.data.driver.db[collection_name]
    found = collection.find_one(ObjectId(object_id),
                                projection=projection)

    if found is None:
        raise NotFound()

    return found
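A usage sketch inside a hypothetical view function; the collection and field names are illustrative:

from pillar.api.utils.mongo import find_one_or_404

def view_user(user_id):
    # Raises werkzeug.exceptions.NotFound (an HTTP 404) for unknown ids.
    user = find_one_or_404('users', user_id,
                           projection={'username': 1, 'email': 1})
    return user['username']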
83
pillar/api/utils/storage.py
Normal file
@@ -0,0 +1,83 @@
import subprocess

import os
from flask import current_app
from pillar.api.utils.gcs import GoogleCloudStorageBucket


def get_sizedata(filepath):
    outdata = dict(
        size=int(os.stat(filepath).st_size)
    )
    return outdata


def rsync(path, remote_dir=''):
    BIN_SSH = current_app.config['BIN_SSH']
    BIN_RSYNC = current_app.config['BIN_RSYNC']

    DRY_RUN = False
    arguments = ['--verbose', '--ignore-existing', '--recursive', '--human-readable']
    logs_path = current_app.config['CDN_SYNC_LOGS']
    storage_address = current_app.config['CDN_STORAGE_ADDRESS']
    user = current_app.config['CDN_STORAGE_USER']
    rsa_key_path = current_app.config['CDN_RSA_KEY']
    known_hosts_path = current_app.config['CDN_KNOWN_HOSTS']

    if DRY_RUN:
        arguments.append('--dry-run')
    folder_arguments = list(arguments)
    if rsa_key_path:
        folder_arguments.append(
            '-e ' + BIN_SSH + ' -i ' + rsa_key_path + ' -o "StrictHostKeyChecking=no"')
    # if known_hosts_path:
    #     folder_arguments.append("-o UserKnownHostsFile " + known_hosts_path)
    folder_arguments.append("--log-file=" + logs_path + "/rsync.log")
    folder_arguments.append(path)
    folder_arguments.append(user + "@" + storage_address + ":/public/" + remote_dir)
    # print (folder_arguments)
    devnull = open(os.devnull, 'wb')
    # DEBUG CONFIG
    # print folder_arguments
    # proc = subprocess.Popen(['rsync'] + folder_arguments)
    # stdout, stderr = proc.communicate()
    subprocess.Popen(['nohup', BIN_RSYNC] + folder_arguments, stdout=devnull, stderr=devnull)


def remote_storage_sync(path):  # can be both folder and file
    if os.path.isfile(path):
        filename = os.path.split(path)[1]
        rsync(path, filename[:2] + '/')
    else:
        if os.path.exists(path):
            rsync(path)
        else:
            raise IOError('ERROR: path not found')
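A usage sketch, assuming the CDN_* settings are configured; note that a single file is sharded into a remote directory named after the first two characters of its filename (paths are hypothetical):

# A file such as 'ab12cd34-t.jpg' ends up under 'ab/' below /public/ on the CDN host.
remote_storage_sync('/data/storage/ab12cd34-t.jpg')
# A directory is rsynced as-is into the remote /public/ root.
remote_storage_sync('/data/storage/previews')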


def push_to_storage(project_id, full_path, backend='cgs'):
    """Move a file from temporary/processing local storage to a storage endpoint.
    By default we store items in a Google Cloud Storage bucket named after the
    project id.
    """

    def push_single_file(project_id, full_path, backend):
        if backend == 'cgs':
            storage = GoogleCloudStorageBucket(project_id, subdir='_')
            blob = storage.Post(full_path)
            # XXX Make public on the fly if it's an image and small preview.
            # This should happen by reading the database (push to storage
            # should change to accommodate it).
            if blob is not None and full_path.endswith('-t.jpg'):
                blob.make_public()
            os.remove(full_path)

    if os.path.isfile(full_path):
        push_single_file(project_id, full_path, backend)
    else:
        if os.path.exists(full_path):
            for root, dirs, files in os.walk(full_path):
                for name in files:
                    push_single_file(project_id, os.path.join(root, name), backend)
        else:
            raise IOError('ERROR: path not found')
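A usage sketch; the project id and paths are hypothetical, and a Google Cloud Storage bucket named after the project id is assumed to exist (note the backend key is spelled 'cgs' in this code):

# Push a single processed file; small '-t.jpg' previews are made public on upload.
push_to_storage('5673541534134154134513c3', '/tmp/processing/ab12cd34-t.jpg')

# Push every file generated for an encode job; local copies are removed as they go.
push_to_storage('5673541534134154134513c3', '/tmp/processing/ab12cd34/')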