Refactored content of main __init__ into utils

Author: Francesco Siddi
Date:   2016-01-26 15:34:56 +01:00
Parent: 8256ab40dc
Commit: 1a5fcc1744

6 changed files with 293 additions and 288 deletions

File 1 of 6: application/__init__.py

@@ -1,166 +1,20 @@
 import os
 import json
-import requests
+from bson import ObjectId
+from datetime import datetime
 import bugsnag
 from bugsnag.flask import handle_exceptions
-from eve import Eve
-from pymongo import MongoClient
-from eve.auth import TokenAuth
-from eve.auth import BasicAuth
-from eve.io.mongo import Validator
-from eve.methods.post import post_internal
-from bson import ObjectId
 from flask import g
 from flask import request
 from flask import url_for
 from flask import abort
-from datetime import datetime
-from datetime import timedelta
+from eve import Eve
+from eve.io.mongo import Validator
+from application.utils.authentication import validate_token
+from application.utils.authentication import NewAuth
 
 RFC1123_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'
-
-
-class SystemUtility():
-    def __new__(cls, *args, **kwargs):
-        raise TypeError("Base class may not be instantiated")
-
-    @staticmethod
-    def blender_id_endpoint():
-        """Gets the endpoint for the authentication API. If the env variable
-        is defined, it's possible to override the (default) production address.
-        """
-        return os.environ.get(
-            'BLENDER_ID_ENDPOINT', "https://www.blender.org/id")
-
-
-def validate(token):
-    """Validate a token against the Blender ID server. This simple lookup
-    returns a dictionary with the following keys:
-
-    - message: a success message
-    - valid: a boolean, stating if the token is valid
-    - user: a dictionary with information regarding the user
-    """
-    payload = dict(
-        token=token)
-    try:
-        r = requests.post("{0}/u/validate_token".format(
-            SystemUtility.blender_id_endpoint()), data=payload)
-    except requests.exceptions.ConnectionError as e:
-        raise e
-
-    if r.status_code == 200:
-        response = r.json()
-    else:
-        response = None
-    return response
-
-
-def validate_token():
-    """Validate the token provided in the request and populate the current_user
-    flask.g object, so that permissions and access to a resource can be defined
-    from it.
-    """
-    if not request.authorization:
-        # If no authorization headers are provided, we are getting a request
-        # from a non-logged-in user. Proceed accordingly.
-        return None
-
-    current_user = {}
-
-    token = request.authorization.username
-    tokens_collection = app.data.driver.db['tokens']
-
-    lookup = {'token': token, 'expire_time': {"$gt": datetime.now()}}
-    db_token = tokens_collection.find_one(lookup)
-    if not db_token:
-        # If no valid token is found, we issue a new request to the Blender ID
-        # to verify the validity of the token. We will get basic user info if
-        # the user is authorized and we will make a new token.
-        validation = validate(token)
-        if validation['status'] == 'success':
-            users = app.data.driver.db['users']
-            email = validation['data']['user']['email']
-            db_user = users.find_one({'email': email})
-
-            # Ensure unique username
-            username = email.split('@')[0]
-
-            def make_unique_username(username, index=1):
-                """Ensure uniqueness of a username by appending an incremental
-                digit at the end of it.
-                """
-                user_from_username = users.find_one({'username': username})
-                if user_from_username:
-                    if index > 1:
-                        index += 1
-                        username = username[:-1]
-                    username = "{0}{1}".format(username, index)
-                    return make_unique_username(username, index=index)
-                return username
-
-            # Check for min length of username (otherwise validation fails)
-            username = "___{0}".format(username) if len(username) < 3 else username
-            username = make_unique_username(username)
-
-            full_name = username
-            if not db_user:
-                user_data = {
-                    'full_name': full_name,
-                    'username': username,
-                    'email': email,
-                    'auth': [{
-                        'provider': 'blender-id',
-                        'user_id': str(validation['data']['user']['id']),
-                        'token': ''}],
-                    'settings': {
-                        'email_communications': 1
-                    }
-                }
-                r = post_internal('users', user_data)
-                user_id = r[0]['_id']
-                groups = None
-            else:
-                user_id = db_user['_id']
-                groups = db_user['groups']
-
-            token_data = {
-                'user': user_id,
-                'token': token,
-                'expire_time': datetime.now() + timedelta(hours=1)
-            }
-            post_internal('tokens', token_data)
-            current_user = dict(
-                user_id=user_id,
-                token=token,
-                groups=groups,
-                token_expire_time=datetime.now() + timedelta(hours=1))
-            #return token_data
-        else:
-            return None
-    else:
-        users = app.data.driver.db['users']
-        db_user = users.find_one(db_token['user'])
-        current_user = dict(
-            user_id=db_token['user'],
-            token=db_token['token'],
-            groups=db_user['groups'],
-            token_expire_time=db_token['expire_time'])
-
-    setattr(g, 'current_user', current_user)
-
-
-class NewAuth(TokenAuth):
-    def check_auth(self, token, allowed_roles, resource, method):
-        if not token:
-            return False
-        else:
-            validate_token()
-        return True
 class ValidateCustomFields(Validator):
     def convert_properties(self, properties, node_schema):
         for prop in node_schema:
@@ -219,13 +73,8 @@ class ValidateCustomFields(Validator):
             self._error(
                 field, "Error validating properties")
 
-
-def post_item(entry, data):
-    return post_internal(entry, data)
-
 # We specify a settings.py file because when running on wsgi we can't detect it
-# automatically. The default path (which work in Docker) can be overridden with
+# automatically. The default path (which works in Docker) can be overridden with
 # an env variable.
 settings_path = os.environ.get('EVE_SETTINGS', '/data/git/pillar/pillar/settings.py')
 app = Eve(settings=settings_path, validator=ValidateCustomFields, auth=NewAuth)
@@ -233,140 +82,17 @@ app = Eve(settings=settings_path, validator=ValidateCustomFields, auth=NewAuth)
 import config
 app.config.from_object(config.Deployment)
 
-client = MongoClient(app.config['MONGO_HOST'], 27017)
-db = client.eve
 
 bugsnag.configure(
     api_key = app.config['BUGSNAG_API_KEY'],
     project_root = "/data/git/pillar/pillar",
 )
 handle_exceptions(app)
 
-from utils.cdn import hash_file_path
+from application.utils.authorization import check_permissions
+from application.utils.cdn import hash_file_path
 from application.utils.gcs import GoogleCloudStorageBucket
+from application.utils.gcs import update_file_name
-
-
-def update_file_name(item):
-    """Assign to the GCS blob the same name as the asset node. This way, when
-    downloading an asset, we get a human-readable name.
-    """
-    def _format_name(name, format, size=None):
-        # If the name already has an extension, and such extension matches the
-        # format, only inject the size.
-        root, ext = os.path.splitext(name)
-        size = "-{0}".format(size) if size else ''
-        ext = ext if len(ext) > 1 and ext[1:] == format else ".{0}".format(format)
-        return "{0}{1}{2}".format(root, size, ext)
-
-    def _update_name(item, file_id):
-        files_collection = app.data.driver.db['files']
-        f = files_collection.find_one({'_id': file_id})
-        status = item['properties']['status']
-        if f and f['backend'] == 'gcs' and status != 'processing':
-            # Process only files that are on GCS and that are not processing
-            try:
-                storage = GoogleCloudStorageBucket(str(item['project']))
-                blob = storage.Get(f['file_path'], to_dict=False)
-                name = _format_name(item['name'], f['format'])
-                storage.update_name(blob, name)
-                try:
-                    # Assign the same name to variations
-                    for v in f['variations']:
-                        blob = storage.Get(v['file_path'], to_dict=False)
-                        name = _format_name(item['name'], v['format'], v['size'])
-                        storage.update_name(blob, name)
-                except KeyError:
-                    pass
-            except AttributeError:
-                bugsnag.notify(Exception('Missing or conflicting ids detected'),
-                               meta_data={'nodes_info':
-                                          {'node_id': item['_id'], 'file_id': file_id}})
-
-    # Currently we search for 'file' and 'files' keys in the object properties.
-    # This could become a bit more flexible and rely on a true reference of the
-    # file object type from the schema.
-    if 'file' in item['properties']:
-        _update_name(item, item['properties']['file'])
-    elif 'files' in item['properties']:
-        for f in item['properties']['files']:
-            _update_name(item, f['file'])
-
-
-def check_permissions(resource, method, append_allowed_methods=False):
-    """Check user permissions to access a node. We look up node permissions from
-    world to groups to users and match them with the computed user permissions.
-    If there is no match, we return 403.
-    """
-    if method != 'GET' and append_allowed_methods:
-        raise ValueError("append_allowed_methods only allowed with 'GET' method")
-
-    allowed_methods = []
-    current_user = g.get('current_user', None)
-
-    if 'permissions' in resource:
-        # If permissions are embedded in the node (this overrides any other
-        # matching permission originally set at node_type level)
-        resource_permissions = resource['permissions']
-    else:
-        resource_permissions = None
-
-    if 'node_type' in resource:
-        if type(resource['node_type']) is dict:
-            # If the node_type is embedded in the document, extract permissions
-            # from there
-            computed_permissions = resource['node_type']['permissions']
-        else:
-            # If the node_type is referenced with an ObjectID (was not embedded
-            # on request), query for it from the database and get the permissions
-            # node_types_collection = app.data.driver.db['node_types']
-            # node_type = node_types_collection.find_one(resource['node_type'])
-            if type(resource['project']) is dict:
-                project = resource['project']
-            else:
-                projects_collection = app.data.driver.db['projects']
-                project = projects_collection.find_one(resource['project'])
-            node_type = next(
-                (item for item in project['node_types'] if item.get('name')
-                 and item['name'] == resource['node_type']), None)
-            computed_permissions = node_type['permissions']
-    else:
-        computed_permissions = None
-
-    # Override computed_permissions if override is provided
-    if resource_permissions and computed_permissions:
-        for k, v in resource_permissions.iteritems():
-            computed_permissions[k] = v
-    elif resource_permissions and not computed_permissions:
-        computed_permissions = resource_permissions
-
-    if current_user:
-        # If the user is authenticated, proceed to compare the group permissions
-        for permission in computed_permissions['groups']:
-            if permission['group'] in current_user['groups']:
-                allowed_methods += permission['methods']
-                if method in permission['methods'] and not append_allowed_methods:
-                    return
-
-        for permission in computed_permissions['users']:
-            if current_user['user_id'] == permission['user']:
-                allowed_methods += permission['methods']
-                if method in permission['methods'] and not append_allowed_methods:
-                    return
-
-    # Check if the node is public or private. This must be set for non-logged-in
-    # users to see the content. For most BI projects this is on by default, while
-    # for private projects it will not be set at all.
-    if 'world' in computed_permissions:
-        allowed_methods += computed_permissions['world']
-        if method in computed_permissions['world'] and not append_allowed_methods:
-            return
-
-    if append_allowed_methods and method in allowed_methods:
-        resource['allowed_methods'] = list(set(allowed_methods))
-        return resource
-
-    return None
 
 
 def before_returning_item_permissions(response):
     # Run validation process, since GET on nodes entry point is public
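
The net effect of these hunks: helpers that lived in application/__init__.py are now imported from the new application.utils package. A minimal sketch of the resulting import surface, with module paths taken from the diff itself (the grouped one-line imports are illustrative shorthand):

    # What application/__init__.py now pulls in from application.utils
    from application.utils.authentication import validate_token, NewAuth
    from application.utils.authorization import check_permissions
    from application.utils.cdn import hash_file_path
    from application.utils.gcs import GoogleCloudStorageBucket, update_file_name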

File 2 of 6

@@ -9,7 +9,6 @@ from flask import jsonify
 from flask import send_from_directory
 from eve.methods.put import put_internal
 from application import app
-from application import post_item
 from application.utils.imaging import generate_local_thumbnails
 from application.utils.imaging import get_video_data
 from application.utils.imaging import ffmpeg_encode

File 3 of 6: application/utils/authentication.py (new file)

@@ -0,0 +1,146 @@
+import requests
+from datetime import datetime
+from datetime import timedelta
+from flask import g
+from flask import request
+from flask import url_for
+from flask import abort
+from eve.auth import TokenAuth
+from eve.auth import BasicAuth
+
+
+class SystemUtility():
+    def __new__(cls, *args, **kwargs):
+        raise TypeError("Base class may not be instantiated")
+
+    @staticmethod
+    def blender_id_endpoint():
+        """Gets the endpoint for the authentication API. If the env variable
+        is defined, it's possible to override the (default) production address.
+        """
+        return os.environ.get(
+            'BLENDER_ID_ENDPOINT', "https://www.blender.org/id")
+
+
+def validate(token):
+    """Validate a token against the Blender ID server. This simple lookup
+    returns a dictionary with the following keys:
+
+    - message: a success message
+    - valid: a boolean, stating if the token is valid
+    - user: a dictionary with information regarding the user
+    """
+    payload = dict(
+        token=token)
+    try:
+        r = requests.post("{0}/u/validate_token".format(
+            SystemUtility.blender_id_endpoint()), data=payload)
+    except requests.exceptions.ConnectionError as e:
+        raise e
+
+    if r.status_code == 200:
+        response = r.json()
+    else:
+        response = None
+    return response
+
+
+def validate_token():
+    """Validate the token provided in the request and populate the current_user
+    flask.g object, so that permissions and access to a resource can be defined
+    from it.
+    """
+    if not request.authorization:
+        # If no authorization headers are provided, we are getting a request
+        # from a non-logged-in user. Proceed accordingly.
+        return None
+
+    current_user = {}
+
+    token = request.authorization.username
+    tokens_collection = app.data.driver.db['tokens']
+
+    lookup = {'token': token, 'expire_time': {"$gt": datetime.now()}}
+    db_token = tokens_collection.find_one(lookup)
+    if not db_token:
+        # If no valid token is found, we issue a new request to the Blender ID
+        # to verify the validity of the token. We will get basic user info if
+        # the user is authorized and we will make a new token.
+        validation = validate(token)
+        if validation['status'] == 'success':
+            users = app.data.driver.db['users']
+            email = validation['data']['user']['email']
+            db_user = users.find_one({'email': email})
+
+            # Ensure unique username
+            username = email.split('@')[0]
+
+            def make_unique_username(username, index=1):
+                """Ensure uniqueness of a username by appending an incremental
+                digit at the end of it.
+                """
+                user_from_username = users.find_one({'username': username})
+                if user_from_username:
+                    if index > 1:
+                        index += 1
+                        username = username[:-1]
+                    username = "{0}{1}".format(username, index)
+                    return make_unique_username(username, index=index)
+                return username
+
+            # Check for min length of username (otherwise validation fails)
+            username = "___{0}".format(username) if len(username) < 3 else username
+            username = make_unique_username(username)
+
+            full_name = username
+            if not db_user:
+                user_data = {
+                    'full_name': full_name,
+                    'username': username,
+                    'email': email,
+                    'auth': [{
+                        'provider': 'blender-id',
+                        'user_id': str(validation['data']['user']['id']),
+                        'token': ''}],
+                    'settings': {
+                        'email_communications': 1
+                    }
+                }
+                r = post_internal('users', user_data)
+                user_id = r[0]['_id']
+                groups = None
+            else:
+                user_id = db_user['_id']
+                groups = db_user['groups']
+
+            token_data = {
+                'user': user_id,
+                'token': token,
+                'expire_time': datetime.now() + timedelta(hours=1)
+            }
+            post_internal('tokens', token_data)
+            current_user = dict(
+                user_id=user_id,
+                token=token,
+                groups=groups,
+                token_expire_time=datetime.now() + timedelta(hours=1))
+            #return token_data
+        else:
+            return None
+    else:
+        users = app.data.driver.db['users']
+        db_user = users.find_one(db_token['user'])
+        current_user = dict(
+            user_id=db_token['user'],
+            token=db_token['token'],
+            groups=db_user['groups'],
+            token_expire_time=db_token['expire_time'])
+
+    setattr(g, 'current_user', current_user)
+
+
+class NewAuth(TokenAuth):
+    def check_auth(self, token, allowed_roles, resource, method):
+        if not token:
+            return False
+        else:
+            validate_token()
+        return True
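
NewAuth delegates to validate_token(), which reads the token from the username field of HTTP Basic auth (request.authorization.username). A hedged client-side sketch of what that implies, assuming a local Eve instance and a nodes resource (URL, port, and resource name are illustrative):

    import requests

    # The token travels in the Basic-auth username slot, with an empty
    # password; validate_token() picks it up from request.authorization.
    r = requests.get('http://localhost:5000/nodes',
                     auth=('my-blender-id-token', ''))
    print(r.status_code)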

File 4 of 6: application/utils/authorization.py (new file)

@@ -0,0 +1,83 @@
+from flask import g
+from flask import request
+from flask import url_for
+from flask import abort
+from application import app
+
+
+def check_permissions(resource, method, append_allowed_methods=False):
+    """Check user permissions to access a node. We look up node permissions from
+    world to groups to users and match them with the computed user permissions.
+    If there is no match, we return 403.
+    """
+    if method != 'GET' and append_allowed_methods:
+        raise ValueError("append_allowed_methods only allowed with 'GET' method")
+
+    allowed_methods = []
+    current_user = g.get('current_user', None)
+
+    if 'permissions' in resource:
+        # If permissions are embedded in the node (this overrides any other
+        # matching permission originally set at node_type level)
+        resource_permissions = resource['permissions']
+    else:
+        resource_permissions = None
+
+    if 'node_type' in resource:
+        if type(resource['node_type']) is dict:
+            # If the node_type is embedded in the document, extract permissions
+            # from there
+            computed_permissions = resource['node_type']['permissions']
+        else:
+            # If the node_type is referenced with an ObjectID (was not embedded
+            # on request), query for it from the database and get the permissions
+            # node_types_collection = app.data.driver.db['node_types']
+            # node_type = node_types_collection.find_one(resource['node_type'])
+            if type(resource['project']) is dict:
+                project = resource['project']
+            else:
+                projects_collection = app.data.driver.db['projects']
+                project = projects_collection.find_one(resource['project'])
+            node_type = next(
+                (item for item in project['node_types'] if item.get('name')
+                 and item['name'] == resource['node_type']), None)
+            computed_permissions = node_type['permissions']
+    else:
+        computed_permissions = None
+
+    # Override computed_permissions if override is provided
+    if resource_permissions and computed_permissions:
+        for k, v in resource_permissions.iteritems():
+            computed_permissions[k] = v
+    elif resource_permissions and not computed_permissions:
+        computed_permissions = resource_permissions
+
+    if current_user:
+        # If the user is authenticated, proceed to compare the group permissions
+        for permission in computed_permissions['groups']:
+            if permission['group'] in current_user['groups']:
+                allowed_methods += permission['methods']
+                if method in permission['methods'] and not append_allowed_methods:
+                    return
+
+        for permission in computed_permissions['users']:
+            if current_user['user_id'] == permission['user']:
+                allowed_methods += permission['methods']
+                if method in permission['methods'] and not append_allowed_methods:
+                    return
+
+    # Check if the node is public or private. This must be set for non-logged-in
+    # users to see the content. For most BI projects this is on by default, while
+    # for private projects it will not be set at all.
+    if 'world' in computed_permissions:
+        allowed_methods += computed_permissions['world']
+        if method in computed_permissions['world'] and not append_allowed_methods:
+            return
+
+    if append_allowed_methods and method in allowed_methods:
+        resource['allowed_methods'] = list(set(allowed_methods))
+        return resource
+
+    return None
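
check_permissions() walks group, user, and world scopes in that order. A sketch of the permissions document shape it expects, inferred from the lookups above (the ObjectIds and method lists are illustrative):

    from bson import ObjectId

    permissions = {
        'groups': [
            {'group': ObjectId('5683e44c1b08ca2518db2211'),  # illustrative id
             'methods': ['GET', 'POST', 'PUT']},
        ],
        'users': [
            {'user': ObjectId('5683e44c1b08ca2518db2212'),   # illustrative id
             'methods': ['GET', 'PUT', 'DELETE']},
        ],
        'world': ['GET'],  # methods granted to non-logged-in visitors
    }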

File 5 of 6: application/utils/gcs.py

@@ -1,6 +1,7 @@
 import os
 import time
 import datetime
+import bugsnag
 from gcloud.storage.client import Client
 from gcloud.exceptions import NotFound
 from oauth2client.client import SignedJwtAssertionCredentials
@@ -149,3 +150,51 @@ class GoogleCloudStorageBucket(object):
""" """
blob.content_disposition = "attachment; filename={0}".format(name) blob.content_disposition = "attachment; filename={0}".format(name)
blob.patch() blob.patch()
def update_file_name(item):
"""Assign to the CGS blob the same name of the asset node. This way when
downloading an asset we get a human-readable name.
"""
def _format_name(name, format, size=None):
# If the name already has an extention, and such extension matches the
# format, only inject the size.
root, ext = os.path.splitext(name)
size = "-{0}".format(size) if size else ''
ext = ext if len(ext) > 1 and ext[1:] == format else ".{0}".format(format)
return "{0}{1}{2}".format(root, size, ext)
def _update_name(item, file_id):
files_collection = app.data.driver.db['files']
f = files_collection.find_one({'_id': file_id})
status = item['properties']['status']
if f and f['backend'] == 'gcs' and status != 'processing':
# Process only files that are on GCS and that are not processing
try:
storage = GoogleCloudStorageBucket(str(item['project']))
blob = storage.Get(f['file_path'], to_dict=False)
name = _format_name(item['name'], f['format'])
storage.update_name(blob, name)
try:
# Assign the same name to variations
for v in f['variations']:
blob = storage.Get(v['file_path'], to_dict=False)
name = _format_name(item['name'], v['format'], v['size'])
storage.update_name(blob, name)
except KeyError:
pass
except AttributeError:
bugsnag.notify(Exception('Missing or conflicting ids detected'),
meta_data={'nodes_info':
{'node_id': item['_id'], 'file_id': file_id}})
# Currently we search for 'file' and 'files' keys in the object properties.
# This could become a bit more flexible and realy on a true reference of the
# file object type from the schema.
if 'file' in item['properties']:
_update_name(item, item['properties']['file'])
elif 'files' in item['properties']:
for f in item['properties']['files']:
_update_name(item, f['file'])
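
The nested _format_name() helper only injects the size and normalizes the extension. A few worked examples of its behaviour; the helper is copied verbatim so the assertions run standalone:

    import os

    def _format_name(name, format, size=None):
        # Verbatim copy of the helper nested in update_file_name() above
        root, ext = os.path.splitext(name)
        size = "-{0}".format(size) if size else ''
        ext = ext if len(ext) > 1 and ext[1:] == format else ".{0}".format(format)
        return "{0}{1}{2}".format(root, size, ext)

    assert _format_name('render.png', 'png') == 'render.png'         # kept as-is
    assert _format_name('render.png', 'png', 't') == 'render-t.png'  # size injected
    assert _format_name('render', 'png') == 'render.png'             # extension added
    assert _format_name('render.exr', 'png') == 'render.png'         # format wins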

File 6 of 6: manage.py

@@ -5,8 +5,6 @@ from eve.methods.put import put_internal
 from eve.methods.post import post_internal
 from flask.ext.script import Manager
 from application import app
-from application import db
-from application import post_item
 from manage.node_types.act import node_type_act
 from manage.node_types.asset import node_type_asset
 from manage.node_types.blog import node_type_blog
@@ -51,6 +49,10 @@ def runserver():
         debug=DEBUG)
 
 
+def post_item(entry, data):
+    return post_internal(entry, data)
+
+
 def put_item(collection, item):
     item_id = item['_id']
     internal_fields = ['_id', '_etag', '_updated', '_created']
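
post_item() is now a thin manage.py-local wrapper around Eve's post_internal(), which needs an application/request context when invoked from a management command. A minimal sketch of calling it (the command name, resource, and payload are illustrative):

    def create_example_group():
        # Outside a normal request, give post_internal a context to work in
        with app.test_request_context():
            r = post_item('groups', {'name': 'example-group'})
            # As in validate_token() above, r[0] holds the created document
            print(r[0]['_id'])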