Updated Eve to 0.6.3

This also updates Cerberus to 0.9.2 and simplejson to 3.8.2.

I've also changed the way we get to the application object, by replacing
   from application import app
with
   from flask import current_app
This commit is contained in:
Sybren A. Stüvel 2016-04-25 15:21:17 +02:00
parent a6258f5193
commit 5116b74d1d
16 changed files with 126 additions and 91 deletions

View File

@@ -5,7 +5,7 @@ from eve.methods.put import put_internal
from flask import Blueprint from flask import Blueprint
from flask import abort from flask import abort
from flask import request from flask import request
from application import app from flask import current_app
from application import utils from application import utils
encoding = Blueprint('encoding', __name__) encoding = Blueprint('encoding', __name__)
@@ -14,11 +14,11 @@ log = logging.getLogger(__name__)
@encoding.route('/zencoder/notifications', methods=['POST']) @encoding.route('/zencoder/notifications', methods=['POST'])
def zencoder_notifications(): def zencoder_notifications():
if app.config['ENCODING_BACKEND'] != 'zencoder': if current_app.config['ENCODING_BACKEND'] != 'zencoder':
log.warning('Received notification from Zencoder but app not configured for Zencoder.') log.warning('Received notification from Zencoder but app not configured for Zencoder.')
return abort(403) return abort(403)
if not app.config['DEBUG']: if not current_app.config['DEBUG']:
# If we are in production, look for the Zencoder header secret # If we are in production, look for the Zencoder header secret
try: try:
notification_secret_request = request.headers[ notification_secret_request = request.headers[
@@ -27,14 +27,14 @@ def zencoder_notifications():
log.warning('Received Zencoder notification without secret.') log.warning('Received Zencoder notification without secret.')
return abort(401) return abort(401)
# If the header is found, check it agains the one in the config # If the header is found, check it agains the one in the config
notification_secret = app.config['ZENCODER_NOTIFICATIONS_SECRET'] notification_secret = current_app.config['ZENCODER_NOTIFICATIONS_SECRET']
if notification_secret_request != notification_secret: if notification_secret_request != notification_secret:
log.warning('Received Zencoder notification with incorrect secret.') log.warning('Received Zencoder notification with incorrect secret.')
return abort(401) return abort(401)
# Cast request data into a dict # Cast request data into a dict
data = request.get_json() data = request.get_json()
files_collection = app.data.driver.db['files'] files_collection = current_app.data.driver.db['files']
# Find the file object based on processing backend and job_id # Find the file object based on processing backend and job_id
lookup = {'processing.backend': 'zencoder', 'processing.job_id': str(data['job']['id'])} lookup = {'processing.backend': 'zencoder', 'processing.job_id': str(data['job']['id'])}
f = files_collection.find_one(lookup) f = files_collection.find_one(lookup)

View File

@@ -16,6 +16,7 @@ from flask import request
from flask import abort from flask import abort
from flask import send_from_directory from flask import send_from_directory
from flask import url_for, helpers from flask import url_for, helpers
from flask import current_app
from application import utils from application import utils
from application.utils import remove_private_keys from application.utils import remove_private_keys
@@ -67,9 +68,7 @@ def build_thumbnails(file_path=None, file_id=None):
variation properties. variation properties.
""" """
from application import app files_collection = current_app.data.driver.db['files']
files_collection = app.data.driver.db['files']
if file_path: if file_path:
# Search file with backend "pillar" and path=file_path # Search file with backend "pillar" and path=file_path
file_ = files_collection.find({"file_path": "{0}".format(file_path)}) file_ = files_collection.find({"file_path": "{0}".format(file_path)})
@@ -79,7 +78,7 @@ def build_thumbnails(file_path=None, file_id=None):
file_ = files_collection.find_one({"_id": ObjectId(file_id)}) file_ = files_collection.find_one({"_id": ObjectId(file_id)})
file_path = file_['name'] file_path = file_['name']
file_full_path = safe_join(safe_join(app.config['SHARED_DIR'], file_path[:2]), file_full_path = safe_join(safe_join(current_app.config['SHARED_DIR'], file_path[:2]),
file_path) file_path)
# Does the original file exist? # Does the original file exist?
if not os.path.isfile(file_full_path): if not os.path.isfile(file_full_path):
@@ -120,11 +119,10 @@ def build_thumbnails(file_path=None, file_id=None):
@file_storage.route('/file', methods=['POST']) @file_storage.route('/file', methods=['POST'])
@file_storage.route('/file/<path:file_name>', methods=['GET', 'POST']) @file_storage.route('/file/<path:file_name>', methods=['GET', 'POST'])
def index(file_name=None): def index(file_name=None):
from application import app
# GET file -> read it # GET file -> read it
if request.method == 'GET': if request.method == 'GET':
return send_from_directory(app.config['STORAGE_DIR'], file_name) return send_from_directory(current_app.config['STORAGE_DIR'], file_name)
# POST file -> save it # POST file -> save it
@@ -137,7 +135,7 @@ def index(file_name=None):
# Determine & create storage directory # Determine & create storage directory
folder_name = file_name[:2] folder_name = file_name[:2]
file_folder_path = helpers.safe_join(app.config['STORAGE_DIR'], folder_name) file_folder_path = helpers.safe_join(current_app.config['STORAGE_DIR'], folder_name)
if not os.path.exists(file_folder_path): if not os.path.exists(file_folder_path):
log.info('Creating folder path %r', file_folder_path) log.info('Creating folder path %r', file_folder_path)
os.mkdir(file_folder_path) os.mkdir(file_folder_path)
@@ -157,12 +155,11 @@ def process_file(file_id, src_file):
:param file_id: '_id' key of the file :param file_id: '_id' key of the file
:param src_file: POSTed data of the file, lacks private properties. :param src_file: POSTed data of the file, lacks private properties.
""" """
from application import app
src_file = utils.remove_private_keys(src_file) src_file = utils.remove_private_keys(src_file)
filename = src_file['name'] filename = src_file['name']
file_abs_path = safe_join(safe_join(app.config['SHARED_DIR'], filename[:2]), filename) file_abs_path = safe_join(safe_join(current_app.config['SHARED_DIR'], filename[:2]), filename)
if not os.path.exists(file_abs_path): if not os.path.exists(file_abs_path):
log.warning("POSTed file document %r refers to non-existant file on file system %s!", log.warning("POSTed file document %r refers to non-existant file on file system %s!",
@@ -227,7 +224,7 @@ def process_file(file_id, src_file):
def encode(src_path, src_file, res_y): def encode(src_path, src_file, res_y):
# For every variation in the list call video_encode # For every variation in the list call video_encode
# print "encoding {0}".format(variations) # print "encoding {0}".format(variations)
if app.config['ENCODING_BACKEND'] == 'zencoder': if current_app.config['ENCODING_BACKEND'] == 'zencoder':
# Move the source file in place on the remote storage (which can # Move the source file in place on the remote storage (which can
# be accessed from zencoder) # be accessed from zencoder)
push_to_storage(str(src_file['project']), src_path) push_to_storage(str(src_file['project']), src_path)
@@ -244,7 +241,7 @@ def process_file(file_id, src_file):
pass pass
except KeyError: except KeyError:
pass pass
elif app.config['ENCODING_BACKEND'] == 'local': elif current_app.config['ENCODING_BACKEND'] == 'local':
for v in src_file['variations']: for v in src_file['variations']:
path = ffmpeg_encode(src_path, v['format'], res_y) path = ffmpeg_encode(src_path, v['format'], res_y)
# Update size data after encoding # Update size data after encoding
@@ -291,9 +288,7 @@ def delete_file(file_item):
else: else:
pass pass
from application import app files_collection = current_app.data.driver.db['files']
files_collection = app.data.driver.db['files']
# Collect children (variations) of the original file # Collect children (variations) of the original file
children = files_collection.find({'parent': file_item['_id']}) children = files_collection.find({'parent': file_item['_id']})
for child in children: for child in children:
@@ -306,7 +301,6 @@ def generate_link(backend, file_path, project_id=None, is_public=False):
"""Hook to check the backend of a file resource, to build an appropriate link """Hook to check the backend of a file resource, to build an appropriate link
that can be used by the client to retrieve the actual file. that can be used by the client to retrieve the actual file.
""" """
from application import app
if backend == 'gcs': if backend == 'gcs':
storage = GoogleCloudStorageBucket(project_id) storage = GoogleCloudStorageBucket(project_id)
@@ -319,7 +313,7 @@ def generate_link(backend, file_path, project_id=None, is_public=False):
link = None link = None
elif backend == 'pillar': elif backend == 'pillar':
link = url_for('file_storage.index', file_name=file_path, _external=True, link = url_for('file_storage.index', file_name=file_path, _external=True,
_scheme=app.config['SCHEME']) _scheme=current_app.config['SCHEME'])
elif backend == 'cdnsun': elif backend == 'cdnsun':
link = hash_file_path(file_path, None) link = hash_file_path(file_path, None)
elif backend == 'unittest': elif backend == 'unittest':
@@ -367,8 +361,6 @@ def _generate_all_links(response, now):
:param now: datetime that reflects 'now', for consistent expiry generation. :param now: datetime that reflects 'now', for consistent expiry generation.
""" """
from application import app
project_id = str( project_id = str(
response['project']) if 'project' in response else None # TODO: add project id to all files response['project']) if 'project' in response else None # TODO: add project id to all files
backend = response['backend'] backend = response['backend']
@@ -378,7 +370,7 @@ def _generate_all_links(response, now):
variation['link'] = generate_link(backend, variation['file_path'], project_id) variation['link'] = generate_link(backend, variation['file_path'], project_id)
# Construct the new expiry datetime. # Construct the new expiry datetime.
validity_secs = app.config['FILE_LINK_VALIDITY'][backend] validity_secs = current_app.config['FILE_LINK_VALIDITY'][backend]
response['link_expires'] = now + datetime.timedelta(seconds=validity_secs) response['link_expires'] = now + datetime.timedelta(seconds=validity_secs)
patch_info = remove_private_keys(response) patch_info = remove_private_keys(response)
@@ -410,8 +402,6 @@ def before_deleting_file(item):
def on_pre_get_files(_, lookup): def on_pre_get_files(_, lookup):
from application import app
# Override the HTTP header, we always want to fetch the document from MongoDB. # Override the HTTP header, we always want to fetch the document from MongoDB.
parsed_req = eve.utils.parse_request('files') parsed_req = eve.utils.parse_request('files')
parsed_req.if_modified_since = None parsed_req.if_modified_since = None
@@ -421,22 +411,20 @@ def on_pre_get_files(_, lookup):
lookup_expired = lookup.copy() lookup_expired = lookup.copy()
lookup_expired['link_expires'] = {'$lte': now} lookup_expired['link_expires'] = {'$lte': now}
cursor = app.data.find('files', parsed_req, lookup_expired) cursor = current_app.data.find('files', parsed_req, lookup_expired)
for file_doc in cursor: for file_doc in cursor:
log.debug('Updating expired links for file %r.', file_doc['_id']) log.debug('Updating expired links for file %r.', file_doc['_id'])
_generate_all_links(file_doc, now) _generate_all_links(file_doc, now)
def refresh_links_for_project(project_uuid, chunk_size, expiry_seconds): def refresh_links_for_project(project_uuid, chunk_size, expiry_seconds):
from application import app
if chunk_size: if chunk_size:
log.info('Refreshing the first %i links for project %s', chunk_size, project_uuid) log.info('Refreshing the first %i links for project %s', chunk_size, project_uuid)
else: else:
log.info('Refreshing all links for project %s', project_uuid) log.info('Refreshing all links for project %s', project_uuid)
# Retrieve expired links. # Retrieve expired links.
files_collection = app.data.driver.db['files'] files_collection = current_app.data.driver.db['files']
now = datetime.datetime.now(tz=bson.tz_util.utc) now = datetime.datetime.now(tz=bson.tz_util.utc)
expire_before = now + datetime.timedelta(seconds=expiry_seconds) expire_before = now + datetime.timedelta(seconds=expiry_seconds)

View File

@@ -3,7 +3,6 @@ import logging
import json import json
from eve.methods.post import post_internal from eve.methods.post import post_internal
from eve.methods.put import put_internal
from eve.methods.patch import patch_internal from eve.methods.patch import patch_internal
from flask import g, Blueprint, request, abort, current_app from flask import g, Blueprint, request, abort, current_app

View File

@@ -17,6 +17,11 @@ def remove_private_keys(document):
if key.startswith('_'): if key.startswith('_'):
del doc_copy[key] del doc_copy[key]
try:
del doc_copy['allowed_methods']
except KeyError:
pass
return doc_copy return doc_copy

View File

@@ -1,15 +1,15 @@
from flask import g from flask import g
from flask import current_app
from eve.methods.post import post_internal from eve.methods.post import post_internal
from application import app
from application.modules.users import gravatar from application.modules.users import gravatar
def notification_parse(notification): def notification_parse(notification):
activities_collection = app.data.driver.db['activities'] activities_collection = current_app.data.driver.db['activities']
activities_subscriptions_collection = \ activities_subscriptions_collection = \
app.data.driver.db['activities-subscriptions'] current_app.data.driver.db['activities-subscriptions']
users_collection = app.data.driver.db['users'] users_collection = current_app.data.driver.db['users']
nodes_collection = app.data.driver.db['nodes'] nodes_collection = current_app.data.driver.db['nodes']
activity = activities_collection.find_one({'_id': notification['activity']}) activity = activities_collection.find_one({'_id': notification['activity']})
if activity['object_type'] != 'node': if activity['object_type'] != 'node':
@@ -83,7 +83,7 @@ def notification_parse(notification):
def notification_get_subscriptions(context_object_type, context_object_id, actor_user_id): def notification_get_subscriptions(context_object_type, context_object_id, actor_user_id):
subscriptions_collection = app.data.driver.db['activities-subscriptions'] subscriptions_collection = current_app.data.driver.db['activities-subscriptions']
lookup = { lookup = {
'user': {"$ne": actor_user_id}, 'user': {"$ne": actor_user_id},
'context_object_type': context_object_type, 'context_object_type': context_object_type,
@@ -101,7 +101,7 @@ def activity_subscribe(user_id, context_object_type, context_object_id):
:param context_object_type: hardcoded index, check the notifications/model.py :param context_object_type: hardcoded index, check the notifications/model.py
:param context_object_id: object id, to be traced with context_object_type_id :param context_object_id: object id, to be traced with context_object_type_id
""" """
subscriptions_collection = app.data.driver.db['activities-subscriptions'] subscriptions_collection = current_app.data.driver.db['activities-subscriptions']
lookup = { lookup = {
'user': user_id, 'user': user_id,
'context_object_type': context_object_type, 'context_object_type': context_object_type,

View File

@@ -1,5 +1,5 @@
from bson import ObjectId from bson import ObjectId
from application import app from flask import current_app
from application import algolia_index_users from application import algolia_index_users
from application import algolia_index_nodes from application import algolia_index_nodes
from application.modules.file_storage import generate_link from application.modules.file_storage import generate_link
@@ -33,10 +33,10 @@ def algolia_index_node_save(node):
and node['properties']['status'] != 'published': and node['properties']['status'] != 'published':
return return
projects_collection = app.data.driver.db['projects'] projects_collection = current_app.data.driver.db['projects']
project = projects_collection.find_one({'_id': ObjectId(node['project'])}) project = projects_collection.find_one({'_id': ObjectId(node['project'])})
users_collection = app.data.driver.db['users'] users_collection = current_app.data.driver.db['users']
user = users_collection.find_one({'_id': ObjectId(node['user'])}) user = users_collection.find_one({'_id': ObjectId(node['user'])})
node_ob = { node_ob = {
@@ -57,7 +57,7 @@ def algolia_index_node_save(node):
if 'description' in node and node['description']: if 'description' in node and node['description']:
node_ob['description'] = node['description'] node_ob['description'] = node['description']
if 'picture' in node and node['picture']: if 'picture' in node and node['picture']:
files_collection = app.data.driver.db['files'] files_collection = current_app.data.driver.db['files']
lookup = {'_id': ObjectId(node['picture'])} lookup = {'_id': ObjectId(node['picture'])}
picture = files_collection.find_one(lookup) picture = files_collection.find_one(lookup)
if picture['backend'] == 'gcs': if picture['backend'] == 'gcs':

View File

@@ -11,10 +11,9 @@ from bson import tz_util
from datetime import datetime from datetime import datetime
from flask import g from flask import g
from flask import request from flask import request
from flask import current_app
from eve.methods.post import post_internal from eve.methods.post import post_internal
from application import app
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@@ -55,7 +54,7 @@ def validate_token():
db_user, status = blender_id.validate_create_user('', token, oauth_subclient) db_user, status = blender_id.validate_create_user('', token, oauth_subclient)
else: else:
log.debug("User is already in our database and token hasn't expired yet.") log.debug("User is already in our database and token hasn't expired yet.")
users = app.data.driver.db['users'] users = current_app.data.driver.db['users']
db_user = users.find_one(db_token['user']) db_user = users.find_one(db_token['user'])
if db_user is None: if db_user is None:
@@ -72,7 +71,7 @@ def validate_token():
def find_token(token, is_subclient_token=False, **extra_filters): def find_token(token, is_subclient_token=False, **extra_filters):
"""Returns the token document, or None if it doesn't exist (or is expired).""" """Returns the token document, or None if it doesn't exist (or is expired)."""
tokens_collection = app.data.driver.db['tokens'] tokens_collection = current_app.data.driver.db['tokens']
# TODO: remove expired tokens from collection. # TODO: remove expired tokens from collection.
lookup = {'token': token, lookup = {'token': token,
@@ -152,7 +151,7 @@ def make_unique_username(email):
# Check for min length of username (otherwise validation fails) # Check for min length of username (otherwise validation fails)
username = "___{0}".format(username) if len(username) < 3 else username username = "___{0}".format(username) if len(username) < 3 else username
users = app.data.driver.db['users'] users = current_app.data.driver.db['users']
user_from_username = users.find_one({'username': username}) user_from_username = users.find_one({'username': username})
if not user_from_username: if not user_from_username:

View File

@@ -2,7 +2,7 @@ import logging
import functools import functools
from flask import g from flask import g
from flask import abort from flask import abort
from application import app from flask import current_app
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@@ -38,7 +38,7 @@ def check_permissions(resource, method, append_allowed_methods=False):
if type(resource['project']) is dict: if type(resource['project']) is dict:
project = resource['project'] project = resource['project']
else: else:
projects_collection = app.data.driver.db['projects'] projects_collection = current_app.data.driver.db['projects']
project = projects_collection.find_one(resource['project']) project = projects_collection.find_one(resource['project'])
node_type = next( node_type = next(
(item for item in project['node_types'] if item.get('name') \ (item for item in project['node_types'] if item.get('name') \
@@ -77,6 +77,7 @@ def check_permissions(resource, method, append_allowed_methods=False):
permission_granted = method in allowed_methods permission_granted = method in allowed_methods
if permission_granted: if permission_granted:
if append_allowed_methods: if append_allowed_methods:
# TODO: rename this field _allowed_methods
resource['allowed_methods'] = list(set(allowed_methods)) resource['allowed_methods'] = list(set(allowed_methods))
return return
@@ -116,3 +117,5 @@ def user_has_role(role):
return False return False
return role in current_user['roles'] return role in current_user['roles']

View File

@@ -1,31 +1,31 @@
import datetime import datetime
from hashlib import md5 from hashlib import md5
from application import app from flask import current_app
def hash_file_path(file_path, expiry_timestamp=None): def hash_file_path(file_path, expiry_timestamp=None):
if not file_path.startswith('/'): if not file_path.startswith('/'):
file_path = '/' + file_path; file_path = '/' + file_path
service_domain = app.config['CDN_SERVICE_DOMAIN'] service_domain = current_app.config['CDN_SERVICE_DOMAIN']
domain_subfolder = app.config['CDN_CONTENT_SUBFOLDER'] domain_subfolder = current_app.config['CDN_CONTENT_SUBFOLDER']
asset_url = app.config['CDN_SERVICE_DOMAIN_PROTOCOL'] + \ asset_url = current_app.config['CDN_SERVICE_DOMAIN_PROTOCOL'] + \
'://' + \ '://' + \
service_domain + \ service_domain + \
domain_subfolder + \ domain_subfolder + \
file_path file_path
if app.config['CDN_USE_URL_SIGNING']: if current_app.config['CDN_USE_URL_SIGNING']:
url_signing_key = app.config['CDN_URL_SIGNING_KEY'] url_signing_key = current_app.config['CDN_URL_SIGNING_KEY']
hash_string = domain_subfolder + file_path + url_signing_key; hash_string = domain_subfolder + file_path + url_signing_key
if not expiry_timestamp: if not expiry_timestamp:
expiry_timestamp = datetime.datetime.now() + datetime.timedelta(hours=24) expiry_timestamp = datetime.datetime.now() + datetime.timedelta(hours=24)
expiry_timestamp = expiry_timestamp.strftime('%s') expiry_timestamp = expiry_timestamp.strftime('%s')
hash_string = expiry_timestamp + hash_string; hash_string = expiry_timestamp + hash_string
expiry_timestamp = "," + str(expiry_timestamp); expiry_timestamp = "," + str(expiry_timestamp)
hashed_file_path = md5(hash_string).digest().encode('base64')[:-1] hashed_file_path = md5(hash_string).digest().encode('base64')[:-1]
hashed_file_path = hashed_file_path.replace('+', '-') hashed_file_path = hashed_file_path.replace('+', '-')

View File

@@ -1,7 +1,9 @@
import os import os
from flask import current_app
from zencoder import Zencoder from zencoder import Zencoder
from application import encoding_service_client from application import encoding_service_client
from application import app
class Encoder: class Encoder:
@@ -20,7 +22,7 @@ class Encoder:
storage_base = "gcs://{0}/_/".format(src_file['project']) storage_base = "gcs://{0}/_/".format(src_file['project'])
file_input = os.path.join(storage_base, src_file['file_path']) file_input = os.path.join(storage_base, src_file['file_path'])
outputs = [] outputs = []
options = dict(notifications=app.config['ZENCODER_NOTIFICATIONS_URL']) options = dict(notifications=current_app.config['ZENCODER_NOTIFICATIONS_URL'])
for v in src_file['variations']: for v in src_file['variations']:
outputs.append({ outputs.append({
'format': v['format'], 'format': v['format'],

View File

@@ -5,7 +5,7 @@ import bugsnag
from bson import ObjectId from bson import ObjectId
from gcloud.storage.client import Client from gcloud.storage.client import Client
from gcloud.exceptions import NotFound from gcloud.exceptions import NotFound
from application import app from flask import current_app
class GoogleCloudStorageBucket(object): class GoogleCloudStorageBucket(object):
@@ -153,7 +153,7 @@ def update_file_name(item):
return "{0}{1}{2}".format(root, size, ext) return "{0}{1}{2}".format(root, size, ext)
def _update_name(item, file_id): def _update_name(item, file_id):
files_collection = app.data.driver.db['files'] files_collection = current_app.data.driver.db['files']
f = files_collection.find_one({'_id': ObjectId(file_id)}) f = files_collection.find_one({'_id': ObjectId(file_id)})
status = item['properties']['status'] status = item['properties']['status']
if f and f['backend'] == 'gcs' and status != 'processing': if f and f['backend'] == 'gcs' and status != 'processing':

View File

@@ -2,7 +2,7 @@ import os
import json import json
import subprocess import subprocess
from PIL import Image from PIL import Image
from application import app from flask import current_app
def generate_local_thumbnails(src, return_image_stats=False): def generate_local_thumbnails(src, return_image_stats=False):
@@ -15,7 +15,7 @@ def generate_local_thumbnails(src, return_image_stats=False):
resolution, format and path of the thumbnailed image resolution, format and path of the thumbnailed image
""" """
thumbnail_settings = app.config['UPLOADS_LOCAL_STORAGE_THUMBNAILS'] thumbnail_settings = current_app.config['UPLOADS_LOCAL_STORAGE_THUMBNAILS']
thumbnails = {} thumbnails = {}
for size, settings in thumbnail_settings.iteritems(): for size, settings in thumbnail_settings.iteritems():
root, ext = os.path.splitext(src) root, ext = os.path.splitext(src)
@@ -118,7 +118,7 @@ def get_video_data(filepath):
"""Return video duration and resolution given an input file path""" """Return video duration and resolution given an input file path"""
outdata = None outdata = None
ffprobe_inspect = [ ffprobe_inspect = [
app.config['BIN_FFPROBE'], current_app.config['BIN_FFPROBE'],
'-loglevel', '-loglevel',
'error', 'error',
'-show_streams', '-show_streams',
@@ -195,13 +195,13 @@ def ffmpeg_encode(src, format, res_y=720):
dst = "{0}-{1}p.{2}".format(dst[0], res_y, format) dst = "{0}-{1}p.{2}".format(dst[0], res_y, format)
args.append(dst) args.append(dst)
print "Encoding {0} to {1}".format(src, format) print "Encoding {0} to {1}".format(src, format)
returncode = subprocess.call([app.config['BIN_FFMPEG']] + args) returncode = subprocess.call([current_app.config['BIN_FFMPEG']] + args)
if returncode == 0: if returncode == 0:
print "Successfully encoded {0}".format(dst) print "Successfully encoded {0}".format(dst)
else: else:
print "Error during encode" print "Error during encode"
print "Code: {0}".format(returncode) print "Code: {0}".format(returncode)
print "Command: {0}".format(app.config['BIN_FFMPEG'] + " " + " ".join(args)) print "Command: {0}".format(current_app.config['BIN_FFMPEG'] + " " + " ".join(args))
dst = None dst = None
# return path of the encoded video # return path of the encoded video
return dst return dst

View File

@@ -1,13 +1,8 @@
import os import os
import subprocess import subprocess
# import logging
from application import app
from application.utils.gcs import GoogleCloudStorageBucket
BIN_FFPROBE = app.config['BIN_FFPROBE'] from flask import current_app
BIN_FFMPEG = app.config['BIN_FFMPEG'] from application.utils.gcs import GoogleCloudStorageBucket
BIN_SSH = app.config['BIN_SSH']
BIN_RSYNC = app.config['BIN_RSYNC']
def get_sizedata(filepath): def get_sizedata(filepath):
@@ -18,13 +13,16 @@ def get_sizedata(filepath):
def rsync(path, remote_dir=''): def rsync(path, remote_dir=''):
BIN_SSH = current_app.config['BIN_SSH']
BIN_RSYNC = current_app.config['BIN_RSYNC']
DRY_RUN = False DRY_RUN = False
arguments = ['--verbose', '--ignore-existing', '--recursive', '--human-readable'] arguments = ['--verbose', '--ignore-existing', '--recursive', '--human-readable']
logs_path = app.config['CDN_SYNC_LOGS'] logs_path = current_app.config['CDN_SYNC_LOGS']
storage_address = app.config['CDN_STORAGE_ADDRESS'] storage_address = current_app.config['CDN_STORAGE_ADDRESS']
user = app.config['CDN_STORAGE_USER'] user = current_app.config['CDN_STORAGE_USER']
rsa_key_path = app.config['CDN_RSA_KEY'] rsa_key_path = current_app.config['CDN_RSA_KEY']
known_hosts_path = app.config['CDN_KNOWN_HOSTS'] known_hosts_path = current_app.config['CDN_KNOWN_HOSTS']
if DRY_RUN: if DRY_RUN:
arguments.append('--dry-run') arguments.append('--dry-run')

View File

@@ -780,3 +780,4 @@ MONGO_PORT = os.environ.get('MONGO_PORT', 27017)
MONGO_DBNAME = os.environ.get('MONGO_DBNAME', 'eve') MONGO_DBNAME = os.environ.get('MONGO_DBNAME', 'eve')
CACHE_EXPIRES = 60 CACHE_EXPIRES = 60
HATEOAS = False HATEOAS = False
UPSET_ON_PUT = False # do not create new document on PUT of non-existant URL.

View File

@@ -1,8 +1,9 @@
# Primary requirements
algoliasearch==1.8.0 algoliasearch==1.8.0
blinker==1.4 blinker==1.4
bugsnag==2.3.1 bugsnag==2.3.1
Cerberus==0.9.1 Cerberus==0.9.2
Eve==0.5.3 Eve==0.6.3
Events==0.2.1 Events==0.2.1
Flask-Script==2.0.5 Flask-Script==2.0.5
flup==1.0.2 flup==1.0.2
@@ -18,11 +19,37 @@ pycrypto==2.6.1
pyOpenSSL==0.15.1 pyOpenSSL==0.15.1
requests==2.9.1 requests==2.9.1
rsa==3.3 rsa==3.3
simplejson==3.8.1 simplejson==3.8.2
WebOb==1.5.0 WebOb==1.5.0
wheel==0.24.0 wheel==0.24.0
zencoder==0.6.5 zencoder==0.6.5
# development requirements # Development requirements
pytest==2.9.1 pytest==2.9.1
responses==0.5.1 responses==0.5.1
# Secondary requirements
Flask==0.10.1
Flask-PyMongo==0.4.1
Jinja2==2.8
Werkzeug==0.11.3
argparse==1.2.1
cffi==1.6.0
cookies==2.2.1
cryptography==1.3.1
enum34==1.1.3
funcsigs==1.0.1
googleapis-common-protos==1.1.0
ipaddress==1.0.16
itsdangerous==0.24
mock==2.0.0
oauth2client==2.0.2
pbr==1.9.1
protobuf==3.0.0b2.post2
protorpc==0.11.1
py==1.4.31
pyasn1==0.1.9
pyasn1-modules==0.0.8
pymongo==3.2.2
six==1.10.0
wsgiref==0.1.2

View File

@@ -41,15 +41,21 @@ class ProjectCreationTest(AbstractProjectTest):
def test_project_creation_good_role(self): def test_project_creation_good_role(self):
user_id = self._create_user_with_token([u'subscriber'], 'token') user_id = self._create_user_with_token([u'subscriber'], 'token')
resp = self._create_project(u'Prøject El Niño', 'token') resp = self._create_project(u'Prøject El Niño', 'token')
self.assertEqual(201, resp.status_code) self.assertEqual(201, resp.status_code)
project = json.loads(resp.data.decode('utf-8'))
project_id = project['_id'] # The response of a POST is not the entire document, just some _xxx fields.
project_info = json.loads(resp.data.decode('utf-8'))
project_id = project_info['_id']
# Test that the Location header contains the location of the project document. # Test that the Location header contains the location of the project document.
self.assertEqual('http://localhost/projects/%s' % project_id, self.assertEqual('http://localhost/projects/%s' % project_id,
resp.headers['Location']) resp.headers['Location'])
# Actually get the project.
resp = self.client.get(resp.headers['Location'])
project = json.loads(resp.data.decode('utf-8'))
project_id = project['_id']
# Check some of the more complex/interesting fields. # Check some of the more complex/interesting fields.
self.assertEqual(u'Prøject El Niño', project['name']) self.assertEqual(u'Prøject El Niño', project['name'])
self.assertEqual(str(user_id), project['user']) self.assertEqual(str(user_id), project['user'])
@@ -82,8 +88,11 @@ class ProjectEditTest(AbstractProjectTest):
from application.utils import remove_private_keys, PillarJSONEncoder from application.utils import remove_private_keys, PillarJSONEncoder
dumps = functools.partial(json.dumps, cls=PillarJSONEncoder) dumps = functools.partial(json.dumps, cls=PillarJSONEncoder)
project = self._create_user_and_project([u'subscriber']) project_info = self._create_user_and_project([u'subscriber'])
project_url = '/projects/%(_id)s' % project project_url = '/projects/%(_id)s' % project_info
resp = self.client.get(project_url)
project = json.loads(resp.data.decode('utf-8'))
# Create another user we can try and assign the project to. # Create another user we can try and assign the project to.
other_user_id = 'f00dd00df00dd00df00dd00d' other_user_id = 'f00dd00df00dd00df00dd00d'
@@ -133,8 +142,11 @@ class ProjectEditTest(AbstractProjectTest):
from application.utils import remove_private_keys, PillarJSONEncoder from application.utils import remove_private_keys, PillarJSONEncoder
dumps = functools.partial(json.dumps, cls=PillarJSONEncoder) dumps = functools.partial(json.dumps, cls=PillarJSONEncoder)
project = self._create_user_and_project([u'subscriber', u'admin']) project_info = self._create_user_and_project([u'subscriber', u'admin'])
project_url = '/projects/%(_id)s' % project project_url = '/projects/%(_id)s' % project_info
resp = self.client.get(project_url)
project = json.loads(resp.data.decode('utf-8'))
# Create another user we can try and assign the project to. # Create another user we can try and assign the project to.
other_user_id = 'f00dd00df00dd00df00dd00d' other_user_id = 'f00dd00df00dd00df00dd00d'
@@ -152,6 +164,7 @@ class ProjectEditTest(AbstractProjectTest):
put_project['category'] = 'software' put_project['category'] = 'software'
put_project['user'] = other_user_id put_project['user'] = other_user_id
resp = self.client.put(project_url, resp = self.client.put(project_url,
data=dumps(put_project), data=dumps(put_project),
headers={'Authorization': self.make_header('token'), headers={'Authorization': self.make_header('token'),