PEP8 formatting for the entire project.

Conflicts:
	pillar/application/modules/file_storage.py
Commit: 8d9a23b7b9
Parent: 9bb95ab054
Date:   2016-03-21 12:25:07 +01:00
11 changed files with 105 additions and 83 deletions
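
The changes are mechanical style fixes: two blank lines between top-level definitions, a space after the '#' that opens a comment, a space after commas, no spaces around '=' in keyword arguments, and 'else:' rather than 'else :'. As a minimal sketch of the target style (based on the get_sizedata() hunk further down, not copied line for line):

    import os


    # Comments start with '# ' (a space after the hash).
    def get_sizedata(filepath):
        # Keyword arguments take no spaces around '=', and top-level
        # definitions are separated by two blank lines.
        outdata = dict(
            size=int(os.stat(filepath).st_size)
        )
        return outdata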


@@ -15,7 +15,6 @@ from eve import Eve
from eve.auth import TokenAuth
from eve.io.mongo import Validator
RFC1123_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'
@@ -63,7 +62,7 @@ class ValidateCustomFields(Validator):
project = projects_collection.find_one(lookup)
node_type = next(
(item for item in project['node_types'] if item.get('name') \
and item['name'] == self.document['node_type']), None)
try:
value = self.convert_properties(value, node_type['dyn_schema'])
except Exception, e:
@@ -82,6 +81,7 @@ class ValidateCustomFields(Validator):
self._error(
field, "Error validating properties")
# We specify a settings.py file because when running on wsgi we can't detect it
# automatically. The default path (which works in Docker) can be overridden with
# an env variable.
@@ -90,6 +90,7 @@ settings_path = os.environ.get(
app = Eve(settings=settings_path, validator=ValidateCustomFields, auth=NewAuth)
import config
app.config.from_object(config.Deployment)
# Configure logging
@@ -104,8 +105,8 @@ log.setLevel(logging.DEBUG if app.config['DEBUG'] else logging.INFO)
log.info('Pillar starting')
bugsnag.configure(
api_key=app.config['BUGSNAG_API_KEY'],
project_root="/data/git/pillar/pillar",
)
handle_exceptions(app)
@@ -125,8 +126,8 @@ except KeyError:
# Algolia search
if 'ALGOLIA_USER' in app.config:
client = algoliasearch.Client(
app.config['ALGOLIA_USER'],
app.config['ALGOLIA_API_KEY'])
algolia_index_users = client.init_index(app.config['ALGOLIA_INDEX_USERS'])
algolia_index_nodes = client.init_index(app.config['ALGOLIA_INDEX_NODES'])
else:
@@ -157,24 +158,29 @@ def before_returning_item_permissions(response):
validate_token()
check_permissions(response, 'GET', append_allowed_methods=True)
def before_returning_resource_permissions(response):
for item in response['_items']:
validate_token()
check_permissions(item, 'GET', append_allowed_methods=True)
def before_replacing_node(item, original):
check_permissions(original, 'PUT')
update_file_name(item)
def after_replacing_node(item, original):
"""Push an update to the Algolia index when a node item is updated"""
algolia_index_node_save(item)
def before_inserting_nodes(items):
"""Before inserting a node in the collection we check if the user is allowed
and we append the project id to it.
"""
nodes_collection = app.data.driver.db['nodes']
def find_parent_project(node):
"""Recursive function that finds the ultimate parent of a node."""
if node and 'parent' in node:
@@ -184,6 +190,7 @@ def before_inserting_nodes(items):
return node
else:
return None
for item in items:
check_permissions(item, 'POST')
if 'parent' in item and 'project' not in item:
@@ -223,7 +230,8 @@ def after_inserting_nodes(items):
item['_id'],
'node',
context_object_id
)
def item_parse_attachments(response):
"""Before returning a response, check if the 'attachments' property is
@@ -252,7 +260,7 @@ def item_parse_attachments(response):
size = f['size'] if 'size' in f else 'l'
# Get the correc variation from the file
thumbnail = next((item for item in f['variations'] if
item['size'] == size), None)
l = generate_link(f['backend'], thumbnail['file_path'], str(f['project']))
# Build Markdown img string
l = '![{0}]({1} "{2}")'.format(slug, l, f['name'])
@@ -266,10 +274,12 @@ def item_parse_attachments(response):
else:
response[field_name_path[0]] = field_content
def resource_parse_attachments(response):
for item in response['_items']:
item_parse_attachments(item)
def project_node_type_has_method(response):
"""Check for a specific request arg, and check generate the allowed_methods
list for the required node_type.
@@ -283,7 +293,7 @@ def project_node_type_has_method(response):
# Look up the node type in the project document
node_type = next(
(item for item in response['node_types'] if item.get('name') \
and item['name'] == node_type_name), None)
if not node_type:
return abort(404)
# Check permissions and append the allowed_methods to the node_type
@@ -300,6 +310,7 @@ def before_returning_resource_notifications(response):
if request.args.get('parse'):
notification_parse(item)
app.on_fetched_item_nodes += before_returning_item_permissions
app.on_fetched_item_nodes += item_parse_attachments
app.on_fetched_resource_nodes += before_returning_resource_permissions
@@ -316,6 +327,7 @@ app.on_fetched_item_projects += before_returning_item_permissions
app.on_fetched_item_projects += project_node_type_has_method
app.on_fetched_resource_projects += before_returning_resource_permissions
def post_GET_user(request, payload):
json_data = json.loads(payload.data)
# Check if we are querying the users endpoint (instead of the single user)
@@ -325,19 +337,23 @@ def post_GET_user(request, payload):
# compute_permissions(json_data['_id'], app.data.driver)
payload.data = json.dumps(json_data)
def after_replacing_user(item, original):
"""Push an update to the Algolia index when a user item is updated"""
algolia_index_user_save(item)
app.on_post_GET_users += post_GET_user
app.on_replace_users += after_replacing_user
def post_POST_files(request, payload):
"""After an file object has been created, we do the necessary processing
and further update it.
"""
process_file(request.get_json())
app.on_post_POST_files += post_POST_files
@@ -351,6 +367,7 @@ def before_returning_file(response):
variation['link'] = generate_link(
response['backend'], variation['file_path'], project_id)
def before_returning_files(response):
for item in response['_items']:
# TODO: add project id to all files
@@ -365,12 +382,15 @@ app.on_fetched_resource_files += before_returning_files
def before_deleting_file(item):
delete_file(item)
app.on_delete_item_files += before_deleting_file
# The file_storage module needs app to be defined
from modules.file_storage import file_storage
-#from modules.file_storage.serve import *
+# from modules.file_storage.serve import *
app.register_blueprint(file_storage, url_prefix='/storage')
# The encoding module (receive notification and report progress)
from modules.encoding import encoding
app.register_blueprint(encoding, url_prefix='/encoding')


@@ -15,7 +15,7 @@ def zencoder_notifications():
# If we are in production, look for the Zencoder header secret
try:
notification_secret_request = request.headers[
'X-Zencoder-Notification-Secret']
except KeyError:
return abort(401)
# If the header is found, check it agains the one in the config
@@ -44,7 +44,7 @@ def zencoder_notifications():
format = 'mp4' if format == 'mpeg4' else format
# Find a variation matching format and resolution
variation = next((v for v in f['variations'] if v['format'] == format \
and v['width'] == output['width']), None)
# If found, update with delivered file size
# TODO: calculate md5 on the storage
if variation:


@@ -17,12 +17,11 @@ from application.utils.cdn import hash_file_path
from application.utils.gcs import GoogleCloudStorageBucket
from application.utils.encoding import Encoder
log = logging.getLogger(__name__)
file_storage = Blueprint('file_storage', __name__,
template_folder='templates',
-static_folder='../../static/storage',)
+static_folder='../../static/storage', )
@file_storage.route('/gcs/<bucket_name>/<subdir>/')
@@ -50,7 +49,7 @@ def browse_gcs(bucket_name, subdir, file_path=None):
return jsonify(listing)
-#@file_storage.route('/build_thumbnails/<path:file_path>')
+# @file_storage.route('/build_thumbnails/<path:file_path>')
def build_thumbnails(file_path=None, file_id=None):
"""Given a file path or file ObjectId pointing to an image file, fetch it
and generate a set of predefined variations (using generate_local_thumbnails).
@@ -92,7 +91,7 @@ def build_thumbnails(file_path=None, file_id=None):
length=thumbnail['length'],
md5=thumbnail['md5'],
file_path=basename,
)
# XXX Inject is_public for size 't' (should be part of the upload),
# and currently we set it here and then on the fly during blob
# creation by simply parsing the extension of the filename. This is
@@ -198,10 +197,10 @@ def process_file(src_file):
width=src_video_data['res_x'],
height=src_video_data['res_y'],
content_type="video/{0}".format(v),
length=0, # Available after encode
md5="", # Available after encode
file_path=filename,
)
# Append file variation
src_file['variations'].append(file_variation)
@@ -221,7 +220,7 @@ def process_file(src_file):
backend=j['backend'])
# Add the processing status to the file object
r = put_internal('files',
src_file, **{'_id': ObjectId(file_id)})
pass
except KeyError:
pass
@@ -268,6 +267,7 @@ def delete_file(file_item):
pass
else:
pass
files_collection = app.data.driver.db['files']
# Collect children (variations) of the original file
children = files_collection.find({'parent': file_item['_id']})
@@ -292,7 +292,7 @@ def generate_link(backend, file_path, project_id=None, is_public=False):
link = None
elif backend == 'pillar':
link = url_for('file_storage.index', file_name=file_path, _external=True,
_scheme=app.config['SCHEME'])
elif backend == 'cdnsun':
link = hash_file_path(file_path, None)
else:


@@ -75,7 +75,7 @@ def notification_parse(notification):
is_read=('is_read' in notification and notification['is_read']),
is_subscribed=is_subscribed,
subscription=subscription['_id']
)
notification.update(updates)
@@ -112,7 +112,7 @@ def activity_subscribe(user_id, context_object_type, context_object_id):
def activity_object_add(actor_user_id, verb, object_type, object_id,
context_object_type, context_object_id):
"""Add a notification object and creates a notification for each user that
- is not the original author of the post
- is actively subscribed to the object
@@ -138,7 +138,7 @@ def activity_object_add(actor_user_id, verb, object_type, object_id,
object=object_id,
context_object_type=context_object_type,
context_object=context_object_id
)
activity = post_internal('activities', activity)
if activity[3] != 201:


@@ -4,6 +4,7 @@ from application import algolia_index_users
from application import algolia_index_nodes
from application.modules.file_storage import generate_link
def algolia_index_user_save(user):
# Define accepted roles
accepted_roles = ['admin', 'subscriber', 'demo']
@@ -29,7 +30,7 @@ def algolia_index_node_save(node):
if node['node_type'] in accepted_node_types and algolia_index_nodes:
# If a nodes does not have status published, do not index
if 'status' in node['properties'] \
and node['properties']['status'] != 'published':
return
projects_collection = app.data.driver.db['projects']
@@ -44,15 +45,15 @@ def algolia_index_node_save(node):
'project': {
'_id': project['_id'],
'name': project['name']
},
'created': node['_created'],
'updated': node['_updated'],
'node_type': node['node_type'],
'user': {
'_id': user['_id'],
'full_name': user['full_name']
},
}
if 'description' in node and node['description']:
node_ob['description'] = node['description']
if 'picture' in node and node['picture']:
@@ -61,11 +62,11 @@ def algolia_index_node_save(node):
picture = files_collection.find_one(lookup)
if picture['backend'] == 'gcs':
variation_t = next((item for item in picture['variations'] \
if item['size'] == 't'), None)
if variation_t:
node_ob['picture'] = generate_link(picture['backend'],
variation_t['file_path'], project_id=str(picture['project']),
is_public=True)
# If the node has world permissions, compute the Free permission
if 'permissions' in node and 'world' in node['permissions']:
if 'GET' in node['permissions']['world']:


@@ -44,7 +44,7 @@ def check_permissions(resource, method, append_allowed_methods=False):
project = projects_collection.find_one(resource['project'])
node_type = next(
(item for item in project['node_types'] if item.get('name') \
and item['name'] == resource['node_type']), None)
computed_permissions = node_type['permissions']
else:
computed_permissions = None


@@ -2,37 +2,38 @@ import datetime
from hashlib import md5
from application import app
def hash_file_path(file_path, expiry_timestamp=None):
if not file_path.startswith('/'):
file_path = '/' + file_path;
service_domain = app.config['CDN_SERVICE_DOMAIN']
domain_subfolder = app.config['CDN_CONTENT_SUBFOLDER']
asset_url = app.config['CDN_SERVICE_DOMAIN_PROTOCOL'] + \
'://' + \
service_domain + \
domain_subfolder + \
file_path
if app.config['CDN_USE_URL_SIGNING']:
url_signing_key = app.config['CDN_URL_SIGNING_KEY']
hash_string = domain_subfolder + file_path + url_signing_key;
if not expiry_timestamp:
expiry_timestamp = datetime.datetime.now() + datetime.timedelta(hours=24)
expiry_timestamp = expiry_timestamp.strftime('%s')
hash_string = expiry_timestamp + hash_string;
expiry_timestamp = "," + str(expiry_timestamp);
hashed_file_path = md5(hash_string).digest().encode('base64')[:-1]
hashed_file_path = hashed_file_path.replace('+', '-')
hashed_file_path = hashed_file_path.replace('/', '_')
asset_url = asset_url + \
'?secure=' + \
hashed_file_path + \
expiry_timestamp
return asset_url
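
For reference, the URL signing above reduces to this standalone sketch (the subfolder, path, key and timestamp are made-up values, and base64.b64encode() stands in for the Python 2 .encode('base64') idiom used in the file):

    import base64
    from hashlib import md5

    # Hypothetical inputs; in hash_file_path() they come from app.config.
    domain_subfolder = '/content'
    file_path = '/pictures/example.jpg'
    url_signing_key = 'secret-key'
    expiry_timestamp = '1458555907'  # strftime('%s') of "now + 24 hours"

    # md5 over expiry + subfolder + path + key, base64-encoded and made URL-safe.
    hash_string = expiry_timestamp + domain_subfolder + file_path + url_signing_key
    token = base64.b64encode(md5(hash_string.encode()).digest()).decode()
    token = token.replace('+', '-').replace('/', '_')
    query = '?secure=' + token + ',' + expiry_timestamp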


@@ -3,6 +3,7 @@ from zencoder import Zencoder
from application import encoding_service_client
from application import app
class Encoder:
"""Generic Encoder wrapper. Provides a consistent API, independent from
the encoding backend enabled.
@@ -26,7 +27,7 @@ class Encoder:
'witdh': v['width'],
'url': os.path.join(storage_base, v['file_path'])})
r = encoding_service_client.job.create(file_input, outputs=outputs,
options=options)
if r.code == 201:
return dict(process_id=r.body['id'], backend='zencoder')
else:


@@ -47,7 +47,7 @@ class GoogleCloudStorageBucket(object):
files = []
for f in req:
filename = os.path.basename(f.name)
if filename != '': # Skip own folder name
files.append(dict(
path=os.path.relpath(f.name, self.subdir),
text=filename,
@@ -67,7 +67,7 @@ class GoogleCloudStorageBucket(object):
name=os.path.basename(os.path.normpath(path)),
type='group_storage',
children=files + directories
)
return list_dict
@@ -165,8 +165,8 @@ def update_file_name(item):
pass
except AttributeError:
bugsnag.notify(Exception('Missing or conflicting ids detected'),
meta_data={'nodes_info':
{'node_id': item['_id'], 'file_id': file_id}})
# Currently we search for 'file' and 'files' keys in the object properties.
# This could become a bit more flexible and realy on a true reference of the


@@ -43,13 +43,13 @@ def generate_local_thumbnails(src, return_image_stats=False):
format = im.format.lower()
# Get format
thumbnails[size] = dict(
file_path=dst, # Full path, to be processed before storage
length=length,
width=width,
height=height,
md5='--',
content_type='image/' + format,
)
if return_image_stats:
return thumbnails
@@ -78,38 +78,38 @@ def resize_and_crop(img_path, modified_path, size, crop_type='middle'):
# Get current and desired ratio for the images
img_ratio = img.size[0] / float(img.size[1])
ratio = size[0] / float(size[1])
-#The image is scaled/cropped vertically or horizontally depending on the ratio
+# The image is scaled/cropped vertically or horizontally depending on the ratio
if ratio > img_ratio:
img = img.resize((size[0], int(round(size[0] * img.size[1] / img.size[0]))),
Image.ANTIALIAS)
# Crop in the top, middle or bottom
if crop_type == 'top':
box = (0, 0, img.size[0], size[1])
elif crop_type == 'middle':
box = (0, int(round((img.size[1] - size[1]) / 2)), img.size[0],
int(round((img.size[1] + size[1]) / 2)))
elif crop_type == 'bottom':
box = (0, img.size[1] - size[1], img.size[0], img.size[1])
-else :
+else:
raise ValueError('ERROR: invalid value for crop_type')
img = img.crop(box)
elif ratio < img_ratio:
img = img.resize((int(round(size[1] * img.size[0] / img.size[1])), size[1]),
Image.ANTIALIAS)
# Crop in the top, middle or bottom
if crop_type == 'top':
box = (0, 0, size[0], img.size[1])
elif crop_type == 'middle':
box = (int(round((img.size[0] - size[0]) / 2)), 0,
int(round((img.size[0] + size[0]) / 2)), img.size[1])
elif crop_type == 'bottom':
box = (img.size[0] - size[0], 0, img.size[0], img.size[1])
-else :
+else:
raise ValueError('ERROR: invalid value for crop_type')
img = img.crop(box)
-else :
+else:
img = img.resize((size[0], size[1]),
Image.ANTIALIAS)
# If the scale is the same, we do not need to crop
img.save(modified_path, "JPEG")
@@ -145,7 +145,7 @@ def get_video_data(filepath):
duration=duration,
res_x=video_stream['width'],
res_y=video_stream['height'],
)
if video_stream['sample_aspect_ratio'] != '1:1':
print '[warning] Pixel aspect ratio is not square!'
@@ -184,8 +184,8 @@ def ffmpeg_encode(src, format, res_y=720):
'-deadline', 'good',
'-cpu-used', '0',
'-vprofile', '0',
-'-qmax', '51', '-qmin', '11', '-slices', '4','-b:v', '2M',
-#'-acodec', 'libmp3lame', '-ab', '112k', '-ar', '44100',
+'-qmax', '51', '-qmin', '11', '-slices', '4', '-b:v', '2M',
+# '-acodec', 'libmp3lame', '-ab', '112k', '-ar', '44100',
'-f', 'webm'])
if not os.environ.get('VERBOSE'):
@@ -205,4 +205,3 @@ def ffmpeg_encode(src, format, res_y=720):
dst = None
# return path of the encoded video
return dst


@@ -1,6 +1,6 @@
import os
import subprocess
-#import logging
+# import logging
from application import app
from application.utils.gcs import GoogleCloudStorageBucket
@@ -12,14 +12,14 @@ BIN_RSYNC = app.config['BIN_RSYNC']
def get_sizedata(filepath):
outdata = dict(
-size = int(os.stat(filepath).st_size)
+size=int(os.stat(filepath).st_size)
)
return outdata
def rsync(path, remote_dir=''):
DRY_RUN = False
-arguments=['--verbose', '--ignore-existing', '--recursive', '--human-readable']
+arguments = ['--verbose', '--ignore-existing', '--recursive', '--human-readable']
logs_path = app.config['CDN_SYNC_LOGS']
storage_address = app.config['CDN_STORAGE_ADDRESS']
user = app.config['CDN_STORAGE_USER']
@@ -34,7 +34,7 @@ def rsync(path, remote_dir=''):
'-e ' + BIN_SSH + ' -i ' + rsa_key_path + ' -o "StrictHostKeyChecking=no"')
# if known_hosts_path:
# folder_arguments.append("-o UserKnownHostsFile " + known_hosts_path)
folder_arguments.append("--log-file=" + logs_path + "/rsync.log")
folder_arguments.append(path)
folder_arguments.append(user + "@" + storage_address + ":/public/" + remote_dir)
# print (folder_arguments)
@@ -46,7 +46,7 @@ def rsync(path, remote_dir=''):
subprocess.Popen(['nohup', BIN_RSYNC] + folder_arguments, stdout=devnull, stderr=devnull)
-def remote_storage_sync(path): #can be both folder and file
+def remote_storage_sync(path):  # can be both folder and file
if os.path.isfile(path):
filename = os.path.split(path)[1]
rsync(path, filename[:2] + '/')
@@ -62,6 +62,7 @@ def push_to_storage(project_id, full_path, backend='cgs'):
By default we store items in a Google Cloud Storage bucket named after the
project id.
"""
def push_single_file(project_id, full_path, backend):
if backend == 'cgs':
storage = GoogleCloudStorageBucket(project_id, subdir='_')
@@ -82,4 +83,3 @@ def push_to_storage(project_id, full_path, backend='cgs'):
push_single_file(project_id, os.path.join(root, name), backend)
else:
raise IOError('ERROR: path not found')