PEP8 formatting for the entire project.
Conflicts: pillar/application/modules/file_storage.py
@@ -15,7 +15,6 @@ from eve import Eve
 from eve.auth import TokenAuth
 from eve.io.mongo import Validator
 
-
 RFC1123_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'
 
 
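Most hunks in this commit touch nothing but vertical whitespace: PEP 8 asks for exactly two blank lines around top-level definitions and one before a nested def. The flake8 codes (E301/E302/E303) are cited below for orientation only; the commit itself doesn't name a linter. A minimal sketch with hypothetical functions:

    # A single blank line between top-level defs draws "E302 expected 2
    # blank lines, found 1"; two blank lines, as the hunks below add, are clean.
    def first_handler():
        return 1


    def second_handler():
        return 2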
@@ -82,6 +81,7 @@ class ValidateCustomFields(Validator):
             self._error(
                 field, "Error validating properties")
 
+
 # We specify a settings.py file because when running on wsgi we can't detect it
 # automatically. The default path (which works in Docker) can be overridden with
 # an env variable.
@@ -90,6 +90,7 @@ settings_path = os.environ.get(
 app = Eve(settings=settings_path, validator=ValidateCustomFields, auth=NewAuth)
+
 import config
 
 app.config.from_object(config.Deployment)
 
 # Configure logging
@@ -157,24 +158,29 @@ def before_returning_item_permissions(response):
     validate_token()
     check_permissions(response, 'GET', append_allowed_methods=True)
 
+
 def before_returning_resource_permissions(response):
     for item in response['_items']:
         validate_token()
         check_permissions(item, 'GET', append_allowed_methods=True)
 
+
 def before_replacing_node(item, original):
     check_permissions(original, 'PUT')
     update_file_name(item)
 
+
 def after_replacing_node(item, original):
     """Push an update to the Algolia index when a node item is updated"""
     algolia_index_node_save(item)
 
+
 def before_inserting_nodes(items):
     """Before inserting a node in the collection we check if the user is allowed
     and we append the project id to it.
     """
     nodes_collection = app.data.driver.db['nodes']
+
     def find_parent_project(node):
         """Recursive function that finds the ultimate parent of a node."""
         if node and 'parent' in node:
@@ -184,6 +190,7 @@ def before_inserting_nodes(items):
             return node
         else:
             return None
+
     for item in items:
         check_permissions(item, 'POST')
         if 'parent' in item and 'project' not in item:
@@ -225,6 +232,7 @@ def after_inserting_nodes(items):
             context_object_id
         )
 
+
 def item_parse_attachments(response):
     """Before returning a response, check if the 'attachments' property is
     defined. If yes, load the file (for the moment only images) in the required
@@ -266,10 +274,12 @@ def item_parse_attachments(response):
         else:
             response[field_name_path[0]] = field_content
 
+
 def resource_parse_attachments(response):
     for item in response['_items']:
         item_parse_attachments(item)
 
+
 def project_node_type_has_method(response):
     """Check for a specific request arg, and check generate the allowed_methods
     list for the required node_type.
@@ -300,6 +310,7 @@ def before_returning_resource_notifications(response):
         if request.args.get('parse'):
             notification_parse(item)
 
+
 app.on_fetched_item_nodes += before_returning_item_permissions
 app.on_fetched_item_nodes += item_parse_attachments
 app.on_fetched_resource_nodes += before_returning_resource_permissions
@@ -316,6 +327,7 @@ app.on_fetched_item_projects += before_returning_item_permissions
 app.on_fetched_item_projects += project_node_type_has_method
 app.on_fetched_resource_projects += before_returning_resource_permissions
 
+
 def post_GET_user(request, payload):
     json_data = json.loads(payload.data)
     # Check if we are querying the users endpoint (instead of the single user)
@@ -325,19 +337,23 @@ def post_GET_user(request, payload):
     # compute_permissions(json_data['_id'], app.data.driver)
     payload.data = json.dumps(json_data)
 
+
 def after_replacing_user(item, original):
     """Push an update to the Algolia index when a user item is updated"""
     algolia_index_user_save(item)
 
+
 app.on_post_GET_users += post_GET_user
 app.on_replace_users += after_replacing_user
 
+
 def post_POST_files(request, payload):
     """After an file object has been created, we do the necessary processing
     and further update it.
     """
     process_file(request.get_json())
 
+
 app.on_post_POST_files += post_POST_files
 
 
@@ -351,6 +367,7 @@ def before_returning_file(response):
         variation['link'] = generate_link(
             response['backend'], variation['file_path'], project_id)
 
+
 def before_returning_files(response):
     for item in response['_items']:
         # TODO: add project id to all files
@@ -365,12 +382,15 @@ app.on_fetched_resource_files += before_returning_files
 def before_deleting_file(item):
     delete_file(item)
 
+
 app.on_delete_item_files += before_deleting_file
 
 # The file_storage module needs app to be defined
 from modules.file_storage import file_storage
-#from modules.file_storage.serve import *
+
+# from modules.file_storage.serve import *
 app.register_blueprint(file_storage, url_prefix='/storage')
 # The encoding module (receive notification and report progress)
 from modules.encoding import encoding
+
 app.register_blueprint(encoding, url_prefix='/encoding')
@@ -17,12 +17,11 @@ from application.utils.cdn import hash_file_path
 from application.utils.gcs import GoogleCloudStorageBucket
 from application.utils.encoding import Encoder
 
 
 log = logging.getLogger(__name__)
-
 file_storage = Blueprint('file_storage', __name__,
                          template_folder='templates',
-                         static_folder='../../static/storage',)
+                         static_folder='../../static/storage', )
 
 
 @file_storage.route('/gcs/<bucket_name>/<subdir>/')
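The odd-looking `, )` on the plus side above is worth flagging: a space before a closing paren is not a PEP 8 fix, and flake8 would flag the new spelling as E202 (whitespace before ')'). It is characteristic of an IDE reformatter preserving a trailing comma, presumably PyCharm. A sketch of both spellings:

    from flask import Blueprint

    # spelling this commit produces; flake8 reports E202 on the ' )'
    file_storage = Blueprint('file_storage', __name__,
                             template_folder='templates',
                             static_folder='../../static/storage', )

    # spelling that satisfies both PEP 8 and flake8
    file_storage = Blueprint('file_storage', __name__,
                             template_folder='templates',
                             static_folder='../../static/storage')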
@@ -50,7 +49,7 @@ def browse_gcs(bucket_name, subdir, file_path=None):
     return jsonify(listing)
 
 
-#@file_storage.route('/build_thumbnails/<path:file_path>')
+# @file_storage.route('/build_thumbnails/<path:file_path>')
 def build_thumbnails(file_path=None, file_id=None):
     """Given a file path or file ObjectId pointing to an image file, fetch it
     and generate a set of predefined variations (using generate_local_thumbnails).
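Rewriting `#@file_storage.route(...)` as `# @file_storage.route(...)` is the E265 class of fix: a block comment must start with '#' followed by one space, even when the comment is just disabled code:

    # E265: "block comment should start with '# '"
    #route = '/build_thumbnails/<path:file_path>'   # flagged
    # route = '/build_thumbnails/<path:file_path>'  # clean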
@@ -268,6 +267,7 @@ def delete_file(file_item):
             pass
         else:
             pass
+
     files_collection = app.data.driver.db['files']
     # Collect children (variations) of the original file
     children = files_collection.find({'parent': file_item['_id']})
@@ -4,6 +4,7 @@ from application import algolia_index_users
 from application import algolia_index_nodes
 from application.modules.file_storage import generate_link
 
+
 def algolia_index_user_save(user):
     # Define accepted roles
     accepted_roles = ['admin', 'subscriber', 'demo']
@@ -2,6 +2,7 @@ import datetime
 from hashlib import md5
 from application import app
 
+
 def hash_file_path(file_path, expiry_timestamp=None):
     if not file_path.startswith('/'):
         file_path = '/' + file_path;
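Note that one smell survives on both sides of this hunk: the trailing semicolon in `file_path = '/' + file_path;` is untouched, although flake8 flags it as E703 (statement ends with a semicolon). The eventual cleanup would simply drop it; a sketch with a hypothetical helper mirroring the hunk:

    def ensure_leading_slash(file_path):
        # E703 fixed: no semicolon needed to end a Python statement
        if not file_path.startswith('/'):
            file_path = '/' + file_path
        return file_path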
@@ -3,6 +3,7 @@ from zencoder import Zencoder
 from application import encoding_service_client
 from application import app
 
+
 class Encoder:
     """Generic Encoder wrapper. Provides a consistent API, independent from
     the encoding backend enabled.
@@ -78,7 +78,7 @@ def resize_and_crop(img_path, modified_path, size, crop_type='middle'):
     # Get current and desired ratio for the images
     img_ratio = img.size[0] / float(img.size[1])
     ratio = size[0] / float(size[1])
-    #The image is scaled/cropped vertically or horizontally depending on the ratio
+    # The image is scaled/cropped vertically or horizontally depending on the ratio
     if ratio > img_ratio:
         img = img.resize((size[0], int(round(size[0] * img.size[1] / img.size[0]))),
                          Image.ANTIALIAS)
@@ -90,7 +90,7 @@ def resize_and_crop(img_path, modified_path, size, crop_type='middle'):
                    int(round((img.size[1] + size[1]) / 2)))
         elif crop_type == 'bottom':
             box = (0, img.size[1] - size[1], img.size[0], img.size[1])
-        else :
+        else:
             raise ValueError('ERROR: invalid value for crop_type')
         img = img.crop(box)
     elif ratio < img_ratio:
@@ -104,10 +104,10 @@ def resize_and_crop(img_path, modified_path, size, crop_type='middle'):
                    int(round((img.size[0] + size[0]) / 2)), img.size[1])
         elif crop_type == 'bottom':
             box = (img.size[0] - size[0], 0, img.size[0], img.size[1])
-        else :
+        else:
             raise ValueError('ERROR: invalid value for crop_type')
         img = img.crop(box)
-    else :
+    else:
         img = img.resize((size[0], size[1]),
                          Image.ANTIALIAS)
     # If the scale is the same, we do not need to crop
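The three `else :` → `else:` changes above are E203 (whitespace before ':'); the same rule covers stray spaces before ',' and ';'. A compact sketch:

    crop_type = 'middle'
    # "else :" is flagged as E203; the colon binds directly to the keyword.
    if crop_type == 'middle':
        box = 'center crop'
    else:
        box = 'edge crop'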
@@ -184,8 +184,8 @@ def ffmpeg_encode(src, format, res_y=720):
         '-deadline', 'good',
         '-cpu-used', '0',
         '-vprofile', '0',
-        '-qmax', '51', '-qmin', '11', '-slices', '4','-b:v', '2M',
-        #'-acodec', 'libmp3lame', '-ab', '112k', '-ar', '44100',
+        '-qmax', '51', '-qmin', '11', '-slices', '4', '-b:v', '2M',
+        # '-acodec', 'libmp3lame', '-ab', '112k', '-ar', '44100',
         '-f', 'webm'])
 
     if not os.environ.get('VERBOSE'):
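`'4','-b:v'` → `'4', '-b:v'` is E231 (missing whitespace after ','), and the commented-out codec line gets the same E265 treatment as the disabled decorator earlier. Sketch:

    # E231: every comma is followed by one space
    args_before = ['-slices', '4','-b:v', '2M']   # flagged between '4' and '-b:v'
    args_after = ['-slices', '4', '-b:v', '2M']   # clean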
@@ -205,4 +205,3 @@ def ffmpeg_encode(src, format, res_y=720):
         dst = None
     # return path of the encoded video
     return dst
-
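This hunk and the final one (-82,4 +83,3) each delete a blank line at end of file, which flake8 reports as W391. A quick hypothetical check for the pattern:

    # True when a file ends with an extra blank line (W391); path is illustrative
    with open('utils/encoding.py') as source:
        print(source.read().endswith('\n\n'))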
@@ -1,6 +1,6 @@
 import os
 import subprocess
-#import logging
+# import logging
 from application import app
 from application.utils.gcs import GoogleCloudStorageBucket
 
@@ -12,14 +12,14 @@ BIN_RSYNC = app.config['BIN_RSYNC']
 
 def get_sizedata(filepath):
     outdata = dict(
-        size = int(os.stat(filepath).st_size)
+        size=int(os.stat(filepath).st_size)
     )
     return outdata
 
 
 def rsync(path, remote_dir=''):
     DRY_RUN = False
-    arguments=['--verbose', '--ignore-existing', '--recursive', '--human-readable']
+    arguments = ['--verbose', '--ignore-existing', '--recursive', '--human-readable']
     logs_path = app.config['CDN_SYNC_LOGS']
     storage_address = app.config['CDN_STORAGE_ADDRESS']
     user = app.config['CDN_STORAGE_USER']
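This hunk shows both directions of the '=' rule: inside a call, keyword and default arguments take no spaces around '=' (E251), while an ordinary assignment takes one space on each side (E225). Both, in one runnable sketch:

    import os

    # E251: keyword argument, no spaces around '='
    outdata = dict(size=int(os.stat(__file__).st_size))
    # E225: plain assignment, one space on each side of '='
    arguments = ['--verbose', '--ignore-existing', '--recursive', '--human-readable']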
@@ -46,7 +46,7 @@ def rsync(path, remote_dir=''):
         subprocess.Popen(['nohup', BIN_RSYNC] + folder_arguments, stdout=devnull, stderr=devnull)
 
 
-def remote_storage_sync(path): #can be both folder and file
+def remote_storage_sync(path): # can be both folder and file
     if os.path.isfile(path):
         filename = os.path.split(path)[1]
         rsync(path, filename[:2] + '/')
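The `#can be both folder and file` fix is the inline-comment sibling of E265: E262 wants inline comments to start with '# ', and E261 wants at least two spaces before the '#'. (The plus side above keeps a single space before the '#', so strictly E261 would still fire; left as the commit has it.) Sketch:

    is_file = True  # E261/E262: two spaces before '#', one space after it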
@@ -62,6 +62,7 @@ def push_to_storage(project_id, full_path, backend='cgs'):
     By default we store items in a Google Cloud Storage bucket named after the
     project id.
     """
+
     def push_single_file(project_id, full_path, backend):
         if backend == 'cgs':
             storage = GoogleCloudStorageBucket(project_id, subdir='_')
@@ -82,4 +83,3 @@ def push_to_storage(project_id, full_path, backend='cgs'):
             push_single_file(project_id, os.path.join(root, name), backend)
     else:
         raise IOError('ERROR: path not found')
-