PEP8 formatting for the entire project.

Conflicts:
	pillar/application/modules/file_storage.py
2016-03-21 12:25:07 +01:00
parent 9bb95ab054
commit 8d9a23b7b9
11 changed files with 105 additions and 83 deletions

View File

@@ -15,7 +15,6 @@ from eve import Eve
from eve.auth import TokenAuth
from eve.io.mongo import Validator
RFC1123_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'
@@ -82,6 +81,7 @@ class ValidateCustomFields(Validator):
self._error(
field, "Error validating properties")
# We specify a settings.py file because, when running under WSGI, we can't
# detect it automatically. The default path (which works in Docker) can be
# overridden with an env variable.
@@ -90,6 +90,7 @@ settings_path = os.environ.get(
app = Eve(settings=settings_path, validator=ValidateCustomFields, auth=NewAuth)
import config
app.config.from_object(config.Deployment)
# Configure logging
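For context, the settings lookup described in the comment a few lines up boils down to an environment-variable override with a Docker-friendly default. A minimal sketch of that pattern, with a hypothetical variable name and default path (neither is taken from this diff):

import os

# Hypothetical names: fall back to a path that exists inside the Docker
# image unless an environment variable points somewhere else.
settings_path = os.environ.get(
    'EVE_SETTINGS', '/data/git/pillar/pillar/settings.py')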
@@ -157,24 +158,29 @@ def before_returning_item_permissions(response):
validate_token()
check_permissions(response, 'GET', append_allowed_methods=True)
def before_returning_resource_permissions(response):
for item in response['_items']:
validate_token()
check_permissions(item, 'GET', append_allowed_methods=True)
def before_replacing_node(item, original):
check_permissions(original, 'PUT')
update_file_name(item)
def after_replacing_node(item, original):
"""Push an update to the Algolia index when a node item is updated"""
algolia_index_node_save(item)
def before_inserting_nodes(items):
"""Before inserting a node in the collection we check if the user is allowed
and we append the project id to it.
"""
nodes_collection = app.data.driver.db['nodes']
def find_parent_project(node):
"""Recursive function that finds the ultimate parent of a node."""
if node and 'parent' in node:
@@ -184,6 +190,7 @@ def before_inserting_nodes(items):
return node
else:
return None
for item in items:
check_permissions(item, 'POST')
if 'parent' in item and 'project' not in item:
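Most of find_parent_project's body is hidden by the hunk; a plausible reconstruction of the recursion, matching the visible branches (the find_one lookup is an assumption, and nodes_collection is the collection defined earlier in this function):

def find_parent_project(node):
    """Recursive function that finds the ultimate parent of a node."""
    if node and 'parent' in node:
        # Assumed: fetch the parent document and recurse until the root.
        parent = nodes_collection.find_one({'_id': node['parent']})
        return find_parent_project(parent)
    elif node:
        return node
    else:
        return None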
@@ -225,6 +232,7 @@ def after_inserting_nodes(items):
context_object_id
)
def item_parse_attachments(response):
"""Before returning a response, check if the 'attachments' property is
defined. If yes, load the file (for the moment only images) in the required
@@ -266,10 +274,12 @@ def item_parse_attachments(response):
else:
response[field_name_path[0]] = field_content
def resource_parse_attachments(response):
for item in response['_items']:
item_parse_attachments(item)
def project_node_type_has_method(response):
"""Check for a specific request arg, and check generate the allowed_methods
list for the required node_type.
@@ -300,6 +310,7 @@ def before_returning_resource_notifications(response):
if request.args.get('parse'):
notification_parse(item)
app.on_fetched_item_nodes += before_returning_item_permissions
app.on_fetched_item_nodes += item_parse_attachments
app.on_fetched_resource_nodes += before_returning_resource_permissions
@@ -316,6 +327,7 @@ app.on_fetched_item_projects += before_returning_item_permissions
app.on_fetched_item_projects += project_node_type_has_method
app.on_fetched_resource_projects += before_returning_resource_permissions
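All of these registrations rely on Eve's event hooks: for every resource in the DOMAIN, Eve exposes events such as on_fetched_item_<resource> and on_fetched_resource_<resource>, and handlers are plain callables attached with +=. A self-contained sketch of the mechanism (resource name and callback are illustrative):

from eve import Eve

# A dict can stand in for settings.py; one empty 'nodes' resource suffices.
app = Eve(settings={'DOMAIN': {'nodes': {}}})

def log_fetched_node(response):
    # Resource-specific item hooks receive the fetched document.
    print('fetched node', response.get('_id'))

app.on_fetched_item_nodes += log_fetched_node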
def post_GET_user(request, payload):
json_data = json.loads(payload.data)
# Check if we are querying the users endpoint (instead of the single user)
@@ -325,19 +337,23 @@ def post_GET_user(request, payload):
# compute_permissions(json_data['_id'], app.data.driver)
payload.data = json.dumps(json_data)
def after_replacing_user(item, original):
"""Push an update to the Algolia index when a user item is updated"""
algolia_index_user_save(item)
app.on_post_GET_users += post_GET_user
app.on_replace_users += after_replacing_user
def post_POST_files(request, payload):
"""After an file object has been created, we do the necessary processing
and further update it.
"""
process_file(request.get_json())
app.on_post_POST_files += post_POST_files
@@ -351,6 +367,7 @@ def before_returning_file(response):
variation['link'] = generate_link(
response['backend'], variation['file_path'], project_id)
def before_returning_files(response):
for item in response['_items']:
# TODO: add project id to all files
@@ -365,12 +382,15 @@ app.on_fetched_resource_files += before_returning_files
def before_deleting_file(item):
delete_file(item)
app.on_delete_item_files += before_deleting_file
# The file_storage module needs app to be defined
from modules.file_storage import file_storage
# from modules.file_storage.serve import *
app.register_blueprint(file_storage, url_prefix='/storage')
# The encoding module (receives notifications and reports progress)
from modules.encoding import encoding
app.register_blueprint(encoding, url_prefix='/encoding')
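The import order in these last lines is deliberate: the file_storage module presumably does from application import app at import time (its code below uses app.data.driver), so importing it before app exists would create a circular import. The registration itself is the standard Flask blueprint pattern; a minimal self-contained sketch:

from flask import Blueprint, Flask

file_storage = Blueprint('file_storage', __name__)

@file_storage.route('/file')
def index():
    # Illustrative view; the real blueprint serves stored files.
    return 'served from the storage blueprint'

app = Flask(__name__)
# With url_prefix, the view above becomes reachable at /storage/file.
app.register_blueprint(file_storage, url_prefix='/storage')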

View File

@@ -17,7 +17,6 @@ from application.utils.cdn import hash_file_path
from application.utils.gcs import GoogleCloudStorageBucket
from application.utils.encoding import Encoder
log = logging.getLogger(__name__)
file_storage = Blueprint('file_storage', __name__,
@@ -268,6 +267,7 @@ def delete_file(file_item):
pass
else:
pass
files_collection = app.data.driver.db['files']
# Collect children (variations) of the original file
children = files_collection.find({'parent': file_item['_id']})
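Only the tail of delete_file is visible, but the data model is clear: variations are stored as separate documents whose 'parent' field points at the original, so deletion has to cascade. A hedged sketch of that cascade (the recursive call and the backend cleanup are assumptions, not shown in this hunk):

def delete_file(file_item):
    files_collection = app.data.driver.db['files']
    # Collect children (variations) of the original file
    children = files_collection.find({'parent': file_item['_id']})
    for child in children:
        delete_file(child)  # assumed: remove each variation first
    # Backend-specific removal of the stored blob would follow here.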

View File

@@ -4,6 +4,7 @@ from application import algolia_index_users
from application import algolia_index_nodes
from application.modules.file_storage import generate_link
def algolia_index_user_save(user):
# Define accepted roles
accepted_roles = ['admin', 'subscriber', 'demo']
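The accepted_roles list suggests the handler filters what gets pushed to the search index. An illustrative sketch of that filtering (the document fields are assumptions; save_object is the standard Algolia client call, and algolia_index_users is the index imported above):

def algolia_index_user_save(user):
    accepted_roles = ['admin', 'subscriber', 'demo']
    # Only index roles that are meant to be searchable.
    roles = [role for role in user.get('roles', []) if role in accepted_roles]
    algolia_index_users.save_object({
        'objectID': str(user['_id']),
        'username': user.get('username'),
        'roles': roles,
    })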

View File

@@ -2,6 +2,7 @@ import datetime
from hashlib import md5
from application import app
def hash_file_path(file_path, expiry_timestamp=None):
if not file_path.startswith('/'):
file_path = '/' + file_path
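The md5 import and the expiry parameter point at a signed-URL scheme: the path is hashed together with a secret and an expiry timestamp so the CDN can validate links without calling back to the application. A generic sketch of the idea (the secret, token recipe, and query parameters are placeholders, not this project's actual scheme):

import datetime
from hashlib import md5

def hash_file_path(file_path, expiry_timestamp=None, secret='placeholder'):
    if not file_path.startswith('/'):
        file_path = '/' + file_path
    if expiry_timestamp is None:
        expiry = datetime.datetime.now() + datetime.timedelta(hours=24)
        expiry_timestamp = int(expiry.timestamp())
    # Placeholder token recipe: md5(secret + path + expiry).
    token = md5(('%s%s%d' % (secret, file_path, expiry_timestamp))
                .encode('utf-8')).hexdigest()
    return '%s?token=%s&expires=%d' % (file_path, token, expiry_timestamp)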

View File

@@ -3,6 +3,7 @@ from zencoder import Zencoder
from application import encoding_service_client
from application import app
class Encoder:
"""Generic Encoder wrapper. Provides a consistent API, independent from
the encoding backend enabled.
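The docstring describes an adapter: callers use one Encoder API while the configured backend does the actual work. A bare-bones sketch of that dispatch, leaning on the imports above (the config key, method name, and the fallback to the ffmpeg_encode helper seen in the next file are illustrative, not this module's real interface):

class Encoder:
    @staticmethod
    def job_create(src_file):
        # Illustrative dispatch; Zencoder is the only backend imported here.
        if app.config.get('ENCODING_BACKEND') == 'zencoder':
            return encoding_service_client.job.create(src_file)
        # Hypothetical local fallback via the project's ffmpeg helper.
        return ffmpeg_encode(src_file, format='mp4')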

View File

@@ -205,4 +205,3 @@ def ffmpeg_encode(src, format, res_y=720):
dst = None
# return path of the encoded video
return dst

View File

@@ -62,6 +62,7 @@ def push_to_storage(project_id, full_path, backend='cgs'):
By default we store items in a Google Cloud Storage bucket named after the
project id.
"""
def push_single_file(project_id, full_path, backend):
if backend == 'cgs':
storage = GoogleCloudStorageBucket(project_id, subdir='_')
@@ -82,4 +83,3 @@ def push_to_storage(project_id, full_path, backend='cgs'):
push_single_file(project_id, os.path.join(root, name), backend)
else:
raise IOError('ERROR: path not found')
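From the visible fragments the control flow of push_to_storage is recoverable: a single file is pushed directly, a directory is walked file by file, and anything else raises. A reconstruction under those assumptions (the isfile/isdir branches are inferred, the upload call is not shown in the diff, and GoogleCloudStorageBucket is the project's wrapper imported elsewhere):

import os

def push_to_storage(project_id, full_path, backend='cgs'):
    def push_single_file(project_id, full_path, backend):
        if backend == 'cgs':
            # The exact upload call on the bucket wrapper is not shown.
            storage = GoogleCloudStorageBucket(project_id, subdir='_')

    if os.path.isfile(full_path):
        push_single_file(project_id, full_path, backend)
    elif os.path.isdir(full_path):
        for root, dirs, files in os.walk(full_path):
            for name in files:
                push_single_file(project_id, os.path.join(root, name), backend)
    else:
        raise IOError('ERROR: path not found')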