import os
import json
import requests
import bugsnag
from bugsnag.flask import handle_exceptions
from eve import Eve
from pymongo import MongoClient
from eve.auth import TokenAuth
from eve.auth import BasicAuth
from eve.io.mongo import Validator
from eve.methods.post import post_internal
from bson import ObjectId

from flask import g
from flask import request
from flask import url_for
from flask import abort

from datetime import datetime
from datetime import timedelta

RFC1123_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'
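# For reference, a value formatted with RFC1123_DATE_FORMAT looks like
# (illustrative):
#   datetime(2015, 11, 6, 16, 9, 54).strftime(RFC1123_DATE_FORMAT)
#   -> 'Fri, 06 Nov 2015 16:09:54 GMT'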


class SystemUtility(object):
    def __new__(cls, *args, **kwargs):
        raise TypeError("Base class may not be instantiated")

    @staticmethod
    def blender_id_endpoint():
        """Gets the endpoint for the authentication API. If the env variable
        is defined, it's possible to override the (default) production address.
        """
        return os.environ.get(
            'BLENDER_ID_ENDPOINT', "https://www.blender.org/id")


def validate(token):
    """Validate a token against the Blender ID server and return the JSON
    response as a dictionary, with the following keys:

    - status: 'success' or 'fail'
    - data: on success, a dictionary with information regarding the user

    Returns None if the server does not answer with a 200.
    """
    payload = dict(token=token)
    try:
        r = requests.post("{0}/u/validate_token".format(
            SystemUtility.blender_id_endpoint()), data=payload)
    except requests.exceptions.ConnectionError:
        # Connection errors are propagated to the caller.
        raise

    if r.status_code == 200:
        response = r.json()
    else:
        response = None
    return response
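# A successful validation response, as consumed by validate_token() below,
# presumably looks like (shape inferred from usage, values illustrative):
#   {'status': 'success',
#    'data': {'user': {'id': 1234, 'email': 'user@example.com'}}}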


def validate_token():
    """Validate the token provided in the request and populate the current_user
    flask.g object, so that permissions and access to a resource can be defined
    from it.
    """
    if not request.authorization:
        # If no authorization headers are provided, we are getting a request
        # from a non logged in user. Proceed accordingly.
        return None

    current_user = {}

    token = request.authorization.username
    tokens_collection = app.data.driver.db['tokens']

    lookup = {'token': token, 'expire_time': {"$gt": datetime.now()}}
    db_token = tokens_collection.find_one(lookup)
    if not db_token:
        # If no valid token is found, we issue a new request to the Blender ID
        # server to verify the validity of the token. We will get basic user
        # info if the user is authorized, and we will store a new token.
        validation = validate(token)
        if validation and validation['status'] == 'success':
            users = app.data.driver.db['users']
            email = validation['data']['user']['email']
            db_user = users.find_one({'email': email})

            # Ensure unique username
            username = email.split('@')[0]

            def make_unique_username(username, index=1):
                """Ensure uniqueness of a username by appending an incremental
                digit at the end of it.
                """
                user_from_username = users.find_one({'username': username})
                if user_from_username:
                    if index > 1:
                        # Strip the suffix added on the previous attempt.
                        username = username[:-len(str(index - 1))]
                    username = "{0}{1}".format(username, index)
                    return make_unique_username(username, index=index + 1)
                return username

            username = make_unique_username(username)
            full_name = username

            if not db_user:
                user_data = {
                    'full_name': full_name,
                    'username': username,
                    'email': email,
                    'auth': [{
                        'provider': 'blender-id',
                        'user_id': str(validation['data']['user']['id']),
                        'token': ''}]
                }
                r = post_internal('users', user_data)
                user_id = r[0]['_id']
                # A freshly created user is not in any group yet.
                groups = []
            else:
                user_id = db_user['_id']
                groups = db_user['groups']

            token_data = {
                'user': user_id,
                'token': token,
                'expire_time': datetime.now() + timedelta(hours=1)
            }
            post_internal('tokens', token_data)
            current_user = dict(
                user_id=user_id,
                token=token,
                groups=groups,
                token_expire_time=datetime.now() + timedelta(hours=1))
        else:
            return None
    else:
        users = app.data.driver.db['users']
        db_user = users.find_one(db_token['user'])
        current_user = dict(
            user_id=db_token['user'],
            token=db_token['token'],
            groups=db_user['groups'],
            token_expire_time=db_token['expire_time'])

    g.current_user = current_user
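# After a successful call, g.current_user holds a dict of the form (values
# illustrative):
#   {'user_id': ObjectId(...), 'token': '...', 'groups': [...],
#    'token_expire_time': datetime(...)}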


class NewAuth(TokenAuth):
    def check_auth(self, token, allowed_roles, resource, method):
        # Any non-empty token passes this coarse check; fine-grained access
        # control happens in check_permissions(), based on the current_user
        # that validate_token() stores on flask.g.
        if not token:
            return False
        validate_token()
        return True
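# Clients pass their Blender ID token as the username part of a standard
# basic-auth header (the password is ignored), e.g. (hypothetical host):
#   curl -u <token>: https://pillar.example.com/nodes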


class ValidateCustomFields(Validator):
    def convert_properties(self, properties, node_schema):
        for prop in node_schema:
            if prop not in properties:
                continue
            schema_prop = node_schema[prop]
            prop_type = schema_prop['type']
            if prop_type == 'dict':
                properties[prop] = self.convert_properties(
                    properties[prop], schema_prop['schema'])
            elif prop_type == 'list':
                if properties[prop] in ['', '[]']:
                    properties[prop] = []
                for k, item in enumerate(properties[prop]):
                    if 'schema' not in schema_prop:
                        continue
                    # Wrap the item in a single-key dict so we can recurse
                    # with the same (properties, schema) signature.
                    item_schema = {'item': schema_prop['schema']}
                    item_prop = {'item': item}
                    properties[prop][k] = self.convert_properties(
                        item_prop, item_schema)['item']
            # Convert RFC1123 datetime string to a datetime object
            elif prop_type == 'datetime':
                prop_val = properties[prop]
                properties[prop] = datetime.strptime(prop_val, RFC1123_DATE_FORMAT)
            elif prop_type == 'objectid':
                prop_val = properties[prop]
                if prop_val:
                    properties[prop] = ObjectId(prop_val)
                else:
                    properties[prop] = None

        return properties
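
    # For example (illustrative): given a dyn_schema entry
    #   {'when': {'type': 'datetime'}}
    # convert_properties({'when': 'Fri, 06 Nov 2015 16:09:54 GMT'}, node_schema)
    # returns {'when': datetime(2015, 11, 6, 16, 9, 54)}.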

    def _validate_valid_properties(self, valid_properties, field, value):
        node_types = app.data.driver.db['node_types']
        lookup = {'_id': ObjectId(self.document['node_type'])}
        node_type = node_types.find_one(lookup)

        try:
            value = self.convert_properties(value, node_type['dyn_schema'])
        except Exception as e:
            print("Error converting: {0}".format(e))

        v = Validator(node_type['dyn_schema'])
        val = v.validate(value)

        if val:
            return True
        else:
            # On failure the errors are collected on the validator itself.
            print(v.errors)
            self._error(
                field, "Error validating properties")


def post_item(entry, data):
    return post_internal(entry, data)


# We specify a settings.py file because when running on wsgi we can't detect it
# automatically. The default path (which works in Docker) can be overridden
# with an env variable.
settings_path = os.environ.get('EVE_SETTINGS', '/data/git/pillar/pillar/settings.py')
app = Eve(settings=settings_path, validator=ValidateCustomFields, auth=NewAuth)
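# For example, a deployment outside Docker could export the variable before
# starting the server (path and entry point hypothetical):
#   EVE_SETTINGS=/opt/pillar/settings.py python application.py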

import config
app.config.from_object(config.Deployment)

client = MongoClient(app.config['MONGO_HOST'], 27017)
db = client.eve

bugsnag.configure(
    api_key=app.config['BUGSNAG_API_KEY'],
    project_root="/data/git/pillar/pillar",
)
handle_exceptions(app)


def check_permissions(resource, method, append_allowed_methods=False):
    """Check user permissions to access a node. We look up node permissions from
    world to groups to users and match them with the computed user permissions.
    Returns the resource if access is granted, None otherwise; callers are
    expected to abort with a 403 when None is returned.
    """
    if method != 'GET' and append_allowed_methods:
        raise ValueError("append_allowed_methods only allowed with 'GET' method")

    allowed_methods = []

    current_user = g.get('current_user', None)

    if 'permissions' in resource:
        # If permissions are embedded in the node (this overrides any other
        # permission previously set)
        resource_permissions = resource['permissions']
    elif type(resource['node_type']) is dict:
        # If the node_type is embedded in the document, extract permissions
        # from there
        resource_permissions = resource['node_type']['permissions']
    else:
        # If the node_type is referenced with an ObjectID (was not embedded on
        # request), query it from the database and get the permissions
        node_types_collection = app.data.driver.db['node_types']
        node_type = node_types_collection.find_one(resource['node_type'])
        resource_permissions = node_type['permissions']

    if current_user:
        # If the user is authenticated, proceed to compare the group permissions
        for permission in resource_permissions['groups']:
            if permission['group'] in current_user['groups']:
                allowed_methods += permission['methods']
                if method in permission['methods'] and not append_allowed_methods:
                    return resource

        for permission in resource_permissions['users']:
            if current_user['user_id'] == permission['user']:
                allowed_methods += permission['methods']
                if method in permission['methods'] and not append_allowed_methods:
                    return resource

    # Check if the node is public or private. This must be set for non logged
    # in users to see the content. For most BI projects this is on by default,
    # while for private projects this will not be set at all.
    if 'world' in resource_permissions:
        allowed_methods += resource_permissions['world']
        if method in resource_permissions['world'] and not append_allowed_methods:
            return resource

    if append_allowed_methods and method in allowed_methods:
        resource['allowed_methods'] = list(set(allowed_methods))
        return resource

    return None
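# A permissions block, as consumed above, looks like (shape inferred from the
# lookups in check_permissions, values illustrative):
#   {'world': ['GET'],
#    'groups': [{'group': ObjectId(...), 'methods': ['GET', 'POST']}],
#    'users': [{'user': ObjectId(...), 'methods': ['GET', 'PUT']}]}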


def before_returning_item_permissions(response):
    # Run validation process, since GET on nodes entry point is public
    validate_token()
    if not check_permissions(response, 'GET', append_allowed_methods=True):
        return abort(403)


def before_returning_resource_permissions(response):
    # Run validation once; it populates g.current_user for every item.
    validate_token()
    for item in response['_items']:
        check_permissions(item, 'GET', append_allowed_methods=True)


def before_replacing_node(item, original):
    # Abort the PUT if the current user has no permission on the original node.
    if not check_permissions(original, 'PUT'):
        abort(403)


def before_inserting_nodes(items):
    """Before inserting a node in the collection we check if the user is allowed
    and we append the project id to it.
    """
    nodes_collection = app.data.driver.db['nodes']

    def find_parent_project(node):
        """Recursive function that finds the ultimate parent of a node."""
        if node and 'parent' in node:
            parent = nodes_collection.find_one({'_id': node['parent']})
            return find_parent_project(parent)
        if node:
            return node
        else:
            return None

    for item in items:
        # Abort the POST if the current user has no permission on this node.
        if not check_permissions(item, 'POST'):
            abort(403)
        if 'parent' in item and 'project' not in item:
            parent = nodes_collection.find_one({'_id': item['parent']})
            project = find_parent_project(parent)
            if project:
                item['project'] = project['_id']
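# For example (illustrative): a comment whose parent chain is
# comment -> shot -> scene -> project inherits the top-most node's _id as
# its 'project' field.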


app.on_fetched_item_nodes += before_returning_item_permissions
app.on_fetched_resource_nodes += before_returning_resource_permissions
app.on_fetched_item_node_types += before_returning_item_permissions
app.on_fetched_resource_node_types += before_returning_resource_permissions
app.on_replace_nodes += before_replacing_node
app.on_insert_nodes += before_inserting_nodes


def post_GET_user(request, payload):
    json_data = json.loads(payload.data)
    # Check if we are querying the users endpoint (instead of a single user)
    if json_data.get('_id') is None:
        return
    # json_data['computed_permissions'] = \
    #     compute_permissions(json_data['_id'], app.data.driver)
    payload.data = json.dumps(json_data)


app.on_post_GET_users += post_GET_user


from modules.file_storage import process_file
from modules.file_storage import delete_file


def post_POST_files(request, payload):
    """After a file object has been created, we do the necessary processing
    and further update it.
    """
    process_file(request.get_json())


app.on_post_POST_files += post_POST_files


from utils.cdn import hash_file_path
from application.utils.gcs import GoogleCloudStorageBucket


# Hook to check the backend of a file resource, to build an appropriate link
# that can be used by the client to retrieve the actual file.
def generate_link(backend, file_path, project_id=None):
    if backend == 'gcs':
        storage = GoogleCloudStorageBucket(project_id)
        blob = storage.Get(file_path)
        link = blob['signed_url']
    elif backend == 'pillar':
        link = url_for('file_storage.index', file_name=file_path, _external=True)
    elif backend == 'cdnsun':
        link = hash_file_path(file_path, None)
    else:
        link = None
    return link
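# Example usage (illustrative values):
#   link = generate_link('gcs', 'renders/frame_0001.png', project_id='...')
# GCS-backed files resolve to a signed bucket URL, 'pillar' files to the
# local /storage route, and 'cdnsun' files to a hashed CDN path.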


def before_returning_file(response):
    # TODO: add project id to all files
    project_id = None if 'project' not in response else str(response['project'])
    response['link'] = generate_link(response['backend'], response['file_path'], project_id)


def before_returning_files(response):
    for item in response['_items']:
        # TODO: add project id to all files
        project_id = None if 'project' not in item else str(item['project'])
        item['link'] = generate_link(item['backend'], item['file_path'], project_id)


app.on_fetched_item_files += before_returning_file
app.on_fetched_resource_files += before_returning_files


def before_deleting_file(item):
    delete_file(item)


app.on_delete_item_files += before_deleting_file


# The file_storage module needs app to be defined
from modules.file_storage import file_storage
#from modules.file_storage.serve import *
app.register_blueprint(file_storage, url_prefix='/storage')