Store generated links in MongoDB, along with their expiry datetime.
Links are only regenerated after they have expired. For backward compatibility, links are also generated when the file document has no link or no expiry timestamp yet. Each file has a single expiry timestamp that covers all of its links. In the future we may want to inspect the requested projection, both to see whether the client needs the links at all (which would prevent unnecessary regeneration) and to force inclusion of the expiry timestamp whenever links are requested.
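
Concretely, the regeneration rule described above can be sketched as follows. This is a minimal sketch, not code from the commit: the document values are hypothetical, and only the 'link' and 'link_expires' fields come from the new schema.

import datetime

import bson.tz_util

# Hypothetical file document; 'link' and 'link_expires' are the fields this
# commit adds to the files schema. The concrete values are made up.
file_doc = {
    'backend': 'gcs',
    'link': 'https://example.com/signed-url',
    'link_expires': datetime.datetime(2016, 3, 1, tzinfo=bson.tz_util.utc),
}

# Per the commit message, a link is (re)generated only when it is missing,
# has no expiry timestamp, or that timestamp lies in the past.
now = datetime.datetime.now(tz=bson.tz_util.utc)
needs_new_link = ('link' not in file_doc
                  or 'link_expires' not in file_doc
                  or file_doc['link_expires'] <= now)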
@@ -102,7 +102,7 @@ logging.getLogger('werkzeug').setLevel(logging.INFO)
 log = logging.getLogger(__name__)
 log.setLevel(logging.DEBUG if app.config['DEBUG'] else logging.INFO)
-log.info('Pillar starting')
+log.info('Pillar starting, debug=%s', app.config['DEBUG'])
 
 bugsnag.configure(
     api_key=app.config['BUGSNAG_API_KEY'],
@@ -148,9 +148,7 @@ from utils.algolia import algolia_index_node_save
 from utils.activities import activity_subscribe
 from utils.activities import activity_object_add
 from utils.activities import notification_parse
-from modules.file_storage import process_file
-from modules.file_storage import delete_file
-from modules.file_storage import generate_link
+from .modules import file_storage
 
 
 def before_returning_item_permissions(response):
@@ -261,7 +259,7 @@ def item_parse_attachments(response):
         # Get the correct variation from the file
         thumbnail = next((item for item in f['variations'] if
                           item['size'] == size), None)
-        l = generate_link(f['backend'], thumbnail['file_path'], str(f['project']))
+        l = file_storage.generate_link(f['backend'], thumbnail['file_path'], str(f['project']))
         # Build Markdown img string
         l = ''.format(slug, l, f['name'])
         # Parse the content of the file and replace the attachment
@@ -351,36 +349,17 @@ def post_POST_files(request, payload):
     """After a file object has been created, we do the necessary processing
     and further update it.
     """
-    process_file(request.get_json())
+    file_storage.process_file(request.get_json())
 
 
 app.on_post_POST_files += post_POST_files
 
 
-def before_returning_file(response):
-    # TODO: add project id to all files
-    project_id = None if 'project' not in response else str(response['project'])
-    response['link'] = generate_link(
-        response['backend'], response['file_path'], project_id)
-    if 'variations' in response:
-        for variation in response['variations']:
-            variation['link'] = generate_link(
-                response['backend'], variation['file_path'], project_id)
-
-
-def before_returning_files(response):
-    for item in response['_items']:
-        # TODO: add project id to all files
-        project_id = None if 'project' not in item else str(item['project'])
-        item['link'] = generate_link(item['backend'], item['file_path'], project_id)
-
-
-app.on_fetched_item_files += before_returning_file
-app.on_fetched_resource_files += before_returning_files
+app.on_fetched_item_files += file_storage.before_returning_file
+app.on_fetched_resource_files += file_storage.before_returning_files
 
 
 def before_deleting_file(item):
-    delete_file(item)
+    file_storage.delete_file(item)
 
 
 app.on_delete_item_files += before_deleting_file
@@ -1,6 +1,9 @@
+import datetime
 import logging
 import os
 from multiprocessing import Process
 
+import bson.tz_util
+from bson import ObjectId
 from flask import request
 from flask import Blueprint
@@ -8,7 +11,10 @@ from flask import jsonify
 from flask import send_from_directory
 from flask import url_for, helpers
 from eve.methods.put import put_internal
+from eve.methods.patch import patch_internal
 
 from application import app
+from application.utils import remove_private_keys
 from application.utils.imaging import generate_local_thumbnails
 from application.utils.imaging import get_video_data
 from application.utils.imaging import ffmpeg_encode
@@ -298,3 +304,52 @@ def generate_link(backend, file_path, project_id=None, is_public=False):
     else:
         link = None
     return link
+
+
+def before_returning_file(response):
+    ensure_valid_link(response)
+
+
+def before_returning_files(response):
+    for item in response['_items']:
+        ensure_valid_link(item)
+
+
+def ensure_valid_link(response):
+    """Ensures the file item has valid file links using generate_link(...)."""
+
+    log.debug('Inspecting link for file %s', response['_id'])
+
+    # Check link expiry.
+    now = datetime.datetime.now(tz=bson.tz_util.utc)
+    if 'link_expires' in response:
+        link_expires = response['link_expires']
+        if now < link_expires:
+            # Not expired yet, so don't bother regenerating anything.
+            log.debug('Link expires at %s, which is in the future, so not generating new link', link_expires)
+            return
+
+        log.debug('Link expired at %s, which is in the past; generating new link', link_expires)
+    else:
+        log.debug('No expiry date for link; generating new link')
+
+    # Generate a new link for the file and all its variations.
+    project_id = str(response['project']) if 'project' in response else None  # TODO: add project id to all files
+    backend = response['backend']
+    response['link'] = generate_link(backend, response['file_path'], project_id)
+    if 'variations' in response:
+        for variation in response['variations']:
+            variation['link'] = generate_link(backend, variation['file_path'], project_id)
+
+    # Construct the new expiry datetime.
+    validity_secs = app.config['FILE_LINK_VALIDITY'][backend]
+    response['link_expires'] = now + datetime.timedelta(seconds=validity_secs)
+
+    patch_info = remove_private_keys(response)
+    (patch_resp, _, _, _) = patch_internal('files', patch_info, _id=ObjectId(response['_id']))
+    if patch_resp.get('_status') == 'ERR':
+        log.warning('Unable to save new links for file %s: %r', response['_id'], patch_resp)
+        # TODO: raise a snag.
+        response['_updated'] = now
+    else:
+        response['_updated'] = patch_resp['_updated']
@@ -0,0 +1,12 @@
+import copy
+
+
+def remove_private_keys(document):
+    """Removes any key that starts with an underscore, returns result as new dictionary."""
+
+    patch_info = copy.deepcopy(document)
+    for key in list(patch_info.keys()):
+        if key.startswith('_'):
+            del patch_info[key]
+
+    return patch_info
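For illustration, here is how remove_private_keys() behaves. The document below is hypothetical; the import path matches the one added to file_storage above.

from application.utils import remove_private_keys

# Hypothetical Eve document; '_id' and '_updated' are Eve's private meta fields.
doc = {'_id': 'abc123', '_updated': 'some timestamp', 'name': 'image.png'}

print(remove_private_keys(doc))  # {'name': 'image.png'}
print(doc['_id'])  # 'abc123' -- the original document is untouched (deep copy).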
@@ -65,5 +65,12 @@ class Development(object):
     ALGOLIA_API_KEY = ''
     ALGOLIA_INDEX_USERS = ''
 
+    # Validity period of links, per file storage backend. Expressed in seconds.
+    # Shouldn't be more than a year, as this isn't supported by HTTP/1.1.
+    FILE_LINK_VALIDITY = defaultdict(
+        lambda: 3600 * 24 * 30,  # default of 1 month.
+        gcs=3600 * 23,  # 23 hours for Google Cloud Storage.
+    )
+
 
 class Deployment(Development): pass
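A standalone sketch of the lookup behaviour of this config; it assumes "from collections import defaultdict" is in scope in the config module, and 'local' is just a stand-in backend name:

from collections import defaultdict

# Same construction as the config above: per-backend validity with a fallback.
FILE_LINK_VALIDITY = defaultdict(
    lambda: 3600 * 24 * 30,  # default of 1 month.
    gcs=3600 * 23,  # 23 hours for Google Cloud Storage.
)

print(FILE_LINK_VALIDITY['gcs'])    # 82800 seconds (23 hours)
print(FILE_LINK_VALIDITY['local'])  # 2592000 seconds (30 days), via the default factory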
@@ -413,6 +413,12 @@ files_schema = {
     #'required': True,
         'unique': True,
     },
+    'link': {
+        'type': 'string',
+    },
+    'link_expires': {
+        'type': 'datetime',
+    },
     'project': {
         # The project node the file belongs to (does not matter if it is
         # attached to an asset or something else). We use the project id as
@@ -463,6 +469,9 @@ files_schema = {
             'file_path': {
                 'type': 'string',
             },
+            'link': {
+                'type': 'string',
+            }
         }
     }
 },