From c6636e4ac7a8630fa4204d9baef7ca2cffefc86e Mon Sep 17 00:00:00 2001 From: Francesco Siddi Date: Thu, 7 Jan 2016 20:06:25 +0100 Subject: [PATCH] Assign ContentDisposition to GCS items on save --- pillar/application/__init__.py | 30 +++++++++++++++++++++++++++--- pillar/application/utils/gcs.py | 13 +++++++++++-- 2 files changed, 38 insertions(+), 5 deletions(-) diff --git a/pillar/application/__init__.py b/pillar/application/__init__.py index a17dbf94..e4162906 100644 --- a/pillar/application/__init__.py +++ b/pillar/application/__init__.py @@ -240,6 +240,31 @@ bugsnag.configure( project_root = "/data/git/pillar/pillar", ) handle_exceptions(app) +from utils.cdn import hash_file_path +from application.utils.gcs import GoogleCloudStorageBucket + +def update_file_name(item): + """Assign to the GCS blob the same name as the asset node. This way when + downloading an asset we get a human-readable name. + """ + + def _update_name(item, file_id): + files_collection = app.data.driver.db['files'] + f = files_collection.find_one({'_id': file_id}) + if f['backend'] == 'gcs': + storage = GoogleCloudStorageBucket(str(item['project'])) + blob = storage.Get(f['file_path'], to_dict=False) + storage.update_name(blob, item['name']) + + # Currently we search for 'file' and 'files' keys in the object properties. + # This could become a bit more flexible and rely on a true reference of the + # file object type from the schema. 
+ if 'file' in item['properties']: + _update_name(item, item['properties']['file']) + + elif 'files' in item['properties']: + for f in item['properties']['files']: + _update_name(item, f['file']) def check_permissions(resource, method, append_allowed_methods=False): @@ -323,6 +348,7 @@ def before_returning_resource_permissions(response): def before_replacing_node(item, original): check_permissions(original, 'PUT') + update_file_name(item) def before_inserting_nodes(items): """Before inserting a node in the collection we check if the user is allowed @@ -400,7 +426,6 @@ app.on_fetched_resource_node_types += before_returning_resource_permissions app.on_replace_nodes += before_replacing_node app.on_insert_nodes += before_inserting_nodes - def post_GET_user(request, payload): json_data = json.loads(payload.data) # Check if we are querying the users endpoint (instead of the single user) @@ -423,8 +448,7 @@ def post_POST_files(request, payload): app.on_post_POST_files += post_POST_files -from utils.cdn import hash_file_path -from application.utils.gcs import GoogleCloudStorageBucket + # Hook to check the backend of a file resource, to build an appropriate link # that can be used by the client to retrieve the actual file. 
def generate_link(backend, file_path, project_id=None): diff --git a/pillar/application/utils/gcs.py b/pillar/application/utils/gcs.py index eef1a113..69316723 100644 --- a/pillar/application/utils/gcs.py +++ b/pillar/application/utils/gcs.py @@ -32,14 +32,14 @@ class GoogleCloudStorageBucket(object): private_key_pem = f.read() credentials_pem = SignedJwtAssertionCredentials(GCS_CLIENT_EMAIL, private_key_pem, - 'https://www.googleapis.com/auth/devstorage.read_write') + 'https://www.googleapis.com/auth/devstorage.full_control') # Load private key in p12 format (used by the singed urls generator) with open(GCS_PRIVATE_KEY_P12) as f: private_key_pkcs12 = f.read() credentials_p12 = SignedJwtAssertionCredentials(GCS_CLIENT_EMAIL, private_key_pkcs12, - 'https://www.googleapis.com/auth/devstorage.read_write') + 'https://www.googleapis.com/auth/devstorage.full_control') def __init__(self, bucket_name, subdir='_/'): @@ -141,3 +141,12 @@ class GoogleCloudStorageBucket(object): return True except NotFound: return None + + + def update_name(self, blob, name): + """Set the ContentDisposition metadata so that when a file is downloaded + it has a human-readable name. + """ + blob.content_disposition = "attachment; filename={0}".format(name) + blob.patch() +