On new project creation, use the backend storage set in config

Francesco Siddi 2017-07-14 12:04:24 +02:00
parent 5ec76f8801
commit e752a5dc87
3 changed files with 12 additions and 17 deletions

View File

@@ -17,6 +17,10 @@ from . import gcs
def default_storage_backend(name: str) -> Bucket:
    """Returns an instance of a Bucket, based on the default backend.
    Depending on the backend this may actually create the bucket.
    """
+    from flask import current_app
+    backend_name = current_app.config['STORAGE_BACKEND']
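Only two of the four lines added by this hunk are visible above; the rest presumably turn backend_name into a concrete Bucket instance. A hypothetical sketch of that dispatch, reusing the module-level "from . import gcs" shown in the hunk header (the 'local' value and LocalBucket class are assumptions, not part of this commit):

def default_storage_backend(name: str) -> Bucket:
    """Returns an instance of a Bucket, based on the default backend."""
    from flask import current_app

    backend_name = current_app.config['STORAGE_BACKEND']
    if backend_name == 'gcs':
        # 'gcs' is already imported at module level in this file.
        return gcs.GoogleCloudStorageBucket(name)
    # Assumed fallback backend; module and class name are hypothetical.
    from .local import LocalBucket
    return LocalBucket(name)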

View File

@@ -6,7 +6,7 @@ import typing
from bson import ObjectId
from gcloud.storage.client import Client
import gcloud.storage.blob
-from gcloud.exceptions import NotFound
+import gcloud.exceptions as gcloud_exc
from flask import current_app, g
from werkzeug.local import LocalProxy
@@ -58,7 +58,7 @@ class GoogleCloudStorageBucket(Bucket):
        try:
            self._gcs_bucket = gcs.get_bucket(name)
-        except NotFound:
+        except gcloud_exc.NotFound:
            self._gcs_bucket = gcs.bucket(name)
            # Hardcode the bucket location to EU
            self._gcs_bucket.location = 'EU'
@@ -72,6 +72,7 @@ class GoogleCloudStorageBucket(Bucket):
            # }
            # ]
            self._gcs_bucket.create()
+            log.info('Created GCS instance for project %s', name)

        self.subdir = subdir
@@ -115,7 +116,7 @@ class GoogleCloudStorageBucket(Bucket):
        try:
            gblob.delete()
            return True
-        except NotFound:
+        except gcloud_exc.NotFound:
            return False

    def copy_blob(self, blob: Blob, to_bucket: Bucket):
@@ -184,7 +185,7 @@ class GoogleCloudStorageBlob(Blob):
        # Reload to get the actual file properties from Google.
        try:
            self.gblob.reload()
-        except NotFound:
+        except gcloud_exc.NotFound:
            return False
        return self.gblob.exists()
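The except clauses above now go through the gcloud_exc module alias rather than a directly imported NotFound, which keeps every gcloud exception reachable under one name. A minimal, self-contained illustration of the same get-or-create pattern (a generic sketch, not this project's code):

import gcloud.exceptions as gcloud_exc
from gcloud.storage.client import Client

def get_or_create_bucket(client: Client, name: str):
    """Fetch a GCS bucket by name, creating it if it does not exist yet."""
    try:
        return client.get_bucket(name)  # raises gcloud_exc.NotFound if missing
    except gcloud_exc.NotFound:
        bucket = client.bucket(name)    # local handle only, no API call yet
        bucket.location = 'EU'          # mirrors the hardcoded location above
        bucket.create()                 # performs the actual creation
        return bucket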

View File

@@ -9,6 +9,7 @@ from pillar.api.node_types.comment import node_type_comment
from pillar.api.node_types.group import node_type_group
from pillar.api.node_types.group_texture import node_type_group_texture
from pillar.api.node_types.texture import node_type_texture
+from pillar.api.file_storage_backends import default_storage_backend
from pillar.api.file_storage_backends.gcs import GoogleCloudStorageBucket
from pillar.api.utils import authorization, authentication
from pillar.api.utils import remove_private_keys
@@ -167,19 +168,8 @@ def after_inserting_project(project, db_user):
    else:
        project['url'] = "p-{!s}".format(project_id)

-    # Initialize storage page (defaults to GCS)
-    if current_app.config.get('TESTING'):
-        log.warning('Not creating Google Cloud Storage bucket while running unit tests!')
-    else:
-        try:
-            gcs_storage = GoogleCloudStorageBucket(str(project_id))
-            # FIXME: don't use internal property, but use our bucket/blob API.
-            if gcs_storage._gcs_bucket.exists():
-                log.info('Created GCS instance for project %s', project_id)
-            else:
-                log.warning('Unable to create GCS instance for project %s', project_id)
-        except gcs_exceptions.Forbidden as ex:
-            log.warning('GCS forbids me to create CGS instance for project %s: %s', project_id, ex)
+    # Initialize storage using the default specified in STORAGE_BACKEND
+    default_storage_backend(str(project_id))

    # Commit the changes directly to the MongoDB; a PUT is not allowed yet,
    # as the project doesn't have a valid permission structure.
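With this hunk, project creation no longer hardcodes Google Cloud Storage; new projects get whatever backend STORAGE_BACKEND names. A hypothetical configuration override (the 'gcs' and 'local' values are assumptions based on the modules touched in this commit):

# Hypothetical deployment override, e.g. in a local config file:
STORAGE_BACKEND = 'gcs'   # assumed alternative value: 'local'

# after_inserting_project() then only calls
#   default_storage_backend(str(project_id))
# and the configured backend decides whether a GCS bucket is created.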