Attempt at proper naming

Using Bucket and Blob as base classes.
This commit is contained in:
Francesco Siddi 2016-11-09 02:14:45 +01:00
parent c06533db5b
commit 4d6bf65a99
5 changed files with 107 additions and 65 deletions

View File

@ -45,8 +45,8 @@ def size_descriptor(width, height):
@skip_when_testing
def rename_on_gcs(bucket_name, from_path, to_path):
    """Rename a blob inside the project's GCS bucket.

    :param bucket_name: name of the bucket (by convention the project ID).
    :param from_path: current path of the blob inside the bucket.
    :param to_path: new path for the blob inside the bucket.
    """
    gcs = GoogleCloudStorageBucket(str(bucket_name))
    # The wrapped gcloud bucket is exposed as `gcs_bucket` (renamed from
    # `bucket` so it does not clash with the generic Bucket interface).
    blob = gcs.gcs_bucket.blob(from_path)
    gcs.gcs_bucket.rename_blob(blob, to_path)
@encoding.route('/zencoder/notifications', methods=['POST'])

View File

@ -28,7 +28,7 @@ from pillar.api.utils.cdn import hash_file_path
from pillar.api.utils.encoding import Encoder
from pillar.api.utils.gcs import GoogleCloudStorageBucket, \
GoogleCloudStorageBlob
from pillar.api.utils.storage import PillarStorage, PillarStorageFile
from pillar.api.utils.storage import LocalBucket, LocalBlob, default_storage_backend
log = logging.getLogger(__name__)
@ -317,7 +317,7 @@ def generate_link(backend, file_path, project_id=None, is_public=False):
return blob['public_url']
return blob['signed_url']
if backend == 'local':
f = PillarStorageFile(project_id, file_path)
f = LocalBlob(project_id, file_path)
return url_for('file_storage.index', file_name=f.partial_path,
_external=True, _scheme=current_app.config['SCHEME'])
if backend == 'pillar':
@ -705,14 +705,17 @@ def stream_to_storage(project_id):
# Fake a Blob object.
file_in_storage = type('Blob', (), {'size': file_size})
else:
if current_app.config['STORAGE_BACKEND'] == 'gcs':
file_in_storage, storage_backend = stream_to_gcs(
file_id, file_size, internal_fname, project_id, stream_for_gcs,
uploaded_file.mimetype)
elif current_app.config['STORAGE_BACKEND'] == 'local':
storage_backend = PillarStorage(project_id)
file_in_storage = PillarStorageFile(project_id, internal_fname)
file_in_storage.create_from_file(stream_for_gcs, file_size)
bucket = default_storage_backend(project_id)
blob = bucket.blob(internal_fname)
blob.create_from_file(stream_for_gcs, file_size)
# if current_app.config['STORAGE_BACKEND'] == 'gcs':
# file_in_storage, storage_backend = stream_to_gcs(
# file_id, file_size, internal_fname, project_id,
# stream_for_gcs, uploaded_file.mimetype)
# elif current_app.config['STORAGE_BACKEND'] == 'local':
# storage_backend = LocalBucket(project_id)
# file_in_storage = LocalBlob(project_id, internal_fname)
# file_in_storage.create_from_file(stream_for_gcs, file_size)
log.debug('Marking uploaded file id=%s, fname=%s, '
'size=%i as "queued_for_processing"',
@ -720,11 +723,11 @@ def stream_to_storage(project_id):
update_file_doc(file_id,
status='queued_for_processing',
file_path=internal_fname,
length=file_in_storage.size,
length=blob.size,
content_type=uploaded_file.mimetype)
log.debug('Processing uploaded file id=%s, fname=%s, size=%i', file_id,
internal_fname, file_in_storage.size)
internal_fname, blob.size)
process_file(storage_backend, file_id, local_file)
# Local processing is done, we can close the local file so it is removed.
@ -732,7 +735,7 @@ def stream_to_storage(project_id):
local_file.close()
log.debug('Handled uploaded file id=%s, fname=%s, size=%i, status=%i',
file_id, internal_fname, file_in_storage.size, status)
file_id, internal_fname, blob.size, status)
# Status is 200 if the file already existed, and 201 if it was newly
# created.

View File

@ -172,7 +172,7 @@ def after_inserting_project(project, db_user):
else:
try:
gcs_storage = GoogleCloudStorageBucket(str(project_id))
if gcs_storage.bucket.exists():
if gcs_storage.gcs_bucket.exists():
log.info('Created GCS instance for project %s', project_id)
else:
log.warning('Unable to create GCS instance for project %s', project_id)

View File

@ -9,7 +9,7 @@ from gcloud.exceptions import NotFound
from flask import current_app, g
from werkzeug.local import LocalProxy
from pillar.api.utils.storage import StorageBackend, FileInStorage
from pillar.api.utils.storage import register_backend, Bucket, Blob
log = logging.getLogger(__name__)
@ -34,7 +34,8 @@ def get_client():
gcs = LocalProxy(get_client)
class GoogleCloudStorageBucket(StorageBackend):
@register_backend('gcs')
class GoogleCloudStorageBucket(Bucket):
"""Cloud Storage bucket interface. We create a bucket for every project. In
the bucket we create first level subdirs as follows:
- '_' (will contain hashed assets, and stays on top of default listing)
@ -50,16 +51,16 @@ class GoogleCloudStorageBucket(StorageBackend):
"""
def __init__(self, bucket_name, subdir='_/'):
super(GoogleCloudStorageBucket, self).__init__(backend='cgs')
def __init__(self, name, subdir='_/'):
super(GoogleCloudStorageBucket, self).__init__(name=name)
try:
self.bucket = gcs.get_bucket(bucket_name)
self.gcs_bucket = gcs.get_bucket(name)
except NotFound:
self.bucket = gcs.bucket(bucket_name)
self.gcs_bucket = gcs.bucket(name)
# Hardcode the bucket location to EU
self.bucket.location = 'EU'
self.gcs_bucket.location = 'EU'
# Optionally enable CORS from * (currently only used for vrview)
# self.bucket.cors = [
# self.gcs_bucket.cors = [
# {
# "origin": ["*"],
# "responseHeader": ["Content-Type"],
@ -67,10 +68,13 @@ class GoogleCloudStorageBucket(StorageBackend):
# "maxAgeSeconds": 3600
# }
# ]
self.bucket.create()
self.gcs_bucket.create()
self.subdir = subdir
def blob(self, blob_name):
return GoogleCloudStorageBlob(name=blob_name, bucket=self)
def List(self, path=None):
"""Display the content of a subdir in the project bucket. If the path
points to a file the listing is simply empty.
@ -83,8 +87,8 @@ class GoogleCloudStorageBucket(StorageBackend):
prefix = os.path.join(self.subdir, path)
fields_to_return = 'nextPageToken,items(name,size,contentType),prefixes'
req = self.bucket.list_blobs(fields=fields_to_return, prefix=prefix,
delimiter='/')
req = self.gcs_bucket.list_blobs(fields=fields_to_return, prefix=prefix,
delimiter='/')
files = []
for f in req:
@ -134,7 +138,7 @@ class GoogleCloudStorageBucket(StorageBackend):
:param to_dict: Return the object as a dictionary.
"""
path = os.path.join(self.subdir, path)
blob = self.bucket.blob(path)
blob = self.gcs_bucket.blob(path)
if blob.exists():
if to_dict:
return self.blob_to_dict(blob)
@ -147,7 +151,7 @@ class GoogleCloudStorageBucket(StorageBackend):
"""Create new blob and upload data to it.
"""
path = path if path else os.path.join('_', os.path.basename(full_path))
blob = self.bucket.blob(path)
blob = self.gcs_bucket.blob(path)
if blob.exists():
return None
blob.upload_from_filename(full_path)
@ -179,22 +183,18 @@ class GoogleCloudStorageBucket(StorageBackend):
"""
assert isinstance(to_bucket, GoogleCloudStorageBucket)
return self.bucket.copy_blob(blob, to_bucket.bucket)
return self.gcs_bucket.copy_blob(blob, to_bucket.gcs_bucket)
def get_blob(self, internal_fname, chunk_size=256 * 1024 * 2):
return self.bucket.blob('_/' + internal_fname, chunk_size)
return self.gcs_bucket.blob('_/' + internal_fname, chunk_size)
class GoogleCloudStorageBlob(Blob):
    """GCS blob interface.

    Wraps a gcloud blob object living under the '_' top-level subdir of
    the project bucket.
    """

    def __init__(self, name, bucket):
        super(GoogleCloudStorageBlob, self).__init__(name, bucket)

        # 512 KiB chunk size for resumable uploads/downloads.
        self.blob = bucket.gcs_bucket.blob('_/' + name,
                                           chunk_size=256 * 1024 * 2)
def update_file_name(node):

View File

@ -22,52 +22,91 @@ def register_backend(backend_name):
return wrapper
class Bucket(object):
    """Can be a GCS bucket or simply a project folder in Pillar.

    :type name: string
    :param name: Name of the bucket. As a convention, we use the ID of
        the project to name the bucket.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, name):
        self.name = name

    def blob(self, blob_name):
        """Factory constructor for blob object.

        :type blob_name: string
        :param blob_name: The name of the blob to be instantiated.
        """
        return Blob(name=blob_name, bucket=self)

    @abc.abstractmethod
    def get_blob(self, blob_name):
        """Get a blob object by name.

        If the blob exists return the object, otherwise None.
        """
class Blob(object):
    """A wrapper for file or blob objects.

    :type name: string
    :param name: Name of the blob.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, name, bucket):
        self.name = name
        self.bucket = bucket
        # Cached size in bytes; subclasses set it once data is stored.
        self._size_in_bytes = None

    @property
    def size(self):
        """Size of the object, in bytes.

        :rtype: integer or ``NoneType``
        :returns: The size of the blob or ``None`` if the property
            is not set locally.
        """
        # Simplified: the original fell through to a redundant
        # `return self._size_in_bytes`, which is always None there.
        if self._size_in_bytes is None:
            return None
        return int(self._size_in_bytes)

    @abc.abstractmethod
    def create_from_file(self, uploaded_file, file_size):
        """Store the contents of *uploaded_file*; must set the size."""
@register_backend('local')
class LocalBucket(Bucket):
    """Bucket implementation backed by the local filesystem."""

    def __init__(self, name):
        super(LocalBucket, self).__init__(name=name)

    def blob(self, blob_name):
        """Factory constructor for a LocalBlob in this bucket."""
        return LocalBlob(name=blob_name, bucket=self)

    def get_blob(self, blob_name):
        # TODO(review): should check whether the file exists on disk and
        # return a LocalBlob; currently it always reports "not found".
        return None
class PillarStorageFile(FileInStorage):
def __init__(self, project_id, internal_fname):
super(PillarStorageFile, self).__init__(backend='local')
class LocalBlob(Blob):
def __init__(self, name, bucket):
super(LocalBlob, self).__init__(name=name, bucket=bucket)
self.size = None
self.partial_path = os.path.join(project_id[:2], project_id,
internal_fname[:2], internal_fname)
bucket_name = bucket.name
self.partial_path = os.path.join(bucket_name[:2], bucket_name,
name[:2], name)
self.path = os.path.join(
current_app.config['STORAGE_DIR'], self.partial_path)
@ -78,11 +117,11 @@ class PillarStorageFile(FileInStorage):
with open(self.path, 'wb') as outfile:
shutil.copyfileobj(uploaded_file, outfile)
self.size = file_size
self._size_in_bytes = file_size
def default_storage_backend(name):
    """Instantiate a bucket from the configured storage backend.

    :param name: bucket name (by convention the project ID).
    :returns: an instance of the Bucket subclass registered under the
        app's ``STORAGE_BACKEND`` config value (e.g. 'local' or 'gcs').
    """
    # Imported here to avoid a circular import at module load time.
    from flask import current_app

    backend_cls = backends[current_app.config['STORAGE_BACKEND']]
    return backend_cls(name)