From 84d297a3bdccbdee8837988636e8d5eac5d0009b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Sybren=20A=2E=20St=C3=BCvel?=
Date: Mon, 30 May 2016 16:47:33 +0200
Subject: [PATCH] Fixed issue uploading small files.

---
 pillar/application/modules/file_storage.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/pillar/application/modules/file_storage.py b/pillar/application/modules/file_storage.py
index 877572d4..74e36060 100644
--- a/pillar/application/modules/file_storage.py
+++ b/pillar/application/modules/file_storage.py
@@ -4,6 +4,7 @@ import mimetypes
 import os
 import tempfile
 import uuid
+import io
 
 from hashlib import md5
 import bson.tz_util
@@ -546,6 +547,14 @@ def stream_to_gcs(project_id):
         local_file = None
         stream_for_gcs = uploaded_file.stream
 
+    # Figure out the file size, as we need to pass this in explicitly to GCloud.
+    # Otherwise it always uses os.fstat(file_obj.fileno()).st_size, which isn't
+    # supported by a BytesIO object (even though it does have a fileno attribute).
+    if isinstance(stream_for_gcs, io.BytesIO):
+        file_size = len(stream_for_gcs.getvalue())
+    else:
+        file_size = os.fstat(stream_for_gcs.fileno()).st_size
+
     # Upload the file to GCS.
     from gcloud.streaming import transfer
     # Files larger than this many bytes will be streamed directly from disk, smaller
@@ -554,7 +563,7 @@ def stream_to_gcs(project_id):
     try:
         gcs = GoogleCloudStorageBucket(project_id)
         blob = gcs.bucket.blob('_/' + internal_fname, chunk_size=256 * 1024 * 2)
-        blob.upload_from_file(stream_for_gcs,
+        blob.upload_from_file(stream_for_gcs, size=file_size,
                               content_type=uploaded_file.mimetype)
     except Exception:
         log.exception('Error uploading file to Google Cloud Storage (GCS),'