Fixed issue uploading small files.

Sybren A. Stüvel 2016-05-30 16:47:33 +02:00
parent e242701ed0
commit 84d297a3bd


@@ -4,6 +4,7 @@ import mimetypes
 import os
 import tempfile
 import uuid
+import io
 from hashlib import md5
 import bson.tz_util
@@ -546,6 +547,14 @@ def stream_to_gcs(project_id):
     local_file = None
     stream_for_gcs = uploaded_file.stream
+    # Figure out the file size, as we need to pass this in explicitly to GCloud.
+    # Otherwise it always uses os.fstat(file_obj.fileno()).st_size, which isn't
+    # supported by a BytesIO object (even though it does have a fileno attribute).
+    if isinstance(stream_for_gcs, io.BytesIO):
+        file_size = len(stream_for_gcs.getvalue())
+    else:
+        file_size = os.fstat(stream_for_gcs.fileno()).st_size
+
     # Upload the file to GCS.
     from gcloud.streaming import transfer
     # Files larger than this many bytes will be streamed directly from disk, smaller
@@ -554,7 +563,7 @@ def stream_to_gcs(project_id):
     try:
         gcs = GoogleCloudStorageBucket(project_id)
         blob = gcs.bucket.blob('_/' + internal_fname, chunk_size=256 * 1024 * 2)
-        blob.upload_from_file(stream_for_gcs,
+        blob.upload_from_file(stream_for_gcs, size=file_size,
                               content_type=uploaded_file.mimetype)
     except Exception:
         log.exception('Error uploading file to Google Cloud Storage (GCS),'
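For context, the claim in the added comment can be checked with a short, self-contained sketch: io.BytesIO inherits a fileno() method from IOBase, but calling it raises io.UnsupportedOperation, so a size lookup via os.fstat(stream.fileno()) only works for file-backed streams. The stream_size() helper below is a hypothetical illustration of the same branching added in this commit, not code from the Pillar repository.

    # Hypothetical sketch, not part of this commit: mirrors the size logic above.
    import io
    import os
    import tempfile


    def stream_size(stream):
        """Return the size in bytes of an uploaded stream.

        In-memory BytesIO objects are measured via their buffer; file-backed
        streams via os.fstat() on their file descriptor.
        """
        if isinstance(stream, io.BytesIO):
            # BytesIO.fileno() exists but raises io.UnsupportedOperation,
            # so os.fstat() cannot be used here.
            return len(stream.getvalue())
        return os.fstat(stream.fileno()).st_size


    if __name__ == '__main__':
        small = io.BytesIO(b'tiny upload')
        try:
            small.fileno()
        except io.UnsupportedOperation:
            print('BytesIO.fileno() raises io.UnsupportedOperation')
        print('in-memory size:', stream_size(small))  # 11

        with tempfile.TemporaryFile() as big:
            big.write(b'x' * 2048)
            big.flush()
            print('file-backed size:', stream_size(big))  # 2048

With the size known up front, the blob.upload_from_file(stream_for_gcs, size=file_size, ...) call no longer falls back to os.fstat(file_obj.fileno()).st_size itself, which is what made small (in-memory) uploads fail before this change.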