From 41a278c4f09044d1c5c3cfb155c39b8818d8372c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sybren=20A=2E=20St=C3=BCvel?= Date: Fri, 13 May 2016 17:40:37 +0200 Subject: [PATCH] Removed size parameter from blob.upload_from_file() to prevent mem error By not passing the size, the gcloud module uses os.fstat to determine the size of the to-be-uploaded file, and switches to resumable uploads. This should prevent memory errors when uploading large files. --- pillar/application/modules/file_storage.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pillar/application/modules/file_storage.py b/pillar/application/modules/file_storage.py index e52b3126..6b2ebdf2 100644 --- a/pillar/application/modules/file_storage.py +++ b/pillar/application/modules/file_storage.py @@ -550,8 +550,7 @@ def stream_to_gcs(project_id): gcs = GoogleCloudStorageBucket(project_id) blob = gcs.bucket.blob('_/' + internal_fname, chunk_size=256 * 1024 * 2) blob.upload_from_file(stream_for_gcs, - content_type=uploaded_file.mimetype, - size=uploaded_file.content_length) + content_type=uploaded_file.mimetype) except Exception: log.exception('Error uploading file to Google Cloud Storage (GCS),' ' aborting handling of uploaded file (id=%s).', file_id)