Limit uploadable file size to 32 MiB for non-subscribers.

Sybren A. Stüvel 2016-07-08 11:26:56 +02:00
parent a322aee0c7
commit 5d26de633c
4 changed files with 107 additions and 5 deletions

View File

@@ -27,7 +27,7 @@ import werkzeug.exceptions as wz_exceptions
 from application import utils
 from application.utils import remove_private_keys, authentication
-from application.utils.authorization import require_login, user_has_role
+from application.utils.authorization import require_login, user_has_role, user_matches_roles
 from application.utils.cdn import hash_file_path
 from application.utils.encoding import Encoder
 from application.utils.gcs import GoogleCloudStorageBucket
@@ -529,6 +529,27 @@ def override_content_type(uploaded_file):
     del uploaded_file._parsed_content_type


+def assert_file_size_allowed(file_size):
+    """Asserts that the current user is allowed to upload a file of the given size.
+
+    :raises wz_exceptions.RequestEntityTooLarge: if the file is too large.
+    """
+
+    roles = current_app.config['ROLES_FOR_UNLIMITED_UPLOADS']
+    if user_matches_roles(require_roles=roles):
+        return
+
+    filesize_limit = current_app.config['FILESIZE_LIMIT_BYTES_NONSUBS']
+    if file_size < filesize_limit:
+        return
+
+    filesize_limit_mb = filesize_limit / 2.0 ** 20
+    log.info('User %s tried to upload a %.3f MiB file, but is only allowed %.3f MiB.',
+             authentication.current_user_id(), file_size / 2.0 ** 20, filesize_limit_mb)
+    raise wz_exceptions.RequestEntityTooLarge(
+        'To upload files larger than %i MiB, subscribe to Blender Cloud' % filesize_limit_mb)
+
+
 @file_storage.route('/stream/<string:project_id>', methods=['POST', 'OPTIONS'])
 @require_login()
 def stream_to_gcs(project_id):
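
The gist of the new check: users with one of the unlimited-upload roles bypass the limit entirely; everyone else is capped at the configured byte count. A minimal standalone sketch of the same gate, with hypothetical stand-ins for the config values instead of Pillar's current_app.config:

ROLES_FOR_UNLIMITED_UPLOADS = {u'subscriber', u'demo', u'admin'}  # stand-in config
FILESIZE_LIMIT_BYTES_NONSUBS = 32 * 2 ** 20  # stand-in config, 32 MiB

def upload_size_allowed(user_roles, file_size):
    """Returns whether a file of the given size may be uploaded."""
    if user_roles & ROLES_FOR_UNLIMITED_UPLOADS:
        return True  # privileged roles skip the size check entirely
    return file_size < FILESIZE_LIMIT_BYTES_NONSUBS

assert upload_size_allowed({u'subscriber'}, 100 * 2 ** 20)  # subscribers: no cap
assert upload_size_allowed(set(), 10 * 2 ** 20)             # 10 MiB is under the cap
assert not upload_size_allowed(set(), 33 * 2 ** 20)         # 33 MiB is over the cap
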
@@ -544,13 +565,18 @@ def stream_to_gcs(project_id):
              authentication.current_user_id())

    uploaded_file = request.files['file']

+    # Not every upload has a Content-Length header. If it was passed, we might as
+    # well check its value before we require the user to upload the entire file.
+    # (At least I hope that this part of the code is processed before the body is
+    # read in its entirety.)
+    if uploaded_file.content_length:
+        assert_file_size_allowed(uploaded_file.content_length)
+
     override_content_type(uploaded_file)
     if not uploaded_file.content_type:
         log.warning('File uploaded to project %s without content type.', project_oid)
         raise wz_exceptions.BadRequest('Missing content type.')

-    file_id, internal_fname, status = create_file_doc_for_upload(project_oid, uploaded_file)
-
     if uploaded_file.content_type.startswith('image/'):
         # We need to do local thumbnailing, so we have to write the stream
         # both to Google Cloud Storage and to local storage.
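
The Content-Length check above is only an optimization: werkzeug's FileStorage.content_length is 0 when the client sent no such header, so the definitive check still has to happen after the body has been read. A minimal Flask sketch of the same early-rejection idea (hypothetical /upload route, not the Pillar endpoint):

from flask import Flask, request
import werkzeug.exceptions as wz_exceptions

app = Flask(__name__)
FILESIZE_LIMIT = 32 * 2 ** 20  # hypothetical 32 MiB cap

@app.route('/upload', methods=['POST'])
def upload():
    # request.content_length is None when the client omitted the header;
    # in that case the size can only be verified after reading the body.
    if request.content_length and request.content_length > FILESIZE_LIMIT:
        raise wz_exceptions.RequestEntityTooLarge('File too large.')
    body = request.get_data()  # the request body is actually read here
    return 'received %d bytes' % len(body), 201
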
@@ -570,6 +596,12 @@ def stream_to_gcs(project_id):
     else:
         file_size = os.fstat(stream_for_gcs.fileno()).st_size

+    # Check the file size again, now that we know it for sure.
+    assert_file_size_allowed(file_size)
+
+    # Create file document in MongoDB.
+    file_id, internal_fname, status = create_file_doc_for_upload(project_oid, uploaded_file)
+
     if current_app.config['TESTING']:
         log.warning('NOT streaming to GCS because TESTING=%r', current_app.config['TESTING'])
         # Fake a Blob object.
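
This second call closes the gap left by the optional header: once the stream has been written to a local or temporary file, os.fstat() on its file descriptor reports the true size regardless of what the client claimed. Moving create_file_doc_for_upload() to after this check also means no file document is created in MongoDB for uploads that end up rejected. A small illustration of the fstat pattern, independent of the Pillar code:

import os
import tempfile

with tempfile.NamedTemporaryFile() as local_file:
    local_file.write(b'x' * 1024)
    local_file.flush()
    # fstat reports the number of bytes actually written, unlike the
    # client-supplied Content-Length header.
    assert os.fstat(local_file.fileno()).st_size == 1024
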

View File

@@ -101,3 +101,8 @@ LOGGING = {

 SHORT_LINK_BASE_URL = 'https://blender.cloud/r/'
 SHORT_CODE_LENGTH = 6  # characters
+
+# People are allowed this many bytes per uploaded file.
+FILESIZE_LIMIT_BYTES_NONSUBS = 32 * 2 ** 20
+# Unless they have one of these roles.
+ROLES_FOR_UNLIMITED_UPLOADS = {u'subscriber', u'demo', u'admin'}
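
For reference, the arithmetic behind the default limit (plain Python, merely spelling out the constant):

assert 32 * 2 ** 20 == 33554432       # 32 MiB expressed in bytes
assert 33554432 / 2.0 ** 20 == 32.0   # back to MiB, as in the log message
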

View File

@@ -6,3 +6,6 @@ DEBUG = False
 TESTING = True
 CDN_STORAGE_USER = 'u41508580125621'
+
+FILESIZE_LIMIT_BYTES_NONSUBS = 20 * 2 ** 10
+ROLES_FOR_UNLIMITED_UPLOADS = {u'subscriber', u'demo', u'admin'}
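
The testing override is tiny, presumably so the test suite does not have to generate megabytes of random data: 20 KiB sits between the two upload sizes used by the tests below.

TEST_LIMIT = 20 * 2 ** 10          # the testing limit from this config
assert 10 * 2 ** 10 < TEST_LIMIT   # the 'small file' upload passes (201)
assert 30 * 2 ** 10 > TEST_LIMIT   # the 'too large' upload is rejected (413)
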

View File

@@ -1,11 +1,13 @@
 import json
+import io
 import os
 import tempfile

+import rsa.randnum
 from werkzeug.datastructures import FileStorage

-from common_test_class import AbstractPillarTest
+from common_test_class import AbstractPillarTest, TEST_EMAIL_ADDRESS
 import common_test_data as ctd


 class FileStorageTest(AbstractPillarTest):
@@ -172,3 +174,63 @@ class FileAccessTest(AbstractPillarTest):
         self.assertIsNone(file_info['variations'])
         file_info = assert_variations(blend_file_id, True, 'admin-token')
         self.assertIsNone(file_info['variations'])
+
+
+class FileMaxSizeTest(AbstractPillarTest):
+    def setUp(self, **kwargs):
+        AbstractPillarTest.setUp(self, **kwargs)
+
+        self.project_id, _ = self.ensure_project_exists()
+        self.user_id = self.create_user(groups=[ctd.EXAMPLE_ADMIN_GROUP_ID],
+                                        roles=set())
+        self.create_valid_auth_token(self.user_id, 'token')
+
+    def test_upload_small_file(self):
+        file_size = 10 * 2 ** 10
+        test_file = self.create_test_file(file_size)
+
+        resp = self.post('/storage/stream/%s' % self.project_id,
+                         expected_status=201,
+                         auth_token='token',
+                         files={'file': (test_file, 'test_file.bin')})
+        stream_info = resp.json()
+        file_id = stream_info['file_id']
+
+        self.assert_file_doc_ok(file_id, file_size)
+
+    def test_upload_too_large_file(self):
+        file_size = 30 * 2 ** 10
+        test_file = self.create_test_file(file_size)
+
+        self.post('/storage/stream/%s' % self.project_id,
+                  expected_status=413,
+                  auth_token='token',
+                  files={'file': (test_file, 'test_file.bin')})
+
+    def test_upload_large_file_subscriber(self):
+        self.badger(TEST_EMAIL_ADDRESS, 'subscriber', 'grant')
+
+        file_size = 30 * 2 ** 10
+        test_file = self.create_test_file(file_size)
+
+        resp = self.post('/storage/stream/%s' % self.project_id,
+                         expected_status=201,
+                         auth_token='token',
+                         files={'file': (test_file, 'test_file.bin')})
+        stream_info = resp.json()
+        file_id = stream_info['file_id']
+
+        self.assert_file_doc_ok(file_id, file_size)
+
+    def assert_file_doc_ok(self, file_id, file_size):
+        with self.app.test_request_context():
+            from application.utils import str2id
+
+            # Check that the file exists in MongoDB.
+            files_coll = self.app.data.driver.db['files']
+            db_file = files_coll.find_one({'_id': str2id(file_id)})
+            self.assertEqual(file_size, db_file['length'])
+
+    def create_test_file(self, file_size_bytes):
+        fileob = io.BytesIO(rsa.randnum.read_random_bits(file_size_bytes * 8))
+        return fileob
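
A note on create_test_file(): rsa.randnum.read_random_bits() takes a bit count, hence the * 8 to obtain file_size_bytes of random data. Assuming the tests only need arbitrary bytes, the standard library offers an equivalent without the rsa dependency (a sketch, not part of the commit):

import io
import os

def create_test_file(file_size_bytes):
    # os.urandom yields the requested number of random bytes directly.
    return io.BytesIO(os.urandom(file_size_bytes))
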