File upload: Removed JS-side file size check.
Instead, the size of the entire HTTP request body is checked against the maximum file size. Since the request body is always a little larger than the file itself, the effective limit shrinks slightly (on the order of 200-300 bytes), which shouldn't be noticeable given our 32 MiB limit for non-subscribers. The check is performed before accessing request.files[], and thus before the file even starts uploading.

This also allows unlimited file uploads for subscribers and demo users. That was already possible through the API, so the web interface is now consistent with it. Limits can be configured in config[_local].py.

This closes T49264: Allow large uploads for admins.
parent 52cc61b143
commit 486686f1f9
@@ -639,6 +639,14 @@ def stream_to_storage(project_id):
     log.info('Streaming file to bucket for project=%s user_id=%s', project_id,
              authentication.current_user_id())
     log.info('request.headers[Origin] = %r', request.headers.get('Origin'))
+    log.info('request.content_length = %r', request.content_length)
+
+    # Try a check for the content length before we access request.files[]. This allows us
+    # to abort the upload early. The entire body content length is always a bit larger than
+    # the actual file size, so if we accept here, we're sure it'll be accepted in subsequent
+    # checks as well.
+    if request.content_length:
+        assert_file_size_allowed(request.content_length)
 
     uploaded_file = request.files['file']
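Note: the new check calls assert_file_size_allowed(), which lives elsewhere in the codebase and is not part of this diff. Below is a rough, hypothetical sketch of what such a helper could look like; the config key FILESIZE_LIMIT_BYTES_NONSUBS, the role names, and the role lookup are assumptions drawn from the commit message, not the actual implementation.

# Hypothetical sketch only; not the helper used by this commit.
from flask import current_app
from werkzeug import exceptions as wz_exceptions


def _current_user_roles():
    # Placeholder: the real code would look up the authenticated user's roles.
    return set()


def assert_file_size_allowed(file_size):
    """Abort the request with a 413 when the size exceeds the uploader's limit."""

    # Per the commit message, subscribers and demo users have no upload limit.
    if {'subscriber', 'demo'} & _current_user_roles():
        return

    # Everybody else gets the configurable limit; 32 MiB is used as a fallback here.
    limit = current_app.config.get('FILESIZE_LIMIT_BYTES_NONSUBS', 32 * 2 ** 20)
    if file_size > limit:
        raise wz_exceptions.RequestEntityTooLarge(
            'Max file size is %d bytes, request body is %d bytes.' % (limit, file_size))

Since werkzeug's RequestEntityTooLarge maps to HTTP 413, an oversized request can be rejected from request.content_length alone, before any of the file data is read.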
@@ -70,10 +70,6 @@ function setup_file_uploader(index, upload_element) {
         if (data.originalFiles[0]['type'].length && !acceptFileTypes.test(data.originalFiles[0]['type'])) {
             uploadErrors.push('Not an accepted file type');
         }
-        // Limit upload size to 1GB
-        if (data.originalFiles[0]['size'] && data.originalFiles[0]['size'] > 1262485504) {
-            uploadErrors.push('Filesize is too big');
-        }
         if (uploadErrors.length > 0) {
             $(this).parent().parent().addClass('error');
             $(this).after(uploadErrors.join("\n"));
@@ -118,11 +114,21 @@ function setup_file_uploader(index, upload_element) {
 
             $('body').trigger('file-upload:finished');
         },
-        fail: function (jqXHR, textStatus, errorThrown) {
+        fail: function (jqXHR, fileupload) {
             if (console) {
-                console.log(textStatus, 'Upload error: ' + errorThrown);
+                console.log('Upload error:');
+                console.log('jqXHR', jqXHR);
+                console.log('fileupload', fileupload);
             }
-            statusBarSet(textStatus, 'Upload error: ' + errorThrown, 'pi-attention', 8000);
+
+            var uploadErrors = [];
+            for (var key in fileupload.messages) {
+                uploadErrors.push(fileupload.messages[key]);
+            }
+
+            statusBarSet('error',
+                         'Upload error: ' + uploadErrors.join("; "),
+                         'pi-attention', 16000);
             set_progress_bar(100, 'progress-error');