Fio 7239: Support for AWS S3 multipart uploads (4.18.x) (#5376)
* adapt multipart upload feature to 4.18.x

* send token with requests that need it; improve error handling

* add logging for error during tests

* add abortcontroller polyfill

* try to lazily require abortcontroller polyfill
brendanbond authored Oct 16, 2023
1 parent 22b3b30 commit e833890
Showing 10 changed files with 373 additions and 109 deletions.
3 changes: 2 additions & 1 deletion package.json
@@ -63,6 +63,7 @@
"@formio/semantic": "2.6.1",
"@formio/text-mask-addons": "^3.8.0-formio.2",
"@formio/vanilla-text-mask": "^5.1.1-formio.1",
"abortcontroller-polyfill": "^1.7.5",
"autocompleter": "^7.0.1",
"browser-cookies": "^1.2.0",
"browser-md5-file": "^1.1.1",
@@ -105,8 +106,8 @@
"@babel/polyfill": "^7.12.1",
"@babel/preset-env": "^7.20.2",
"@babel/register": "^7.17.7",
"async-limiter": "^2.0.0",
"ace-builds": "1.23.4",
"async-limiter": "^2.0.0",
"babel-loader": "^9.1.0",
"bootstrap": "^4.6.0",
"bootswatch": "^4.6.0",
4 changes: 2 additions & 2 deletions src/Formio.js
@@ -585,7 +585,7 @@ class Formio {
});
}

uploadFile(storage, file, fileName, dir, progressCallback, url, options, fileKey, groupPermissions, groupId, uploadStartCallback, abortCallback) {
uploadFile(storage, file, fileName, dir, progressCallback, url, options, fileKey, groupPermissions, groupId, uploadStartCallback, abortCallback, multipartOptions) {
const requestArgs = {
provider: storage,
method: 'upload',
@@ -605,7 +605,7 @@
if (uploadStartCallback) {
uploadStartCallback();
}
return provider.uploadFile(file, fileName, dir, progressCallback, url, options, fileKey, groupPermissions, groupId, abortCallback);
return provider.uploadFile(file, fileName, dir, progressCallback, url, options, fileKey, groupPermissions, groupId, abortCallback, multipartOptions);
}
else {
throw ('Storage provider not found');
17 changes: 17 additions & 0 deletions src/components/file/File.js
@@ -771,6 +771,22 @@ export default class FileComponent extends Field {
}
}

let count = 0;
const multipartOptions = this.component.useMultipartUpload && this.component.multipart ? {
...this.component.multipart,
progressCallback: (total) => {
count++;
fileUpload.status = 'progress';
fileUpload.progress = parseInt(100 * count / total);
delete fileUpload.message;
this.redraw();
},
changeMessage: (message) => {
fileUpload.message = message;
this.redraw();
},
} : false;

fileUpload.message = this.t('Starting upload.');
this.redraw();

@@ -797,6 +813,7 @@
},
// Abort upload callback
(abort) => this.abortUpload = abort,
multipartOptions
).then((fileInfo) => {
const index = this.statuses.indexOf(fileUpload);
if (index !== -1) {
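For reference, the `multipartOptions` object assembled above is the contract handed through `Formio.uploadFile` down to the storage provider. A minimal sketch of its shape (a standalone illustration; the keys mirror the component code above, and `false` disables multipart entirely):

// Sketch of the multipartOptions contract passed to the provider (illustrative).
const multipartOptions = {
  partSize: 500,                 // MB per part, from the component's multipart settings
  progressCallback: (total) => { // invoked once per completed part; the component
    // tracks a running count and renders 100 * count / total
  },
  changeMessage: (message) => {  // provider status text surfaced in the file list UI
  },
};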
40 changes: 40 additions & 0 deletions src/components/file/editForm/File.edit.file.js
@@ -21,6 +21,46 @@ export default [
}
}
},
{
type: 'checkbox',
input: true,
key: 'useMultipartUpload',
label: 'Use the S3 Multipart Upload API',
tooltip: "The <a href='https://docs.aws.amazon.com/AmazonS3/latest/userguide/mpuoverview.html'>S3 Multipart Upload API</a> is designed to improve the upload experience for larger objects (> 5GB).",
conditional: {
json: { '===': [{ var: 'data.storage' }, 's3'] }
},
},
{
label: 'Multipart Upload',
tableView: false,
key: 'multipart',
type: 'container',
input: true,
components: [
{
label: 'Part Size (MB)',
applyMaskOn: 'change',
mask: false,
tableView: false,
delimiter: false,
requireDecimal: false,
inputFormat: 'plain',
truncateMultipleSpaces: false,
validate: {
min: 5,
max: 5000,
},
key: 'partSize',
type: 'number',
input: true,
defaultValue: 500,
},
],
conditional: {
json: { '===': [{ var: 'data.useMultipartUpload' }, true] }
},
},
{
type: 'textfield',
input: true,
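Taken together, checking the box and filling in the container above yields component JSON along these lines (an illustrative sketch; only the multipart-related keys are shown):

{
  "type": "file",
  "key": "file",
  "storage": "s3",
  "useMultipartUpload": true,
  "multipart": {
    "partSize": 500
  }
}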
174 changes: 143 additions & 31 deletions src/providers/storage/s3.js
@@ -1,27 +1,75 @@
import NativePromise from 'native-promise-only';

import XHR from './xhr';
const s3 = (formio) => ({
uploadFile(file, fileName, dir, progressCallback, url, options, fileKey, groupPermissions, groupId, abortCallback) {
return XHR.upload(formio, 's3', (xhr, response) => {
response.data.fileName = fileName;
response.data.key = XHR.path([response.data.key, dir, fileName]);
if (response.signed) {
xhr.openAndSetHeaders('PUT', response.signed);
Object.keys(response.data.headers).forEach(key => {
xhr.setRequestHeader(key, response.data.headers[key]);
});
return file;
}
else {
const fd = new FormData();
for (const key in response.data) {
fd.append(key, response.data[key]);
import { withRetries } from './util';

const AbortController = window.AbortController || require('abortcontroller-polyfill/dist/cjs-ponyfill');
function s3(formio) {
return {
async uploadFile(file, fileName, dir, progressCallback, url, options, fileKey, groupPermissions, groupId, abortCallback, multipartOptions) {
const xhrCallback = async(xhr, response, abortCallback) => {
response.data.fileName = fileName;
response.data.key = XHR.path([response.data.key, dir, fileName]);
if (response.signed) {
if (multipartOptions && Array.isArray(response.signed)) {
// patch abort callback
const abortController = new AbortController();
const abortSignal = abortController.signal;
if (typeof abortCallback === 'function') {
abortCallback(() => abortController.abort());
}
try {
const parts = await this.uploadParts(
file,
response.signed,
response.data.headers,
response.partSizeActual,
multipartOptions,
abortSignal
);
await withRetries(this.completeMultipartUpload, [response, parts, multipartOptions], 3);
return;
}
catch (err) {
// abort in-progress fetch requests
abortController.abort();
// attempt to cancel the multipart upload
this.abortMultipartUpload(response);
throw err;
}
}
else {
xhr.openAndSetHeaders('PUT', response.signed);
xhr.setRequestHeader('Content-Type', file.type);
Object.keys(response.data.headers).forEach((key) => {
xhr.setRequestHeader(key, response.data.headers[key]);
});
return file;
}
}
fd.append('file', file);
xhr.openAndSetHeaders('POST', response.url);
return fd;
}
}, file, fileName, dir, progressCallback, groupPermissions, groupId, abortCallback).then((response) => {
else {
const fd = new FormData();
for (const key in response.data) {
fd.append(key, response.data[key]);
}
fd.append('file', file);
xhr.openAndSetHeaders('POST', response.url);
return fd;
}
};
const response = await XHR.upload(
formio,
's3',
xhrCallback,
file,
fileName,
dir,
progressCallback,
groupPermissions,
groupId,
abortCallback,
multipartOptions
);
return {
storage: 's3',
name: fileName,
@@ -32,17 +80,81 @@ const s3 = (formio) => ({
size: file.size,
type: file.type
};
});
},
downloadFile(file) {
if (file.acl !== 'public-read') {
return formio.makeRequest('file', `${formio.formUrl}/storage/s3?bucket=${XHR.trim(file.bucket)}&key=${XHR.trim(file.key)}`, 'GET');
}
else {
return NativePromise.resolve(file);
},
async completeMultipartUpload(serverResponse, parts, multipart) {
const { changeMessage } = multipart;
const token = formio.getToken();
changeMessage('Completing AWS S3 multipart upload...');
const response = await fetch(`${formio.formUrl}/storage/s3/multipart/complete`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(token ? { 'x-jwt-token': token } : {}),
},
body: JSON.stringify({ parts, uploadId: serverResponse.uploadId, key: serverResponse.key })
});
const message = await response.text();
if (!response.ok) {
throw new Error(message || response.statusText);
}
// the AWS S3 SDK CompleteMultipartUpload command can return an HTTP 200 status but still error;
// we need to parse the response body and, according to AWS, retry
if (message?.match(/Error/)) {
throw new Error(message);
}
},
abortMultipartUpload(serverResponse) {
const { uploadId, key } = serverResponse;
const token = formio.getToken();
fetch(`${formio.formUrl}/storage/s3/multipart/abort`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(token ? { 'x-jwt-token': token } : {}),
},
body: JSON.stringify({ uploadId, key })
}).catch((err) => console.error('Error while aborting multipart upload:', err));
},
uploadParts(file, urls, headers, partSize, multipart, abortSignal) {
const { changeMessage, progressCallback } = multipart;
changeMessage('Chunking and uploading parts to AWS S3...');
const promises = [];
for (let i = 0; i < urls.length; i++) {
const start = i * partSize;
const end = (i + 1) * partSize;
const blob = i < urls.length - 1 ? file.slice(start, end) : file.slice(start);
const promise = fetch(urls[i], {
method: 'PUT',
headers,
body: blob,
signal: abortSignal,
}).then((res) => {
if (res.ok) {
progressCallback(urls.length);
const eTag = res.headers.get('etag');
if (!eTag) {
throw new Error('ETag header not found; it must be exposed in S3 bucket CORS settings');
}
return { ETag: eTag, PartNumber: i + 1 };
}
else {
throw new Error(`Part no ${i} failed with status ${res.status}`);
}
});
promises.push(promise);
}
return NativePromise.all(promises);
},
downloadFile(file) {
if (file.acl !== 'public-read') {
return formio.makeRequest('file', `${formio.formUrl}/storage/s3?bucket=${XHR.trim(file.bucket)}&key=${XHR.trim(file.key)}`, 'GET');
}
else {
return NativePromise.resolve(file);
}
}
}
});
};
}

s3.title = 'S3';
export default s3;
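For the multipart path, `uploadFile` expects the signing endpoint to return an array of presigned part URLs plus the bookkeeping fields consumed by `uploadParts`, `completeMultipartUpload`, and `abortMultipartUpload`. A sketch of that response, modeled on the fetch-mock fixture in the unit tests below (field names come from the provider code and the test; values are made up):

// Illustrative /storage/s3 response for a multipart upload.
{
  "signed": [                       // one presigned PUT URL per part
    "https://bucket.s3.amazonaws.com/key?partNumber=1",
    "https://bucket.s3.amazonaws.com/key?partNumber=2"
  ],
  "uploadId": "example-upload-id",  // echoed back to /multipart/complete and /multipart/abort
  "key": "dir/file.jpg",
  "partSizeActual": 5242880,        // bytes per file.slice() chunk in uploadParts
  "url": "https://bucket.s3.amazonaws.com",
  "data": { "headers": {} }         // headers applied to each part PUT
}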
79 changes: 79 additions & 0 deletions src/providers/storage/s3.unit.js
@@ -0,0 +1,79 @@
import assert from 'assert';
import sinon from 'sinon';
import fetchMock from 'fetch-mock';

import S3 from './s3';
import { withRetries } from './util';

describe('S3 Provider', () => {
describe('Function Unit Tests', () => {
it('withRetries should retry a given function three times, then throw the provided error', (done) => {
function sleepAndReject(ms) {
return new Promise((_, reject) => setTimeout(reject, ms));
}

const spy = sinon.spy(sleepAndReject);
withRetries(spy, [200], 3, 'Custom error message').catch((err) => {
assert.equal(err.message, 'Custom error message');
assert.equal(spy.callCount, 3);
done();
});
});
});

describe('Provider Integration Tests', () => {
describe('AWS S3 Multipart Uploads', () => {
before('Mocks fetch', () => {
fetchMock
.post('https://fakeproject.form.io/fakeform/storage/s3', {
signed: new Array(5).fill('https://fakebucketurl.aws.com/signed'),
minio: false,
url: 'https://fakebucketurl.aws.com',
bucket: 'fakebucket',
uploadId: 'fakeuploadid',
key: 'test.jpg',
partSizeActual: 1,
data: {}
})
.put('https://fakebucketurl.aws.com/signed', { status: 200, headers: { 'Etag': 'fakeetag' } })
.post('https://fakeproject.form.io/fakeform/storage/s3/multipart/complete', 200)
.post('https://fakeproject.form.io/fakeform/storage/s3/multipart/abort', 200);
});
it('Given an array of signed urls it should upload a file to S3 using multipart upload', (done) => {
const mockFormio = {
formUrl: 'https://fakeproject.form.io/fakeform',
getToken: () => {}
};
const s3 = new S3(mockFormio);
const uploadSpy = sinon.spy(s3, 'uploadParts');
const completeSpy = sinon.spy(s3, 'completeMultipartUpload');

const mockFile = new File(['test!'], 'test.jpg', { type: 'image/jpeg' });
s3.uploadFile(
mockFile,
'test.jpg',
'',
() => {},
'',
{},
'test.jpg',
{},
'',
() => {},
{ partSize: 1, changeMessage: () => {}, progressCallback: () => {} }
).then((response) => {
assert.equal(response.storage, 's3');
assert.equal(response.name, 'test.jpg');
assert.equal(response.bucket, 'fakebucket');
assert.equal(response.url, 'https://fakebucketurl.aws.com/test.jpg');
assert.equal(response.acl, undefined);
assert.equal(response.size, 5);
assert.equal(response.type, 'image/jpeg');
assert.equal(uploadSpy.callCount, 1);
assert.equal(completeSpy.callCount, 1);
done();
});
});
});
});
});
6 changes: 6 additions & 0 deletions src/providers/storage/util.js
@@ -0,0 +1,6 @@
export async function withRetries(fn, args, retries = 3, err = null) {
if (!retries) {
throw new Error(err);
}
return fn(...args).catch(() => withRetries(fn, args, retries - 1, err));
}
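`withRetries` is what gives `completeMultipartUpload` its three attempts in s3.js. A usage sketch (the endpoint below is hypothetical):

// Retries fn(...args) until it resolves, up to `retries` attempts, then throws
// the supplied error message — mirroring the s3.js call
// withRetries(this.completeMultipartUpload, [response, parts, multipartOptions], 3).
withRetries(fetch, ['https://example.com/health'], 3, 'Health check failed')
  .then((res) => console.log('succeeded with status', res.status))
  .catch((err) => console.error(err.message)); // 'Health check failed' after 3 rejections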