diff --git a/package.json b/package.json
index 1910d7d4d..3cb977495 100644
--- a/package.json
+++ b/package.json
@@ -63,7 +63,9 @@
 		"typed-error": "^3.2.2"
 	},
 	"devDependencies": {
+		"@aws-sdk/client-s3": "^3.556.0",
 		"@balena/lint": "^8.0.0",
+		"@balena/pinejs-webresource-s3": "^0.2.0",
 		"@faker-js/faker": "^8.3.1",
 		"@types/busboy": "^1.5.3",
 		"@types/chai": "^4.3.11",
@@ -106,9 +108,6 @@
 		"webpack-dev-server": "^4.15.1"
 	},
 	"optionalDependencies": {
-		"@aws-sdk/client-s3": "^3.490.0",
-		"@aws-sdk/lib-storage": "^3.490.0",
-		"@aws-sdk/s3-request-presigner": "^3.490.0",
 		"bcrypt": "^5.1.1",
 		"body-parser": "^1.20.2",
 		"compression": "^1.7.4",
@@ -123,7 +122,7 @@
 		"serve-static": "^1.15.0"
 	},
 	"engines": {
-		"node": ">=16.13.0",
+		"node": ">=18.18.0",
 		"npm": ">=8.0.0"
 	},
 	"lint-staged": {
diff --git a/src/server-glue/module.ts b/src/server-glue/module.ts
index fcf8b1eed..fd85079c6 100644
--- a/src/server-glue/module.ts
+++ b/src/server-glue/module.ts
@@ -5,6 +5,7 @@ import './sbvr-loader';
 import * as dbModule from '../database-layer/db';
 import * as configLoader from '../config-loader/config-loader';
 import * as migrator from '../migrator/sync';
+import * as webResourceHandler from '../webresource-handler';
 import type * as migratorUtils from '../migrator/utils';
 
 import * as sbvrUtils from '../sbvr-api/sbvr-utils';
@@ -63,6 +64,7 @@ export const init = async (
 	await sbvrUtils.setup(app, db);
 	const cfgLoader = await configLoader.setup(app);
 	await cfgLoader.loadConfig(migrator.config);
+	await cfgLoader.loadConfig(webResourceHandler.config);
 
 	const promises: Array<Promise<void>> = [];
 	if (process.env.SBVR_SERVER_ENABLED) {
diff --git a/src/webresource-handler/handlers/NoopHandler.ts b/src/webresource-handler/handlers/NoopHandler.ts
index 21f286fda..a7e85476a 100644
--- a/src/webresource-handler/handlers/NoopHandler.ts
+++ b/src/webresource-handler/handlers/NoopHandler.ts
@@ -1,5 +1,10 @@
 import type { WebResourceType as WebResource } from '@balena/sbvr-types';
-import type { IncomingFile, UploadResponse, WebResourceHandler } from '..';
+import type {
+	BeginMultipartUploadHandlerResponse,
+	IncomingFile,
+	UploadResponse,
+	WebResourceHandler,
+} from '..';
 
 export class NoopHandler implements WebResourceHandler {
 	public async handleFile(resource: IncomingFile): Promise<UploadResponse> {
@@ -18,4 +23,12 @@
 	public async onPreRespond(webResource: WebResource): Promise<WebResource> {
 		return webResource;
 	}
+
+	public async beginMultipartUpload(): Promise<BeginMultipartUploadHandlerResponse> {
+		return { fileKey: 'noop', uploadId: 'noop', uploadParts: [] };
+	}
+
+	public async commitMultipartUpload(): Promise<WebResource> {
+		return { filename: 'noop', href: 'noop' };
+	}
 }
diff --git a/src/webresource-handler/handlers/S3Handler.ts b/src/webresource-handler/handlers/S3Handler.ts
deleted file mode 100644
index 222e4423a..000000000
--- a/src/webresource-handler/handlers/S3Handler.ts
+++ /dev/null
@@ -1,143 +0,0 @@
-import {
-	FileSizeExceededError,
-	type IncomingFile,
-	normalizeHref,
-	type UploadResponse,
-	WebResourceError,
-	type WebResourceHandler,
-} from '..';
-import {
-	S3Client,
-	type S3ClientConfig,
-	DeleteObjectCommand,
-	type PutObjectCommandInput,
-	GetObjectCommand,
-} from '@aws-sdk/client-s3';
-import { Upload } from '@aws-sdk/lib-storage';
-import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
-
-import { randomUUID } from 'crypto';
-import type { WebResourceType as WebResource } from '@balena/sbvr-types';
-import memoize from 'memoizee';
-
-export interface S3HandlerProps {
-	region: string;
-	accessKey: string;
-	secretKey: string;
-	endpoint: string;
-	bucket: string;
-	maxSize?: number;
-	signedUrlExpireTimeSeconds?: number;
-	signedUrlCacheExpireTimeSeconds?: number;
-}
-
-export class S3Handler implements WebResourceHandler {
-	private readonly config: S3ClientConfig;
-	private readonly bucket: string;
-	private readonly maxFileSize: number;
-
-	protected readonly signedUrlExpireTimeSeconds: number;
-	protected readonly signedUrlCacheExpireTimeSeconds: number;
-	protected cachedGetSignedUrl: (fileKey: string) => Promise<string>;
-
-	private client: S3Client;
-
-	constructor(config: S3HandlerProps) {
-		this.config = {
-			region: config.region,
-			credentials: {
-				accessKeyId: config.accessKey,
-				secretAccessKey: config.secretKey,
-			},
-			endpoint: config.endpoint,
-			forcePathStyle: true,
-		};
-
-		this.signedUrlExpireTimeSeconds =
-			config.signedUrlExpireTimeSeconds ?? 86400; // 24h
-		this.signedUrlCacheExpireTimeSeconds =
-			config.signedUrlCacheExpireTimeSeconds ?? 82800; // 22h
-
-		this.maxFileSize = config.maxSize ?? 52428800;
-		this.bucket = config.bucket;
-		this.client = new S3Client(this.config);
-
-		// Memoize expects maxAge in MS and s3 signing method in seconds.
-		// Normalization to use only seconds and therefore convert here from seconds to MS
-		this.cachedGetSignedUrl = memoize(this.s3SignUrl, {
-			maxAge: this.signedUrlCacheExpireTimeSeconds * 1000,
-		});
-	}
-
-	public async handleFile(resource: IncomingFile): Promise<UploadResponse> {
-		let size = 0;
-		const key = `${resource.fieldname}_${randomUUID()}_${
-			resource.originalname
-		}`;
-		const params: PutObjectCommandInput = {
-			Bucket: this.bucket,
-			Key: key,
-			Body: resource.stream,
-			ContentType: resource.mimetype,
-		};
-		const upload = new Upload({ client: this.client, params });
-
-		upload.on('httpUploadProgress', async (ev) => {
-			size = ev.total ?? ev.loaded!;
-			if (size > this.maxFileSize) {
-				await upload.abort();
-			}
-		});
-
-		try {
-			await upload.done();
-		} catch (err: any) {
-			resource.stream.resume();
-			if (size > this.maxFileSize) {
-				throw new FileSizeExceededError(this.maxFileSize);
-			}
-			throw new WebResourceError(err);
-		}
-
-		const filename = this.getS3URL(key);
-		return { size, filename };
-	}
-
-	public async removeFile(href: string): Promise<void> {
-		const fileKey = this.getKeyFromHref(href);
-
-		const command = new DeleteObjectCommand({
-			Bucket: this.bucket,
-			Key: fileKey,
-		});
-
-		await this.client.send(command);
-	}
-
-	public async onPreRespond(webResource: WebResource): Promise<WebResource> {
-		if (webResource.href != null) {
-			const fileKey = this.getKeyFromHref(webResource.href);
-			webResource.href = await this.cachedGetSignedUrl(fileKey);
-		}
-		return webResource;
-	}
-
-	private s3SignUrl(fileKey: string): Promise<string> {
-		const command = new GetObjectCommand({
-			Bucket: this.bucket,
-			Key: fileKey,
-		});
-		return getSignedUrl(this.client, command, {
-			expiresIn: this.signedUrlExpireTimeSeconds,
-		});
-	}
-
-	private getS3URL(key: string): string {
-		return `${this.config.endpoint}/${this.bucket}/${key}`;
-	}
-
-	private getKeyFromHref(href: string): string {
-		const hrefWithoutParams = normalizeHref(href);
-		return hrefWithoutParams.substring(hrefWithoutParams.lastIndexOf('/') + 1);
-	}
-}
diff --git a/src/webresource-handler/handlers/index.ts b/src/webresource-handler/handlers/index.ts
index 39158864a..554786946 100644
--- a/src/webresource-handler/handlers/index.ts
+++ b/src/webresource-handler/handlers/index.ts
@@ -1,2 +1 @@
 export * from './NoopHandler';
-export * from './S3Handler';
diff --git a/src/webresource-handler/index.ts b/src/webresource-handler/index.ts
index bd0fc5972..7228a04d0 100644
--- a/src/webresource-handler/index.ts
+++ b/src/webresource-handler/index.ts
@@ -13,7 +13,8 @@ import {
 } from '@balena/odata-to-abstract-sql';
 import { errors, permissions } from '../server-glue/module';
 import type { WebResourceType as WebResource } from '@balena/sbvr-types';
-import { TypedError } from 'typed-error';
+import type { AnyObject } from 'pinejs-client-core';
+import { multipartUploadHooks } from './multipartUpload';
 
 export * from './handlers';
 
@@ -30,19 +31,44 @@ export interface UploadResponse {
 	filename: string;
 }
 
+export interface BeginMultipartUploadPayload {
+	filename: string;
+	content_type: string;
+	size: number;
+	chunk_size: number;
+}
+
+export interface UploadPart {
+	url: string;
+	chunkSize: number;
+	partNumber: number;
+}
+
+export interface BeginMultipartUploadHandlerResponse {
+	uploadParts: UploadPart[];
+	fileKey: string;
+	uploadId: string;
+}
+
+export interface CommitMultipartUploadPayload {
+	fileKey: string;
+	uploadId: string;
+	filename: string;
+	providerCommitData?: AnyObject;
+}
+
 export interface WebResourceHandler {
 	handleFile: (resource: IncomingFile) => Promise<UploadResponse>;
 	removeFile: (fileReference: string) => Promise<void>;
 	onPreRespond: (webResource: WebResource) => Promise<WebResource>;
-}
-
-export class WebResourceError extends TypedError {}
-export class FileSizeExceededError extends WebResourceError {
-	name = 'FileSizeExceededError';
-	constructor(maxSize: number) {
-		super(`File size exceeded the limit of ${maxSize} bytes.`);
-	}
+	beginMultipartUpload: (
+		fieldName: string,
+		payload: BeginMultipartUploadPayload,
+	) => Promise<BeginMultipartUploadHandlerResponse>;
+	commitMultipartUpload: (
+		commitInfo: CommitMultipartUploadPayload,
+	) => Promise<WebResource>;
 }
 
 type WebResourcesDbResponse = {
@@ -193,17 +219,12 @@ export const getUploaderMiddlware = (
 			next();
 		} catch (err: any) {
 			await clearFiles();
-
-			if (err instanceof FileSizeExceededError) {
-				return sbvrUtils.handleHttpErrors(
-					req,
-					res,
-					new errors.BadRequestError(err.message),
-				);
-			}
-
-			getLogger(getApiRoot(req)).error('Error uploading file', err);
-			next(err);
+			getLogger(getApiRoot(req)).warn('Error uploading file', err);
+			return sbvrUtils.handleHttpErrors(
+				req,
+				res,
+				new errors.BadRequestError(err),
+			);
 		}
 	});
 
@@ -216,7 +237,7 @@
 	};
 };
 
-const getWebResourceFields = (
+export const getWebResourceFields = (
 	request: uriParser.ODataRequest,
 	useTranslations = true,
 ): string[] => {
@@ -249,6 +270,8 @@ const throwIfWebresourceNotInMultipart = (
 	{ req, request }: HookArgs,
 ) => {
 	if (
+		request.custom.isAction !== 'beginUpload' &&
+		request.custom.isAction !== 'commitUpload' &&
 		!req.is?.('multipart') &&
 		webResourceFields.some((field) => request.values[field] != null)
 	) {
@@ -447,4 +470,23 @@ export const setupUploadHooks = (
 		resourceName,
 		getCreateWebResourceHooks(handler),
 	);
+
+	sbvrUtils.addPureHook(
+		'POST',
+		apiRoot,
+		resourceName,
+		multipartUploadHooks(handler),
+	);
 };
+
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const webresourceModel: string = require('./webresource.sbvr');
+export const config = {
+	models: [
+		{
+			apiRoot: 'webresource',
+			modelText: webresourceModel,
+			modelName: 'webresource',
+		},
+	] as sbvrUtils.ExecutableModel[],
+};
diff --git a/src/webresource-handler/multipartUpload.ts b/src/webresource-handler/multipartUpload.ts
new file mode 100644
index 000000000..106965897
--- /dev/null
+++ b/src/webresource-handler/multipartUpload.ts
@@ -0,0 +1,275 @@
+import type { WebResourceType as WebResource } from '@balena/sbvr-types';
+import { randomUUID } from 'node:crypto';
+import type { AnyObject } from 'pinejs-client-core';
+import type {
+	BeginMultipartUploadPayload,
+	UploadPart,
+	WebResourceHandler,
+} from '.';
+import { getWebResourceFields } from '.';
+import type { PinejsClient } from '../sbvr-api/sbvr-utils';
+import { api } from '../sbvr-api/sbvr-utils';
+import type { ODataRequest } from '../sbvr-api/uri-parser';
+import { errors, sbvrUtils } from '../server-glue/module';
+
+type BeginUploadDbCheck = BeginMultipartUploadPayload & WebResource;
+
+export interface PendingUpload extends BeginMultipartUploadPayload {
+	fieldName: string;
+	fileKey: string;
+	uploadId: string;
+}
+
+export interface BeginUploadResponse {
+	[fieldName: string]: {
+		uuid: string;
+		uploadParts: UploadPart[];
+	};
+}
+
+const MB = 1024 * 1024;
+
+export const multipartUploadHooks = (
+	webResourceHandler: WebResourceHandler,
+): sbvrUtils.Hooks => {
+	return {
+		POSTPARSE: async ({ req, request, tx, api: applicationApi }) => {
+			if (request.odataQuery.property?.resource === 'beginUpload') {
+				const uploadParams = await validateBeginUpload(request, applicationApi);
+
+				// This explicit transaction is necessary because beginUpload requests
+				// roll back the transaction, which is only used to validate the
+				// requested metadata. If we didn't pass a transaction, the default
+				// transaction handler would be used, which errors out on any rollback.
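+				// The request is rewritten below into a PATCH on the owning resource
+				// so that the upload metadata is checked against the model's rules;
+				// PRERESPOND then rolls this transaction back once validation passes.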
+				tx = await sbvrUtils.db.transaction();
+				req.tx = tx;
+				request.tx = tx;
+
+				request.method = 'PATCH';
+				request.values = uploadParams;
+				request.odataQuery.resource = request.resourceName;
+				delete request.odataQuery.property;
+				request.custom.isAction = 'beginUpload';
+			} else if (request.odataQuery.property?.resource === 'commitUpload') {
+				const commitPayload = await validateCommitUpload(
+					request,
+					applicationApi,
+				);
+
+				const webresource = await webResourceHandler.commitMultipartUpload({
+					fileKey: commitPayload.metadata.fileKey,
+					uploadId: commitPayload.metadata.uploadId,
+					filename: commitPayload.metadata.filename,
+					providerCommitData: commitPayload.providerCommitData,
+				});
+
+				await api.webresource.patch({
+					resource: 'multipart_upload',
+					body: {
+						status: 'completed',
+					},
+					options: {
+						$filter: {
+							uuid: commitPayload.uuid,
+						},
+					},
+					passthrough: {
+						tx: tx,
+					},
+				});
+
+				request.method = 'PATCH';
+				request.values = {
+					[commitPayload.metadata.fieldName]: webresource,
+				};
+				request.odataQuery.resource = request.resourceName;
+				delete request.odataQuery.property;
+				request.custom.isAction = 'commitUpload';
+				request.custom.commitUploadPayload = webresource;
+			}
+		},
+		PRERESPOND: async ({ req, request, response, tx }) => {
+			if (request.custom.isAction === 'beginUpload') {
+				// If the transaction had failed (e.g. because the payload broke a DB
+				// constraint), this hook would not have been called; the request would
+				// instead have thrown with the rule it failed to validate. We roll
+				// back here because the PATCH was only a way to validate the payload.
+				await tx.rollback();
+
+				response.statusCode = 200;
+				response.body = await beginUpload(
+					webResourceHandler,
+					request,
+					req.user?.actor,
+				);
+			} else if (request.custom.isAction === 'commitUpload') {
+				response.body = await webResourceHandler.onPreRespond(
+					request.custom.commitUploadPayload,
+				);
+			}
+		},
+	};
+};
+
+export const beginUpload = async (
+	webResourceHandler: WebResourceHandler,
+	odataRequest: ODataRequest,
+	actorId?: number,
+): Promise<BeginUploadResponse> => {
+	const payload = odataRequest.values as {
+		[x: string]: BeginMultipartUploadPayload;
+	};
+	const fieldName = Object.keys(payload)[0];
+	const metadata = payload[fieldName];
+
+	const { fileKey, uploadId, uploadParts } =
+		await webResourceHandler.beginMultipartUpload(fieldName, metadata);
+	const uuid = randomUUID();
+
+	try {
+		await api.webresource.post({
+			resource: 'multipart_upload',
+			body: {
+				uuid,
+				resource_name: odataRequest.resourceName,
+				field_name: fieldName,
+				resource_id: odataRequest.affectedIds?.[0],
+				upload_id: uploadId,
+				file_key: fileKey,
+				status: 'pending',
+				filename: metadata.filename,
+				content_type: metadata.content_type,
+				size: metadata.size,
+				chunk_size: metadata.chunk_size,
+				expiry_date: Date.now() + 7 * 24 * 60 * 60 * 1000, // 7 days in ms
+				is_created_by__actor: actorId,
+			},
+		});
+	} catch (err) {
+		console.error('failed to start multipart upload', err);
+		throw new errors.BadRequestError('Failed to start multipart upload');
+	}
+
+	return { [fieldName]: { uuid, uploadParts } };
+};
+
+const validateBeginUpload = async (
+	request: ODataRequest,
+	applicationApi: PinejsClient,
+) => {
+	if (request.odataQuery.key == null) {
+		throw new errors.BadRequestError();
+	}
+
+	await applicationApi.post({
+		url: request.url.substring(1).replace('beginUpload', 'canAccess'),
+		body: { method: 'PATCH' },
+	});
+
+	const fieldNames = Object.keys(request.values);
+	if (fieldNames.length !== 1) {
+		throw new errors.BadRequestError(
+			'You can only get upload url for one field at a time',
+		);
+	}
+
+	const [fieldName] = fieldNames;
+	const webResourceFields = getWebResourceFields(request, false);
+	if (!webResourceFields.includes(fieldName)) {
+		throw new errors.BadRequestError(
+			`You must provide a valid webresource field from: ${JSON.stringify(webResourceFields)}`,
+		);
+	}
+
+	const beginUploadPayload = parseBeginUploadPayload(request.values[fieldName]);
+	if (beginUploadPayload == null) {
+		throw new errors.BadRequestError('Invalid file metadata');
+	}
+
+	const uploadMetadataCheck: BeginUploadDbCheck = {
+		...beginUploadPayload,
+		href: 'metadata_check',
+	};
+
+	return { [fieldName]: uploadMetadataCheck };
+};
+
+const parseBeginUploadPayload = (
+	payload: AnyObject,
+): BeginMultipartUploadPayload | null => {
+	if (typeof payload !== 'object') {
+		return null;
+	}
+
+	let { filename, content_type, size, chunk_size } = payload;
+	if (
+		typeof filename !== 'string' ||
+		typeof content_type !== 'string' ||
+		typeof size !== 'number' ||
+		(chunk_size != null && typeof chunk_size !== 'number') ||
+		(chunk_size != null && chunk_size < 5 * MB)
+	) {
+		return null;
+	}
+
+	if (chunk_size == null) {
+		chunk_size = 5 * MB;
+	}
+
+	return { filename, content_type, size, chunk_size };
+};
+
+const validateCommitUpload = async (
+	request: ODataRequest,
+	applicationApi: PinejsClient,
+) => {
+	if (request.odataQuery.key == null) {
+		throw new errors.BadRequestError();
+	}
+
+	await applicationApi.post({
+		url: request.url.substring(1).replace('commitUpload', 'canAccess'),
+		body: { method: 'PATCH' },
+	});
+
+	const { uuid, providerCommitData } = request.values;
+	if (typeof uuid !== 'string') {
+		throw new errors.BadRequestError('Invalid uuid type');
+	}
+
+	const [multipartUpload] = (await api.webresource.get({
+		resource: 'multipart_upload',
+		options: {
+			$select: ['id', 'file_key', 'upload_id', 'field_name', 'filename'],
+			$filter: {
+				uuid,
+				status: 'pending',
+				expiry_date: { $gt: { $now: {} } },
+			},
+		},
+		passthrough: {
+			tx: request.tx,
+		},
+	})) as [
+		{
+			id: number;
+			file_key: string;
+			upload_id: string;
+			field_name: string;
+			filename: string;
+		}?,
+	];
+
+	if (multipartUpload == null) {
+		throw new errors.BadRequestError(`Invalid upload for uuid ${uuid}`);
+	}
+
+	const metadata = {
+		fileKey: multipartUpload.file_key,
+		uploadId: multipartUpload.upload_id,
+		filename: multipartUpload.filename,
+		fieldName: multipartUpload.field_name,
+	};
+
+	return { uuid, providerCommitData, metadata };
+};
diff --git a/src/webresource-handler/webresource.sbvr b/src/webresource-handler/webresource.sbvr
new file mode 100644
index 000000000..6fff31cd8
--- /dev/null
+++ b/src/webresource-handler/webresource.sbvr
@@ -0,0 +1,63 @@
+Vocabulary: Auth
+
+Term: actor
+Term: expiry date
+	Concept Type: Date Time (Type)
+
+Vocabulary: webresource
+
+Term: uuid
+	Concept Type: Short Text (Type)
+Term: resource name
+	Concept Type: Short Text (Type)
+Term: field name
+	Concept Type: Short Text (Type)
+Term: resource id
+	Concept Type: Integer (Type)
+Term: upload id
+	Concept Type: Short Text (Type)
+Term: file key
+	Concept Type: Short Text (Type)
+Term: status
+	Concept Type: Short Text (Type)
+Term: filename
+	Concept Type: Short Text (Type)
+Term: content type
+	Concept Type: Short Text (Type)
+Term: size
+	Concept Type: Integer (Type)
+Term: chunk size
+	Concept Type: Integer (Type)
+Term: valid until date
+	Concept Type: Date Time (Type)
+
+Term: multipart upload
+Fact type: multipart upload has uuid
+	Necessity: each multipart upload has exactly one uuid
+	Necessity: each uuid is of exactly one multipart upload
+Fact type: multipart upload has resource name
+	Necessity: each multipart upload has exactly one resource name
+Fact type: multipart upload has field name
+	Necessity: each multipart upload has exactly one field name
+Fact type: multipart upload has resource id
+	Necessity: each multipart upload has exactly one resource id
+Fact type: multipart upload has upload id
+	Necessity: each multipart upload has exactly one upload id
+Fact type: multipart upload has file key
+	Necessity: each multipart upload has exactly one file key
+Fact type: multipart upload has status
+	Necessity: each multipart upload has exactly one status
+	Definition: "pending" or "completed" or "cancelled"
+Fact type: multipart upload has filename
+	Necessity: each multipart upload has exactly one filename
+Fact type: multipart upload has content type
+	Necessity: each multipart upload has exactly one content type
+Fact type: multipart upload has size
+	Necessity: each multipart upload has exactly one size
+Fact type: multipart upload has chunk size
+	Necessity: each multipart upload has exactly one chunk size
+Fact type: multipart upload has expiry date (Auth)
+	Necessity: each multipart upload has exactly one expiry date (Auth)
+Fact type: multipart upload is created by actor (Auth)
+	Necessity: each multipart upload is created by at most one actor (Auth)
+	Reference Type: informative
diff --git a/test/06-webresource.test.ts b/test/06-webresource.test.ts
index ad222ee5c..03e23a6dc 100644
--- a/test/06-webresource.test.ts
+++ b/test/06-webresource.test.ts
@@ -1087,6 +1087,216 @@ describe('06 webresources tests', function () {
 			});
 		},
 	);
+
+	describe('multipart upload', () => {
+		let testOrg: { id: number };
+		before(async () => {
+			const { body: org } = await supertest(testLocalServer)
+				.post(`/example/organization`)
+				.field('name', 'mtprt')
+				.expect(201);
+
+			const { body: orgWithoutFile } = await supertest(testLocalServer)
+				.get(`/example/organization(${org.id})`)
+				.expect(200);
+
+			expect(orgWithoutFile.d[0].logo_image).to.be.null;
+			testOrg = org;
+		});
+
+		it('fails to generate upload URLs for multiple fields at a time', async () => {
+			const { body: res } = await supertest(testLocalServer)
+				.post(`/example/organization(${testOrg.id})/beginUpload`)
+				.send({
+					logo_image: {
+						filename: 'test.png',
+						content_type: 'image/png',
+						size: 6291456,
+						chunk_size: 6000000,
+					},
+					not_translated_webresource: {
+						filename: 'test.png',
+						content_type: 'image/png',
+						size: 6291456,
+						chunk_size: 6000000,
+					},
+				})
+				.expect(400);
+			expect(res).to.be.eq(
+				'You can only get upload url for one field at a time',
+			);
+		});
+
+		it('fails to generate upload URLs for invalid field', async () => {
+			const { body: res } = await supertest(testLocalServer)
+				.post(`/example/organization(${testOrg.id})/beginUpload`)
+				.send({
+					idonotexist: {
+						filename: 'test.png',
+						content_type: 'image/png',
+						size: 6291456,
+						chunk_size: 6000000,
+					},
+				})
+				.expect(400);
+			expect(res).to.be.eq(
+				'You must provide a valid webresource field from: ["not_translated_webresource","logo_image"]',
+			);
+		});
+
+		it('fails to generate upload URLs for invalid field on translated endpoint', async () => {
+			const { body: res } = await supertest(testLocalServer)
+				.post(`/v1/organization(${testOrg.id})/beginUpload`)
+				.send({
+					idonotexist: {
+						filename: 'test.png',
+						content_type: 'image/png',
+						size: 6291456,
+						chunk_size: 6000000,
+					},
+				})
+				.expect(400);
+			expect(res).to.be.eq(
+				'You must provide a valid webresource field from: ["not_translated_webresource","other_image"]',
+			);
+		});
+
+		it('fails to generate upload URLs with chunk size < 5MB', async () => {
+			const { body: res } = await supertest(testLocalServer)
+				.post(`/example/organization(${testOrg.id})/beginUpload`)
+				.send({
+					logo_image: {
+						filename: 'test.png',
+						content_type: 'image/png',
+						size: 6291456,
+						chunk_size: 10,
+					},
+				})
+				.expect(400);
+			expect(res).to.be.eq('Invalid file metadata');
+		});
+
+		it('fails to generate upload URLs if a DB constraint is violated', async () => {
+			const { body: res } = await supertest(testLocalServer)
+				.post(`/example/organization(${testOrg.id})/beginUpload`)
+				.send({
+					logo_image: {
+						filename: 'test.png',
+						content_type: 'text/csv',
+						size: 6291456,
+						chunk_size: 6000000,
+					},
+				})
+				.expect(400);
+			expect(res).to.be.eq(
+				'It is necessary that each organization that has a logo image, has a logo image that has a Content Type (Type) that is equal to "image/png" or "image/jpg" or "image/jpeg" and has a Size (Type) that is less than 540000000.',
+			);
+		});
+
+		it('fails to generate upload URLs if the resource cannot be accessed', async () => {
+			await supertest(testLocalServer)
+				.post(`/example/organization(4242)/beginUpload`)
+				.send({
+					logo_image: {
+						filename: 'test.png',
+						content_type: 'text/csv',
+						size: 6291456,
+						chunk_size: 6000000,
+					},
+				})
+				.expect(401);
+		});
+
+		it('uploads a file via S3 presigned URL', async () => {
+			const { body: org } = await supertest(testLocalServer)
+				.post(`/example/organization`)
+				.field('name', 'John')
+				.expect(201);
+
+			const { body: orgWithoutFile } = await supertest(testLocalServer)
+				.get(`/example/organization(${org.id})`)
+				.expect(200);
+
+			expect(orgWithoutFile.d[0].logo_image).to.be.null;
+
+			const uniqueFilename = `${randomUUID()}_test.png`;
+			const {
+				body: { logo_image: uploadResponse },
+			} = await supertest(testLocalServer)
+				.post(`/example/organization(${org.id})/beginUpload`)
+				.send({
+					logo_image: {
+						filename: uniqueFilename,
+						content_type: 'image/png',
+						size: 6291456,
+						chunk_size: 6000000,
+					},
+				})
+				.expect(200);
+
+			const { body: after } = await supertest(testLocalServer)
+				.get(`/example/organization(${org.id})`)
+				.expect(200);
+
+			expect(after.d[0].logo_image).to.be.null;
+
+			expect(uploadResponse.uuid).to.be.a('string');
+			expect(uploadResponse.uploadParts).to.be.an('array').that.has.length(2);
+			expect(uploadResponse.uploadParts[0].chunkSize).to.be.eq(6000000);
+			expect(uploadResponse.uploadParts[0].partNumber).to.be.eq(1);
+			expect(uploadResponse.uploadParts[1].chunkSize).to.be.eq(291456);
+			expect(uploadResponse.uploadParts[1].partNumber).to.be.eq(2);
+
+			const uuid = uploadResponse.uuid;
+
+			const chunk1 = new Blob([Buffer.alloc(6000000)]);
+			const chunk2 = new Blob([Buffer.alloc(291456)]);
+
+			const res = await Promise.all([
+				fetch(uploadResponse.uploadParts[0].url, {
+					method: 'PUT',
+					body: chunk1,
+				}),
+				fetch(uploadResponse.uploadParts[1].url, {
+					method: 'PUT',
+					body: chunk2,
+				}),
+			]);
+
+			expect(res[0].status).to.be.eq(200);
+			expect(res[0].headers.get('Etag')).to.be.a('string');
+
+			expect(res[1].status).to.be.eq(200);
+			expect(res[1].headers.get('Etag')).to.be.a('string');
+
+			const { body: commitResponse } = await supertest(testLocalServer)
+				.post(`/example/organization(${org.id})/commitUpload`)
+				.send({
+					uuid,
+					providerCommitData: {
+						Parts: [
+							{
+								PartNumber: 1,
+								ETag: res[0].headers.get('Etag'),
+							},
+							{
+								PartNumber: 2,
+								ETag: res[1].headers.get('Etag'),
+							},
+						],
+					},
+				})
+				.expect(200);
+
+			await expectToExist(commitResponse.filename);
+			const { body: orgWithFile } = await supertest(testLocalServer)
+				.get(`/example/organization(${org.id})`)
+				.expect(200);
+
+			expect(orgWithFile.d[0].logo_image.href).to.be.a('string');
+			expect(orgWithFile.d[0].logo_image.size).to.be.eq(6291456);
+		});
+	});
 });
 
 const removesSigning = (href: string): string => {
diff --git a/test/fixtures/06-webresource/config.ts b/test/fixtures/06-webresource/config.ts
index 39d1101dc..906dc8ef0 100644
--- a/test/fixtures/06-webresource/config.ts
+++ b/test/fixtures/06-webresource/config.ts
@@ -1,6 +1,6 @@
 import type { ConfigLoader } from '../../../src/server-glue/module';
 import type { WebResourceHandler } from '../../../src/webresource-handler';
-import { S3Handler } from '../../../src/webresource-handler/handlers/S3Handler';
+import { S3Handler } from '@balena/pinejs-webresource-s3';
 import { v1AbstractSqlModel, v1Translations } from './translations/v1';
 import { requiredVar, intVar } from '@balena/env-parsing';
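
A minimal client-side sketch of the begin/commit flow introduced above, for reference. The endpoint paths, payload shapes, and the S3-style `Parts`/`ETag` commit data come from `multipartUpload.ts` and the test in `test/06-webresource.test.ts`; the `uploadLogo` helper name, its chunk slicing, and the absence of error handling are illustrative assumptions, not part of this diff.

// Sketch: begin a multipart upload, PUT each part to its presigned URL,
// then commit with the ETags the storage provider returned (Node 18+ fetch).
async function uploadLogo(base: string, orgId: number, file: Buffer) {
	const begin = await fetch(
		`${base}/example/organization(${orgId})/beginUpload`,
		{
			method: 'POST',
			headers: { 'Content-Type': 'application/json' },
			body: JSON.stringify({
				logo_image: {
					filename: 'logo.png',
					content_type: 'image/png',
					size: file.length,
					chunk_size: 6000000, // must be at least 5MB
				},
			}),
		},
	);
	const { logo_image } = (await begin.json()) as {
		logo_image: {
			uuid: string;
			uploadParts: Array<{ url: string; chunkSize: number; partNumber: number }>;
		};
	};

	// Each part is a presigned URL plus the exact chunk size to send for it.
	const parts: Array<{ PartNumber: number; ETag: string | null }> = [];
	let offset = 0;
	for (const part of logo_image.uploadParts) {
		const res = await fetch(part.url, {
			method: 'PUT',
			body: file.subarray(offset, offset + part.chunkSize),
		});
		parts.push({ PartNumber: part.partNumber, ETag: res.headers.get('Etag') });
		offset += part.chunkSize;
	}

	// providerCommitData is passed through to the handler untouched
	// (for S3 this is the part list for CompleteMultipartUpload).
	await fetch(`${base}/example/organization(${orgId})/commitUpload`, {
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		body: JSON.stringify({
			uuid: logo_image.uuid,
			providerCommitData: { Parts: parts },
		}),
	});
}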