From 89f2e0b4082285d32aa0b2634b6df8cde5e7b581 Mon Sep 17 00:00:00 2001 From: Eric Bunton Date: Thu, 14 Dec 2023 04:14:04 -0500 Subject: [PATCH 1/8] @tus/server: restore the onIncomingRequest callback for PATCH handler. (#532) Restore the onIncomingRequest for PATCH handler. --- packages/server/src/handlers/PatchHandler.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/server/src/handlers/PatchHandler.ts b/packages/server/src/handlers/PatchHandler.ts index 93f8f2e2..a2237103 100644 --- a/packages/server/src/handlers/PatchHandler.ts +++ b/packages/server/src/handlers/PatchHandler.ts @@ -36,6 +36,10 @@ export class PatchHandler extends BaseHandler { throw ERRORS.INVALID_CONTENT_TYPE } + if (this.options.onIncomingRequest) { + await this.options.onIncomingRequest(req, res, id) + } + const lock = await this.acquireLock(req, id, context) let upload: Upload From 855a3d0dd87357bf899bd4380989e2fde7b60a0f Mon Sep 17 00:00:00 2001 From: David van Leeuwen Date: Mon, 18 Dec 2023 10:10:25 +0100 Subject: [PATCH 2/8] @tus/server: fix typo in readme example code (#535) --- packages/server/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/server/README.md b/packages/server/README.md index a0305f96..c55b9fb8 100644 --- a/packages/server/README.md +++ b/packages/server/README.md @@ -358,18 +358,18 @@ const server = new Server({ const token = req.headers.authorization; if (!token) { - throw { status_code: 401, body: 'Unauthorized' }) + throw { status_code: 401, body: 'Unauthorized' } } try { const decodedToken = await jwt.verify(token, 'your_secret_key') req.user = decodedToken } catch (error) { - throw { status_code: 401, body: 'Invalid token' }) + throw { status_code: 401, body: 'Invalid token' } } if (req.user.role !== 'admin') { - throw { status_code: 403, body: 'Access denied' }) + throw { status_code: 403, body: 'Access denied' } } }, }); From dbb2e80b9460cbcf6e7fc07895089880e38cfdad Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Dec 2023 15:01:15 +0100 Subject: [PATCH 3/8] Bump @types/node from 20.5.7 to 20.10.4 (#529) Bumps [@types/node](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/node) from 20.5.7 to 20.10.4. - [Release notes](https://github.com/DefinitelyTyped/DefinitelyTyped/releases) - [Commits](https://github.com/DefinitelyTyped/DefinitelyTyped/commits/HEAD/types/node) --- updated-dependencies: - dependency-name: "@types/node" dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- packages/file-store/package.json | 2 +- packages/gcs-store/package.json | 2 +- packages/s3-store/package.json | 2 +- packages/server/package.json | 2 +- test/package.json | 2 +- yarn.lock | 27 ++++++++++++++++++--------- 6 files changed, 23 insertions(+), 14 deletions(-) diff --git a/packages/file-store/package.json b/packages/file-store/package.json index 894a9c0a..2d31d3a9 100644 --- a/packages/file-store/package.json +++ b/packages/file-store/package.json @@ -27,7 +27,7 @@ "@tus/server": "workspace:^", "@types/debug": "^4.1.8", "@types/mocha": "^10.0.1", - "@types/node": "^20.5.7", + "@types/node": "^20.10.4", "eslint": "^8.48.0", "eslint-config-custom": "workspace:*", "mocha": "^10.2.0", diff --git a/packages/gcs-store/package.json b/packages/gcs-store/package.json index 336a6f4d..d641559a 100644 --- a/packages/gcs-store/package.json +++ b/packages/gcs-store/package.json @@ -28,7 +28,7 @@ "@tus/server": "workspace:^", "@types/debug": "^4.1.8", "@types/mocha": "^10.0.1", - "@types/node": "^20.5.7", + "@types/node": "^20.10.4", "eslint": "^8.48.0", "eslint-config-custom": "workspace:*", "mocha": "^10.2.0", diff --git a/packages/s3-store/package.json b/packages/s3-store/package.json index c7b14218..a7b2ff5f 100644 --- a/packages/s3-store/package.json +++ b/packages/s3-store/package.json @@ -28,7 +28,7 @@ "@tus/server": "workspace:^", "@types/debug": "^4.1.8", "@types/mocha": "^10.0.1", - "@types/node": "^20.5.7", + "@types/node": "^20.10.4", "eslint": "^8.48.0", "eslint-config-custom": "workspace:*", "mocha": "^10.2.0", diff --git a/packages/server/package.json b/packages/server/package.json index ae9f0db7..d0d2c95b 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -26,7 +26,7 @@ "devDependencies": { "@types/debug": "^4.1.8", "@types/mocha": "^10.0.1", - "@types/node": "^20.5.7", + "@types/node": "^20.10.4", "@types/sinon": "^10.0.16", "@types/supertest": "^2.0.12", "eslint": "^8.48.0", diff --git a/test/package.json b/test/package.json index 422792ec..ba13a060 100644 --- a/test/package.json +++ b/test/package.json @@ -14,7 +14,7 @@ }, "devDependencies": { "@types/mocha": "^10.0.1", - "@types/node": "^20.5.7", + "@types/node": "^20.10.4", "@types/rimraf": "^3.0.2", "@types/sinon": "^10.0.16", "@types/supertest": "^2.0.12", diff --git a/yarn.lock b/yarn.lock index fd8bb4bc..6bf4239f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1495,7 +1495,7 @@ __metadata: "@tus/server": "workspace:^" "@types/debug": ^4.1.8 "@types/mocha": ^10.0.1 - "@types/node": ^20.5.7 + "@types/node": ^20.10.4 debug: ^4.3.4 eslint: ^8.48.0 eslint-config-custom: "workspace:*" @@ -1518,7 +1518,7 @@ __metadata: "@tus/server": "workspace:^" "@types/debug": ^4.1.8 "@types/mocha": ^10.0.1 - "@types/node": ^20.5.7 + "@types/node": ^20.10.4 debug: ^4.3.4 eslint: ^8.48.0 eslint-config-custom: "workspace:*" @@ -1539,7 +1539,7 @@ __metadata: "@tus/server": "workspace:^" "@types/debug": ^4.1.8 "@types/mocha": ^10.0.1 - "@types/node": ^20.5.7 + "@types/node": ^20.10.4 debug: ^4.3.4 eslint: ^8.48.0 eslint-config-custom: "workspace:*" @@ -1557,7 +1557,7 @@ __metadata: dependencies: "@types/debug": ^4.1.8 "@types/mocha": ^10.0.1 - "@types/node": ^20.5.7 + "@types/node": ^20.10.4 "@types/sinon": ^10.0.16 "@types/supertest": ^2.0.12 debug: ^4.3.4 @@ -1652,10 +1652,12 @@ __metadata: languageName: node linkType: hard -"@types/node@npm:^20.5.7": - version: 20.5.7 - resolution: 
"@types/node@npm:20.5.7" - checksum: fc284c8e16ddc04569730d58e87eae349eb1c3dd9020cb79a1862d9d9add6f04e7367a236f3252db8db2572f90278e250f4cd43d27d264972b54394eaba1ed76 +"@types/node@npm:^20.10.4": + version: 20.10.4 + resolution: "@types/node@npm:20.10.4" + dependencies: + undici-types: ~5.26.4 + checksum: 054b296417e771ab524bea63cf3289559c6bdf290d45428f7cc68e9b00030ff7a0ece47b8c99a26b4f47a443919813bcf42beadff2f0bea7d8125fa541d92eb0 languageName: node linkType: hard @@ -4901,7 +4903,7 @@ __metadata: "@tus/s3-store": "workspace:^" "@tus/server": "workspace:^" "@types/mocha": ^10.0.1 - "@types/node": ^20.5.7 + "@types/node": ^20.10.4 "@types/rimraf": ^3.0.2 "@types/sinon": ^10.0.16 "@types/supertest": ^2.0.12 @@ -5168,6 +5170,13 @@ __metadata: languageName: node linkType: hard +"undici-types@npm:~5.26.4": + version: 5.26.5 + resolution: "undici-types@npm:5.26.5" + checksum: 3192ef6f3fd5df652f2dc1cd782b49d6ff14dc98e5dced492aa8a8c65425227da5da6aafe22523c67f035a272c599bb89cfe803c1db6311e44bed3042fc25487 + languageName: node + linkType: hard + "unique-filename@npm:^3.0.0": version: 3.0.0 resolution: "unique-filename@npm:3.0.0" From 4848ca592f023fcf46ef366ba74de2d23086024a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Dec 2023 15:01:35 +0100 Subject: [PATCH 4/8] Bump @typescript-eslint/parser from 6.5.0 to 6.13.1 (#525) Bumps [@typescript-eslint/parser](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/parser) from 6.5.0 to 6.13.1. - [Release notes](https://github.com/typescript-eslint/typescript-eslint/releases) - [Changelog](https://github.com/typescript-eslint/typescript-eslint/blob/main/packages/parser/CHANGELOG.md) - [Commits](https://github.com/typescript-eslint/typescript-eslint/commits/v6.13.1/packages/parser) --- updated-dependencies: - dependency-name: "@typescript-eslint/parser" dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- packages/eslint-config-custom/package.json | 2 +- yarn.lock | 62 +++++++++++----------- 2 files changed, 32 insertions(+), 32 deletions(-) diff --git a/packages/eslint-config-custom/package.json b/packages/eslint-config-custom/package.json index 1047d01c..56e28bba 100644 --- a/packages/eslint-config-custom/package.json +++ b/packages/eslint-config-custom/package.json @@ -10,7 +10,7 @@ }, "dependencies": { "@typescript-eslint/eslint-plugin": "^5.62.0", - "@typescript-eslint/parser": "^6.5.0", + "@typescript-eslint/parser": "^6.13.1", "eslint": "^8.48.0", "eslint-config-prettier": "^8.10.0", "eslint-config-turbo": "^1.10.13", diff --git a/yarn.lock b/yarn.lock index 6bf4239f..aabc87d9 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1753,21 +1753,21 @@ __metadata: languageName: node linkType: hard -"@typescript-eslint/parser@npm:^6.5.0": - version: 6.5.0 - resolution: "@typescript-eslint/parser@npm:6.5.0" - dependencies: - "@typescript-eslint/scope-manager": 6.5.0 - "@typescript-eslint/types": 6.5.0 - "@typescript-eslint/typescript-estree": 6.5.0 - "@typescript-eslint/visitor-keys": 6.5.0 +"@typescript-eslint/parser@npm:^6.13.1": + version: 6.13.1 + resolution: "@typescript-eslint/parser@npm:6.13.1" + dependencies: + "@typescript-eslint/scope-manager": 6.13.1 + "@typescript-eslint/types": 6.13.1 + "@typescript-eslint/typescript-estree": 6.13.1 + "@typescript-eslint/visitor-keys": 6.13.1 debug: ^4.3.4 peerDependencies: eslint: ^7.0.0 || ^8.0.0 peerDependenciesMeta: typescript: optional: true - checksum: e9a70886ec2660aee5c77cdff67ba11651eb855b7ecd3ad1e70837fce997d6e6db9dfe1e1eab46a9b2147cbc034ae9c109951f3bc24ce54e78cae669b6bc9c95 + checksum: 58b7fef6f2d02c8f4737f9908a8d335a20bee20dba648233a69f28e7b39237791d2b9fbb818e628dcc053ddf16507b161ace7f1139e093d72365f1270c426de3 languageName: node linkType: hard @@ -1781,13 +1781,13 @@ __metadata: languageName: node linkType: hard -"@typescript-eslint/scope-manager@npm:6.5.0": - version: 6.5.0 - resolution: "@typescript-eslint/scope-manager@npm:6.5.0" +"@typescript-eslint/scope-manager@npm:6.13.1": + version: 6.13.1 + resolution: "@typescript-eslint/scope-manager@npm:6.13.1" dependencies: - "@typescript-eslint/types": 6.5.0 - "@typescript-eslint/visitor-keys": 6.5.0 - checksum: 30d78143f68e07d6bd15a147f64cc16830f8a8c8409b37aa7c7d205d7585f3648ec1c5365b3f177b7561971b407f773f6dba83b3b78fa63091045f2d6bbc6b9f + "@typescript-eslint/types": 6.13.1 + "@typescript-eslint/visitor-keys": 6.13.1 + checksum: 109a213f82719e10f8c6a0168f2e105dc1369c7e0c075c1f30af137030fc866a3a585a77ff78a9a3538afc213061c8aedbb4462a91f26cbd90eefbab8b89ea10 languageName: node linkType: hard @@ -1815,10 +1815,10 @@ __metadata: languageName: node linkType: hard -"@typescript-eslint/types@npm:6.5.0": - version: 6.5.0 - resolution: "@typescript-eslint/types@npm:6.5.0" - checksum: 950ec16991d71494d10cb752535bbc4395295e3f03a716d53ec55bbb0aaff487aa774cc5002f775ffcc80b9f0e16ac53ecebf7cac1444ca4f7a847b0859ffbfb +"@typescript-eslint/types@npm:6.13.1": + version: 6.13.1 + resolution: "@typescript-eslint/types@npm:6.13.1" + checksum: bb1d52f1646bab9acd3ec874567ffbaaaf7fe4a5f79845bdacbfea46d15698e58d45797da05b08c23f9496a17229b7f2c1363d000fd89ce4e79874fd57ba1d4a languageName: node linkType: hard @@ -1840,12 +1840,12 @@ __metadata: languageName: node linkType: hard -"@typescript-eslint/typescript-estree@npm:6.5.0": - version: 6.5.0 - resolution: 
"@typescript-eslint/typescript-estree@npm:6.5.0" +"@typescript-eslint/typescript-estree@npm:6.13.1": + version: 6.13.1 + resolution: "@typescript-eslint/typescript-estree@npm:6.13.1" dependencies: - "@typescript-eslint/types": 6.5.0 - "@typescript-eslint/visitor-keys": 6.5.0 + "@typescript-eslint/types": 6.13.1 + "@typescript-eslint/visitor-keys": 6.13.1 debug: ^4.3.4 globby: ^11.1.0 is-glob: ^4.0.3 @@ -1854,7 +1854,7 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: 05717fa1f2609fa5669803191cf309a379c815aaf4fff6850f40560eec8749759c36b288f05cecffd5c1d0be8de1fe414ecfee6ecf99b6ae521baa48c8b58455 + checksum: 09aa0f5cbd60e84df4f58f3d479be352549600b24dbefe75c686ea89252526c52c1c06ce1ae56c0405dd7337002e741c2ba02b71fb1caa3b94a740a70fcc8699 languageName: node linkType: hard @@ -1886,13 +1886,13 @@ __metadata: languageName: node linkType: hard -"@typescript-eslint/visitor-keys@npm:6.5.0": - version: 6.5.0 - resolution: "@typescript-eslint/visitor-keys@npm:6.5.0" +"@typescript-eslint/visitor-keys@npm:6.13.1": + version: 6.13.1 + resolution: "@typescript-eslint/visitor-keys@npm:6.13.1" dependencies: - "@typescript-eslint/types": 6.5.0 + "@typescript-eslint/types": 6.13.1 eslint-visitor-keys: ^3.4.1 - checksum: 768a02dd0d8aae45708646bb0c51e67da09e71dc101bb0a0e55d7e0c8eadfea2f531acd3035d1ec34bf2380b66188f3fc47c6bef0201eae36b2dcc48d1934442 + checksum: d15d362203a2fe995ea62a59d5b44c15c8fb1fb30ff59dd1542a980f75b3b62035303dfb781d83709921613f6ac8cc5bf57b70f6e20d820aec8b7911f07152e9 languageName: node linkType: hard @@ -2664,7 +2664,7 @@ __metadata: "@types/eslint": ^8.44.2 "@types/prettier": ^2.7.3 "@typescript-eslint/eslint-plugin": ^5.62.0 - "@typescript-eslint/parser": ^6.5.0 + "@typescript-eslint/parser": ^6.13.1 eslint: ^8.48.0 eslint-config-prettier: ^8.10.0 eslint-config-turbo: ^1.10.13 From 0575c9af4eb2ea18a47d82d4a081d7cd4d80ce74 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Dec 2023 15:02:17 +0100 Subject: [PATCH 5/8] Bump eslint-config-turbo from 1.10.13 to 1.10.16 (#512) Bumps [eslint-config-turbo](https://github.com/vercel/turbo/tree/HEAD/packages/eslint-config-turbo) from 1.10.13 to 1.10.16. - [Release notes](https://github.com/vercel/turbo/releases) - [Changelog](https://github.com/vercel/turbo/blob/main/release.md) - [Commits](https://github.com/vercel/turbo/commits/v1.10.16/packages/eslint-config-turbo) --- updated-dependencies: - dependency-name: eslint-config-turbo dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- packages/eslint-config-custom/package.json | 2 +- yarn.lock | 20 ++++++++++---------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/packages/eslint-config-custom/package.json b/packages/eslint-config-custom/package.json index 56e28bba..342edef9 100644 --- a/packages/eslint-config-custom/package.json +++ b/packages/eslint-config-custom/package.json @@ -13,7 +13,7 @@ "@typescript-eslint/parser": "^6.13.1", "eslint": "^8.48.0", "eslint-config-prettier": "^8.10.0", - "eslint-config-turbo": "^1.10.13", + "eslint-config-turbo": "^1.10.16", "eslint-plugin-prettier": "^4.2.1", "prettier": "^2.8.8" }, diff --git a/yarn.lock b/yarn.lock index aabc87d9..728a1135 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2667,7 +2667,7 @@ __metadata: "@typescript-eslint/parser": ^6.13.1 eslint: ^8.48.0 eslint-config-prettier: ^8.10.0 - eslint-config-turbo: ^1.10.13 + eslint-config-turbo: ^1.10.16 eslint-plugin-prettier: ^4.2.1 prettier: ^2.8.8 languageName: unknown @@ -2684,14 +2684,14 @@ __metadata: languageName: node linkType: hard -"eslint-config-turbo@npm:^1.10.13": - version: 1.10.13 - resolution: "eslint-config-turbo@npm:1.10.13" +"eslint-config-turbo@npm:^1.10.16": + version: 1.10.16 + resolution: "eslint-config-turbo@npm:1.10.16" dependencies: - eslint-plugin-turbo: 1.10.13 + eslint-plugin-turbo: 1.10.16 peerDependencies: eslint: ">6.6.0" - checksum: 008f362d361c2d4631f725a412043c122c4f52c392c7209ba57117d34f8aa596c0941dd04b7eb860cb252e4263c6e3ae9e1ee134aef8c4f43c66e2a84fc376d4 + checksum: e5a6ec6d04f21c162f59bd2f3e7e25a311943d94d8130d7d5a7fdea375fa5acfd62918aa34467a5e56ab0f3ca087ea8e117354a5ff631bb38f1fe8a35444ae4b languageName: node linkType: hard @@ -2710,14 +2710,14 @@ __metadata: languageName: node linkType: hard -"eslint-plugin-turbo@npm:1.10.13": - version: 1.10.13 - resolution: "eslint-plugin-turbo@npm:1.10.13" +"eslint-plugin-turbo@npm:1.10.16": + version: 1.10.16 + resolution: "eslint-plugin-turbo@npm:1.10.16" dependencies: dotenv: 16.0.3 peerDependencies: eslint: ">6.6.0" - checksum: 39649fc71e5e1d8169a81a28adcd4850b595bbef01744159ffa32c0dcc211cb6f1dea25d4eef73aa4b57b8064b7c6661b048369046a40b19eaca051f507e5dfb + checksum: 00fdbd2cb956b3e9972c84169bb190bafaa274418989508bf52a5d1b5f7df0e850eb60fc702fedbc34477dcebc3e06450de0b81d8e30b56e061ced850e8cb8e3 languageName: node linkType: hard From 392d9ab6a7b5e4eb3aeb9ec8ad6d5ca1659a1d05 Mon Sep 17 00:00:00 2001 From: Murderlon Date: Wed, 20 Dec 2023 10:09:41 +0100 Subject: [PATCH 6/8] Trigger CI From aafb40dc3e0eb786fc019f5019b01831e52f4c7a Mon Sep 17 00:00:00 2001 From: Fabrizio Date: Thu, 21 Dec 2023 09:58:05 +0000 Subject: [PATCH 7/8] @tus/server: support `Tus-Max-Size` (#517) --- packages/server/README.md | 5 + packages/server/src/constants.ts | 8 + packages/server/src/handlers/BaseHandler.ts | 85 +++++- .../server/src/handlers/OptionsHandler.ts | 8 +- packages/server/src/handlers/PatchHandler.ts | 9 +- packages/server/src/handlers/PostHandler.ts | 15 +- packages/server/src/models/StreamLimiter.ts | 34 +++ packages/server/src/types.ts | 7 + packages/server/test/PatchHandler.test.ts | 64 +++++ packages/server/test/utils.ts | 18 +- test/e2e.test.ts | 252 ++++++++++++++++++ 11 files changed, 489 insertions(+), 16 deletions(-) create mode 100644 packages/server/src/models/StreamLimiter.ts diff --git a/packages/server/README.md b/packages/server/README.md index c55b9fb8..50aa39b5 100644 --- a/packages/server/README.md +++ 
b/packages/server/README.md @@ -60,6 +60,11 @@ Creates a new tus server with options. The route to accept requests (`string`). +#### `options.maxSize` + +Max file size (in bytes) allowed when uploading (`number` | (`(req, id: string | null) => Promise | number`)). +When providing a function during the OPTIONS request the id will be `null`. + #### `options.relativeLocation` Return a relative URL as the `Location` header to the client (`boolean`). diff --git a/packages/server/src/constants.ts b/packages/server/src/constants.ts index c9247711..7d4e7655 100644 --- a/packages/server/src/constants.ts +++ b/packages/server/src/constants.ts @@ -57,6 +57,14 @@ export const ERRORS = { status_code: 410, body: 'The file for this url no longer exists\n', }, + ERR_SIZE_EXCEEDED: { + status_code: 413, + body: "upload's size exceeded\n", + }, + ERR_MAX_SIZE_EXCEEDED: { + status_code: 413, + body: 'Maximum size exceeded\n', + }, INVALID_LENGTH: { status_code: 400, body: 'Upload-Length or Upload-Defer-Length header required\n', diff --git a/packages/server/src/handlers/BaseHandler.ts b/packages/server/src/handlers/BaseHandler.ts index aae7bf0f..a0c719c8 100644 --- a/packages/server/src/handlers/BaseHandler.ts +++ b/packages/server/src/handlers/BaseHandler.ts @@ -3,8 +3,11 @@ import EventEmitter from 'node:events' import type {ServerOptions} from '../types' import type {DataStore, CancellationContext} from '../models' import type http from 'node:http' -import stream from 'node:stream' +import {Upload} from '../models' import {ERRORS} from '../constants' +import stream from 'node:stream/promises' +import {addAbortSignal, PassThrough} from 'stream' +import {StreamLimiter} from '../models/StreamLimiter' const reExtractFileID = /([^/]+)\/?$/ const reForwardedHost = /host="?([^";]+)/ @@ -132,24 +135,24 @@ export class BaseHandler extends EventEmitter { req: http.IncomingMessage, id: string, offset: number, + maxFileSize: number, context: CancellationContext ) { return new Promise(async (resolve, reject) => { + // Abort early if the operation has been cancelled. if (context.signal.aborted) { reject(ERRORS.ABORTED) return } - const proxy = new stream.PassThrough() - stream.addAbortSignal(context.signal, proxy) + // Create a PassThrough stream as a proxy to manage the request stream. + // This allows for aborting the write process without affecting the incoming request stream. + const proxy = new PassThrough() + addAbortSignal(context.signal, proxy) proxy.on('error', (err) => { req.unpipe(proxy) - if (err.name === 'AbortError') { - reject(ERRORS.ABORTED) - } else { - reject(err) - } + reject(err.name === 'AbortError' ? ERRORS.ABORTED : err) }) req.on('error', (err) => { @@ -158,7 +161,71 @@ export class BaseHandler extends EventEmitter { } }) - this.store.write(req.pipe(proxy), id, offset).then(resolve).catch(reject) + // Pipe the request stream through the proxy. We use the proxy instead of the request stream directly + // to ensure that errors in the pipeline do not cause the request stream to be destroyed, + // which would result in a socket hangup error for the client. + stream + .pipeline(req.pipe(proxy), new StreamLimiter(maxFileSize), async (stream) => { + return this.store.write(stream as StreamLimiter, id, offset) + }) + .then(resolve) + .catch(reject) }) } + + getConfiguredMaxSize(req: http.IncomingMessage, id: string | null) { + if (typeof this.options.maxSize === 'function') { + return this.options.maxSize(req, id) + } + return this.options.maxSize ?? 
0 + } + + /** + * Calculates the maximum allowed size for the body of an upload request. + * This function considers both the server's configured maximum size and + * the specifics of the upload, such as whether the size is deferred or fixed. + */ + async calculateMaxBodySize( + req: http.IncomingMessage, + file: Upload, + configuredMaxSize?: number + ) { + // Use the server-configured maximum size if it's not explicitly provided. + configuredMaxSize ??= await this.getConfiguredMaxSize(req, file.id) + + // Parse the Content-Length header from the request (default to 0 if not set). + const length = parseInt(req.headers['content-length'] || '0', 10) + const offset = file.offset + + const hasContentLengthSet = req.headers['content-length'] !== undefined + const hasConfiguredMaxSizeSet = configuredMaxSize > 0 + + if (file.sizeIsDeferred) { + // For deferred size uploads, if it's not a chunked transfer, check against the configured maximum size. + if ( + hasContentLengthSet && + hasConfiguredMaxSizeSet && + offset + length > configuredMaxSize + ) { + throw ERRORS.ERR_SIZE_EXCEEDED + } + + if (hasConfiguredMaxSizeSet) { + return configuredMaxSize - offset + } else { + return Number.MAX_SAFE_INTEGER + } + } + + // Check if the upload fits into the file's size when the size is not deferred. + if (offset + length > (file.size || 0)) { + throw ERRORS.ERR_SIZE_EXCEEDED + } + + if (hasContentLengthSet) { + return length + } + + return (file.size || 0) - offset + } } diff --git a/packages/server/src/handlers/OptionsHandler.ts b/packages/server/src/handlers/OptionsHandler.ts index 00af77f8..944318cd 100644 --- a/packages/server/src/handlers/OptionsHandler.ts +++ b/packages/server/src/handlers/OptionsHandler.ts @@ -6,7 +6,13 @@ import type http from 'node:http' // A successful response indicated by the 204 No Content status MUST contain // the Tus-Version header. It MAY include the Tus-Extension and Tus-Max-Size headers. export class OptionsHandler extends BaseHandler { - async send(_: http.IncomingMessage, res: http.ServerResponse) { + async send(req: http.IncomingMessage, res: http.ServerResponse) { + const maxSize = await this.getConfiguredMaxSize(req, null) + + if (maxSize) { + res.setHeader('Tus-Max-Size', maxSize) + } + const allowedHeaders = [...HEADERS, ...(this.options.allowedHeaders ?? 
[])] res.setHeader('Access-Control-Allow-Methods', ALLOWED_METHODS) diff --git a/packages/server/src/handlers/PatchHandler.ts b/packages/server/src/handlers/PatchHandler.ts index a2237103..787b844c 100644 --- a/packages/server/src/handlers/PatchHandler.ts +++ b/packages/server/src/handlers/PatchHandler.ts @@ -40,6 +40,8 @@ export class PatchHandler extends BaseHandler { await this.options.onIncomingRequest(req, res, id) } + const maxFileSize = await this.getConfiguredMaxSize(req, id) + const lock = await this.acquireLock(req, id, context) let upload: Upload @@ -90,11 +92,16 @@ export class PatchHandler extends BaseHandler { throw ERRORS.INVALID_LENGTH } + if (maxFileSize > 0 && size > maxFileSize) { + throw ERRORS.ERR_MAX_SIZE_EXCEEDED + } + await this.store.declareUploadLength(id, size) upload.size = size } - newOffset = await this.writeToStore(req, id, offset, context) + const maxBodySize = await this.calculateMaxBodySize(req, upload, maxFileSize) + newOffset = await this.writeToStore(req, id, offset, maxBodySize, context) } finally { await lock.unlock() } diff --git a/packages/server/src/handlers/PostHandler.ts b/packages/server/src/handlers/PostHandler.ts index 9d93f4af..efea6265 100644 --- a/packages/server/src/handlers/PostHandler.ts +++ b/packages/server/src/handlers/PostHandler.ts @@ -62,6 +62,16 @@ export class PostHandler extends BaseHandler { throw ERRORS.FILE_WRITE_ERROR } + const maxFileSize = await this.getConfiguredMaxSize(req, id) + + if ( + upload_length && + maxFileSize > 0 && + Number.parseInt(upload_length, 10) > maxFileSize + ) { + throw ERRORS.ERR_MAX_SIZE_EXCEEDED + } + let metadata if ('upload-metadata' in req.headers) { try { @@ -92,12 +102,14 @@ export class PostHandler extends BaseHandler { } const lock = await this.acquireLock(req, id, context) + let isFinal: boolean let url: string let headers: { 'Upload-Offset'?: string 'Upload-Expires'?: string } + try { await this.store.create(upload) url = this.generateUrl(req, upload.id) @@ -109,7 +121,8 @@ export class PostHandler extends BaseHandler { // The request MIGHT include a Content-Type header when using creation-with-upload extension if (validateHeader('content-type', req.headers['content-type'])) { - const newOffset = await this.writeToStore(req, id, 0, context) + const bodyMaxSize = await this.calculateMaxBodySize(req, upload, maxFileSize) + const newOffset = await this.writeToStore(req, id, 0, bodyMaxSize, context) headers['Upload-Offset'] = newOffset.toString() isFinal = newOffset === Number.parseInt(upload_length as string, 10) diff --git a/packages/server/src/models/StreamLimiter.ts b/packages/server/src/models/StreamLimiter.ts new file mode 100644 index 00000000..9b040e5e --- /dev/null +++ b/packages/server/src/models/StreamLimiter.ts @@ -0,0 +1,34 @@ +import {Transform, TransformCallback} from 'stream' +import {ERRORS} from '../constants' + +// TODO: create HttpError and use it everywhere instead of throwing objects +export class MaxFileExceededError extends Error { + status_code: number + body: string + + constructor() { + super(ERRORS.ERR_MAX_SIZE_EXCEEDED.body) + this.status_code = ERRORS.ERR_MAX_SIZE_EXCEEDED.status_code + this.body = ERRORS.ERR_MAX_SIZE_EXCEEDED.body + Object.setPrototypeOf(this, MaxFileExceededError.prototype) + } +} + +export class StreamLimiter extends Transform { + private maxSize: number + private currentSize = 0 + + constructor(maxSize: number) { + super() + this.maxSize = maxSize + } + + _transform(chunk: Buffer, encoding: BufferEncoding, callback: TransformCallback): void { 
+ this.currentSize += chunk.length + if (this.currentSize > this.maxSize) { + callback(new MaxFileExceededError()) + } else { + callback(null, chunk) + } + } +} diff --git a/packages/server/src/types.ts b/packages/server/src/types.ts index 7fcbfd21..65420349 100644 --- a/packages/server/src/types.ts +++ b/packages/server/src/types.ts @@ -11,6 +11,13 @@ export type ServerOptions = { */ path: string + /** + * Max file size allowed when uploading + */ + maxSize?: + | number + | ((req: http.IncomingMessage, uploadId: string | null) => Promise | number) + /** * Return a relative URL as the `Location` header. */ diff --git a/packages/server/test/PatchHandler.test.ts b/packages/server/test/PatchHandler.test.ts index f8f6c91e..456d6282 100644 --- a/packages/server/test/PatchHandler.test.ts +++ b/packages/server/test/PatchHandler.test.ts @@ -12,6 +12,8 @@ import {EVENTS} from '../src/constants' import {EventEmitter} from 'node:events' import {addPipableStreamBody} from './utils' import {MemoryLocker} from '../src' +import streamP from 'node:stream/promises' +import stream from 'node:stream' describe('PatchHandler', () => { const path = '/test/output' @@ -182,4 +184,66 @@ describe('PatchHandler', () => { assert.ok(spy.args[0][1]) assert.equal(spy.args[0][2].offset, 10) }) + + it('should throw max size exceeded error when upload-length is higher then the maxSize', async () => { + handler = new PatchHandler(store, {path, maxSize: 5, locker: new MemoryLocker()}) + req.headers = { + 'upload-offset': '0', + 'upload-length': '10', + 'content-type': 'application/offset+octet-stream', + } + req.url = `${path}/file` + + store.hasExtension.withArgs('creation-defer-length').returns(true) + store.getUpload.resolves(new Upload({id: '1234', offset: 0})) + store.write.resolves(5) + store.declareUploadLength.resolves() + + try { + await handler.send(req, res, context) + throw new Error('failed test') + } catch (e) { + assert.equal('body' in e, true) + assert.equal('status_code' in e, true) + assert.equal(e.body, 'Maximum size exceeded\n') + assert.equal(e.status_code, 413) + } + }) + + it('should throw max size exceeded error when the request body is bigger then the maxSize', async () => { + handler = new PatchHandler(store, {path, maxSize: 5, locker: new MemoryLocker()}) + const req = addPipableStreamBody( + httpMocks.createRequest({ + method: 'PATCH', + url: `${path}/1234`, + body: Buffer.alloc(30), + }) + ) + const res = httpMocks.createResponse({req}) + req.headers = { + 'upload-offset': '0', + 'content-type': 'application/offset+octet-stream', + } + req.url = `${path}/file` + + store.getUpload.resolves(new Upload({id: '1234', offset: 0})) + store.write.callsFake(async (readable: http.IncomingMessage | stream.Readable) => { + const writeStream = new stream.PassThrough() + await streamP.pipeline(readable, writeStream) + return writeStream.readableLength + }) + store.declareUploadLength.resolves() + + try { + await handler.send(req, res, context) + throw new Error('failed test') + } catch (e) { + assert.equal(e.message !== 'failed test', true, 'failed test') + assert.equal('body' in e, true) + assert.equal('status_code' in e, true) + assert.equal(e.body, 'Maximum size exceeded\n') + assert.equal(e.status_code, 413) + assert.equal(context.signal.aborted, true) + } + }) }) diff --git a/packages/server/test/utils.ts b/packages/server/test/utils.ts index a16866bd..bfb91a9c 100644 --- a/packages/server/test/utils.ts +++ b/packages/server/test/utils.ts @@ -5,17 +5,27 @@ export function addPipableStreamBody>( 
mockRequest: T ) { // Create a Readable stream that simulates the request body - const bodyStream = new stream.Readable({ + const bodyStream = new stream.Duplex({ read() { - this.push(JSON.stringify(mockRequest.body)) + this.push( + mockRequest.body instanceof Buffer + ? mockRequest.body + : JSON.stringify(mockRequest.body) + ) this.push(null) }, }) // Add the pipe method to the mockRequest - // @ts-expect-error pipe exists + // @ts-ignore mockRequest.pipe = function (dest: stream.Writable) { - bodyStream.pipe(dest) + return bodyStream.pipe(dest) + } + + // Add the unpipe method to the mockRequest + // @ts-ignore + mockRequest.unpipe = function (dest: stream.Writable) { + return bodyStream.unpipe(dest) } return mockRequest } diff --git a/test/e2e.test.ts b/test/e2e.test.ts index b915b373..2ea217d1 100644 --- a/test/e2e.test.ts +++ b/test/e2e.test.ts @@ -16,6 +16,10 @@ import http from 'node:http' import sinon from 'sinon' import Throttle from 'throttle' import {Agent} from 'http' +import {Buffer} from 'buffer' +import {Readable} from 'stream' +import {AddressInfo} from 'net' +import {strict} from 'assert' const STORE_PATH = '/output' const PROJECT_ID = 'tus-node-server' @@ -433,6 +437,254 @@ describe('EndToEnd', () => { }) }) + describe('FileStore with MaxFileSize', () => { + before(() => { + server = new Server({ + path: STORE_PATH, + maxSize: 1024 * 1024, + datastore: new FileStore({directory: `./${STORE_PATH}`}), + }) + listener = server.listen() + agent = request.agent(listener) + }) + + after(() => { + listener.close() + }) + + it('should not allow creating an upload that exceed the max-file-size', async () => { + await agent + .post(STORE_PATH) + .set('Tus-Resumable', TUS_RESUMABLE) + .set('Upload-Length', (1024 * 1024 * 2).toString()) + .set('Upload-Metadata', TEST_METADATA) + .set('Tus-Resumable', TUS_RESUMABLE) + .expect(413) + }) + + it('should not allow uploading with fixed length more than the defined MaxFileSize', async () => { + const body = Buffer.alloc(1024 * 1024 * 2) + const chunkSize = (1024 * 1024 * 2) / 4 + // purposely set this to 1MB even if we will try uploading 2MB via transfer-encoding: chunked + const uploadLength = 1024 * 1024 + + const res = await agent + .post(STORE_PATH) + .set('Tus-Resumable', TUS_RESUMABLE) + .set('Upload-Length', uploadLength.toString()) + .set('Upload-Metadata', TEST_METADATA) + .set('Tus-Resumable', TUS_RESUMABLE) + .expect(201) + + assert.equal('location' in res.headers, true) + assert.equal(res.headers['tus-resumable'], TUS_RESUMABLE) + + const uploadId = res.headers.location.split('/').pop() + + const uploadChunk = async (body: Buffer, offset = 0) => { + const res = await agent + .patch(`${STORE_PATH}/${uploadId}`) + .set('Tus-Resumable', TUS_RESUMABLE) + .set('Upload-Offset', offset.toString()) + .set('Content-Type', 'application/offset+octet-stream') + .send(body) + .expect(204) + .expect('Tus-Resumable', TUS_RESUMABLE) + + return parseInt(res.headers['upload-offset'] || '0', 0) + } + + let offset = 0 + offset = await uploadChunk(body.subarray(offset, chunkSize)) // 500Kb + offset = await uploadChunk(body.subarray(offset, offset + chunkSize), offset) // 1MB + + try { + // this request should fail since it exceeds the 1MB mark + await uploadChunk(body.subarray(offset, offset + chunkSize), offset) // 1.5MB + throw new Error('failed test') + } catch (e) { + assert.equal(e instanceof Error, true) + assert.equal( + e.message.includes('got 413 "Payload Too Large"'), + true, + `wrong message received "${e.message}"` + ) + } + }) + + 
it('should not allow uploading with fixed length more than the defined MaxFileSize using chunked encoding', async () => { + const body = Buffer.alloc(1024 * 1024 * 2) + const chunkSize = (1024 * 1024 * 2) / 4 + // purposely set this to 1MB even if we will try uploading 2MB via transfer-encoding: chunked + const uploadLength = 1024 * 1024 + + const res = await agent + .post(STORE_PATH) + .set('Tus-Resumable', TUS_RESUMABLE) + .set('Upload-Length', uploadLength.toString()) + .set('Upload-Metadata', TEST_METADATA) + .set('Tus-Resumable', TUS_RESUMABLE) + .expect(201) + + assert.equal('location' in res.headers, true) + assert.equal(res.headers['tus-resumable'], TUS_RESUMABLE) + + const uploadId = res.headers.location.split('/').pop() + const address = listener.address() as AddressInfo + // Options for the HTTP request. + // transfer-encoding doesn't seem to be supported by superagent + const options = { + hostname: 'localhost', + port: address.port, + path: `${STORE_PATH}/${uploadId}`, + method: 'PATCH', + headers: { + 'Tus-Resumable': TUS_RESUMABLE, + 'Upload-Offset': '0', + 'Content-Type': 'application/offset+octet-stream', + 'Transfer-Encoding': 'chunked', + }, + } + + const {res: patchResp, body: resBody} = await new Promise<{ + res: http.IncomingMessage + body: string + }>((resolve, reject) => { + const req = http.request(options, (res) => { + let body = '' + res.on('data', (chunk) => { + body += chunk.toString() + }) + res.on('end', () => { + resolve({res, body}) + }) + }) + + req.on('error', (e) => { + reject(e) + }) + + req.write(body.subarray(0, chunkSize)) + req.write(body.subarray(chunkSize, chunkSize * 2)) + req.write(body.subarray(chunkSize * 2, chunkSize * 3)) + req.end() + }) + + assert.equal(patchResp.statusCode, 413) + assert.equal(resBody, 'Maximum size exceeded\n') + }) + + it('should not allow uploading with deferred length more than the defined MaxFileSize', async () => { + const res = await agent + .post(STORE_PATH) + .set('Tus-Resumable', TUS_RESUMABLE) + .set('Upload-Defer-Length', '1') + .set('Upload-Metadata', TEST_METADATA) + .set('Tus-Resumable', TUS_RESUMABLE) + .expect(201) + + assert.equal('location' in res.headers, true) + assert.equal(res.headers['tus-resumable'], TUS_RESUMABLE) + + const uploadId = res.headers.location.split('/').pop() + const body = Buffer.alloc(1024 * 1024 * 2) + const chunkSize = (1024 * 1024 * 2) / 4 + + const uploadChunk = async (body: Buffer, offset = 0, uploadLength = 0) => { + const req = agent + .patch(`${STORE_PATH}/${uploadId}`) + .set('Tus-Resumable', TUS_RESUMABLE) + .set('Upload-Defer-Length', '1') + .set('Upload-Offset', offset.toString()) + .set('Content-Type', 'application/offset+octet-stream') + + if (uploadLength) { + req.set('Upload-Length', uploadLength.toString()) + } + + const res = await req + .send(body) + .expect(204) + .expect('Tus-Resumable', TUS_RESUMABLE) + return parseInt(res.headers['upload-offset'] || '0', 0) + } + + let offset = 0 + offset = await uploadChunk(body.subarray(offset, chunkSize)) // 500Kb + offset = await uploadChunk(body.subarray(offset, offset + chunkSize), offset) // 1MB + + try { + // this request should fail since it exceeds the 1MB mark + await uploadChunk(body.subarray(offset, offset + chunkSize), offset) // 1.5MB + throw new Error('failed test') + } catch (e) { + assert.equal(e instanceof Error, true) + assert.equal(e.message.includes('got 413 "Payload Too Large"'), true) + } + }) + + it('should not allow uploading with deferred length more than the defined MaxFileSize using chunked 
encoding', async () => { + const res = await agent + .post(STORE_PATH) + .set('Tus-Resumable', TUS_RESUMABLE) + .set('Upload-Defer-Length', '1') + .set('Upload-Metadata', TEST_METADATA) + .set('Tus-Resumable', TUS_RESUMABLE) + .expect(201) + + assert.equal('location' in res.headers, true) + assert.equal(res.headers['tus-resumable'], TUS_RESUMABLE) + + const uploadId = res.headers.location.split('/').pop() + const body = Buffer.alloc(1024 * 1024 * 2) + const chunkSize = (1024 * 1024 * 2) / 4 + + const address = listener.address() as AddressInfo + // Options for the HTTP request. + // transfer-encoding doesn't seem to be supported by superagent + const options = { + hostname: 'localhost', + port: address.port, + path: `${STORE_PATH}/${uploadId}`, + method: 'PATCH', + headers: { + 'Tus-Resumable': TUS_RESUMABLE, + 'Upload-Defer-Length': '1', + 'Upload-Offset': '0', + 'Content-Type': 'application/offset+octet-stream', + 'Transfer-Encoding': 'chunked', + }, + } + + const {res: patchResp, body: resBody} = await new Promise<{ + res: http.IncomingMessage + body: string + }>((resolve, reject) => { + const req = http.request(options, (res) => { + let body = '' + res.on('data', (chunk) => { + body += chunk.toString() + }) + res.on('end', () => { + resolve({res, body}) + }) + }) + + req.on('error', (e) => { + reject(e) + }) + + req.write(body.subarray(0, chunkSize)) + req.write(body.subarray(chunkSize, chunkSize * 2)) + req.write(body.subarray(chunkSize * 2, chunkSize * 3)) + req.end() + }) + + assert.equal(patchResp.statusCode, 413) + assert.equal(resBody, 'Maximum size exceeded\n') + }) + }) + describe('GCSStore', () => { let file_id: string let deferred_file_id: string From 093efd7da196c99c5d6fcbdb1223684be5fbcd04 Mon Sep 17 00:00:00 2001 From: Murderlon Date: Thu, 21 Dec 2023 10:59:01 +0100 Subject: [PATCH 8/8] @tus/server@1.2.0 --- packages/server/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/server/package.json b/packages/server/package.json index d0d2c95b..4060f62b 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@tus/server", - "version": "1.1.0", + "version": "1.2.0", "description": "Tus resumable upload protocol in Node.js", "main": "dist/index.js", "types": "dist/index.d.ts",
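
Taken together, patch 7/8 and the 1.2.0 release in patch 8/8 make `Tus-Max-Size` support available from `@tus/server`. The following is a minimal sketch, not part of the patch series itself, showing both forms of the new `maxSize` option as documented in the README and `types.ts` changes above; it assumes the public `@tus/server` and `@tus/file-store` entry points, and `getQuotaForUser` is a hypothetical placeholder for an application-specific lookup.

```ts
import {Server} from '@tus/server'
import {FileStore} from '@tus/file-store'

// Hypothetical per-user quota lookup; replace with a real database or config call.
async function getQuotaForUser(userId: string): Promise<number> {
  return userId === 'pro' ? 10 * 1024 * 1024 * 1024 : 100 * 1024 * 1024
}

// Fixed limit: every upload is capped at 1 GiB.
const fixedLimitServer = new Server({
  path: '/files',
  datastore: new FileStore({directory: './files'}),
  maxSize: 1024 * 1024 * 1024,
})

// Dynamic limit: the function receives the request and the upload id
// (null during OPTIONS, per the README addition) and may return a Promise.
const dynamicLimitServer = new Server({
  path: '/files',
  datastore: new FileStore({directory: './files'}),
  maxSize: async (req, uploadId) => {
    const user = req.headers['x-user-id']
    return user ? getQuotaForUser(String(user)) : 100 * 1024 * 1024
  },
})
```

With either configuration the server advertises the limit via `Tus-Max-Size` on OPTIONS, rejects creation requests whose `Upload-Length` exceeds it, and aborts PATCH bodies that grow past it with `413 Maximum size exceeded`.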
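
For completeness, a hedged sketch of how the new behaviour could be exercised end to end with supertest, mirroring the style of the e2e tests added in patch 7/8; the `/files` path, the 1 MiB limit, and the exact assertions are illustrative assumptions rather than part of the shipped test suite.

```ts
import request from 'supertest'
import {Server} from '@tus/server'
import {FileStore} from '@tus/file-store'

async function main() {
  const server = new Server({
    path: '/files',
    maxSize: 1024 * 1024, // 1 MiB, as in the e2e tests above
    datastore: new FileStore({directory: './files'}),
  })
  const listener = server.listen()

  // OPTIONS now advertises the configured limit via the Tus-Max-Size header.
  await request(listener)
    .options('/files')
    .set('Tus-Resumable', '1.0.0')
    .expect('Tus-Max-Size', String(1024 * 1024))
    .expect(204)

  // A creation request declaring a larger Upload-Length is rejected with 413.
  await request(listener)
    .post('/files')
    .set('Tus-Resumable', '1.0.0')
    .set('Upload-Length', String(2 * 1024 * 1024))
    .expect(413)

  listener.close()
}

main().catch(console.error)
```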