Make tests concurrency safe and faster (#568)
Murderlon authored Feb 5, 2024
1 parent 436b4a8 commit 0830d98
Showing 10 changed files with 59 additions and 38 deletions.
2 changes: 1 addition & 1 deletion package.json
@@ -13,7 +13,7 @@
"demo:s3": "yarn workspace demo start:s3",
"lint": "turbo run lint",
"format": "turbo run format",
"test": "turbo run test --concurrency 1",
"test": "turbo run test",
"release": "gh workflow run release",
"release:local": "turbo run build && changesets publish"
},
2 changes: 1 addition & 1 deletion packages/file-store/index.ts
@@ -48,7 +48,7 @@ export class FileStore extends DataStore {
* Ensure the directory exists.
*/
private checkOrCreateDirectory() {
- fs.mkdir(this.directory, MASK, (error) => {
+ fs.mkdir(this.directory, {mode: MASK, recursive: true}, (error) => {
if (error && error.code !== IGNORED_MKDIR_ERROR) {
throw error
}
8 changes: 5 additions & 3 deletions packages/file-store/test.ts
@@ -13,11 +13,13 @@ import {Upload} from '@tus/server'
import * as shared from '../../test/stores.test'

const fixturesPath = path.resolve('../', '../', 'test', 'fixtures')
- const storePath = path.resolve('../', '../', 'test', 'output')
+ const storePath = path.resolve('../', '../', 'test', 'output', 'file-store')

async function cleanup() {
- await fsProm.rm(storePath, {recursive: true})
- await fsProm.mkdir(storePath)
+ if (fs.existsSync(storePath)) {
+ await fsProm.rm(storePath, {recursive: true})
+ await fsProm.mkdir(storePath)
+ }
}

describe('FileStore', function () {
2 changes: 1 addition & 1 deletion packages/gcs-store/test.ts
@@ -7,7 +7,7 @@ import * as shared from '../../test/stores.test'
import {Storage} from '@google-cloud/storage'

const fixturesPath = path.resolve('../', '../', 'test', 'fixtures')
- const storePath = path.resolve('../', '../', 'test', 'output')
+ const storePath = path.resolve('../', '../', 'test', 'output', 'gcs-store')

describe('GCSStore', () => {
before(function () {
14 changes: 7 additions & 7 deletions packages/s3-store/test.ts
@@ -7,10 +7,10 @@ import sinon from 'sinon'

import {S3Store} from './'
import * as shared from '../../test/stores.test'
- import {Upload, Uid} from '@tus/server'
+ import {Upload} from '@tus/server'

const fixturesPath = path.resolve('../', '../', 'test', 'fixtures')
- const storePath = path.resolve('../', '../', 'test', 'output')
+ const storePath = path.resolve('../', '../', 'test', 'output', 's3-store')

describe('S3DataStore', function () {
before(function () {
@@ -59,7 +59,7 @@ describe('S3DataStore', function () {
const uploadIncompletePart = sinon.spy(store, 'uploadIncompletePart')
const uploadPart = sinon.spy(store, 'uploadPart')
const upload = new Upload({
- id: 'incomplete-part-test',
+ id: shared.testId('incomplete-part-test'),
size: size + incompleteSize,
offset: 0,
})
@@ -86,7 +86,7 @@ describe('S3DataStore', function () {
const size = 4096
const incompleteSize = 1024
const upload = new Upload({
- id: `incomplete-part-test-${Uid.rand()}`,
+ id: shared.testId('incomplete-part-test'),
size: size + incompleteSize,
offset: 0,
})
@@ -121,7 +121,7 @@ describe('S3DataStore', function () {
const uploadIncompletePart = sinon.spy(store, 'uploadIncompletePart')
const uploadPart = sinon.spy(store, 'uploadPart')
const upload = new Upload({
- id: 'incomplete-part-test-' + Uid.rand(),
+ id: shared.testId('incomplete-part-test'),
size: size + incompleteSize,
offset: 0,
})
@@ -158,7 +158,7 @@ describe('S3DataStore', function () {
const uploadIncompletePart = sinon.spy(store, 'uploadIncompletePart')
const uploadPart = sinon.spy(store, 'uploadPart')
const upload = new Upload({
- id: 'min-part-size-test',
+ id: shared.testId('min-part-size-test'),
size: size + size,
offset: 0,
})
@@ -186,7 +186,7 @@ describe('S3DataStore', function () {
const incompleteSize = 1024

const upload = new Upload({
- id: `get-incopmlete-part-size-test-${Uid.rand()}`,
+ id: shared.testId('get-incomplete-part-size-test'),
size: size + incompleteSize,
offset: 0,
})
22 changes: 13 additions & 9 deletions packages/server/test/Server.test.ts
@@ -17,10 +17,14 @@ import sinon from 'sinon'

// Test server crashes on http://{some-ip} so we remove the protocol...
const removeProtocol = (location: string) => location.slice(6)
+ const directory = path.resolve(__dirname, 'output', 'server')

describe('Server', () => {
+ before(async () => {
+ await fs.mkdir(directory, {recursive: true})
+ })
after(async () => {
- await fs.rm(path.resolve(__dirname, 'output'), {force: true, recursive: true})
+ await fs.rm(directory, {force: true, recursive: true})
})

describe('instantiation', () => {
@@ -121,7 +125,7 @@ describe('Server', () => {
before(() => {
server = new Server({
path: '/test/output',
- datastore: new FileStore({directory: './test/output'}),
+ datastore: new FileStore({directory}),
})
listener = server.listen()
})
@@ -263,7 +267,7 @@ describe('Server', () => {
it('should not invoke handlers if onIncomingRequest throws', (done) => {
const server = new Server({
path: '/test/output',
- datastore: new FileStore({directory: './test/output'}),
+ datastore: new FileStore({directory}),
async onIncomingRequest() {
throw {status_code: 403, body: 'Access denied'}
},
@@ -282,7 +286,7 @@
const route = '/test/output'
const server = new Server({
path: route,
- datastore: new FileStore({directory: './test/output'}),
+ datastore: new FileStore({directory}),
namingFunction() {
return `foo/bar/id`
},
@@ -330,7 +334,7 @@ describe('Server', () => {
beforeEach(() => {
server = new Server({
path: '/test/output',
- datastore: new FileStore({directory: './test/output'}),
+ datastore: new FileStore({directory}),
})
listener = server.listen()
})
@@ -408,7 +412,7 @@ describe('Server', () => {
it('should call onUploadCreate and return its error to the client', (done) => {
const server = new Server({
path: '/test/output',
- datastore: new FileStore({directory: './test/output'}),
+ datastore: new FileStore({directory}),
async onUploadCreate() {
throw {body: 'no', status_code: 500}
},
@@ -423,7 +427,7 @@
it('should call onUploadFinish and return its error to the client', (done) => {
const server = new Server({
path: '/test/output',
- datastore: new FileStore({directory: './test/output'}),
+ datastore: new FileStore({directory}),
onUploadFinish() {
throw {body: 'no', status_code: 500}
},
@@ -446,7 +450,7 @@
it('should call onUploadFinish and return its error to the client with creation-with-upload ', (done) => {
const server = new Server({
path: '/test/output',
- datastore: new FileStore({directory: './test/output'}),
+ datastore: new FileStore({directory}),
async onUploadFinish() {
throw {body: 'no', status_code: 500}
},
@@ -493,7 +497,7 @@ describe('Server', () => {
const spy = sinon.spy()
const server = new Server({
path: '/test/output',
- datastore: new FileStore({directory: './test/output'}),
+ datastore: new FileStore({directory}),
onResponseError: () => {
spy()
return {status_code: 404, body: 'custom-error'}
9 changes: 5 additions & 4 deletions test/e2e.test.ts
@@ -19,11 +19,11 @@ import {Agent} from 'http'
import {Buffer} from 'buffer'
import {AddressInfo} from 'net'

- const STORE_PATH = '/output'
+ const STORE_PATH = '/test'
const PROJECT_ID = 'tus-node-server'
const KEYFILE = '../keyfile.json'
const BUCKET = 'tus-node-server-ci'
- const FILES_DIRECTORY = path.resolve(STORE_PATH)
+ const FILES_DIRECTORY = path.resolve('./output/e2e')
const TEST_FILE_SIZE = '960244'
const TEST_FILE_PATH = path.resolve('fixtures', 'test.mp4')
const TEST_METADATA = 'filename d29ybGRfZG9taW5hdGlvbl9wbGFuLnBkZg==,is_confidential'
@@ -53,7 +53,8 @@ describe('EndToEnd', () => {
let file_id: string
let deferred_file_id: string

- before(() => {
+ before(async function () {
+ await fs.promises.mkdir(FILES_DIRECTORY, {recursive: true})
server = new Server({
path: STORE_PATH,
datastore: new FileStore({directory: `./${STORE_PATH}`}),
@@ -280,7 +281,7 @@ describe('EndToEnd', () => {
before(() => {
server = new Server({
path: STORE_PATH,
- datastore: new FileStore({directory: `./${STORE_PATH}`}),
+ datastore: new FileStore({directory: FILES_DIRECTORY}),
})
listener = server.listen()
agent = request.agent(listener)
3 changes: 2 additions & 1 deletion test/s3.e2e.ts
@@ -204,7 +204,8 @@ describe('S3 Store E2E', () => {
await new Promise((resolve) => listener.close(resolve))
})

- it('calling deleteExpired will delete all expired objects', async () => {
+ // TODO: refactor to mocked integration test instead of e2e
+ it.skip('calling deleteExpired will delete all expired objects', async () => {
const data = allocMB(11)
const {uploadId} = await createUpload(agent, data.length)
await patchUpload(agent, uploadId, data.subarray(0, 1024 * 1024 * 6))
27 changes: 17 additions & 10 deletions test/stores.test.ts
@@ -4,7 +4,14 @@ import fs from 'node:fs'
import stream from 'node:stream'
import {setTimeout as promSetTimeout} from 'node:timers/promises'

- import {Upload} from '@tus/server'
+ import {Upload, Uid} from '@tus/server'

+ // In CI we run multiple jobs in parallel,
+ // so we need to make sure that the IDs are unique.
+ export function testId(id: string) {
+ // eslint-disable-next-line turbo/no-undeclared-env-vars
+ return `${id}-${process.env.GITHUB_JOB ?? Uid.rand()}`
+ }

export const shouldHaveStoreMethods = function () {
describe('the class', () => {
@@ -23,13 +30,13 @@ export const shouldHaveStoreMethods = function () {
export const shouldCreateUploads = function () {
describe('create', () => {
const file = new Upload({
- id: 'create-test',
+ id: testId('create-test'),
size: 1000,
offset: 0,
metadata: {filename: 'world_domination_plan.pdf', is_confidential: null},
})
const file_defered = new Upload({
- id: 'create-test-deferred',
+ id: testId('create-test-deferred'),
offset: 0,
})

@@ -76,7 +83,7 @@ export const shouldExpireUploads = function () {

it('should expire upload', async function () {
const file = new Upload({
- id: 'expiration-test',
+ id: testId('expiration-test'),
size: this.testFileSize,
offset: 0,
metadata: {filename: 'world_domination_plan.pdf', is_confidential: null},
@@ -94,7 +101,7 @@
}

export const shouldRemoveUploads = function () {
- const file = new Upload({id: 'remove-test', size: 1000, offset: 0})
+ const file = new Upload({id: testId('remove-test'), size: 1000, offset: 0})

describe('remove (termination extension)', () => {
it("should report 'termination' extension", function () {
@@ -112,7 +119,7 @@

it('should delete the file during upload', async function () {
const file = new Upload({
- id: 'termination-test',
+ id: testId('termination-test'),
size: this.testFileSize,
offset: 0,
metadata: {filename: 'terminate_during_upload.pdf', is_confidential: null},
@@ -157,7 +164,7 @@ export const shouldWriteUploads = function () {

it('should write a stream and resolve the new offset', async function () {
const file = new Upload({
- id: 'write-test',
+ id: testId('write-test'),
size: this.testFileSize,
offset: 0,
metadata: {filename: 'world_domination_plan.pdf', is_confidential: null},
@@ -170,7 +177,7 @@

it('should reject when stream is destroyed', async function () {
const file = new Upload({
- id: 'write-test-reject',
+ id: testId('write-test-reject'),
size: this.testFileSize,
offset: 0,
metadata: {filename: 'world_domination_plan.pdf', is_confidential: null},
@@ -196,7 +203,7 @@ export const shouldHandleOffset = function () {

it('should resolve the stats for existing files', async function () {
const file = new Upload({
- id: 'offset-test',
+ id: testId('offset-test'),
size: this.testFileSize,
offset: 0,
metadata: {filename: 'world_domination_plan.pdf', is_confidential: null},
@@ -222,7 +229,7 @@ export const shouldDeclareUploadLength = function () {

it('should update upload_length after declaring upload length', async function () {
const file = new Upload({
- id: 'declare-length-test',
+ id: testId('declare-length-test'),
offset: 0,
metadata: {filename: 'world_domination_plan.pdf', is_confidential: null},
})
8 changes: 7 additions & 1 deletion turbo.json
@@ -7,7 +7,13 @@
},
"test": {
"dependsOn": ["build"],
"env": ["AWS_BUCKET", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_REGION"],
"env": [
"AWS_BUCKET",
"AWS_ACCESS_KEY_ID",
"AWS_SECRET_ACCESS_KEY",
"AWS_REGION",
"GITHUB_JOB"
],
"outputs": []
},
"lint": {
