From 1d39eafd2cf59202749b0a92fc25362633bdd2fe Mon Sep 17 00:00:00 2001
From: Skyler Calaman <54462713+Blckbrry-Pi@users.noreply.github.com>
Date: Fri, 29 Mar 2024 20:02:35 -0400
Subject: [PATCH] feat: Create `uploads` module

---
 modules/uploads/config.ts                          |  19 +++
 .../migration.sql                                  |  27 ++++
 modules/uploads/db/migrations/migration_lock.toml  |   3 +
 modules/uploads/db/schema.prisma                   |  32 +++++
 modules/uploads/module.yaml                        |  43 ++++++
 modules/uploads/scripts/complete.ts                |  77 ++++++++++
 modules/uploads/scripts/delete.ts                  |  78 ++++++++++
 modules/uploads/scripts/get.ts                     |  49 +++++++
 modules/uploads/scripts/get_file_links.ts          |  72 ++++++++++
 modules/uploads/scripts/list_for_user.ts           |  38 +++++
 modules/uploads/scripts/prepare.ts                 | 134 ++++++++++++++++++
 modules/uploads/tests/e2e.ts                       |  95 +++++++++++++
 modules/uploads/utils/bucket.ts                    | 112 +++++++++++++++
 modules/uploads/utils/types.ts                     |  82 +++++++++++
 tests/basic/backend.yaml                           |   7 +
 15 files changed, 868 insertions(+)
 create mode 100644 modules/uploads/config.ts
 create mode 100644 modules/uploads/db/migrations/20240328220608_create_uploads_module/migration.sql
 create mode 100644 modules/uploads/db/migrations/migration_lock.toml
 create mode 100644 modules/uploads/db/schema.prisma
 create mode 100644 modules/uploads/module.yaml
 create mode 100644 modules/uploads/scripts/complete.ts
 create mode 100644 modules/uploads/scripts/delete.ts
 create mode 100644 modules/uploads/scripts/get.ts
 create mode 100644 modules/uploads/scripts/get_file_links.ts
 create mode 100644 modules/uploads/scripts/list_for_user.ts
 create mode 100644 modules/uploads/scripts/prepare.ts
 create mode 100644 modules/uploads/tests/e2e.ts
 create mode 100644 modules/uploads/utils/bucket.ts
 create mode 100644 modules/uploads/utils/types.ts

diff --git a/modules/uploads/config.ts b/modules/uploads/config.ts
new file mode 100644
index 00000000..b476b29e
--- /dev/null
+++ b/modules/uploads/config.ts
@@ -0,0 +1,19 @@
+export interface Config {
+  maxUploadSize: UploadSize;
+  maxFilesPerUpload: number;
+  s3: S3Config;
+}
+
+type Units = "b" | "kb" | "mb" | "gb" | "tb" | "kib" | "mib" | "gib" | "tib";
+
+export type UploadSize = {
+  [unit in Units]: Record<unit, number>;
+}[Units];
+
+export interface S3Config {
+  bucket: string;
+  region: string;
+  accessKeyId: string;
+  secretAccessKey: string;
+  endpoint: string;
+}
diff --git a/modules/uploads/db/migrations/20240328220608_create_uploads_module/migration.sql b/modules/uploads/db/migrations/20240328220608_create_uploads_module/migration.sql
new file mode 100644
index 00000000..a70a9a1b
--- /dev/null
+++ b/modules/uploads/db/migrations/20240328220608_create_uploads_module/migration.sql
@@ -0,0 +1,27 @@
+-- CreateTable
+CREATE TABLE "Upload" (
+    "id" UUID NOT NULL,
+    "userId" UUID,
+    "bucket" TEXT NOT NULL,
+    "contentLength" BIGINT NOT NULL,
+    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    "updatedAt" TIMESTAMP(3) NOT NULL,
+    "completedAt" TIMESTAMP(3),
+    "deletedAt" TIMESTAMP(3),
+
+    CONSTRAINT "Upload_pkey" PRIMARY KEY ("id")
+);
+
+-- CreateTable
+CREATE TABLE "Files" (
+    "path" TEXT NOT NULL,
+    "mime" TEXT,
+    "contentLength" BIGINT NOT NULL,
+    "nsfwScoreThreshold" DOUBLE PRECISION,
+    "uploadId" UUID NOT NULL,
+
+    CONSTRAINT "Files_pkey" PRIMARY KEY ("uploadId","path")
+);
+
+-- AddForeignKey
+ALTER TABLE "Files" ADD CONSTRAINT "Files_uploadId_fkey" FOREIGN KEY ("uploadId") REFERENCES "Upload"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
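
Note: `UploadSize` resolves to a union of single-key records, so a valid config names exactly one unit. A minimal sketch of what this module's config block deserializes to; every concrete value below is hypothetical, not a real credential:

```ts
import { Config, UploadSize } from "./modules/uploads/config.ts";

// Exactly one unit key is allowed per the mapped-type union.
const maxUploadSize: UploadSize = { gb: 10 }; // { mib: 512 } or { b: 1024 } are equally valid

const exampleConfig: Config = {
  maxUploadSize,
  maxFilesPerUpload: 16,
  s3: {
    bucket: "uploads-dev", // placeholder values only
    region: "us-east-1",
    accessKeyId: "AKIA................",
    secretAccessKey: "<secret>",
    endpoint: "https://s3.us-east-1.amazonaws.com",
  },
};
```
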
diff --git a/modules/uploads/db/migrations/migration_lock.toml b/modules/uploads/db/migrations/migration_lock.toml
new file mode 100644
index 00000000..fbffa92c
--- /dev/null
+++ b/modules/uploads/db/migrations/migration_lock.toml
@@ -0,0 +1,3 @@
+# Please do not edit this file manually
+# It should be added in your version-control system (i.e. Git)
+provider = "postgresql"
\ No newline at end of file
diff --git a/modules/uploads/db/schema.prisma b/modules/uploads/db/schema.prisma
new file mode 100644
index 00000000..3acd3dc8
--- /dev/null
+++ b/modules/uploads/db/schema.prisma
@@ -0,0 +1,32 @@
+// Do not modify this `datasource` block
+datasource db {
+  provider = "postgresql"
+  url      = env("DATABASE_URL")
+}
+
+model Upload {
+  id     String  @id @default(uuid()) @db.Uuid
+  userId String? @db.Uuid
+
+  bucket        String
+  contentLength BigInt
+
+  createdAt   DateTime  @default(now())
+  updatedAt   DateTime  @updatedAt
+  completedAt DateTime?
+  deletedAt   DateTime?
+
+  files Files[] @relation("Files")
+}
+
+model Files {
+  path               String
+  mime               String?
+  contentLength      BigInt
+  nsfwScoreThreshold Float?
+
+  uploadId String @db.Uuid
+  upload   Upload @relation("Files", fields: [uploadId], references: [id])
+
+  @@id([uploadId, path])
+}
diff --git a/modules/uploads/module.yaml b/modules/uploads/module.yaml
new file mode 100644
index 00000000..0b1bcf03
--- /dev/null
+++ b/modules/uploads/module.yaml
@@ -0,0 +1,43 @@
+scripts:
+  prepare:
+    name: Prepare Upload
+    description: Prepare an upload batch for data transfer
+  complete:
+    name: Complete Upload
+    description: Alert the module that the upload has been completed
+  get:
+    name: Get Upload Metadata
+    description: Get the metadata (including contained files) for specified upload IDs
+  get_file_links:
+    name: Get File Links
+    description: Get presigned download links for each of the specified files
+  list_for_user:
+    name: List Uploads for Users
+    description: Get a list of upload IDs associated with the specified user IDs
+  delete:
+    name: Delete Upload
+    description: Remove the upload and delete the files from the bucket
+errors:
+  no_files:
+    name: No Files Provided
+    description: An upload must have at least 1 file
+  too_many_files:
+    name: Too Many Files Provided
+    description: There is a limit to how many files can be put into a single upload (see config)
+  duplicate_paths:
+    name: Duplicate Paths Provided
+    description: An upload cannot contain 2 files with the same paths (see `cause` for offending paths)
+  size_limit_exceeded:
+    name: Combined Size Limit Exceeded
+    description: There is a maximum total size per upload (see config)
+  upload_not_found:
+    name: Upload Not Found
+    description: The provided upload ID didn't match any known existing uploads
+  upload_already_completed:
+    name: Upload Already Completed
+    description: \`complete\` was already called on this upload
+  failed_to_delete:
+    name: Failed to Delete Files
+    description: Some or all of the files in the upload couldn't be deleted from the bucket (see `cause` for the failing paths)
+dependencies:
+  users: {}
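
Note: the error codes registered above are the strings the scripts below pass to `new RuntimeError(...)`. A sketch of caller-side branching on a code; the `.code` property is an assumption about how the generated runtime surfaces it, not a documented field, so check `_gen` before relying on it:

```ts
// Hypothetical caller-side handling of a registered error code.
try {
  await ctx.modules.uploads.prepare({ files: [] });
} catch (err) {
  // `err.code` is assumed here; adjust to the generated RuntimeError's shape.
  if (err instanceof RuntimeError && err.code === "no_files") {
    // Expected validation failure: surface a 400-style message to the client.
  } else {
    throw err; // Anything else is unexpected; let it propagate.
  }
}
```
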
diff --git a/modules/uploads/scripts/complete.ts b/modules/uploads/scripts/complete.ts
new file mode 100644
index 00000000..32ae856c
--- /dev/null
+++ b/modules/uploads/scripts/complete.ts
@@ -0,0 +1,77 @@
+import { RuntimeError, ScriptContext } from "../_gen/scripts/complete.ts";
+import { prismaToOutput } from "../utils/types.ts";
+import { Upload } from "../utils/types.ts";
+
+export interface Request {
+  uploadId: string;
+}
+
+export interface Response {
+  upload: Upload;
+}
+
+export async function run(
+  ctx: ScriptContext,
+  req: Request,
+): Promise<Response> {
+  const newUpload = await ctx.db.$transaction(async (db) => {
+    // Find the upload by ID
+    const upload = await db.upload.findFirst({
+      where: {
+        id: req.uploadId,
+      },
+      select: {
+        id: true,
+        userId: true,
+        bucket: true,
+        contentLength: true,
+        files: true,
+        createdAt: true,
+        updatedAt: true,
+        completedAt: true,
+      },
+    });
+
+    // Error if the upload wasn't prepared
+    if (!upload) {
+      throw new RuntimeError(
+        "upload_not_found",
+        { cause: `Upload with ID ${req.uploadId} not found` },
+      );
+    }
+
+    // Error if `complete` was already called with this ID
+    if (upload.completedAt !== null) {
+      throw new RuntimeError(
+        "upload_already_completed",
+        { cause: `Upload with ID ${req.uploadId} has already been completed` },
+      );
+    }
+
+    // Update the upload to mark it as completed
+    const completedUpload = await db.upload.update({
+      where: {
+        id: req.uploadId,
+      },
+      data: {
+        completedAt: new Date().toISOString(),
+      },
+      select: {
+        id: true,
+        userId: true,
+        bucket: true,
+        contentLength: true,
+        files: true,
+        createdAt: true,
+        updatedAt: true,
+        completedAt: true,
+      },
+    });
+
+    return completedUpload;
+  });
+
+  return {
+    upload: prismaToOutput(newUpload),
+  };
+}
diff --git a/modules/uploads/scripts/delete.ts b/modules/uploads/scripts/delete.ts
new file mode 100644
index 00000000..d94a6d2d
--- /dev/null
+++ b/modules/uploads/scripts/delete.ts
@@ -0,0 +1,78 @@
+import { RuntimeError, ScriptContext } from "../_gen/scripts/delete.ts";
+import { getKey } from "../utils/types.ts";
+import { deleteKeys } from "../utils/bucket.ts";
+
+export interface Request {
+  uploadId: string;
+}
+
+export interface Response {
+  bytesDeleted: string;
+}
+
+export async function run(
+  ctx: ScriptContext,
+  req: Request,
+): Promise<Response> {
+  const bytesDeleted = await ctx.db.$transaction(async (db) => {
+    const upload = await db.upload.findFirst({
+      where: {
+        id: req.uploadId,
+        completedAt: { not: null },
+        deletedAt: null,
+      },
+      select: {
+        id: true,
+        userId: true,
+        bucket: true,
+        contentLength: true,
+        files: true,
+        createdAt: true,
+        updatedAt: true,
+        completedAt: true,
+      },
+    });
+    if (!upload) {
+      throw new RuntimeError(
+        "upload_not_found",
+        {
+          cause: `Upload with ID ${req.uploadId} not found`,
+          meta: { modified: false },
+        },
+      );
+    }
+
+    const filesToDelete = upload.files.map((file) =>
+      getKey(file.uploadId, file.path)
+    );
+    const deleteResults = await deleteKeys(ctx.userConfig.s3, filesToDelete);
+
+    const failures = upload.files
+      .map((file, i) => [file, deleteResults[i]] as const)
+      .filter(([, successfullyDeleted]) => !successfullyDeleted)
+      .map(([file]) => file);
+
+    if (failures.length) {
+      const failedPaths = JSON.stringify(failures.map((file) => file.path));
+      throw new RuntimeError(
+        "failed_to_delete",
+        {
+          cause: `Failed to delete files with paths ${failedPaths}`,
+          meta: { modified: failures.length !== filesToDelete.length },
+        },
+      );
+    }
+
+    await db.upload.update({
+      where: {
+        id: req.uploadId,
+      },
+      data: {
+        deletedAt: new Date().toISOString(),
+      },
+    });
+
+    return upload.contentLength.toString();
+  });
+  return { bytesDeleted };
+}
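
Note: `bytesDeleted`, like every `contentLength` in this module, crosses the script boundary as a decimal string, because the column is a Postgres `BIGINT` and JavaScript `BigInt` values are not JSON-serializable. Callers comparing sizes should convert explicitly; a small sketch (`uploadId` and `expectedBytes` are hypothetical):

```ts
const { bytesDeleted } = await ctx.modules.uploads.delete({ uploadId });

// Convert back to BigInt before comparing; comparing strings would be lexicographic.
if (BigInt(bytesDeleted) !== expectedBytes) {
  throw new Error(`expected ${expectedBytes} bytes deleted, got ${bytesDeleted}`);
}
```
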
diff --git a/modules/uploads/scripts/get.ts b/modules/uploads/scripts/get.ts
new file mode 100644
index 00000000..8b791486
--- /dev/null
+++ b/modules/uploads/scripts/get.ts
@@ -0,0 +1,49 @@
+import { ScriptContext } from "../_gen/scripts/get.ts";
+import { prismaToOutput } from "../utils/types.ts";
+import { Upload } from "../utils/types.ts";
+
+export interface Request {
+  uploadIds: string[];
+}
+
+export interface Response {
+  uploads: (Upload | null)[];
+}
+
+export async function run(
+  ctx: ScriptContext,
+  req: Request,
+): Promise<Response> {
+  // Find uploads that match the IDs in the request
+  const dbUploads = await ctx.db.upload.findMany({
+    where: {
+      id: {
+        in: req.uploadIds,
+      },
+      completedAt: { not: null },
+      deletedAt: null,
+    },
+    select: {
+      id: true,
+      userId: true,
+      bucket: true,
+      contentLength: true,
+      files: true,
+      createdAt: true,
+      updatedAt: true,
+      completedAt: true,
+    },
+  });
+
+  // Create a map of uploads by ID
+  const uploadMap = new Map(dbUploads.map((upload) => [upload.id, upload]));
+
+  // Reorder uploads to match the order of the request
+  const uploads = req.uploadIds.map((uploadId) => {
+    const upload = uploadMap.get(uploadId);
+    // If the upload wasn't found, return null
+    return upload ? prismaToOutput(upload) : null;
+  });
+
+  return { uploads };
+}
diff --git a/modules/uploads/scripts/get_file_links.ts b/modules/uploads/scripts/get_file_links.ts
new file mode 100644
index 00000000..df215836
--- /dev/null
+++ b/modules/uploads/scripts/get_file_links.ts
@@ -0,0 +1,72 @@
+import { ScriptContext } from "../_gen/scripts/get_file_links.ts";
+import { getKey, UploadFile } from "../utils/types.ts";
+import { getPresignedGetUrl } from "../utils/bucket.ts";
+
+export interface Request {
+  files: { uploadId: string; path: string }[];
+  validSecs?: number;
+}
+
+export interface Response {
+  files: (UploadFile & { uploadId: string; url: string })[];
+}
+
+export async function run(
+  ctx: ScriptContext,
+  req: Request,
+): Promise<Response> {
+  const dbFiles = await ctx.db.files.findMany({
+    where: {
+      uploadId: {
+        in: req.files.map((file) => file.uploadId),
+      },
+      path: {
+        in: req.files.map((file) => file.path),
+      },
+      upload: {
+        completedAt: { not: null },
+        deletedAt: null,
+      },
+    },
+    select: {
+      uploadId: true,
+      path: true,
+      contentLength: true,
+      mime: true,
+    },
+  });
+
+  const keys = new Set(
+    req.files.map((file) => getKey(file.uploadId, file.path)),
+  );
+  const map = new Map(
+    dbFiles.map((file) => [getKey(file.uploadId, file.path), file]),
+  );
+  for (const [mapKey] of map) {
+    // Remove any keys that don't have a corresponding file
+    if (!keys.has(mapKey)) map.delete(mapKey);
+  }
+
+  // Create presigned URLs that can be accessed using a simple GET request
+  const formattedDownloadPromises = Array.from(map)
+    .map(([key, file]) => ({
+      ...file,
+      url: getPresignedGetUrl(
+        ctx.userConfig.s3,
+        key,
+        req.validSecs ?? 60 * 60,
+      ),
+    }))
+    .map(async (file) => ({
+      ...file,
+      contentLength: file.contentLength.toString(),
+      url: await file.url,
+    }));
+
+  // Wait for all presigned URLs to be created
+  const formattedUploads = await Promise.all(formattedDownloadPromises);
+
+  return {
+    files: formattedUploads,
+  };
+}
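
Note: the `findMany` filter above matches `uploadId` and `path` independently, so it can over-fetch rows for crossed (uploadId, path) combinations; the `Set` of requested keys is what trims the result back to exactly the requested pairs. Caller-side usage looks like this (the upload ID and path are hypothetical):

```ts
const { files } = await ctx.modules.uploads.getFileLinks({
  files: [{ uploadId: someUploadId, path: "avatar.png" }],
  validSecs: 60 * 60, // same as the default applied when omitted
});

// Each entry carries a presigned GET URL valid for `validSecs` seconds.
const res = await fetch(files[0].url);
const bytes = new Uint8Array(await res.arrayBuffer());
```
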
diff --git a/modules/uploads/scripts/list_for_user.ts b/modules/uploads/scripts/list_for_user.ts
new file mode 100644
index 00000000..8ef33985
--- /dev/null
+++ b/modules/uploads/scripts/list_for_user.ts
@@ -0,0 +1,38 @@
+import { ScriptContext } from "../_gen/scripts/list_for_user.ts";
+
+export interface Request {
+  userIds: string[];
+}
+
+export interface Response {
+  uploadIds: Record<string, string[]>;
+}
+
+export async function run(
+  ctx: ScriptContext,
+  req: Request,
+): Promise<Response> {
+  // Find uploads that match the IDs in the request
+  const dbUploads = await ctx.db.upload.findMany({
+    where: {
+      userId: {
+        in: req.userIds,
+      },
+      completedAt: { not: null },
+      deletedAt: null,
+    },
+    select: {
+      id: true,
+      userId: true,
+    },
+  });
+
+  // Map each userId to an array of upload IDs with that associated user ID
+  // TODO: There may be a more efficient way to do this? Not sure.
+  const uploadIds = Object.fromEntries(req.userIds.map((userId) => {
+    const uploads = dbUploads.filter((upload) => upload.userId === userId);
+    return [userId, uploads.map((upload) => upload.id)] as const;
+  }));
+
+  return { uploadIds };
+}
diff --git a/modules/uploads/scripts/prepare.ts b/modules/uploads/scripts/prepare.ts
new file mode 100644
index 00000000..1bfe2b9d
--- /dev/null
+++ b/modules/uploads/scripts/prepare.ts
@@ -0,0 +1,134 @@
+import { RuntimeError, ScriptContext } from "../_gen/scripts/prepare.ts";
+import {
+  getMaxBytes,
+  PresignedUpload,
+  prismaToOutput,
+  UploadFile,
+} from "../utils/types.ts";
+import { getPresignedPutUrl } from "../utils/bucket.ts";
+
+export interface Request {
+  userId?: string;
+  files: UploadFile[];
+}
+
+export interface Response {
+  upload: PresignedUpload;
+}
+
+export async function run(
+  ctx: ScriptContext,
+  req: Request,
+): Promise<Response> {
+  // If there is a userId, ensure the user exists
+  if (req.userId) {
+    await ctx.modules.users.getUser({
+      userIds: [req.userId],
+    });
+  }
+
+  // Ensure there are files in the upload
+  if (req.files.length === 0) {
+    throw new RuntimeError("no_files");
+  }
+
+  // Ensure the number of files is within the limit
+  if (req.files.length > ctx.userConfig.maxFilesPerUpload) {
+    throw new RuntimeError(
+      "too_many_files",
+      {
+        cause: `Max files per upload is ${ctx.userConfig.maxFilesPerUpload}` +
+          ` (requested upload of ${req.files.length} files)`,
+      },
+    );
+  }
+
+  // Ensure there are no duplicate paths
+  const paths = new Set<string>();
+  const duplicates = new Set<string>();
+  for (const file of req.files) {
+    if (paths.has(file.path)) {
+      duplicates.add(file.path);
+    }
+    paths.add(file.path);
+  }
+  if (duplicates.size > 0) {
+    const duplicateString = Array.from(duplicates).map((path) =>
+      JSON.stringify(path)
+    ).join(", ");
+    throw new RuntimeError("duplicate_paths", {
+      cause: `Multiple files had paths ${duplicateString}`,
+    });
+  }
+
+  // Ensure the total content length is within the limit
+  const uploadContentLength = req.files.reduce(
+    (acc, file) => acc + BigInt(file.contentLength),
+    0n,
+  );
+  if (uploadContentLength > getMaxBytes(ctx.userConfig.maxUploadSize)) {
+    throw new RuntimeError("size_limit_exceeded");
+  }
+
+  // Format the input files for prisma
+  const inputFiles = req.files.map((file) => ({
+    path: file.path,
+    mime: file.mime,
+    contentLength: BigInt(file.contentLength),
+  }));
+
+  // Create the upload in the database
+  const upload = await ctx.db.upload.create({
+    data: {
+      userId: req.userId,
+      bucket: ctx.userConfig.s3.bucket,
+      contentLength: uploadContentLength,
+      files: {
+        create: inputFiles,
+      },
+    },
+    select: {
+      id: true,
+      userId: true,
+      bucket: true,
+      contentLength: true,
+
+      files: true,
+
+      createdAt: true,
+      updatedAt: true,
+      completedAt: true,
+    },
+  });
+
+  // Send requests to create presigned URLs for each file
+  const formattedUploadPromises = upload.files.map((file) =>
+    getPresignedPutUrl(
+      ctx.userConfig.s3,
+      upload.id,
+      {
+        path: file.path,
+        contentLength: file.contentLength.toString(),
+        mime: file.mime,
+      },
+    )
+  );
+
+  // Wait for all presigned URLs to be created
+  const formattedUploads = await Promise.all(formattedUploadPromises);
+
+  // Return the upload
+  const presignedFiles = formattedUploads.map((formattedUpload, i) => ({
+    ...upload.files[i],
+    contentLength: upload.files[i].contentLength.toString(),
+
+    presignedUrl: formattedUpload.url,
+  }));
+
+  return {
+    upload: {
+      ...prismaToOutput(upload),
+      files: presignedFiles,
+    },
+  };
+}
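
Note: putting `prepare` and `complete` together, the intended client flow is the same one the e2e test below exercises: register the metadata, PUT each file's bytes to its presigned URL, then mark the batch complete. A condensed sketch of the happy path:

```ts
const data = new TextEncoder().encode("hello, bucket");

// 1. Register the upload and receive presigned PUT URLs.
const { upload } = await ctx.modules.uploads.prepare({
  files: [{
    path: "hello.txt",
    mime: "text/plain",
    contentLength: data.byteLength.toString(), // contentLength travels as a string
  }],
});

// 2. Transfer the bytes directly to the bucket.
const putRes = await fetch(upload.files[0].presignedUrl, { method: "PUT", body: data });
if (!putRes.ok) throw new Error(`PUT failed with status ${putRes.status}`);

// 3. Mark the upload complete so `get` and `get_file_links` can see it.
await ctx.modules.uploads.complete({ uploadId: upload.id });
```
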
diff --git a/modules/uploads/tests/e2e.ts b/modules/uploads/tests/e2e.ts
new file mode 100644
index 00000000..a24b433b
--- /dev/null
+++ b/modules/uploads/tests/e2e.ts
@@ -0,0 +1,95 @@
+import { test, TestContext } from "../_gen/test.ts";
+import {
+  assert,
+  assertEquals,
+  assertRejects,
+} from "https://deno.land/std@0.220.0/assert/mod.ts";
+import { faker } from "https://deno.land/x/deno_faker@v1.0.3/mod.ts";
+
+test("e2e", async (ctx: TestContext) => {
+  const path = faker.system.fileName();
+  const contentLength = faker.random.number(100);
+  const mime = faker.system.mimeType();
+
+  const fileData = crypto.getRandomValues(new Uint8Array(contentLength));
+
+  // Tell the module the metadata about the upload.
+  const { upload: presigned } = await ctx.modules.uploads.prepare({
+    files: [
+      { path, contentLength, mime },
+    ],
+  });
+
+  // Upload the data using the presigned URLs returned
+  const uploadPutReq = await fetch(
+    presigned.files[0].presignedUrl,
+    {
+      method: "PUT",
+      body: fileData,
+    },
+  );
+  assert(uploadPutReq.ok);
+
+  // Tell the module that the upload has been completed.
+  const { upload: completed } = await ctx.modules.uploads.complete({
+    uploadId: presigned.id,
+  });
+
+  // Ensure the presigned and completed uploads are the same, except for
+  // expected timestamp differences
+  assertEquals({
+    ...presigned,
+    files: presigned.files.map((file) => ({
+      path: file.path,
+      contentLength: file.contentLength,
+      mime: file.mime,
+    })),
+    completedAt: null,
+    updatedAt: null,
+  }, {
+    ...completed,
+    files: completed.files.map((file) => ({
+      path: file.path,
+      contentLength: file.contentLength,
+      mime: file.mime,
+    })),
+    completedAt: null,
+    updatedAt: null,
+  });
+
+  // Look up the completed upload
+  const { uploads: [retrieved] } = await ctx.modules.uploads.get({
+    uploadIds: [completed.id],
+  });
+  assertEquals(completed, retrieved);
+
+  // Get presigned URLs to download the files from
+  const { files: [{ url: fileDownloadUrl }] } = await ctx.modules.uploads
+    .getFileLinks({
+      files: [{ uploadId: completed.id, path: path }],
+    });
+
+  // Download the files, and make sure the data matches
+  const fileDownloadReq = await fetch(fileDownloadUrl);
+  const fileDownloadData = new Uint8Array(await fileDownloadReq.arrayBuffer());
+  assertEquals(fileData, fileDownloadData);
+
+  // Delete the file and assert that the amount of bytes deleted matches
+  // what's expected
+  const { bytesDeleted } = await ctx.modules.uploads.delete({
+    uploadId: completed.id,
+  });
+  assertEquals(bytesDeleted, completed.contentLength);
+  assertEquals(bytesDeleted, presigned.contentLength);
+  assertEquals(bytesDeleted, retrieved?.contentLength);
+  assertEquals(parseInt(bytesDeleted), fileData.byteLength);
+
+  // Check that the upload can't still be retrieved
+  const { uploads: [retrievedAfterDeleted] } = await ctx.modules.uploads.get({
+    uploadIds: [completed.id],
+  });
+  assertEquals(retrievedAfterDeleted, null);
+
+  const fileDownloadReqAfterDelete = await fetch(fileDownloadUrl);
+  assert(!fileDownloadReqAfterDelete.ok);
+});
diff --git a/modules/uploads/utils/bucket.ts b/modules/uploads/utils/bucket.ts
new file mode 100644
index 00000000..a1d457e2
--- /dev/null
+++ b/modules/uploads/utils/bucket.ts
@@ -0,0 +1,112 @@
+import {
+  DeleteObjectCommand,
+  DeleteObjectsCommand,
+  GetObjectCommand,
+  PutObjectCommand,
+  S3Client,
+} from "npm:@aws-sdk/client-s3";
+import { getSignedUrl } from "npm:@aws-sdk/s3-request-presigner";
+
+import { S3Config } from "../config.ts";
+import { getKey, UploadFile } from "./types.ts";
+
+export const getClient = (config: S3Config) =>
+  new S3Client({
+    region: config.region,
+    credentials: {
+      accessKeyId: config.accessKeyId,
+      secretAccessKey: config.secretAccessKey,
+    },
+    endpoint: config.endpoint,
+    defaultUserAgentProvider: () =>
+      Promise.resolve([
+        ["opengb/uploads", "0.1.0"],
+      ]),
+  });
+
+export const getPresignedPutUrl = async (
+  config: S3Config,
+  uploadId: string,
+  file: UploadFile,
+  expirySeconds = 60 * 60,
+) => {
+  const client = getClient(config);
+
+  const key = getKey(uploadId, file.path);
+  const command = new PutObjectCommand({
+    Bucket: config.bucket,
+    Key: key,
+    ContentType: file.mime ?? undefined,
+    ContentLength: parseInt(file.contentLength),
+  });
+  const url = await getSignedUrl(
+    client,
+    command,
+    { expiresIn: expirySeconds },
+  );
+
+  return {
+    url,
+    key,
+  };
+};
+
+export const getPresignedGetUrl = async (
+  config: S3Config,
+  key: string,
+  expirySeconds = 60 * 60,
+) => {
+  const client = getClient(config);
+
+  const command = new GetObjectCommand({
+    Bucket: config.bucket,
+    Key: key,
+  });
+  const url = await getSignedUrl(
+    client,
+    command,
+    { expiresIn: expirySeconds },
+  );
+
+  return url;
+};
+
+export const deleteKey = async (
+  config: S3Config,
+  key: string,
+): Promise<boolean> => {
+  const client = getClient(config);
+
+  const command = new DeleteObjectCommand({
+    Bucket: config.bucket,
+    Key: key,
+  });
+
+  const response = await client.send(command);
+  return response.DeleteMarker ?? false;
+};
+
+export const deleteKeys = async (
+  config: S3Config,
+  keys: string[],
+): Promise<boolean[]> => {
+  const client = getClient(config);
+
+  const command = new DeleteObjectsCommand({
+    Bucket: config.bucket,
+    Delete: {
+      Objects: keys.map((key) => ({ Key: key })),
+    },
+  });
+
+  const response = await client.send(command);
+  if (response.Deleted) {
+    const deletedKeys = response.Deleted.flatMap((obj) =>
+      obj.Key ? [obj.Key] : []
+    );
+    const keySet = new Set(deletedKeys);
+    return keys.map((key) => keySet.has(key));
+  } else {
+    return keys.map(() => false);
+  }
+};
diff --git a/modules/uploads/utils/types.ts b/modules/uploads/utils/types.ts
new file mode 100644
index 00000000..a945a51c
--- /dev/null
+++ b/modules/uploads/utils/types.ts
@@ -0,0 +1,82 @@
+import {
+  Files as PrismaFiles,
+  Upload as _PrismaUpload,
+} from "../_gen/prisma/default.d.ts";
+import { UploadSize } from "../config.ts";
+
+interface PrismaUpload extends Omit<_PrismaUpload, "deletedAt"> {
+  files: PrismaFiles[];
+}
+
+export interface Upload {
+  id: string;
+  userId: string | null;
+
+  bucket: string;
+  contentLength: string;
+
+  files: UploadFile[];
+
+  createdAt: string;
+  updatedAt: string;
+  completedAt: string | null;
+}
+
+export interface UploadFile {
+  path: string;
+  mime: string | null;
+  contentLength: string;
+}
+
+export interface PresignedUpload extends Omit<Upload, "files"> {
+  files: PresignedUploadFile[];
+}
+
+export interface PresignedUploadFile extends UploadFile {
+  presignedUrl: string;
+}
+
+export const getMaxBytes = (size: UploadSize): bigint => {
+  const b = 1n;
+  const kb = 1000n * b;
+  const mb = 1000n * kb;
+  const gb = 1000n * mb;
+  const tb = 1000n * gb;
+
+  const kib = 1024n * b;
+  const mib = 1024n * kib;
+  const gib = 1024n * mib;
+  const tib = 1024n * gib;
+
+  if ("b" in size) return BigInt(size.b) * b;
+  else if ("kb" in size) return BigInt(size.kb) * kb;
+  else if ("mb" in size) return BigInt(size.mb) * mb;
+  else if ("gb" in size) return BigInt(size.gb) * gb;
+  else if ("tb" in size) return BigInt(size.tb) * tb;
+  else if ("kib" in size) return BigInt(size.kib) * kib;
+  else if ("mib" in size) return BigInt(size.mib) * mib;
+  else if ("gib" in size) return BigInt(size.gib) * gib;
+  else if ("tib" in size) return BigInt(size.tib) * tib;
+  else return size; // Unreachable
+};
+
+export const prismaToOutput = (upload: PrismaUpload): Upload => ({
+  id: upload.id,
+  userId: upload.userId,
+
+  bucket: upload.bucket,
+  contentLength: upload.contentLength.toString(),
+
+  files: upload.files.map((file) => ({
+    path: file.path,
+    mime: file.mime,
+    contentLength: file.contentLength.toString(),
+  })),
+
+  createdAt: upload.createdAt.toISOString(),
+  updatedAt: upload.updatedAt.toISOString(),
+  completedAt: upload.completedAt?.toISOString() ?? null,
+});
+
+export const getKey = (uploadId: string, path: string): string =>
+  `${uploadId}/${path}`;
diff --git a/tests/basic/backend.yaml b/tests/basic/backend.yaml
index 19142bec..e3dfc91a 100644
--- a/tests/basic/backend.yaml
+++ b/tests/basic/backend.yaml
@@ -13,6 +13,13 @@ modules:
     registry: local
   users:
     registry: local
+  uploads:
+    registry: local
+    config:
+      maxUploadSize: { gb: 10 }
+      maxFilesPerUpload: 16
+      s3:
+        # I'm not committing API secrets again, I've learned
   auth:
     registry: local
     config:
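
Note: `getMaxBytes` distinguishes decimal units (kb/mb/gb/tb) from binary units (kib/mib/gib/tib), so the two spellings differ by roughly 7% at the gigabyte scale. A few worked values for illustration:

```ts
getMaxBytes({ gb: 10 });   // 10_000_000_000n (decimal gigabytes)
getMaxBytes({ gib: 10 });  // 10_737_418_240n (binary gibibytes)
getMaxBytes({ mib: 512 }); // 536_870_912n
```
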