feat: Create uploads module
Blckbrry-Pi authored and NathanFlurry committed May 14, 2024
1 parent 1317b2b commit fb22613
Showing 20 changed files with 1,439 additions and 2 deletions.
3 changes: 1 addition & 2 deletions modules/tokens/scripts/extend.ts
@@ -1,6 +1,5 @@
 import { ScriptContext } from "../_gen/scripts/extend.ts";
-import { TokenWithSecret } from "../types/common.ts";
-import { tokenFromRow } from "../types/common.ts";
+import { TokenWithSecret, tokenFromRow } from "../utils/types.ts";
 
 export interface Request {
   token: string;
23 changes: 23 additions & 0 deletions modules/uploads/config.ts
@@ -0,0 +1,23 @@
import { UploadSize } from "./utils/data_size.ts";

export interface Config {
  maxUploadSize?: UploadSize;
  maxMultipartUploadSize?: UploadSize;
  maxFilesPerUpload?: number;
  defaultMultipartChunkSize?: UploadSize;

  s3: {
    bucketName: string;
    region: string;
    endpoint: string;

    accessKeyId?: string;
    secretAccessKey?: string;
  };
}

export const DEFAULT_MAX_FILES_PER_UPLOAD = 10;

export const DEFAULT_MAX_UPLOAD_SIZE: UploadSize = "30mib";
export const DEFAULT_MAX_MULTIPART_UPLOAD_SIZE: UploadSize = "10gib";
export const DEFAULT_MULTIPART_CHUNK_SIZE: UploadSize = "10mib";
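
For illustration, a value matching this Config shape might look like the sketch below. The bucket name, region, and endpoint are placeholders, and "100mib" assumes UploadSize accepts the same size-string format as the defaults above:

// Hypothetical example — bucket/region/endpoint are placeholders, and the
// size strings assume UploadSize accepts the same format as the defaults above.
const exampleConfig: Config = {
  maxUploadSize: "100mib",
  maxFilesPerUpload: 5,
  s3: {
    bucketName: "my-uploads-bucket",
    region: "us-east-1",
    endpoint: "https://s3.us-east-1.amazonaws.com",
    // accessKeyId and secretAccessKey are omitted here; the s3_not_configured
    // error below suggests they may instead come from environment variables.
  },
};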
@@ -0,0 +1,27 @@
-- CreateTable
CREATE TABLE "Upload" (
    "id" UUID NOT NULL,
    "userId" UUID,
    "bucket" TEXT NOT NULL,
    "contentLength" BIGINT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    "completedAt" TIMESTAMP(3),
    "deletedAt" TIMESTAMP(3),

    CONSTRAINT "Upload_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "Files" (
    "uploadId" UUID NOT NULL,
    "multipartUploadId" TEXT,
    "path" TEXT NOT NULL,
    "mime" TEXT,
    "contentLength" BIGINT NOT NULL,

    CONSTRAINT "Files_pkey" PRIMARY KEY ("uploadId","path")
);

-- AddForeignKey
ALTER TABLE "Files" ADD CONSTRAINT "Files_uploadId_fkey" FOREIGN KEY ("uploadId") REFERENCES "Upload"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
3 changes: 3 additions & 0 deletions modules/uploads/db/migrations/migration_lock.toml
@@ -0,0 +1,3 @@
# Please do not edit this file manually
# It should be added in your version-control system (i.e. Git)
provider = "postgresql"
33 changes: 33 additions & 0 deletions modules/uploads/db/schema.prisma
@@ -0,0 +1,33 @@
// Do not modify this `datasource` block
datasource db {
  provider = "postgresql"
  url      = env("DATABASE_URL")
}

model Upload {
  id            String    @id @default(uuid()) @db.Uuid
  userId        String?   @db.Uuid
  bucket        String
  contentLength BigInt
  createdAt     DateTime  @default(now())
  updatedAt     DateTime  @updatedAt
  completedAt   DateTime?
  deletedAt     DateTime?
  files         Files[]   @relation("Files")
}

model Files {
  uploadId          String  @db.Uuid
  upload            Upload  @relation("Files", fields: [uploadId], references: [id])
  multipartUploadId String?
  path              String
  mime              String?
  contentLength     BigInt

  @@id([uploadId, path])
}
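
The prepare.ts script that populates these tables is part of this commit but not shown in this excerpt. As a rough sketch only (the paths, sizes, and nested-write shape are illustrative assumptions), creating an upload together with its files via the Prisma client could look like:

// Illustrative sketch: insert an Upload row together with its Files rows.
const upload = await ctx.db.upload.create({
  data: {
    userId: null,
    bucket: "my-uploads-bucket", // placeholder
    contentLength: 2048n, // BigInt column
    files: {
      createMany: {
        data: [
          { path: "avatar.png", mime: "image/png", contentLength: 2048n },
        ],
      },
    },
  },
  include: { files: true },
});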
52 changes: 52 additions & 0 deletions modules/uploads/module.yaml
@@ -0,0 +1,52 @@
scripts:
  prepare:
    name: Prepare Upload
    description: Prepare an upload batch for data transfer
  complete:
    name: Complete Upload
    description: Alert the module that the upload has been completed
  get:
    name: Get Upload Metadata
    description: Get the metadata (including contained files) for specified upload IDs
  get_public_file_urls:
    name: Get File Links
    description: Get presigned download links for each of the specified files
  list_for_user:
    name: List Uploads for Users
    description: Get a list of upload IDs associated with the specified user IDs
  delete:
    name: Delete Upload
    description: Remove the upload and delete the files from the bucket
errors:
  no_files:
    name: No Files Provided
    description: An upload must have at least 1 file
  too_many_files:
    name: Too Many Files Provided
    description: There is a limit to how many files can be put into a single upload (see config)
  duplicate_paths:
    name: Duplicate Paths Provided
    description: An upload cannot contain 2 files with the same path (see `cause` for offending paths)
  size_limit_exceeded:
    name: Combined Size Limit Exceeded
    description: There is a maximum total size per upload (see config)
  upload_not_found:
    name: Upload Not Found
    description: The provided upload ID didn't match any known existing uploads
  upload_already_completed:
    name: Upload Already Completed
    description: "`complete` was already called on this upload"
  s3_not_configured:
    name: S3 Not Configured
    description: The S3 bucket is not configured (missing env variables)
  too_many_chunks:
    name: Possibility Of Too Many Chunks
    description: |
      AWS S3 has a limit on the number of parts that can be uploaded in a
      multipart upload. This limit is 10,000 parts. If the number of chunks
      required to upload the maximum multipart upload size exceeds this limit,
      any operation will preemptively throw this error.
  multipart_upload_completion_fail:
    name: Multipart Upload Completion Failure
    description: The multipart upload failed to complete (see `cause` for more information)
  # Declared for the error codes thrown by complete.ts and delete.ts below
  files_not_uploaded:
    name: Files Not Uploaded
    description: Not all files in the upload have been uploaded to the bucket (see `meta` for the missing paths)
  failed_to_delete:
    name: Failed To Delete Files
    description: Some files could not be deleted from the bucket (see `meta` for details)
dependencies: {}
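
The too_many_chunks bound can be sanity-checked against the defaults in config.ts: 10 GiB split into 10 MiB chunks needs 1,024 parts, comfortably under S3's 10,000-part limit. A rough sketch of the precondition follows; this is illustrative only, since the module's actual validation is not shown in this excerpt:

// Rough sketch of the too_many_chunks precondition using the module defaults.
const MAX_S3_PARTS = 10_000n; // S3's multipart part limit
const maxMultipartBytes = 10n * 1024n ** 3n; // "10gib"
const chunkBytes = 10n * 1024n ** 2n; // "10mib"
// Ceiling division: chunks needed for the largest allowed multipart upload
const requiredParts = (maxMultipartBytes + chunkBytes - 1n) / chunkBytes; // 1024n
if (requiredParts > MAX_S3_PARTS) {
  // The module would throw RuntimeError("too_many_chunks") here
  throw new Error("too_many_chunks");
}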
136 changes: 136 additions & 0 deletions modules/uploads/scripts/complete.ts
@@ -0,0 +1,136 @@
import { RuntimeError, ScriptContext } from "../_gen/scripts/complete.ts";
import {
  completeMultipartUpload,
  getMultipartUploadParts,
  keyExists,
} from "../utils/bucket.ts";
import { getConfig } from "../utils/config_defaults.ts";
import { getKey, prismaToOutputWithFiles, Upload } from "../utils/types.ts";

export interface Request {
  uploadId: string;
}

export interface Response {
  upload: Upload;
}

export async function run(
  ctx: ScriptContext,
  req: Request,
): Promise<Response> {
  const config = getConfig(ctx.userConfig);

  const newUpload = await ctx.db.$transaction(async (db) => {
    // Find the upload by ID
    const upload = await db.upload.findFirst({
      where: {
        id: req.uploadId,
      },
      select: {
        id: true,
        userId: true,
        bucket: true,
        contentLength: true,
        files: true,
        createdAt: true,
        updatedAt: true,
        completedAt: true,
      },
    });

    // Error if the upload wasn't prepared
    if (!upload) {
      throw new RuntimeError(
        "upload_not_found",
        {
          meta: { uploadId: req.uploadId },
        },
      );
    }

    // Check with S3 to see if the files were uploaded
    const fileExistencePromises = upload.files.map(
      async (file) => {
        // If the file was uploaded in parts, complete the multipart upload
        if (file.multipartUploadId) {
          try {
            const parts = await getMultipartUploadParts(
              config.s3,
              getKey(upload.id, file.path),
              file.multipartUploadId,
            );
            if (parts.length === 0) return false;

            await completeMultipartUpload(
              config.s3,
              getKey(upload.id, file.path),
              file.multipartUploadId,
              parts,
            );
          } catch (e) {
            throw new RuntimeError(
              "multipart_upload_completion_fail",
              { cause: e },
            );
          }

          return true;
        } else {
          // Check if the file exists
          return await keyExists(config.s3, getKey(upload.id, file.path));
        }
      },
    );
    const fileExistence = await Promise.all(fileExistencePromises);
    const filesAllExist = fileExistence.every(Boolean);
    if (!filesAllExist) {
      const missingFiles = upload.files.filter((_, i) => !fileExistence[i]);
      throw new RuntimeError(
        "files_not_uploaded",
        {
          meta: {
            uploadId: req.uploadId,
            missingFiles: missingFiles.map((file) => file.path),
          },
        },
      );
    }

    // Error if `complete` was already called with this ID
    if (upload.completedAt !== null) {
      throw new RuntimeError(
        "upload_already_completed",
        {
          meta: { uploadId: req.uploadId },
        },
      );
    }

    // Update the upload to mark it as completed
    const completedUpload = await db.upload.update({
      where: {
        id: req.uploadId,
      },
      data: {
        completedAt: new Date().toISOString(),
      },
      select: {
        id: true,
        userId: true,
        bucket: true,
        contentLength: true,
        files: true,
        createdAt: true,
        updatedAt: true,
        completedAt: true,
      },
    });

    return completedUpload;
  });

  return {
    upload: prismaToOutputWithFiles(newUpload),
  };
}
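
complete.ts relies on helpers from utils/bucket.ts that this excerpt does not include. Purely as an illustration of what such a helper might look like — the module's real implementation may use a different S3 client entirely — keyExists could be written against @aws-sdk/client-s3:

import { HeadObjectCommand, NotFound, S3Client } from "@aws-sdk/client-s3";
import { Config } from "../config.ts";

// Illustrative only: check whether an object already exists in the bucket.
async function keyExists(s3: Config["s3"], key: string): Promise<boolean> {
  const client = new S3Client({
    region: s3.region,
    endpoint: s3.endpoint,
    credentials: s3.accessKeyId && s3.secretAccessKey
      ? { accessKeyId: s3.accessKeyId, secretAccessKey: s3.secretAccessKey }
      : undefined, // fall back to the SDK's default credential chain
  });
  try {
    await client.send(new HeadObjectCommand({ Bucket: s3.bucketName, Key: key }));
    return true;
  } catch (e) {
    if (e instanceof NotFound) return false;
    throw e;
  }
}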
85 changes: 85 additions & 0 deletions modules/uploads/scripts/delete.ts
@@ -0,0 +1,85 @@
import { RuntimeError, ScriptContext } from "../_gen/scripts/delete.ts";
import { getKey } from "../utils/types.ts";
import { deleteKeys } from "../utils/bucket.ts";
import { getConfig } from "../utils/config_defaults.ts";

export interface Request {
  uploadId: string;
}

export interface Response {
  bytesDeleted: string;
}

export async function run(
  ctx: ScriptContext,
  req: Request,
): Promise<Response> {
  const config = getConfig(ctx.userConfig);

  const bytesDeleted = await ctx.db.$transaction(async (db) => {
    // Find the upload, requiring that it is completed and not yet deleted
    const upload = await db.upload.findFirst({
      where: {
        id: req.uploadId,
        completedAt: { not: null },
        deletedAt: null,
      },
      select: {
        id: true,
        userId: true,
        bucket: true,
        contentLength: true,
        files: true,
        createdAt: true,
        updatedAt: true,
        completedAt: true,
      },
    });
    if (!upload) {
      throw new RuntimeError(
        "upload_not_found",
        {
          meta: {
            modified: false,
            uploadId: req.uploadId,
          },
        },
      );
    }

    // Delete the objects from the bucket
    const filesToDelete = upload.files.map((file) =>
      getKey(file.uploadId, file.path)
    );
    const deleteResults = await deleteKeys(config.s3, filesToDelete);

    // Collect the files whose deletion failed
    const failures = upload.files
      .map((file, i) => [file, deleteResults[i]] as const)
      .filter(([, successfullyDeleted]) => !successfullyDeleted)
      .map(([file]) => file);

    if (failures.length) {
      const failedPaths = JSON.stringify(failures.map((file) => file.path));
      throw new RuntimeError(
        "failed_to_delete",
        {
          meta: {
            modified: failures.length !== filesToDelete.length,
            reason: `Failed to delete files with paths ${failedPaths}`,
          },
        },
      );
    }

    // Mark the upload row as deleted (soft delete)
    await db.upload.update({
      where: {
        id: req.uploadId,
      },
      data: {
        deletedAt: new Date().toISOString(),
      },
    });

    return upload.contentLength.toString();
  });
  return { bytesDeleted };
}
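
One detail worth noting in the Response shape: bytesDeleted is a string (via upload.contentLength.toString()), presumably because BigInt values cannot be JSON-serialized directly:

// BigInt does not survive JSON.stringify, hence the string response field.
JSON.stringify({ bytesDeleted: 123n }); // TypeError: Do not know how to serialize a BigInt
JSON.stringify({ bytesDeleted: (123n).toString() }); // '{"bytesDeleted":"123"}'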