diff --git a/build.mts b/build.mts new file mode 100644 index 000000000000..32694e80c7a0 --- /dev/null +++ b/build.mts @@ -0,0 +1,5 @@ +const packages = [ + // ... other packages + 'upload', + // ... other packages +] diff --git a/docs/docs/storage-and-uploads/overview.md b/docs/docs/storage-and-uploads/overview.md new file mode 100644 index 000000000000..58c20abd7192 --- /dev/null +++ b/docs/docs/storage-and-uploads/overview.md @@ -0,0 +1,83 @@ +# Storage and Uploads + +When you're building web applications, you'll often need to handle files. This is where **uploads** and **storage** come into play. Let's explore these concepts and see how they work together to create powerful file management systems. + +Thankfully, RedwoodJS makes it easy to upload and store files with a flexible storage system that supports multiple storage backends (like AWS S3, local file system, etc). + +:::tip Ready to get started? + +If you want to setup and start using storage and uploads, visit the [quickstart guide](/docs/storage-and-uploads/quickstart). + +Or, for more detailed guides, visit the [storage guide](/docs/storage-and-uploads/storage) and the [uploads guide](/docs/storage-and-uploads/uploads). +::: + +Let's first understand how uploads and storage work. + +## Understanding Uploads + +Imagine you want to share a photo on a social media platform. When you click "Upload," you're initiating an upload process. Here's what happens: + +1. Your device (the client) sends the file to the web server. +2. The server receives the file and decides what to do with it. + +Uploads are all about getting files from the user to the server. There are several ways to implement uploads: + +- Simple HTML forms with `<input type="file">` +- JavaScript methods like Ajax or the Fetch API for smoother user experiences +- More advanced approaches using GraphQL or REST APIs + +## Understanding Storage + +Once a file reaches the server, it needs a place to live. This is where storage comes in. 
Think of storage as the file's new home on the internet. There are different types of storage: + +- **Local storage**: Saving files directly on the server. It's like keeping files on your computer. +- **Cloud storage**: Using services like AWS S3 or Google Cloud Storage. This is like having a huge, always-accessible hard drive in the cloud. +- **Database storage**: Storing files (usually small ones) directly in a database. This is less common but can be useful in specific scenarios. + +## Storage and Uploads on Their Own + +Here's where it gets interesting: uploads and storage don't always have to go hand in hand. Let's look at some examples: + +### Uploads Without Storage + +Imagine an online tool that converts images from one format to another: + +1. You upload your image. +2. The server converts it. +3. You download the converted image. +4. The server deletes both the original and converted files. + +In this case, we used the upload feature without long-term storage. This approach saves space and is great for temporary operations. + +### Storage Without Uploads + +Now, think about a system that generates monthly reports: + +1. At the end of each month, the server creates a PDF report using data from its database. +2. The PDF is stored in the cloud. +3. Users can access this report whenever they need it. + +Here, we're using storage without any user-initiated upload. The server is generating and storing files on its own. + +## Storage and Uploads Together + +While uploads and storage can work independently, their real power shines when they work together. Here's a common scenario: + +1. A user uploads a profile picture (upload). +2. The server processes the image, perhaps resizing it (processing). +3. The processed image is saved to cloud storage (storage). +4. The server saves a link to the stored image in its database (database integration). +5. Whenever needed, the app can quickly display the user's profile picture. 
+ +This workflow combines upload, processing, storage, and database integration to create a seamless user experience. + +## Why This Matters + +Understanding the relationship between uploads and storage allows you to: + +1. **Optimize Performance**: You can choose when to store files and when to process them on-the-fly. +2. **Enhance User Experience**: Implement features like drag-and-drop uploads or instant image previews. +3. **Scale Efficiently**: Use cloud storage to handle growing numbers of files without overloading your server. +4. **Save Resources**: Process files without storing them when long-term storage isn't necessary. + +By mastering these concepts, you'll be well-equipped to handle a wide range of file management scenarios in your web applications. Whether you're building a simple photo-sharing app or a complex document management system, understanding uploads and storage will be key to your success. diff --git a/docs/docs/storage-and-uploads/quickstart.md b/docs/docs/storage-and-uploads/quickstart.md new file mode 100644 index 000000000000..4a6b4cbad3b5 --- /dev/null +++ b/docs/docs/storage-and-uploads/quickstart.md @@ -0,0 +1,18 @@ +# Storage and Uploads + +## Quick Start + +RedwoodJS makes it easy to upload and store files with a flexible storage system that supports multiple backends with just two setup commands. + +```bash +yarn rw setup storage +yarn rw setup uploads +``` + +## Basic Example + +## Common Patterns + +### With Prisma Reference + +### With Upload Token Validation diff --git a/docs/docs/storage-and-uploads/storage.md b/docs/docs/storage-and-uploads/storage.md new file mode 100644 index 000000000000..8eb30b9007e7 --- /dev/null +++ b/docs/docs/storage-and-uploads/storage.md @@ -0,0 +1,231 @@ +# Storage and Uploads + +## Storage + +RedwoodJS offers a flexible storage system that's got your back, whether you're working with local files or cloud storage like AWS S3. The best part? 
Switching between storage options is a breeze, thanks to a consistent API across all adapters. + +### Key Features + +- **Local filesystem support**: Perfect for development and simple deployments +- **S3-compatible storage**: Seamlessly integrate with popular cloud storage solutions +- **Unified API**: Write your code once, deploy anywhere without changing your storage logic +- **In-memory adapter**: Great for testing and ephemeral storage needs + +With RedwoodJS storage, you can focus on building your app while we handle the nitty-gritty of file management. Let's dive in and see how easy it is to get started! + +## Setup + +To set up storage in your RedwoodJS app, simply run the setup command: + +```bash +yarn rw setup storage +``` + +1. This will install the core storage package and the local filesystem adapter. + +```bash +yarn add @redwoodjs/storage-core @redwoodjs/storage-adapter-filesystem +``` + +2. The `storage.ts` file in your `api/src/lib` directory will be updated with a default configuration using the local filesystem adapter like this: + +```ts +// Setup and configuration for storage +// See: https://docs.redwoodjs.com/docs/storage + +import path from 'node:path' + +import { FileSystemAdapter } from '@redwoodjs/storage-adapter-filesystem' +import { StorageManager, StorageSelfSigner } from '@redwoodjs/storage-core' + +const baseUrl = process.env.STORAGE_SIGNING_BASE_URL +export const signer = new StorageSelfSigner({ +  secret: process.env.STORAGE_SIGNING_SECRET, +}) + +export const storage = new StorageManager({ +  adapters: { +    local: new FileSystemAdapter({ +      root: path.join(__dirname, '..', '..', '.storage'), +      signing: { +        signer, +        baseUrl, +      }, +    }), +    special: new FileSystemAdapter({ +      root: path.join(__dirname, '..', '..', '.storage-special'), +      signing: { +        signer, +        baseUrl, +      }, +    }), +  }, + +  default: 'local', + +  env: { +    development: 'local', +  }, +}) +``` + +3. 
Add the following environment variables to your `.env` file: + +``` +STORAGE_SIGNING_BASE_URL=http://localhost:8911/storage +STORAGE_SIGNING_SECRET=super-secret +``` + +## Adapters + +RedwoodJS storage supports multiple adapters, allowing you to switch between different storage backends easily. + +### Local Filesystem + +The FileSystemAdapter is perfect for local development and simple deployments. It stores files on your local filesystem. + +Usage: + +```ts +import { FileSystemAdapter } from '@redwoodjs/storage-adapter-filesystem' +import { StorageSelfSigner } from '@redwoodjs/storage-core' + +const yourSigner = new StorageSelfSigner({ +  secret: process.env.STORAGE_SIGNING_SECRET, +}) + +const localAdapter = new FileSystemAdapter({ +  root: '/path/to/storage', +  signing: { +    signer: yourSigner, +    baseUrl: 'http://your-base-url', +  }, +}) +``` + +This adapter is already configured in the default `storage.ts` file. + +You can customize the root directory and signing configuration as needed. + +For example, if you want to store files in a location called `.storage-special`, you can do so by adding a new adapter. + +### AWS S3 or Tigris + +For production environments, you might want to use cloud storage solutions like AWS S3 or Fly Tigris. RedwoodJS provides an S3-compatible adapter. + +To use the S3 adapter: + +1. Install the required package: + +```bash +yarn workspace api add @redwoodjs/storage-adapter-s3 +``` + +2. Update your `storage.ts` file to include the S3 adapter: + +```ts +import { S3Adapter } from '@redwoodjs/storage-adapter-s3' + +// ... existing code ... + +export const storage = new StorageManager({ +  adapters: { +    // ... existing adapters ... +    s3: new S3Adapter({ +      bucket: process.env.AWS_BUCKET, +      region: process.env.AWS_REGION, +      credentials: { +        accessKeyId: process.env.AWS_ACCESS_KEY_ID, +        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY, +      }, +    }), +  }, +  // ... rest of the configuration ... 
+}) +``` + +### The Default Adapter + +### Using Different Adapters + +### Environment-Specific Adapters + +## Retrieving Files + +To retrieve files from storage, use the readData, readFile, or readStream methods: + +```ts +const buffer = await storage.readData('file-reference') +const file = await storage.readFile('file-reference') +const stream = await storage.readStream('file-reference') +``` + +### Temporary URLs + +You can generate signed URLs for temporary access to files: + +```ts +const signedUrl = await storage.getSignedUrl('file-reference') +``` + +## Deleting Files + +To delete a file from storage: + +```ts +await storage.delete('file-reference') +``` + +## Storing Files + +To store files, use the writeData, writeFile, or writeStream methods: + +```ts +await storage.writeData('file-reference', data) +await storage.writeFile('file-reference', file) +await storage.writeStream('file-reference', stream) +``` + +When should you use which method? + +When in doubt, use `writeFile`. It's the simplest method and works in most cases. + +But sometimes you might want to store binary data or you don't have a file instance yet. For that, use `writeData`. + +And if you need to stream a file to storage, use `writeStream` but you'll need a stream like `fs.createReadStream` or with a File object like `File.stream()`. + +## Streaming Files + +You can stream files directly from storage: + +```ts +const stream = await storage.stream('file-reference') +``` + +## Fetching Files in your Pages and Components + +```graphql +type Profile { + id: String! + createdAt: DateTime! + updatedAt: DateTime! + firstName: String! + lastName: String! + avatar: String! 
@withStorage +} +``` + +### withStorage Directive + +```graphql +@withStorage(format: SIGNED_URL | DATA_URI) +@withStorage(adapter: FS | S3) +@withStorage(adapter: FS | S3, format: SIGNED_URL | DATA_URI) +``` + +- SIGNED_URL +- DATA_URI + +### Storage Function + +- verifies SignedUrl diff --git a/docs/docs/storage-and-uploads/uploads.md b/docs/docs/storage-and-uploads/uploads.md new file mode 100644 index 000000000000..0ad4f8d89040 --- /dev/null +++ b/docs/docs/storage-and-uploads/uploads.md @@ -0,0 +1,74 @@ +# Storage and Uploads + +## Uploads + +In web applications, one of the most common use-cases for storing files is storing user uploaded files such as photos and documents. RedwoodJS makes it very easy to handle file uploads and then store them using `storage` and the `writeFile` or `writeData` methods of one of its adapters. + +While there are multiple ways to handle uploads, RedwoodJS comes with a built-in `File` scalar type for GraphQL that makes it easy to work with file uploads. + +Because RedwoodJS uses [GraphQL Yoga File Uploads](https://the-guild.dev/graphql/yoga-server/docs/features/file-uploads), you can upload files and consume the binary data inside your services easily. + +For example, an input type for creating a profile might look like this: + +```graphql +input CreateProfileInput { + firstName: String! + lastName: String! + avatar: [File!]! 
+} +``` + +In your service, you can access the uploaded files like this: + +```tsx +export const createProfile: MutationResolvers['createProfile'] = async ({ +  input, +}) => { +  const { avatar, ...rest } = input + +  // note that web browsers send arrays for a file field +  // so you'll need to access the first element +  const file = avatar[0] + +  // now you can use the file to get metadata +  const { name, size, type } = file + +  // or store it using RedwoodJS storage +  await storage.writeFile(file) +} +``` + +## Setup + +If you just want to upload files, RedwoodJS doesn't require any special setup aside from adding the `File` scalar to your GraphQL schema as GraphQL Yoga will handle the rest. + +:::note Ready To Go + +RedwoodJS has already configured its Apollo Client with a terminating link that fetches a GraphQL multipart request if the GraphQL variables contain files (by default FileList, File, or Blob instances). + +That means you don't need to do anything special to use the `File` scalar in your Upload mutations. +::: + +However, if you want to store the files, you'll need to configure the `storage` manager as documented in the [Storage](/docs/storage-and-uploads/storage) page. + +But before you do that, there are a few things you should consider -- namely, having some rules for who can upload what to your server. + +In that case you will want to run: + +```bash +yarn rw setup uploads +``` + +This will add a few new files to your project to let you configure upload validation. + +## Considerations + +- public vs private +- permissions +- validation + +## Upload Token and useUploadsMutation + +- UploadToken +- useUploadsMutation +- requiresUploadToken directive diff --git a/docs/docs/uploads.md b/docs/docs/uploads.md deleted file mode 100644 index 2f622158c79d..000000000000 --- a/docs/docs/uploads.md +++ /dev/null @@ -1,824 +0,0 @@ -# Uploads & Storage - -Getting started with file uploads can open up a world of possibilities for your application. 
Whether you're enhancing user profiles with custom avatars, allowing document sharing, or enabling image galleries - Redwood has an integrated way of uploading files and storing them. - -There are two parts to this: - -1. Setting up the frontend and GraphQL schema to send and receive files - Uploads -2. Manipulate the data inside services, and pass it to Prisma, for persistence - Storage - -We can roughly breakdown the flow as follows - -![Redwood Uploads Flow Diagram](/img/uploads/uploads-flow.png) - -## Uploading Files - -### 1. Setting up the File scalar - -Before we start sending files via GraphQL we need to tell Redwood how to handle them. Redwood and GraphQL Yoga are pre-configured to handle the `File` scalar. - -In your mutations, use the `File` scalar for the fields where you are submitting an upload - -```graphql title="api/src/graphql/profiles.sdl.ts" -input UpdateProfileInput { - id: Int - firstName: String - # ...other fields - // highlight-next-line - avatar: File -} -``` - -You're now ready to receive files! - -### 2. Configuring the UI - -Assuming you've built a [Form](forms.md) for your profile let's add a -`FileField` to it. - -```tsx title="web/src/components/ProfileForm.tsx" -// highlight-next-line -import { FileField, TextField, FieldError } from '@redwoodjs/forms' - -export const ProfileForm = ({ onSubmit }) => { - return { -
- - - - - - - - - // highlight-next-line - - - } -} -``` - -A `FileField` is just a standard `` - that's integrated with your Form context - it just makes it easier to extract the data for submission. - -Now we need to send the file as a mutation! - -```tsx title="web/src/components/EditProfile.tsx" -import { useMutation } from '@redwoodjs/web' - -const UPDATE_PROFILE_MUTATION = gql` - // This is the Input type we setup with File earlier! - // highlight-next-line - mutation UpdateProfileMutation($input: UpdateProfileInput!) { - updateProfile(input: $input) { - firstName - lastName - // highlight-next-line - avatar - } - } -` - -const EditProfile = ({ profile }) => { - const [updateProfile, { loading, error }] = useMutation( - UPDATE_PROFILE_MUTATION, - { - /*..*/ - } - ) - - const onSave = (formData: UpdateProfileInput) => { - // We have to extract the first file from the input - - const input = { - ...formData, - // FileField returns an array, we want the first and only file; Multi-file - // uploads are available - // highlight-next-line - avatar: formData.avatar?.[0], - } - - updateProfile({ variables: { input } }) - } - - return ( - - ) -} -``` - -While [multi-file uploads are possible](#saving-file-lists---savefilesinlist), when our example form is submitted we process the data to ensure the avatar field contains a single file instead of an array (because that's how we setup the UpdateProfileInput). The onSave function then calls the updateProfile mutation. The mutation automatically handles the file upload because we've set up the File scalar and configured our backend to process file inputs. - -### 3. 
Logging the Item Details - -Try uploading your avatar photo now, and if you log the `avatar` field in your service: - -```ts title="api/src/services/profiles/profiles.ts" -export const updateProfile = async ({ id, input }) => { - // highlight-next-line - console.log(input.avatar) - // File { - // filename: 'profile-picture.jpg', - // mimetype: 'image/jpeg', - // createReadStream: [Function: createReadStream] - // ... - // } - - // Example without using the built-in helpers - await fs.writeFile( - '/test/profile.jpg', - Buffer.from(await input.avatar.arrayBuffer()) - ) -} -``` - -You'll see that you are receiving an instance of [File](https://developer.mozilla.org/en-US/docs/Web/API/File). - -That's part 1 done - you can receive uploaded files. In the next steps, we'll talk about some tooling and a Prisma client extension that Redwood gives you, to help you persist and manage your uploads. - -
-**What's happening behind the scenes?** - -Once you send the request, and open up your Network Inspect Panel, you'll notice that the graphql request looks slightly different - it has a different Content-Type (instead of the regular `application/json`). - -That's because when you send a [File](https://developer.mozilla.org/en-US/docs/Web/API/File) - the Redwood Apollo client will switch the request to a multipart form request, using [GraphQL Multipart Request Spec](https://github.com/jaydenseric/graphql-multipart-request-spec). This is the case whether you send a `File`, `FileList` or `Blob` (which is a less specialized File). - -On the backend, GraphQL Yoga is pre-configured to handle multipart form requests, _as long as_ you specify the `File` scalar in your SDL. - -
- -## Storage - -Great, now you can receive Files from GraphQL - but how do you go about saving them to disk, while also tracking them in your database? Well, Redwood has the answers for you! Keep going to find out how! - -### 1. Configuring the Prisma schema - -In your Prisma schema, the `avatar` field should be defined as a string: - -```prisma title="api/db/schema.prisma" -model Profile { - id: Int - // ... other fields - // highlight-next-line - avatar String? -} -``` - -This is because Prisma doesn't have a native File type. Instead, we store the file path or URL as a string in the database. The actual file processing and storage will be handled in your service layer, and then the path to the uploaded file is passed to Prisma to save. - -### 2. Configuring the Upload savers and Uploads extension - -To make it easier (and more consistent) dealing with file uploads, Redwood gives you a standardized way of saving your uploads (i.e. write to storage) by using what we call "savers," along with our custom Uploads extension that will handle deletion and updates automatically for you. - -:::note - -The rest of the doc assumes you are running a "Serverful" configuration for your deployments, as it involves the file system. - -::: - -Let's first run the setup command: - -```shell -yarn rw setup uploads -``` - -This will do three things: - -1. Generate a configuration file in `api/src/lib/uploads.{js,ts}` -2. Configure your Prisma client with the storage extension -3. Generate a `signedUrl` function - -Let's break down the key components of the configuration. 
- -```ts title="api/src/lib/uploads.ts" -import { createUploadsConfig, setupStorage } from '@redwoodjs/storage' -import { FileSystemStorage } from '@redwoodjs/storage/FileSystemStorage' -import { UrlSigner } from '@redwoodjs/storage/signedUrl' - -// ⭐ (1) -const uploadConfig = createUploadsConfig({ - profile: { - fields: ['avatar'], // 👈 the fields that will contain your `File`s - }, -}) - -// ⭐ (2) -export const fsStorage = new FileSystemStorage({ - baseDir: './uploads', -}) - -// ⭐ (3) Optional -export const urlSigner = new UrlSigner({ - secret: process.env.UPLOADS_SECRET, - endpoint: '/signedUrl', -}) - -// ⭐ (4) -const { saveFiles, storagePrismaExtension } = setupStorage({ - uploadsConfig, - storageAdapter: fsStorage, - urlSigner, -}) - -export { saveFiles, storagePrismaExtension } -``` - -**1. Upload Configuration** -This is where you configure the fields that will receive uploads. In our case, it's the `profile.avatar` field. - -The shape of the config looks like this: - -``` -[prismaModel] : { - fields: ['modelField1'] - } -``` - -**2. Storage Adapter** -We create a storage adapter, in this case `FileSystemStorage`, that will save your uploads to the `./uploads` folder. - -This just sets the base path. The actual filenames and folders are determined by the saveFiles utility functions, but [can be overridden!](#customizing-save-file-name-or-save-path) - -**3. Url Signer instance** -This is an optional class that will help you generate signed urls for your files, so you can limit access to these files. Generate a secret with `yarn rw g secret` and add to your .env as `UPLOADS_SECRET`. - -**4. Utility Functions** -We provide utility functions that can be exported from this file to be used elsewhere, such as services. - -- `saveFiles` - object containing functions to save File objects to storage, and return a path. 
- For example: - -``` -saveFiles.forProfile(gqlInput) -``` - -- `storagePrismaExtension` - The Prisma client extension we'll use in `api/src/lib/db.{js,ts}` to automatically handle updates, deletion of uploaded files (including when the Prisma operation fails). It also configures [Result extensions](https://www.prisma.io/docs/orm/prisma-client/client-extensions/result), to give you utilities like `profile.withSignedUrl()`. - -### 3. Attaching the Uploads extension - -Now we need to extend our db client in `api/src/lib/db.{js,ts}` to use the configured prisma client. - -```ts title="api/src/lib/db.ts" -import { PrismaClient } from '@prisma/client' - -import { emitLogLevels, handlePrismaLogging } from '@redwoodjs/api/logger' - -import { logger } from './logger' -// highlight-next-line -import { storagePrismaExtension } from './uploads' - -// 👇 Notice here we create prisma client, but don't export it yet -const prismaClient = new PrismaClient({ - log: emitLogLevels(['info', 'warn', 'error']), -}) - -handlePrismaLogging({ - db: prismaClient, - logger, - logLevels: ['info', 'warn', 'error'], -}) - -// 👇 Export db after adding uploads extension -// highlight-next-line -export const db = prismaClient.$extends(storagePrismaExtension) -``` - -The `$extends` method is used to extend the functionality of your Prisma client by adding - -- [Query extensions](https://www.prisma.io/docs/orm/prisma-client/client-extensions/query) which will intercept your `create`, `update`, `delete` operations
-- [Result extensions](https://www.prisma.io/docs/orm/prisma-client/client-extensions/result) for your stored files - which gives you helper methods on the result of your prisma query - -More details on these extensions can be found [here](#storage-prisma-extension). - -
- -__Why Export This Way__ - - -The `$extends` method returns a new instance of the Prisma client with the extensions applied. By exporting this new instance as `db`, you ensure that any additional functionality provided by the uploads extension is available throughout your application, without needing to change where you import. -Note one of the [limitations](https://www.prisma.io/docs/orm/prisma-client/client-extensions#limitations) of using extensions is if you have to use `$on` on your prisma client (as we do in handlePrismaLogging), it needs to happen before you use `$extends` - -
- -### 4. Implementing Upload savers - -You'll also need a way to actually save the incoming `File` object to a file persisted on storage. In your services, you can use the pre-configured "savers" to write your `File` objects to storage. Prisma will automatically save the path into the database. The savers and storage adapters, configured in `api/src/lib/uploads`, determine where the file is saved. - -```ts title="api/src/services/profiles/profiles.ts" -// highlight-next-line -import { saveFiles } from 'src/lib/uploads' - -export const updateProfile: MutationResolvers['updateProfile'] = async ({ - id, - input, -}) => { - // highlight-next-line - const processedInput = await saveFiles.forProfile(input) - - // input.avatar (File) becomes a path string 👇 - // Settings in src/lib/uploads.ts configures where the upload is saved - // processedInput.avatar -> '/mySavePath/profile/avatar/generatedId.jpg' - - return db.profile.update({ - data: processedInput, - where: { id }, - }) -} -``` - -For each of the models you configured when you setup uploads (in `UploadConfig`) - you have savers for them. - -So if you passed: - -```ts -const uploadConfig = createUploadsConfig({ - profile: { - fields: ['avatar'], - }, - anotherModel: { - fields: ['document'], - }, -}) - -const { saveFiles } = setupStorage(uploadConfig) - -// Available methods 👇 -saveFiles.forProfile(profileGqlInput) -saveFiles.forAnotherModel(anotherModelGqlInput) - -// Special case - not mapped to prisma model -saveFiles.inList(arrayOfFiles) -``` - -:::info -You might have already noticed that the saver functions sort-of tie your GraphQL inputs to your Prisma model. - -In essence, these utility functions expect to take an object very similar to the Prisma data argument (the data you're passing to your `create`, `update`), but with File objects at fields `avatar`, and `document` instead of strings. 
- -If your `File` is in a different key (or a key you did not configure in the upload config), it will be ignored and left as-is. - -::: - -## Informational/Utilities - -## Storage Prisma Extension - -This Prisma extension is designed to handle file uploads and deletions in conjunction with database operations. The goal here is for you as the developer to not have to think too much in terms of files, rather just as Prisma operations. The extension ensures that file uploads are properly managed alongside database operations, preventing orphaned files and maintaining consistency between the database and the storage. - -:::note -The extension will _only_ operate on fields and models configured in your `UploadConfig` which you configure in [`api/src/lib/uploads.{js,ts}`](#2-configuring-the-upload-savers-and-uploads-extension). -::: - -What this configures is: - -**A) CRUD operations** - -- when the record is deleted, the associated upload is removed from storage -- when a record is updated, the associated upload file is also replaced - -...and negative cases such as: - -- saved uploads are removed if creation fails -- saved uploads are removed if update fails (while keeping the original) - -### `create` & `createMany` operations - -If your create operation fails, it removes any uploaded files to avoid orphaned files (so you can retry the request) - -### `update` & `updateMany` operations - -1. If update operation is successful, removes the old uploaded files -2. If it fails, removes any newly uploaded files (so you can retry the request) - -### `delete` operations - -Removes any associated uploaded files, once delete operation completes. - -### `upsert` operations - -Depending on whether it's updating or creating, performs the same actions as create or update. - -## Result Extensions - -When you add the storage prisma extension, it also configures your prisma objects to have special helper methods. 
- -These will only appear on fields that you configure in your `UploadConfig`. - -```typescript -const profile = await db.profile.update(/*...*/) - -// The result of your prisma query contains the helpers -profile?.withSignedUrl() // ✅ - -// Incorrect: you need to await the result of your prisma query first! -db.profile.update(/*...*/).withSignedUrl() // 🛑 - -// Assuming the comment model does not have an upload field -// the helper won't appear -db.comment.findMany(/*..*/).withSignedUrl() // 🛑 -``` - -**B) Result extensions** - -```ts title="api/src/services/profiles/profiles.ts" -export const profile = async ({ id }) => { - // 👇 await the result from your prisma query - const profile = await db.profile.findUnique({ - where: { id }, - }) - - // Convert the avatar field (which was persisted as a path) to data uri string - // highlight-next-line - return profile?.withDataUri() -} -``` - -:::tip -It's very important to note limitations around what Prisma extensions can do: - -**a) The CRUD operation extensions will not run on nested read and write operations**
-For example: - -```js -const savedFiles = saveFiles.inList(input.files) - -db.folder.update({ - data: { - ...input, - files: { - // highlight-start - createMany: { - data: savedFiles, // if the createMany fails, the saved files will _not_ be deleted - }, - // highlight-end - }, - }, - where: { id }, -}) -``` - -**b) Result extensions are not available on relations.** - -You can often rewrite the query in a different way though. For example, when looking up files : - -```ts -const filesViaRelation = await db.folder - .findUnique({ where: { id: root?.id } }) - .files() - -const filesWhereQuery = await db.file.findMany({ - where: { - folderId: root?.id, - }, -}) - -// 🛑 Will not work, because files accessed via relation -// highlight-next-line -return filesViaRelation.map((file) => file.withSignedUrl()) - -// ✅ OK, because direct lookup -// highlight-next-line -return filesWhereQuery.map((file) => file.withSignedUrl()) -``` - -::: - -### Saving File lists - `saveFiles.inList()` - -If you would like to upload FileLists (or an arrays of Files), use this special utility to persist your Files to storage. This is necessary because String arrays aren't supported on databases - you probably want to save them to a different table, or specific fields. - -Let's say you define in your SDL, a way to send an Array of files. - -```graphql -input UpdateAlbumInput { - name: String - photos: [File] -} -``` - -You can use the `.inList` function like this: - -```ts title="api/src/services/albums.ts" -export const updateAlbum = async ({ - id, - input, -}) => { - - // notice we're passing in the file list, and not the input! 
- // highlight-next-line - const processedInput = await saveFiles.inList(input.photos) - /* Returns an array like this: - [ - '/baseStoragePath/AG1258019MAFGK.jpg', - '/baseStoragePath/BG1059149NAKKE.jpg', - ] - */ - - const mappedPhotos = processedInput.map((path) => ({ path })) - /* Will make `mappedPhotos` be an array of objects like this: - [ - { path: '/baseStoragePath/AG1258019MAFGK.jpg' }, - { path: '/baseStoragePath/BG1059149NAKKE.jpg' }, - ] - */ - - return db.album.update({ - data: { - ...input, - photo: { - createMany: { - data: mappedPhotos, - }, - }, - }, - where: { id }, - }) - -``` - -### Customizing save file name or save path - -If you'd like to customize the filename that a saver will write to you can override it when calling it. For example, you could name your files by the User's id - -```ts -await saveFiles.forProfile(data, { - // highlight-next-line - fileName: 'profilePhoto-' + context.currentUser.id, -}) - -// Will save files to -// /base_path/profilePhoto-58xx4ruv41f8eit0y25.png -``` - -If you'd like to customize where files are saved, perhaps you want to put it in a specific folder, so you can make those files [publicly available](#making-a-folder-public), you can override the folder to use too (skipping the base path of your Storage adapter): - -```ts -await saveFiles.forProfile(data, { - fileName: 'profilePhoto-' + context.currentUser.id, - // highlight-next-line - path: '/public_avatar', -}) - -// Will save files to -// /public_avatar/profilePhoto-58xx4ruv41f8eit0y25.png -``` - -The extension is determined by the name of the uploaded file. - -### Signed URLs - -When you setup uploads, we also generate an API function (an endpoint) for you - by default at `/signedUrl`. You can use this in conjunction with the `.withSignedUrl` helper. 
For example: - -```ts title="api/src/services/profiles/profiles.ts" -import { EXPIRES_IN } from '@redwoodjs/storage/UrlSigner' - -export const profile = async ({ id }) => { - const profile = await db.profile.findUnique({ - where: { id }, - }) - - // Convert the avatar field to signed URLs - // highlight-start - return profile?.withSignedUrl({ - expiresIn: EXPIRES_IN.days(2), - }) - // highlight-end -} -``` - -The object being returned will look like: - -```ts -{ - id: 125, - avatar: '/.redwood/functions/signedUrl?s=s1gnatur3&expiry=1725190749613&path=path.png' -} -``` - -This will generate a URL that will expire in 2 days (from the point of query). Let's breakdown the URL: - -| URL Component | | -| ------------------------------- | ---------------------------------------------------- | -| `/.redwood/functions/signedUrl` | Point to the API server, and the endpoint configured | -| `s=s1gnatur3` | The signature that we'll validate | -| `expiry=1725190749613` | Time stamp for when it expires | -| `path=path.png` | The key to look up the file on your storage | - -
-How the signedUrl function validates - -This function is automatically generated for you, but let's take a quick look at how it works: - -```ts title="api/src/functions/signedUrl/signedUrl.ts" -import type { SignatureValidationArgs } from '@redwoodjs/storage/UrlSigner' - -// The urlSigner and fsStorage instances were configured when we setup uploads -// highlight-next-line -import { urlSigner, fsStorage } from 'src/lib/uploads' - -export const handler = async (event) => { - // Validate the signature using the urlSigner instance - // highlight-next-line - const fileToReturn = urlSigner.validateSignature( - // Pass the params {s, path, expiry} - // highlight-next-line - event.queryStringParameters as SignatureValidationArgs - ) - - // Use the returned value to lookup the file in your storage - // highlight-next-line - const { contents, type } = await fsStorage.read(fileToReturn) - - return { - statusCode: 200, - headers: { - // You also get the type from the read - 'Content-Type': type, - }, - // Return the contents of the file - body: contents, - } -} -``` - -We created and exported the `urlSigner` instance and `fsStorage` adapter in `src/lib/uploads`. - -The details to validate come through as query parameters, which we pass to the `urlSigner.validateSignature` parameter. - -If it's valid, you will receive a path (or key) to the file - which you can then lookup in your storage. - -The `read` function also returns the mime-type of the file (based on the extension) - which you pass as a response header. This ensures that browsers know how to read your response! - -
- -### Data URIs - -When you have smaller files, you can choose instead to return a Base64 DataURI string that you can render directly into your html. - -```ts title="api/src/services/profiles.ts" -export const profile = async ({ id }) => { - const profile = await db.profile.findUnique({ - where: { id }, - }) - - // highlight-next-line - return profile?.withDataUri() -} -``` - -:::tip -The `withDataUri` extension is an `async` function. Remember to await, if you are doing additional manipulation before returning your result object from the service. -::: - -The output of `withDataUri` would be your profile object, with the upload fields transformed into a data uri. For example: - -```js -{ - // other fields - id: 12355, - name: 'Danny' - email: '...' - // Because configured avatar as an upload field: - // highlight-next-line - avatar: 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAJ/...Q0MgUHJvZmlsZQAAKJF1kL=' -} -``` - -## Storage Adapters - -Storage adapters are crucial for abstracting the underlying storage mechanism, allowing for flexibility in how files are managed. The BaseStorageAdapter defines a standard interface for all storage adapters, and looks like this: - -```ts -export abstract class BaseStorageAdapter { - adapterOpts: AdapterOptions - constructor(adapterOpts: AdapterOptions) { - this.adapterOpts = adapterOpts - } - - getAdapterOptions() { - return this.adapterOpts - } - - generateFileNameWithExtension( - saveOpts: SaveOptionsOverride | undefined, - file: File - ) { - /** We give you an easy way to generate file names **/ - } - - abstract save( - file: File, - saveOpts?: SaveOptionsOverride - ): Promise - - abstract remove(fileLocation: AdapterResult['location']): Promise - - abstract read(fileLocation: AdapterResult['location']): Promise<{ - contents: Buffer | string - type: ReturnType - }> -} -``` - -Types of Storage Adapters -MemoryStorage: This adapter stores files in memory, making it ideal for temporary storage needs or testing scenarios. 
It offers faster access times but does not persist data across application restarts. - -We build in two storage adapters: - -- [FileSystemStorage](https://github.com/redwoodjs/redwood/blob/main/packages/storage/src/adapters/FileSystemStorage/FileSystemStorage.ts) - This adapter interacts with the file system, enabling the storage of files on disk. -- [MemoryStorage](https://github.com/redwoodjs/redwood/blob/main/packages/storage/src/adapters/MemoryStorage/MemoryStorage.ts) - this adapter stores files in memory, making it ideal for temporary storage needs or testing scenarios. It offers faster access times but does not persist data across application restarts. - -## Configuring the server further - -Sometimes, you may need more control over how the Redwood API server behaves. This could include customizing the body limit for requests, redirects, or implementing additional logic - that's exactly what the [Server File](server-file.md) is for! - -### Making a folder public - -While you can always create a function to access certain files publicly, similar to the `/signedUrl` function that gets generated for you - another way could be to configure the API server with the [fastify-static](https://github.com/fastify/fastify-static) plugin to make a specific folder publicly accessible. 
- -```js title="api/server.js" -import path from 'path' -// highlight-next-line -import fastifyStatic from '@fastify/static' - -import { createServer } from '@redwoodjs/api-server' -import { logger } from 'src/lib/logger' - -async function main() { - const server = await createServer({ - logger, - }) - - // highlight-start - server.register(fastifyStatic, { - root: path.join(process.cwd() + '/uploads/public_profile_photos'), - prefix: '/public_uploads', - }) - // highlight-end - - await server.start() -} - -main() -``` - -Based on the above, you'll be able to access your files at: - -`http://localhost:8910/.redwood/functions/public_uploads/01J6AF89Y89WTWZF12DRC72Q2A.jpeg` - -OR directly - -`http://localhost:8911/public_uploads/01J6AF89Y89WTWZF12DRC72Q2A.jpeg` - -Where you are only exposing **part** of your uploads directory publicly - -In your web side code you can construct the URL like this: - -```ts -const publicUrl = `${global.RWJS_API_URL}/${profile.avatar.replace( - 'uploads/public_profile_photos/', - 'public_uploads/' -)}` -``` - -### Customizing the body limit for requests - -The default body size limit for the Redwood API server is 100MB (per request). Depending on the sizes of files you're uploading, especially in the case of multiple files, you may receive errors like this: - -```json -{ - "code": "FST_ERR_CTP_BODY_TOO_LARGE", - "error": "Payload Too Large", - "message": "Request body is too large" -} -``` - -You can configure the `bodyLimit` option to increase or decrease the default limit. 
- -```js title="api/server.js" -import { createServer } from '@redwoodjs/api-server' - -import { logger } from 'src/lib/logger' - -async function main() { - const server = await createServer({ - logger, - fastifyServerOptions: { - // highlight-next-line - bodyLimit: 1024 * 1024 * 500, // 500MB - }, - }) - - await server.start() -} - -main() -``` diff --git a/docs/sidebars.js b/docs/sidebars.js index 28c24efb2730..12c823da1cc7 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -197,6 +197,37 @@ module.exports = { 'server-file', 'serverless-functions', 'services', + { + type: 'category', + label: 'Storage and Uploads', + link: { + type: 'generated-index', + title: 'Storage and Uploads', + slug: 'storage-and-uploads/index', + }, + items: [ + { + type: 'doc', + label: 'Overview', + id: 'storage-and-uploads/overview', + }, + { + type: 'doc', + label: 'Quick Start', + id: 'storage-and-uploads/quickstart', + }, + { + type: 'doc', + label: 'Storage', + id: 'storage-and-uploads/storage', + }, + { + type: 'doc', + label: 'Uploads', + id: 'storage-and-uploads/uploads', + }, + ], + }, 'storybook', 'studio', 'testing', @@ -231,8 +262,8 @@ module.exports = { }, ], }, + 'webhooks', - 'uploads', 'vite-configuration', ], }, diff --git a/package.json b/package.json index 1e11f6f48d32..371f78a9752a 100644 --- a/package.json +++ b/package.json @@ -9,6 +9,9 @@ "packages/mailer/core", "packages/mailer/handlers/*", "packages/mailer/renderers/*", + "packages/storage/core", + "packages/storage/adapters/*", + "packages/uploads/*", "!packages/create-redwood-rsc-app" ], "scripts": { diff --git a/packages/cli/src/commands/setup/storage/storage.js b/packages/cli/src/commands/setup/storage/storage.js new file mode 100644 index 000000000000..efd0a746b14f --- /dev/null +++ b/packages/cli/src/commands/setup/storage/storage.js @@ -0,0 +1,31 @@ +import { recordTelemetryAttributes } from '@redwoodjs/cli-helpers' + +export const command = 'storage' + +export const description = + 'Setup redwood 
storage. This will install the required packages and add the required initial configuration to your redwood app.' + +export const builder = (yargs) => { + yargs + .option('force', { + alias: 'f', + default: false, + description: 'Overwrite existing configuration', + type: 'boolean', + }) + .option('skip-examples', { + default: false, + description: 'Only include required files and exclude any examples', + type: 'boolean', + }) +} + +export const handler = async (options) => { + recordTelemetryAttributes({ + command: 'setup storage', + force: options.force, + skipExamples: options.skipExamples, + }) + const { handler } = await import('./storageHandler.js') + return handler(options) +} diff --git a/packages/cli/src/commands/setup/storage/storageHandler.js b/packages/cli/src/commands/setup/storage/storageHandler.js new file mode 100644 index 000000000000..f63ee30bffdf --- /dev/null +++ b/packages/cli/src/commands/setup/storage/storageHandler.js @@ -0,0 +1,182 @@ +import crypto from 'node:crypto' +import path from 'node:path' + +import fs from 'fs-extra' +import { Listr } from 'listr2' +import { format } from 'prettier' + +import { + addApiPackages, + getPrettierOptions, + addEnvVarTask, +} from '@redwoodjs/cli-helpers' +import { errorTelemetry } from '@redwoodjs/telemetry' + +import { getPaths, transformTSToJS, writeFile } from '../../../lib' +import c from '../../../lib/colors' +import { isTypeScriptProject } from '../../../lib/project' + +export const handler = async ({ force }) => { + const projectIsTypescript = isTypeScriptProject() + const redwoodVersion = + require(path.join(getPaths().base, 'package.json')).devDependencies[ + '@redwoodjs/core' + ] ?? 'latest' + + const tasks = new Listr( + [ + { + title: `Adding api/src/lib/storage.${ + projectIsTypescript ? 
'ts' : 'js' + }...`, + task: async () => { + const templatePath = path.resolve( + __dirname, + 'templates', + 'storage.ts.template', + ) + const templateContent = fs.readFileSync(templatePath, { + encoding: 'utf8', + flag: 'r', + }) + + const storagePath = path.join( + getPaths().api.lib, + `storage.${projectIsTypescript ? 'ts' : 'js'}`, + ) + const storageContent = projectIsTypescript + ? templateContent + : await transformTSToJS(storagePath, templateContent) + + return writeFile(storagePath, storageContent, { + overwriteExisting: force, + }) + }, + }, + { + title: `Adding withStorage directive...`, + task: async () => { + const templatePath = path.resolve( + __dirname, + 'templates', + 'withStorage.directive.ts.template', + ) + const templateContent = fs.readFileSync(templatePath, { + encoding: 'utf8', + flag: 'r', + }) + + const storagePath = path.join( + getPaths().api.directives, + 'withStorage.ts', + ) + const storageContent = projectIsTypescript + ? templateContent + : await transformTSToJS(storagePath, templateContent) + + return writeFile(storagePath, storageContent, { + overwriteExisting: force, + }) + }, + }, + { + title: `Adding signedUrl function...`, + task: async () => { + const templatePath = path.resolve( + __dirname, + 'templates', + 'storageFunction.ts.template', + ) + const templateContent = fs.readFileSync(templatePath, { + encoding: 'utf8', + flag: 'r', + }) + + const storagePath = path.join( + getPaths().api.functions, + `storage.${projectIsTypescript ? 'ts' : 'js'}`, + ) + const storageContent = projectIsTypescript + ? 
templateContent + : await transformTSToJS(storagePath, templateContent) + + return writeFile(storagePath, storageContent, { + overwriteExisting: force, + }) + }, + }, + // TODO(jgmw): Enable this once these packages have been published otherwise it will fail + { + ...addApiPackages([ + `@redwoodjs/storage-core@${redwoodVersion}`, + `@redwoodjs/storage-adapter-filesystem@${redwoodVersion}`, + ]), + title: 'Adding required dependencies to your api side...', + }, + { + title: 'Prettifying changed files', + task: async (_ctx, task) => { + const prettifyPaths = [ + path.join(getPaths().api.lib, 'storage.js'), + path.join(getPaths().api.lib, 'storage.ts'), + path.join(getPaths().api.functions, 'storage.js'), + path.join(getPaths().api.functions, 'storage.ts'), + ] + + for (const prettifyPath of prettifyPaths) { + try { + if (!fs.existsSync(prettifyPath)) { + continue + } + const source = fs.readFileSync(prettifyPath, 'utf-8') + const prettierOptions = await getPrettierOptions() + const prettifiedApp = await format(source, { + ...prettierOptions, + parser: 'babel-ts', + }) + + fs.writeFileSync(prettifyPath, prettifiedApp, 'utf-8') + } catch { + task.output = + "Couldn't prettify the changes. Please reformat the files manually if needed." + } + } + }, + }, + addEnvVarTask( + 'STORAGE_SIGNING_SECRET', + crypto.randomBytes(32).toString('base64'), + 'Secret for securely signing tokens used in the self hosted storage function', + ), + addEnvVarTask( + 'STORAGE_SIGNING_BASE_URL', + 'http://localhost:8911/storage', + 'Base URL for the self hosted storage function', + ), + { + title: 'One more thing...', + task: (_ctx, task) => { + task.title = `One more thing... 
+ + ${c.success('\nStorage setup complete!\n')} + + Check out the docs for more info: + ${c.link('https://docs.redwoodjs.com/docs/storage')} + + ` + }, + }, + ], + { + rendererOptions: { collapseSubtasks: false }, + }, + ) + + try { + await tasks.run() + } catch (e) { + errorTelemetry(process.argv, e.message) + console.error(c.error(e.message)) + process.exit(e?.exitCode || 1) + } +} diff --git a/packages/cli/src/commands/setup/storage/templates/storage.ts.template b/packages/cli/src/commands/setup/storage/templates/storage.ts.template new file mode 100644 index 000000000000..f4e6555bcaac --- /dev/null +++ b/packages/cli/src/commands/setup/storage/templates/storage.ts.template @@ -0,0 +1,37 @@ +// Setup and configuration for storage +// See: https://docs.redwoodjs.com/docs/storage + +import path from 'node:path' + +import { FileSystemAdapter } from '@redwoodjs/storage-adapter-filesystem' +import { StorageManager, StorageSelfSigner } from '@redwoodjs/storage-core' + +const baseUrl = process.env.STORAGE_SIGNING_BASE_URL +export const signer = new StorageSelfSigner({ + secret: process.env.STORAGE_SIGNING_SECRET, +}) + +export const storage = new StorageManager({ + adapters: { + local: new FileSystemAdapter({ + root: path.join(__dirname, '..', '..', '.storage'), + signing: { + signer, + baseUrl, + }, + }), + special: new FileSystemAdapter({ + root: path.join(__dirname, '..', '..', '.storage-special'), + signing: { + signer, + baseUrl, + }, + }), + }, + + default: 'local', + + env: { + development: 'local', + }, +}) diff --git a/packages/cli/src/commands/setup/storage/templates/storageFunction.ts.template b/packages/cli/src/commands/setup/storage/templates/storageFunction.ts.template new file mode 100644 index 000000000000..bbce292ddf4c --- /dev/null +++ b/packages/cli/src/commands/setup/storage/templates/storageFunction.ts.template @@ -0,0 +1,56 @@ +import { Buffer } from 'buffer' + +import type { APIGatewayEvent, Context } from 'aws-lambda' + +import { storage, signer 
} from 'src/lib/storage' + +export const handler = async (event: APIGatewayEvent, _context: Context) => { + const unauthorizedResponse = { + statusCode: 401, + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ error: 'Unauthorized' }), + } + + // Extract the token + const token = event.queryStringParameters?.token + if (!token) { + return unauthorizedResponse + } + + // Decode the token + const decoded = signer.decode(token) + if (!decoded) { + return unauthorizedResponse + } + + const { adapter: adapterName, reference, expiry } = decoded + + // Validate the expiry + if (expiry && expiry < Date.now()) { + return unauthorizedResponse + } + + // Validate the adapter + const adapter = storage.findAdapter(adapterName) + if (!adapter) { + return unauthorizedResponse + } + + const etagData = `${adapterName}-${reference}-${expiry}` + const ETag = `"${Buffer.from(etagData).toString('base64')}"` + + // Lookup and return the data + const result = await adapter.readData(reference) + + return { + statusCode: 200, + headers: { + // 'Content-Type': 'application/octet-stream', + 'Cache-Control': 'public, max-age=3600', + ETag, + }, + body: result, + } +} diff --git a/packages/cli/src/commands/setup/storage/templates/withStorage.directive.ts.template b/packages/cli/src/commands/setup/storage/templates/withStorage.directive.ts.template new file mode 100644 index 000000000000..3f69517014e2 --- /dev/null +++ b/packages/cli/src/commands/setup/storage/templates/withStorage.directive.ts.template @@ -0,0 +1,88 @@ +import { RedwoodStorageFormat, RedwoodStorageAdapter } from 'types/graphql' + +import type { TransformArgs } from '@redwoodjs/graphql-server' +import { + createTransformerDirective, + TransformerDirectiveFunc, +} from '@redwoodjs/graphql-server' +import type { StorageAdapter } from '@redwoodjs/storage-core' + +import { logger } from 'src/lib/logger' +import { storage } from 'src/lib/storage' + +export const schema = gql` + """ + Use @withStorage to 
fetch data from storage as a signed URL or data URI. + """ + enum RedwoodStorageFormat { + SIGNED_URL + DATA_URI + } + + enum RedwoodStorageAdapter { + S3 + FS + OG + } + + directive @withStorage( + format: RedwoodStorageFormat = SIGNED_URL + adapter: RedwoodStorageAdapter = FS + ) on FIELD_DEFINITION +` + +export const getBase64DataUri = async ( + adapter: StorageAdapter, + reference: string +): Promise => { + try { + const file = await adapter.readFile(reference) + const base64Data = Buffer.from(await file.arrayBuffer()).toString('base64') + const mimeType = file.type + + const dataUri = `data:${mimeType};base64,${base64Data}` + return dataUri + } catch (error) { + logger.error({ error, reference }, 'Error creating base64 data URI') + throw error + } +} + +// New type definition for directiveArgs +type WithStorageDirectiveArgs = { + adapter: RedwoodStorageAdapter + format: RedwoodStorageFormat +} + +const transform: TransformerDirectiveFunc = async ({ + directiveArgs, + resolvedValue, +}: TransformArgs) => { + if (typeof resolvedValue !== 'string' || resolvedValue.length === 0) { + return null + } + + const format = directiveArgs.format + const adapter = storage.findAdapter(directiveArgs.adapter.toLowerCase()) + + // you can check context's currentUser to conditionally return signed urls or data uris + + try { + if (format === 'SIGNED_URL') { + return await adapter.getSignedUrl(resolvedValue) + } + + if (format === 'DATA_URI') { + return await getBase64DataUri(adapter, resolvedValue) + } + + return resolvedValue + } catch (error) { + logger.error({ error, resolvedValue }, 'Error in withStorage directive') + throw new Error('Failed to process storage directive') + } +} + +const withStorage = createTransformerDirective(schema, transform) + +export default withStorage diff --git a/packages/cli/src/commands/setup/uploads/__codemod_tests__/dbCodemod.test.ts b/packages/cli/src/commands/setup/uploads/__codemod_tests__/dbCodemod.test.ts deleted file mode 100644 index 
0475defece4a..000000000000 --- a/packages/cli/src/commands/setup/uploads/__codemod_tests__/dbCodemod.test.ts +++ /dev/null @@ -1,22 +0,0 @@ -import path from 'node:path' - -import { describe, it, expect } from 'vitest' - -import { runTransform } from '../../../../lib/runTransform' - -describe('Db codemod', () => { - it('Handles the default db case', async () => { - await matchTransformSnapshot('dbCodemod', 'defaultDb') - }) - - it('will throw an error if the db file has the old format', async () => { - const transformResult = await runTransform({ - transformPath: path.join(__dirname, '../dbCodemod.ts'), // Use TS here! - targetPaths: [ - path.join(__dirname, '../__testfixtures__/oldFormat.input.ts'), - ], - }) - - expect(transformResult.error).toContain('ERR_OLD_FORMAT') - }) -}) diff --git a/packages/cli/src/commands/setup/uploads/__testfixtures__/defaultDb.input.ts b/packages/cli/src/commands/setup/uploads/__testfixtures__/defaultDb.input.ts deleted file mode 100644 index 006193cc6b72..000000000000 --- a/packages/cli/src/commands/setup/uploads/__testfixtures__/defaultDb.input.ts +++ /dev/null @@ -1,26 +0,0 @@ -// See https://www.prisma.io/docs/reference/tools-and-interfaces/prisma-client/constructor -// for options. - -import { PrismaClient } from '@prisma/client' - -import { emitLogLevels, handlePrismaLogging } from '@redwoodjs/api/logger' - -import { logger } from './logger' - -const prismaClient = new PrismaClient({ - log: emitLogLevels(['info', 'warn', 'error']), -}) - -handlePrismaLogging({ - db: prismaClient, - logger, - logLevels: ['info', 'warn', 'error'], -}) - -/** - * Global Prisma client extensions should be added here, as $extend - * returns a new instance. - * export const db = prismaClient.$extend(...) 
- * Add any .$on hooks before using $extend - */ -export const db = prismaClient diff --git a/packages/cli/src/commands/setup/uploads/__testfixtures__/defaultDb.output.ts b/packages/cli/src/commands/setup/uploads/__testfixtures__/defaultDb.output.ts deleted file mode 100644 index 5c60d4398da2..000000000000 --- a/packages/cli/src/commands/setup/uploads/__testfixtures__/defaultDb.output.ts +++ /dev/null @@ -1,28 +0,0 @@ -// See https://www.prisma.io/docs/reference/tools-and-interfaces/prisma-client/constructor -// for options. - -import { PrismaClient } from '@prisma/client' - -import { emitLogLevels, handlePrismaLogging } from '@redwoodjs/api/logger' - -import { logger } from './logger' - -import { storagePrismaExtension } from './uploads' - -const prismaClient = new PrismaClient({ - log: emitLogLevels(['info', 'warn', 'error']), -}) - -handlePrismaLogging({ - db: prismaClient, - logger, - logLevels: ['info', 'warn', 'error'], -}) - -/** - * Global Prisma client extensions should be added here, as $extend - * returns a new instance. - * export const db = prismaClient.$extend(...) - * Add any .$on hooks before using $extend - */ -export const db = prismaClient.$extends(storagePrismaExtension) diff --git a/packages/cli/src/commands/setup/uploads/__testfixtures__/oldFormat.input.ts b/packages/cli/src/commands/setup/uploads/__testfixtures__/oldFormat.input.ts deleted file mode 100644 index 3001ab2be9d7..000000000000 --- a/packages/cli/src/commands/setup/uploads/__testfixtures__/oldFormat.input.ts +++ /dev/null @@ -1,18 +0,0 @@ -// See https://www.prisma.io/docs/reference/tools-and-interfaces/prisma-client/constructor -// for options. 
- -import { PrismaClient } from '@prisma/client' - -import { emitLogLevels, handlePrismaLogging } from '@redwoodjs/api/logger' - -import { logger } from './logger' - -export const db = new PrismaClient({ - log: emitLogLevels(['info', 'warn', 'error']), -}) - -handlePrismaLogging({ - db, - logger, - logLevels: ['info', 'warn', 'error'], -}) diff --git a/packages/cli/src/commands/setup/uploads/dbCodemod.ts b/packages/cli/src/commands/setup/uploads/dbCodemod.ts deleted file mode 100644 index 6638e0c79c32..000000000000 --- a/packages/cli/src/commands/setup/uploads/dbCodemod.ts +++ /dev/null @@ -1,44 +0,0 @@ -import j from 'jscodeshift' - -module.exports = function transform(fileInfo: j.FileInfo) { - const root = j(fileInfo.source) - - // Add the import statement for storagePrismaExtension - const imports = root.find(j.ImportDeclaration) - - imports - .at(-1) // add it after the last one - .insertAfter( - j.importDeclaration( - [j.importSpecifier(j.identifier('storagePrismaExtension'))], - j.literal('./uploads'), - ), - ) - - // Find the export statement for db and modify it - root - .find(j.VariableDeclaration, { declarations: [{ id: { name: 'db' } }] }) - .forEach((path) => { - const dbDeclaration = path.node.declarations[0] - - if ( - j.VariableDeclarator.check(dbDeclaration) && - j.NewExpression.check(dbDeclaration.init) - ) { - throw new Error('RW_CODEMOD_ERR_OLD_FORMAT') - } - - if ( - j.VariableDeclarator.check(dbDeclaration) && - j.Expression.check(dbDeclaration.init) - ) { - const newInit = j.callExpression( - j.memberExpression(dbDeclaration.init, j.identifier('$extends')), - [j.identifier('storagePrismaExtension')], - ) - dbDeclaration.init = newInit - } - }) - - return root.toSource() -} diff --git a/packages/cli/src/commands/setup/uploads/templates/api/directives/requireUploadToken.ts.template b/packages/cli/src/commands/setup/uploads/templates/api/directives/requireUploadToken.ts.template new file mode 100644 index 000000000000..ed4cfeddc729 --- /dev/null 
+++ b/packages/cli/src/commands/setup/uploads/templates/api/directives/requireUploadToken.ts.template @@ -0,0 +1,54 @@ +import type { DirectiveParams } from '@redwoodjs/graphql-server' +import { + createValidatorDirective, + ValidationError, + ValidatorDirectiveFunc, +} from '@redwoodjs/graphql-server' +import { validateUploadToken, validateFiles } from '@redwoodjs/uploads-graphql' +import type { RedwoodUploadContext } from '@redwoodjs/uploads-graphql' + +import { logger } from 'src/lib/logger' + +export const schema = gql` + """ + Use @requireUploadToken to validate file uploads with dynamic input and size constraints. + """ + directive @requireUploadToken( + variable: String! + fields: [String!]! + ) on FIELD_DEFINITION +` + +const validate: ValidatorDirectiveFunc = ({ + directiveArgs, + args, + context, +}: { + directiveArgs: DirectiveParams['directiveArgs'] + args: DirectiveParams['args'] + context: RedwoodUploadContext +}) => { + const { variable, fields } = directiveArgs + + const uploadsConfig = validateUploadToken(context) + + try { + const inputVariable = args[variable] + + if (!inputVariable) { + throw new ValidationError('Input variable for files is required') + } + + fields.forEach((field) => { + const files = inputVariable[field] as File[] + validateFiles(files, uploadsConfig, context) + }) + } catch (error) { + logger.warn({ error }, 'Upload validation failed') + throw new ValidationError(error.message) + } +} + +const requireUploadToken = createValidatorDirective(schema, validate) + +export default requireUploadToken diff --git a/packages/cli/src/commands/setup/uploads/templates/api/sdl/redwoodUploads.sdl.template b/packages/cli/src/commands/setup/uploads/templates/api/sdl/redwoodUploads.sdl.template new file mode 100644 index 000000000000..585be416d727 --- /dev/null +++ b/packages/cli/src/commands/setup/uploads/templates/api/sdl/redwoodUploads.sdl.template @@ -0,0 +1,11 @@ +export const schema = gql` + scalar File + + type RedwoodUploadToken { + 
token: String! + } + + type Query { + getRedwoodUploadToken(operationName: String!): RedwoodUploadToken! @skipAuth + } +` diff --git a/packages/cli/src/commands/setup/uploads/templates/api/services/redwoodUploads.ts.template b/packages/cli/src/commands/setup/uploads/templates/api/services/redwoodUploads.ts.template new file mode 100644 index 000000000000..6d6c5d11a7f7 --- /dev/null +++ b/packages/cli/src/commands/setup/uploads/templates/api/services/redwoodUploads.ts.template @@ -0,0 +1,27 @@ +import { + createUploadToken, + IMAGE_CONTENT_TYPES, +} from '@redwoodjs/uploads-graphql' +import type { UploadTokenPayload } from '@redwoodjs/uploads-graphql' + +import type { GetRedwoodUploadTokenResolver } from './types' +export const getRedwoodUploadToken: GetRedwoodUploadTokenResolver = async ({ + operationName, +}) => { + if (!process.env.UPLOAD_TOKEN_SECRET) { + throw new Error('UPLOAD_TOKEN_SECRET is not set') + } + + // Note: based on the operation name, we could configure the content types, max file size, etc + + const token = createUploadToken({ + operationName, + minFiles: 1, + maxFiles: 3, + expiresIn: 24 * 60 * 60, + maxFileSize: 1 * 1024 * 1024, // 1MB + contentTypes: IMAGE_CONTENT_TYPES, + }) + + return { token } +} diff --git a/packages/cli/src/commands/setup/uploads/templates/api/services/types.ts.template b/packages/cli/src/commands/setup/uploads/templates/api/services/types.ts.template new file mode 100644 index 000000000000..1fcf87dc247d --- /dev/null +++ b/packages/cli/src/commands/setup/uploads/templates/api/services/types.ts.template @@ -0,0 +1,30 @@ +import type { GraphQLResolveInfo } from 'graphql' + +import type { RedwoodGraphQLContext } from '@redwoodjs/graphql-server/dist/types' + +export interface RedwoodUploadToken { + __typename?: 'RedwoodUploadToken' + token: string +} + +export interface RTRedwoodUploadToken { + __typename?: 'RedwoodUploadToken' + token: string +} + +export interface Query { + __typename?: 'Query' + getRedwoodUploadToken: 
RedwoodUploadToken +} + +export interface GetRedwoodUploadTokenResolver { + ( + args: { operationName: string }, + obj?: { + root: Query + + context: RedwoodGraphQLContext + info: GraphQLResolveInfo + }, + ): Promise +} diff --git a/packages/cli/src/commands/setup/uploads/templates/signedUrl.ts.template b/packages/cli/src/commands/setup/uploads/templates/signedUrl.ts.template deleted file mode 100644 index e5856601bce9..000000000000 --- a/packages/cli/src/commands/setup/uploads/templates/signedUrl.ts.template +++ /dev/null @@ -1,21 +0,0 @@ -import type { APIGatewayEvent, Context } from 'aws-lambda' - -import type { SignatureValidationArgs } from '@redwoodjs/storage/UrlSigner' - -import { urlSigner, fsStorage } from 'src/lib/uploads' - -export const handler = async (event: APIGatewayEvent, _context: Context) => { - const fileToReturn = urlSigner.validateSignature( - event.queryStringParameters as SignatureValidationArgs - ) - - const { contents, type } = await fsStorage.read(fileToReturn) - - return { - statusCode: 200, - headers: { - 'Content-Type': type, - }, - body: contents, - } -} diff --git a/packages/cli/src/commands/setup/uploads/templates/srcLibUploads.ts.template b/packages/cli/src/commands/setup/uploads/templates/srcLibUploads.ts.template deleted file mode 100644 index e648e54cfe95..000000000000 --- a/packages/cli/src/commands/setup/uploads/templates/srcLibUploads.ts.template +++ /dev/null @@ -1,25 +0,0 @@ -import { createUploadsConfig, setupStorage } from '@redwoodjs/storage' -import { FileSystemStorage } from '@redwoodjs/storage/FileSystemStorage' -import { UrlSigner } from '@redwoodjs/storage/UrlSigner' - -const uploadsConfig = createUploadsConfig({ - // Configure your fields here - // e.g. 
modelName: { fields: ['fieldWithUpload']} -}) - -export const fsStorage = new FileSystemStorage({ - baseDir: './uploads', -}) - -export const urlSigner = new UrlSigner({ - secret: process.env.UPLOADS_SECRET, - endpoint: '/signedUrl', -}) - -const { saveFiles, storagePrismaExtension } = setupStorage({ - uploadsConfig, - storageAdapter: fsStorage, - urlSigner, -}) - -export { saveFiles, storagePrismaExtension } diff --git a/packages/cli/src/commands/setup/uploads/uploads.js b/packages/cli/src/commands/setup/uploads/uploads.js index ff2a3de2616b..70fef6f5b083 100644 --- a/packages/cli/src/commands/setup/uploads/uploads.js +++ b/packages/cli/src/commands/setup/uploads/uploads.js @@ -2,23 +2,29 @@ import { recordTelemetryAttributes } from '@redwoodjs/cli-helpers' export const command = 'uploads' -export const description = - 'Setup uploads and storage. This will install the required packages and add the required initial configuration to your redwood app.' +export const description = 'Setup RedwoodJS Uploads' -export const builder = (yargs) => { - yargs.option('force', { - alias: 'f', - default: false, - description: 'Overwrite existing configuration', - type: 'boolean', - }) +export function builder(yargs) { + yargs + .option('force', { + alias: 'f', + default: false, + description: 'Overwrite existing configuration', + type: 'boolean', + }) + .option('verbose', { + alias: 'v', + default: false, + description: 'Print more logs', + type: 'boolean', + }) } -export const handler = async (options) => { +export async function handler(options) { recordTelemetryAttributes({ - command: 'setup uploads', + command: 'setup upload', force: options.force, - skipExamples: options.skipExamples, + verbose: options.verbose, }) const { handler } = await import('./uploadsHandler.js') return handler(options) diff --git a/packages/cli/src/commands/setup/uploads/uploadsHandler.js b/packages/cli/src/commands/setup/uploads/uploadsHandler.js index 8abcc6656406..6ee7c460e856 100644 --- 
a/packages/cli/src/commands/setup/uploads/uploadsHandler.js +++ b/packages/cli/src/commands/setup/uploads/uploadsHandler.js @@ -1,118 +1,226 @@ -import path from 'node:path' +import crypto from 'node:crypto' +import path from 'path' import fs from 'fs-extra' import { Listr } from 'listr2' import { format } from 'prettier' -import { addApiPackages, getPrettierOptions } from '@redwoodjs/cli-helpers' +import { + addApiPackages, + addWebPackages, + addEnvVarTask, + getPrettierOptions, +} from '@redwoodjs/cli-helpers' +import { generate as generateTypes } from '@redwoodjs/internal/dist/generate/generate' +import { getConfig } from '@redwoodjs/project-config' import { errorTelemetry } from '@redwoodjs/telemetry' import { getPaths, transformTSToJS, writeFile } from '../../../lib' import c from '../../../lib/colors' import { isTypeScriptProject } from '../../../lib/project' -import { runTransform } from '../../../lib/runTransform' +const { version } = JSON.parse( + fs.readFileSync(path.resolve(__dirname, '../../../../package.json'), 'utf-8'), +) -export const handler = async ({ force }) => { - const projectIsTypescript = isTypeScriptProject() - const redwoodVersion = - require(path.join(getPaths().base, 'package.json')).devDependencies[ - '@redwoodjs/core' - ] ?? 'latest' +export async function handler({ force, verbose }) { + const redwoodPaths = getPaths() + const ts = isTypeScriptProject() + const projectName = getConfig().web.title const tasks = new Listr( [ + addApiPackages([`@redwoodjs/uploads-graphql@${version}`]), + addWebPackages([`@redwoodjs/uploads-web@${version}`]), { - title: `Adding api/src/lib/uploads.${ - projectIsTypescript ? 
'ts' : 'js' - }...`, + title: 'Adding the upload directive ...', task: async () => { - const templatePath = path.resolve( - __dirname, - 'templates', - 'srcLibUploads.ts.template', + const uploadsDirectiveTemplateContent = fs.readFileSync( + path.resolve( + __dirname, + 'templates', + 'api', + 'directives', + 'requireUploadToken.ts.template', + ), + 'utf-8', ) - const templateContent = fs.readFileSync(templatePath, { - encoding: 'utf8', - flag: 'r', - }) - - const uploadsPath = path.join( - getPaths().api.lib, - `uploads.${projectIsTypescript ? 'ts' : 'js'}`, + + const uploadsDirectiveFile = path.join( + redwoodPaths.api.directives, + 'requireUploadToken', + 'requireUploadToken.ts', ) - const uploadsContent = projectIsTypescript - ? templateContent - : await transformTSToJS(uploadsPath, templateContent) - return writeFile(uploadsPath, uploadsContent, { - overwriteExisting: force, - }) + const directiveContent = ts + ? uploadsDirectiveTemplateContent + : await transformTSToJS( + uploadsDirectiveFile, + uploadsDirectiveTemplateContent, + ) + + return [ + writeFile(uploadsDirectiveFile, directiveContent, { + overwriteExisting: force, + }), + ] }, }, { - title: `Adding signedUrl function...`, + title: 'Adding uploads sdl and service ...', task: async () => { - const templatePath = path.resolve( - __dirname, - 'templates', - 'signedUrl.ts.template', + // sdl + + const uploadSdlTemplateContent = fs.readFileSync( + path.resolve( + __dirname, + 'templates', + 'api', + 'sdl', + `redwoodUploads.sdl.template`, + ), + 'utf-8', + ) + + const sdlFile = path.join( + redwoodPaths.api.graphql, + `redwoodUploads.sdl.${isTypeScriptProject() ? 'ts' : 'js'}`, + ) + + const sdlContent = ts + ? 
uploadSdlTemplateContent + : await transformTSToJS(sdlFile, uploadSdlTemplateContent) + + // service + + const exampleServiceTemplateContent = fs.readFileSync( + path.resolve( + __dirname, + 'templates', + 'api', + 'services', + `redwoodUploads.ts.template`, + ), + 'utf-8', ) - const templateContent = fs.readFileSync(templatePath, { - encoding: 'utf8', - flag: 'r', - }) - - const uploadsPath = path.join( - getPaths().api.functions, - `signedUrl.${projectIsTypescript ? 'ts' : 'js'}`, + const serviceFile = path.join( + redwoodPaths.api.services, + 'redwoodUploads', + `redwoodUploads.${isTypeScriptProject() ? 'ts' : 'js'}`, ) - const uploadsContent = projectIsTypescript - ? templateContent - : await transformTSToJS(uploadsPath, templateContent) - return writeFile(uploadsPath, uploadsContent, { - overwriteExisting: force, - }) + const serviceContent = ts + ? exampleServiceTemplateContent + : await transformTSToJS(serviceFile, exampleServiceTemplateContent) + + // types + + const typesTemplateContent = fs.readFileSync( + path.resolve( + __dirname, + 'templates', + 'api', + 'services', + `types.ts.template`, + ), + 'utf-8', + ) + let typesFile + let typesContent + + if (isTypeScriptProject()) { + typesFile = path.join( + redwoodPaths.api.services, + 'redwoodUploads', + `types.${isTypeScriptProject() ? 'ts' : 'js'}`, + ) + + typesContent = ts + ? 
typesTemplateContent + : await transformTSToJS(typesFile, typesTemplateContent) + } + // write all files + return [ + writeFile(sdlFile, sdlContent, { + overwriteExisting: force, + }), + writeFile(serviceFile, serviceContent, { + overwriteExisting: force, + }), + isTypeScriptProject() && + writeFile(typesFile, typesContent, { + overwriteExisting: force, + }), + ] }, }, { - ...addApiPackages([`@redwoodjs/storage@${redwoodVersion}`]), - title: 'Adding dependencies to your api side...', - }, - { - title: 'Modifying api/src/lib/db to add uploads prisma extension..', + title: 'Adding the uploads plugin to the graphql server ...', task: async () => { - const dbPath = path.join( - getPaths().api.lib, - `db.${projectIsTypescript ? 'ts' : 'js'}`, + const graphqlFunctionFile = path.join( + redwoodPaths.api.functions, + `graphql.${isTypeScriptProject() ? 'ts' : 'js'}`, + ) + + // Read the graphql function file + let graphqlFunctionContent = fs.readFileSync( + graphqlFunctionFile, + 'utf-8', ) - const transformResult = await runTransform({ - transformPath: path.join(__dirname, 'dbCodemod.js'), - targetPaths: [dbPath], - }) + // Add import statement at the top of the file + const importStatement = `import { useRedwoodUploads } from '@redwoodjs/uploads-graphql' - if (transformResult.error) { - if (transformResult.error === 'RW_CODEMOD_ERR_OLD_FORMAT') { - throw new Error( - 'It looks like your src/lib/db file is using the old format. Please update it as per the v8 upgrade guide: https://redwoodjs.com/upgrade/v8#database-file-structure-change. And run again. \n\nYou can also manually modify your api/src/lib/db to include the prisma extension: https://docs.redwoodjs.com/docs/uploads/#attaching-the-prisma-extension', - ) - } +// +// In extraPlugins, set up useRedwoodUploads +// extraPlugins: [ +// useRedwoodUploads({ +// appName: '${projectName}', +// }), +// ] +// - throw new Error( - 'Could not add the prisma extension. 
\n Please modify your api/src/lib/db to include the prisma extension: https://docs.redwoodjs.com/docs/uploads/#attaching-the-prisma-extension', - ) - } +` + + graphqlFunctionContent = importStatement + graphqlFunctionContent + + // Write the updated content back to the file + await fs.writeFile( + graphqlFunctionFile, + graphqlFunctionContent, + 'utf-8', + ) + }, + }, + addEnvVarTask( + 'UPLOAD_TOKEN_SECRET', + crypto.randomBytes(32).toString('base64'), + 'Secret for securely signing the upload token', + ), + { + title: `Generating types ...`, + task: async () => { + await generateTypes() + console.log( + 'Note: You may need to manually restart GraphQL in VSCode to see the new types take effect.\n\n', + ) }, }, + { title: 'Prettifying changed files', task: async (_ctx, task) => { + const ext = isTypeScriptProject() ? 'ts' : 'js' const prettifyPaths = [ - path.join(getPaths().api.lib, 'db.js'), - path.join(getPaths().api.lib, 'db.ts'), - path.join(getPaths().api.lib, 'uploads.js'), - path.join(getPaths().api.lib, 'uploads.ts'), + path.join(getPaths().api.directives, 'upload', `upload.${ext}`), + path.join(getPaths().api.graphql, `redwoodUploads.sdl.${ext}`), + path.join( + getPaths().api.services, + 'redwoodUploads', + `redwoodUploads.${ext}`, + ), + isTypeScriptProject() && + path.join(getPaths().api.services, 'types', `types.${ext}`), + path.join(getPaths().api.functions, `graphql.${ext}`), ] for (const prettifyPath of prettifyPaths) { @@ -135,25 +243,10 @@ export const handler = async ({ force }) => { } }, }, - { - title: 'One more thing...', - task: (_ctx, task) => { - task.title = `One more thing... - - ${c.success('\nUploads and storage configured!\n')} - - Remember to add UPLOADS_SECRET to your .env file. 
You can generate one with ${c.highlight('yarn rw generate secret')} - - - Check out the docs for more info: - ${c.link('https://docs.redwoodjs.com/docs/uploads')} - - ` - }, - }, ], { - rendererOptions: { collapseSubtasks: false }, + rendererOptions: { collapseSubtasks: false, persistentOutput: true }, + renderer: verbose ? 'verbose' : 'default', }, ) diff --git a/packages/storage/.gitignore b/packages/storage/.gitignore deleted file mode 100644 index 9b5d07c455ec..000000000000 --- a/packages/storage/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -src/__tests__/migrations/* -src/__tests__/for_unit_test.db* -.attw.json -src/__tests__/prisma-client/* diff --git a/packages/storage/README.md b/packages/storage/README.md deleted file mode 100644 index f1704713e3b4..000000000000 --- a/packages/storage/README.md +++ /dev/null @@ -1,123 +0,0 @@ -# `@redwoodjs/storage` - -This package houses - -- Prisma extension for handling uploads. Currently - a) Query Extension: will save, delete, replace files on disk during CRUD - b) Result Extension: gives you functions like `.withSignedUri` on configured prisma results - which will take the paths, and convert it to a signed url -- Storage adapters e.g. FS and Memory to use with the prisma extension -- Processors - i.e. utility functions which will take [`Files`](https://developer.mozilla.org/en-US/docs/Web/API/File) and save them to storage - -## Usage - -In `api/src/uploads.ts` - setup uploads - processors, storage and the prisma extension. 
- -```ts -// api/src/lib/uploads.ts -nua -import { setupUploads, UploadsConfig } from '@redwoodjs/storage' -import { FileSystemStorage } from '@redwoodjs/storage/FileSystemStorage' -import { UrlSigner } from '@redwoodjs/storage/UrlSigner' - -const uploadsConfig: UploadsConfig = { - // 👇 prisma model - profile: { - // 👇 pass in fields that are going to be File uploads - // these should be configured as string in the Prisma.schema - fields: ['avatar', 'coverPhoto'], - }, -} - -// 👇 exporting these allows you access elsewhere on the api side -export const fsStorage = new FileSystemStorage({ - baseDir: './uploads', -}) - -// Optional -export const urlSigner = new UrlSigner({ - secret: process.env.UPLOADS_SECRET, - endpoint: '/signedUrl', -}) - -const { saveFiles, storagePrismaExtension } = setupStorage({ - uploadsConfig, - storageAdapter: fsStorage, - urlSigner, -}) - -export { saveFiles, storagePrismaExtension } -``` - -### Configuring db to use the prisma extension - -```ts -// api/src/lib/db.ts - -import { PrismaClient } from '@prisma/client' - -import { emitLogLevels, handlePrismaLogging } from '@redwoodjs/api/logger' - -import { logger } from './logger' -import { storagePrismaExtension } from './uploads' - -// 👇 Notice here we create prisma client, and don't export it yet -export const prismaClient = new PrismaClient({ - log: emitLogLevels(['info', 'warn', 'error']), -}) - -handlePrismaLogging({ - db: prismaClient, - logger, - logLevels: ['info', 'warn', 'error'], -}) - -// 👇 Export db after adding uploads extension -export const db = prismaClient.$extends(storagePrismaExtension) -``` - -## Using Prisma extension - -### A) CRUD operations - -No need to do anything here, but you have to use processors to supply Prisma with data in the correct format. 
- -### B) Result extensions - -```ts -// api/src/services/profiles/profiles.ts - -export const profile: QueryResolvers['profile'] = async ({ id }) => { - // 👇 await the result from your prisma query - const profile = await db.profile.findUnique({ - where: { id }, - }) - - // Convert the avatar and coverPhoto fields to signed URLs - // Note that you still need to add a api endpoint to handle these signed urls - return profile?.withSignedUrl() -} -``` - -## Using `saveFiles` - -In your services, you can use the preconfigured "processors" - exported as `saveFiles` to convert Files to paths on storage, for Prisma to save into the database. The processors, and storage adapters determine where the file is saved. - -```ts -// api/src/services/profiles/profiles.ts - -export const updateProfile: MutationResolvers['updateProfile'] = async ({ - id, - input, -}) => { - const processedInput = await saveFiles.forProfile(input) - - // This becomes a string 👇 - // The configuration on where it was saved is passed when we setup uploads in src/lib/uploads.ts - // processedInput.avatar = '/mySavePath/profile/avatar/generatedId.jpg' - - return db.profile.update({ - data: processedInput, - where: { id }, - }) -} -``` diff --git a/packages/storage/adapters/filesystem/README.md b/packages/storage/adapters/filesystem/README.md new file mode 100644 index 000000000000..f9233323549b --- /dev/null +++ b/packages/storage/adapters/filesystem/README.md @@ -0,0 +1,3 @@ +# `@redwoodjs/storage-adapters-filesystem` + +TODO(jgmw): Add the readme. 
diff --git a/packages/storage/adapters/filesystem/build.mts b/packages/storage/adapters/filesystem/build.mts new file mode 100644 index 000000000000..16175a6725c0 --- /dev/null +++ b/packages/storage/adapters/filesystem/build.mts @@ -0,0 +1,3 @@ +import { build } from '@redwoodjs/framework-tools' + +await build() diff --git a/packages/storage/adapters/filesystem/package.json b/packages/storage/adapters/filesystem/package.json new file mode 100644 index 000000000000..c9eb6652ffd6 --- /dev/null +++ b/packages/storage/adapters/filesystem/package.json @@ -0,0 +1,50 @@ +{ + "name": "@redwoodjs/storage-adapter-filesystem", + "version": "8.0.0", + "repository": { + "type": "git", + "url": "git+https://github.com/redwoodjs/redwood.git", + "directory": "packages/storage/adapters/filesystem" + }, + "license": "MIT", + "type": "commonjs", + "exports": { + ".": { + "default": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + } + }, + "./package.json": "./package.json" + }, + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "build": "yarn tsx ./build.mts && yarn build:types", + "build:pack": "yarn pack -o redwoodjs-storage-adapter-filesystem.tgz", + "build:types": "tsc --build --verbose ./tsconfig.build.json", + "check:attw": "yarn rw-fwtools-attw", + "check:package": "concurrently npm:check:attw yarn:publint", + "test": "vitest run", + "test:watch": "vitest watch" + }, + "dependencies": { + "@redwoodjs/storage-core": "workspace:*", + "mime-types": "2.1.35", + "uuid": "10.0.0" + }, + "devDependencies": { + "@redwoodjs/framework-tools": "workspace:*", + "@types/uuid": "10.0.0", + "concurrently": "8.2.2", + "esbuild": "0.23.1", + "publint": "0.2.10", + "tsx": "4.19.1", + "typescript": "5.6.2", + "vitest": "2.0.5" + }, + "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" +} diff --git a/packages/storage/adapters/filesystem/src/index.ts b/packages/storage/adapters/filesystem/src/index.ts new file mode 100644 
index 000000000000..b3f5c271d252 --- /dev/null +++ b/packages/storage/adapters/filesystem/src/index.ts @@ -0,0 +1,154 @@ +import fsSync from 'node:fs' +import fs from 'node:fs/promises' +import path from 'node:path' +import stream from 'node:stream' + +import mimeTypes from 'mime-types' +import { v7 as uuidv7 } from 'uuid' + +import type { StorageSigner } from '@redwoodjs/storage-core' +import { StorageAdapter } from '@redwoodjs/storage-core' + +export interface FileSystemAdapterConfig { + root: string + signing: { + signer: StorageSigner + baseUrl: string + } +} + +export class FileSystemAdapter extends StorageAdapter { + private name: string + public config: FileSystemAdapterConfig + + constructor(config: FileSystemAdapterConfig) { + super() + this.name = Date.now().toString() + this.config = config + } + + getName(): string { + return this.name + } + + setName(name: string): void { + this.name = name + } + + override async readData(reference: string) { + const filepath = this.referenceToPath(reference) + return fs.readFile(filepath, { flag: 'r' }) + } + + override async readFile(enrichedReference: string) { + const { mimeType } = this.parseEnrichedReference(enrichedReference) + const filepath = this.referenceToPath(enrichedReference) + + const buffer = await fs.readFile(filepath, { flag: 'r' }) + const lastModified = (await fs.stat(filepath)).mtimeMs + + return new File([buffer], enrichedReference, { + type: mimeType, + lastModified, + }) + } + + override async readStream(reference: string) { + const filepath = this.referenceToPath(reference) + const nodeStream = fsSync.createReadStream(filepath) + const webStream = stream.Readable.toWeb(nodeStream) + + // toWeb doesn't appear to take a generic type, so we need to cast it here + return webStream as ReadableStream + } + + override async writeData(data: Buffer) { + const reference = this.generateReference() + const filepath = this.referenceToPath(reference) + + await fs.mkdir(path.dirname(filepath), { recursive: 
true }) + await fs.writeFile(filepath, data, { flag: 'w' }) + + return reference + } + + override async writeFile(data: File) { + const reference = this.generateReference() + const enrichedReference = this.enrichReference(reference, data.type) + const filepath = this.referenceToPath(enrichedReference) + + const buffer = Buffer.from(await data.arrayBuffer()) + await fs.mkdir(path.dirname(filepath), { recursive: true }) + await fs.writeFile(filepath, buffer, { flag: 'w' }) + + return enrichedReference + } + + override async writeStream(data: ReadableStream) { + const reference = this.generateReference() + const filepath = this.referenceToPath(reference) + await fs.mkdir(path.dirname(filepath), { recursive: true }) + const writeStream = fsSync.createWriteStream(filepath) + const webStream = stream.Writable.toWeb(writeStream) + + await data.pipeTo(webStream) + + return reference + } + + override async delete(reference: string) { + const filepath = this.referenceToPath(reference) + return fs.unlink(filepath) + } + + override async exists(reference: string) { + const filepath = this.referenceToPath(reference) + try { + fs.stat(filepath) + return true + } catch { + return false + } + } + + override async getSignedUrl(reference: string): Promise { + const token = this.config.signing.signer.encode({ + adapter: this.name, + reference, + expiry: 0, + }) + + const base = new URL(this.config.signing.baseUrl) + base.searchParams.set('token', token) + return base.toString() + } + + // --- + + private referenceToPath(reference: string) { + // TODO(jgmw): Store metadata in a .json file with the same ref + return path.join(this.config.root, reference) + } + + private generateReference() { + return uuidv7() + } + + private enrichReference(reference: string, mimeType: string): string { + const ext = mimeTypes.extension(mimeType) || mimeType.replaceAll('/', '_') + return `${reference}.${ext}` + } + + private parseEnrichedReference(enrichedReference: string): { + reference: string + 
mimeType: string + } { + const parts = enrichedReference.split('.') + const reference = parts.slice(0, -1).join('.') + + const ext = parts[parts.length - 1] + const mimeType = mimeTypes.lookup(ext) || ext.replaceAll('_', '/') + + return { reference, mimeType } + } +} diff --git a/packages/storage/adapters/filesystem/tsconfig.build.json b/packages/storage/adapters/filesystem/tsconfig.build.json new file mode 100644 index 000000000000..01e812b8c84f --- /dev/null +++ b/packages/storage/adapters/filesystem/tsconfig.build.json @@ -0,0 +1,12 @@ +{ + "extends": "../../../../tsconfig.compilerOption.json", + "compilerOptions": { + "rootDir": "src", + "outDir": "dist", + "moduleResolution": "Node16", + "module": "Node16", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src"], + "references": [{ "path": "../../../project-config" }] +} diff --git a/packages/storage/adapters/filesystem/tsconfig.json b/packages/storage/adapters/filesystem/tsconfig.json new file mode 100644 index 000000000000..c9673b22bfed --- /dev/null +++ b/packages/storage/adapters/filesystem/tsconfig.json @@ -0,0 +1,14 @@ +{ + "extends": "../../../../tsconfig.compilerOption.json", + "compilerOptions": { + "moduleResolution": "Node16", + "module": "Node16" + }, + "include": ["."], + "exclude": ["dist", "node_modules"], + "references": [ + { "path": "../../../framework-tools" }, + { "path": "../../../project-config" }, + { "path": "../../core" } + ] +} diff --git a/packages/storage/adapters/memory/README.md b/packages/storage/adapters/memory/README.md new file mode 100644 index 000000000000..fb11c6be8e17 --- /dev/null +++ b/packages/storage/adapters/memory/README.md @@ -0,0 +1,3 @@ +# `@redwoodjs/storage-memory` + +TODO(jgmw): Add the readme. 
diff --git a/packages/storage/adapters/memory/build.mts b/packages/storage/adapters/memory/build.mts new file mode 100644 index 000000000000..16175a6725c0 --- /dev/null +++ b/packages/storage/adapters/memory/build.mts @@ -0,0 +1,3 @@ +import { build } from '@redwoodjs/framework-tools' + +await build() diff --git a/packages/storage/adapters/memory/package.json b/packages/storage/adapters/memory/package.json new file mode 100644 index 000000000000..00b899a4ea46 --- /dev/null +++ b/packages/storage/adapters/memory/package.json @@ -0,0 +1,51 @@ +{ + "name": "@redwoodjs/storage-adapter-memory", + "version": "8.0.0", + "repository": { + "type": "git", + "url": "git+https://github.com/redwoodjs/redwood.git", + "directory": "packages/storage/adapters/memory" + }, + "license": "MIT", + "type": "commonjs", + "exports": { + ".": { + "default": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + } + }, + "./package.json": "./package.json" + }, + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "build": "yarn tsx ./build.mts && yarn build:types", + "build:pack": "yarn pack -o redwoodjs-storage-adapter-memory.tgz", + "build:types": "tsc --build --verbose ./tsconfig.build.json", + "check:attw": "yarn rw-fwtools-attw", + "check:package": "concurrently npm:check:attw yarn:publint", + "test": "vitest run", + "test:watch": "vitest watch" + }, + "dependencies": { + "@redwoodjs/storage-core": "workspace:*", + "lru-cache": "11.0.1", + "mime-types": "2.1.35", + "uuid": "10.0.0" + }, + "devDependencies": { + "@redwoodjs/framework-tools": "workspace:*", + "@types/uuid": "10.0.0", + "concurrently": "8.2.2", + "esbuild": "0.23.1", + "publint": "0.2.10", + "tsx": "4.19.1", + "typescript": "5.6.2", + "vitest": "2.0.5" + }, + "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" +} diff --git a/packages/storage/adapters/memory/src/index.ts b/packages/storage/adapters/memory/src/index.ts new file mode 100644 index 
000000000000..5f7aaec08e43 --- /dev/null +++ b/packages/storage/adapters/memory/src/index.ts @@ -0,0 +1,177 @@ +import { LRUCache } from 'lru-cache' +import mimeTypes from 'mime-types' +import { v7 as uuidv7 } from 'uuid' + +import type { StorageSigner } from '@redwoodjs/storage-core' +import { StorageAdapter } from '@redwoodjs/storage-core' + +export interface MemoryAdapterConfig { + maxSize?: number // Maximum number of items to store + maxAge?: number // Maximum age of items in milliseconds + signing: { + signer: StorageSigner + baseUrl: string + } +} + +interface StoredItem { + data: Buffer + mimeType: string + lastModified: number +} + +export class MemoryAdapter extends StorageAdapter { + private name: string + public config: MemoryAdapterConfig + private storage: LRUCache + + constructor(config: MemoryAdapterConfig) { + super() + this.name = Date.now().toString() + this.config = config + this.storage = new LRUCache({ + max: config.maxSize || 1000, + ttl: config.maxAge, + allowStale: false, + updateAgeOnGet: false, + updateAgeOnHas: false, + }) + } + + getName(): string { + return this.name + } + + setName(name: string): void { + this.name = name + } + + override async readData(reference: string) { + const item = this.storage.get(reference) + if (!item) { + throw new Error(`Item not found: ${reference}`) + } + return item.data + } + + override async readFile(enrichedReference: string) { + const { reference, mimeType } = + this.parseEnrichedReference(enrichedReference) + const item = this.storage.get(reference) + console.debug('item', item) + console.debug('reference', reference) + console.debug('mimeType', mimeType) + console.debug('enrichedReference', enrichedReference) + if (!item) { + throw new Error(`Item not found: ${reference}`) + } + + return new File([item.data], enrichedReference, { + type: mimeType, + lastModified: item.lastModified, + }) + } + + override async readStream(reference: string) { + const item = this.storage.get(reference) + if (!item) { 
+ throw new Error(`Item not found: ${reference}`) + } + + return new ReadableStream({ + start(controller) { + controller.enqueue(item.data as TStreamType) + controller.close() + }, + }) + } + + override async writeData(data: Buffer) { + const reference = this.generateReference() + this.storage.set(reference, { + data, + mimeType: 'application/octet-stream', + lastModified: Date.now(), + }) + return reference + } + + override async writeFile(data: File) { + const reference = this.generateReference() + const buffer = Buffer.from(await data.arrayBuffer()) + const item = this.storage.set(reference, { + data: buffer, + mimeType: data.type, + lastModified: data.lastModified, + }) + console.debug('item', item) + const enrichedReference = this.enrichReference(reference, data.type) + console.debug('enrichedReference', enrichedReference) + return enrichedReference + } + + override async writeStream(data: ReadableStream) { + const reference = this.generateReference() + let isReading = true + const chunks: Buffer[] = [] // Define chunks array + while (isReading) { + const { done, value } = await data.getReader().read() + if (done) { + isReading = false + } + chunks.push(value as Buffer) + } + + const buffer = Buffer.concat(chunks) + this.storage.set(reference, { + data: buffer, + mimeType: 'application/octet-stream', + lastModified: Date.now(), + }) + return reference + } + + override async delete(reference: string) { + this.storage.delete(reference) + } + + override async exists(reference: string) { + return this.storage.has(reference) + } + + override async getSignedUrl(reference: string): Promise { + const token = this.config.signing.signer.encode({ + adapter: this.name, + reference, /// but what is saved with enriched extension + expiry: 0, + }) + + const base = new URL(this.config.signing.baseUrl) + base.searchParams.set('token', token) + return base.toString() + } + + // --- + + private generateReference() { + return uuidv7() + } + + private enrichReference(reference: 
string, mimeType: string): string { + const ext = mimeTypes.extension(mimeType) || mimeType.replaceAll('/', '_') + return `${reference}.${ext}` + } + + private parseEnrichedReference(enrichedReference: string): { + reference: string + mimeType: string + } { + const parts = enrichedReference.split('.') + const reference = parts.slice(0, -1).join('.') + + const ext = parts[parts.length - 1] + const mimeType = mimeTypes.lookup(ext) || ext.replaceAll('_', '/') + + return { reference, mimeType } + } +} diff --git a/packages/storage/adapters/memory/tsconfig.build.json b/packages/storage/adapters/memory/tsconfig.build.json new file mode 100644 index 000000000000..01e812b8c84f --- /dev/null +++ b/packages/storage/adapters/memory/tsconfig.build.json @@ -0,0 +1,12 @@ +{ + "extends": "../../../../tsconfig.compilerOption.json", + "compilerOptions": { + "rootDir": "src", + "outDir": "dist", + "moduleResolution": "Node16", + "module": "Node16", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src"], + "references": [{ "path": "../../../project-config" }] +} diff --git a/packages/storage/adapters/memory/tsconfig.json b/packages/storage/adapters/memory/tsconfig.json new file mode 100644 index 000000000000..c9673b22bfed --- /dev/null +++ b/packages/storage/adapters/memory/tsconfig.json @@ -0,0 +1,14 @@ +{ + "extends": "../../../../tsconfig.compilerOption.json", + "compilerOptions": { + "moduleResolution": "Node16", + "module": "Node16" + }, + "include": ["."], + "exclude": ["dist", "node_modules"], + "references": [ + { "path": "../../../framework-tools" }, + { "path": "../../../project-config" }, + { "path": "../../core" } + ] +} diff --git a/packages/storage/adapters/s3/README.md b/packages/storage/adapters/s3/README.md new file mode 100644 index 000000000000..b02ef6ddc05d --- /dev/null +++ b/packages/storage/adapters/s3/README.md @@ -0,0 +1,3 @@ +# `@redwoodjs/storage-adapters-s3` + +TODO(jgmw): Add the readme. 
diff --git a/packages/storage/adapters/s3/build.mts b/packages/storage/adapters/s3/build.mts new file mode 100644 index 000000000000..16175a6725c0 --- /dev/null +++ b/packages/storage/adapters/s3/build.mts @@ -0,0 +1,3 @@ +import { build } from '@redwoodjs/framework-tools' + +await build() diff --git a/packages/storage/adapters/s3/package.json b/packages/storage/adapters/s3/package.json new file mode 100644 index 000000000000..e7d4ec7141fb --- /dev/null +++ b/packages/storage/adapters/s3/package.json @@ -0,0 +1,49 @@ +{ + "name": "@redwoodjs/storage-adapter-s3", + "version": "8.0.0", + "repository": { + "type": "git", + "url": "git+https://github.com/redwoodjs/redwood.git", + "directory": "packages/storage/adapters/s3" + }, + "license": "MIT", + "type": "commonjs", + "exports": { + ".": { + "default": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + } + }, + "./package.json": "./package.json" + }, + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "build": "yarn tsx ./build.mts && yarn build:types", + "build:pack": "yarn pack -o redwoodjs-storage-adapter-s3.tgz", + "build:types": "tsc --build --verbose ./tsconfig.build.json", + "check:attw": "yarn rw-fwtools-attw", + "check:package": "concurrently npm:check:attw yarn:publint", + "test": "vitest run", + "test:watch": "vitest watch" + }, + "dependencies": { + "@aws-sdk/client-s3": "3.663.0", + "@aws-sdk/lib-storage": "3.663.0", + "@aws-sdk/s3-request-presigner": "3.663.0" + }, + "devDependencies": { + "@redwoodjs/framework-tools": "workspace:*", + "concurrently": "8.2.2", + "esbuild": "0.23.1", + "publint": "0.2.10", + "tsx": "4.19.1", + "typescript": "5.6.2", + "vitest": "2.0.5" + }, + "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" +} diff --git a/packages/storage/adapters/s3/src/index.ts b/packages/storage/adapters/s3/src/index.ts new file mode 100644 index 000000000000..de483ddec6a3 --- /dev/null +++ 
b/packages/storage/adapters/s3/src/index.ts @@ -0,0 +1,246 @@ +import { + S3Client, + GetObjectCommand, + DeleteObjectCommand, + HeadObjectCommand, + ListBucketsCommand, + ListObjectsV2Command, +} from '@aws-sdk/client-s3' +import type { PutObjectCommandInput } from '@aws-sdk/client-s3' +import type { Configuration } from '@aws-sdk/lib-storage' +import { Upload } from '@aws-sdk/lib-storage' +import { getSignedUrl } from '@aws-sdk/s3-request-presigner' +import mimeTypes from 'mime-types' +import { v7 as uuidv7 } from 'uuid' + +import { StorageAdapter } from '@redwoodjs/storage-core' + +export interface S3AdapterConfig { + bucket: string + region: string + endpoint?: string + credentials: { + accessKeyId: string + secretAccessKey: string + } + queueSize?: number + partSize?: number +} + +export type S3WriteOptions = { + tags?: Configuration['tags'] +} & Omit< + PutObjectCommandInput, + 'Bucket' | 'Key' | 'Body' | 'Tags' | 'ContentType' +> + +export class S3Adapter extends StorageAdapter { + private name: string + public config: S3AdapterConfig + private s3Client: S3Client + private queueSize: number + private partSize: number + + constructor(config: S3AdapterConfig) { + super() + this.name = Date.now().toString() + this.config = config + this.s3Client = new S3Client({ + region: config.region, + credentials: config.credentials, + ...(config.endpoint && { endpoint: config.endpoint }), + }) + this.queueSize = config.queueSize || 4 + this.partSize = config.partSize || 5 * 1024 * 1024 + } + + getName(): string { + return this.name + } + + setName(name: string): void { + this.name = name + } + + override async readData(reference: string) { + const command = new GetObjectCommand({ + Bucket: this.config.bucket, + Key: reference, + }) + const response = await this.s3Client.send(command) + return Buffer.from(await response.Body!.transformToByteArray()) + } + + override async readFile(enrichedReference: string) { + const { mimeType } = 
this.parseEnrichedReference(enrichedReference) + const data = await this.readData(enrichedReference) + const response = await this.s3Client.send( + new HeadObjectCommand({ + Bucket: this.config.bucket, + Key: enrichedReference, + }), + ) + + // don't I want all the s3 response data here? like typ, etag, etc?` + return new File([data], enrichedReference, { + type: mimeType, + lastModified: response.LastModified?.getTime(), + }) + } + + override async readStream(reference: string) { + const command = new GetObjectCommand({ + Bucket: this.config.bucket, + Key: reference, + }) + const response = await this.s3Client.send(command) + return response.Body as ReadableStream + } + + override async writeData(data: Buffer, options?: S3WriteOptions) { + const reference = this.generateReference() + const upload = new Upload({ + params: { + Bucket: this.config.bucket, + Key: reference, + Body: data, + ...Object.fromEntries( + Object.entries(options || {}).filter(([key]) => key !== 'tags'), + ), + }, + client: this.s3Client, + queueSize: this.queueSize, + partSize: this.partSize, + tags: options?.tags, + }) + + upload.on('httpUploadProgress', (progress) => { + console.log(progress) + }) + + await upload.done() + + return reference + } + + override async writeFile(data: File, options?: S3WriteOptions) { + const reference = this.generateReference() + const enrichedReference = this.enrichReference(reference, data.type) + + const upload = new Upload({ + params: { + Bucket: this.config.bucket, + Key: enrichedReference, + Body: data, + ContentType: data.type, + ...Object.fromEntries( + Object.entries(options || {}).filter(([key]) => key !== 'tags'), + ), + }, + client: this.s3Client, + queueSize: this.queueSize, + partSize: this.partSize, + tags: options?.tags, + }) + + upload.on('httpUploadProgress', (progress) => { + console.log(progress) + }) + + await upload.done() + + return enrichedReference + } + + // do I really read/write need data and stream? 
+ override async writeStream( + data: ReadableStream, + options?: S3WriteOptions, + ) { + const reference = this.generateReference() + + const upload = new Upload({ + params: { + Bucket: this.config.bucket, + Key: reference, + Body: data, + + ...Object.fromEntries( + Object.entries(options || {}).filter(([key]) => key !== 'tags'), + ), + }, + client: this.s3Client, + queueSize: this.queueSize, + partSize: this.partSize, + tags: options?.tags, + }) + + upload.on('httpUploadProgress', (progress) => { + console.log(progress) + }) + + await upload.done() + + return reference + } + + override async delete(reference: string) { + await this.s3Client.send( + new DeleteObjectCommand({ + Bucket: this.config.bucket, + Key: reference, + }), + ) + } + + override async exists(reference: string) { + try { + await this.s3Client.send( + new HeadObjectCommand({ + Bucket: this.config.bucket, + Key: reference, + }), + ) + return true + } catch { + return false + } + } + + override async getSignedUrl(reference: string): Promise { + const command = new GetObjectCommand({ + Bucket: this.config.bucket, + Key: reference, + }) + return getSignedUrl(this.s3Client, command, { expiresIn: 3600 }) + } + + // Helper methods (similar to FileSystemAdapter) + private generateReference() { + return uuidv7() + } + + private enrichReference(reference: string, mimeType: string): string { + const ext = mimeTypes.extension(mimeType) || mimeType.replaceAll('/', '_') + return `${reference}.${ext}` + } + + private parseEnrichedReference(enrichedReference: string): { + reference: string + mimeType: string + } { + const parts = enrichedReference.split('.') + const reference = parts.slice(0, -1).join('.') + const ext = parts[parts.length - 1] + const mimeType = mimeTypes.lookup(ext) || ext.replaceAll('_', '/') + return { reference, mimeType } + } + + async listBuckets() { + return this.s3Client.send(new ListBucketsCommand({})) + } + + async listObjects(bucket: string) { + return this.s3Client.send(new 
ListObjectsV2Command({ Bucket: bucket })) + } +} diff --git a/packages/storage/adapters/s3/tsconfig.build.json b/packages/storage/adapters/s3/tsconfig.build.json new file mode 100644 index 000000000000..01e812b8c84f --- /dev/null +++ b/packages/storage/adapters/s3/tsconfig.build.json @@ -0,0 +1,12 @@ +{ + "extends": "../../../../tsconfig.compilerOption.json", + "compilerOptions": { + "rootDir": "src", + "outDir": "dist", + "moduleResolution": "Node16", + "module": "Node16", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src"], + "references": [{ "path": "../../../project-config" }] +} diff --git a/packages/storage/adapters/s3/tsconfig.json b/packages/storage/adapters/s3/tsconfig.json new file mode 100644 index 000000000000..8d14f294e8d5 --- /dev/null +++ b/packages/storage/adapters/s3/tsconfig.json @@ -0,0 +1,13 @@ +{ + "extends": "../../../../tsconfig.compilerOption.json", + "compilerOptions": { + "moduleResolution": "Node16", + "module": "Node16" + }, + "include": ["."], + "exclude": ["dist", "node_modules"], + "references": [ + { "path": "../../../framework-tools" }, + { "path": "../../../project-config" } + ] +} diff --git a/packages/storage/attw.ts b/packages/storage/attw.ts deleted file mode 100644 index a377f7b5f320..000000000000 --- a/packages/storage/attw.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { $ } from 'zx' - -interface Problem { - kind: string - entrypoint?: string - resolutionKind?: string -} - -await $({ nothrow: true })`yarn attw -P -f json > .attw.json` -const output = await $`cat .attw.json` -await $`rm .attw.json` - -const json = JSON.parse(output.stdout) - -if (!json.analysis.problems || json.analysis.problems.length === 0) { - console.log('No errors found') - process.exit(0) -} - -if ( - json.analysis.problems.every( - (problem: Problem) => problem.resolutionKind === 'node10', - ) -) { - console.log("Only found node10 problems, which we don't care about") - process.exit(0) -} - -console.log('Errors found') 
-console.log(json.analysis.problems) -process.exit(1) diff --git a/packages/storage/build.mts b/packages/storage/build.mts deleted file mode 100644 index da389f21e834..000000000000 --- a/packages/storage/build.mts +++ /dev/null @@ -1,33 +0,0 @@ -import { build, defaultBuildOptions } from '@redwoodjs/framework-tools' -import { - generateTypesCjs, - generateTypesEsm, - insertCommonJsPackageJson, -} from '@redwoodjs/framework-tools/generateTypes' - -// ESM build -await build({ - buildOptions: { - ...defaultBuildOptions, - format: 'esm', - packages: 'external', - }, -}) - -await generateTypesEsm() - -// CJS build -await build({ - buildOptions: { - ...defaultBuildOptions, - outdir: 'dist/cjs', - packages: 'external', - }, -}) - -await generateTypesCjs() - -await insertCommonJsPackageJson({ - buildFileUrl: import.meta.url, - cjsDir: 'dist/cjs', -}) diff --git a/packages/storage/core/README.md b/packages/storage/core/README.md new file mode 100644 index 000000000000..12421486d93b --- /dev/null +++ b/packages/storage/core/README.md @@ -0,0 +1,3 @@ +# `@redwoodjs/storage-core` + +TODO(jgmw): Add the readme. 
diff --git a/packages/storage/core/build.mts b/packages/storage/core/build.mts new file mode 100644 index 000000000000..16175a6725c0 --- /dev/null +++ b/packages/storage/core/build.mts @@ -0,0 +1,3 @@ +import { build } from '@redwoodjs/framework-tools' + +await build() diff --git a/packages/storage/core/package.json b/packages/storage/core/package.json new file mode 100644 index 000000000000..0def8b55f75b --- /dev/null +++ b/packages/storage/core/package.json @@ -0,0 +1,44 @@ +{ + "name": "@redwoodjs/storage-core", + "version": "8.0.0", + "repository": { + "type": "git", + "url": "git+https://github.com/redwoodjs/redwood.git", + "directory": "packages/storage/core" + }, + "license": "MIT", + "type": "commonjs", + "exports": { + ".": { + "default": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + } + }, + "./package.json": "./package.json" + }, + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "build": "yarn tsx ./build.mts && yarn build:types", + "build:pack": "yarn pack -o redwoodjs-storage-core.tgz", + "build:types": "tsc --build --verbose ./tsconfig.build.json", + "check:attw": "yarn rw-fwtools-attw", + "check:package": "concurrently npm:check:attw yarn:publint", + "test": "vitest run", + "test:watch": "vitest watch" + }, + "devDependencies": { + "@redwoodjs/framework-tools": "workspace:*", + "concurrently": "8.2.2", + "esbuild": "0.23.1", + "publint": "0.2.10", + "tsx": "4.19.1", + "typescript": "5.6.2", + "vitest": "2.0.5" + }, + "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" +} diff --git a/packages/storage/core/src/adapter.ts b/packages/storage/core/src/adapter.ts new file mode 100644 index 000000000000..d1036bf1878e --- /dev/null +++ b/packages/storage/core/src/adapter.ts @@ -0,0 +1,35 @@ +// TODO(jgmw): Use the term 'key' rather than 'reference' ala key-value store and S3 + +// TODO(jgmw): The metadata should include the content type and the original filename + +export abstract 
class StorageAdapter { + abstract getName(): string + abstract setName(name: string): void + + // --- + + // TODO(jgmw): "Data" is not very clear, just use Buffer + abstract readData(reference: string): Promise + // TODO(jgmw): "File" does this need to exist? People should just move it to a buffer/stream anyway + abstract readFile(reference: string): Promise + abstract readStream( + reference: string, + ): Promise> + + abstract writeData(data: Buffer): Promise + abstract writeFile(data: File): Promise + abstract writeStream( + data: ReadableStream, + ): Promise + + // TODO(jgmw): consider a lookup metadata function + + abstract delete(reference: string): Promise + + abstract exists(reference: string): Promise + + abstract getSignedUrl(reference: string): Promise + // TODO(jgmw): validate signed url function - maybe? + // add options like download & filename that sets the content-disposition header + // TODO(dt): getPublicUrl - no expiration? +} diff --git a/packages/storage/core/src/index.ts b/packages/storage/core/src/index.ts new file mode 100644 index 000000000000..8a98f3b01093 --- /dev/null +++ b/packages/storage/core/src/index.ts @@ -0,0 +1,3 @@ +export * from './adapter.js' +export * from './manager.js' +export * from './signer.js' diff --git a/packages/storage/core/src/manager.ts b/packages/storage/core/src/manager.ts new file mode 100644 index 000000000000..26908d3ef692 --- /dev/null +++ b/packages/storage/core/src/manager.ts @@ -0,0 +1,141 @@ +import { StorageAdapter } from './adapter' + +export type Adapters = Record + +export type EnvironmentMapping = Record< + string, + keyof TAdapters | Partial> +> + +export type StorageManagerConfig = { + adapters: TAdapters + default: keyof TAdapters + + env?: EnvironmentMapping +} + +export class StorageManager extends StorageAdapter { + public config: StorageManagerConfig + + constructor(config: StorageManagerConfig) { + super() + this.config = config + + // Validate the default adapter + if (!config.default) { + 
throw new Error('A default adapter must be provided') + } + if (!config.adapters[config.default]) { + throw new Error( + 'The default adapter must be one of the provided adapters', + ) + } + + // Inform the adapters of their names + for (const name in config.adapters) { + config.adapters[name].setName(name) + } + } + + // --- + + override getName(): string { + throw new Error('Cannot get the name of the manager adapter') + } + + override setName(_: string): void { + throw new Error('Cannot set the name of the manager adapter') + } + + // --- + + using(adapter: keyof TAdapters, force?: boolean): TAdapters[keyof TAdapters] { + // Check for an environment override + const override = this.getEnvOverride(adapter) + if (!force && override) { + return this.using(override, true) + } + + if (!this.config.adapters[adapter]) { + throw new Error( + `Adapter '${adapter.toString()}' is not in the list of adapters`, + ) + } + return this.config.adapters[adapter] + } + + default(): TAdapters[keyof TAdapters] { + return this.using(this.config.default) + } + + getEnvOverride(original: keyof TAdapters): keyof TAdapters | undefined { + if (!process.env.NODE_ENV) { + return undefined + } + + const override = this.config.env?.[process.env.NODE_ENV] + if (override) { + if (typeof override === 'string') { + return override + } + + return (override as Partial>)[ + original + ] + } + + return undefined + } + + findAdapter(name: string): TAdapters[keyof TAdapters] | undefined { + const found = this.config.adapters[name] + if (found) { + return found as TAdapters[keyof TAdapters] + } + return undefined + } + + // --- + + async readData(reference: string): Promise { + return this.default().readData(reference) + } + + async readFile(reference: string): Promise { + return this.default().readFile(reference) + } + + async readStream( + reference: string, + ): Promise> { + return this.default().readStream(reference) + } + + async writeData(data: Buffer): Promise { + return 
this.default().writeData(data) + } + + async writeFile(data: File): Promise { + return this.default().writeFile(data) + } + + async writeStream( + data: ReadableStream, + ): Promise { + return this.default().writeStream(data) + } + + async delete(reference: string): Promise { + return this.default().delete(reference) + } + + async exists(reference: string): Promise { + return this.default().exists(reference) + } + + async getSignedUrl(reference: string): Promise { + return this.default().getSignedUrl(reference) + } + + // --- +} diff --git a/packages/storage/core/src/signer.ts b/packages/storage/core/src/signer.ts new file mode 100644 index 000000000000..062c84c8c241 --- /dev/null +++ b/packages/storage/core/src/signer.ts @@ -0,0 +1,50 @@ +import crypto from 'node:crypto' + +export interface StorageSignerPayload { + adapter: string + reference: string + expiry: number +} + +export abstract class StorageSigner { + // TODO(jgmw): verify + abstract encode(payload: StorageSignerPayload): string + abstract decode(token: string): StorageSignerPayload | undefined +} + +export class StorageSelfSigner extends StorageSigner { + private readonly VERSION = 1 + private readonly secret: string + + constructor({ secret }: { secret: string }) { + super() + this.secret = secret + } + + override encode(payload: StorageSignerPayload): string { + const data = JSON.stringify({ ...payload, version: this.VERSION }) + const data64 = Buffer.from(data).toString('base64url') + + const hmac = crypto.createHmac('sha512', this.secret) + hmac.update(data64) + const signature = hmac.digest('base64url') + + return `${signature}.${data64}` + } + + override decode(token: string): StorageSignerPayload | undefined { + const [signature, ...data] = token.split('.') + const data64 = data.join('.') + + const hmac = crypto.createHmac('sha512', this.secret) + hmac.update(data64) + const expectedSignature = hmac.digest('base64url') + + if (signature !== expectedSignature) { + return undefined + } + + const 
payload = JSON.parse(Buffer.from(data64, 'base64url').toString()) + return payload + } +} diff --git a/packages/storage/core/tsconfig.build.json b/packages/storage/core/tsconfig.build.json new file mode 100644 index 000000000000..28310dad439a --- /dev/null +++ b/packages/storage/core/tsconfig.build.json @@ -0,0 +1,12 @@ +{ + "extends": "../../../tsconfig.compilerOption.json", + "compilerOptions": { + "rootDir": "src", + "outDir": "dist", + "moduleResolution": "Node16", + "module": "Node16", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src"], + "references": [{ "path": "../../project-config" }] +} diff --git a/packages/storage/core/tsconfig.json b/packages/storage/core/tsconfig.json new file mode 100644 index 000000000000..29627091555b --- /dev/null +++ b/packages/storage/core/tsconfig.json @@ -0,0 +1,13 @@ +{ + "extends": "../../../tsconfig.compilerOption.json", + "compilerOptions": { + "moduleResolution": "Node16", + "module": "Node16" + }, + "include": ["."], + "exclude": ["dist", "node_modules"], + "references": [ + { "path": "../../framework-tools" }, + { "path": "../../project-config" } + ] +} diff --git a/packages/storage/package.json b/packages/storage/package.json deleted file mode 100644 index 2ed53149909b..000000000000 --- a/packages/storage/package.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "name": "@redwoodjs/storage", - "version": "8.0.0", - "repository": { - "type": "git", - "url": "git+https://github.com/redwoodjs/redwood.git", - "directory": "packages/storage" - }, - "license": "MIT", - "type": "module", - "exports": { - ".": { - "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.js" - }, - "import": { - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - }, - "./FileSystemStorage": { - "require": "./dist/cjs/adapters/FileSystemStorage/FileSystemStorage.js", - "import": "./dist/adapters/FileSystemStorage/FileSystemStorage.js" - }, - "./MemoryStorage": { - "require": 
"./dist/cjs/adapters/MemoryStorage/MemoryStorage.js", - "import": "./dist/adapters/MemoryStorage/MemoryStorage.js" - }, - "./BaseStorageAdapter": { - "require": "./dist/cjs/adapters/BaseStorageAdapter.js", - "import": "./dist/adapters/BaseStorageAdapter.js" - }, - "./UrlSigner": { - "require": "./dist/cjs/UrlSigner.js", - "import": "./dist/UrlSigner.js" - } - }, - "files": [ - "dist", - "!dist/**/*.test.d.*" - ], - "scripts": { - "build": "yarn setup:test && tsx ./build.mts", - "build:pack": "yarn pack -o redwoodjs-storage.tgz", - "build:types": "tsc --build --verbose ./tsconfig.build.json", - "build:types-cjs": "tsc --build --verbose tsconfig.types-cjs.json", - "check:attw": "tsx attw.ts", - "check:package": "concurrently npm:check:attw yarn publint", - "setup:test": "npx prisma db push --accept-data-loss --schema ./src/__tests__/unit-test-schema.prisma", - "test": "vitest run", - "test:types": "yarn setup:test && tstyche", - "test:watch": "vitest watch" - }, - "dependencies": { - "@redwoodjs/project-config": "workspace:*", - "mime-types": "2.1.35", - "ulid": "2.3.0" - }, - "devDependencies": { - "@arethetypeswrong/cli": "0.16.4", - "@prisma/client": "5.19.1", - "@redwoodjs/framework-tools": "workspace:*", - "@types/mime-types": "2.1.4", - "concurrently": "8.2.2", - "esbuild": "0.23.1", - "publint": "0.2.10", - "tstyche": "2.1.1", - "tsx": "4.19.1", - "typescript": "5.6.2", - "vitest": "2.0.5" - }, - "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" -} diff --git a/packages/storage/prisma-override.d.ts b/packages/storage/prisma-override.d.ts deleted file mode 100644 index f976517c35be..000000000000 --- a/packages/storage/prisma-override.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -// Locally, within this project we override the type for @prisma/client to the one we generate locally -// This is so that we get accurate types (rather than the default anys) - and when the prismaExtension runs -// it will still use the types from '@prisma/client' which points to the user's 
prisma client and not ours - -import type { PrismaClient as LocalPrismaClient } from './src/__tests__/prisma-client/index.d.ts' - -declare module '@prisma/client' { - export class PrismaClient extends LocalPrismaClient {} -} diff --git a/packages/storage/src/UrlSigner.ts b/packages/storage/src/UrlSigner.ts deleted file mode 100644 index 1edb2d64816f..000000000000 --- a/packages/storage/src/UrlSigner.ts +++ /dev/null @@ -1,161 +0,0 @@ -import crypto from 'node:crypto' - -import { getConfig } from '@redwoodjs/project-config' - -export type SignedUrlSettings = { - endpoint: string // The path to the signed url endpoint, or a full url (include http(s)://) - secret: string // The secret to sign the urls with -} - -export type SignatureValidationArgs = { - path: string - s: string - expiry?: number | string -} -export class UrlSigner { - private secret: string - private endpoint: string - - constructor({ secret, endpoint }: SignedUrlSettings) { - this.secret = secret - this.endpoint = endpoint - - this.endpoint = endpoint.startsWith('http') - ? 
endpoint - : `${getConfig().web.apiUrl}${endpoint}` - } - - generateSignature({ - filePath, - expiresInMs, - }: { - filePath: string - expiresInMs?: number - }) { - if (!this.secret) { - throw new Error('Please configure the secret') - } - - if (expiresInMs) { - const expiry = Date.now() + expiresInMs - const signature = crypto - .createHmac('sha256', this.secret) - .update(`${filePath}:${expiry}`) - .digest('hex') - - return { expiry, signature } - } else { - // Does not expire - const signature = crypto - .createHmac('sha256', this.secret) - .update(filePath) - .digest('hex') - - return { - signature, - expiry: undefined, - } - } - } - - /** - * The signature and expires have to be extracted from the URL - */ - validateSignature({ - s: signature, - path: filePath, // In the URL we call it path - expiry, - }: SignatureValidationArgs) { - if (!this.secret) { - throw new Error('Please configure the secret') - } - - if (expiry) { - // No need to validate if the signature has expired, - // but make sure its a number! - if (Date.now() > +expiry) { - throw new Error('Signature has expired') - } - } - - // Decoded filePath - const decodedFilePath = decodeURIComponent(filePath) - - const validSignature = expiry - ? crypto - .createHmac('sha256', this.secret) - .update(`${decodedFilePath}:${expiry}`) - .digest('hex') - : crypto - .createHmac('sha256', this.secret) - .update(`${decodedFilePath}`) - .digest('hex') - - if (validSignature !== signature) { - throw new Error('Invalid signature') - } - - return decodedFilePath - } - - validateSignedUrl(fullPathWithQueryParametersOrUrl: string) { - const url = new URL( - fullPathWithQueryParametersOrUrl, - // We don't care about the host, but just need to create a URL object - // to parse search params - fullPathWithQueryParametersOrUrl.startsWith('http') - ? 
undefined - : 'http://localhost', - ) - - const path = url.searchParams.get('path') as string - - this.validateSignature({ - // Note the signature is called 's' in the URL - s: url.searchParams.get('s') as string, - expiry: url.searchParams.get('expiry') as string, - path, - }) - - // Return the decoded path - return decodeURIComponent(path) - } - - generateSignedUrl(filePath: string, expiresIn?: number) { - const { signature, expiry } = this.generateSignature({ - filePath, - expiresInMs: expiresIn, - }) - - // This way you can pass in a path with params already - const params = new URLSearchParams() - params.set('s', signature) - if (expiry) { - params.set('expiry', expiry.toString()) - } - - params.set('path', filePath) - - return `${this.endpoint}?${params.toString()}` - } -} - -export const getSignedDetailsFromUrl = (url: string) => { - const urlObj = new URL(url) - const expires = urlObj.searchParams.get('expires') - return { - expires: expires ? parseInt(expires) : undefined, - file: urlObj.searchParams.get('file'), - signature: urlObj.searchParams.get('s'), - } -} - -export const EXPIRES_IN = { - seconds: (s: number) => s * 1000, - minutes: (m: number) => m * 60 * 1000, - hours: (h: number) => h * 60 * 60 * 1000, - days: (d: number) => d * 24 * 60 * 60 * 1000, - weeks: (w: number) => w * 7 * 24 * 60 * 60 * 1000, - months: (m: number) => m * 30 * 24 * 60 * 60 * 1000, - years: (y: number) => y * 365 * 24 * 60 * 60 * 1000, -} diff --git a/packages/storage/src/__tests__/createSavers.test.ts b/packages/storage/src/__tests__/createSavers.test.ts deleted file mode 100644 index ce2722afd6ca..000000000000 --- a/packages/storage/src/__tests__/createSavers.test.ts +++ /dev/null @@ -1,171 +0,0 @@ -import { describe, it, expect } from 'vitest' - -import { ensurePosixPath } from '@redwoodjs/project-config' - -import { MemoryStorage } from '../adapters/MemoryStorage/MemoryStorage.js' -import { createUploadSavers } from '../createSavers.js' -import { createUploadsConfig } 
from '../index.js' - -const memStore = new MemoryStorage({ - baseDir: '/memory_store_basedir', -}) - -const uploadsConfig = createUploadsConfig({ - dumbo: { - fields: ['firstUpload', 'secondUpload'], - }, - dummy: { - fields: 'uploadField', - }, -}) - -describe('Create savers', () => { - const fileToStorage = createUploadSavers(uploadsConfig, memStore) - - it('should create savers with CapitalCased model name', () => { - expect(fileToStorage.forDumbo).toBeDefined() - expect(fileToStorage.forDummy).toBeDefined() - - // These are in the schema but not in the config - // @ts-expect-error - testing! - expect(fileToStorage.forBook).not.toBeDefined() - // @ts-expect-error - testing! - expect(fileToStorage.forNoUploadFields).not.toBeDefined() - }) - - it('Should replace file types with location strings', async () => { - const data = { - firstUpload: new File(['Meaow'], 'kitten.txt', { - type: 'text/plain', - }), - secondUpload: new File(['Woof'], 'puppy.txt', { - type: 'text/plain', - }), - } - - const result = await fileToStorage.forDumbo(data) - - // Location strings in this format: {baseDir/{model}-{field}-{ulid}.{ext} - expect(ensurePosixPath(result.firstUpload)).toMatch( - /\/memory_store_basedir\/dumbo-*.*\.txt/, - ) - expect(ensurePosixPath(result.secondUpload)).toMatch( - /\/memory_store_basedir\/dumbo-*.*\.txt/, - ) - - const { contents: firstContents } = await memStore.read(result.firstUpload) - expect(firstContents.toString()).toBe('Meaow') - - const { contents: secondContents } = await memStore.read( - result.secondUpload, - ) - expect(secondContents.toString()).toBe('Woof') - }) - - it('Should be able to override save options', async () => { - const data = { - uploadField: new File(['Hello'], 'hello.png', { - type: 'image/png', - }), - } - - const fileNameOverrideOnly = await fileToStorage.forDummy(data, { - fileName: 'overridden', - }) - - const pathOverrideOnly = await fileToStorage.forDummy(data, { - path: '/bazinga', - }) - - const bothOverride = await 
fileToStorage.forDummy(data, { - path: '/bazinga', - fileName: 'overridden', - }) - - expect(ensurePosixPath(fileNameOverrideOnly.uploadField)).toBe( - '/memory_store_basedir/overridden.png', - ) - - expect(ensurePosixPath(pathOverrideOnly.uploadField)).toMatch( - /\/bazinga\/.*\.png/, - ) - // Overriding path ignores the baseDir - expect(pathOverrideOnly.uploadField).not.toContain('memory_store_basedir') - - expect(ensurePosixPath(bothOverride.uploadField)).toBe( - '/bazinga/overridden.png', - ) - }) - - it('Should not add extension for unknown file type', async () => { - const data = { - uploadField: new File(['Hello'], 'hello', { - type: 'bazinga/unknown', // we don't use this anyway - }), - } - - const noOverride = await fileToStorage.forDummy(data) - - // No extension - expect(ensurePosixPath(noOverride.uploadField)).toMatch( - /\/memory_store_basedir\/.*[^.]+$/, - ) - - const withOverride = await fileToStorage.forDummy(data, { - fileName: 'hello', - }) - - expect(withOverride.uploadField).toMatch(/[^.]+$/) - expect(ensurePosixPath(withOverride.uploadField)).toBe( - '/memory_store_basedir/hello', - ) - }) -}) -// FileLists -// Problem is - in the database world, a string[] is not a thing -// so we need a generic way of doing this -describe('FileList processing', () => { - const savers = createUploadSavers(uploadsConfig, memStore) - - const notPrismaData = [ - new File(['Hello'], 'hello.png', { - type: 'image/png', - }), - new File(['World'], 'world.jpeg', { - type: 'image/jpeg', - }), - ] - - it('Should handle FileLists', async () => { - const result = await savers.inList(notPrismaData) - - expect(result).toHaveLength(2) - - expect(ensurePosixPath(result[0])).toMatch( - /\/memory_store_basedir\/.*\.png/, - ) - expect(ensurePosixPath(result[1])).toMatch( - /\/memory_store_basedir\/.*\.jpeg/, - ) - }) - - it('Should handle FileLists with SaveOptions', async () => { - const result = await savers.inList(notPrismaData, { - path: '/bazinga_not_mem_store', - }) - - 
expect(result).toHaveLength(2) - expect(ensurePosixPath(result[0])).toMatch( - /\/bazinga_not_mem_store\/.*\.png/, - ) - expect(ensurePosixPath(result[1])).toMatch( - /\/bazinga_not_mem_store\/.*\.jpeg/, - ) - }) - - it('Should handle empty FileLists', async () => { - const promise = savers.inList() - - await expect(promise).resolves.not.toThrow() - }) -}) diff --git a/packages/storage/src/__tests__/queryExtensions.test.ts b/packages/storage/src/__tests__/queryExtensions.test.ts deleted file mode 100644 index a4e2ab817078..000000000000 --- a/packages/storage/src/__tests__/queryExtensions.test.ts +++ /dev/null @@ -1,517 +0,0 @@ -import fs from 'node:fs/promises' - -import type { MockedFunction } from 'vitest' -import { describe, it, vi, expect, beforeEach, beforeAll } from 'vitest' - -import { ensurePosixPath } from '@redwoodjs/project-config' - -import { FileSystemStorage } from '../adapters/FileSystemStorage/FileSystemStorage.js' -import { createUploadsConfig, setupStorage } from '../index.js' - -// @MARK: use the local prisma client in the test -import type { Dumbo, Dummy } from './prisma-client/index.js' -import { PrismaClient } from './prisma-client/index.js' - -vi.mock('node:fs/promises', () => ({ - default: { - writeFile: vi.fn(), - unlink: vi.fn(), - readFile: vi.fn(() => { - return 'MOCKED_FILE_CONTENT' - }), - copyFile: vi.fn(), - }, -})) - -// For creation of FS adapter -vi.mock('node:fs', () => ({ - existsSync: vi.fn(() => true), - mkdirSync: vi.fn(), -})) - -describe('Query extensions', () => { - const uploadsConfig = createUploadsConfig({ - dummy: { - fields: 'uploadField', - }, - dumbo: { - fields: ['firstUpload', 'secondUpload'], - }, - }) - - const { storagePrismaExtension, saveFiles } = setupStorage({ - uploadsConfig: uploadsConfig, - storageAdapter: new FileSystemStorage({ - baseDir: '/tmp', - }), - }) - - const prismaClient = new PrismaClient().$extends(storagePrismaExtension) - - beforeEach(() => { - vi.resetAllMocks() - }) - - const sampleFile 
= new File(['heres-some-content'], 'dummy.txt', { - type: 'text/plain', - }) - - describe('create', () => { - it('create will save files', async () => { - const processedData = await saveFiles.forDummy({ - uploadField: sampleFile, - }) - - expect(fs.writeFile).toHaveBeenCalled() - const dummy = await prismaClient.dummy.create({ - data: processedData, - }) - - // On windows the slahes are different - const uploadFieldPath = ensurePosixPath(dummy.uploadField) - - expect(uploadFieldPath).toMatch(/\/tmp\/.*\.txt$/) - }) - - it('will remove the file if the create fails', async () => { - try { - await prismaClient.dumbo.create({ - data: { - firstUpload: '/tmp/first.txt', - secondUpload: '/bazinga/second.txt', - // @ts-expect-error Checking the error here - id: 'this-is-the-incorrect-type', - }, - }) - } catch { - expect(fs.unlink).toHaveBeenNthCalledWith(1, '/tmp/first.txt') - expect(fs.unlink).toHaveBeenNthCalledWith(2, '/bazinga/second.txt') - } - - expect.assertions(2) - }) - }) - - describe('update', () => { - let ogDummy: Dummy - let ogDumbo: Dumbo - beforeAll(async () => { - ogDummy = await prismaClient.dummy.create({ - data: { - uploadField: '/tmp/old.txt', - }, - }) - - ogDumbo = await prismaClient.dumbo.create({ - data: { - firstUpload: '/tmp/oldFirst.txt', - secondUpload: '/tmp/oldSecond.txt', - }, - }) - }) - - beforeEach(() => { - vi.resetAllMocks() - }) - - it('update will remove the old file, save new one', async () => { - const updatedDummy = await prismaClient.dummy.update({ - data: { - uploadField: '/tmp/new.txt', - }, - where: { - id: ogDummy.id, - }, - }) - - expect(fs.unlink).toHaveBeenCalledWith('/tmp/old.txt') - expect(updatedDummy.uploadField).toBe('/tmp/new.txt') - }) - - it('should not delete the file if the update fails', async () => { - const failedUpdatePromise = prismaClient.dummy.update({ - data: { - // @ts-expect-error Intentional - id: 'this-is-the-incorrect-type', - }, - where: { - id: ogDummy.id, - }, - }) - - // Id is invalid, so the 
update should fail - await expect(failedUpdatePromise).rejects.toThrowError() - - // The old one should NOT be deleted - expect(fs.unlink).not.toHaveBeenCalled() - }) - - it('should only delete old files from the fields that are being updated', async () => { - const updatedDumbo = await prismaClient.dumbo.update({ - data: { - firstUpload: '/tmp/newFirst.txt', - }, - where: { - id: ogDumbo.id, - }, - }) - - expect(updatedDumbo.firstUpload).toBe('/tmp/newFirst.txt') - expect(updatedDumbo.secondUpload).toBe('/tmp/oldSecond.txt') - expect(fs.unlink).toHaveBeenCalledOnce() - expect(fs.unlink).toHaveBeenCalledWith('/tmp/oldFirst.txt') - }) - - it('should not delete files on update of non-upload fields', async () => { - // In this case, we're only updating the message field - await prismaClient.dumbo.update({ - data: { - message: 'Hello world', - }, - where: { - id: ogDumbo.id, - }, - }) - - expect(fs.unlink).not.toHaveBeenCalled() - }) - }) - - describe('delete', () => { - it('delete will remove all uploads', async () => { - const dumbo = await prismaClient.dumbo.create({ - data: { - firstUpload: '/tmp/first.txt', - secondUpload: '/tmp/second.txt', - }, - }) - - await prismaClient.dumbo.delete({ - where: { - id: dumbo.id, - }, - }) - - expect(fs.unlink).toHaveBeenCalledTimes(2) - expect(fs.unlink).toHaveBeenCalledWith('/tmp/first.txt') - expect(fs.unlink).toHaveBeenCalledWith('/tmp/second.txt') - }) - - it('delete will not remove any uploads if the delete fails', async () => { - const bookWithCover = await prismaClient.book.create({ - data: { - name: 'Prisma extensions for dummies', - cover: { - create: { - photo: '/tmp/book-covers/prisma-for-dummies.jpg', - }, - }, - }, - }) - - // This delete will fail because the book is associated with a cover BUTTTT - // test serves more as documentation (and to prevent regression if Prisma changes behavior) - // Because Prisma will throw the validation __before__ the delete in the extension is called - - try { - await 
prismaClient.bookCover.delete({ - where: { - id: bookWithCover.coverId, - }, - }) - // eslint-disable-next-line no-empty - } catch {} - - expect(fs.unlink).not.toHaveBeenCalled() - }) - - it('Should handle if a bad path is provided', async () => { - ;(fs.unlink as MockedFunction).mockRejectedValueOnce( - new Error('unlink error'), - ) - - const invalidPathDumbo = await prismaClient.dumbo.create({ - data: { - firstUpload: '', - secondUpload: 'im-a-invalid-path', - }, - }) - - const deletePromise = prismaClient.dumbo.delete({ - where: { - id: invalidPathDumbo.id, - }, - }) - - await expect(deletePromise).resolves.not.toThrow() - - expect(fs.unlink).toHaveBeenCalledOnce() - expect(fs.unlink).toHaveBeenCalledWith('im-a-invalid-path') - }) - }) - - describe('upsert', () => { - it('will remove old files and save new ones on upsert, if it exists [UPDATE]', async () => { - const ogDumbo = await prismaClient.dumbo.create({ - data: { - firstUpload: '/tmp/oldFirst.txt', - secondUpload: '/tmp/oldSecond.txt', - }, - }) - - const updatedDumbo = await prismaClient.dumbo.upsert({ - update: { - firstUpload: '/tmp/newFirst.txt', - }, - create: { - // won't be used - firstUpload: 'x', - secondUpload: 'x', - }, - where: { - id: ogDumbo.id, - }, - }) - - expect(updatedDumbo.firstUpload).toBe('/tmp/newFirst.txt') - expect(updatedDumbo.secondUpload).toBe('/tmp/oldSecond.txt') - expect(fs.unlink).toHaveBeenCalledOnce() - expect(fs.unlink).toHaveBeenCalledWith('/tmp/oldFirst.txt') - }) - - it('will create a new record (findOrCreate)', async () => { - const newDumbo = await prismaClient.dumbo.upsert({ - create: { - firstUpload: '/tmp/first.txt', - secondUpload: '/bazinga/second.txt', - }, - update: {}, - where: { - id: 444444444, - }, - }) - - expect(newDumbo.firstUpload).toBe('/tmp/first.txt') - expect(newDumbo.secondUpload).toBe('/bazinga/second.txt') - }) - - it('will remove processed files if upsert CREATION fails (findOrCreate)', async () => { - // This is essentially findOrCreate, 
because update is empty - try { - await prismaClient.dumbo.upsert({ - create: { - firstUpload: '/tmp/first.txt', - secondUpload: '/bazinga/second.txt', - // @ts-expect-error Checking the error here - id: 'this-is-the-incorrect-type', - }, - }) - } catch { - expect(fs.unlink).toHaveBeenNthCalledWith(1, '/tmp/first.txt') - expect(fs.unlink).toHaveBeenNthCalledWith(2, '/bazinga/second.txt') - } - - expect.assertions(2) - }) - - it('will remove processed files if upsert UPDATE fails', async () => { - // Bit of a contrived case... why would you ever have different values for update and create... - - const ogDumbo = await prismaClient.dumbo.create({ - data: { - firstUpload: '/tmp/oldFirst.txt', - secondUpload: '/tmp/oldSecond.txt', - }, - }) - - try { - await prismaClient.dumbo.upsert({ - where: { - id: ogDumbo.id, - }, - update: { - firstUpload: '/tmp/newFirst.txt', - secondUpload: '/tmp/newSecond.txt', - // @ts-expect-error Intentionally causing an error - id: 'this-should-cause-an-error', - }, - create: { - firstUpload: '/tmp/createFirst.txt', - secondUpload: '/tmp/createSecond.txt', - }, - }) - } catch (error) { - expect(fs.unlink).toHaveBeenCalledTimes(2) - expect(fs.unlink).not.toHaveBeenCalledWith('/tmp/createFirst.txt') - expect(fs.unlink).not.toHaveBeenCalledWith('/tmp/createSecond.txt') - expect(fs.unlink).toHaveBeenNthCalledWith(1, '/tmp/newFirst.txt') - expect(fs.unlink).toHaveBeenNthCalledWith(2, '/tmp/newSecond.txt') - expect(error).toBeDefined() - } - - // Verify the original files weren't deleted - const unchangedDumbo = await prismaClient.dumbo.findUnique({ - where: { id: ogDumbo.id }, - }) - expect(unchangedDumbo?.firstUpload).toBe('/tmp/oldFirst.txt') - expect(unchangedDumbo?.secondUpload).toBe('/tmp/oldSecond.txt') - - expect.assertions(8) - }) - }) - - describe('createMany', () => { - it('createMany will remove files if all the create fails', async () => { - try { - await prismaClient.dumbo.createMany({ - data: [ - { - firstUpload: '/one/first.txt', 
- secondUpload: '/one/second.txt', - // @ts-expect-error Intentional - id: 'break', - }, - { - firstUpload: '/two/first.txt', - secondUpload: '/two/second.txt', - // @ts-expect-error Intentional - id: 'break2', - }, - ], - }) - } catch { - expect(fs.unlink).toHaveBeenCalledTimes(4) - expect(fs.unlink).toHaveBeenNthCalledWith(1, '/one/first.txt') - expect(fs.unlink).toHaveBeenNthCalledWith(2, '/one/second.txt') - expect(fs.unlink).toHaveBeenNthCalledWith(3, '/two/first.txt') - expect(fs.unlink).toHaveBeenNthCalledWith(4, '/two/second.txt') - } - - expect.assertions(5) - }) - - it('createMany will remove all files, even if one of them errors', async () => { - try { - await prismaClient.dumbo.createMany({ - data: [ - // This one is correct, but createMany fails together - // so all the files should be removed! - { - firstUpload: '/one/first.txt', - secondUpload: '/one/second.txt', - id: 9158125, - }, - { - firstUpload: '/two/first.txt', - secondUpload: '/two/second.txt', - // @ts-expect-error Intentional - id: 'break2', - }, - ], - }) - } catch { - // This one doesn't actually get created! 
- expect( - prismaClient.dumbo.findUnique({ where: { id: 9158125 } }), - ).resolves.toBeNull() - - expect(fs.unlink).toHaveBeenCalledTimes(4) - expect(fs.unlink).toHaveBeenNthCalledWith(1, '/one/first.txt') - expect(fs.unlink).toHaveBeenNthCalledWith(2, '/one/second.txt') - expect(fs.unlink).toHaveBeenNthCalledWith(3, '/two/first.txt') - expect(fs.unlink).toHaveBeenNthCalledWith(4, '/two/second.txt') - } - - expect.assertions(6) - }) - }) - - describe('updateMany', () => { - it('will remove old files and save new ones on update, if they exist', async () => { - const ogDumbo1 = await prismaClient.dumbo.create({ - data: { - firstUpload: '/FINDME/oldFirst1.txt', - secondUpload: '/FINDME/oldSecond1.txt', - }, - }) - - const ogDumbo2 = await prismaClient.dumbo.create({ - data: { - firstUpload: '/FINDME/oldFirst2.txt', - secondUpload: '/FINDME/oldSecond2.txt', - }, - }) - - const updatedDumbos = await prismaClient.dumbo.updateMany({ - data: { - firstUpload: '/REPLACED/newFirst.txt', - secondUpload: '/REPLACED/newSecond.txt', - }, - where: { - firstUpload: { - contains: 'FINDME', - }, - }, - }) - - expect(updatedDumbos.count).toBe(2) - - const updatedDumbo1 = await prismaClient.dumbo.findFirstOrThrow({ - where: { - id: ogDumbo1.id, - }, - }) - - const updatedDumbo2 = await prismaClient.dumbo.findFirstOrThrow({ - where: { - id: ogDumbo2.id, - }, - }) - - // Still performs the update - expect(updatedDumbo1.firstUpload).toBe('/REPLACED/newFirst.txt') - expect(updatedDumbo1.secondUpload).toBe('/REPLACED/newSecond.txt') - expect(updatedDumbo2.firstUpload).toBe('/REPLACED/newFirst.txt') - expect(updatedDumbo2.secondUpload).toBe('/REPLACED/newSecond.txt') - - // Then deletes the old files - expect(fs.unlink).toHaveBeenCalledTimes(4) - expect(fs.unlink).toHaveBeenNthCalledWith(1, '/FINDME/oldFirst1.txt') - expect(fs.unlink).toHaveBeenNthCalledWith(2, '/FINDME/oldSecond1.txt') - expect(fs.unlink).toHaveBeenNthCalledWith(3, '/FINDME/oldFirst2.txt') - 
expect(fs.unlink).toHaveBeenNthCalledWith(4, '/FINDME/oldSecond2.txt') - }) - - it('will __not__ remove files if the update fails', async () => { - const ogDumbo1 = await prismaClient.dumbo.create({ - data: { - firstUpload: '/tmp/oldFirst1.txt', - secondUpload: '/tmp/oldSecond1.txt', - }, - }) - - const ogDumbo2 = await prismaClient.dumbo.create({ - data: { - firstUpload: '/tmp/oldFirst2.txt', - secondUpload: '/tmp/oldSecond2.txt', - }, - }) - - const failedUpdatePromise = prismaClient.dumbo.updateMany({ - data: { - // @ts-expect-error Intentional - id: 'this-is-the-incorrect-type', - }, - where: { - OR: [{ id: ogDumbo1.id }, { id: ogDumbo2.id }], - }, - }) - - // Id is invalid, so the update should fail - await expect(failedUpdatePromise).rejects.toThrowError() - - // The old files should NOT be deleted - expect(fs.unlink).not.toHaveBeenCalled() - }) - }) -}) diff --git a/packages/storage/src/__tests__/resultExtensions.test.ts b/packages/storage/src/__tests__/resultExtensions.test.ts deleted file mode 100644 index 4e8a82ec4b4f..000000000000 --- a/packages/storage/src/__tests__/resultExtensions.test.ts +++ /dev/null @@ -1,125 +0,0 @@ -import { describe, it, expect, vi } from 'vitest' - -import { MemoryStorage } from '../adapters/MemoryStorage/MemoryStorage.js' -import { createUploadsConfig, setupStorage } from '../index.js' -import { UrlSigner } from '../UrlSigner.js' - -// @MARK: use the local prisma client in the test -import { PrismaClient } from './prisma-client/index.js' - -vi.mock('@redwoodjs/project-config', async (importOriginal) => { - const originalProjectConfig = (await importOriginal()) as any - return { - ...originalProjectConfig, - getConfig: () => { - return { - web: { - apiUrl: '/.redwood/functions', - }, - } - }, - } -}) - -describe('Result extensions', () => { - const uploadsConfig = createUploadsConfig({ - dummy: { - fields: 'uploadField', - }, - dumbo: { - fields: ['firstUpload', 'secondUpload'], - }, - }) - - const memStorage = new 
MemoryStorage({ - baseDir: '/tmp', - }) - - const { storagePrismaExtension } = setupStorage({ - uploadsConfig, - storageAdapter: memStorage, - urlSigner: new UrlSigner({ - endpoint: '/signed-url', - secret: 'my-sekret', - }), - }) - - const prismaClient = new PrismaClient().$extends(storagePrismaExtension) - - it('Adds signedURL and dataURI extensions', async () => { - const dummy = await prismaClient.dummy.create({ - data: { - uploadField: '/dummy/upload.txt', - }, - }) - - expect(dummy).toHaveProperty('withSignedUrl') - expect(dummy).toHaveProperty('withDataUri') - }) - - it('Does not add it to models without upload fields', async () => { - const noUpload = await prismaClient.noUploadFields.create({ - data: { - name: 'no-upload', - }, - }) - - expect(noUpload).not.toHaveProperty('withSignedUrl') - expect(noUpload).not.toHaveProperty('withDataUri') - }) - - it('Generates signed urls for each upload field', async () => { - const dumbo = await prismaClient.dumbo.create({ - data: { - firstUpload: '/dumbo/first.txt', - secondUpload: '/dumbo/second.txt', - }, - }) - - const signedUrlDumbo = dumbo.withSignedUrl({ - expiresIn: 254, - }) - - expect(signedUrlDumbo.firstUpload).toContain( - '/.redwood/functions/signed-url', - ) - expect(signedUrlDumbo.firstUpload).toContain('path=%2Fdumbo%2Ffirst.txt') - expect(signedUrlDumbo.secondUpload).toContain('path=%2Fdumbo%2Fsecond.txt') - }) - - it('Generates data uris for each upload field', async () => { - // Save these files to disk - const { location: firstUploadLocation } = await memStorage.save( - new File(['SOFT_KITTENS'], 'first.txt'), - { - fileName: 'first.txt', - path: '/dumbo', - }, - ) - const { location: secondUploadLocation } = await memStorage.save( - new File(['PURR_PURR'], 'second.txt'), - { - fileName: 'second.txt', - path: '/dumbo', - }, - ) - - const dumbo = await prismaClient.dumbo.create({ - data: { - firstUpload: firstUploadLocation, - secondUpload: secondUploadLocation, - }, - }) - - // Note that this is 
async! - const signedUrlDumbo = await dumbo.withDataUri() - - expect(signedUrlDumbo.firstUpload).toMatch( - `data:text/plain;base64,${Buffer.from('SOFT_KITTENS').toString('base64')}`, - ) - - expect(signedUrlDumbo.secondUpload).toMatch( - `data:text/plain;base64,${Buffer.from('PURR_PURR').toString('base64')}`, - ) - }) -}) diff --git a/packages/storage/src/__tests__/signedUrls.test.ts b/packages/storage/src/__tests__/signedUrls.test.ts deleted file mode 100644 index 9d0e6345abb1..000000000000 --- a/packages/storage/src/__tests__/signedUrls.test.ts +++ /dev/null @@ -1,221 +0,0 @@ -import { describe, expect, beforeEach, afterEach, vi, it, test } from 'vitest' - -import { EXPIRES_IN, UrlSigner } from '../UrlSigner.js' - -const signer = new UrlSigner({ - // Doing this means we don't need to mock getConfig - endpoint: 'https://myapiside.com/access-signed-file', - secret: 'bazinga-3-32-151', -}) - -describe('UrlSigner', () => { - it('Can creates a signature', () => { - const { signature, expiry: expires } = signer.generateSignature({ - filePath: '/tmp/myfile.txt', - expiresInMs: EXPIRES_IN.days(5), - }) - - expect(signature).toBeDefined() - - expect(diffInDaysFromNow(expires as number)).toBeCloseTo(5) - }) - - it('throws with correct error when wrong expires passed', () => { - const { signature, expiry: expires } = signer.generateSignature({ - filePath: '/tmp/myfile.txt', - expiresInMs: EXPIRES_IN.days(1), - }) - - expect(() => - signer.validateSignature({ - path: '/tmp/myfile.txt', - s: signature, - expiry: expires, - }), - ).not.toThrow() - - expect(() => - signer.validateSignature({ - path: '/tmp/myfile.txt', - s: signature, - expiry: 12512351, - }), - ).toThrowError('Signature has expired') - }) - - it('Handles url encoded filePaths', () => { - const { signature, expiry: expires } = signer.generateSignature({ - filePath: '/tmp/myfile.txt', - expiresInMs: EXPIRES_IN.days(1), - }) - - expect(() => - signer.validateSignature({ - path: 
encodeURIComponent('/tmp/myfile.txt'), - s: signature, - expiry: expires, - }), - ).not.toThrow() - }) - - it('Throws an invalid signature when signature is wrong', () => { - const { signature, expiry } = signer.generateSignature({ - filePath: '/tmp/myfile.txt', - expiresInMs: EXPIRES_IN.days(1), - }) - - expect(() => - signer.validateSignature({ - path: '/tmp/myfile.txt', - s: signature, - expiry, - }), - ).not.toThrow() - - expect(() => - signer.validateSignature({ - path: '/tmp/myfile.txt', - s: 'im-the-wrong-signature', - expiry, - }), - ).toThrowError('Invalid signature') - }) - - it('Throws an invalid signature when file path is wrong', () => { - const { signature, expiry } = signer.generateSignature({ - filePath: '/tmp/myfile.txt', - expiresInMs: EXPIRES_IN.days(20), - }) - expect(() => - signer.validateSignature({ - path: '/tmp/some-other-file.txt', - s: signature, - expiry, - }), - ).toThrowError('Invalid signature') - }) -}) - -describe('Expired signature', () => { - // Separate, so we can mock the times - beforeEach(() => { - vi.useFakeTimers() - }) - - afterEach(() => { - vi.useRealTimers() - }) - - it('throws an error when the signature has expired', () => { - const filePath = '/bazinga/kittens.png' - const { signature, expiry } = signer.generateSignature({ - filePath, - expiresInMs: EXPIRES_IN.minutes(15), - }) - - const validation = () => - signer.validateSignature({ - path: filePath, - s: signature, - expiry, - }) - - expect(validation).not.toThrow() - - // Time travel to the future - vi.advanceTimersByTime(EXPIRES_IN.days(1)) - - expect(validation).toThrowError('Signature has expired') - }) -}) - -test('Generates a signed url', () => { - const signedUrl = signer.generateSignedUrl( - '/files/bazinga', - EXPIRES_IN.days(1), - ) - - expect(signedUrl).toContain('https://myapiside.com/access-signed-file?s=') - expect(signedUrl).toMatch(/s=.*/) - expect(signedUrl).toMatch(/expiry=[0-9]+/) - 
expect(signedUrl).toContain(`path=${encodeURIComponent('/files/bazinga')}`) // The actual file path -}) - -describe('validatePath', () => { - beforeEach(() => { - vi.useFakeTimers() - }) - - afterEach(() => { - vi.useRealTimers() - }) - - it('validates a path or url with a valid signature and expiry', () => { - const filePath = '/tmp/myfile.txt' - const expiresInMs = EXPIRES_IN.days(1) - const { signature, expiry } = signer.generateSignature({ - filePath, - expiresInMs, - }) - - const signedPath = `/bazinga?s=${signature}&expiry=${expiry}&path=${encodeURIComponent( - filePath, - )}` - - // When its just a path - expect(() => signer.validateSignedUrl(signedPath)).not.toThrow() - expect(signer.validateSignedUrl(signedPath)).toBe(filePath) - - // When its a full url - const signedUrl = `https://myredwoodapp.com/bazinga?s=${signature}&expiry=${expiry}&path=${encodeURIComponent( - filePath, - )}` - - expect(() => signer.validateSignedUrl(signedUrl)).not.toThrow() - expect(signer.validateSignedUrl(signedUrl)).toBe(filePath) - }) - - it('throws an error when the signature has expired', () => { - const filePath = '/tmp/myfile.txt' - const expiresInMs = EXPIRES_IN.minutes(15) - const { signature, expiry } = signer.generateSignature({ - filePath, - expiresInMs, - }) - - const url = `/bazinga?s=${signature}&expiry=${expiry}&path=${encodeURIComponent( - filePath, - )}` - - // Time travel to the future - vi.advanceTimersByTime(EXPIRES_IN.days(1)) - - expect(() => signer.validateSignedUrl(url)).toThrowError( - 'Signature has expired', - ) - }) - - it('throws an error when the signature is invalid', () => { - const filePath = '/tmp/myfile.txt' - const expiresInMs = EXPIRES_IN.days(1) - const { signature, expiry } = signer.generateSignature({ - filePath, - expiresInMs, - }) - - const url = `/bazinga?s=${signature}&expiry=${expiry}&path=${encodeURIComponent( - filePath, - )}` - - const invalidSignatureUrl = url.replace(signature, 'invalid-signature') - - expect(() => 
signer.validateSignedUrl(invalidSignatureUrl)).toThrowError( - 'Invalid signature', - ) - }) -}) - -// Util functions to make the tests more readable -function diffInDaysFromNow(time: number) { - return Math.abs(time - Date.now()) / 86400000 -} diff --git a/packages/storage/src/__tests__/unit-test-schema.prisma b/packages/storage/src/__tests__/unit-test-schema.prisma deleted file mode 100644 index 21f28a1f1ccb..000000000000 --- a/packages/storage/src/__tests__/unit-test-schema.prisma +++ /dev/null @@ -1,40 +0,0 @@ -datasource db { - provider = "sqlite" - url = "file:for_unit_test.db" -} - -generator client { - provider = "prisma-client-js" - output = "./prisma-client" // <-- we generated a local prisma client so it doesn't interfere with the mono repo -} - -model Dummy { - id Int @id @default(autoincrement()) - uploadField String -} - -model Dumbo { - id Int @id @default(autoincrement()) - firstUpload String - secondUpload String - message String? -} - -model NoUploadFields { - id Int @id @default(autoincrement()) - name String -} - -model Book { - id Int @id @default(autoincrement()) - coverId Int @unique - cover BookCover @relation(fields: [coverId], references: [id]) - name String -} - -model BookCover { - id Int @id @default(autoincrement()) - // This is the upload field, - photo String - book Book? -} diff --git a/packages/storage/src/__typetests__/types.test.ts b/packages/storage/src/__typetests__/types.test.ts deleted file mode 100644 index de151d552902..000000000000 --- a/packages/storage/src/__typetests__/types.test.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { expect, test } from 'tstyche' - -import { createUploadsConfig, setupStorage } from 'src/index.js' - -import { MemoryStorage } from '../adapters/MemoryStorage/MemoryStorage.js' -import { type UploadsConfig } from '../prismaExtension.js' - -// Use the createUplodsConfig helper here.... 
-// otherwise the types won't be accurate -const uploadsConfig = createUploadsConfig({ - dummy: { - fields: 'uploadField', - }, - dumbo: { - fields: ['firstUpload', 'secondUpload'], - }, -}) - -const { saveFiles } = setupStorage({ - uploadsConfig, - storageAdapter: new MemoryStorage({ - baseDir: '/tmp', - }), -}) - -// const prismaClient = new PrismaClient().$extends(storagePrismaExtension) - -test('only configured models have savers', async () => { - expect(saveFiles).type.toHaveProperty('forDummy') - expect(saveFiles).type.toHaveProperty('forDumbo') - - // These weren't configured above - expect(saveFiles).type.not.toHaveProperty('forNoUploadFields') - expect(saveFiles).type.not.toHaveProperty('forBook') - expect(saveFiles).type.not.toHaveProperty('forBookCover') -}) - -test('inline config for save files is OK!', () => { - const { saveFiles } = setupStorage({ - uploadsConfig: { - bookCover: { - fields: 'photo', - }, - }, - storageAdapter: new MemoryStorage({ - baseDir: '/tmp', - }), - }) - - expect(saveFiles).type.toHaveProperty('forBookCover') - expect(saveFiles).type.not.toHaveProperty('forDummy') - expect(saveFiles).type.not.toHaveProperty('forDumbo') -}) - -test('UploadsConfig accepts all available models with their fields', async () => { - expect().type.toHaveProperty('dummy') - expect().type.toHaveProperty('dumbo') - expect().type.toHaveProperty('book') - expect().type.toHaveProperty('bookCover') - expect().type.toHaveProperty('noUploadFields') - - expect().type.toBeAssignableWith<{ - fields: ['firstUpload'] // one of the fields, but not all of them - }>() - - expect().type.toBeAssignableWith<{ - fields: ['firstUpload', 'secondUpload'] // one of the fields, but not all of them - }>() - - expect().type.toBeAssignableWith<{ - fields: 'photo' - }>() - - // If you give it something else, it won't accept it - expect().type.not.toBeAssignableWith<{ - fields: ['bazinga'] - }>() -}) diff --git a/packages/storage/src/adapters/BaseStorageAdapter.ts 
b/packages/storage/src/adapters/BaseStorageAdapter.ts deleted file mode 100644 index 66f3eee80136..000000000000 --- a/packages/storage/src/adapters/BaseStorageAdapter.ts +++ /dev/null @@ -1,55 +0,0 @@ -/** - * The storage adapter will just save the file and return - * { - * fileId: string, - * location: string, // depending on storage it could be a path - * } - */ - -import mime from 'mime-types' -import { ulid } from 'ulid' - -export type AdapterResult = { - location: string -} - -export type SaveOptionsOverride = { - fileName?: string - path?: string -} - -export type AdapterOptions = { - baseDir: string -} - -export abstract class BaseStorageAdapter { - adapterOpts: AdapterOptions - constructor(adapterOpts: AdapterOptions) { - this.adapterOpts = adapterOpts - } - - getAdapterOptions() { - return this.adapterOpts - } - - generateFileNameWithExtension( - saveOpts: SaveOptionsOverride | undefined, - file: File, - ) { - const fileName = saveOpts?.fileName || ulid() - const extension = mime.extension(file.type) - ? 
`.${mime.extension(file.type)}` - : '' - return `${fileName}${extension}` - } - - abstract save( - file: File, - saveOpts?: SaveOptionsOverride, - ): Promise - abstract remove(fileLocation: AdapterResult['location']): Promise - abstract read(fileLocation: AdapterResult['location']): Promise<{ - contents: Buffer | string - type: ReturnType - }> -} diff --git a/packages/storage/src/adapters/FileSystemStorage/FileSystemStorage.test.ts b/packages/storage/src/adapters/FileSystemStorage/FileSystemStorage.test.ts deleted file mode 100644 index 3cd3464c59aa..000000000000 --- a/packages/storage/src/adapters/FileSystemStorage/FileSystemStorage.test.ts +++ /dev/null @@ -1,90 +0,0 @@ -import { vol } from 'memfs' -import { beforeEach, describe, expect, test, vi } from 'vitest' - -import { ensurePosixPath } from '@redwoodjs/project-config' - -import { FileSystemStorage } from './FileSystemStorage.js' - -// Mock the entire fs module -vi.mock('node:fs', async () => { - const memfs = await import('memfs') - return { - ...memfs.fs, - default: memfs.fs, - } -}) - -// Mock the fs/promises module -vi.mock('node:fs/promises', async () => { - const memfs = await import('memfs') - return { - ...memfs.fs.promises, - default: memfs.fs.promises, - } -}) - -describe('FileSystemStorage', () => { - let storage: FileSystemStorage - const baseDir = '/tmp/test_uploads' - - beforeEach(() => { - vol.reset() - // Avoiding printing on stdout - vi.spyOn(console, 'log').mockImplementation(() => {}) - - storage = new FileSystemStorage({ baseDir }) - }) - - const plainFile = new File(['test content'], 'test.txt', { - type: 'text/plain', - }) - - test('save should store a file on the file system', async () => { - const result = await storage.save(plainFile) - - expect(result).toHaveProperty('location') - const posixLocation = ensurePosixPath(result.location) - expect(posixLocation).toMatch(/\/tmp\/test_uploads\/.*\.txt$/) - expect(vol.existsSync(result.location)).toBe(true) - }) - - test('remove should 
delete a file fron ', async () => { - const { location } = await storage.save(plainFile) - - await storage.remove(location) - expect(vol.existsSync(location)).toBe(false) - }) - - test('read should return file contents and type', async () => { - const { location: plainFileLocation } = await storage.save(plainFile) - - const plainFileReadResult = await storage.read(plainFileLocation) - expect(plainFileReadResult.contents).toBeInstanceOf(Buffer) - expect(plainFileReadResult.contents.toString()).toBe('test content') - expect(plainFileReadResult.type).toBe('text/plain') - - const imageFile = new File(['ABCDEF'], 'test.png', { type: 'image/png' }) - const { location } = await storage.save(imageFile) - - const result = await storage.read(location) - expect(result.contents).toBeInstanceOf(Buffer) - expect(result.contents.toString()).toBe('ABCDEF') - expect(result.type).toBe('image/png') - }) - - test('save should use custom path, with no baseDir, when provided', async () => { - // Note that using a custom path means you need to create the directory yourself! - vol.mkdirSync('/my_custom/path', { recursive: true }) - - const result = await storage.save(plainFile, { - path: '/my_custom/path', - fileName: 'bazinga', - }) - - // Note that it doesn't have the baseDir! 
- expect(ensurePosixPath(result.location)).toEqual( - '/my_custom/path/bazinga.txt', - ) - expect(vol.existsSync(result.location)).toBe(true) - }) -}) diff --git a/packages/storage/src/adapters/FileSystemStorage/FileSystemStorage.ts b/packages/storage/src/adapters/FileSystemStorage/FileSystemStorage.ts deleted file mode 100644 index 4b479540724b..000000000000 --- a/packages/storage/src/adapters/FileSystemStorage/FileSystemStorage.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { existsSync, mkdirSync } from 'node:fs' -import fs from 'node:fs/promises' -import path from 'node:path' - -import mime from 'mime-types' - -import { ensurePosixPath } from '@redwoodjs/project-config' - -import type { SaveOptionsOverride } from '../BaseStorageAdapter.js' -import { BaseStorageAdapter } from '../BaseStorageAdapter.js' - -export class FileSystemStorage - extends BaseStorageAdapter - implements BaseStorageAdapter -{ - constructor(opts: { baseDir: string }) { - super(opts) - if (!existsSync(opts.baseDir)) { - const posixBaseDir = ensurePosixPath(opts.baseDir) - console.log('Creating baseDir >', posixBaseDir) - mkdirSync(posixBaseDir, { recursive: true }) - } - } - async save(file: File, saveOverride?: SaveOptionsOverride) { - const fileName = this.generateFileNameWithExtension(saveOverride, file) - - const location = path.join( - ensurePosixPath(saveOverride?.path || this.adapterOpts.baseDir), - fileName, - ) - const nodeBuffer = await file.arrayBuffer() - - await fs.writeFile(location, Buffer.from(nodeBuffer)) - return { location } - } - - async read(filePath: string) { - return { - contents: await fs.readFile(filePath), - type: mime.lookup(filePath), - } - } - - async remove(filePath: string) { - await fs.unlink(filePath) - } -} diff --git a/packages/storage/src/adapters/MemoryStorage/MemoryStorage.test.ts b/packages/storage/src/adapters/MemoryStorage/MemoryStorage.test.ts deleted file mode 100644 index 083ae4c5fc20..000000000000 --- 
a/packages/storage/src/adapters/MemoryStorage/MemoryStorage.test.ts +++ /dev/null @@ -1,64 +0,0 @@ -import { describe, expect, test } from 'vitest' - -import { ensurePosixPath } from '@redwoodjs/project-config' - -import { MemoryStorage } from './MemoryStorage.js' - -describe('MemoryStorage', () => { - const storage = new MemoryStorage({ baseDir: 'uploads' }) - - test('save should store a file in memory', async () => { - const file = new File(['test content'], 'test.txt', { type: 'text/plain' }) - const result = await storage.save(file) - - expect(result).toHaveProperty('location') - expect(ensurePosixPath(result.location)).toMatch(/uploads\/.*\.txt$/) - expect(storage.store[result.location]).toBeDefined() - }) - - test('read should return file contents and type', async () => { - const file = new File(['ABCDEF'], 'test.txt', { type: 'image/png' }) - const { location } = await storage.save(file) - - const result = await storage.read(location) - expect(result.contents).toBeInstanceOf(Buffer) - expect(result.contents.toString()).toBe('ABCDEF') - expect(result.type).toBe('image/png') - }) - - test('remove should delete a file from memory', async () => { - const file = new File(['test content'], 'test.txt', { type: 'text/plain' }) - const { location } = await storage.save(file) - - await storage.remove(location) - expect(storage.store[location]).toBeUndefined() - }) - - test('read should return file contents and type', async () => { - const file = new File(['ABCDEF'], 'test.txt', { type: 'image/png' }) - const { location } = await storage.save(file) - - const result = await storage.read(location) - expect(result.contents).toBeInstanceOf(Buffer) - expect(result.contents.toString()).toBe('ABCDEF') - expect(result.type).toBe('image/png') - }) - - test('clear should remove all stored files', async () => { - const file1 = new File(['content 1'], 'file1.txt', { type: 'text/plain' }) - const file2 = new File(['content 2'], 'file2.txt', { type: 'text/plain' }) - - await 
storage.save(file1) - await storage.save(file2) - - await storage.clear() - expect(Object.keys(storage.store).length).toBe(0) - }) - - test('save should use custom path when provided', async () => { - const file = new File(['test content'], 'test.txt', { type: 'text/plain' }) - const result = await storage.save(file, { path: 'custom/path' }) - - expect(ensurePosixPath(result.location)).toContain('custom/path') - }) -}) diff --git a/packages/storage/src/adapters/MemoryStorage/MemoryStorage.ts b/packages/storage/src/adapters/MemoryStorage/MemoryStorage.ts deleted file mode 100644 index fd4a49226140..000000000000 --- a/packages/storage/src/adapters/MemoryStorage/MemoryStorage.ts +++ /dev/null @@ -1,44 +0,0 @@ -import path from 'node:path' - -import mime from 'mime-types' - -import { BaseStorageAdapter } from '../BaseStorageAdapter.js' -import type { SaveOptionsOverride } from '../BaseStorageAdapter.js' - -export class MemoryStorage - extends BaseStorageAdapter - implements BaseStorageAdapter -{ - store: Record = {} - - async save(file: File, saveOpts?: SaveOptionsOverride) { - const fileName = this.generateFileNameWithExtension(saveOpts, file) - - const location = path.join( - saveOpts?.path || this.adapterOpts.baseDir, - fileName, - ) - const nodeBuffer = await file.arrayBuffer() - - this.store[location] = Buffer.from(nodeBuffer) - - return { - location, - } - } - - async remove(filePath: string) { - delete this.store[filePath] - } - - async read(filePath: string) { - return { - contents: this.store[filePath], - type: mime.lookup(filePath), - } - } - - async clear() { - this.store = {} - } -} diff --git a/packages/storage/src/createSavers.ts b/packages/storage/src/createSavers.ts deleted file mode 100644 index 45c314a3e495..000000000000 --- a/packages/storage/src/createSavers.ts +++ /dev/null @@ -1,90 +0,0 @@ -import { ulid } from 'ulid' - -import type { - SaveOptionsOverride, - BaseStorageAdapter, -} from './adapters/BaseStorageAdapter.js' -import type { ModelNames, 
UploadsConfig } from './prismaExtension.js' - -// Assumes you pass in the graphql type -type MakeFilesString = { - [K in keyof T]: T[K] extends File ? string : T[K] -} - -export const createFileListSaver = (storage: BaseStorageAdapter) => { - return async (files: File[] = [], pathOverrideOnly?: { path?: string }) => { - const locations = await Promise.all( - files.map(async (file) => { - const { location } = await storage.save(file, pathOverrideOnly) - return location - }), - ) - - return locations - } -} - -/* -This creates a "saver" for each model in the uploads config (i.e. tied to a model in the prisma schema) -The saver will only handle single file uploads, not file lists. -*/ -export const createUploadSavers = ( - uploadConfig: UploadsConfig, - storage: BaseStorageAdapter, -) => { - type uploadSaverNames = `for${Capitalize}` - - // @TODO(TS): Is there a way to make the type of data more specific? - type Savers = { - [K in uploadSaverNames]: >( - data: T, - overrideSaveOptions?: SaveOptionsOverride, - ) => Promise> - } - - const savers = {} as Savers - - Object.keys(uploadConfig).forEach((model) => { - const modelKey = model as keyof typeof uploadConfig - - const currentModelConfig = uploadConfig[modelKey] - - if (!currentModelConfig) { - return - } - - const currentModelUploadFields = ( - Array.isArray(currentModelConfig.fields) - ? 
currentModelConfig.fields - : [currentModelConfig.fields] - ) as string[] - - const capitalCaseModel = `${model.charAt(0).toUpperCase() + model.slice(1)}` - const saverKey = `for${capitalCaseModel}` as keyof Savers - - savers[saverKey] = async (data, overrideSaveOptions) => { - const updatedFields = {} as Record - for await (const field of currentModelUploadFields) { - if (data[field]) { - const file = data[field] - - const saveOptions = overrideSaveOptions || { - fileName: `${model}-${field}-${ulid()}`, - } - const { location } = await storage.save(file, saveOptions) - - updatedFields[field] = location - } - } - return { - ...data, - ...updatedFields, - } - } - }) - - return { - ...savers, - inList: createFileListSaver(storage), - } -} diff --git a/packages/storage/src/fileToDataUri.ts b/packages/storage/src/fileToDataUri.ts deleted file mode 100644 index 16b0d3193f3f..000000000000 --- a/packages/storage/src/fileToDataUri.ts +++ /dev/null @@ -1,12 +0,0 @@ -import type { BaseStorageAdapter } from './adapters/BaseStorageAdapter.js' - -export async function fileToDataUri( - filePath: string, - storage: BaseStorageAdapter, -) { - const { contents, type: mimeType } = await storage.read(filePath) - - const base64Data = Buffer.from(contents).toString('base64') - - return `data:${mimeType};base64,${base64Data}` -} diff --git a/packages/storage/src/index.ts b/packages/storage/src/index.ts deleted file mode 100644 index af0f5c468b1d..000000000000 --- a/packages/storage/src/index.ts +++ /dev/null @@ -1,52 +0,0 @@ -import type { BaseStorageAdapter } from './adapters/BaseStorageAdapter.js' -import { createUploadSavers } from './createSavers.js' -import type { - ModelNames, - UploadConfigForModel, - UploadsConfig, -} from './prismaExtension.js' -import { createUploadsExtension } from './prismaExtension.js' -import type { UrlSigner } from './UrlSigner.js' - -type SetupStorageOptions = { - uploadsConfig: UploadsConfig - storageAdapter: BaseStorageAdapter - urlSigner?: UrlSigner 
-} - -export const setupStorage = ({ - uploadsConfig, - storageAdapter, - urlSigner, -}: SetupStorageOptions) => { - const prismaExtension = createUploadsExtension( - uploadsConfig, - storageAdapter, - urlSigner, - ) - - const saveFiles = createUploadSavers(uploadsConfig, storageAdapter) - - return { - storagePrismaExtension: prismaExtension, - saveFiles, - } -} - -/** - * This utility function ensures that you receive accurate type suggestions for your savers. - * If you use the type UploadsConfig directly, you may receive suggestions for saveFiles.forY where Y hasn't been configured. - * By using this utility function, you will only receive suggestions for the models that you have configured. - * - * @param uploadsConfig The uploads configuration object. - * @returns The same uploads configuration object, but with filtered types - */ -export function createUploadsConfig< - T extends Partial<{ - [K in ModelNames]?: UploadConfigForModel - }>, ->(uploadsConfig: T): T { - return uploadsConfig -} - -export type { ModelNames, UploadsConfig } from './prismaExtension.js' diff --git a/packages/storage/src/prismaExtension.ts b/packages/storage/src/prismaExtension.ts deleted file mode 100644 index 9794dcb1fec2..000000000000 --- a/packages/storage/src/prismaExtension.ts +++ /dev/null @@ -1,345 +0,0 @@ -import { PrismaClient } from '@prisma/client' -import type { Prisma } from '@prisma/client' -import { Prisma as PrismaExtension } from '@prisma/client/extension' -import type * as runtime from '@prisma/client/runtime/library' - -import type { BaseStorageAdapter } from './adapters/BaseStorageAdapter.js' -import { fileToDataUri } from './fileToDataUri.js' -import type { UrlSigner } from './UrlSigner.js' - -type FilterOutDollarPrefixed = T extends `$${string}` - ? never - : T extends symbol // Remove symbol here, because it doesn't help users - ? never - : T - -// Filter out $on, $connect, etc. 
-export type ModelNames = FilterOutDollarPrefixed - -type PrismaModelFields = keyof Prisma.Result< - PrismaClient[MName], - any, - 'findFirstOrThrow' -> - -export type UploadConfigForModel = { - fields: - | PrismaModelFields - | PrismaModelFields[] -} - -export type UploadsConfig = { - [K in MNames]?: UploadConfigForModel -} - -type WithSignedUrlArgs = { - expiresIn?: number -} - -export const createUploadsExtension = ( - config: UploadsConfig, - storageAdapter: BaseStorageAdapter, - urlSigner?: UrlSigner, -) => { - // @TODO I think we can use Prisma.getExtensionContext(this) - // instead of creating a new PrismaClient instance - const prismaInstance = new PrismaClient() - - type ResultExtends = { - [K in MNames]: { - withDataUri: { - needs: Record - compute: ( - modelData: Record, - ) => (this: T) => Promise - } - withSignedUrl: { - needs: Record - compute: ( - modelData: Record, - ) => (this: T, signArgs?: WithSignedUrlArgs) => T - } - } - } - - const queryExtends: runtime.ExtensionArgs['query'] = {} - - const resultExtends = {} as ResultExtends - for (const modelName in config) { - // Guaranteed to have modelConfig, we're looping over config 🙄 - const modelConfig = config[modelName] - - if (!modelConfig) { - continue - } - - const uploadFields = ( - Array.isArray(modelConfig.fields) - ? 
modelConfig.fields - : [modelConfig.fields] - ) as string[] - - queryExtends[modelName] = { - async create({ query, args }) { - try { - const result = await query(args) - return result - } catch (e) { - // If the create fails, we need to delete the uploaded files - await removeUploadedFiles( - uploadFields, - args.data as Record, - ) - throw e - } - }, - async createMany({ query, args }) { - try { - const result = await query(args) - return result - } catch (e) { - const createDatas = args.data as [] - - // If the create fails, we need to delete the uploaded files - for await (const createData of createDatas) { - await removeUploadedFiles(uploadFields, createData) - } - - throw e - } - }, - async update({ query, model, args }) { - // Check if any of the uploadFields are present in args.data - // We only want to process fields that are being updated - const uploadFieldsToUpdate = uploadFields.filter( - (field) => - // All of this non-sense is to make typescript happy. I'm not sure how data could be anything but an object - typeof args.data === 'object' && - args.data !== null && - field in args.data, - ) - - // If no upload fields are present, proceed with the original query - // avoid overhead of extra lookups - if (uploadFieldsToUpdate.length == 0) { - return query(args) - } else { - const originalRecord = await prismaInstance[ - model as ModelNames - // @ts-expect-error TS in strict mode will error due to union type. We cannot narrow it down here. - ].findFirstOrThrow({ - where: args.where, - // @TODO: should we select here to reduce the amount of data we're handling - }) - - // Similar, but not same as create - try { - const result = await query(args) - - // **After** we've updated the record, we need to delete the old file. - await removeUploadedFiles(uploadFieldsToUpdate, originalRecord) - - return result - } catch (e) { - // If the update fails, we need to delete the newly uploaded files - // but not the ones that already exist! 
- await removeUploadedFiles( - uploadFieldsToUpdate, - args.data as Record, - ) - throw e - } - } - }, - async updateMany({ query, model, args }) { - // Check if any of the uploadFields are present in args.data - // We only want to process fields that are being updated - const uploadFieldsToUpdate = uploadFields.filter( - (field) => - // All of this non-sense is to make typescript happy. I'm not sure how data could be anything but an object - typeof args.data === 'object' && - args.data !== null && - field in args.data, - ) - - if (uploadFieldsToUpdate.length == 0) { - return query(args) - } else { - // MULTIPLE! - const originalRecords = await prismaInstance[ - model as ModelNames - // @ts-expect-error TS in strict mode will error due to union type. We cannot narrow it down here. - ].findMany({ - where: args.where, - // @TODO: should we select here to reduce the amount of data we're handling - }) - - try { - const result = await query(args) - - // Remove the uploaded files from each of the original records - for await (const originalRecord of originalRecords) { - await removeUploadedFiles(uploadFieldsToUpdate, originalRecord) - } - - return result - } catch (e) { - // If the update many fails, we need to delete the newly uploaded files - // but not the ones that already exist! - await removeUploadedFiles( - uploadFieldsToUpdate, - args.data as Record, - ) - throw e - } - } - }, - async upsert({ query, model, args }) { - let isUpdate: boolean | undefined - const uploadFieldsToUpdate = uploadFields.filter( - (field) => - typeof args.update === 'object' && - args.update !== null && - field in args.update, - ) - - try { - let existingRecord: Record | undefined - if (args.update) { - // We only need to check for existing records if we're updating - existingRecord = await prismaInstance[ - model as ModelNames - // @ts-expect-error TS in strict mode will error due to union type. We cannot narrow it down here. 
- ].findUnique({ - where: args.where, - }) - isUpdate = !!existingRecord - } - - const result = await query(args) - - if (isUpdate && existingRecord) { - // If the record existed, remove old uploaded files - await removeUploadedFiles(uploadFieldsToUpdate, existingRecord) - } - - return result - } catch (e) { - // If the upsert fails, we need to delete any newly uploaded files - await removeUploadedFiles( - // Only delete files we're updating on update - isUpdate ? uploadFieldsToUpdate : uploadFields, - (isUpdate ? args.update : args.create) as Record, - ) - - throw e - } - }, - - async delete({ query, args }) { - const deleteResult = await query(args) - await removeUploadedFiles( - uploadFields, - // We don't know the exact type here - deleteResult as Record, - ) - - return deleteResult - }, - } - - // This makes the result extension only available for models with uploadFields - const needs = Object.fromEntries(uploadFields.map((field) => [field, true])) - - resultExtends[modelName] = { - withDataUri: { - needs, - compute(modelData) { - return async () => { - const base64UploadFields: Record = {} - - for await (const field of uploadFields) { - base64UploadFields[field] = await fileToDataUri( - modelData[field] as string, - storageAdapter, - ) - } - - return { - // modelData is of type unknown at this point - ...(modelData as any), - ...base64UploadFields, - } - } - }, - }, - withSignedUrl: { - needs, - compute(modelData) { - return ({ expiresIn }: WithSignedUrlArgs = {}) => { - if (!urlSigner) { - throw new Error( - 'Please supply signed url settings in setupUpload()', - ) - } - const signedUrlFields: Record = {} - - for (const field of uploadFields) { - if (!modelData[field]) { - continue - } - - signedUrlFields[field] = urlSigner.generateSignedUrl( - modelData[field] as string, - expiresIn, - ) - } - - return { - // modelData is of type unknown at this point - ...(modelData as any), - ...signedUrlFields, - } - } - }, - }, - } - } - - return 
PrismaExtension.defineExtension((client) => { - return client.$extends({ - name: 'redwood-upload-prisma-plugin', - query: queryExtends, - result: resultExtends, - }) - }) - - /** - * This function deletes files from the storage adapter, but importantly, - * it does NOT throw, because if the file is already gone, that's fine, - * no need to stop the actual db operation - * - */ - async function removeUploadedFiles( - fieldsToDelete: string[], - data: Record, - ) { - if (!data) { - console.warn('Empty data object passed to removeUploadedFiles') - return - } - - for await (const field of fieldsToDelete) { - const uploadLocation = data?.[field] - if (uploadLocation) { - try { - await storageAdapter.remove(uploadLocation) - } catch { - // Swallow the error, we don't want to stop the db operation - // It also means that if one of the files in fieldsToDelete is gone, its ok - // we still want to delete the rest of the files - } - } - } - } -} diff --git a/packages/storage/tsconfig.build.json b/packages/storage/tsconfig.build.json deleted file mode 100644 index 555d966e1d38..000000000000 --- a/packages/storage/tsconfig.build.json +++ /dev/null @@ -1,11 +0,0 @@ -// This file is here so we don't build types for tests -{ - "extends": "./tsconfig.json", - "exclude": [ - "dist", - "node_modules", - "**/__mocks__", - "**/__tests__", - "**/__typetests__" - ] -} diff --git a/packages/storage/tsconfig.json b/packages/storage/tsconfig.json deleted file mode 100644 index d8a2b7c67426..000000000000 --- a/packages/storage/tsconfig.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "extends": "../../tsconfig.compilerOption.json", - "compilerOptions": { - "moduleResolution": "NodeNext", - "module": "NodeNext", - "baseUrl": ".", - "rootDir": "src", - "outDir": "dist" - }, - "include": ["src", "prisma-override.d.ts"], - // Excluding tests (as in root compilerOption) causes types to be inaccurate in tests - // This overrides the exclude in the root compilerOption - "exclude": ["dist", "node_modules", 
"**/__mocks__"], - "references": [ - { - "path": "../project-config" - }, - { - "path": "../framework-tools" - } - ] -} diff --git a/packages/storage/tsconfig.types-cjs.json b/packages/storage/tsconfig.types-cjs.json deleted file mode 100644 index 945cefa72752..000000000000 --- a/packages/storage/tsconfig.types-cjs.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "outDir": "dist/cjs", - "tsBuildInfoFile": "./tsconfig.types-cjs.tsbuildinfo" - }, - "exclude": [ - "dist", - "node_modules", - "**/__mocks__", - "**/__tests__", // dont build types for tests - "**/__typetests__" - ] -} diff --git a/packages/storage/vitest.config.mts b/packages/storage/vitest.config.mts deleted file mode 100644 index 3ed9ddfa2e83..000000000000 --- a/packages/storage/vitest.config.mts +++ /dev/null @@ -1,26 +0,0 @@ -import path from 'path' -import { fileURLToPath } from 'url' - -import { defineConfig, configDefaults } from 'vitest/config' - -const __filename = fileURLToPath(import.meta.url) -const __dirname = path.dirname(__filename) - -export default defineConfig({ - test: { - exclude: [...configDefaults.exclude, '**/fixtures', '**/__typetests__'], - deps: { - interopDefault: false, - }, - globalSetup: ['vitest.setup.mts'], - alias: { - // We alias prisma client, otherwise you'll get "prisma client not initialized" - // Important to have the subpath first here - '@prisma/client/extension': path.resolve( - __dirname, - '../../node_modules/@prisma/client/extension.js', - ), - '@prisma/client': path.resolve(__dirname, 'src/__tests__/prisma-client'), - }, - }, -}) diff --git a/packages/storage/vitest.setup.mts b/packages/storage/vitest.setup.mts deleted file mode 100644 index 702c707e71da..000000000000 --- a/packages/storage/vitest.setup.mts +++ /dev/null @@ -1,8 +0,0 @@ -import { $ } from 'zx' - -export default async function setup() { - $.verbose = true - console.log('[setup] Setting up unit test prisma db....') - await $`npx prisma db push 
--accept-data-loss --schema ./src/__tests__/unit-test-schema.prisma` - console.log('[setup] Done! \n') -} diff --git a/packages/uploads/README.md b/packages/uploads/README.md new file mode 100644 index 000000000000..817c0918f1c8 --- /dev/null +++ b/packages/uploads/README.md @@ -0,0 +1 @@ +#RedwoodJS Uploads diff --git a/packages/uploads/graphql/README.md b/packages/uploads/graphql/README.md new file mode 100644 index 000000000000..65055aefb024 --- /dev/null +++ b/packages/uploads/graphql/README.md @@ -0,0 +1,15 @@ +# Uploads GraphQL + +The file upload solution for RedwoodJS. + +This package provides a plugin for handling file uploads in RedwoodJS applications. + +## Features + +RedwoodJS Upload handles the complexities of file uploads by: + +- [List key features here] + +## Usage + +[Provide basic usage instructions here] diff --git a/packages/uploads/graphql/build.mts b/packages/uploads/graphql/build.mts new file mode 100644 index 000000000000..16175a6725c0 --- /dev/null +++ b/packages/uploads/graphql/build.mts @@ -0,0 +1,3 @@ +import { build } from '@redwoodjs/framework-tools' + +await build() diff --git a/packages/uploads/graphql/package.json b/packages/uploads/graphql/package.json new file mode 100644 index 000000000000..6e4b3fbd8c53 --- /dev/null +++ b/packages/uploads/graphql/package.json @@ -0,0 +1,47 @@ +{ + "name": "@redwoodjs/uploads-graphql", + "version": "8.0.0", + "repository": { + "type": "git", + "url": "git+https://github.com/redwoodjs/redwood.git", + "directory": "packages/uploads/graphql" + }, + "license": "MIT", + "type": "commonjs", + "exports": { + ".": { + "default": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + } + }, + "./package.json": "./package.json" + }, + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "build": "yarn tsx ./build.mts && yarn build:types", + "build:pack": "yarn pack -o redwoodjs-uploads-graphql.tgz", + "build:types": "tsc --build --verbose 
./tsconfig.build.json", + "check:attw": "yarn rw-fwtools-attw", + "check:package": "concurrently npm:check:attw yarn:publint", + "test": "vitest run", + "test:watch": "vitest watch" + }, + "dependencies": { + "@redwoodjs/context": "workspace:*", + "@redwoodjs/graphql-server": "workspace:*", + "jsonwebtoken": "9.0.2" + }, + "devDependencies": { + "@redwoodjs/framework-tools": "workspace:*", + "nodemon": "3.1.4", + "tsx": "4.19.1", + "typescript": "5.6.2", + "vitest": "2.0.5" + }, + "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" +} diff --git a/packages/uploads/graphql/src/constants.ts b/packages/uploads/graphql/src/constants.ts new file mode 100644 index 000000000000..118791542306 --- /dev/null +++ b/packages/uploads/graphql/src/constants.ts @@ -0,0 +1,49 @@ +export const DEFAULT_UPLOAD_APP_NAME = 'RedwoodApp' +export const DEFAULT_UPLOAD_TOKEN_HEADER_NAME = 'x-rw-upload-token' +export const DEFAULT_UPLOAD_TARGET = 'RedwoodUpload' + +// set sensible defaults for content types, max file size, etc +// export const APP_NAME = 'pixeez' +// Represents where the upload is intended for to +//signify that it's for the upload functionality of your app, +// such as 'uploads'. 
+// export const UPLOAD_TARGET = 'uploads' + +// set sensible defaults for content types +export const IMAGE_FILE_TYPES = { + 'image/*': ['.jpeg', '.jpg', '.png', '.gif', '.webp'], +} + +export const VIDEO_FILE_TYPES = { + 'video/*': ['.mp4', '.mov', '.avi', '.mkv', '.webm'], +} + +export const AUDIO_FILE_TYPES = { + 'audio/*': ['.mp3', '.wav', '.ogg', '.flac', '.aac'], +} + +export const DOCUMENT_FILE_TYPES = { + 'application/*': ['.pdf', '.doc', '.docx', '.xls', '.xlsx', '.ppt', '.pptx'], +} + +export const IMAGE_CONTENT_TYPES = Object.values(IMAGE_FILE_TYPES) + .flat() + .map((ext) => `image/${ext.slice(1)}`) + +export const DOCUMENT_CONTENT_TYPES = Object.values(DOCUMENT_FILE_TYPES) + .flat() + .map((ext) => `application/${ext.slice(1)}`) + +export const VIDEO_CONTENT_TYPES = Object.values(VIDEO_FILE_TYPES) + .flat() + .map((ext) => `video/${ext.slice(1)}`) + +export const AUDIO_CONTENT_TYPES = Object.values(AUDIO_FILE_TYPES) + .flat() + .map((ext) => `audio/${ext.slice(1)}`) + +// set sensible defaults for max file size, max files, min files, etc +export const MAX_FILE_SIZE = 5 * 1024 * 1024 +export const MAX_FILES = 20 +export const MIN_FILES = 1 +export const EXPIRES_IN = '1hr' diff --git a/packages/uploads/graphql/src/index.ts b/packages/uploads/graphql/src/index.ts new file mode 100644 index 000000000000..8bea4e68e616 --- /dev/null +++ b/packages/uploads/graphql/src/index.ts @@ -0,0 +1,12 @@ +export { useRedwoodUploads } from './plugins/useRedwoodUploads' +export { createUploadToken } from './lib/createUploadToken' +export { validateUploadToken, validateFiles } from './lib/validateUploadToken' +export type { RedwoodUploadContext } from './lib/validateUploadToken' +export * from './constants' +export type { + UploadTokenPayload, + UploadsConfig, + UploadErrorMessage, + UploadErrorMessages, + RedwoodUploadsOptions, +} from './types' diff --git a/packages/uploads/graphql/src/lib/createUploadToken.ts b/packages/uploads/graphql/src/lib/createUploadToken.ts new 
file mode 100644 index 000000000000..d86bdf9932e9 --- /dev/null +++ b/packages/uploads/graphql/src/lib/createUploadToken.ts @@ -0,0 +1,50 @@ +import type { SignOptions } from 'jsonwebtoken' +import jwt from 'jsonwebtoken' + +import { context } from '@redwoodjs/context' + +import { + IMAGE_CONTENT_TYPES, + DOCUMENT_CONTENT_TYPES, + MAX_FILE_SIZE, + MAX_FILES, + MIN_FILES, + EXPIRES_IN, + DEFAULT_UPLOAD_APP_NAME, + DEFAULT_UPLOAD_TARGET, +} from '../constants' +import type { UploadsConfig, UploadTokenPayload } from '../types' + +const DEFAULT_UPLOADS_CONFIG: UploadsConfig = { + contentTypes: [...IMAGE_CONTENT_TYPES, ...DOCUMENT_CONTENT_TYPES], + maxFileSize: MAX_FILE_SIZE, + maxFiles: MAX_FILES, + minFiles: MIN_FILES, + expiresIn: EXPIRES_IN, +} + +export const createUploadToken = (payload: UploadTokenPayload) => { + const secret = process.env.UPLOAD_TOKEN_SECRET + + if (!secret) { + throw new Error('UPLOAD_TOKEN_SECRET is not set') + } + + const { operationName, ...uploadConfig } = payload + + // merge the payload with the default payload + const finalPayload = { ...DEFAULT_UPLOADS_CONFIG, ...uploadConfig } + const { expiresIn = EXPIRES_IN, ...finalPayloadWithoutExpiresIn } = + finalPayload + + const issuer = context.useRedwoodUploadAppName ?? DEFAULT_UPLOAD_APP_NAME + const audience = context.useRedwoodUploadTarget ?? 
DEFAULT_UPLOAD_TARGET + + return jwt.sign(finalPayloadWithoutExpiresIn, secret, { + algorithm: 'HS256', + audience, + issuer, + expiresIn, + subject: operationName, + } as SignOptions) +} diff --git a/packages/uploads/graphql/src/lib/validateUploadToken.ts b/packages/uploads/graphql/src/lib/validateUploadToken.ts new file mode 100644 index 000000000000..9473865b3e4d --- /dev/null +++ b/packages/uploads/graphql/src/lib/validateUploadToken.ts @@ -0,0 +1,144 @@ +import jwt from 'jsonwebtoken' + +import type { GlobalContext } from '@redwoodjs/context' +import { AuthenticationError, ValidationError } from '@redwoodjs/graphql-server' + +import type { UploadErrorMessage } from '..' +import { DEFAULT_UPLOAD_TOKEN_HEADER_NAME } from '../constants' +import type { + UploadErrorMessages, + RedwoodUploadsOptions, + UploadsConfig, +} from '../types' + +export type RedwoodUploadContext = GlobalContext & { + useRedwoodUploadErrorMessages?: UploadErrorMessages + useRedwoodUploadTokenHeaderName?: RedwoodUploadsOptions['uploadTokenHeaderName'] + useRedwoodUploadTarget?: RedwoodUploadsOptions['uploadTarget'] + useRedwoodUploadAppName?: RedwoodUploadsOptions['appName'] +} + +type ValidateUploadConditionProps = { + isConditionMet: boolean + errorMessage: UploadErrorMessage + errorMessageParams?: Record< + string, + string | number | boolean | string[] | undefined + > + // defaultMessage: string + isAuthenticationError?: boolean +} + +const validateUploadCondition = ({ + isConditionMet: condition, + errorMessage, + errorMessageParams: params = {}, + isAuthenticationError = false, +}: ValidateUploadConditionProps) => { + const UploadError = isAuthenticationError + ? 
AuthenticationError + : ValidationError + + if (condition) { + if (typeof errorMessage === 'function') { + const message = errorMessage(params) + throw new UploadError(message) + } + throw new UploadError(errorMessage) + } +} + +export const validateUploadToken = ( + context: RedwoodUploadContext, +): UploadsConfig => { + const headers = + (context.event as { headers?: Record })?.headers || {} + const { operationName } = context?.['params'] as { operationName: string } + + const uploadTokenHeaderName = + context.useRedwoodUploadTokenHeaderName ?? DEFAULT_UPLOAD_TOKEN_HEADER_NAME + const uploadToken = headers[ + uploadTokenHeaderName + ] as RedwoodUploadsOptions['uploadTokenHeaderName'] + const errorMessages = context.useRedwoodUploadErrorMessages + + validateUploadCondition({ + isConditionMet: !operationName, + errorMessage: + errorMessages?.operationNameRequired ?? 'Operation name is required', + errorMessageParams: {}, + }) + + validateUploadCondition({ + isConditionMet: !uploadToken, + errorMessage: + errorMessages?.uploadTokenRequired ?? 
'Upload token is required', + errorMessageParams: {}, + }) + + try { + if (!uploadToken || !process.env.UPLOAD_TOKEN_SECRET) { + throw new AuthenticationError('Upload token is required') + } + const decodedToken = jwt.verify( + uploadToken, + process.env.UPLOAD_TOKEN_SECRET, + { + algorithms: ['HS256'], + audience: context.useRedwoodUploadTarget, + issuer: context.useRedwoodUploadAppName, + subject: operationName, + }, + ) + + // cast to UploadsConfig because the JWT has custom claims + return decodedToken as UploadsConfig + } catch (error) { + console.error('Error validating upload token', error) + throw new AuthenticationError('Authentication failed: Invalid upload token') + } +} + +export const validateFiles = ( + files: File[], + { minFiles, maxFiles, contentTypes, maxFileSize }: UploadsConfig, + context: RedwoodUploadContext, +) => { + const fileCount = files.length + + const errorMessages = context.useRedwoodUploadErrorMessages + + validateUploadCondition({ + isConditionMet: minFiles !== undefined && fileCount < minFiles, + errorMessage: + errorMessages?.tooFewFiles ?? + `Too few files. Min ${minFiles} files required`, + errorMessageParams: { minFiles }, + }) + + validateUploadCondition({ + isConditionMet: maxFiles !== undefined && fileCount > maxFiles, + errorMessage: + errorMessages?.tooManyFiles ?? + `Too many files. Max ${maxFiles} files allowed`, + errorMessageParams: { maxFiles }, + }) + + files.forEach((file) => { + validateUploadCondition({ + isConditionMet: !!contentTypes && !contentTypes.includes(file.type), + errorMessage: + errorMessages?.invalidFileType ?? + `Invalid file type. Allowed types: ${contentTypes?.join(', ')}`, + errorMessageParams: { contentTypes }, + }) + + validateUploadCondition({ + isConditionMet: maxFileSize !== undefined && file.size > maxFileSize, + errorMessage: + errorMessages?.tooLargeFile ?? + `File size exceeds the maximum allowed size. 
Max size: ${maxFileSize} bytes`, + errorMessageParams: { maxFileSize }, + }) + }) +} diff --git a/packages/uploads/graphql/src/plugins/useRedwoodUploads.ts b/packages/uploads/graphql/src/plugins/useRedwoodUploads.ts new file mode 100644 index 000000000000..ab3654feab4b --- /dev/null +++ b/packages/uploads/graphql/src/plugins/useRedwoodUploads.ts @@ -0,0 +1,25 @@ +import type { Plugin } from 'graphql-yoga' + +import type { RedwoodGraphQLContext } from '@redwoodjs/graphql-server' + +import { DEFAULT_UPLOAD_TOKEN_HEADER_NAME } from '../constants' +import type { RedwoodUploadsOptions } from '../types' + +export const useRedwoodUploads = ( + options: RedwoodUploadsOptions, +): Plugin => { + return { + async onContextBuilding({ extendContext }) { + const { appName, uploadTarget, uploadTokenHeaderName, errorMessages } = + options + + extendContext({ + useRedwoodUploadAppName: appName, + useRedwoodUploadTarget: uploadTarget, + useRedwoodUploadTokenHeaderName: + uploadTokenHeaderName ?? DEFAULT_UPLOAD_TOKEN_HEADER_NAME, + useRedwoodUploadErrorMessages: errorMessages, + }) + }, + } +} diff --git a/packages/uploads/graphql/src/types.ts b/packages/uploads/graphql/src/types.ts new file mode 100644 index 000000000000..9463710ecb52 --- /dev/null +++ b/packages/uploads/graphql/src/types.ts @@ -0,0 +1,31 @@ +export type UploadErrorMessage = string | ((config: UploadsConfig) => string) + +export type UploadErrorMessages = { + uploadTokenRequired?: UploadErrorMessage + operationNameRequired?: UploadErrorMessage + invalidUploadToken?: UploadErrorMessage + tooFewFiles?: UploadErrorMessage + tooManyFiles?: UploadErrorMessage + tooLargeFile?: UploadErrorMessage + tooManyRequests?: UploadErrorMessage + invalidFileType?: UploadErrorMessage +} + +export type UploadsConfig = { + contentTypes?: string[] + maxFileSize?: number + maxFiles?: number + minFiles?: number + expiresIn?: string | number +} + +export type UploadTokenPayload = UploadsConfig & { + operationName: string +} + +export type 
RedwoodUploadsOptions = { + appName: string + uploadTarget?: string + uploadTokenHeaderName?: string + errorMessages?: UploadErrorMessages +} diff --git a/packages/uploads/graphql/tsconfig.build.json b/packages/uploads/graphql/tsconfig.build.json new file mode 100644 index 000000000000..efe2de23f492 --- /dev/null +++ b/packages/uploads/graphql/tsconfig.build.json @@ -0,0 +1,11 @@ +{ + "extends": "../../../tsconfig.compilerOption.json", + "compilerOptions": { + "rootDir": "src", + "outDir": "dist", + "moduleResolution": "Node16", + "module": "Node16", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src"] +} diff --git a/packages/uploads/graphql/tsconfig.json b/packages/uploads/graphql/tsconfig.json new file mode 100644 index 000000000000..52640cd8782f --- /dev/null +++ b/packages/uploads/graphql/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../../tsconfig.compilerOption.json", + "compilerOptions": { + "moduleResolution": "Node16", + "module": "Node16" + }, + "include": ["."], + "exclude": ["dist", "node_modules"] +} diff --git a/packages/uploads/web/README.md b/packages/uploads/web/README.md new file mode 100644 index 000000000000..205fd1200114 --- /dev/null +++ b/packages/uploads/web/README.md @@ -0,0 +1,119 @@ +# Uploads Web + +The file upload solution for RedwoodJS. + +## Features + +RedwoodJS Upload handles the complexities of file uploads by: + +- providing a component for file uploads +- provide a useUploadsMutation hook for uploading files with Upload Token validation of file types, sizes, etc. + +## Using the RedwoodUploadsComponent + +The RedwoodUploadsComponent is a versatile file upload component for RedwoodJS applications. It provides an easy-to-use interface for handling file uploads with customizable options and preview capabilities. 
+ +### Basic Usage: + +Import the component: + +```ts +import { RedwoodUploadsComponent } from '@redwoodjs/uploads-web' +``` + +Use the component in your JSX: + +```tsx + handleFiles(files)} +/> +``` + +### Configuration Options: + +The RedwoodUploadsComponent accepts several props to customize its behavior: + +- name: A string to identify the upload field (optional, default: 'uploads') +- className: Custom CSS class for styling +- fileConstraints: An object containing upload constraints: + - accept: File types to accept (e.g., { 'image/': ['.jpeg', '.jpg', '.png'] }) + - maxFiles: Maximum number of files allowed + - minSize: Minimum file size in bytes + - maxSize: Maximum file size in bytes + - multiple: Allow multiple file uploads +- children: Custom content to render inside the dropzone +- dropzoneContent: Custom content for the dropzone area +- messageContent: Custom message to display in the dropzone +- setFiles: Callback function to handle accepted files +- onResetFiles: Callback to provide a function for resetting files +- allowPaste: Enable pasting files (default: false) + +Additional props from react-dropzone can also be passed to further customize the dropzone behavior. + +### Using Custom Preview Components: + +The RedwoodUploadsComponent allows you to use custom components for previewing accepted files and rejected files. This is done by passing these components as children to the RedwoodUploadsComponent. + +1. Import the custom preview components: + import { PreviewFiles, PreviewFileRejections } from '@redwoodjs/uploads-web' +1. Use them within the RedwoodUploadsComponent: + +```tsx + + + + +``` + +### PreviewFiles Component: + +This component displays a list of accepted files, including image previews for image files. It uses the useRedwoodUploadsContext hook to access the list of accepted files. + +### PreviewFileRejections Component: + +This component shows a list of rejected files along with the reasons for rejection. 
It also uses the useRedwoodUploadsContext hook to access the list of file rejections. + +### Creating Custom Preview Components: + +You can create your own custom preview components by using the useRedwoodUploadsContext hook. This hook provides access to the following properties: +acceptedFiles: Array of accepted File objects +fileRejections: Array of FileRejection objects +open: Function to open the file dialog +isDragActive, isDragReject, isFocused, isDragAccept: Dropzone state indicators +Example of a custom preview component: + +```tsx +import React from 'react' +import { useRedwoodUploadsContext } from '@redwoodjs/uploads-web' +const MyCustomPreview = () => { + const { acceptedFiles } = useRedwoodUploadsContext() + return ( +
+

Accepted Files:

+
    + {acceptedFiles.map((file) => ( +
  • + {file.name} - {file.size} bytes +
  • + ))} +
+
+ ) +} +``` + +Then use it within the RedwoodUploadsComponent: + +```tsx + + + +``` + +By using these components and customization options, you can create a tailored file upload experience in your RedwoodJS application that meets your specific needs. diff --git a/packages/uploads/web/build.mts b/packages/uploads/web/build.mts new file mode 100644 index 000000000000..63a07cfe07d2 --- /dev/null +++ b/packages/uploads/web/build.mts @@ -0,0 +1,5 @@ +import { buildEsm } from '@redwoodjs/framework-tools' +import { generateTypesEsm } from '@redwoodjs/framework-tools/generateTypes' + +await buildEsm() +await generateTypesEsm() diff --git a/packages/uploads/web/package.json b/packages/uploads/web/package.json new file mode 100644 index 000000000000..421325253bee --- /dev/null +++ b/packages/uploads/web/package.json @@ -0,0 +1,48 @@ +{ + "name": "@redwoodjs/uploads-web", + "version": "8.0.0", + "repository": { + "type": "git", + "url": "git+https://github.com/redwoodjs/redwood.git", + "directory": "packages/uploads/web" + }, + "license": "MIT", + "type": "module", + "exports": { + ".": { + "import": "./dist/index.js", + "types": "./dist/index.d.ts" + }, + "./package.json": "./package.json" + }, + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "build": "yarn tsx ./build.mts && yarn build:types", + "build:pack": "yarn pack -o redwoodjs-uploads-web.tgz", + "build:types": "tsc --build --verbose ./tsconfig.build.json", + "check:attw": "yarn rw-fwtools-attw", + "check:package": "concurrently npm:check:attw yarn:publint", + "test": "vitest run", + "test:watch": "vitest watch" + }, + "dependencies": { + "@apollo/client": "3.11.1", + "@redwoodjs/web": "workspace:*", + "react-dropzone": "14.2.3", + "react-hot-toast": "2.4.1" + }, + "devDependencies": { + "@redwoodjs/framework-tools": "workspace:*", + "nodemon": "3.1.4", + "react": "19.0.0-rc-f2df5694-20240916", + "react-dom": "19.0.0-rc-f2df5694-20240916", + "tsx": "4.19.1", + 
"typescript": "5.6.2", + "vitest": "2.0.5" + }, + "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" +} diff --git a/packages/uploads/web/src/components/PreviewFileRejections.tsx b/packages/uploads/web/src/components/PreviewFileRejections.tsx new file mode 100644 index 000000000000..3b6c9fc5d15d --- /dev/null +++ b/packages/uploads/web/src/components/PreviewFileRejections.tsx @@ -0,0 +1,34 @@ +import React from 'react' + +import { formatFileSize, getReadableErrorMessage } from '../core/utils.js' + +import { useRedwoodUploadsContext } from './hooks/useRedwoodUploadsContext.js' + +export const PreviewFileRejections: React.FC = () => { + const { fileRejections } = useRedwoodUploadsContext() + const fileRejectionItems = fileRejections.map(({ file, errors }) => ( +
  • + {file.name} - {formatFileSize(file.size)} +
      + {errors.map((e) => ( +
    • + {getReadableErrorMessage(file, e.code, e.message)} +
    • + ))} +
    +
  • + )) + + return ( + <> + {fileRejections.length > 0 && ( + <> +

    + Rejected files +

    +
      {fileRejectionItems}
    + + )} + + ) +} diff --git a/packages/uploads/web/src/components/PreviewFiles.tsx b/packages/uploads/web/src/components/PreviewFiles.tsx new file mode 100644 index 000000000000..de2b237deff6 --- /dev/null +++ b/packages/uploads/web/src/components/PreviewFiles.tsx @@ -0,0 +1,43 @@ +import React from 'react' + +import { useRedwoodUploadsContext } from './hooks/useRedwoodUploadsContext.js' + +export const PreviewFiles: React.FC = () => { + const { acceptedFiles } = useRedwoodUploadsContext() + return ( + <> + {acceptedFiles.length > 0 && ( + <> +

    + Accepted Files +

    +
      + {acceptedFiles.map((file) => { + const previewUrl = URL.createObjectURL(file) + return ( +
    • + {file.type.startsWith('image/') && ( + {file.name} { + URL.revokeObjectURL(previewUrl) + }} + /> + )} + {file.name} - {file.size} bytes +
    • + ) + })} +
    + + )} + + ) +} diff --git a/packages/uploads/web/src/components/RedwoodFilePickerButton.tsx b/packages/uploads/web/src/components/RedwoodFilePickerButton.tsx new file mode 100644 index 000000000000..8e8286d63873 --- /dev/null +++ b/packages/uploads/web/src/components/RedwoodFilePickerButton.tsx @@ -0,0 +1,20 @@ +import React from 'react' + +import { useRedwoodUploadsContext } from './hooks/useRedwoodUploadsContext.js' + +interface RedwoodFilePickerButtonProps { + children: React.ReactNode + className?: string +} + +export const RedwoodFilePickerButton: React.FC< + RedwoodFilePickerButtonProps +> = ({ children, className }) => { + const { open } = useRedwoodUploadsContext() + + return ( + + ) +} diff --git a/packages/uploads/web/src/components/RedwoodUploadsComponent.tsx b/packages/uploads/web/src/components/RedwoodUploadsComponent.tsx new file mode 100644 index 000000000000..937f5ef8d738 --- /dev/null +++ b/packages/uploads/web/src/components/RedwoodUploadsComponent.tsx @@ -0,0 +1,176 @@ +import type { CSSProperties } from 'react' +import React, { useState, useCallback, useRef, useEffect, useMemo } from 'react' + +import type { FileRejection } from 'react-dropzone' +import { useDropzone } from 'react-dropzone' + +import { ACCEPTED_IMAGE_TYPES } from '../core/fileTypes.js' +import { ACCEPTED_DOCUMENT_TYPES } from '../index.js' + +import { RedwoodUploadsProvider } from './hooks/useRedwoodUploadsContext.js' +import type { RedwoodUploadComponentProps } from './types.js' + +const baseStyle: CSSProperties = { + flex: 1, + display: 'flex', + flexDirection: 'column', + alignItems: 'center', + padding: '24px', + borderWidth: 2, + borderRadius: 2, + borderColor: '#eeeeee', // Light gray + borderStyle: 'dashed', + backgroundColor: '#fafafa', // Very light gray (almost white) + color: '#000000', // Black + outline: 'none', + transition: 'border .24s ease-in-out', +} + +const focusedStyle = { + borderColor: '#2196f3', // Bright blue +} + +const acceptStyle = { + 
borderColor: '#00e676', // Bright green +} + +const rejectStyle = { + borderColor: '#ff1744', // Bright red +} + +export const RedwoodUploadsComponent: React.FC = ({ + name = 'uploads', + className, + fileConstraints, + children, + dropzoneContent, + messageContent: customMessageContent, // Rename the prop + setFiles, + onResetFiles, + allowPaste = false, + ...dropzoneOptions +}) => { + const [acceptedFiles, setAcceptedFiles] = useState([]) + const [fileRejections, setFileRejections] = useState([]) + + const { + accept = { + ...ACCEPTED_IMAGE_TYPES, + ...ACCEPTED_DOCUMENT_TYPES, + }, + maxFiles = 1, + minSize = 0, + maxSize = 1_024 * 1_024, + multiple = false, + } = fileConstraints || {} + + const onDrop = useCallback( + (acceptedFiles: File[], fileRejections: FileRejection[]) => { + setAcceptedFiles(acceptedFiles) + setFileRejections(fileRejections) + setFiles?.(acceptedFiles) + }, + [setFiles], + ) + + const dropzoneRef = useRef(null) + + const resetFiles = useCallback(() => { + setAcceptedFiles([]) + setFileRejections([]) + setFiles?.([]) + }, [setFiles]) + + // Provide the resetFiles function to the parent component + useEffect(() => { + if (onResetFiles) { + onResetFiles(resetFiles) + } + }, [onResetFiles, resetFiles]) + + const handlePaste = useCallback( + (event: React.ClipboardEvent) => { + const items = event.clipboardData?.items + if (items) { + const files = Array.from(items) + .filter((item) => item.kind === 'file') + .map((item) => item.getAsFile()) + .filter((file): file is File => file !== null) + + if (files.length > 0) { + onDrop(files, []) + } + } + }, + [onDrop], + ) + + const { + getRootProps, + getInputProps, + open, + isDragActive, + isDragReject, + isFocused, + isDragAccept, + } = useDropzone({ + onDrop, + accept, + maxFiles, + minSize, + maxSize, + multiple: maxFiles > 1 ? 
true : multiple, + ...dropzoneOptions, + }) + + const contextValue = { + open, + isDragActive, + isDragReject, + isFocused, + isDragAccept, + acceptedFiles, + fileRejections, + setAcceptedFiles, + setFileRejections, + resetFiles, + } + + const defaultMessageContent = ( +

    + {isDragActive + ? 'Drop the files here...' + : isDragReject + ? 'File type not accepted, sorry!' + : "Drag 'n' drop some files here, or click to select files"} +

    + ) + const style = useMemo( + () => ({ + ...baseStyle, + ...(isFocused ? focusedStyle : {}), + ...(isDragActive ? acceptStyle : {}), + ...(isDragAccept ? acceptStyle : {}), + ...(isDragReject ? rejectStyle : {}), + }), + [isFocused, isDragAccept, isDragActive, isDragReject], + ) as CSSProperties + + const rootProps = { + ...getRootProps({ className, ref: dropzoneRef, style }), + onPaste: allowPaste ? handlePaste : undefined, + } + + return ( + +
    +
    + + {customMessageContent || defaultMessageContent} + {dropzoneContent} +
    + {children} +
    +
    + ) +} diff --git a/packages/uploads/web/src/components/hooks/useRedwoodUploadsContext.ts b/packages/uploads/web/src/components/hooks/useRedwoodUploadsContext.ts new file mode 100644 index 000000000000..01728da64b12 --- /dev/null +++ b/packages/uploads/web/src/components/hooks/useRedwoodUploadsContext.ts @@ -0,0 +1,28 @@ +import React, { useContext } from 'react' + +import type { FileRejection } from 'react-dropzone' +export interface RedwoodUploadsContextType { + open: () => void + isDragActive: boolean + isDragReject: boolean + isFocused: boolean + isDragAccept: boolean + acceptedFiles: File[] + fileRejections: FileRejection[] + setAcceptedFiles: React.Dispatch> + setFileRejections: React.Dispatch> +} + +const RedwoodUploadsContext = React.createContext< + RedwoodUploadsContextType | undefined +>(undefined) + +export const useRedwoodUploadsContext = () => { + const context = useContext(RedwoodUploadsContext) + if (context === undefined) { + throw new Error('useDropzoneContext must be used within a DropzoneProvider') + } + return context +} + +export const RedwoodUploadsProvider = RedwoodUploadsContext.Provider diff --git a/packages/uploads/web/src/components/hooks/useUploadProgress.ts b/packages/uploads/web/src/components/hooks/useUploadProgress.ts new file mode 100644 index 000000000000..30e5fa39c2aa --- /dev/null +++ b/packages/uploads/web/src/components/hooks/useUploadProgress.ts @@ -0,0 +1,56 @@ +import { useState, useCallback } from 'react' + +import type { UseUploadsMutationOptions } from './useUploadsMutation.js' +import { + getMutationName, + getUploadTokenHeaderName, + useUploadToken, +} from './useUploadsMutation.js' + +export const useUploadProgress = ( + mutation: UseUploadsMutationOptions['mutation'], +) => { + const [progress, setProgress] = useState(0) + const [abortHandler, setAbortHandler] = useState<(() => void) | null>(null) + + const onAbortHandler = useCallback(() => { + if (abortHandler) { + abortHandler() + setProgress(0) + } + }, 
[abortHandler]) + + const fetchOptionsWithProgress = { + useUploadProgress: true, + headers: (headers: Headers) => { + return headers + }, + onProgress: (ev: ProgressEvent) => { + setProgress(ev.loaded / ev.total) + }, + onAbortPossible: (abort: () => void) => { + setAbortHandler(() => abort) + }, + } + + const mutationName = getMutationName(mutation) + const token = useUploadToken(mutationName) + const uploadTokenHeaderName = getUploadTokenHeaderName() + + const context: { + fetchOptions: typeof fetchOptionsWithProgress + headers: Record + } = { + fetchOptions: fetchOptionsWithProgress, + headers: { + [uploadTokenHeaderName]: token, + }, + } + + return { + context, + progress, + setProgress, + onAbortHandler, + } +} diff --git a/packages/uploads/web/src/components/hooks/useUploadsMutation.tsx b/packages/uploads/web/src/components/hooks/useUploadsMutation.tsx new file mode 100644 index 000000000000..28d99f482579 --- /dev/null +++ b/packages/uploads/web/src/components/hooks/useUploadsMutation.tsx @@ -0,0 +1,93 @@ +import type { + DocumentNode, + MutationHookOptions, + TypedDocumentNode, +} from '@apollo/client' +import { gql } from 'graphql-tag' + +import { useQuery, useMutation } from '@redwoodjs/web' +// Define the query to get the upload token +const GET_REDWOOD_UPLOAD_TOKEN = gql` + query GetRedwoodUploadToken($operationName: String!) 
{ + uploadToken: getRedwoodUploadToken(operationName: $operationName) { + token + } + } +` + +export type UploadTokenOptions = { + uploadTokenHeaderName?: string +} + +export type UseUploadsMutationOptions = { + mutation: DocumentNode | TypedDocumentNode + options?: MutationHookOptions + uploadTokenOptions?: UploadTokenOptions +} + +export const DEFAULT_UPLOAD_TOKEN_HEADER_NAME = 'x-rw-upload-token' + +// Function to retrieve the upload token +export const useUploadToken = (operationName: string) => { + const { data } = useQuery(GET_REDWOOD_UPLOAD_TOKEN, { + variables: { operationName }, + skip: !operationName, // Skip the query if the operation name is not available + }) + + return data?.uploadToken?.token +} + +// Function to extract the mutation name from the mutation document +export const getMutationName = ( + mutation: DocumentNode | TypedDocumentNode, +) => { + const operationDef = mutation.definitions[0] + const mutationName = + operationDef && 'name' in operationDef + ? operationDef.name?.value + : undefined + + if (!mutationName) { + throw new Error('Mutation name is required') + } + + return mutationName +} + +// Function to get the upload token header name +export const getUploadTokenHeaderName = ( + uploadTokenOptions?: UploadTokenOptions, +) => { + return ( + uploadTokenOptions?.uploadTokenHeaderName ?? 
+ DEFAULT_UPLOAD_TOKEN_HEADER_NAME + ) +} + +export const useUploadsMutation = ( + mutation: UseUploadsMutationOptions['mutation'], + options: UseUploadsMutationOptions['options'] = {}, + uploadTokenOptions: UseUploadsMutationOptions['uploadTokenOptions'] = {}, +) => { + const mutationName = getMutationName(mutation) + + // Retrieve the upload token header name using the new function + const uploadTokenHeaderName = getUploadTokenHeaderName(uploadTokenOptions) + + // Retrieve the token + const token = useUploadToken(mutationName) + + // Customize the useMutation hook to include the upload token in the headers + const result = useMutation(mutation, { + ...options, + context: { + ...options?.context, + headers: { + ...options?.context?.headers, + [uploadTokenHeaderName]: token, + }, + }, + }) + + return result +} diff --git a/packages/uploads/web/src/components/types.ts b/packages/uploads/web/src/components/types.ts new file mode 100644 index 000000000000..7af5d09b3b26 --- /dev/null +++ b/packages/uploads/web/src/components/types.ts @@ -0,0 +1,35 @@ +import type React from 'react' + +import type { Accept, DropzoneOptions } from 'react-dropzone' + +export interface FileConstraintsProps { + accept?: Accept + maxFiles?: number + minSize?: number + maxSize?: number + multiple?: boolean +} + +export interface RedwoodUploadComponentProps + extends Omit { + id?: string + key?: string + name?: string + className?: string + fileConstraints?: FileConstraintsProps + children?: React.ReactNode + dropzoneContent?: React.ReactNode + messageContent?: React.ReactNode + setFiles?: React.Dispatch> + onResetFiles?: (resetFunction: () => void) => void + allowPaste?: boolean +} + +export type MessageProp = string | ((args: MessagePropArgs) => string) + +interface MessagePropArgs { + maxFiles: number + minSize: number + maxSize: number + accept: Accept +} diff --git a/packages/uploads/web/src/core/fileTypes.ts b/packages/uploads/web/src/core/fileTypes.ts new file mode 100644 index 
000000000000..014d680c3884 --- /dev/null +++ b/packages/uploads/web/src/core/fileTypes.ts @@ -0,0 +1,34 @@ +export const ACCEPTED_IMAGE_TYPES = { + 'image/png': ['.png'], + 'image/jpeg': ['.jpeg', '.jpg'], + 'image/gif': ['.gif'], +} + +export const ACCEPTED_DOCUMENT_TYPES = { + 'application/pdf': ['.pdf'], + 'application/msword': ['.doc', '.dot'], + 'application/vnd.openxmlformats-officedocument.wordprocessingml.document': [ + '.docx', + ], + 'application/vnd.ms-excel': ['.xls', '.xlt'], + 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': [ + '.xlsx', + ], + 'application/vnd.ms-powerpoint': ['.ppt', '.pps'], + 'application/vnd.openxmlformats-officedocument.presentationml.presentation': [ + '.pptx', + ], + 'text/plain': ['.txt'], +} + +export const ACCEPTED_AUDIO_TYPES = { + 'audio/mpeg': ['.mp3'], + 'audio/wav': ['.wav'], + 'audio/ogg': ['.ogg'], +} + +export const ACCEPTED_VIDEO_TYPES = { + 'video/mp4': ['.mp4'], + 'video/x-msvideo': ['.avi'], + 'video/mpeg': ['.mpeg'], +} diff --git a/packages/uploads/web/src/core/utils.ts b/packages/uploads/web/src/core/utils.ts new file mode 100644 index 000000000000..9535fde75cf0 --- /dev/null +++ b/packages/uploads/web/src/core/utils.ts @@ -0,0 +1,32 @@ +// Error codes from react-dropzone +export const FILE_INVALID_TYPE = 'file-invalid-type' +export const FILE_TOO_LARGE = 'file-too-large' +export const FILE_TOO_SMALL = 'file-too-small' +export const TOO_MANY_FILES = 'too-many-files' + +export const formatFileSize = (bytes: number) => { + const mb = 1024 * 1024 + const kb = 1024 + return bytes >= mb + ? `${(bytes / mb).toFixed(2)} MB` + : `${(bytes / kb).toFixed(2)} KB` +} + +export const getReadableErrorMessage = ( + file: File, + code: string, + message: string, +) => { + switch (code) { + case FILE_INVALID_TYPE: + return `Invalid file type. File ${file.name} is ${file.type}.` + case FILE_TOO_LARGE: + return `File is too large. 
File ${file.name} is ${formatFileSize(file.size)}.` + case FILE_TOO_SMALL: + return `File is too small. File ${file.name} is ${formatFileSize(file.size)}.` + case TOO_MANY_FILES: + return 'Too many files uploaded' + default: + return message + } +} diff --git a/packages/uploads/web/src/index.ts b/packages/uploads/web/src/index.ts new file mode 100644 index 000000000000..eaa2df8c1185 --- /dev/null +++ b/packages/uploads/web/src/index.ts @@ -0,0 +1,13 @@ +export * from 'react-dropzone' +export type { DropzoneOptions, FileRejection } from 'react-dropzone' + +export * from './components/hooks/useRedwoodUploadsContext.js' +export * from './components/hooks/useUploadProgress.js' +export * from './components/hooks/useUploadsMutation.js' +export * from './components/PreviewFileRejections.js' +export * from './components/PreviewFiles.js' +export * from './components/RedwoodFilePickerButton.js' +export * from './components/RedwoodUploadsComponent.js' +export * from './components/types.js' +export * from './core/fileTypes.js' +export * from './core/utils.js' diff --git a/packages/uploads/web/tsconfig.build.json b/packages/uploads/web/tsconfig.build.json new file mode 100644 index 000000000000..efe2de23f492 --- /dev/null +++ b/packages/uploads/web/tsconfig.build.json @@ -0,0 +1,11 @@ +{ + "extends": "../../../tsconfig.compilerOption.json", + "compilerOptions": { + "rootDir": "src", + "outDir": "dist", + "moduleResolution": "Node16", + "module": "Node16", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src"] +} diff --git a/packages/uploads/web/tsconfig.json b/packages/uploads/web/tsconfig.json new file mode 100644 index 000000000000..52640cd8782f --- /dev/null +++ b/packages/uploads/web/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../../tsconfig.compilerOption.json", + "compilerOptions": { + "moduleResolution": "Node16", + "module": "Node16" + }, + "include": ["."], + "exclude": ["dist", "node_modules"] +} diff --git a/packages/web/package.json 
b/packages/web/package.json index 26893ded97ca..f9d2b4dc3ee1 100644 --- a/packages/web/package.json +++ b/packages/web/package.json @@ -142,7 +142,7 @@ }, "dependencies": { "@apollo/client": "3.11.1", - "@babel/runtime-corejs3": "7.25.0", + "@babel/runtime-corejs3": "7.25.7", "@redwoodjs/auth": "workspace:*", "@redwoodjs/server-store": "workspace:*", "@whatwg-node/fetch": "0.9.21", @@ -159,10 +159,10 @@ "devDependencies": { "@apollo/client-react-streaming": "0.10.0", "@arethetypeswrong/cli": "0.16.4", - "@babel/cli": "7.24.8", + "@babel/cli": "7.25.7", "@babel/core": "^7.22.20", - "@babel/plugin-transform-runtime": "7.24.7", - "@babel/runtime": "7.25.0", + "@babel/plugin-transform-runtime": "7.25.7", + "@babel/runtime": "7.25.7", "@redwoodjs/framework-tools": "workspace:*", "@redwoodjs/internal": "workspace:*", "@rollup/plugin-babel": "6.0.4", @@ -172,8 +172,8 @@ "@types/react": "^18.2.55", "@types/react-dom": "^18.2.19", "concurrently": "8.2.2", - "nodemon": "3.1.4", - "publint": "0.2.10", + "nodemon": "3.1.7", + "publint": "0.2.11", "react": "19.0.0-rc-f2df5694-20240916", "react-dom": "19.0.0-rc-f2df5694-20240916", "tstyche": "2.1.1", diff --git a/packages/web/src/apollo/index.tsx b/packages/web/src/apollo/index.tsx index 73bd87dd5a1a..3653a3fa7878 100644 --- a/packages/web/src/apollo/index.tsx +++ b/packages/web/src/apollo/index.tsx @@ -48,6 +48,7 @@ import { } from './fragmentRegistry.js' import * as SSELinkExports from './sseLink.js' import { useCache } from './useCache.js' +import { useUploadProgressFetch } from './useUploadProgressFetch.js' // Not sure why we need to import it this way for legacy builds to work const { SSELink, isSubscription, isLiveQuery } = SSELinkExports @@ -227,6 +228,9 @@ const ApolloProviderWithFetchConfig: React.FunctionComponent<{ const uploadLink: ApolloLink = createUploadLink({ uri, ...httpLinkConfig, + // We use a custom uploadProgressFetch to show progress of uploads + // if the useUploadProgress option is set in the mutation 
context options + fetch: useUploadProgressFetch as typeof fetch, // The upload link types don't match the ApolloLink types, even though it comes from Apollo // because they use ESM imports and we're using the default ones. }) as unknown as ApolloLink diff --git a/packages/web/src/apollo/useUploadProgressFetch.ts b/packages/web/src/apollo/useUploadProgressFetch.ts new file mode 100644 index 000000000000..d5e12e70b211 --- /dev/null +++ b/packages/web/src/apollo/useUploadProgressFetch.ts @@ -0,0 +1,89 @@ +const parseHeaders = (rawHeaders: string): Headers => { + const headers = new Headers() + + // Replace instances of \r\n and \n followed by at least one space or horizontal tab with a space + // https://tools.ietf.org/html/rfc7230#section-3.2 + const preProcessedHeaders = rawHeaders.replace(/\r?\n[\t ]+/g, ' ') + preProcessedHeaders.split(/\r?\n/).forEach((line: string) => { + const parts = line.split(':') + const key = parts.shift()?.trim() + if (key) { + const value = parts.join(':').trim() + headers.append(key, value) + } + }) + + return headers +} + +type OnloadOptions = { + status: number + statusText: string + headers: Headers +} & Record + +type AbortHandler = XMLHttpRequest['abort'] + +type UploadProgressFetchOptions = RequestInit & { + useUploadProgress: boolean + onProgress: (ev: ProgressEvent) => void + onAbortPossible: (abortHandler: AbortHandler) => void +} + +const uploadProgressFetch = ( + url: URL | RequestInfo, + options: UploadProgressFetchOptions, +): Promise => + new Promise((resolve, reject) => { + const xhr = new XMLHttpRequest() + xhr.onload = () => { + const opts: OnloadOptions = { + status: xhr.status, + statusText: xhr.statusText, + headers: parseHeaders(xhr.getAllResponseHeaders() || ''), + } + + opts.url = + 'responseURL' in xhr + ? xhr.responseURL + : opts.headers.get('X-Request-URL') + const body = 'response' in xhr ? 
xhr.response : (xhr as any).responseText + resolve(new Response(body, opts)) + } + xhr.onerror = () => { + reject(new TypeError('Network request failed')) + } + xhr.ontimeout = () => { + reject(new TypeError('Network request failed')) + } + xhr.open(options.method || '', url as string, true) + + Object.keys(options.headers as Headers).forEach((key) => { + const headerValue = options.headers + ? (options.headers[key as keyof HeadersInit] as string) + : '' + xhr.setRequestHeader(key, headerValue) + }) + + if (xhr.upload) { + xhr.upload.onprogress = options.onProgress + } + + options.onAbortPossible(() => xhr.abort()) + + xhr.send( + options.body as XMLHttpRequestBodyInit | Document | null | undefined, + ) + }) + +// if useUpload is true, we use our custom uploadFetch to show progress of uploads +// if useUpload is false, we use the default fetch +export const useUploadProgressFetch = ( + uri: URL | RequestInfo, + options: UploadProgressFetchOptions, +): Promise => { + if (options.useUploadProgress) { + return uploadProgressFetch(uri, options) + } + return fetch(uri, options) +} diff --git a/packages/web/src/components/cell/cellTypes.ts b/packages/web/src/components/cell/cellTypes.ts index bc6d9a8318ab..90f23fd7e018 100644 --- a/packages/web/src/components/cell/cellTypes.ts +++ b/packages/web/src/components/cell/cellTypes.ts @@ -52,11 +52,15 @@ export type CellProps< > export type CellLoadingProps = { - queryResult?: NonSuspenseCellQueryResult | SuspenseCellQueryResult + queryResult?: + | NonSuspenseCellQueryResult + | SuspenseCellQueryResult } export type CellFailureProps = { - queryResult?: NonSuspenseCellQueryResult | SuspenseCellQueryResult + queryResult?: + | NonSuspenseCellQueryResult + | SuspenseCellQueryResult error?: QueryOperationResult['error'] | Error // for tests and storybook /** @@ -106,7 +110,9 @@ export type CellSuccessProps< TData = any, TVariables extends OperationVariables = any, > = { - queryResult?: NonSuspenseCellQueryResult | 
SuspenseCellQueryResult + queryResult?: + | NonSuspenseCellQueryResult + | SuspenseCellQueryResult updating?: boolean } & A.Compute> // pre-computing makes the types more readable on hover @@ -199,8 +205,9 @@ export type SuspendingSuccessProps = React.PropsWithChildren< export type NonSuspenseCellQueryResult< TVariables extends OperationVariables = any, + TData = any, > = Partial< - Omit, 'loading' | 'error' | 'data'> + Omit, 'loading' | 'error' | 'data'> > // We call this queryResult in createCell, sadly a very overloaded term diff --git a/tsconfig.json b/tsconfig.json index 9405acbc156e..8161e71f0e3b 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -11,7 +11,9 @@ { "path": "packages/prerender" }, { "path": "packages/graphql-server" }, { "path": "packages/forms" }, - { "path": "packages/codemods" } + { "path": "packages/codemods" }, + { "path": "packages/uploads" }, + { "path": "packages/storage" } ], "files": [] } diff --git a/yarn.lock b/yarn.lock index 7941e9252c3c..f255fe3c51ef 100644 --- a/yarn.lock +++ b/yarn.lock @@ -292,6 +292,731 @@ __metadata: languageName: node linkType: hard +"@aws-crypto/crc32@npm:5.2.0": + version: 5.2.0 + resolution: "@aws-crypto/crc32@npm:5.2.0" + dependencies: + "@aws-crypto/util": "npm:^5.2.0" + "@aws-sdk/types": "npm:^3.222.0" + tslib: "npm:^2.6.2" + checksum: 10c0/eab9581d3363af5ea498ae0e72de792f54d8890360e14a9d8261b7b5c55ebe080279fb2556e07994d785341cdaa99ab0b1ccf137832b53b5904cd6928f2b094b + languageName: node + linkType: hard + +"@aws-crypto/crc32c@npm:5.2.0": + version: 5.2.0 + resolution: "@aws-crypto/crc32c@npm:5.2.0" + dependencies: + "@aws-crypto/util": "npm:^5.2.0" + "@aws-sdk/types": "npm:^3.222.0" + tslib: "npm:^2.6.2" + checksum: 10c0/223efac396cdebaf5645568fa9a38cd0c322c960ae1f4276bedfe2e1031d0112e49d7d39225d386354680ecefae29f39af469a84b2ddfa77cb6692036188af77 + languageName: node + linkType: hard + +"@aws-crypto/sha1-browser@npm:5.2.0": + version: 5.2.0 + resolution: "@aws-crypto/sha1-browser@npm:5.2.0" + 
dependencies: + "@aws-crypto/supports-web-crypto": "npm:^5.2.0" + "@aws-crypto/util": "npm:^5.2.0" + "@aws-sdk/types": "npm:^3.222.0" + "@aws-sdk/util-locate-window": "npm:^3.0.0" + "@smithy/util-utf8": "npm:^2.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/51fed0bf078c10322d910af179871b7d299dde5b5897873ffbeeb036f427e5d11d23db9794439226544b73901920fd19f4d86bbc103ed73cc0cfdea47a83c6ac + languageName: node + linkType: hard + +"@aws-crypto/sha256-browser@npm:5.2.0": + version: 5.2.0 + resolution: "@aws-crypto/sha256-browser@npm:5.2.0" + dependencies: + "@aws-crypto/sha256-js": "npm:^5.2.0" + "@aws-crypto/supports-web-crypto": "npm:^5.2.0" + "@aws-crypto/util": "npm:^5.2.0" + "@aws-sdk/types": "npm:^3.222.0" + "@aws-sdk/util-locate-window": "npm:^3.0.0" + "@smithy/util-utf8": "npm:^2.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/05f6d256794df800fe9aef5f52f2ac7415f7f3117d461f85a6aecaa4e29e91527b6fd503681a17136fa89e9dd3d916e9c7e4cfb5eba222875cb6c077bdc1d00d + languageName: node + linkType: hard + +"@aws-crypto/sha256-js@npm:5.2.0, @aws-crypto/sha256-js@npm:^5.2.0": + version: 5.2.0 + resolution: "@aws-crypto/sha256-js@npm:5.2.0" + dependencies: + "@aws-crypto/util": "npm:^5.2.0" + "@aws-sdk/types": "npm:^3.222.0" + tslib: "npm:^2.6.2" + checksum: 10c0/6c48701f8336341bb104dfde3d0050c89c288051f6b5e9bdfeb8091cf3ffc86efcd5c9e6ff2a4a134406b019c07aca9db608128f8d9267c952578a3108db9fd1 + languageName: node + linkType: hard + +"@aws-crypto/supports-web-crypto@npm:^5.2.0": + version: 5.2.0 + resolution: "@aws-crypto/supports-web-crypto@npm:5.2.0" + dependencies: + tslib: "npm:^2.6.2" + checksum: 10c0/4d2118e29d68ca3f5947f1e37ce1fbb3239a0c569cc938cdc8ab8390d595609b5caf51a07c9e0535105b17bf5c52ea256fed705a07e9681118120ab64ee73af2 + languageName: node + linkType: hard + +"@aws-crypto/util@npm:^5.2.0": + version: 5.2.0 + resolution: "@aws-crypto/util@npm:5.2.0" + dependencies: + "@aws-sdk/types": "npm:^3.222.0" + "@smithy/util-utf8": "npm:^2.0.0" + tslib: "npm:^2.6.2" + checksum: 
10c0/0362d4c197b1fd64b423966945130207d1fe23e1bb2878a18e361f7743c8d339dad3f8729895a29aa34fff6a86c65f281cf5167c4bf253f21627ae80b6dd2951 + languageName: node + linkType: hard + +"@aws-sdk/client-s3@npm:3.663.0": + version: 3.663.0 + resolution: "@aws-sdk/client-s3@npm:3.663.0" + dependencies: + "@aws-crypto/sha1-browser": "npm:5.2.0" + "@aws-crypto/sha256-browser": "npm:5.2.0" + "@aws-crypto/sha256-js": "npm:5.2.0" + "@aws-sdk/client-sso-oidc": "npm:3.662.0" + "@aws-sdk/client-sts": "npm:3.662.0" + "@aws-sdk/core": "npm:3.662.0" + "@aws-sdk/credential-provider-node": "npm:3.662.0" + "@aws-sdk/middleware-bucket-endpoint": "npm:3.662.0" + "@aws-sdk/middleware-expect-continue": "npm:3.662.0" + "@aws-sdk/middleware-flexible-checksums": "npm:3.662.0" + "@aws-sdk/middleware-host-header": "npm:3.662.0" + "@aws-sdk/middleware-location-constraint": "npm:3.662.0" + "@aws-sdk/middleware-logger": "npm:3.662.0" + "@aws-sdk/middleware-recursion-detection": "npm:3.662.0" + "@aws-sdk/middleware-sdk-s3": "npm:3.662.0" + "@aws-sdk/middleware-ssec": "npm:3.662.0" + "@aws-sdk/middleware-user-agent": "npm:3.662.0" + "@aws-sdk/region-config-resolver": "npm:3.662.0" + "@aws-sdk/signature-v4-multi-region": "npm:3.662.0" + "@aws-sdk/types": "npm:3.662.0" + "@aws-sdk/util-endpoints": "npm:3.662.0" + "@aws-sdk/util-user-agent-browser": "npm:3.662.0" + "@aws-sdk/util-user-agent-node": "npm:3.662.0" + "@aws-sdk/xml-builder": "npm:3.662.0" + "@smithy/config-resolver": "npm:^3.0.9" + "@smithy/core": "npm:^2.4.7" + "@smithy/eventstream-serde-browser": "npm:^3.0.10" + "@smithy/eventstream-serde-config-resolver": "npm:^3.0.7" + "@smithy/eventstream-serde-node": "npm:^3.0.9" + "@smithy/fetch-http-handler": "npm:^3.2.9" + "@smithy/hash-blob-browser": "npm:^3.1.6" + "@smithy/hash-node": "npm:^3.0.7" + "@smithy/hash-stream-node": "npm:^3.1.6" + "@smithy/invalid-dependency": "npm:^3.0.7" + "@smithy/md5-js": "npm:^3.0.7" + "@smithy/middleware-content-length": "npm:^3.0.9" + "@smithy/middleware-endpoint": 
"npm:^3.1.4" + "@smithy/middleware-retry": "npm:^3.0.22" + "@smithy/middleware-serde": "npm:^3.0.7" + "@smithy/middleware-stack": "npm:^3.0.7" + "@smithy/node-config-provider": "npm:^3.1.8" + "@smithy/node-http-handler": "npm:^3.2.4" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/smithy-client": "npm:^3.3.6" + "@smithy/types": "npm:^3.5.0" + "@smithy/url-parser": "npm:^3.0.7" + "@smithy/util-base64": "npm:^3.0.0" + "@smithy/util-body-length-browser": "npm:^3.0.0" + "@smithy/util-body-length-node": "npm:^3.0.0" + "@smithy/util-defaults-mode-browser": "npm:^3.0.22" + "@smithy/util-defaults-mode-node": "npm:^3.0.22" + "@smithy/util-endpoints": "npm:^2.1.3" + "@smithy/util-middleware": "npm:^3.0.7" + "@smithy/util-retry": "npm:^3.0.7" + "@smithy/util-stream": "npm:^3.1.9" + "@smithy/util-utf8": "npm:^3.0.0" + "@smithy/util-waiter": "npm:^3.1.6" + tslib: "npm:^2.6.2" + checksum: 10c0/945245707a847f1b6f36a9a04cccb7ef46a4a92062507f605716cc0e44b43afd8ede8d8db1a881d22f7a421ea26b6855b1182bd148c415620ecb55f69b9682e6 + languageName: node + linkType: hard + +"@aws-sdk/client-sso-oidc@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/client-sso-oidc@npm:3.662.0" + dependencies: + "@aws-crypto/sha256-browser": "npm:5.2.0" + "@aws-crypto/sha256-js": "npm:5.2.0" + "@aws-sdk/core": "npm:3.662.0" + "@aws-sdk/credential-provider-node": "npm:3.662.0" + "@aws-sdk/middleware-host-header": "npm:3.662.0" + "@aws-sdk/middleware-logger": "npm:3.662.0" + "@aws-sdk/middleware-recursion-detection": "npm:3.662.0" + "@aws-sdk/middleware-user-agent": "npm:3.662.0" + "@aws-sdk/region-config-resolver": "npm:3.662.0" + "@aws-sdk/types": "npm:3.662.0" + "@aws-sdk/util-endpoints": "npm:3.662.0" + "@aws-sdk/util-user-agent-browser": "npm:3.662.0" + "@aws-sdk/util-user-agent-node": "npm:3.662.0" + "@smithy/config-resolver": "npm:^3.0.9" + "@smithy/core": "npm:^2.4.7" + "@smithy/fetch-http-handler": "npm:^3.2.9" + "@smithy/hash-node": "npm:^3.0.7" + "@smithy/invalid-dependency": "npm:^3.0.7" + 
"@smithy/middleware-content-length": "npm:^3.0.9" + "@smithy/middleware-endpoint": "npm:^3.1.4" + "@smithy/middleware-retry": "npm:^3.0.22" + "@smithy/middleware-serde": "npm:^3.0.7" + "@smithy/middleware-stack": "npm:^3.0.7" + "@smithy/node-config-provider": "npm:^3.1.8" + "@smithy/node-http-handler": "npm:^3.2.4" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/smithy-client": "npm:^3.3.6" + "@smithy/types": "npm:^3.5.0" + "@smithy/url-parser": "npm:^3.0.7" + "@smithy/util-base64": "npm:^3.0.0" + "@smithy/util-body-length-browser": "npm:^3.0.0" + "@smithy/util-body-length-node": "npm:^3.0.0" + "@smithy/util-defaults-mode-browser": "npm:^3.0.22" + "@smithy/util-defaults-mode-node": "npm:^3.0.22" + "@smithy/util-endpoints": "npm:^2.1.3" + "@smithy/util-middleware": "npm:^3.0.7" + "@smithy/util-retry": "npm:^3.0.7" + "@smithy/util-utf8": "npm:^3.0.0" + tslib: "npm:^2.6.2" + peerDependencies: + "@aws-sdk/client-sts": ^3.662.0 + checksum: 10c0/8da2e46cba4a308f0b0adc77fb1409126e41cbe33b4e1f5e9f041c77182b079732368ba292188021e00eacee796c3ebe1b18665a72b0a33272f43590fb10bb70 + languageName: node + linkType: hard + +"@aws-sdk/client-sso@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/client-sso@npm:3.662.0" + dependencies: + "@aws-crypto/sha256-browser": "npm:5.2.0" + "@aws-crypto/sha256-js": "npm:5.2.0" + "@aws-sdk/core": "npm:3.662.0" + "@aws-sdk/middleware-host-header": "npm:3.662.0" + "@aws-sdk/middleware-logger": "npm:3.662.0" + "@aws-sdk/middleware-recursion-detection": "npm:3.662.0" + "@aws-sdk/middleware-user-agent": "npm:3.662.0" + "@aws-sdk/region-config-resolver": "npm:3.662.0" + "@aws-sdk/types": "npm:3.662.0" + "@aws-sdk/util-endpoints": "npm:3.662.0" + "@aws-sdk/util-user-agent-browser": "npm:3.662.0" + "@aws-sdk/util-user-agent-node": "npm:3.662.0" + "@smithy/config-resolver": "npm:^3.0.9" + "@smithy/core": "npm:^2.4.7" + "@smithy/fetch-http-handler": "npm:^3.2.9" + "@smithy/hash-node": "npm:^3.0.7" + "@smithy/invalid-dependency": "npm:^3.0.7" + 
"@smithy/middleware-content-length": "npm:^3.0.9" + "@smithy/middleware-endpoint": "npm:^3.1.4" + "@smithy/middleware-retry": "npm:^3.0.22" + "@smithy/middleware-serde": "npm:^3.0.7" + "@smithy/middleware-stack": "npm:^3.0.7" + "@smithy/node-config-provider": "npm:^3.1.8" + "@smithy/node-http-handler": "npm:^3.2.4" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/smithy-client": "npm:^3.3.6" + "@smithy/types": "npm:^3.5.0" + "@smithy/url-parser": "npm:^3.0.7" + "@smithy/util-base64": "npm:^3.0.0" + "@smithy/util-body-length-browser": "npm:^3.0.0" + "@smithy/util-body-length-node": "npm:^3.0.0" + "@smithy/util-defaults-mode-browser": "npm:^3.0.22" + "@smithy/util-defaults-mode-node": "npm:^3.0.22" + "@smithy/util-endpoints": "npm:^2.1.3" + "@smithy/util-middleware": "npm:^3.0.7" + "@smithy/util-retry": "npm:^3.0.7" + "@smithy/util-utf8": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/3717b9425c74edeff722f41f4bddb2e1b44d4a020998b9e3a384a45576a0a36954b3b98d464dcb3505aa8c824c522e5fa4fd26fade24b0c10a593c0ecc8e7d7e + languageName: node + linkType: hard + +"@aws-sdk/client-sts@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/client-sts@npm:3.662.0" + dependencies: + "@aws-crypto/sha256-browser": "npm:5.2.0" + "@aws-crypto/sha256-js": "npm:5.2.0" + "@aws-sdk/client-sso-oidc": "npm:3.662.0" + "@aws-sdk/core": "npm:3.662.0" + "@aws-sdk/credential-provider-node": "npm:3.662.0" + "@aws-sdk/middleware-host-header": "npm:3.662.0" + "@aws-sdk/middleware-logger": "npm:3.662.0" + "@aws-sdk/middleware-recursion-detection": "npm:3.662.0" + "@aws-sdk/middleware-user-agent": "npm:3.662.0" + "@aws-sdk/region-config-resolver": "npm:3.662.0" + "@aws-sdk/types": "npm:3.662.0" + "@aws-sdk/util-endpoints": "npm:3.662.0" + "@aws-sdk/util-user-agent-browser": "npm:3.662.0" + "@aws-sdk/util-user-agent-node": "npm:3.662.0" + "@smithy/config-resolver": "npm:^3.0.9" + "@smithy/core": "npm:^2.4.7" + "@smithy/fetch-http-handler": "npm:^3.2.9" + "@smithy/hash-node": "npm:^3.0.7" + 
"@smithy/invalid-dependency": "npm:^3.0.7" + "@smithy/middleware-content-length": "npm:^3.0.9" + "@smithy/middleware-endpoint": "npm:^3.1.4" + "@smithy/middleware-retry": "npm:^3.0.22" + "@smithy/middleware-serde": "npm:^3.0.7" + "@smithy/middleware-stack": "npm:^3.0.7" + "@smithy/node-config-provider": "npm:^3.1.8" + "@smithy/node-http-handler": "npm:^3.2.4" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/smithy-client": "npm:^3.3.6" + "@smithy/types": "npm:^3.5.0" + "@smithy/url-parser": "npm:^3.0.7" + "@smithy/util-base64": "npm:^3.0.0" + "@smithy/util-body-length-browser": "npm:^3.0.0" + "@smithy/util-body-length-node": "npm:^3.0.0" + "@smithy/util-defaults-mode-browser": "npm:^3.0.22" + "@smithy/util-defaults-mode-node": "npm:^3.0.22" + "@smithy/util-endpoints": "npm:^2.1.3" + "@smithy/util-middleware": "npm:^3.0.7" + "@smithy/util-retry": "npm:^3.0.7" + "@smithy/util-utf8": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/36e8a76a8d95ecc11cff35b5071ab2040978854e86013822bda5c1a78bce3a30b5fa3842a28aec850457f782d2d793445642777eb05619948dd17702a5418623 + languageName: node + linkType: hard + +"@aws-sdk/core@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/core@npm:3.662.0" + dependencies: + "@smithy/core": "npm:^2.4.7" + "@smithy/node-config-provider": "npm:^3.1.8" + "@smithy/property-provider": "npm:^3.1.7" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/signature-v4": "npm:^4.2.0" + "@smithy/smithy-client": "npm:^3.3.6" + "@smithy/types": "npm:^3.5.0" + "@smithy/util-middleware": "npm:^3.0.7" + fast-xml-parser: "npm:4.4.1" + tslib: "npm:^2.6.2" + checksum: 10c0/4cbc9f0a230e26d33d92e0816ba72c2f123bcbe805bd2e5b6487e479d97566d57a7274e509da72d2d4eba6aa8a796f794ee0022225150068029a68e5457802fb + languageName: node + linkType: hard + +"@aws-sdk/credential-provider-env@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/credential-provider-env@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@smithy/property-provider": 
"npm:^3.1.7" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/001b340d1e04f6ae9662d3ffcd12ed79a21dea1c3f5e89ae3a7595a4d6812e8e73aab1dcd082cc1bfbf31e71dd0e3af191e4b7659bc5f6a56ba8bfa45bb3bdae + languageName: node + linkType: hard + +"@aws-sdk/credential-provider-http@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/credential-provider-http@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@smithy/fetch-http-handler": "npm:^3.2.9" + "@smithy/node-http-handler": "npm:^3.2.4" + "@smithy/property-provider": "npm:^3.1.7" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/smithy-client": "npm:^3.3.6" + "@smithy/types": "npm:^3.5.0" + "@smithy/util-stream": "npm:^3.1.9" + tslib: "npm:^2.6.2" + checksum: 10c0/2671078c877f7b4dd2bd36fd1f543e08c2f3ff3a4c5499036d81a735a343cb98759d3d8b9ad184b22c42676ada0b3441e6d02d2e9abb24e933b90986671507eb + languageName: node + linkType: hard + +"@aws-sdk/credential-provider-ini@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/credential-provider-ini@npm:3.662.0" + dependencies: + "@aws-sdk/credential-provider-env": "npm:3.662.0" + "@aws-sdk/credential-provider-http": "npm:3.662.0" + "@aws-sdk/credential-provider-process": "npm:3.662.0" + "@aws-sdk/credential-provider-sso": "npm:3.662.0" + "@aws-sdk/credential-provider-web-identity": "npm:3.662.0" + "@aws-sdk/types": "npm:3.662.0" + "@smithy/credential-provider-imds": "npm:^3.2.4" + "@smithy/property-provider": "npm:^3.1.7" + "@smithy/shared-ini-file-loader": "npm:^3.1.8" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + peerDependencies: + "@aws-sdk/client-sts": ^3.662.0 + checksum: 10c0/347214a343460a55bde1ea35765f57db2a226a6528ed564d297419ad1f91307f5e424e2d9202cda376bf32f12fc699742f3d43ecdae24642d8163ca880b68f9f + languageName: node + linkType: hard + +"@aws-sdk/credential-provider-node@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/credential-provider-node@npm:3.662.0" + dependencies: + 
"@aws-sdk/credential-provider-env": "npm:3.662.0" + "@aws-sdk/credential-provider-http": "npm:3.662.0" + "@aws-sdk/credential-provider-ini": "npm:3.662.0" + "@aws-sdk/credential-provider-process": "npm:3.662.0" + "@aws-sdk/credential-provider-sso": "npm:3.662.0" + "@aws-sdk/credential-provider-web-identity": "npm:3.662.0" + "@aws-sdk/types": "npm:3.662.0" + "@smithy/credential-provider-imds": "npm:^3.2.4" + "@smithy/property-provider": "npm:^3.1.7" + "@smithy/shared-ini-file-loader": "npm:^3.1.8" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/904842a2da5f066debb3973a6b3b4ff09959cfdba6857bd9cfff3046445c02a937ebf3bccff7d43e5cdcb84b952dce7923807d6993d0020101115e594d202391 + languageName: node + linkType: hard + +"@aws-sdk/credential-provider-process@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/credential-provider-process@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@smithy/property-provider": "npm:^3.1.7" + "@smithy/shared-ini-file-loader": "npm:^3.1.8" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/987c8271d2dccc3a3c1a18c3232e0d7df44342eb984ef461df4ea192c7414d4fa18c9b9250e070176aecae73bcd6fb2f1137a07385f0519795c4c5be53e52d1e + languageName: node + linkType: hard + +"@aws-sdk/credential-provider-sso@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/credential-provider-sso@npm:3.662.0" + dependencies: + "@aws-sdk/client-sso": "npm:3.662.0" + "@aws-sdk/token-providers": "npm:3.662.0" + "@aws-sdk/types": "npm:3.662.0" + "@smithy/property-provider": "npm:^3.1.7" + "@smithy/shared-ini-file-loader": "npm:^3.1.8" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/f1e60f41dbf88b46c39d9c5ff8e1b171220360189e504b16b660f18334eaa42162f4cc7f43a0b2710b33457f0e84db69f46d7097dcb1c769a4cb69d4a539f3af + languageName: node + linkType: hard + +"@aws-sdk/credential-provider-web-identity@npm:3.662.0": + version: 3.662.0 + resolution: 
"@aws-sdk/credential-provider-web-identity@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@smithy/property-provider": "npm:^3.1.7" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + peerDependencies: + "@aws-sdk/client-sts": ^3.662.0 + checksum: 10c0/568cc4ff03eb71e0a1466ef76f84d4884b6cb647800766d1e27892d8f1fa4f1e154aa2680463bb74d8452aaf3db7efa49e736e86f0b55d6914d7ca6b4102bd04 + languageName: node + linkType: hard + +"@aws-sdk/lib-storage@npm:3.663.0": + version: 3.663.0 + resolution: "@aws-sdk/lib-storage@npm:3.663.0" + dependencies: + "@smithy/abort-controller": "npm:^3.1.5" + "@smithy/middleware-endpoint": "npm:^3.1.4" + "@smithy/smithy-client": "npm:^3.3.6" + buffer: "npm:5.6.0" + events: "npm:3.3.0" + stream-browserify: "npm:3.0.0" + tslib: "npm:^2.6.2" + peerDependencies: + "@aws-sdk/client-s3": ^3.663.0 + checksum: 10c0/420da119e5e5362b39e43ef293a89709c9ae9a48e5bd2086d0cde836a9a03f2e54572f5d1ccdbdbf5b6c66cab169e3e04be95649f155adebcc8cbdef8ee22e99 + languageName: node + linkType: hard + +"@aws-sdk/middleware-bucket-endpoint@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/middleware-bucket-endpoint@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@aws-sdk/util-arn-parser": "npm:3.568.0" + "@smithy/node-config-provider": "npm:^3.1.8" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/types": "npm:^3.5.0" + "@smithy/util-config-provider": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/836248328b4a78566dc723b23a367263219b039ff23d38aeb812212cb6bdd8b76853045528a65d37cd82b111fd997c4685e3c9672ff952ca1040321a717786c1 + languageName: node + linkType: hard + +"@aws-sdk/middleware-expect-continue@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/middleware-expect-continue@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 
10c0/a179c977610928b50c3b4248b037bd2ae35bf723ad904d3d8fd6761885da43adf5761e2f2a6b61ebbe75700111714f2dc70a1b59cef53a536bb329c8d99d3c8f + languageName: node + linkType: hard + +"@aws-sdk/middleware-flexible-checksums@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/middleware-flexible-checksums@npm:3.662.0" + dependencies: + "@aws-crypto/crc32": "npm:5.2.0" + "@aws-crypto/crc32c": "npm:5.2.0" + "@aws-sdk/types": "npm:3.662.0" + "@smithy/is-array-buffer": "npm:^3.0.0" + "@smithy/node-config-provider": "npm:^3.1.8" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/types": "npm:^3.5.0" + "@smithy/util-middleware": "npm:^3.0.7" + "@smithy/util-utf8": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/418b2b76b4cbda28c5115443cc25a818bc289dd008315b772105f82ce6fd7019a86063398750be3cf7ae0cfb8ccb2722962511b987592f989739a0e80dee8f1d + languageName: node + linkType: hard + +"@aws-sdk/middleware-host-header@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/middleware-host-header@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/076c42bd9f95950546e930189dacdf65643a922cd2844751cce6eb66c56ce6657d525ac31dfb7fb6a8a7110693a6dd34f727c89890e88aec65c6ed80cfbdd0b7 + languageName: node + linkType: hard + +"@aws-sdk/middleware-location-constraint@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/middleware-location-constraint@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/7845fd4b14d58ff1a2520f0c61c975cd69de6d7949c68ece08a647bd07133cbd07d27740b50ad9fd4f0181eadd71ad156c642d52ff75cfd0a03b28c3419ca885 + languageName: node + linkType: hard + +"@aws-sdk/middleware-logger@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/middleware-logger@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + 
checksum: 10c0/0d15b9194285be80d5582d21dcb799f23e99613ac0cb3612b84693b26a52a62cef64fd7b0d954da43d0137f4b82e2e32eb1af8936485df0b3f1f74468bbc43b9 + languageName: node + linkType: hard + +"@aws-sdk/middleware-recursion-detection@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/middleware-recursion-detection@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/85ad0a284b418a54eb83ca66d51406fa9ad174c01ce2919ae5e932f733d0ae897a11d13332e59abbfe1f8c4f2857b56a38b794195583bfb0b560a4432c95e7cf + languageName: node + linkType: hard + +"@aws-sdk/middleware-sdk-s3@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/middleware-sdk-s3@npm:3.662.0" + dependencies: + "@aws-sdk/core": "npm:3.662.0" + "@aws-sdk/types": "npm:3.662.0" + "@aws-sdk/util-arn-parser": "npm:3.568.0" + "@smithy/core": "npm:^2.4.7" + "@smithy/node-config-provider": "npm:^3.1.8" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/signature-v4": "npm:^4.2.0" + "@smithy/smithy-client": "npm:^3.3.6" + "@smithy/types": "npm:^3.5.0" + "@smithy/util-config-provider": "npm:^3.0.0" + "@smithy/util-middleware": "npm:^3.0.7" + "@smithy/util-stream": "npm:^3.1.9" + "@smithy/util-utf8": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/513c129d2e0d96c26145a3b999bd6504b9fb13687c1cfe7583aa53ccb9ab99342ee4eb6d52a12763c3e407d891dfeee6eeb73562d9b5447229c8154667e0d1b6 + languageName: node + linkType: hard + +"@aws-sdk/middleware-ssec@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/middleware-ssec@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/1a0f7291882b3948af8ec26f9a9cf68ff19077eac0ef83939b94af930fc079aece861390b2497d0518206c2f524a4f41981f395d42fc342717839e4bee8e49e4 + languageName: node + linkType: hard + +"@aws-sdk/middleware-user-agent@npm:3.662.0": + version: 3.662.0 + resolution: 
"@aws-sdk/middleware-user-agent@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@aws-sdk/util-endpoints": "npm:3.662.0" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/b6888ceaba4abf65d705d97778ac440ec09228dd49dbc3eb80402a51482e085b366ec71e91b6f68ea3ee563d734dcd1483f863e94fe1be1f3626ed37c55ccb0f + languageName: node + linkType: hard + +"@aws-sdk/region-config-resolver@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/region-config-resolver@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@smithy/node-config-provider": "npm:^3.1.8" + "@smithy/types": "npm:^3.5.0" + "@smithy/util-config-provider": "npm:^3.0.0" + "@smithy/util-middleware": "npm:^3.0.7" + tslib: "npm:^2.6.2" + checksum: 10c0/d73eea8ce455a54f96797c9d6f53bb187094685391c9ce17900df34f38ac4aee80342b41513f664f65472ca1fde84dd6a61dec93cde4678e7300438f535c1439 + languageName: node + linkType: hard + +"@aws-sdk/s3-request-presigner@npm:3.663.0": + version: 3.663.0 + resolution: "@aws-sdk/s3-request-presigner@npm:3.663.0" + dependencies: + "@aws-sdk/signature-v4-multi-region": "npm:3.662.0" + "@aws-sdk/types": "npm:3.662.0" + "@aws-sdk/util-format-url": "npm:3.662.0" + "@smithy/middleware-endpoint": "npm:^3.1.4" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/smithy-client": "npm:^3.3.6" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/87900d150bb1d8e85fe399f5a217a3388ce502b643a72268ce266a8e11dc2c9b164de0c375269132f421557baa9fc115d770f6b1f602822c97d1aefed03f5442 + languageName: node + linkType: hard + +"@aws-sdk/signature-v4-multi-region@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/signature-v4-multi-region@npm:3.662.0" + dependencies: + "@aws-sdk/middleware-sdk-s3": "npm:3.662.0" + "@aws-sdk/types": "npm:3.662.0" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/signature-v4": "npm:^4.2.0" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 
10c0/4282de6f344bcf98723be6058be1c5a795a08fe5ca9ce65a59e10779847d8b203321e011e00e44a8812f6c59045707588d3fd616d07299924c7134f9a81b7cff + languageName: node + linkType: hard + +"@aws-sdk/token-providers@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/token-providers@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@smithy/property-provider": "npm:^3.1.7" + "@smithy/shared-ini-file-loader": "npm:^3.1.8" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + peerDependencies: + "@aws-sdk/client-sso-oidc": ^3.662.0 + checksum: 10c0/45fcff8ff38500e1310372aa1c632943acee0e8f7c6652cb6ceca03d08135c35250d5f010120b6d52bcd407400602b0edcb9763ef0de4737e62cf9f54e575275 + languageName: node + linkType: hard + +"@aws-sdk/types@npm:3.662.0, @aws-sdk/types@npm:^3.222.0": + version: 3.662.0 + resolution: "@aws-sdk/types@npm:3.662.0" + dependencies: + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/c6df992bc3f58c6fe91228c59042a5bbeec1353eb99af4ec23e380432c7d3ed3d3d36a3fa98d2d0744bcf806645df45a2d74c0d75acee59e3125485bb45d500a + languageName: node + linkType: hard + +"@aws-sdk/util-arn-parser@npm:3.568.0": + version: 3.568.0 + resolution: "@aws-sdk/util-arn-parser@npm:3.568.0" + dependencies: + tslib: "npm:^2.6.2" + checksum: 10c0/4e6168b86a1ff4509f25b56e473c95bdcc0ecbaedcded29cbbd500eb7c156de63f2426282cd50489ac7f321a990056349974730f9e27ac3fe872ba3573b09fb6 + languageName: node + linkType: hard + +"@aws-sdk/util-endpoints@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/util-endpoints@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@smithy/types": "npm:^3.5.0" + "@smithy/util-endpoints": "npm:^2.1.3" + tslib: "npm:^2.6.2" + checksum: 10c0/d360a4a2ed44215a6ea8675eb8b6439620411a2469a7551541bf1dd056be838a7b1192c6600ba66e0b22633ee368fcd74d8b7663a5a37f1f26253c2c9d47fc77 + languageName: node + linkType: hard + +"@aws-sdk/util-format-url@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/util-format-url@npm:3.662.0" 
+ dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@smithy/querystring-builder": "npm:^3.0.7" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/622198083087cdc205e2c93f12010272cf3da9d9624343cdcafe70201e986aa399d855b6c3c0e8703fa61bf7efa459d92fe9a97cca55d8c06c1d5c98c61738fc + languageName: node + linkType: hard + +"@aws-sdk/util-locate-window@npm:^3.0.0": + version: 3.568.0 + resolution: "@aws-sdk/util-locate-window@npm:3.568.0" + dependencies: + tslib: "npm:^2.6.2" + checksum: 10c0/cb1d0919498206fe266542a635cd05909456a06f007a6a550ff897a01390b239e51c2a50e47509e23c179f8df8001bd5fecd900045da5ec989c3f934c3fd3d56 + languageName: node + linkType: hard + +"@aws-sdk/util-user-agent-browser@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/util-user-agent-browser@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@smithy/types": "npm:^3.5.0" + bowser: "npm:^2.11.0" + tslib: "npm:^2.6.2" + checksum: 10c0/6de7705b174acc2d8eea8d98fa0f50886acdfe78c9de18b8a143fbdf8e539e60328457591f71ea14844b8d97e1279596dbb29e0bff7b4b87a7c5b0133d09fccf + languageName: node + linkType: hard + +"@aws-sdk/util-user-agent-node@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/util-user-agent-node@npm:3.662.0" + dependencies: + "@aws-sdk/types": "npm:3.662.0" + "@smithy/node-config-provider": "npm:^3.1.8" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + peerDependencies: + aws-crt: ">=1.0.0" + peerDependenciesMeta: + aws-crt: + optional: true + checksum: 10c0/e8de89ed58a55fbba75e60d1bd4bb41bd4a1ce06d402fdde11ba523739d1d7d5d91138ec1a7c20c790b58897c6a8468f34440c75fddf0c775590431889f0eb17 + languageName: node + linkType: hard + +"@aws-sdk/xml-builder@npm:3.662.0": + version: 3.662.0 + resolution: "@aws-sdk/xml-builder@npm:3.662.0" + dependencies: + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/19d7b9dc63e9e071fbe5608cba00e1edfc89529e76034b336bf019f4950aebaf25913bcf4545b3380a2e53df93d3914d160abe2091e97d91138efb2855649a48 + 
languageName: node + linkType: hard + "@azure/abort-controller@npm:^1.0.0, @azure/abort-controller@npm:^1.1.0": version: 1.1.0 resolution: "@azure/abort-controller@npm:1.1.0" @@ -458,6 +1183,33 @@ __metadata: languageName: node linkType: hard +"@babel/cli@npm:7.25.7": + version: 7.25.7 + resolution: "@babel/cli@npm:7.25.7" + dependencies: + "@jridgewell/trace-mapping": "npm:^0.3.25" + "@nicolo-ribaudo/chokidar-2": "npm:2.1.8-no-fsevents.3" + chokidar: "npm:^3.6.0" + commander: "npm:^6.2.0" + convert-source-map: "npm:^2.0.0" + fs-readdir-recursive: "npm:^1.1.0" + glob: "npm:^7.2.0" + make-dir: "npm:^2.1.0" + slash: "npm:^2.0.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + dependenciesMeta: + "@nicolo-ribaudo/chokidar-2": + optional: true + chokidar: + optional: true + bin: + babel: ./bin/babel.js + babel-external-helpers: ./bin/babel-external-helpers.js + checksum: 10c0/bbbc53eef15844b0bfb737d7d134f979d42c51a269e2aee994b02eb9216a22e8dd3d790d5ae9f5b5c003e01eccfc164c14aaa1ad989695e0154f66a588f77d42 + languageName: node + linkType: hard + "@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.10.4, @babel/code-frame@npm:^7.12.13, @babel/code-frame@npm:^7.24.7": version: 7.24.7 resolution: "@babel/code-frame@npm:7.24.7" @@ -468,6 +1220,16 @@ __metadata: languageName: node linkType: hard +"@babel/code-frame@npm:^7.25.7": + version: 7.25.7 + resolution: "@babel/code-frame@npm:7.25.7" + dependencies: + "@babel/highlight": "npm:^7.25.7" + picocolors: "npm:^1.0.0" + checksum: 10c0/14825c298bdec914caf3d24d1383b6d4cd6b030714686004992f4fc251831ecf432236652896f99d5d341f17170ae9a07b58d8d7b15aa0df8cfa1c5a7d5474bc + languageName: node + linkType: hard + "@babel/compat-data@npm:^7.20.5, @babel/compat-data@npm:^7.22.6, @babel/compat-data@npm:^7.25.2, @babel/compat-data@npm:^7.25.4": version: 7.25.4 resolution: "@babel/compat-data@npm:7.25.4" @@ -548,6 +1310,18 @@ __metadata: languageName: node linkType: hard +"@babel/generator@npm:^7.25.7": + version: 7.25.7 + resolution: 
"@babel/generator@npm:7.25.7" + dependencies: + "@babel/types": "npm:^7.25.7" + "@jridgewell/gen-mapping": "npm:^0.3.5" + "@jridgewell/trace-mapping": "npm:^0.3.25" + jsesc: "npm:^3.0.2" + checksum: 10c0/c03a26c79864d60d04ce36b649c3fa0d6fd7b2bf6a22e22854a0457aa09206508392dd73ee40e7bc8d50b3602f9ff068afa47770cda091d332e7db1ca382ee96 + languageName: node + linkType: hard + "@babel/helper-annotate-as-pure@npm:^7.24.7": version: 7.24.7 resolution: "@babel/helper-annotate-as-pure@npm:7.24.7" @@ -645,6 +1419,16 @@ __metadata: languageName: node linkType: hard +"@babel/helper-module-imports@npm:^7.25.7": + version: 7.25.7 + resolution: "@babel/helper-module-imports@npm:7.25.7" + dependencies: + "@babel/traverse": "npm:^7.25.7" + "@babel/types": "npm:^7.25.7" + checksum: 10c0/0fd0c3673835e5bf75558e184bcadc47c1f6dd2fe2016d53ebe1e5a6ae931a44e093015c2f9a6651c1a89f25c76d9246710c2b0b460b95ee069c464f2837fa2c + languageName: node + linkType: hard + "@babel/helper-module-transforms@npm:^7.24.7, @babel/helper-module-transforms@npm:^7.24.8, @babel/helper-module-transforms@npm:^7.25.0, @babel/helper-module-transforms@npm:^7.25.2": version: 7.25.2 resolution: "@babel/helper-module-transforms@npm:7.25.2" @@ -675,6 +1459,13 @@ __metadata: languageName: node linkType: hard +"@babel/helper-plugin-utils@npm:^7.25.7": + version: 7.25.7 + resolution: "@babel/helper-plugin-utils@npm:7.25.7" + checksum: 10c0/241f8cf3c5b7700e91cab7cfe5b432a3c710ae3cd5bb96dc554da536a6d25f5b9f000cc0c0917501ceb4f76ba92599ee3beb25e10adaf96be59f8df89a842faf + languageName: node + linkType: hard + "@babel/helper-remap-async-to-generator@npm:^7.24.7, @babel/helper-remap-async-to-generator@npm:^7.25.0": version: 7.25.0 resolution: "@babel/helper-remap-async-to-generator@npm:7.25.0" @@ -728,6 +1519,13 @@ __metadata: languageName: node linkType: hard +"@babel/helper-string-parser@npm:^7.25.7": + version: 7.25.7 + resolution: "@babel/helper-string-parser@npm:7.25.7" + checksum: 
10c0/73ef2ceb81f8294678a0afe8ab0103729c0370cac2e830e0d5128b03be5f6a2635838af31d391d763e3c5a4460ed96f42fd7c9b552130670d525be665913bc4c + languageName: node + linkType: hard + "@babel/helper-validator-identifier@npm:^7.24.7": version: 7.24.7 resolution: "@babel/helper-validator-identifier@npm:7.24.7" @@ -735,6 +1533,13 @@ __metadata: languageName: node linkType: hard +"@babel/helper-validator-identifier@npm:^7.25.7": + version: 7.25.7 + resolution: "@babel/helper-validator-identifier@npm:7.25.7" + checksum: 10c0/07438e5bf01ab2882a15027fdf39ac3b0ba1b251774a5130917907014684e2f70fef8fd620137ca062c4c4eedc388508d2ea7a3a7d9936a32785f4fe116c68c0 + languageName: node + linkType: hard + "@babel/helper-validator-option@npm:^7.24.7, @babel/helper-validator-option@npm:^7.24.8": version: 7.24.8 resolution: "@babel/helper-validator-option@npm:7.24.8" @@ -775,13 +1580,25 @@ __metadata: languageName: node linkType: hard -"@babel/node@npm:7.25.0": - version: 7.25.0 - resolution: "@babel/node@npm:7.25.0" +"@babel/highlight@npm:^7.25.7": + version: 7.25.7 + resolution: "@babel/highlight@npm:7.25.7" dependencies: - "@babel/register": "npm:^7.24.6" - commander: "npm:^6.2.0" - core-js: "npm:^3.30.2" + "@babel/helper-validator-identifier": "npm:^7.25.7" + chalk: "npm:^2.4.2" + js-tokens: "npm:^4.0.0" + picocolors: "npm:^1.0.0" + checksum: 10c0/1f5894fdb0a0af6101fb2822369b2eeeae32cbeae2ef73ff73fc6a0a4a20471565cd9cfa589f54ed69df66adeca7c57266031ca9134b7bd244d023a488d419aa + languageName: node + linkType: hard + +"@babel/node@npm:7.25.0": + version: 7.25.0 + resolution: "@babel/node@npm:7.25.0" + dependencies: + "@babel/register": "npm:^7.24.6" + commander: "npm:^6.2.0" + core-js: "npm:^3.30.2" node-environment-flags: "npm:^1.0.5" regenerator-runtime: "npm:^0.14.0" v8flags: "npm:^3.1.1" @@ -804,6 +1621,17 @@ __metadata: languageName: node linkType: hard +"@babel/parser@npm:^7.25.7": + version: 7.25.7 + resolution: "@babel/parser@npm:7.25.7" + dependencies: + "@babel/types": "npm:^7.25.7" + 
bin: + parser: ./bin/babel-parser.js + checksum: 10c0/b771469bb6b636c18a8d642b9df3c73913c3860a979591e1a29a98659efd38b81d3e393047b5251fe382d4c82c681c12da9ce91c98d69316d2604d155a214bcf + languageName: node + linkType: hard + "@babel/plugin-bugfix-firefox-class-in-computed-class-key@npm:^7.25.3": version: 7.25.3 resolution: "@babel/plugin-bugfix-firefox-class-in-computed-class-key@npm:7.25.3" @@ -1751,6 +2579,22 @@ __metadata: languageName: node linkType: hard +"@babel/plugin-transform-runtime@npm:7.25.7": + version: 7.25.7 + resolution: "@babel/plugin-transform-runtime@npm:7.25.7" + dependencies: + "@babel/helper-module-imports": "npm:^7.25.7" + "@babel/helper-plugin-utils": "npm:^7.25.7" + babel-plugin-polyfill-corejs2: "npm:^0.4.10" + babel-plugin-polyfill-corejs3: "npm:^0.10.6" + babel-plugin-polyfill-regenerator: "npm:^0.6.1" + semver: "npm:^6.3.1" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/9b2514e9079361ac8e7e500ffd522dad869d61a3894302da7e29bbac80de00276c8a1b4394d1dcf0b51c57b2c854919928df9648be336139fdf1d6ecd6d1bb32 + languageName: node + linkType: hard + "@babel/plugin-transform-shorthand-properties@npm:^7.0.0, @babel/plugin-transform-shorthand-properties@npm:^7.24.7": version: 7.24.7 resolution: "@babel/plugin-transform-shorthand-properties@npm:7.24.7" @@ -2051,7 +2895,26 @@ __metadata: languageName: node linkType: hard -"@babel/runtime@npm:7.25.0, @babel/runtime@npm:^7.0.0, @babel/runtime@npm:^7.12.5, @babel/runtime@npm:^7.13.10, @babel/runtime@npm:^7.16.3, @babel/runtime@npm:^7.17.8, @babel/runtime@npm:^7.18.9, @babel/runtime@npm:^7.21.0, @babel/runtime@npm:^7.23.9, @babel/runtime@npm:^7.7.6, @babel/runtime@npm:^7.8.4": +"@babel/runtime-corejs3@npm:7.25.7": + version: 7.25.7 + resolution: "@babel/runtime-corejs3@npm:7.25.7" + dependencies: + core-js-pure: "npm:^3.30.2" + regenerator-runtime: "npm:^0.14.0" + checksum: 
10c0/37217edf5f02c0e7ccb78af380b26b06dadc9b031a1bcec22a9cfb540d85470b61ebe1e5cd7e32689a6c0f786015c2ee1a73a16852574c3a46341105e457a87c + languageName: node + linkType: hard + +"@babel/runtime@npm:7.25.7": + version: 7.25.7 + resolution: "@babel/runtime@npm:7.25.7" + dependencies: + regenerator-runtime: "npm:^0.14.0" + checksum: 10c0/86b7829d2fc9343714a9afe92757cf96c4dc799006ca61d73cda62f4b9e29bfa1ce36794955bc6cb4c188f5b10db832c949339895e1bbe81a69022d9d578ce29 + languageName: node + linkType: hard + +"@babel/runtime@npm:^7.0.0, @babel/runtime@npm:^7.12.5, @babel/runtime@npm:^7.13.10, @babel/runtime@npm:^7.16.3, @babel/runtime@npm:^7.17.8, @babel/runtime@npm:^7.18.9, @babel/runtime@npm:^7.21.0, @babel/runtime@npm:^7.23.9, @babel/runtime@npm:^7.7.6, @babel/runtime@npm:^7.8.4": version: 7.25.0 resolution: "@babel/runtime@npm:7.25.0" dependencies: @@ -2071,6 +2934,17 @@ __metadata: languageName: node linkType: hard +"@babel/template@npm:^7.25.7": + version: 7.25.7 + resolution: "@babel/template@npm:7.25.7" + dependencies: + "@babel/code-frame": "npm:^7.25.7" + "@babel/parser": "npm:^7.25.7" + "@babel/types": "npm:^7.25.7" + checksum: 10c0/8ae9e36e4330ee83d4832531d1d9bec7dc2ef6a2a8afa1ef1229506fd60667abcb17f306d1c3d7e582251270597022990c845d5d69e7add70a5aea66720decb9 + languageName: node + linkType: hard + "@babel/traverse@npm:^7.1.6, @babel/traverse@npm:^7.14.0, @babel/traverse@npm:^7.16.8, @babel/traverse@npm:^7.18.9, @babel/traverse@npm:^7.22.20, @babel/traverse@npm:^7.22.5, @babel/traverse@npm:^7.23.2, @babel/traverse@npm:^7.24.7, @babel/traverse@npm:^7.24.8, @babel/traverse@npm:^7.25.0, @babel/traverse@npm:^7.25.1, @babel/traverse@npm:^7.25.2, @babel/traverse@npm:^7.25.3, @babel/traverse@npm:^7.25.4": version: 7.25.6 resolution: "@babel/traverse@npm:7.25.6" @@ -2086,6 +2960,21 @@ __metadata: languageName: node linkType: hard +"@babel/traverse@npm:^7.25.7": + version: 7.25.7 + resolution: "@babel/traverse@npm:7.25.7" + dependencies: + "@babel/code-frame": "npm:^7.25.7" 
+ "@babel/generator": "npm:^7.25.7" + "@babel/parser": "npm:^7.25.7" + "@babel/template": "npm:^7.25.7" + "@babel/types": "npm:^7.25.7" + debug: "npm:^4.3.1" + globals: "npm:^11.1.0" + checksum: 10c0/75d73e52c507a7a7a4c7971d6bf4f8f26fdd094e0d3a0193d77edf6a5efa36fc3db91ec5cc48e8b94e6eb5d5ad21af0a1040e71309172851209415fd105efb1a + languageName: node + linkType: hard + "@babel/types@npm:^7.0.0, @babel/types@npm:^7.1.6, @babel/types@npm:^7.16.8, @babel/types@npm:^7.18.13, @babel/types@npm:^7.18.9, @babel/types@npm:^7.20.7, @babel/types@npm:^7.21.3, @babel/types@npm:^7.23.0, @babel/types@npm:^7.24.7, @babel/types@npm:^7.24.8, @babel/types@npm:^7.25.0, @babel/types@npm:^7.25.2, @babel/types@npm:^7.25.6, @babel/types@npm:^7.3.3, @babel/types@npm:^7.4.4": version: 7.25.6 resolution: "@babel/types@npm:7.25.6" @@ -2097,6 +2986,17 @@ __metadata: languageName: node linkType: hard +"@babel/types@npm:^7.25.7": + version: 7.25.7 + resolution: "@babel/types@npm:7.25.7" + dependencies: + "@babel/helper-string-parser": "npm:^7.25.7" + "@babel/helper-validator-identifier": "npm:^7.25.7" + to-fast-properties: "npm:^2.0.0" + checksum: 10c0/e03e1e2e08600fa1e8eb90632ac9c253dd748176c8d670d85f85b0dc83a0573b26ae748a1cbcb81f401903a3d95f43c3f4f8d516a5ed779929db27de56289633 + languageName: node + linkType: hard + "@base2/pretty-print-object@npm:1.0.1": version: 1.0.1 resolution: "@base2/pretty-print-object@npm:1.0.1" @@ -8614,23 +9514,70 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/storage@workspace:packages/storage": +"@redwoodjs/storage-adapter-filesystem@workspace:packages/storage/adapters/filesystem": version: 0.0.0-use.local - resolution: "@redwoodjs/storage@workspace:packages/storage" + resolution: "@redwoodjs/storage-adapter-filesystem@workspace:packages/storage/adapters/filesystem" dependencies: - "@arethetypeswrong/cli": "npm:0.16.4" - "@prisma/client": "npm:5.19.1" "@redwoodjs/framework-tools": "workspace:*" - "@redwoodjs/project-config": "workspace:*" - 
"@types/mime-types": "npm:2.1.4" + "@redwoodjs/storage-core": "workspace:*" + "@types/uuid": "npm:10.0.0" concurrently: "npm:8.2.2" esbuild: "npm:0.23.1" mime-types: "npm:2.1.35" publint: "npm:0.2.10" - tstyche: "npm:2.1.1" tsx: "npm:4.19.1" typescript: "npm:5.6.2" - ulid: "npm:2.3.0" + uuid: "npm:10.0.0" + vitest: "npm:2.0.5" + languageName: unknown + linkType: soft + +"@redwoodjs/storage-adapter-memory@workspace:packages/storage/adapters/memory": + version: 0.0.0-use.local + resolution: "@redwoodjs/storage-adapter-memory@workspace:packages/storage/adapters/memory" + dependencies: + "@redwoodjs/framework-tools": "workspace:*" + "@redwoodjs/storage-core": "workspace:*" + "@types/uuid": "npm:10.0.0" + concurrently: "npm:8.2.2" + esbuild: "npm:0.23.1" + lru-cache: "npm:11.0.1" + mime-types: "npm:2.1.35" + publint: "npm:0.2.10" + tsx: "npm:4.19.1" + typescript: "npm:5.6.2" + uuid: "npm:10.0.0" + vitest: "npm:2.0.5" + languageName: unknown + linkType: soft + +"@redwoodjs/storage-adapter-s3@workspace:packages/storage/adapters/s3": + version: 0.0.0-use.local + resolution: "@redwoodjs/storage-adapter-s3@workspace:packages/storage/adapters/s3" + dependencies: + "@aws-sdk/client-s3": "npm:3.663.0" + "@aws-sdk/lib-storage": "npm:3.663.0" + "@aws-sdk/s3-request-presigner": "npm:3.663.0" + "@redwoodjs/framework-tools": "workspace:*" + concurrently: "npm:8.2.2" + esbuild: "npm:0.23.1" + publint: "npm:0.2.10" + tsx: "npm:4.19.1" + typescript: "npm:5.6.2" + vitest: "npm:2.0.5" + languageName: unknown + linkType: soft + +"@redwoodjs/storage-core@workspace:*, @redwoodjs/storage-core@workspace:packages/storage/core": + version: 0.0.0-use.local + resolution: "@redwoodjs/storage-core@workspace:packages/storage/core" + dependencies: + "@redwoodjs/framework-tools": "workspace:*" + concurrently: "npm:8.2.2" + esbuild: "npm:0.23.1" + publint: "npm:0.2.10" + tsx: "npm:4.19.1" + typescript: "npm:5.6.2" vitest: "npm:2.0.5" languageName: unknown linkType: soft @@ -8745,6 +9692,39 @@ 
__metadata: languageName: unknown linkType: soft +"@redwoodjs/uploads-graphql@workspace:packages/uploads/graphql": + version: 0.0.0-use.local + resolution: "@redwoodjs/uploads-graphql@workspace:packages/uploads/graphql" + dependencies: + "@redwoodjs/context": "workspace:*" + "@redwoodjs/framework-tools": "workspace:*" + "@redwoodjs/graphql-server": "workspace:*" + jsonwebtoken: "npm:9.0.2" + nodemon: "npm:3.1.4" + tsx: "npm:4.19.1" + typescript: "npm:5.6.2" + vitest: "npm:2.0.5" + languageName: unknown + linkType: soft + +"@redwoodjs/uploads-web@workspace:packages/uploads/web": + version: 0.0.0-use.local + resolution: "@redwoodjs/uploads-web@workspace:packages/uploads/web" + dependencies: + "@apollo/client": "npm:3.11.1" + "@redwoodjs/framework-tools": "workspace:*" + "@redwoodjs/web": "workspace:*" + nodemon: "npm:3.1.4" + react: "npm:19.0.0-rc-f2df5694-20240916" + react-dom: "npm:19.0.0-rc-f2df5694-20240916" + react-dropzone: "npm:14.2.3" + react-hot-toast: "npm:2.4.1" + tsx: "npm:4.19.1" + typescript: "npm:5.6.2" + vitest: "npm:2.0.5" + languageName: unknown + linkType: soft + "@redwoodjs/vite@workspace:packages/vite": version: 0.0.0-use.local resolution: "@redwoodjs/vite@workspace:packages/vite" @@ -8837,11 +9817,11 @@ __metadata: "@apollo/client": "npm:3.11.1" "@apollo/client-react-streaming": "npm:0.10.0" "@arethetypeswrong/cli": "npm:0.16.4" - "@babel/cli": "npm:7.24.8" + "@babel/cli": "npm:7.25.7" "@babel/core": "npm:^7.22.20" - "@babel/plugin-transform-runtime": "npm:7.24.7" - "@babel/runtime": "npm:7.25.0" - "@babel/runtime-corejs3": "npm:7.25.0" + "@babel/plugin-transform-runtime": "npm:7.25.7" + "@babel/runtime": "npm:7.25.7" + "@babel/runtime-corejs3": "npm:7.25.7" "@redwoodjs/auth": "workspace:*" "@redwoodjs/framework-tools": "workspace:*" "@redwoodjs/internal": "workspace:*" @@ -8859,8 +9839,8 @@ __metadata: graphql: "npm:16.9.0" graphql-sse: "npm:2.5.3" graphql-tag: "npm:2.12.6" - nodemon: "npm:3.1.4" - publint: "npm:0.2.10" + nodemon: "npm:3.1.7" + 
publint: "npm:0.2.11" react: "npm:19.0.0-rc-f2df5694-20240916" react-dom: "npm:19.0.0-rc-f2df5694-20240916" react-helmet-async: "npm:2.0.5" @@ -9013,221 +9993,817 @@ __metadata: languageName: node linkType: hard -"@rollup/rollup-linux-s390x-gnu@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.21.2" - conditions: os=linux & cpu=s390x & libc=glibc +"@rollup/rollup-linux-s390x-gnu@npm:4.21.2": + version: 4.21.2 + resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.21.2" + conditions: os=linux & cpu=s390x & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-x64-gnu@npm:4.21.2": + version: 4.21.2 + resolution: "@rollup/rollup-linux-x64-gnu@npm:4.21.2" + conditions: os=linux & cpu=x64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-x64-musl@npm:4.21.2": + version: 4.21.2 + resolution: "@rollup/rollup-linux-x64-musl@npm:4.21.2" + conditions: os=linux & cpu=x64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-win32-arm64-msvc@npm:4.21.2": + version: 4.21.2 + resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.21.2" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-win32-ia32-msvc@npm:4.21.2": + version: 4.21.2 + resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.21.2" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + +"@rollup/rollup-win32-x64-msvc@npm:4.21.2": + version: 4.21.2 + resolution: "@rollup/rollup-win32-x64-msvc@npm:4.21.2" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"@rtsao/scc@npm:^1.1.0": + version: 1.1.0 + resolution: "@rtsao/scc@npm:1.1.0" + checksum: 10c0/b5bcfb0d87f7d1c1c7c0f7693f53b07866ed9fec4c34a97a8c948fb9a7c0082e416ce4d3b60beb4f5e167cbe04cdeefbf6771320f3ede059b9ce91188c409a5b + languageName: node + linkType: hard + +"@sdl-codegen/node@npm:2.0.1": + version: 2.0.1 + resolution: "@sdl-codegen/node@npm:2.0.1" + dependencies: + "@mrleebo/prisma-ast": 
"npm:^0.12.0" + ts-morph: "npm:^22.0.0" + peerDependencies: + graphql: "*" + prettier: ^2 + typescript: "*" + peerDependenciesMeta: + prettier: + optional: true + checksum: 10c0/a99ae86cbc4f55939103716e7b7430e6cd60374cd86f8263cea69631996d386be4483763a32d4688279dfc95aaaa21b1d9fd6e154bb65623e76b1347aa4cbc2f + languageName: node + linkType: hard + +"@selderee/plugin-htmlparser2@npm:^0.11.0": + version: 0.11.0 + resolution: "@selderee/plugin-htmlparser2@npm:0.11.0" + dependencies: + domhandler: "npm:^5.0.3" + selderee: "npm:^0.11.0" + checksum: 10c0/e938ba9aeb31a9cf30dcb2977ef41685c598bf744bedc88c57aa9e8b7e71b51781695cf99c08aac50773fd7714eba670bd2a079e46db0788abe40c6d220084eb + languageName: node + linkType: hard + +"@sigstore/bundle@npm:^2.1.1": + version: 2.1.1 + resolution: "@sigstore/bundle@npm:2.1.1" + dependencies: + "@sigstore/protobuf-specs": "npm:^0.2.1" + checksum: 10c0/d3e358569e9b0f1a2c5bfa3ab8608046fc11f42424ea717b4871044cf9ecbff374fc08673b9858ec93f993fa1a6166d416b1245a0d13d0f856ea5de99e27a594 + languageName: node + linkType: hard + +"@sigstore/core@npm:^0.2.0": + version: 0.2.0 + resolution: "@sigstore/core@npm:0.2.0" + checksum: 10c0/b3da01c5369ba7e02b3f56921d0e3da8d6527d005d1bf6e90fbebf4211541c068a2a18e65f205ff7bcaa9ca3f41c7261396e7d4b4ad05b0aedca2e3bae0405eb + languageName: node + linkType: hard + +"@sigstore/protobuf-specs@npm:^0.2.1": + version: 0.2.1 + resolution: "@sigstore/protobuf-specs@npm:0.2.1" + checksum: 10c0/756b3bc64e7f21d966473208cd3920fcde6744025f7deb1d3be1d2b6261b825178b393db7458cd191b2eab947e516eacd6f91aa2f4545d8c045431fb699ac357 + languageName: node + linkType: hard + +"@sigstore/sign@npm:^2.2.1": + version: 2.2.1 + resolution: "@sigstore/sign@npm:2.2.1" + dependencies: + "@sigstore/bundle": "npm:^2.1.1" + "@sigstore/core": "npm:^0.2.0" + "@sigstore/protobuf-specs": "npm:^0.2.1" + make-fetch-happen: "npm:^13.0.0" + checksum: 
10c0/482206264bdf517fe54d08171942219b4541704f5dec9ecb169687d545b1437c5a1493ab5ea84e87180f777d7476f0154828f0ce978f55071b0117d5687f3f9c + languageName: node + linkType: hard + +"@sigstore/tuf@npm:^2.3.0": + version: 2.3.0 + resolution: "@sigstore/tuf@npm:2.3.0" + dependencies: + "@sigstore/protobuf-specs": "npm:^0.2.1" + tuf-js: "npm:^2.2.0" + checksum: 10c0/a214561e143f553132428597eaa68cfdcb36c6bf757f3dea30b2e55038433b0ffc53c446036e6d104487fb55f8d6bc6e01764090d29f42497fb44d55017f360c + languageName: node + linkType: hard + +"@sigstore/verify@npm:^0.1.0": + version: 0.1.0 + resolution: "@sigstore/verify@npm:0.1.0" + dependencies: + "@sigstore/bundle": "npm:^2.1.1" + "@sigstore/core": "npm:^0.2.0" + "@sigstore/protobuf-specs": "npm:^0.2.1" + checksum: 10c0/3eeb4817ac38dc7b337a48e75c4e88226a5553c32594fa8c22221087a69656a7ccfe68e6f59eb12f1ecc506ea6c6db90e4b312c7dcc4a66c04e01434dc607fc7 + languageName: node + linkType: hard + +"@simplewebauthn/browser@npm:7.4.0": + version: 7.4.0 + resolution: "@simplewebauthn/browser@npm:7.4.0" + dependencies: + "@simplewebauthn/typescript-types": "npm:^7.4.0" + checksum: 10c0/cd69d51511e1bb75603b254b706194e8b7c3849e8f02fcb373cc8bb8c789df803a1bb900de7853c0cc63c0ad81fd56497ca63885638d566137afa387674099ad + languageName: node + linkType: hard + +"@simplewebauthn/iso-webcrypto@npm:^7.4.0": + version: 7.4.0 + resolution: "@simplewebauthn/iso-webcrypto@npm:7.4.0" + dependencies: + "@simplewebauthn/typescript-types": "npm:^7.4.0" + "@types/node": "npm:^18.11.9" + checksum: 10c0/66a3eabb8fca5a8f779d428b358c8fc02dd2496f9cafda882f3b19562e5c9d21a8af3082f635c7ff0a1914e33a87817be0d16307f5327606149a52e854406cbb + languageName: node + linkType: hard + +"@simplewebauthn/server@npm:7.4.0": + version: 7.4.0 + resolution: "@simplewebauthn/server@npm:7.4.0" + dependencies: + "@hexagon/base64": "npm:^1.1.25" + "@peculiar/asn1-android": "npm:^2.3.3" + "@peculiar/asn1-ecc": "npm:^2.3.4" + "@peculiar/asn1-rsa": "npm:^2.3.4" + "@peculiar/asn1-schema": 
"npm:^2.3.3" + "@peculiar/asn1-x509": "npm:^2.3.4" + "@simplewebauthn/iso-webcrypto": "npm:^7.4.0" + "@simplewebauthn/typescript-types": "npm:^7.4.0" + "@types/debug": "npm:^4.1.7" + "@types/node": "npm:^18.11.9" + cbor-x: "npm:^1.4.1" + cross-fetch: "npm:^3.1.5" + debug: "npm:^4.3.2" + checksum: 10c0/51858ad0bcfb55b96c8dd4a337ed93baf000ccf55cdf13f9f87c96e54c0fa80b0fb0eb96fc570d9e039a2526d770a1a21811a03a15f9ad23a02142ff9ba8ad6e + languageName: node + linkType: hard + +"@simplewebauthn/typescript-types@npm:7.4.0, @simplewebauthn/typescript-types@npm:^7.4.0": + version: 7.4.0 + resolution: "@simplewebauthn/typescript-types@npm:7.4.0" + checksum: 10c0/b7aefd742d2f483531ff96509475571339660addba1f140883d8e489601d6a3a5b1c6759aa5ba27a9da5b502709aee9f060a4d4e57010f32c94eb5c42ef562a3 + languageName: node + linkType: hard + +"@sinclair/typebox@npm:^0.27.8": + version: 0.27.8 + resolution: "@sinclair/typebox@npm:0.27.8" + checksum: 10c0/ef6351ae073c45c2ac89494dbb3e1f87cc60a93ce4cde797b782812b6f97da0d620ae81973f104b43c9b7eaa789ad20ba4f6a1359f1cc62f63729a55a7d22d4e + languageName: node + linkType: hard + +"@sindresorhus/is@npm:^0.14.0": + version: 0.14.0 + resolution: "@sindresorhus/is@npm:0.14.0" + checksum: 10c0/7247aa9314d4fc3df9b3f63d8b5b962a89c7600a5db1f268546882bfc4d31a975a899f5f42a09dd41a11e58636e6402f7c40f92df853aee417247bb11faee9a0 + languageName: node + linkType: hard + +"@sindresorhus/is@npm:^4.6.0": + version: 4.6.0 + resolution: "@sindresorhus/is@npm:4.6.0" + checksum: 10c0/33b6fb1d0834ec8dd7689ddc0e2781c2bfd8b9c4e4bacbcb14111e0ae00621f2c264b8a7d36541799d74888b5dccdf422a891a5cb5a709ace26325eedc81e22e + languageName: node + linkType: hard + +"@sinonjs/commons@npm:^3.0.0": + version: 3.0.0 + resolution: "@sinonjs/commons@npm:3.0.0" + dependencies: + type-detect: "npm:4.0.8" + checksum: 10c0/1df9cd257942f4e4960dfb9fd339d9e97b6a3da135f3d5b8646562918e863809cb8e00268535f4f4723535d2097881c8fc03d545c414d8555183376cfc54ee84 + languageName: node + linkType: hard + 
+"@sinonjs/fake-timers@npm:^10.0.2": + version: 10.3.0 + resolution: "@sinonjs/fake-timers@npm:10.3.0" + dependencies: + "@sinonjs/commons": "npm:^3.0.0" + checksum: 10c0/2e2fb6cc57f227912814085b7b01fede050cd4746ea8d49a1e44d5a0e56a804663b0340ae2f11af7559ea9bf4d087a11f2f646197a660ea3cb04e19efc04aa63 + languageName: node + linkType: hard + +"@smithy/abort-controller@npm:^3.1.5": + version: 3.1.5 + resolution: "@smithy/abort-controller@npm:3.1.5" + dependencies: + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/384e3dca60720bc9048092b1765ec619c5c64844732ca1439ca90d6ea7454eed12d071a536d8c243410512cc39ad1683607415dbeaf89816ddb142bbe10cf789 + languageName: node + linkType: hard + +"@smithy/chunked-blob-reader-native@npm:^3.0.0": + version: 3.0.0 + resolution: "@smithy/chunked-blob-reader-native@npm:3.0.0" + dependencies: + "@smithy/util-base64": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/f3cbd03baaaf33a2c44a484851e3f2902f87cbb2168abff179276b19fd137be021393551b9270f9f3135408d816a06fe84ff826d9beb576dbe53fae9cf487362 + languageName: node + linkType: hard + +"@smithy/chunked-blob-reader@npm:^3.0.0": + version: 3.0.0 + resolution: "@smithy/chunked-blob-reader@npm:3.0.0" + dependencies: + tslib: "npm:^2.6.2" + checksum: 10c0/cc551e4d6c711bec381d70c3074e3937ee78245bb15dd55c28c43c6c30808af1855c8df4a785a1033ded1483979ae115cf2c9decce73083346734db0d32b2fe5 + languageName: node + linkType: hard + +"@smithy/config-resolver@npm:^3.0.9": + version: 3.0.9 + resolution: "@smithy/config-resolver@npm:3.0.9" + dependencies: + "@smithy/node-config-provider": "npm:^3.1.8" + "@smithy/types": "npm:^3.5.0" + "@smithy/util-config-provider": "npm:^3.0.0" + "@smithy/util-middleware": "npm:^3.0.7" + tslib: "npm:^2.6.2" + checksum: 10c0/714504c9341bc4fcc0c5fc86304602a03a26c7ca589945f41d967c8449bb12b6336da423caca04e0c0349c28b6ec7615e29bbbcbc89a68406ec9f39ac5aac483 + languageName: node + linkType: hard + +"@smithy/core@npm:^2.4.7": + version: 2.4.7 + resolution: 
"@smithy/core@npm:2.4.7" + dependencies: + "@smithy/middleware-endpoint": "npm:^3.1.4" + "@smithy/middleware-retry": "npm:^3.0.22" + "@smithy/middleware-serde": "npm:^3.0.7" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/smithy-client": "npm:^3.3.6" + "@smithy/types": "npm:^3.5.0" + "@smithy/util-body-length-browser": "npm:^3.0.0" + "@smithy/util-middleware": "npm:^3.0.7" + "@smithy/util-utf8": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/0d7760bbdad7c886e90d3122cd17660048725bc9013cddfe4920e0e7e979ac35ed4cf1658065b9639d2914fcbf69c1c2cd536b08e76681271ea25c05fc5ce0f6 + languageName: node + linkType: hard + +"@smithy/credential-provider-imds@npm:^3.2.4": + version: 3.2.4 + resolution: "@smithy/credential-provider-imds@npm:3.2.4" + dependencies: + "@smithy/node-config-provider": "npm:^3.1.8" + "@smithy/property-provider": "npm:^3.1.7" + "@smithy/types": "npm:^3.5.0" + "@smithy/url-parser": "npm:^3.0.7" + tslib: "npm:^2.6.2" + checksum: 10c0/bafd86dd1524eafccdd0863e2ee2a59e12f6974d37f7cde6653903da58dd878f6de7d1cd6320b0749507ad959a3cdf039a0e24c76035d1abe85ff3b9c13ad019 + languageName: node + linkType: hard + +"@smithy/eventstream-codec@npm:^3.1.6": + version: 3.1.6 + resolution: "@smithy/eventstream-codec@npm:3.1.6" + dependencies: + "@aws-crypto/crc32": "npm:5.2.0" + "@smithy/types": "npm:^3.5.0" + "@smithy/util-hex-encoding": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/a21d61b9096730e06fa52e1bbab1dfbb2889306a203b85f9f41b7a35756d5226d765aeed3c61100b9c29c2e9c801d7fe00cfc500a78e53d3b64107354d322b61 + languageName: node + linkType: hard + +"@smithy/eventstream-serde-browser@npm:^3.0.10": + version: 3.0.10 + resolution: "@smithy/eventstream-serde-browser@npm:3.0.10" + dependencies: + "@smithy/eventstream-serde-universal": "npm:^3.0.9" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/f826a111c274f3088d2c9a6c206d94a3ad3d7d6eff77338b1ff86922aa7e7aa333c72a18ded49b969c168737ff7418362403dc45ede4e2beb3ad19335b27cc94 + languageName: node + 
linkType: hard + +"@smithy/eventstream-serde-config-resolver@npm:^3.0.7": + version: 3.0.7 + resolution: "@smithy/eventstream-serde-config-resolver@npm:3.0.7" + dependencies: + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/8844b1ae5029e1b3947b0038ad0617920032a6d3df9c81e8940302df9369c264a72a545e8305b5f074149bcea395aeebb948cd963db7769deed483204fc1180b + languageName: node + linkType: hard + +"@smithy/eventstream-serde-node@npm:^3.0.9": + version: 3.0.9 + resolution: "@smithy/eventstream-serde-node@npm:3.0.9" + dependencies: + "@smithy/eventstream-serde-universal": "npm:^3.0.9" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/c910589ae418baec716a9649f0053ca3ca151659b44bdb2e697d2e0aef5ed1c54d589906d7700b7b9ad6285b1f636ffe6cc9fc27ab2a3f068da1376b0bcea5bc + languageName: node + linkType: hard + +"@smithy/eventstream-serde-universal@npm:^3.0.9": + version: 3.0.9 + resolution: "@smithy/eventstream-serde-universal@npm:3.0.9" + dependencies: + "@smithy/eventstream-codec": "npm:^3.1.6" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/3824af8191eb05e2205beaf8908b6b26988a411e8f536e62ddbf37e9f794fd217504cbc129dd704f8aae653d1cc5a79e09ca18ded4ad2d17733fa0b77a03c23e + languageName: node + linkType: hard + +"@smithy/fetch-http-handler@npm:^3.2.9": + version: 3.2.9 + resolution: "@smithy/fetch-http-handler@npm:3.2.9" + dependencies: + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/querystring-builder": "npm:^3.0.7" + "@smithy/types": "npm:^3.5.0" + "@smithy/util-base64": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/0427d47a86d8250aa21fe4a9ec6639e2b611173e7516077ca634a0a398d902152993624766c5411a527a07db12b5c131a351770a9357a346d79811a4939ccbc6 + languageName: node + linkType: hard + +"@smithy/hash-blob-browser@npm:^3.1.6": + version: 3.1.6 + resolution: "@smithy/hash-blob-browser@npm:3.1.6" + dependencies: + "@smithy/chunked-blob-reader": "npm:^3.0.0" + "@smithy/chunked-blob-reader-native": "npm:^3.0.0" + 
"@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/853e8f4e40331a3d0032898d2a80afa54aea44f9975d197e4b627ccb2987dc4a3a59e554f9e77b768b6f2c9d5aaf4c7f2a986de8d78510a3465370fed2b484ed + languageName: node + linkType: hard + +"@smithy/hash-node@npm:^3.0.7": + version: 3.0.7 + resolution: "@smithy/hash-node@npm:3.0.7" + dependencies: + "@smithy/types": "npm:^3.5.0" + "@smithy/util-buffer-from": "npm:^3.0.0" + "@smithy/util-utf8": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/88b1e642639f016f40834035d03288ea7481382e2fcda8a0d6baf38f0c6f1e8541aae51f50aea7876166976ff2e276baae428fbdfb728c0fc29ccdfdb612e853 + languageName: node + linkType: hard + +"@smithy/hash-stream-node@npm:^3.1.6": + version: 3.1.6 + resolution: "@smithy/hash-stream-node@npm:3.1.6" + dependencies: + "@smithy/types": "npm:^3.5.0" + "@smithy/util-utf8": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/1393411476cce5a4c740b3e5e34d64d68dfc49edb2667d7baa43a494c1e36dc80535131b02db9222b475ee599e9bd418d400eceb690c468ab95603e99b94e628 + languageName: node + linkType: hard + +"@smithy/invalid-dependency@npm:^3.0.7": + version: 3.0.7 + resolution: "@smithy/invalid-dependency@npm:3.0.7" + dependencies: + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/b43e868d428d092f91702fe7030307129eb65f0592c60bc6f29ef2bd74799bcae90815326eb599d12aaeee6659ef7c9b2fb85fa0c843ab5132a446edb8767b98 + languageName: node + linkType: hard + +"@smithy/is-array-buffer@npm:^2.2.0": + version: 2.2.0 + resolution: "@smithy/is-array-buffer@npm:2.2.0" + dependencies: + tslib: "npm:^2.6.2" + checksum: 10c0/2f2523cd8cc4538131e408eb31664983fecb0c8724956788b015aaf3ab85a0c976b50f4f09b176f1ed7bbe79f3edf80743be7a80a11f22cd9ce1285d77161aaf + languageName: node + linkType: hard + +"@smithy/is-array-buffer@npm:^3.0.0": + version: 3.0.0 + resolution: "@smithy/is-array-buffer@npm:3.0.0" + dependencies: + tslib: "npm:^2.6.2" + checksum: 
10c0/44710d94b9e6655ebc02169c149ea2bc5d5b9e509b6b39511cfe61bac571412290f4b9c743d61e395822f014021fcb709dbb533f2f717c1ac2d5a356696c22fd + languageName: node + linkType: hard + +"@smithy/md5-js@npm:^3.0.7": + version: 3.0.7 + resolution: "@smithy/md5-js@npm:3.0.7" + dependencies: + "@smithy/types": "npm:^3.5.0" + "@smithy/util-utf8": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/c8577a6a35d219c4f58a68d2b1a1e71d065e0f81c6feb0a6cd85c3efcdae9c33dbd5d1e46e93e854bfbbc208a91a5aa41dba5af9917716b3ab76a287928f0f78 + languageName: node + linkType: hard + +"@smithy/middleware-content-length@npm:^3.0.9": + version: 3.0.9 + resolution: "@smithy/middleware-content-length@npm:3.0.9" + dependencies: + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/7ea6d14fe64a486c024988bed41b70eacadc5e9af4b06d36f1d3902675baf9908090f4cdcc9f066ef26dddb1816035227afe778a0372473678f267e4cb37cbe8 + languageName: node + linkType: hard + +"@smithy/middleware-endpoint@npm:^3.1.4": + version: 3.1.4 + resolution: "@smithy/middleware-endpoint@npm:3.1.4" + dependencies: + "@smithy/middleware-serde": "npm:^3.0.7" + "@smithy/node-config-provider": "npm:^3.1.8" + "@smithy/shared-ini-file-loader": "npm:^3.1.8" + "@smithy/types": "npm:^3.5.0" + "@smithy/url-parser": "npm:^3.0.7" + "@smithy/util-middleware": "npm:^3.0.7" + tslib: "npm:^2.6.2" + checksum: 10c0/29d10c124489a1715ec10dbb45e8359fbb036c8600357f18362df4fba4899357d361402ef55d961939857755ffedc20c780203dc562ce00ca903013ac00226f7 + languageName: node + linkType: hard + +"@smithy/middleware-retry@npm:^3.0.22": + version: 3.0.22 + resolution: "@smithy/middleware-retry@npm:3.0.22" + dependencies: + "@smithy/node-config-provider": "npm:^3.1.8" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/service-error-classification": "npm:^3.0.7" + "@smithy/smithy-client": "npm:^3.3.6" + "@smithy/types": "npm:^3.5.0" + "@smithy/util-middleware": "npm:^3.0.7" + "@smithy/util-retry": "npm:^3.0.7" + tslib: 
"npm:^2.6.2" + uuid: "npm:^9.0.1" + checksum: 10c0/62e88bf5358eb843a21e812ff191d217bb241cb0e4115c2bb55635ef2e53ba4bfe8d704e42074faf9f5627af89f25faa6fb5f8eec5a5186f79f075afd18de511 + languageName: node + linkType: hard + +"@smithy/middleware-serde@npm:^3.0.7": + version: 3.0.7 + resolution: "@smithy/middleware-serde@npm:3.0.7" + dependencies: + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/b04abb0adc9a3b15ce42b0fd3bbdb78ee86a34f9c017cbb2a59ceffc1bde0740fa2f3534abf2ff861112b6fb76a7ea4f55871503e2d8d1e6207052bcccf2819a + languageName: node + linkType: hard + +"@smithy/middleware-stack@npm:^3.0.7": + version: 3.0.7 + resolution: "@smithy/middleware-stack@npm:3.0.7" + dependencies: + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/260ddf0f785fa3118130e8174c653d7267208794feeaeeac9762783c0ebb306f0cbe71d73092347e9dd85ee4ebbe5e82ee0dd6512b3a2da0aef9789d23d020e0 + languageName: node + linkType: hard + +"@smithy/node-config-provider@npm:^3.1.8": + version: 3.1.8 + resolution: "@smithy/node-config-provider@npm:3.1.8" + dependencies: + "@smithy/property-provider": "npm:^3.1.7" + "@smithy/shared-ini-file-loader": "npm:^3.1.8" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/354319e0a6a48775195eecb3486eddce57eb51bd3a88cef729db39b6592da5ac7b2b0b4f996396ed1496a9693a5a67344b4e36c0a6eeb94293ed1e50aa10b740 + languageName: node + linkType: hard + +"@smithy/node-http-handler@npm:^3.2.4": + version: 3.2.4 + resolution: "@smithy/node-http-handler@npm:3.2.4" + dependencies: + "@smithy/abort-controller": "npm:^3.1.5" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/querystring-builder": "npm:^3.0.7" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/b086811ca355cff0c7cf8d897a146f309f0d48c2bbd21a2248c511fa483dd3366ffc8e85f8fe52e727207f426f57c7d9e2127ccb0616f860e2d8755481cb5be9 + languageName: node + linkType: hard + +"@smithy/property-provider@npm:^3.1.7": + version: 3.1.7 + resolution: 
"@smithy/property-provider@npm:3.1.7" + dependencies: + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/14547451d6a81678f4962717cb77a93b01e22d6578462be9a3945889923ba8c2978775f4befb639c305e89169b7e1ee56a0f41a51aabf0f14013a47cbb18be42 + languageName: node + linkType: hard + +"@smithy/protocol-http@npm:^4.1.4": + version: 4.1.4 + resolution: "@smithy/protocol-http@npm:4.1.4" + dependencies: + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/595d25edfe04764a4d51303c3c93b86837b704b7c9d192bf41facebd37bcfe2d20725ea39dda5aa3b73ee985483012447dd02851798bcd6e5e23ac66380b65be + languageName: node + linkType: hard + +"@smithy/querystring-builder@npm:^3.0.7": + version: 3.0.7 + resolution: "@smithy/querystring-builder@npm:3.0.7" + dependencies: + "@smithy/types": "npm:^3.5.0" + "@smithy/util-uri-escape": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/3c8cf8313524a2fc58388f511c2bd81b421b4a7f36acf3979806e957191cdb9b7233c300781ff045be1c2fdf5279a6102dfc613d5c5a25bfed6306f6b2911be2 + languageName: node + linkType: hard + +"@smithy/querystring-parser@npm:^3.0.7": + version: 3.0.7 + resolution: "@smithy/querystring-parser@npm:3.0.7" + dependencies: + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/ceba87cfa24bb86402f4ca2be15753647ebb3df248e0fc2b06a5cbd0d32c1639cca3dc6469daa990e44696e0e94351424ed22326fef46ae28f8c8587c68be515 languageName: node linkType: hard -"@rollup/rollup-linux-x64-gnu@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-x64-gnu@npm:4.21.2" - conditions: os=linux & cpu=x64 & libc=glibc +"@smithy/service-error-classification@npm:^3.0.7": + version: 3.0.7 + resolution: "@smithy/service-error-classification@npm:3.0.7" + dependencies: + "@smithy/types": "npm:^3.5.0" + checksum: 10c0/2bd5e9b9328a66c6a774526519a0b167702fcd3b7301a7f1962e03142913b6cabefbf350b0607ebd79eb989f264d31ef267ad3ebb83d9eccbee78d5fba207759 languageName: node linkType: hard -"@rollup/rollup-linux-x64-musl@npm:4.21.2": - 
version: 4.21.2 - resolution: "@rollup/rollup-linux-x64-musl@npm:4.21.2" - conditions: os=linux & cpu=x64 & libc=musl +"@smithy/shared-ini-file-loader@npm:^3.1.8": + version: 3.1.8 + resolution: "@smithy/shared-ini-file-loader@npm:3.1.8" + dependencies: + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/6f4e66b6e0ddc1250c8f7dc5ebf272165608dd5510a92f03781e2a2adeb3ab862a277cb4c48150a4d0fdc279cafd0476eab0f2a5e01b2d6fed5a15f86d81b778 languageName: node linkType: hard -"@rollup/rollup-win32-arm64-msvc@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.21.2" - conditions: os=win32 & cpu=arm64 +"@smithy/signature-v4@npm:^4.2.0": + version: 4.2.0 + resolution: "@smithy/signature-v4@npm:4.2.0" + dependencies: + "@smithy/is-array-buffer": "npm:^3.0.0" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/types": "npm:^3.5.0" + "@smithy/util-hex-encoding": "npm:^3.0.0" + "@smithy/util-middleware": "npm:^3.0.7" + "@smithy/util-uri-escape": "npm:^3.0.0" + "@smithy/util-utf8": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/d6222c7787d51b3ed58bb09f5fc56e90b6cd0e4588735e78f43a9642549e8e233a2050fa5734e844b80ea23ff17f867e61a687d34dba5db0dd466635f51a9ccf languageName: node linkType: hard -"@rollup/rollup-win32-ia32-msvc@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.21.2" - conditions: os=win32 & cpu=ia32 +"@smithy/smithy-client@npm:^3.3.6": + version: 3.3.6 + resolution: "@smithy/smithy-client@npm:3.3.6" + dependencies: + "@smithy/middleware-endpoint": "npm:^3.1.4" + "@smithy/middleware-stack": "npm:^3.0.7" + "@smithy/protocol-http": "npm:^4.1.4" + "@smithy/types": "npm:^3.5.0" + "@smithy/util-stream": "npm:^3.1.9" + tslib: "npm:^2.6.2" + checksum: 10c0/82e07e629a93b16238071bb7a66cfba74eca85d9d7b63b60644d3feb62a9688a6b6608b152cbb3efd49b86022e0e10e56540ca2a6579859ee147f65850f6d124 languageName: node linkType: hard -"@rollup/rollup-win32-x64-msvc@npm:4.21.2": - version: 4.21.2 - resolution: 
"@rollup/rollup-win32-x64-msvc@npm:4.21.2" - conditions: os=win32 & cpu=x64 +"@smithy/types@npm:^3.5.0": + version: 3.5.0 + resolution: "@smithy/types@npm:3.5.0" + dependencies: + tslib: "npm:^2.6.2" + checksum: 10c0/298f1638b0ba3a5cef3d238219cebab21f9479e54a5de3f7dbde5f65f7a3966a9623d4bb4e3856ef67bc6139a065a149379f6374e68bef380e8bb789c592db22 languageName: node linkType: hard -"@rtsao/scc@npm:^1.1.0": - version: 1.1.0 - resolution: "@rtsao/scc@npm:1.1.0" - checksum: 10c0/b5bcfb0d87f7d1c1c7c0f7693f53b07866ed9fec4c34a97a8c948fb9a7c0082e416ce4d3b60beb4f5e167cbe04cdeefbf6771320f3ede059b9ce91188c409a5b +"@smithy/url-parser@npm:^3.0.7": + version: 3.0.7 + resolution: "@smithy/url-parser@npm:3.0.7" + dependencies: + "@smithy/querystring-parser": "npm:^3.0.7" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/602199c24d13e35fc59bb075a626b83655d24e639a1c287e3eea2f3f8264f42870bab4d94282d0a1a210990263fbee532a661e662b2f11c6342d42dd36140bb5 languageName: node linkType: hard -"@sdl-codegen/node@npm:2.0.1": - version: 2.0.1 - resolution: "@sdl-codegen/node@npm:2.0.1" +"@smithy/util-base64@npm:^3.0.0": + version: 3.0.0 + resolution: "@smithy/util-base64@npm:3.0.0" dependencies: - "@mrleebo/prisma-ast": "npm:^0.12.0" - ts-morph: "npm:^22.0.0" - peerDependencies: - graphql: "*" - prettier: ^2 - typescript: "*" - peerDependenciesMeta: - prettier: - optional: true - checksum: 10c0/a99ae86cbc4f55939103716e7b7430e6cd60374cd86f8263cea69631996d386be4483763a32d4688279dfc95aaaa21b1d9fd6e154bb65623e76b1347aa4cbc2f + "@smithy/util-buffer-from": "npm:^3.0.0" + "@smithy/util-utf8": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/5c05c3505bd1ac4c1e04ec0e22ad1c9e0c61756945735861614f9e46146369a1a112dd0895602475822c18b8f1fe0cc3fb9e45c99a4e7fb03308969c673cf043 languageName: node linkType: hard -"@selderee/plugin-htmlparser2@npm:^0.11.0": - version: 0.11.0 - resolution: "@selderee/plugin-htmlparser2@npm:0.11.0" +"@smithy/util-body-length-browser@npm:^3.0.0": + version: 3.0.0 
+ resolution: "@smithy/util-body-length-browser@npm:3.0.0" dependencies: - domhandler: "npm:^5.0.3" - selderee: "npm:^0.11.0" - checksum: 10c0/e938ba9aeb31a9cf30dcb2977ef41685c598bf744bedc88c57aa9e8b7e71b51781695cf99c08aac50773fd7714eba670bd2a079e46db0788abe40c6d220084eb + tslib: "npm:^2.6.2" + checksum: 10c0/cfb595e814334fe7bb78e8381141cc7364f66bff0c1d672680f4abb99361ef66fbdb9468fa1dbabcd5753254b2b05c59c907fa9d600b36e6e4b8423eccf412f7 languageName: node linkType: hard -"@sigstore/bundle@npm:^2.1.1": - version: 2.1.1 - resolution: "@sigstore/bundle@npm:2.1.1" +"@smithy/util-body-length-node@npm:^3.0.0": + version: 3.0.0 + resolution: "@smithy/util-body-length-node@npm:3.0.0" dependencies: - "@sigstore/protobuf-specs": "npm:^0.2.1" - checksum: 10c0/d3e358569e9b0f1a2c5bfa3ab8608046fc11f42424ea717b4871044cf9ecbff374fc08673b9858ec93f993fa1a6166d416b1245a0d13d0f856ea5de99e27a594 + tslib: "npm:^2.6.2" + checksum: 10c0/6f779848e7c81051364cf6e40ed61034a06fa8df3480398528baae54d9b69622abc7d068869e33dbe51fef2bbc6fda3f548ac59644a0f10545a54c87bc3a4391 languageName: node linkType: hard -"@sigstore/core@npm:^0.2.0": - version: 0.2.0 - resolution: "@sigstore/core@npm:0.2.0" - checksum: 10c0/b3da01c5369ba7e02b3f56921d0e3da8d6527d005d1bf6e90fbebf4211541c068a2a18e65f205ff7bcaa9ca3f41c7261396e7d4b4ad05b0aedca2e3bae0405eb +"@smithy/util-buffer-from@npm:^2.2.0": + version: 2.2.0 + resolution: "@smithy/util-buffer-from@npm:2.2.0" + dependencies: + "@smithy/is-array-buffer": "npm:^2.2.0" + tslib: "npm:^2.6.2" + checksum: 10c0/223d6a508b52ff236eea01cddc062b7652d859dd01d457a4e50365af3de1e24a05f756e19433f6ccf1538544076b4215469e21a4ea83dc1d58d829725b0dbc5a languageName: node linkType: hard -"@sigstore/protobuf-specs@npm:^0.2.1": - version: 0.2.1 - resolution: "@sigstore/protobuf-specs@npm:0.2.1" - checksum: 10c0/756b3bc64e7f21d966473208cd3920fcde6744025f7deb1d3be1d2b6261b825178b393db7458cd191b2eab947e516eacd6f91aa2f4545d8c045431fb699ac357 +"@smithy/util-buffer-from@npm:^3.0.0": + version: 
3.0.0 + resolution: "@smithy/util-buffer-from@npm:3.0.0" + dependencies: + "@smithy/is-array-buffer": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/b10fb81ef34f95418f27c9123c2c1774e690dd447e8064184688c553156bdec46d2ba1b1ae3bad7edd2b58a5ef32ac569e1ad814b36e7ee05eba10526d329983 languageName: node linkType: hard -"@sigstore/sign@npm:^2.2.1": - version: 2.2.1 - resolution: "@sigstore/sign@npm:2.2.1" +"@smithy/util-config-provider@npm:^3.0.0": + version: 3.0.0 + resolution: "@smithy/util-config-provider@npm:3.0.0" dependencies: - "@sigstore/bundle": "npm:^2.1.1" - "@sigstore/core": "npm:^0.2.0" - "@sigstore/protobuf-specs": "npm:^0.2.1" - make-fetch-happen: "npm:^13.0.0" - checksum: 10c0/482206264bdf517fe54d08171942219b4541704f5dec9ecb169687d545b1437c5a1493ab5ea84e87180f777d7476f0154828f0ce978f55071b0117d5687f3f9c + tslib: "npm:^2.6.2" + checksum: 10c0/a2c25eac31223eddea306beff2bb3c32e8761f8cb50e8cb2a9d61417a5040e9565dc715a655787e99a37465fdd35bbd0668ff36e06043a5f6b7be48a76974792 languageName: node linkType: hard -"@sigstore/tuf@npm:^2.3.0": - version: 2.3.0 - resolution: "@sigstore/tuf@npm:2.3.0" +"@smithy/util-defaults-mode-browser@npm:^3.0.22": + version: 3.0.22 + resolution: "@smithy/util-defaults-mode-browser@npm:3.0.22" dependencies: - "@sigstore/protobuf-specs": "npm:^0.2.1" - tuf-js: "npm:^2.2.0" - checksum: 10c0/a214561e143f553132428597eaa68cfdcb36c6bf757f3dea30b2e55038433b0ffc53c446036e6d104487fb55f8d6bc6e01764090d29f42497fb44d55017f360c + "@smithy/property-provider": "npm:^3.1.7" + "@smithy/smithy-client": "npm:^3.3.6" + "@smithy/types": "npm:^3.5.0" + bowser: "npm:^2.11.0" + tslib: "npm:^2.6.2" + checksum: 10c0/8184572e333c34cfe3a4b86fa38e9b232ff8a8877dd18f650c9bfacdb45c2f65b7752bcb074a460c2e2b95f1ea95a33366c5cda8d267feb11fcdfa673d260835 languageName: node linkType: hard -"@sigstore/verify@npm:^0.1.0": - version: 0.1.0 - resolution: "@sigstore/verify@npm:0.1.0" +"@smithy/util-defaults-mode-node@npm:^3.0.22": + version: 3.0.22 + resolution: 
"@smithy/util-defaults-mode-node@npm:3.0.22" dependencies: - "@sigstore/bundle": "npm:^2.1.1" - "@sigstore/core": "npm:^0.2.0" - "@sigstore/protobuf-specs": "npm:^0.2.1" - checksum: 10c0/3eeb4817ac38dc7b337a48e75c4e88226a5553c32594fa8c22221087a69656a7ccfe68e6f59eb12f1ecc506ea6c6db90e4b312c7dcc4a66c04e01434dc607fc7 + "@smithy/config-resolver": "npm:^3.0.9" + "@smithy/credential-provider-imds": "npm:^3.2.4" + "@smithy/node-config-provider": "npm:^3.1.8" + "@smithy/property-provider": "npm:^3.1.7" + "@smithy/smithy-client": "npm:^3.3.6" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/fd233529c5fe10c76905b2fa41523df6dc359de35c2c093a83f7281cfd287b4016ad7435399e366b5328414f39efb0d90ac931c96cffc23c2303021b7696362e languageName: node linkType: hard -"@simplewebauthn/browser@npm:7.4.0": - version: 7.4.0 - resolution: "@simplewebauthn/browser@npm:7.4.0" +"@smithy/util-endpoints@npm:^2.1.3": + version: 2.1.3 + resolution: "@smithy/util-endpoints@npm:2.1.3" dependencies: - "@simplewebauthn/typescript-types": "npm:^7.4.0" - checksum: 10c0/cd69d51511e1bb75603b254b706194e8b7c3849e8f02fcb373cc8bb8c789df803a1bb900de7853c0cc63c0ad81fd56497ca63885638d566137afa387674099ad + "@smithy/node-config-provider": "npm:^3.1.8" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/1f375f997b996af9b2d17a4d1fd2ace81bf0206bf6c9e80d591d1daadce34471ea5ff8913000cd2aae4f619b7d2f3b2d38caf528b036b97ada2831ffbb9725d9 languageName: node linkType: hard -"@simplewebauthn/iso-webcrypto@npm:^7.4.0": - version: 7.4.0 - resolution: "@simplewebauthn/iso-webcrypto@npm:7.4.0" +"@smithy/util-hex-encoding@npm:^3.0.0": + version: 3.0.0 + resolution: "@smithy/util-hex-encoding@npm:3.0.0" dependencies: - "@simplewebauthn/typescript-types": "npm:^7.4.0" - "@types/node": "npm:^18.11.9" - checksum: 10c0/66a3eabb8fca5a8f779d428b358c8fc02dd2496f9cafda882f3b19562e5c9d21a8af3082f635c7ff0a1914e33a87817be0d16307f5327606149a52e854406cbb + tslib: "npm:^2.6.2" + checksum: 
10c0/d2fa7270853cc8f22c4f4635c72bf52e303731a68a3999e3ea9da1d38b6bf08c0f884e7d20b65741e3bc68bb3821e1abd1c3406d7a3dce8fc02df019aea59162 languageName: node linkType: hard -"@simplewebauthn/server@npm:7.4.0": - version: 7.4.0 - resolution: "@simplewebauthn/server@npm:7.4.0" +"@smithy/util-middleware@npm:^3.0.7": + version: 3.0.7 + resolution: "@smithy/util-middleware@npm:3.0.7" dependencies: - "@hexagon/base64": "npm:^1.1.25" - "@peculiar/asn1-android": "npm:^2.3.3" - "@peculiar/asn1-ecc": "npm:^2.3.4" - "@peculiar/asn1-rsa": "npm:^2.3.4" - "@peculiar/asn1-schema": "npm:^2.3.3" - "@peculiar/asn1-x509": "npm:^2.3.4" - "@simplewebauthn/iso-webcrypto": "npm:^7.4.0" - "@simplewebauthn/typescript-types": "npm:^7.4.0" - "@types/debug": "npm:^4.1.7" - "@types/node": "npm:^18.11.9" - cbor-x: "npm:^1.4.1" - cross-fetch: "npm:^3.1.5" - debug: "npm:^4.3.2" - checksum: 10c0/51858ad0bcfb55b96c8dd4a337ed93baf000ccf55cdf13f9f87c96e54c0fa80b0fb0eb96fc570d9e039a2526d770a1a21811a03a15f9ad23a02142ff9ba8ad6e + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/e625791046c73bf5a35d67127007054bb6cc8d8707575c122732de1d6474b97ce1bd5c8c02051287bd967320f768eba364f1f0a59937654dbe25a66cce21bc6d languageName: node linkType: hard -"@simplewebauthn/typescript-types@npm:7.4.0, @simplewebauthn/typescript-types@npm:^7.4.0": - version: 7.4.0 - resolution: "@simplewebauthn/typescript-types@npm:7.4.0" - checksum: 10c0/b7aefd742d2f483531ff96509475571339660addba1f140883d8e489601d6a3a5b1c6759aa5ba27a9da5b502709aee9f060a4d4e57010f32c94eb5c42ef562a3 +"@smithy/util-retry@npm:^3.0.7": + version: 3.0.7 + resolution: "@smithy/util-retry@npm:3.0.7" + dependencies: + "@smithy/service-error-classification": "npm:^3.0.7" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/d641f1e11afbda1b194e5e6a75e815eed03100e0c53305d106cd80836b22854b4ba01efd9aed32996ec538e5c49293bb8d0a77561ebd721d94d862173e40738b languageName: node linkType: hard -"@sinclair/typebox@npm:^0.27.8": - version: 
0.27.8 - resolution: "@sinclair/typebox@npm:0.27.8" - checksum: 10c0/ef6351ae073c45c2ac89494dbb3e1f87cc60a93ce4cde797b782812b6f97da0d620ae81973f104b43c9b7eaa789ad20ba4f6a1359f1cc62f63729a55a7d22d4e +"@smithy/util-stream@npm:^3.1.9": + version: 3.1.9 + resolution: "@smithy/util-stream@npm:3.1.9" + dependencies: + "@smithy/fetch-http-handler": "npm:^3.2.9" + "@smithy/node-http-handler": "npm:^3.2.4" + "@smithy/types": "npm:^3.5.0" + "@smithy/util-base64": "npm:^3.0.0" + "@smithy/util-buffer-from": "npm:^3.0.0" + "@smithy/util-hex-encoding": "npm:^3.0.0" + "@smithy/util-utf8": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/04f37b1e97692d9177a41351336bb119eb5dbe2582bc17e76bc99919defe67fe5afbf3cb52612c48c2bca3bec6f96f2d860825afc9249ab6e7e8fd9b4719f7a8 languageName: node linkType: hard -"@sindresorhus/is@npm:^0.14.0": - version: 0.14.0 - resolution: "@sindresorhus/is@npm:0.14.0" - checksum: 10c0/7247aa9314d4fc3df9b3f63d8b5b962a89c7600a5db1f268546882bfc4d31a975a899f5f42a09dd41a11e58636e6402f7c40f92df853aee417247bb11faee9a0 +"@smithy/util-uri-escape@npm:^3.0.0": + version: 3.0.0 + resolution: "@smithy/util-uri-escape@npm:3.0.0" + dependencies: + tslib: "npm:^2.6.2" + checksum: 10c0/b8d831348412cfafd9300069e74a12e0075b5e786d7ef6a210ba4ab576001c2525653eec68b71dfe6d7aef71c52f547404c4f0345c0fb476a67277f9d44b1156 languageName: node linkType: hard -"@sindresorhus/is@npm:^4.6.0": - version: 4.6.0 - resolution: "@sindresorhus/is@npm:4.6.0" - checksum: 10c0/33b6fb1d0834ec8dd7689ddc0e2781c2bfd8b9c4e4bacbcb14111e0ae00621f2c264b8a7d36541799d74888b5dccdf422a891a5cb5a709ace26325eedc81e22e +"@smithy/util-utf8@npm:^2.0.0": + version: 2.3.0 + resolution: "@smithy/util-utf8@npm:2.3.0" + dependencies: + "@smithy/util-buffer-from": "npm:^2.2.0" + tslib: "npm:^2.6.2" + checksum: 10c0/e18840c58cc507ca57fdd624302aefd13337ee982754c9aa688463ffcae598c08461e8620e9852a424d662ffa948fc64919e852508028d09e89ced459bd506ab languageName: node linkType: hard -"@sinonjs/commons@npm:^3.0.0": 
+"@smithy/util-utf8@npm:^3.0.0": version: 3.0.0 - resolution: "@sinonjs/commons@npm:3.0.0" + resolution: "@smithy/util-utf8@npm:3.0.0" dependencies: - type-detect: "npm:4.0.8" - checksum: 10c0/1df9cd257942f4e4960dfb9fd339d9e97b6a3da135f3d5b8646562918e863809cb8e00268535f4f4723535d2097881c8fc03d545c414d8555183376cfc54ee84 + "@smithy/util-buffer-from": "npm:^3.0.0" + tslib: "npm:^2.6.2" + checksum: 10c0/b568ed84b4770d2ae9b632eb85603765195a791f045af7f47df1369dc26b001056f4edf488b42ca1cd6d852d0155ad306a0d6531e912cb4e633c0d87abaa8899 languageName: node linkType: hard -"@sinonjs/fake-timers@npm:^10.0.2": - version: 10.3.0 - resolution: "@sinonjs/fake-timers@npm:10.3.0" +"@smithy/util-waiter@npm:^3.1.6": + version: 3.1.6 + resolution: "@smithy/util-waiter@npm:3.1.6" dependencies: - "@sinonjs/commons": "npm:^3.0.0" - checksum: 10c0/2e2fb6cc57f227912814085b7b01fede050cd4746ea8d49a1e44d5a0e56a804663b0340ae2f11af7559ea9bf4d087a11f2f646197a660ea3cb04e19efc04aa63 + "@smithy/abort-controller": "npm:^3.1.5" + "@smithy/types": "npm:^3.5.0" + tslib: "npm:^2.6.2" + checksum: 10c0/dfa7cf04afa7be4736e78f54f96c6583c2f582fef6bd179cf925f5dd737f3fed0b37446d5198d9dedfb343a0b71c481f560b5954686f8e2b51155a37752bc586 languageName: node linkType: hard @@ -12667,6 +14243,13 @@ __metadata: languageName: node linkType: hard +"attr-accept@npm:^2.2.2": + version: 2.2.2 + resolution: "attr-accept@npm:2.2.2" + checksum: 10c0/f77c073ac9616a783f2df814a56f65f1c870193e8da6097139e30b3be84ecc19fb835b93e81315d1da4f19e80721f14e8c8075014205e00abd37b856fe030b80 + languageName: node + linkType: hard + "auto-bind@npm:~4.0.0": version: 4.0.0 resolution: "auto-bind@npm:4.0.0" @@ -12993,7 +14576,7 @@ __metadata: languageName: node linkType: hard -"base64-js@npm:^1.3.0, base64-js@npm:^1.3.1": +"base64-js@npm:^1.0.2, base64-js@npm:^1.3.0, base64-js@npm:^1.3.1": version: 1.5.1 resolution: "base64-js@npm:1.5.1" checksum: 
10c0/f23823513b63173a001030fae4f2dabe283b99a9d324ade3ad3d148e218134676f1ee8568c877cd79ec1c53158dcf2d2ba527a97c606618928ba99dd930102bf @@ -13131,6 +14714,13 @@ __metadata: languageName: node linkType: hard +"bowser@npm:^2.11.0": + version: 2.11.0 + resolution: "bowser@npm:2.11.0" + checksum: 10c0/04efeecc7927a9ec33c667fa0965dea19f4ac60b3fea60793c2e6cf06c1dcd2f7ae1dbc656f450c5f50783b1c75cf9dc173ba6f3b7db2feee01f8c4b793e1bd3 + languageName: node + linkType: hard + "boxen@npm:5.1.2": version: 5.1.2 resolution: "boxen@npm:5.1.2" @@ -13348,6 +14938,16 @@ __metadata: languageName: node linkType: hard +"buffer@npm:5.6.0": + version: 5.6.0 + resolution: "buffer@npm:5.6.0" + dependencies: + base64-js: "npm:^1.0.2" + ieee754: "npm:^1.1.4" + checksum: 10c0/07037a0278b07fbc779920f1ba1b473933ffb4a2e2f7b387c55daf6ac64a05b58c27da9e85730a4046e8f97a49f8acd9f7bf89605c0a4dfda88ebfb7e08bfe4a + languageName: node + linkType: hard + "buffer@npm:6.0.3, buffer@npm:^6.0.3": version: 6.0.3 resolution: "buffer@npm:6.0.3" @@ -13844,7 +15444,7 @@ __metadata: languageName: node linkType: hard -"chokidar@npm:3.6.0, chokidar@npm:^3.0.0, chokidar@npm:^3.4.0, chokidar@npm:^3.4.2, chokidar@npm:^3.5.2": +"chokidar@npm:3.6.0, chokidar@npm:^3.0.0, chokidar@npm:^3.4.0, chokidar@npm:^3.4.2, chokidar@npm:^3.5.2, chokidar@npm:^3.6.0": version: 3.6.0 resolution: "chokidar@npm:3.6.0" dependencies: @@ -16934,7 +18534,7 @@ __metadata: languageName: node linkType: hard -"events@npm:^3.0.0, events@npm:^3.3.0": +"events@npm:3.3.0, events@npm:^3.0.0, events@npm:^3.3.0": version: 3.3.0 resolution: "events@npm:3.3.0" checksum: 10c0/d6b6f2adbccbcda74ddbab52ed07db727ef52e31a61ed26db9feb7dc62af7fc8e060defa65e5f8af9449b86b52cc1a1f6a79f2eafcf4e62add2b7a1fa4a432f6 @@ -17311,7 +18911,7 @@ __metadata: languageName: node linkType: hard -"fast-xml-parser@npm:^4.3.0": +"fast-xml-parser@npm:4.4.1, fast-xml-parser@npm:^4.3.0": version: 4.4.1 resolution: "fast-xml-parser@npm:4.4.1" dependencies: @@ -17454,6 +19054,15 @@ 
__metadata: languageName: node linkType: hard +"file-selector@npm:^0.6.0": + version: 0.6.0 + resolution: "file-selector@npm:0.6.0" + dependencies: + tslib: "npm:^2.4.0" + checksum: 10c0/477ca1b56274db9fee1a8a623c4bfef580389726a5fef843af8c1f2f17f70ec2d1e41b29115777c92e120a15f1cca734c6ef36bb48bfa2ee027c68da16cd0d28 + languageName: node + linkType: hard + "file-system-cache@npm:2.3.0": version: 2.3.0 resolution: "file-system-cache@npm:2.3.0" @@ -19181,7 +20790,7 @@ __metadata: languageName: node linkType: hard -"ieee754@npm:^1.1.13, ieee754@npm:^1.2.1": +"ieee754@npm:^1.1.13, ieee754@npm:^1.1.4, ieee754@npm:^1.2.1": version: 1.2.1 resolution: "ieee754@npm:1.2.1" checksum: 10c0/b0782ef5e0935b9f12883a2e2aa37baa75da6e66ce6515c168697b42160807d9330de9a32ec1ed73149aea02e0d822e572bca6f1e22bdcbd2149e13b050b17bb @@ -20948,6 +22557,15 @@ __metadata: languageName: node linkType: hard +"jsesc@npm:^3.0.2": + version: 3.0.2 + resolution: "jsesc@npm:3.0.2" + bin: + jsesc: bin/jsesc + checksum: 10c0/ef22148f9e793180b14d8a145ee6f9f60f301abf443288117b4b6c53d0ecd58354898dc506ccbb553a5f7827965cd38bc5fb726575aae93c5e8915e2de8290e1 + languageName: node + linkType: hard + "jsesc@npm:~0.5.0": version: 0.5.0 resolution: "jsesc@npm:0.5.0" @@ -23521,6 +25139,26 @@ __metadata: languageName: node linkType: hard +"nodemon@npm:3.1.7": + version: 3.1.7 + resolution: "nodemon@npm:3.1.7" + dependencies: + chokidar: "npm:^3.5.2" + debug: "npm:^4" + ignore-by-default: "npm:^1.0.1" + minimatch: "npm:^3.1.2" + pstree.remy: "npm:^1.1.8" + semver: "npm:^7.5.3" + simple-update-notifier: "npm:^2.0.0" + supports-color: "npm:^5.5.0" + touch: "npm:^3.1.0" + undefsafe: "npm:^2.0.5" + bin: + nodemon: bin/nodemon.js + checksum: 10c0/e0b46939abdbce251b1d6281005a5763cee57db295bb00bc4a753b0f5320dac00fe53547fb6764c70a086cf6d1238875cccb800fbc71544b3ecbd3ef71183c87 + languageName: node + linkType: hard + "nopt@npm:^6.0.0": version: 6.0.0 resolution: "nopt@npm:6.0.0" @@ -24755,6 +26393,13 @@ __metadata: languageName: 
node linkType: hard +"picocolors@npm:^1.1.0": + version: 1.1.0 + resolution: "picocolors@npm:1.1.0" + checksum: 10c0/86946f6032148801ef09c051c6fb13b5cf942eaf147e30ea79edb91dd32d700934edebe782a1078ff859fb2b816792e97ef4dab03d7f0b804f6b01a0df35e023 + languageName: node + linkType: hard + "picomatch@npm:^2.0.4, picomatch@npm:^2.2.1, picomatch@npm:^2.2.3, picomatch@npm:^2.3.0, picomatch@npm:^2.3.1": version: 2.3.1 resolution: "picomatch@npm:2.3.1" @@ -25395,6 +27040,19 @@ __metadata: languageName: node linkType: hard +"publint@npm:0.2.11": + version: 0.2.11 + resolution: "publint@npm:0.2.11" + dependencies: + npm-packlist: "npm:^5.1.3" + picocolors: "npm:^1.1.0" + sade: "npm:^1.8.1" + bin: + publint: lib/cli.js + checksum: 10c0/0e57d19e2cde5f4d2125ef984c844abbb18284d75a43bbf438908574902e4ed31dbeceea9e0b39f672bc39fe78382fac3e9d4a29f54f79a12d0cc9a37f7df17b + languageName: node + linkType: hard + "pump@npm:^2.0.0": version: 2.0.1 resolution: "pump@npm:2.0.1" @@ -25655,6 +27313,19 @@ __metadata: languageName: node linkType: hard +"react-dropzone@npm:14.2.3": + version: 14.2.3 + resolution: "react-dropzone@npm:14.2.3" + dependencies: + attr-accept: "npm:^2.2.2" + file-selector: "npm:^0.6.0" + prop-types: "npm:^15.8.1" + peerDependencies: + react: ">= 16.8 || 18.0.0" + checksum: 10c0/6433517c53309aca1bb4f4a535aeee297345ca1e11b123676f46c7682ffab34a3428cbda106448fc92b5c9a5e0fa5d225bc188adebcd4d302366bf6b1f9c3fc1 + languageName: node + linkType: hard + "react-element-to-jsx-string@npm:^15.0.0": version: 15.0.0 resolution: "react-element-to-jsx-string@npm:15.0.0" @@ -27649,7 +29320,7 @@ __metadata: languageName: node linkType: hard -"stream-browserify@npm:^3.0.0": +"stream-browserify@npm:3.0.0, stream-browserify@npm:^3.0.0": version: 3.0.0 resolution: "stream-browserify@npm:3.0.0" dependencies: @@ -29063,15 +30734,6 @@ __metadata: languageName: node linkType: hard -"ulid@npm:2.3.0": - version: 2.3.0 - resolution: "ulid@npm:2.3.0" - bin: - ulid: ./bin/cli.js - checksum: 
10c0/070d237502781085e59cf3d8ece752ff96cd3a0990cf1c1be57273f4550597daeb72e9a7db8e5a320de31102509bb3321d280b54bfc44e98025e4628a9629773 - languageName: node - linkType: hard - "unbox-primitive@npm:^1.0.2": version: 1.0.2 resolution: "unbox-primitive@npm:1.0.2"