diff --git a/apps/processing/.env.example b/apps/processing/.env.example index 591b483..41b13df 100644 --- a/apps/processing/.env.example +++ b/apps/processing/.env.example @@ -13,4 +13,9 @@ IPFS_GATEWAYS_URL=["https://ipfs.io","https://gateway.pinata.cloud","https://dwe PRICING_SOURCE= # 'coingecko' or 'dummy' COINGECKO_API_KEY={{YOUR_KEY}} -COINGECKO_API_TYPE=demo \ No newline at end of file +COINGECKO_API_TYPE=demo + +RETRY_MAX_ATTEMPTS=3 +RETRY_BASE_DELAY_MS=3000 +RETRY_FACTOR=2 +RETRY_MAX_DELAY_MS=300000 \ No newline at end of file diff --git a/apps/processing/README.md b/apps/processing/README.md index 03e5392..8c2ba1a 100644 --- a/apps/processing/README.md +++ b/apps/processing/README.md @@ -36,6 +36,10 @@ Available options: | `DUMMY_PRICE` | Dummy price | 1 | No | Only if PRICING_SOURCE is dummy | | `COINGECKO_API_KEY` | API key for CoinGecko service | N/A | Yes | | | `COINGECKO_API_TYPE` | CoinGecko API tier (demo or pro) | pro | No | | +| `RETRY_MAX_ATTEMPTS` | Maximum number of retry attempts | 3 | No | | +| `RETRY_BASE_DELAY_MS` | Base delay for retry attempts | 3000 | No | | +| `RETRY_FACTOR` | Delay factor for retry attempts | 2 | No | | +| `RETRY_MAX_DELAY_MS` | Maximum delay for retry attempts | 300000 | No | | ## Available Scripts diff --git a/apps/processing/src/config/env.ts b/apps/processing/src/config/env.ts index 4a201e2..dfde17a 100644 --- a/apps/processing/src/config/env.ts +++ b/apps/processing/src/config/env.ts @@ -36,6 +36,10 @@ const baseSchema = z.object({ IPFS_GATEWAYS_URL: stringToJSONSchema .pipe(z.array(z.string().url())) .default('["https://ipfs.io"]'), + RETRY_MAX_ATTEMPTS: z.coerce.number().int().min(1).default(3), + RETRY_BASE_DELAY_MS: z.coerce.number().int().min(1).default(3000), // 3 seconds + RETRY_FACTOR: z.coerce.number().int().min(1).default(2), + RETRY_MAX_DELAY_MS: z.coerce.number().int().min(1).optional(), // 5 minute }); const dummyPricingSchema = baseSchema.extend({ diff --git 
a/apps/processing/src/services/processing.service.ts b/apps/processing/src/services/processing.service.ts index 0d8a199..91c0a85 100644 --- a/apps/processing/src/services/processing.service.ts +++ b/apps/processing/src/services/processing.service.ts @@ -45,8 +45,14 @@ export class ProcessingService { static async initialize(env: Environment): Promise { const sharedDependencies = await SharedDependenciesService.initialize(env); const { CHAINS: chains } = env; - const { core, registriesRepositories, indexerClient, kyselyDatabase, logger } = - sharedDependencies; + const { + core, + registriesRepositories, + indexerClient, + kyselyDatabase, + logger, + retryStrategy, + } = sharedDependencies; const { eventRegistryRepository, strategyRegistryRepository, @@ -83,6 +89,7 @@ export class ProcessingService { }, chain.fetchLimit, chain.fetchDelayMs, + retryStrategy, logger, ); const retroactiveProcessor = new RetroactiveProcessor( @@ -95,6 +102,7 @@ export class ProcessingService { checkpointRepository: strategyProcessingCheckpointRepository, }, chain.fetchLimit, + retryStrategy, logger, ); diff --git a/apps/processing/src/services/sharedDependencies.service.ts b/apps/processing/src/services/sharedDependencies.service.ts index d2d79ea..0c493e1 100644 --- a/apps/processing/src/services/sharedDependencies.service.ts +++ b/apps/processing/src/services/sharedDependencies.service.ts @@ -1,7 +1,7 @@ import { CoreDependencies } from "@grants-stack-indexer/data-flow"; import { EnvioIndexerClient } from "@grants-stack-indexer/indexer-client"; -import { IpfsProvider } from "@grants-stack-indexer/metadata"; -import { PricingProviderFactory } from "@grants-stack-indexer/pricing"; +import { CachingMetadataProvider, IpfsProvider } from "@grants-stack-indexer/metadata"; +import { CachingPricingProvider, PricingProviderFactory } from "@grants-stack-indexer/pricing"; import { createKyselyDatabase, IEventRegistryRepository, @@ -11,12 +11,15 @@ import { KyselyApplicationRepository, 
KyselyDonationRepository, KyselyEventRegistryRepository, + KyselyMetadataCache, + KyselyPricingCache, KyselyProjectRepository, KyselyRoundRepository, KyselyStrategyProcessingCheckpointRepository, KyselyStrategyRegistryRepository, + KyselyTransactionManager, } from "@grants-stack-indexer/repository"; -import { ILogger, Logger } from "@grants-stack-indexer/shared"; +import { ExponentialBackoff, ILogger, Logger, RetryStrategy } from "@grants-stack-indexer/shared"; import { Environment } from "../config/index.js"; @@ -29,6 +32,7 @@ export type SharedDependencies = { }; indexerClient: EnvioIndexerClient; kyselyDatabase: ReturnType; + retryStrategy: RetryStrategy; logger: ILogger; }; @@ -50,6 +54,8 @@ export class SharedDependenciesService { logger, ); + const transactionManager = new KyselyTransactionManager(kyselyDatabase); + const projectRepository = new KyselyProjectRepository(kyselyDatabase, env.DATABASE_SCHEMA); const roundRepository = new KyselyRoundRepository(kyselyDatabase, env.DATABASE_SCHEMA); const applicationRepository = new KyselyApplicationRepository( @@ -64,11 +70,23 @@ export class SharedDependenciesService { kyselyDatabase, env.DATABASE_SCHEMA, ); + const pricingRepository = new KyselyPricingCache(kyselyDatabase, env.DATABASE_SCHEMA); const pricingProvider = PricingProviderFactory.create(env, { logger, }); + const cachedPricingProvider = new CachingPricingProvider( + pricingProvider, + pricingRepository, + logger, + ); + const metadataRepository = new KyselyMetadataCache(kyselyDatabase, env.DATABASE_SCHEMA); const metadataProvider = new IpfsProvider(env.IPFS_GATEWAYS_URL, logger); + const cachedMetadataProvider = new CachingMetadataProvider( + metadataProvider, + metadataRepository, + logger, + ); const eventRegistryRepository = new KyselyEventRegistryRepository( kyselyDatabase, @@ -88,15 +106,23 @@ export class SharedDependenciesService { env.INDEXER_ADMIN_SECRET, ); + const retryStrategy = new ExponentialBackoff({ + maxAttempts: 
env.RETRY_MAX_ATTEMPTS, + baseDelay: env.RETRY_BASE_DELAY_MS, + maxDelay: env.RETRY_MAX_DELAY_MS, + factor: env.RETRY_FACTOR, + }); + return { core: { projectRepository, roundRepository, applicationRepository, - pricingProvider, + pricingProvider: cachedPricingProvider, donationRepository, - metadataProvider, + metadataProvider: cachedMetadataProvider, applicationPayoutRepository, + transactionManager, }, registriesRepositories: { eventRegistryRepository, @@ -105,6 +131,7 @@ export class SharedDependenciesService { }, indexerClient, kyselyDatabase, + retryStrategy, logger, }; } diff --git a/apps/processing/test/unit/sharedDependencies.service.spec.ts b/apps/processing/test/unit/sharedDependencies.service.spec.ts index df37a6e..c832ce9 100644 --- a/apps/processing/test/unit/sharedDependencies.service.spec.ts +++ b/apps/processing/test/unit/sharedDependencies.service.spec.ts @@ -19,8 +19,10 @@ const mocks = vi.hoisted(() => { }; }); -vi.mock("@grants-stack-indexer/shared", () => { +vi.mock("@grants-stack-indexer/shared", async (importActual) => { + const actual = await importActual(); return { + ...actual, Logger: { getInstance: vi.fn().mockReturnValue(mocks.logger), }, @@ -42,16 +44,21 @@ vi.mock("@grants-stack-indexer/repository", () => ({ })), KyselyEventRegistryRepository: vi.fn(), KyselyStrategyProcessingCheckpointRepository: vi.fn(), + KyselyTransactionManager: vi.fn(), + KyselyPricingCache: vi.fn(), + KyselyMetadataCache: vi.fn(), })); vi.mock("@grants-stack-indexer/pricing", () => ({ PricingProviderFactory: { create: vi.fn(), }, + CachingPricingProvider: vi.fn(), })); vi.mock("@grants-stack-indexer/metadata", () => ({ IpfsProvider: vi.fn(), + CachingMetadataProvider: vi.fn(), })); vi.mock("@grants-stack-indexer/indexer-client", () => ({ @@ -145,6 +152,7 @@ describe("SharedDependenciesService", () => { expect(dependencies.core).toHaveProperty("donationRepository"); expect(dependencies.core).toHaveProperty("metadataProvider"); 
expect(dependencies.core).toHaveProperty("applicationPayoutRepository"); + expect(dependencies.core).toHaveProperty("transactionManager"); // Verify registries expect(dependencies.registriesRepositories).toHaveProperty("eventRegistryRepository"); diff --git a/packages/chain-providers/src/exceptions/dataDecode.exception.ts b/packages/chain-providers/src/exceptions/dataDecode.exception.ts index 6fbfaa4..064f2f0 100644 --- a/packages/chain-providers/src/exceptions/dataDecode.exception.ts +++ b/packages/chain-providers/src/exceptions/dataDecode.exception.ts @@ -1,6 +1,7 @@ -export class DataDecodeException extends Error { +import { NonRetriableError } from "@grants-stack-indexer/shared"; + +export class DataDecodeException extends NonRetriableError { constructor(message: string) { super(message); - this.name = "DataDecodeException"; } } diff --git a/packages/chain-providers/src/exceptions/invalidArgument.exception.ts b/packages/chain-providers/src/exceptions/invalidArgument.exception.ts index 84b6087..d321723 100644 --- a/packages/chain-providers/src/exceptions/invalidArgument.exception.ts +++ b/packages/chain-providers/src/exceptions/invalidArgument.exception.ts @@ -1,6 +1,7 @@ -export class InvalidArgumentException extends Error { +import { NonRetriableError } from "@grants-stack-indexer/shared"; + +export class InvalidArgumentException extends NonRetriableError { constructor(message: string) { super(message); - this.name = "InvalidArgumentException"; } } diff --git a/packages/chain-providers/src/exceptions/multicallNotFound.exception.ts b/packages/chain-providers/src/exceptions/multicallNotFound.exception.ts index 45b3902..f988a3b 100644 --- a/packages/chain-providers/src/exceptions/multicallNotFound.exception.ts +++ b/packages/chain-providers/src/exceptions/multicallNotFound.exception.ts @@ -1,4 +1,6 @@ -export class MulticallNotFound extends Error { +import { NonRetriableError } from "@grants-stack-indexer/shared"; + +export class MulticallNotFound extends 
NonRetriableError { constructor() { super("Multicall contract address not found"); } diff --git a/packages/chain-providers/src/exceptions/rpcUrlsEmpty.exception.ts b/packages/chain-providers/src/exceptions/rpcUrlsEmpty.exception.ts index c57457f..5ed61ed 100644 --- a/packages/chain-providers/src/exceptions/rpcUrlsEmpty.exception.ts +++ b/packages/chain-providers/src/exceptions/rpcUrlsEmpty.exception.ts @@ -1,6 +1,7 @@ -export class RpcUrlsEmpty extends Error { +import { NonRetriableError } from "@grants-stack-indexer/shared"; + +export class RpcUrlsEmpty extends NonRetriableError { constructor() { super("RPC URLs array cannot be empty"); - this.name = "RpcUrlsEmpty"; } } diff --git a/packages/chain-providers/src/providers/evmProvider.ts b/packages/chain-providers/src/providers/evmProvider.ts index 65df554..68df045 100644 --- a/packages/chain-providers/src/providers/evmProvider.ts +++ b/packages/chain-providers/src/providers/evmProvider.ts @@ -22,10 +22,12 @@ import { HttpTransport, MulticallParameters, MulticallReturnType, + RpcError, + TimeoutError, toHex, } from "viem"; -import { ILogger } from "@grants-stack-indexer/shared"; +import { ILogger, NonRetriableError, RetriableError } from "@grants-stack-indexer/shared"; import { AbiWithConstructor, @@ -67,7 +69,11 @@ export class EvmProvider { } async getTransaction(hash: Hex): Promise { - return this.client.getTransaction({ hash }); + try { + return await this.client.getTransaction({ hash }); + } catch (e) { + throw this.wrapError(e, "getTransaction", "Failed to get transaction", { hash }); + } } /** @@ -76,7 +82,11 @@ export class EvmProvider { * @returns {Promise} A Promise that resolves to the balance of the address. 
*/ async getBalance(address: Address): Promise { - return this.client.getBalance({ address }); + try { + return await this.client.getBalance({ address }); + } catch (e) { + throw this.wrapError(e, "getBalance", "Failed to get balance", { address }); + } } /** @@ -84,7 +94,11 @@ export class EvmProvider { * @returns {Promise} A Promise that resolves to the latest block number. */ async getBlockNumber(): Promise { - return this.client.getBlockNumber(); + try { + return await this.client.getBlockNumber(); + } catch (e) { + throw this.wrapError(e, "getBlockNumber", "Failed to get block number"); + } } /** @@ -92,7 +106,13 @@ export class EvmProvider { * @returns {Promise} Latest block number. */ async getBlockByNumber(blockNumber: bigint): Promise { - return this.client.getBlock({ blockNumber }); + try { + return await this.client.getBlock({ blockNumber }); + } catch (e) { + throw this.wrapError(e, "getBlockByNumber", "Failed to get block", { + blockNumber: blockNumber.toString(), + }); + } } /** @@ -100,11 +120,19 @@ export class EvmProvider { * @returns {Promise} A Promise that resolves to the current gas price. */ async getGasPrice(): Promise { - return this.client.getGasPrice(); + try { + return await this.client.getGasPrice(); + } catch (e) { + throw this.wrapError(e, "getGasPrice", "Failed to get gas price"); + } } async estimateGas(args: EstimateGasParameters): Promise { - return this.client.estimateGas(args); + try { + return await this.client.estimateGas(args); + } catch (e) { + throw this.wrapError(e, "estimateGas", "Failed to estimate gas", args); + } } /** @@ -121,10 +149,14 @@ export class EvmProvider { ); } - return this.client.getStorageAt({ - address, - slot: typeof slot === "string" ? slot : toHex(slot), - }); + try { + return await this.client.getStorageAt({ + address, + slot: typeof slot === "string" ? 
slot : toHex(slot), + }); + } catch (e) { + throw this.wrapError(e, "getStorageAt", "Failed to get storage", { address, slot }); + } } /** @@ -137,27 +169,28 @@ export class EvmProvider { */ async readContract< TAbi extends Abi, - TFunctionName extends ContractFunctionName = ContractFunctionName< - TAbi, - "pure" | "view" - >, - TArgs extends ContractFunctionArgs< - TAbi, - "pure" | "view", - TFunctionName - > = ContractFunctionArgs, + TFunctionName extends ContractFunctionName, + TArgs extends ContractFunctionArgs, >( contractAddress: Address, abi: TAbi, functionName: TFunctionName, args?: TArgs, ): Promise> { - return this.client.readContract({ - address: contractAddress, - abi, - functionName, - args, - }); + try { + return await this.client.readContract({ + address: contractAddress, + abi, + functionName, + args, + }); + } catch (e) { + throw this.wrapError(e, "readContract", "Failed to read contract", { + contractAddress, + functionName, + args, + }); + } } /** @@ -177,19 +210,27 @@ export class EvmProvider { ): Promise> { const deploymentData = args ? 
encodeDeployData({ abi, bytecode, args }) : bytecode; - const { data: returnData } = await this.client.call({ - data: deploymentData, - }); + try { + const { data: returnData } = await this.client.call({ + data: deploymentData, + }); - if (!returnData) { - throw new DataDecodeException("No return data"); - } + if (!returnData) { + throw new DataDecodeException("No return data"); + } - try { - const decoded = decodeAbiParameters(constructorReturnParams, returnData); - return decoded; + try { + return decodeAbiParameters(constructorReturnParams, returnData); + } catch (e) { + throw new DataDecodeException( + "Error decoding return data with given AbiParameters", + ); + } } catch (e) { - throw new DataDecodeException("Error decoding return data with given AbiParameters"); + if (e instanceof DataDecodeException) { + throw e; + } + throw this.wrapError(e, "batchRequest", "Failed to execute batch request", { args }); } } @@ -208,6 +249,40 @@ export class EvmProvider { ): Promise> { if (!this.chain?.contracts?.multicall3?.address) throw new MulticallNotFound(); - return this.client.multicall(args); + try { + return await this.client.multicall(args); + } catch (e) { + throw this.wrapError(e, "multicall", "Failed to execute multicall", { args }); + } + } + + private wrapError( + error: unknown, + methodName: Exclude< + keyof typeof EvmProvider.prototype, + "constructor" | "wrapError" | "logger" | "chain" | "client" + >, + errorMessage: string, + additionalData?: Record, + ): RetriableError | NonRetriableError { + const err = error as Error; + if (err instanceof TimeoutError || err instanceof RpcError) { + return new RetriableError(errorMessage, { + className: EvmProvider.name, + methodName, + chainId: this.chain?.id?.toString(), + additionalData, + }); + } + return new NonRetriableError( + err.message, + { + className: "EvmProvider", + methodName, + chainId: this.chain?.id?.toString(), + additionalData, + }, + err, + ); } } diff --git 
a/packages/data-flow/src/data-loader/dataLoader.ts b/packages/data-flow/src/data-loader/dataLoader.ts index 2f07d1a..91557a2 100644 --- a/packages/data-flow/src/data-loader/dataLoader.ts +++ b/packages/data-flow/src/data-loader/dataLoader.ts @@ -5,10 +5,11 @@ import { IDonationRepository, IProjectRepository, IRoundRepository, + ITransactionManager, } from "@grants-stack-indexer/repository"; -import { ILogger, stringify } from "@grants-stack-indexer/shared"; +import { ILogger } from "@grants-stack-indexer/shared"; -import { ExecutionResult, IDataLoader, InvalidChangeset } from "../internal.js"; +import { IDataLoader, InvalidChangeset } from "../internal.js"; import { createApplicationHandlers, createApplicationPayoutHandlers, @@ -42,6 +43,7 @@ export class DataLoader implements IDataLoader { donation: IDonationRepository; applicationPayout: IApplicationPayoutRepository; }, + private readonly transactionManager: ITransactionManager, private readonly logger: ILogger, ) { this.handlers = { @@ -54,40 +56,34 @@ export class DataLoader implements IDataLoader { } /** @inheritdoc */ - public async applyChanges(changesets: Changeset[]): Promise { - const result: ExecutionResult = { - changesets: [], - numExecuted: 0, - numSuccessful: 0, - numFailed: 0, - errors: [], - }; - + public async applyChanges(changesets: Changeset[]): Promise { const invalidTypes = changesets.filter((changeset) => !this.handlers[changeset.type]); if (invalidTypes.length > 0) { throw new InvalidChangeset(invalidTypes.map((changeset) => changeset.type)); } - //TODO: research how to manage transactions so we can rollback on error - for (const changeset of changesets) { - result.numExecuted++; - try { - //TODO: inside each handler, we should add zod validation that the args match the expected type - await this.handlers[changeset.type](changeset as never); - result.changesets.push(changeset.type); - result.numSuccessful++; - } catch (error) { - result.numFailed++; - result.errors.push( - `Failed to apply 
changeset ${changeset.type}: ${ - error instanceof Error ? error.message : String(error) - }`, - ); - this.logger.error(`${stringify(error, Object.getOwnPropertyNames(error))}`); - break; - } - } + await this.transactionManager.runInTransaction(async (tx) => { + this.logger.debug(`Starting transaction on ${changesets.length} changesets...`, { + className: DataLoader.name, + }); + for (const changeset of changesets) { + try { + //TODO: inside each handler, we should add zod validation that the args match the expected type + await this.handlers[changeset.type](changeset as never, tx); + } catch (error) { + this.logger.debug( + `Error applying changeset ${changeset.type}. Rolling back transaction with ${changesets.length} changesets`, + { + className: DataLoader.name, + }, + ); - return result; + throw error; + } + } + }); + this.logger.debug(`Successfully applied ${changesets.length} changesets`, { + className: DataLoader.name, + }); } } diff --git a/packages/data-flow/src/data-loader/handlers/application.handlers.ts b/packages/data-flow/src/data-loader/handlers/application.handlers.ts index 20afd8a..7167efc 100644 --- a/packages/data-flow/src/data-loader/handlers/application.handlers.ts +++ b/packages/data-flow/src/data-loader/handlers/application.handlers.ts @@ -19,12 +19,16 @@ export type ApplicationHandlers = { export const createApplicationHandlers = ( repository: IApplicationRepository, ): ApplicationHandlers => ({ - InsertApplication: (async (changeset): Promise => { - await repository.insertApplication(changeset.args); + InsertApplication: (async (changeset, txConnection): Promise => { + await repository.insertApplication(changeset.args, txConnection); }) satisfies ChangesetHandler<"InsertApplication">, - UpdateApplication: (async (changeset): Promise => { + UpdateApplication: (async (changeset, txConnection): Promise => { const { chainId, roundId, applicationId, application } = changeset.args; - await repository.updateApplication({ chainId, roundId, id: 
applicationId }, application); + await repository.updateApplication( + { chainId, roundId, id: applicationId }, + application, + txConnection, + ); }) satisfies ChangesetHandler<"UpdateApplication">, }); diff --git a/packages/data-flow/src/data-loader/handlers/applicationPayout.handlers.ts b/packages/data-flow/src/data-loader/handlers/applicationPayout.handlers.ts index f14fc24..a96de33 100644 --- a/packages/data-flow/src/data-loader/handlers/applicationPayout.handlers.ts +++ b/packages/data-flow/src/data-loader/handlers/applicationPayout.handlers.ts @@ -22,7 +22,7 @@ export type ApplicationPayoutHandlers = { export const createApplicationPayoutHandlers = ( repository: IApplicationPayoutRepository, ): ApplicationPayoutHandlers => ({ - InsertApplicationPayout: (async (changeset): Promise => { - await repository.insertApplicationPayout(changeset.args.applicationPayout); + InsertApplicationPayout: (async (changeset, txConnection): Promise => { + await repository.insertApplicationPayout(changeset.args.applicationPayout, txConnection); }) satisfies ChangesetHandler<"InsertApplicationPayout">, }); diff --git a/packages/data-flow/src/data-loader/handlers/donation.handlers.ts b/packages/data-flow/src/data-loader/handlers/donation.handlers.ts index b42d9e0..841fbd3 100644 --- a/packages/data-flow/src/data-loader/handlers/donation.handlers.ts +++ b/packages/data-flow/src/data-loader/handlers/donation.handlers.ts @@ -17,11 +17,11 @@ export type DonationHandlers = { * @returns An object containing all application-related handlers */ export const createDonationHandlers = (repository: IDonationRepository): DonationHandlers => ({ - InsertDonation: (async (changeset): Promise => { - await repository.insertDonation(changeset.args.donation); + InsertDonation: (async (changeset, txConnection): Promise => { + await repository.insertDonation(changeset.args.donation, txConnection); }) satisfies ChangesetHandler<"InsertDonation">, - InsertManyDonations: (async (changeset): Promise => { - 
await repository.insertManyDonations(changeset.args.donations); + InsertManyDonations: (async (changeset, txConnection): Promise => { + await repository.insertManyDonations(changeset.args.donations, txConnection); }) satisfies ChangesetHandler<"InsertManyDonations">, }); diff --git a/packages/data-flow/src/data-loader/handlers/project.handlers.ts b/packages/data-flow/src/data-loader/handlers/project.handlers.ts index b32a9db..01cb06f 100644 --- a/packages/data-flow/src/data-loader/handlers/project.handlers.ts +++ b/packages/data-flow/src/data-loader/handlers/project.handlers.ts @@ -17,38 +17,38 @@ export type ProjectHandlers = { * @returns An object containing all project-related handlers */ export const createProjectHandlers = (repository: IProjectRepository): ProjectHandlers => ({ - InsertProject: (async (changeset): Promise => { + InsertProject: (async (changeset, txConnection): Promise => { const { project } = changeset.args; - await repository.insertProject(project); + await repository.insertProject(project, txConnection); }) satisfies ChangesetHandler<"InsertProject">, - UpdateProject: (async (changeset): Promise => { + UpdateProject: (async (changeset, txConnection): Promise => { const { chainId, projectId, project } = changeset.args; - await repository.updateProject({ id: projectId, chainId }, project); + await repository.updateProject({ id: projectId, chainId }, project, txConnection); }) satisfies ChangesetHandler<"UpdateProject">, - InsertPendingProjectRole: (async (changeset): Promise => { + InsertPendingProjectRole: (async (changeset, txConnection): Promise => { const { pendingProjectRole } = changeset.args; - await repository.insertPendingProjectRole(pendingProjectRole); + await repository.insertPendingProjectRole(pendingProjectRole, txConnection); }) satisfies ChangesetHandler<"InsertPendingProjectRole">, - DeletePendingProjectRoles: (async (changeset): Promise => { + DeletePendingProjectRoles: (async (changeset, txConnection): Promise => { const { 
ids } = changeset.args; - await repository.deleteManyPendingProjectRoles(ids); + await repository.deleteManyPendingProjectRoles(ids, txConnection); }) satisfies ChangesetHandler<"DeletePendingProjectRoles">, - InsertProjectRole: (async (changeset): Promise => { + InsertProjectRole: (async (changeset, txConnection): Promise => { const { projectRole } = changeset.args; - await repository.insertProjectRole(projectRole); + await repository.insertProjectRole(projectRole, txConnection); }) satisfies ChangesetHandler<"InsertProjectRole">, - DeleteAllProjectRolesByRole: (async (changeset): Promise => { + DeleteAllProjectRolesByRole: (async (changeset, txConnection): Promise => { const { chainId, projectId, role } = changeset.args.projectRole; - await repository.deleteManyProjectRoles(chainId, projectId, role); + await repository.deleteManyProjectRoles(chainId, projectId, role, undefined, txConnection); }) satisfies ChangesetHandler<"DeleteAllProjectRolesByRole">, - DeleteAllProjectRolesByRoleAndAddress: (async (changeset): Promise => { + DeleteAllProjectRolesByRoleAndAddress: (async (changeset, txConnection): Promise => { const { chainId, projectId, role, address } = changeset.args.projectRole; - await repository.deleteManyProjectRoles(chainId, projectId, role, address); + await repository.deleteManyProjectRoles(chainId, projectId, role, address, txConnection); }) satisfies ChangesetHandler<"DeleteAllProjectRolesByRoleAndAddress">, }); diff --git a/packages/data-flow/src/data-loader/handlers/round.handlers.ts b/packages/data-flow/src/data-loader/handlers/round.handlers.ts index cb6c831..d877850 100644 --- a/packages/data-flow/src/data-loader/handlers/round.handlers.ts +++ b/packages/data-flow/src/data-loader/handlers/round.handlers.ts @@ -17,24 +17,28 @@ export type RoundHandlers = { * @returns An object containing all round-related handlers */ export const createRoundHandlers = (repository: IRoundRepository): RoundHandlers => ({ - InsertRound: (async (changeset): Promise 
=> { + InsertRound: (async (changeset, txConnection): Promise => { const { round } = changeset.args; - await repository.insertRound(round); + await repository.insertRound(round, txConnection); }) satisfies ChangesetHandler<"InsertRound">, - UpdateRound: (async (changeset): Promise => { + UpdateRound: (async (changeset, txConnection): Promise => { const { chainId, roundId, round } = changeset.args; - await repository.updateRound({ id: roundId, chainId }, round); + await repository.updateRound({ id: roundId, chainId }, round, txConnection); }) satisfies ChangesetHandler<"UpdateRound">, - UpdateRoundByStrategyAddress: (async (changeset): Promise => { + UpdateRoundByStrategyAddress: (async (changeset, txConnection): Promise => { const { chainId, strategyAddress, round } = changeset.args; if (round) { - await repository.updateRound({ strategyAddress, chainId: chainId }, round); + await repository.updateRound( + { strategyAddress, chainId: chainId }, + round, + txConnection, + ); } }) satisfies ChangesetHandler<"UpdateRoundByStrategyAddress">, - IncrementRoundFundedAmount: (async (changeset): Promise => { + IncrementRoundFundedAmount: (async (changeset, txConnection): Promise => { const { chainId, roundId, fundedAmount, fundedAmountInUsd } = changeset.args; await repository.incrementRoundFunds( { @@ -43,10 +47,11 @@ export const createRoundHandlers = (repository: IRoundRepository): RoundHandlers }, fundedAmount, fundedAmountInUsd, + txConnection, ); }) satisfies ChangesetHandler<"IncrementRoundFundedAmount">, - IncrementRoundTotalDistributed: (async (changeset): Promise => { + IncrementRoundTotalDistributed: (async (changeset, txConnection): Promise => { const { chainId, roundId, amount } = changeset.args; await repository.incrementRoundTotalDistributed( { @@ -54,26 +59,33 @@ export const createRoundHandlers = (repository: IRoundRepository): RoundHandlers roundId, }, amount, + txConnection, ); }) satisfies ChangesetHandler<"IncrementRoundTotalDistributed">, - 
InsertPendingRoundRole: (async (changeset): Promise => { + InsertPendingRoundRole: (async (changeset, txConnection): Promise => { const { pendingRoundRole } = changeset.args; - await repository.insertPendingRoundRole(pendingRoundRole); + await repository.insertPendingRoundRole(pendingRoundRole, txConnection); }) satisfies ChangesetHandler<"InsertPendingRoundRole">, - DeletePendingRoundRoles: (async (changeset): Promise => { + DeletePendingRoundRoles: (async (changeset, txConnection): Promise => { const { ids } = changeset.args; - await repository.deleteManyPendingRoundRoles(ids); + await repository.deleteManyPendingRoundRoles(ids, txConnection); }) satisfies ChangesetHandler<"DeletePendingRoundRoles">, - InsertRoundRole: (async (changeset): Promise => { + InsertRoundRole: (async (changeset, txConnection): Promise => { const { roundRole } = changeset.args; - await repository.insertRoundRole(roundRole); + await repository.insertRoundRole(roundRole, txConnection); }) satisfies ChangesetHandler<"InsertRoundRole">, - DeleteAllRoundRolesByRoleAndAddress: (async (changeset): Promise => { + DeleteAllRoundRolesByRoleAndAddress: (async (changeset, txConnection): Promise => { const { chainId, roundId, role, address } = changeset.args.roundRole; - await repository.deleteManyRoundRolesByRoleAndAddress(chainId, roundId, role, address); + await repository.deleteManyRoundRolesByRoleAndAddress( + chainId, + roundId, + role, + address, + txConnection, + ); }) satisfies ChangesetHandler<"DeleteAllRoundRolesByRoleAndAddress">, }); diff --git a/packages/data-flow/src/data-loader/types/index.ts b/packages/data-flow/src/data-loader/types/index.ts index c8cae2b..9637860 100644 --- a/packages/data-flow/src/data-loader/types/index.ts +++ b/packages/data-flow/src/data-loader/types/index.ts @@ -1,7 +1,8 @@ -import { Changeset } from "@grants-stack-indexer/repository"; +import { Changeset, TransactionConnection } from "@grants-stack-indexer/repository"; export type ChangesetHandler = ( 
changeset: Extract, + txConnection?: TransactionConnection, ) => Promise; export type ChangesetHandlers = { diff --git a/packages/data-flow/src/eventsFetcher.ts b/packages/data-flow/src/eventsFetcher.ts index 9848215..9d25533 100644 --- a/packages/data-flow/src/eventsFetcher.ts +++ b/packages/data-flow/src/eventsFetcher.ts @@ -1,23 +1,29 @@ -import { GetEventsFilters, IIndexerClient } from "@grants-stack-indexer/indexer-client"; -import { AnyIndexerFetchedEvent, ChainId } from "@grants-stack-indexer/shared"; +import { + GetEventsAfterBlockNumberAndLogIndexParams, + GetEventsFilters, + IIndexerClient, +} from "@grants-stack-indexer/indexer-client"; +import { AnyIndexerFetchedEvent } from "@grants-stack-indexer/shared"; import { IEventsFetcher } from "./interfaces/index.js"; export class EventsFetcher implements IEventsFetcher { constructor(private indexerClient: IIndexerClient) {} /* @inheritdoc */ - async fetchEventsByBlockNumberAndLogIndex( - chainId: ChainId, - blockNumber: number, - logIndex: number, - limit: number = 100, - ): Promise { - return await this.indexerClient.getEventsAfterBlockNumberAndLogIndex( + async fetchEventsByBlockNumberAndLogIndex({ + chainId, + blockNumber, + logIndex, + limit = 100, + allowPartialLastBlock = true, + }: GetEventsAfterBlockNumberAndLogIndexParams): Promise { + return await this.indexerClient.getEventsAfterBlockNumberAndLogIndex({ chainId, blockNumber, logIndex, limit, - ); + allowPartialLastBlock, + }); } /** @inheritdoc */ diff --git a/packages/data-flow/src/exceptions/invalidChangeset.exception.ts b/packages/data-flow/src/exceptions/invalidChangeset.exception.ts index da0c2af..e57f17c 100644 --- a/packages/data-flow/src/exceptions/invalidChangeset.exception.ts +++ b/packages/data-flow/src/exceptions/invalidChangeset.exception.ts @@ -1,4 +1,6 @@ -export class InvalidChangeset extends Error { +import { NonRetriableError } from "@grants-stack-indexer/shared"; + +export class InvalidChangeset extends NonRetriableError { 
constructor(invalidTypes: string[]) { super(`Invalid changeset types: ${invalidTypes.join(", ")}`); } diff --git a/packages/data-flow/src/exceptions/invalidEvent.ts b/packages/data-flow/src/exceptions/invalidEvent.ts index 849c145..0497efc 100644 --- a/packages/data-flow/src/exceptions/invalidEvent.ts +++ b/packages/data-flow/src/exceptions/invalidEvent.ts @@ -1,6 +1,12 @@ -import { AnyEvent, ContractName, ProcessorEvent, stringify } from "@grants-stack-indexer/shared"; +import { + AnyEvent, + ContractName, + NonRetriableError, + ProcessorEvent, + stringify, +} from "@grants-stack-indexer/shared"; -export class InvalidEvent extends Error { +export class InvalidEvent extends NonRetriableError { constructor(event: ProcessorEvent) { super(`Event couldn't be processed: ${stringify(event)}`); diff --git a/packages/data-flow/src/interfaces/dataLoader.interface.ts b/packages/data-flow/src/interfaces/dataLoader.interface.ts index 16a28fb..3b78046 100644 --- a/packages/data-flow/src/interfaces/dataLoader.interface.ts +++ b/packages/data-flow/src/interfaces/dataLoader.interface.ts @@ -1,7 +1,5 @@ import type { Changeset } from "@grants-stack-indexer/repository"; -import type { ExecutionResult } from "../internal.js"; - export interface IDataLoader { /** * Applies the changesets to the database. @@ -9,5 +7,5 @@ export interface IDataLoader { * @returns The execution result. * @throws {InvalidChangeset} if there are changesets with invalid types. 
*/ - applyChanges(changesets: Changeset[]): Promise; + applyChanges(changesets: Changeset[]): Promise; } diff --git a/packages/data-flow/src/interfaces/eventsFetcher.interface.ts b/packages/data-flow/src/interfaces/eventsFetcher.interface.ts index 27969fa..42abc95 100644 --- a/packages/data-flow/src/interfaces/eventsFetcher.interface.ts +++ b/packages/data-flow/src/interfaces/eventsFetcher.interface.ts @@ -1,5 +1,8 @@ -import { GetEventsFilters } from "@grants-stack-indexer/indexer-client"; -import { AnyIndexerFetchedEvent, ChainId } from "@grants-stack-indexer/shared"; +import { + GetEventsAfterBlockNumberAndLogIndexParams, + GetEventsFilters, +} from "@grants-stack-indexer/indexer-client"; +import { AnyIndexerFetchedEvent } from "@grants-stack-indexer/shared"; /** * Interface for the events fetcher @@ -10,13 +13,11 @@ export interface IEventsFetcher { * @param chainId id of the chain * @param blockNumber block number to fetch events from * @param logIndex log index in the block to fetch events from - * @param limit limit of events to fetch + * @param limit limit of events to fetch\ + * @param allowPartialLastBlock Whether last block is allowed to be partially fetched */ fetchEventsByBlockNumberAndLogIndex( - chainId: ChainId, - blockNumber: number, - logIndex: number, - limit?: number, + params: GetEventsAfterBlockNumberAndLogIndexParams, ): Promise; /** diff --git a/packages/data-flow/src/orchestrator.ts b/packages/data-flow/src/orchestrator.ts index 89c9bcf..16b6c21 100644 --- a/packages/data-flow/src/orchestrator.ts +++ b/packages/data-flow/src/orchestrator.ts @@ -6,9 +6,11 @@ import { UnsupportedEventException, UnsupportedStrategy, } from "@grants-stack-indexer/processors"; +import { RoundNotFound, RoundNotFoundForId } from "@grants-stack-indexer/repository"; import { Address, AnyEvent, + AnyIndexerFetchedEvent, ChainId, ContractName, Hex, @@ -16,6 +18,9 @@ import { isAlloEvent, isStrategyEvent, ProcessorEvent, + RetriableError, + RetryHandler, + 
RetryStrategy, StrategyEvent, stringify, } from "@grants-stack-indexer/shared"; @@ -46,18 +51,21 @@ import { CoreDependencies, DataLoader, delay, IQueue, iStrategyAbi, Queue } from * The Orchestrator provides fault tolerance and performance optimization through: * - Configurable batch sizes for event fetching * - Delayed processing to prevent overwhelming the system - * - Error handling and logging for various failure scenarios + * - Retry handling with exponential backoff for transient failures + * - Comprehensive error handling and logging for various failure scenarios * - Registry tracking of supported/unsupported strategies and events * - * TODO: Enhance the error handling/retries, logging and observability + * TODO: Enhance logging and observability */ export class Orchestrator { private readonly eventsQueue: IQueue>; + private readonly eventsByBlockContext: Map; private readonly eventsFetcher: IEventsFetcher; private readonly eventsProcessor: EventsProcessor; private readonly eventsRegistry: IEventsRegistry; private readonly strategyRegistry: IStrategyRegistry; private readonly dataLoader: DataLoader; + private readonly retryHandler: RetryHandler; /** * @param chainId - The chain id @@ -65,6 +73,7 @@ export class Orchestrator { * @param indexerClient - The indexer client * @param registries - The registries * @param fetchLimit - The fetch limit + * @param retryStrategy - The retry strategy * @param fetchDelayInMs - The fetch delay in milliseconds */ constructor( @@ -77,6 +86,7 @@ export class Orchestrator { }, private fetchLimit: number = 1000, private fetchDelayInMs: number = 10000, + private retryStrategy: RetryStrategy, private logger: ILogger, ) { this.eventsFetcher = new EventsFetcher(this.indexerClient); @@ -94,9 +104,12 @@ export class Orchestrator { donation: this.dependencies.donationRepository, applicationPayout: this.dependencies.applicationPayoutRepository, }, + this.dependencies.transactionManager, this.logger, ); this.eventsQueue = new 
Queue>(fetchLimit); + this.eventsByBlockContext = new Map(); + this.retryHandler = new RetryHandler(retryStrategy, this.logger); } async run(signal: AbortSignal): Promise { @@ -118,64 +131,53 @@ export class Orchestrator { await delay(this.fetchDelayInMs); continue; } + await this.eventsRegistry.saveLastProcessedEvent(this.chainId, { ...event, rawEvent: event, }); - event = await this.enhanceStrategyId(event); - if (this.isPoolCreated(event)) { - const handleable = existsHandler(event.strategyId); - await this.strategyRegistry.saveStrategyId( - this.chainId, - event.params.strategy, - event.strategyId, - handleable, - ); - } else if (event.contractName === "Strategy" && "strategyId" in event) { - if (!existsHandler(event.strategyId)) { - this.logger.debug("Skipping event", { - event, - className: Orchestrator.name, - chainId: this.chainId, - }); - // we skip the event if the strategy id is not handled yet - continue; - } - } - - const changesets = await this.eventsProcessor.processEvent(event); - const executionResult = await this.dataLoader.applyChanges(changesets); - - if (executionResult.numFailed > 0) { - //TODO: should we retry the failed changesets? - this.logger.error( - `Failed to apply changesets. ${executionResult.errors.join("\n")} Event: ${stringify( - event, - )}`, - { - className: Orchestrator.name, - chainId: this.chainId, - }, - ); - } + await this.retryHandler.execute( + async () => { + await this.handleEvent(event!); + }, + { abortSignal: signal }, + ); } catch (error: unknown) { - // TODO: improve error handling, retries and notify + // TODO: notify if ( error instanceof UnsupportedStrategy || error instanceof InvalidEvent || error instanceof UnsupportedEventException ) { - // this.logger.error( - // `Current event cannot be handled. ${error.name}: ${error.message}. Event: ${stringify(event)}`, - // ); + this.logger.debug( + `Current event cannot be handled. 
${error.name}: ${error.message}.`, + { + className: Orchestrator.name, + chainId: this.chainId, + event, + }, + ); } else { - if (error instanceof Error || isNativeError(error)) { + if (error instanceof RetriableError) { + error.message = `Error processing event after retries. ${error.message}`; this.logger.error(error, { event, className: Orchestrator.name, chainId: this.chainId, }); + } else if (error instanceof Error || isNativeError(error)) { + const shouldIgnoreError = this.shouldIgnoreTimestampsUpdatedError( + error, + event!, + ); + if (!shouldIgnoreError) { + this.logger.error(error, { + event, + className: Orchestrator.name, + chainId: this.chainId, + }); + } } else { this.logger.error( new Error(`Error processing event: ${stringify(event)} ${error}`), @@ -195,6 +197,33 @@ export class Orchestrator { }); } + /** + * Sometimes the TimestampsUpdated event is part of the _initialize() function of a strategy. + * In this case, the event is emitted before the PoolCreated event. We can safely ignore the error + * if the PoolCreated event is present in the same block. + */ + private shouldIgnoreTimestampsUpdatedError( + error: Error, + event: ProcessorEvent, + ): boolean { + const canIgnoreErrorClass = + error instanceof RoundNotFound || error instanceof RoundNotFoundForId; + const canIgnoreEventName = + event?.eventName === "TimestampsUpdated" || + event?.eventName === "TimestampsUpdatedWithRegistrationAndAllocation"; + + if (canIgnoreErrorClass && canIgnoreEventName) { + const events = this.eventsByBlockContext.get(event.blockNumber); + return ( + events + ?.filter((e) => e.logIndex > event.logIndex) + .some((event) => event.eventName === "PoolCreated") ?? false + ); + } + + return false; + } + /** * Enqueue new events from the events fetcher using the last processed event as a starting point */ @@ -203,16 +232,52 @@ export class Orchestrator { const blockNumber = lastProcessedEvent?.blockNumber ?? 0; const logIndex = lastProcessedEvent?.logIndex ?? 
0; - const events = await this.eventsFetcher.fetchEventsByBlockNumberAndLogIndex( - this.chainId, + const events = await this.eventsFetcher.fetchEventsByBlockNumberAndLogIndex({ + chainId: this.chainId, blockNumber, logIndex, - this.fetchLimit, - ); + limit: this.fetchLimit, + allowPartialLastBlock: false, + }); + + // Clear previous context + this.eventsByBlockContext.clear(); + for (const event of events) { + if (!this.eventsByBlockContext.has(event.blockNumber)) { + this.eventsByBlockContext.set(event.blockNumber, []); + } + this.eventsByBlockContext.get(event.blockNumber)!.push(event); + } this.eventsQueue.push(...events); } + private async handleEvent(event: ProcessorEvent): Promise { + event = await this.enhanceStrategyId(event); + if (this.isPoolCreated(event)) { + const handleable = existsHandler(event.strategyId); + await this.strategyRegistry.saveStrategyId( + this.chainId, + event.params.strategy, + event.strategyId, + handleable, + ); + } else if (event.contractName === "Strategy" && "strategyId" in event) { + if (!existsHandler(event.strategyId)) { + this.logger.debug("Skipping event", { + event, + className: Orchestrator.name, + chainId: this.chainId, + }); + // we skip the event if the strategy id is not handled yet + return; + } + } + + const changesets = await this.eventsProcessor.processEvent(event); + await this.dataLoader.applyChanges(changesets); + } + /** * Enhance the event with the strategy id when required * @param event - The event diff --git a/packages/data-flow/src/retroactiveProcessor.ts b/packages/data-flow/src/retroactiveProcessor.ts index 1553701..c47c341 100644 --- a/packages/data-flow/src/retroactiveProcessor.ts +++ b/packages/data-flow/src/retroactiveProcessor.ts @@ -9,6 +9,8 @@ import { Hex, ILogger, ProcessorEvent, + RetryHandler, + RetryStrategy, stringify, } from "@grants-stack-indexer/shared"; @@ -62,6 +64,7 @@ export class RetroactiveProcessor { private readonly strategyRegistry: IStrategyRegistry; private readonly 
dataLoader: DataLoader; private readonly checkpointRepository: IStrategyProcessingCheckpointRepository; + private readonly retryHandler: RetryHandler; /** * Creates a new instance of RetroactiveProcessor @@ -70,6 +73,7 @@ export class RetroactiveProcessor { * @param indexerClient - Client for fetching blockchain events * @param registries - Event and strategy registries for tracking processing state * @param fetchLimit - Maximum number of events to fetch in a single batch (default: 1000) + * @param retryStrategy - The retry strategy * @param logger - Logger instance for debugging and monitoring */ constructor( @@ -82,6 +86,7 @@ export class RetroactiveProcessor { checkpointRepository: IStrategyProcessingCheckpointRepository; }, private fetchLimit: number = 1000, + private retryStrategy: RetryStrategy, private logger: ILogger, ) { this.eventsFetcher = new EventsFetcher(this.indexerClient); @@ -100,8 +105,10 @@ export class RetroactiveProcessor { donation: this.dependencies.donationRepository, applicationPayout: this.dependencies.applicationPayoutRepository, }, + this.dependencies.transactionManager, this.logger, ); + this.retryHandler = new RetryHandler(retryStrategy, this.logger); } /** @@ -207,18 +214,11 @@ export class RetroactiveProcessor { if (this.hasReachedLastEvent(currentPointer, lastEventPointer)) break; event.strategyId = strategyId; - const changesets = await this.eventsProcessor.processEvent(event); - const executionResult = await this.dataLoader.applyChanges(changesets); - if (executionResult.numFailed > 0) { - this.logger.error( - `Failed to apply changesets. 
${executionResult.errors.join("\n")} Event: ${stringify(event)}`, - { - className: RetroactiveProcessor.name, - chainId: this.chainId, - }, - ); - } + await this.retryHandler.execute(async () => { + const changesets = await this.eventsProcessor.processEvent(event!); + await this.dataLoader.applyChanges(changesets); + }); } catch (error) { if (error instanceof InvalidEvent || error instanceof UnsupportedEventException) { // Expected errors that we can safely ignore diff --git a/packages/data-flow/src/types/index.ts b/packages/data-flow/src/types/index.ts index a83c8d8..563071a 100644 --- a/packages/data-flow/src/types/index.ts +++ b/packages/data-flow/src/types/index.ts @@ -1,24 +1,13 @@ import { ProcessorDependencies } from "@grants-stack-indexer/processors"; import { - Changeset, IApplicationPayoutRepository, IApplicationRepository, IDonationRepository, IProjectRepository, IRoundRepository, + ITransactionManager, } from "@grants-stack-indexer/repository"; -/** - * The result of the execution of the changesets. 
- */ -export type ExecutionResult = { - changesets: Changeset["type"][]; - numExecuted: number; - numSuccessful: number; - numFailed: number; - errors: string[]; -}; - /** * The core dependencies for the data flow * @@ -35,4 +24,5 @@ export type CoreDependencies = Pick< applicationRepository: IApplicationRepository; donationRepository: IDonationRepository; applicationPayoutRepository: IApplicationPayoutRepository; + transactionManager: ITransactionManager; }; diff --git a/packages/data-flow/test/data-loader/dataLoader.spec.ts b/packages/data-flow/test/data-loader/dataLoader.spec.ts index 167284b..8c994de 100644 --- a/packages/data-flow/test/data-loader/dataLoader.spec.ts +++ b/packages/data-flow/test/data-loader/dataLoader.spec.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from "vitest"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; import { Changeset, @@ -7,6 +7,8 @@ import { IDonationRepository, IProjectRepository, IRoundRepository, + ITransactionManager, + TransactionConnection, } from "@grants-stack-indexer/repository"; import { ILogger } from "@grants-stack-indexer/shared"; @@ -14,6 +16,7 @@ import { DataLoader } from "../../src/data-loader/dataLoader.js"; import { InvalidChangeset } from "../../src/internal.js"; describe("DataLoader", () => { + let dataLoader: DataLoader; const mockProjectRepository = { insertProject: vi.fn(), updateProject: vi.fn(), @@ -44,8 +47,14 @@ describe("DataLoader", () => { info: vi.fn(), warn: vi.fn(), }; - const createDataLoader = (): DataLoader => - new DataLoader( + + const mockTx = { query: vi.fn() } as unknown as TransactionConnection; + const mockTransactionManager = { + runInTransaction: async (fn) => await fn(mockTx), + } as ITransactionManager; + + beforeEach(() => { + dataLoader = new DataLoader( { project: mockProjectRepository, round: mockRoundRepository, @@ -53,16 +62,17 @@ describe("DataLoader", () => { donation: mockDonationRepository, applicationPayout: 
mockApplicationPayoutRepository, }, + mockTransactionManager, logger, ); + }); - beforeEach(() => { + afterEach(() => { vi.clearAllMocks(); }); describe("applyChanges", () => { it("successfully process multiple changesets", async () => { - const dataLoader = createDataLoader(); const changesets = [ { type: "InsertProject", @@ -74,18 +84,21 @@ describe("DataLoader", () => { } as unknown as Changeset, ]; - const result = await dataLoader.applyChanges(changesets); + await dataLoader.applyChanges(changesets); - expect(result.numExecuted).toBe(2); - expect(result.numSuccessful).toBe(2); - expect(result.numFailed).toBe(0); - expect(result.errors).toHaveLength(0); expect(mockProjectRepository.insertProject).toHaveBeenCalledTimes(1); + expect(mockProjectRepository.insertProject).toHaveBeenCalledWith( + { id: "1", name: "Test Project" }, + mockTx, + ); expect(mockRoundRepository.insertRound).toHaveBeenCalledTimes(1); + expect(mockRoundRepository.insertRound).toHaveBeenCalledWith( + { id: "1", name: "Test Round" }, + mockTx, + ); }); it("throw InvalidChangeset when encountering unknown changeset type", async () => { - const dataLoader = createDataLoader(); const changesets = [ { type: "UnknownType", @@ -97,8 +110,7 @@ describe("DataLoader", () => { ); }); - it("stops processing changesets on first error", async () => { - const dataLoader = createDataLoader(); + it("throws an error if the database operation fails", async () => { const error = new Error("Database error"); vi.spyOn(mockProjectRepository, "insertProject").mockRejectedValueOnce(error); @@ -113,13 +125,14 @@ describe("DataLoader", () => { } as unknown as Changeset, ]; - const result = await dataLoader.applyChanges(changesets); + await expect(dataLoader.applyChanges(changesets)).rejects.toThrow(error); - expect(result.numExecuted).toBe(1); - expect(result.numSuccessful).toBe(0); - expect(result.numFailed).toBe(1); - expect(result.errors).toHaveLength(1); - expect(result.errors[0]).toContain("Database error"); + 
expect(logger.debug).toHaveBeenLastCalledWith( + `Error applying changeset InsertProject. Rolling back transaction with 2 changesets`, + { + className: DataLoader.name, + }, + ); expect(mockRoundRepository.insertRound).not.toHaveBeenCalled(); }); }); diff --git a/packages/data-flow/test/data-loader/handlers/application.handlers.spec.ts b/packages/data-flow/test/data-loader/handlers/application.handlers.spec.ts index 86dad31..1c853d6 100644 --- a/packages/data-flow/test/data-loader/handlers/application.handlers.spec.ts +++ b/packages/data-flow/test/data-loader/handlers/application.handlers.spec.ts @@ -1,6 +1,10 @@ import { beforeEach, describe, expect, it, vi } from "vitest"; -import { IApplicationRepository, NewApplication } from "@grants-stack-indexer/repository"; +import { + IApplicationRepository, + NewApplication, + TransactionConnection, +} from "@grants-stack-indexer/repository"; import { ChainId } from "@grants-stack-indexer/shared"; import { createApplicationHandlers } from "../../../src/data-loader/handlers/application.handlers.js"; @@ -10,6 +14,7 @@ describe("Application Handlers", () => { insertApplication: vi.fn(), updateApplication: vi.fn(), } as unknown as IApplicationRepository; + const mockTxConnection = { query: vi.fn() } as unknown as TransactionConnection; const handlers = createApplicationHandlers(mockRepository); @@ -24,7 +29,7 @@ describe("Application Handlers", () => { args: application, }); - expect(mockRepository.insertApplication).toHaveBeenCalledWith(application); + expect(mockRepository.insertApplication).toHaveBeenCalledWith(application, undefined); }); it("handle UpdateApplication changeset", async () => { @@ -38,11 +43,12 @@ describe("Application Handlers", () => { }, } as const; - await handlers.UpdateApplication(update); + await handlers.UpdateApplication(update, mockTxConnection); expect(mockRepository.updateApplication).toHaveBeenCalledWith( { chainId: 1, roundId: "round1", id: "app1" }, { status: "APPROVED" }, + mockTxConnection, 
); }); }); diff --git a/packages/data-flow/test/data-loader/handlers/applicationPayout.handlers.spec.ts b/packages/data-flow/test/data-loader/handlers/applicationPayout.handlers.spec.ts new file mode 100644 index 0000000..0d17dfd --- /dev/null +++ b/packages/data-flow/test/data-loader/handlers/applicationPayout.handlers.spec.ts @@ -0,0 +1,42 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; + +import { + IApplicationPayoutRepository, + NewApplicationPayout, + TransactionConnection, +} from "@grants-stack-indexer/repository"; + +import { createApplicationPayoutHandlers } from "../../../src/data-loader/handlers/applicationPayout.handlers.js"; + +describe("ApplicationPayout Handlers", () => { + const mockRepository = { + insertApplicationPayout: vi.fn(), + } as IApplicationPayoutRepository; + const mockTxConnection = { query: vi.fn() } as unknown as TransactionConnection; + + const handlers = createApplicationPayoutHandlers(mockRepository); + + beforeEach(() => { + vi.clearAllMocks(); + }); + + it("handle InsertApplication changeset", async () => { + const applicationPayout = { + id: "1", + name: "Test Application", + } as unknown as NewApplicationPayout; + + await handlers.InsertApplicationPayout( + { + type: "InsertApplicationPayout", + args: { applicationPayout }, + }, + mockTxConnection, + ); + + expect(mockRepository.insertApplicationPayout).toHaveBeenCalledWith( + applicationPayout, + mockTxConnection, + ); + }); +}); diff --git a/packages/data-flow/test/data-loader/handlers/donation.handlers.spec.ts b/packages/data-flow/test/data-loader/handlers/donation.handlers.spec.ts new file mode 100644 index 0000000..deb562d --- /dev/null +++ b/packages/data-flow/test/data-loader/handlers/donation.handlers.spec.ts @@ -0,0 +1,54 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; + +import { + IDonationRepository, + NewDonation, + TransactionConnection, +} from "@grants-stack-indexer/repository"; + +import { createDonationHandlers } from 
"../../../src/data-loader/handlers/donation.handlers.js"; + +describe("Donation Handlers", () => { + const mockRepository = { + insertDonation: vi.fn(), + insertManyDonations: vi.fn(), + } as IDonationRepository; + const mockTxConnection = { query: vi.fn() } as unknown as TransactionConnection; + + const handlers = createDonationHandlers(mockRepository); + + beforeEach(() => { + vi.clearAllMocks(); + }); + + it("handle InsertDonation changeset", async () => { + const donation = { id: "1", name: "Test Donation" } as unknown as NewDonation; + await handlers.InsertDonation({ + type: "InsertDonation", + args: { donation }, + }); + + expect(mockRepository.insertDonation).toHaveBeenCalledWith(donation, undefined); + }); + + it("handle InsertManyDonations changeset", async () => { + const donations = [ + { id: "1", name: "Test Donation" }, + { id: "2", name: "Test Donation 2" }, + { id: "3", name: "Test Donation 3" }, + ] as unknown as NewDonation[]; + + await handlers.InsertManyDonations( + { + type: "InsertManyDonations", + args: { donations }, + }, + mockTxConnection, + ); + + expect(mockRepository.insertManyDonations).toHaveBeenCalledWith( + donations, + mockTxConnection, + ); + }); +}); diff --git a/packages/data-flow/test/data-loader/handlers/project.handlers.spec.ts b/packages/data-flow/test/data-loader/handlers/project.handlers.spec.ts index 5ef0f45..087ece9 100644 --- a/packages/data-flow/test/data-loader/handlers/project.handlers.spec.ts +++ b/packages/data-flow/test/data-loader/handlers/project.handlers.spec.ts @@ -1,6 +1,10 @@ import { beforeEach, describe, expect, it, vi } from "vitest"; -import { IProjectRepository, NewProject } from "@grants-stack-indexer/repository"; +import { + IProjectRepository, + NewProject, + TransactionConnection, +} from "@grants-stack-indexer/repository"; import { Address, ChainId } from "@grants-stack-indexer/shared"; import { createProjectHandlers } from "../../../src/data-loader/handlers/project.handlers.js"; @@ -14,6 +18,7 @@ 
describe("Project Handlers", () => { insertProjectRole: vi.fn(), deleteManyProjectRoles: vi.fn(), } as unknown as IProjectRepository; + const mockTxConnection = { query: vi.fn() } as unknown as TransactionConnection; const handlers = createProjectHandlers(mockRepository); @@ -39,12 +44,15 @@ describe("Project Handlers", () => { projectType: "canonical", } as NewProject; - await handlers.InsertProject({ - type: "InsertProject", - args: { project }, - }); + await handlers.InsertProject( + { + type: "InsertProject", + args: { project }, + }, + mockTxConnection, + ); - expect(mockRepository.insertProject).toHaveBeenCalledWith(project); + expect(mockRepository.insertProject).toHaveBeenCalledWith(project, mockTxConnection); }); it("handle UpdateProject changeset", async () => { @@ -65,6 +73,7 @@ describe("Project Handlers", () => { expect(mockRepository.updateProject).toHaveBeenCalledWith( { id: "project-1", chainId: 1 }, { name: "Updated Project", updatedAtBlock: 200n }, + undefined, ); }); @@ -81,7 +90,10 @@ describe("Project Handlers", () => { args: { pendingProjectRole: pendingRole }, }); - expect(mockRepository.insertPendingProjectRole).toHaveBeenCalledWith(pendingRole); + expect(mockRepository.insertPendingProjectRole).toHaveBeenCalledWith( + pendingRole, + undefined, + ); }); it("handle DeletePendingProjectRoles changeset", async () => { @@ -94,7 +106,10 @@ describe("Project Handlers", () => { await handlers.DeletePendingProjectRoles(changeset); - expect(mockRepository.deleteManyPendingProjectRoles).toHaveBeenCalledWith([1, 2, 3]); + expect(mockRepository.deleteManyPendingProjectRoles).toHaveBeenCalledWith( + [1, 2, 3], + undefined, + ); }); it("handle InsertProjectRole changeset", async () => { @@ -111,7 +126,7 @@ describe("Project Handlers", () => { args: { projectRole }, }); - expect(mockRepository.insertProjectRole).toHaveBeenCalledWith(projectRole); + expect(mockRepository.insertProjectRole).toHaveBeenCalledWith(projectRole, undefined); }); it("handle 
DeleteAllProjectRolesByRole changeset", async () => { @@ -132,6 +147,8 @@ describe("Project Handlers", () => { changeset.args.projectRole.chainId, changeset.args.projectRole.projectId, changeset.args.projectRole.role, + undefined, + undefined, ); }); @@ -155,6 +172,7 @@ describe("Project Handlers", () => { changeset.args.projectRole.projectId, changeset.args.projectRole.role, changeset.args.projectRole.address, + undefined, ); }); }); diff --git a/packages/data-flow/test/data-loader/handlers/round.handlers.spec.ts b/packages/data-flow/test/data-loader/handlers/round.handlers.spec.ts index 6ddb625..c06a960 100644 --- a/packages/data-flow/test/data-loader/handlers/round.handlers.spec.ts +++ b/packages/data-flow/test/data-loader/handlers/round.handlers.spec.ts @@ -1,6 +1,10 @@ import { beforeEach, describe, expect, it, vi } from "vitest"; -import { IRoundRepository, NewRound } from "@grants-stack-indexer/repository"; +import { + IRoundRepository, + NewRound, + TransactionConnection, +} from "@grants-stack-indexer/repository"; import { Address, ChainId } from "@grants-stack-indexer/shared"; import { createRoundHandlers } from "../../../src/data-loader/handlers/round.handlers.js"; @@ -16,6 +20,7 @@ describe("Round Handlers", () => { insertRoundRole: vi.fn(), deleteManyRoundRolesByRoleAndAddress: vi.fn(), } as unknown as IRoundRepository; + const mockTxConnection = { query: vi.fn() } as unknown as TransactionConnection; const handlers = createRoundHandlers(mockRepository); @@ -30,12 +35,15 @@ describe("Round Handlers", () => { matchAmount: 1000n, } as NewRound; - await handlers.InsertRound({ - type: "InsertRound" as const, - args: { round }, - }); + await handlers.InsertRound( + { + type: "InsertRound" as const, + args: { round }, + }, + mockTxConnection, + ); - expect(mockRepository.insertRound).toHaveBeenCalledWith(round); + expect(mockRepository.insertRound).toHaveBeenCalledWith(round, mockTxConnection); }); it("handle UpdateRound changeset", async () => { @@ -56,6 
+64,7 @@ describe("Round Handlers", () => { expect(mockRepository.updateRound).toHaveBeenCalledWith( { id: "round-1", chainId: 1 as ChainId }, { matchAmount: 2000n, matchAmountInUsd: "2000" }, + undefined, ); }); @@ -77,6 +86,7 @@ describe("Round Handlers", () => { expect(mockRepository.updateRound).toHaveBeenCalledWith( { chainId: 1 as ChainId, strategyAddress: "0x123" as Address }, { matchAmount: 2000n, matchAmountInUsd: "2000" }, + undefined, ); }); @@ -97,6 +107,7 @@ describe("Round Handlers", () => { { chainId: 1 as ChainId, roundId: "round-1" }, 1000n, "1000", + undefined, ); }); @@ -115,6 +126,7 @@ describe("Round Handlers", () => { expect(mockRepository.incrementRoundTotalDistributed).toHaveBeenCalledWith( { chainId: 1 as ChainId, roundId: "round-1" }, 1000n, + undefined, ); }); @@ -135,6 +147,7 @@ describe("Round Handlers", () => { expect(mockRepository.insertPendingRoundRole).toHaveBeenCalledWith( changeset.args.pendingRoundRole, + undefined, ); }); @@ -148,7 +161,10 @@ describe("Round Handlers", () => { await handlers.DeletePendingRoundRoles(changeset); - expect(mockRepository.deleteManyPendingRoundRoles).toHaveBeenCalledWith([1, 2, 3]); + expect(mockRepository.deleteManyPendingRoundRoles).toHaveBeenCalledWith( + [1, 2, 3], + undefined, + ); }); it("handle InsertRoundRole changeset", async () => { @@ -167,7 +183,10 @@ describe("Round Handlers", () => { await handlers.InsertRoundRole(changeset); - expect(mockRepository.insertRoundRole).toHaveBeenCalledWith(changeset.args.roundRole); + expect(mockRepository.insertRoundRole).toHaveBeenCalledWith( + changeset.args.roundRole, + undefined, + ); }); it("handle DeleteAllRoundRolesByRoleAndAddress changeset", async () => { @@ -190,6 +209,7 @@ describe("Round Handlers", () => { changeset.args.roundRole.roundId, changeset.args.roundRole.role, changeset.args.roundRole.address, + undefined, ); }); }); diff --git a/packages/data-flow/test/unit/eventsFetcher.spec.ts b/packages/data-flow/test/unit/eventsFetcher.spec.ts 
index 586eedb..2563a55 100644 --- a/packages/data-flow/test/unit/eventsFetcher.spec.ts +++ b/packages/data-flow/test/unit/eventsFetcher.spec.ts @@ -54,22 +54,22 @@ describe("EventsFetcher", () => { const chainId = 1 as ChainId; const blockNumber = 1000; const logIndex = 0; - const limit = 100; indexerClientMock.getEventsAfterBlockNumberAndLogIndex.mockResolvedValue(mockEvents); - const result = await eventsFetcher.fetchEventsByBlockNumberAndLogIndex( + const result = await eventsFetcher.fetchEventsByBlockNumberAndLogIndex({ chainId, blockNumber, logIndex, - ); + }); - expect(indexerClientMock.getEventsAfterBlockNumberAndLogIndex).toHaveBeenCalledWith( + expect(indexerClientMock.getEventsAfterBlockNumberAndLogIndex).toHaveBeenCalledWith({ chainId, blockNumber, logIndex, - limit, - ); + limit: 100, + allowPartialLastBlock: true, + }); expect(result).toEqual(mockEvents); }); @@ -83,7 +83,11 @@ describe("EventsFetcher", () => { ); await expect( - eventsFetcher.fetchEventsByBlockNumberAndLogIndex(chainId, blockNumber, logIndex), + eventsFetcher.fetchEventsByBlockNumberAndLogIndex({ + chainId, + blockNumber, + logIndex, + }), ).rejects.toThrow("Network error"); }); }); diff --git a/packages/data-flow/test/unit/orchestrator.spec.ts b/packages/data-flow/test/unit/orchestrator.spec.ts index ec74912..5989b59 100644 --- a/packages/data-flow/test/unit/orchestrator.spec.ts +++ b/packages/data-flow/test/unit/orchestrator.spec.ts @@ -3,7 +3,6 @@ import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; import { EvmProvider } from "@grants-stack-indexer/chain-providers"; import { IIndexerClient } from "@grants-stack-indexer/indexer-client"; -import { UnsupportedStrategy } from "@grants-stack-indexer/processors"; import { Changeset, IApplicationPayoutRepository, @@ -11,18 +10,22 @@ import { IDonationRepository, IProjectRepository, IRoundRepository, + ITransactionManager, + RoundNotFound, } from "@grants-stack-indexer/repository"; +import { RoundNotFoundForId } from 
"@grants-stack-indexer/repository/dist/src/internal.js"; import { AlloEvent, ChainId, ContractName, ContractToEventName, EventParams, + ExponentialBackoff, Hex, ILogger, ProcessorEvent, + RateLimitError, StrategyEvent, - stringify, } from "@grants-stack-indexer/shared"; import { @@ -92,6 +95,7 @@ describe("Orchestrator", { sequential: true }, () => { const dependencies: CoreDependencies = { evmProvider: mockEvmProvider, + transactionManager: {} as unknown as ITransactionManager, projectRepository: {} as unknown as IProjectRepository, roundRepository: {} as unknown as IRoundRepository, applicationRepository: {} as unknown as IApplicationRepository, @@ -117,6 +121,7 @@ describe("Orchestrator", { sequential: true }, () => { }, mockFetchLimit, mockFetchDelay, + new ExponentialBackoff({ baseDelay: 10, maxAttempts: 3, factor: 2 }), logger, ); }); @@ -151,13 +156,9 @@ describe("Orchestrator", { sequential: true }, () => { .mockResolvedValueOnce(mockEvents) .mockResolvedValue([]); eventsProcessorSpy.mockResolvedValue([]); - vi.spyOn(orchestrator["dataLoader"], "applyChanges").mockResolvedValue({ - numFailed: 0, - errors: [], - changesets: [], - numExecuted: 1, - numSuccessful: 1, - }); + vi.spyOn(orchestrator["dataLoader"], "applyChanges").mockResolvedValue( + await Promise.resolve(), + ); vi.spyOn(mockEventsRegistry, "saveLastProcessedEvent").mockImplementation(() => { return Promise.resolve(); }); @@ -246,13 +247,9 @@ describe("Orchestrator", { sequential: true }, () => { return Promise.resolve(); }); - vi.spyOn(orchestrator["dataLoader"], "applyChanges").mockResolvedValue({ - numFailed: 0, - errors: [], - changesets: ["InsertProject", "InsertRoundRole", "DeletePendingRoundRoles"], - numExecuted: 3, - numSuccessful: 3, - }); + vi.spyOn(orchestrator["dataLoader"], "applyChanges").mockResolvedValue( + await Promise.resolve(), + ); runPromise = orchestrator.run(abortController.signal); @@ -374,13 +371,9 @@ describe("Orchestrator", { sequential: true }, () => { 
vi.spyOn(mockEventsRegistry, "saveLastProcessedEvent").mockImplementation(() => { return Promise.resolve(); }); - vi.spyOn(orchestrator["dataLoader"], "applyChanges").mockResolvedValue({ - numFailed: 0, - errors: [], - changesets: ["InsertApplication"], - numExecuted: 1, - numSuccessful: 1, - }); + vi.spyOn(orchestrator["dataLoader"], "applyChanges").mockResolvedValue( + await Promise.resolve(), + ); runPromise = orchestrator.run(abortController.signal); @@ -485,13 +478,9 @@ describe("Orchestrator", { sequential: true }, () => { .mockResolvedValue([]); eventsProcessorSpy.mockResolvedValue([]); - vi.spyOn(orchestrator["dataLoader"], "applyChanges").mockResolvedValue({ - numFailed: 0, - errors: [], - changesets: [], - numExecuted: 1, - numSuccessful: 1, - }); + vi.spyOn(orchestrator["dataLoader"], "applyChanges").mockResolvedValue( + await Promise.resolve(), + ); vi.spyOn(mockEventsRegistry, "saveLastProcessedEvent").mockImplementation(() => { return Promise.resolve(); }); @@ -544,13 +533,9 @@ describe("Orchestrator", { sequential: true }, () => { return Promise.resolve(); }); - vi.spyOn(orchestrator["dataLoader"], "applyChanges").mockResolvedValue({ - numFailed: 0, - errors: [], - changesets: ["InsertPendingRoundRole"], - numExecuted: 1, - numSuccessful: 1, - }); + vi.spyOn(orchestrator["dataLoader"], "applyChanges").mockResolvedValue( + await Promise.resolve(), + ); runPromise = orchestrator.run(abortController.signal); @@ -566,7 +551,33 @@ describe("Orchestrator", { sequential: true }, () => { }); describe("Error Handling", () => { - it.skip("retries error"); + it("retries retriable errors", async () => { + const retriableError = new RateLimitError({ className: "ExternalProvider" }, 10); + const mockEvent = createMockEvent("Allo", "Unknown" as unknown as AlloEvent, 1); + + const eventsProcessorSpy = vi.spyOn(orchestrator["eventsProcessor"], "processEvent"); + + vi.spyOn(mockIndexerClient, "getEventsAfterBlockNumberAndLogIndex") + 
.mockResolvedValueOnce([mockEvent]) + .mockResolvedValue([]); + vi.spyOn(orchestrator["dataLoader"], "applyChanges").mockResolvedValue( + await Promise.resolve(), + ); + vi.spyOn(mockEventsRegistry, "saveLastProcessedEvent").mockImplementation(() => { + return Promise.resolve(); + }); + eventsProcessorSpy.mockRejectedValueOnce(retriableError).mockResolvedValueOnce([]); + + runPromise = orchestrator.run(abortController.signal); + + await vi.waitFor(() => { + if (eventsProcessorSpy.mock.calls.length < 2) throw new Error("Not yet called"); + }); + + expect(eventsProcessorSpy).toHaveBeenCalledTimes(2); + expect(orchestrator["dataLoader"].applyChanges).toHaveBeenCalledTimes(1); + expect(mockEventsRegistry.saveLastProcessedEvent).toHaveBeenCalledTimes(1); + }); it("keeps running when there is an error", async () => { const eventsProcessorSpy = vi.spyOn(orchestrator["eventsProcessor"], "processEvent"); @@ -579,13 +590,9 @@ describe("Orchestrator", { sequential: true }, () => { .mockResolvedValue([]); eventsProcessorSpy.mockRejectedValueOnce(error).mockResolvedValueOnce([]); - vi.spyOn(orchestrator["dataLoader"], "applyChanges").mockResolvedValue({ - numFailed: 0, - errors: [], - changesets: [], - numExecuted: 1, - numSuccessful: 1, - }); + vi.spyOn(orchestrator["dataLoader"], "applyChanges").mockResolvedValue( + await Promise.resolve(), + ); vi.spyOn(mockEventsRegistry, "saveLastProcessedEvent").mockImplementation(() => { return Promise.resolve(); }); @@ -597,14 +604,13 @@ describe("Orchestrator", { sequential: true }, () => { if (eventsProcessorSpy.mock.calls.length < 2) throw new Error("Not yet called"); }, { - timeout: 1000, + timeout: 2000, }, ); expect(eventsProcessorSpy).toHaveBeenCalledTimes(2); expect(orchestrator["dataLoader"].applyChanges).toHaveBeenCalledTimes(1); expect(mockEventsRegistry.saveLastProcessedEvent).toHaveBeenCalledTimes(2); - expect(logger.error).toHaveBeenCalledTimes(1); expect(logger.error).toHaveBeenCalledWith(error, { className: 
Orchestrator.name, chainId, @@ -612,11 +618,10 @@ describe("Orchestrator", { sequential: true }, () => { }); }); - it.skip("logs error for InvalidEvent", async () => { + it("logs debug for InvalidEvent", async () => { const mockEvent = createMockEvent("Allo", "Unknown" as unknown as AlloEvent, 1); const error = new InvalidEvent(mockEvent); - const consoleSpy = vi.spyOn(console, "error"); const eventsProcessorSpy = vi.spyOn(orchestrator["eventsProcessor"], "processEvent"); vi.spyOn(mockIndexerClient, "getEventsAfterBlockNumberAndLogIndex") @@ -630,85 +635,124 @@ describe("Orchestrator", { sequential: true }, () => { if (eventsProcessorSpy.mock.calls.length < 1) throw new Error("Not yet called"); }); - expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining("InvalidEvent")); - expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining(stringify(mockEvent))); + expect(logger.debug).toHaveBeenNthCalledWith( + 1, + expect.stringContaining( + `Current event cannot be handled. ${error.name}: ${error.message}.`, + ), + { + className: Orchestrator.name, + chainId, + event: mockEvent, + }, + ); expect(orchestrator["dataLoader"].applyChanges).not.toHaveBeenCalled(); - expect(mockEventsRegistry.saveLastProcessedEvent).not.toHaveBeenCalled(); + expect(mockEventsRegistry.saveLastProcessedEvent).toHaveBeenCalled(); }); - it.skip("logs error for UnsupportedEvent", async () => { - const strategyId = - "0x6f9291df02b2664139cec5703c124e4ebce32879c74b6297faa1468aa5ff9ebf" as Hex; - const mockEvent = createMockEvent( - "Strategy", - "NotHandled" as unknown as StrategyEvent, - 1, - ); - const error = new UnsupportedStrategy(strategyId); + it("logs DataLoader errors", async () => { + const mockEvent = createMockEvent("Registry", "ProfileCreated", 1, undefined); + const changesets = [ + { + type: "InsertPendingRoundRole", + args: { chainId, roundId: "1", roundRole: {} }, + } as unknown as Changeset, + ]; - vi.spyOn(mockStrategyRegistry, "getStrategyId").mockResolvedValue({ - 
id: strategyId, - address: mockEvent.srcAddress, - chainId, - handled: true, - }); + const eventsProcessorSpy = vi.spyOn(orchestrator["eventsProcessor"], "processEvent"); + + vi.spyOn(mockEventsRegistry, "getLastProcessedEvent").mockResolvedValue(undefined); vi.spyOn(mockIndexerClient, "getEventsAfterBlockNumberAndLogIndex") .mockResolvedValueOnce([mockEvent]) .mockResolvedValue([]); - vi.spyOn(orchestrator["eventsProcessor"], "processEvent").mockRejectedValue(error); - const consoleSpy = vi.spyOn(console, "error"); + eventsProcessorSpy.mockResolvedValue(changesets); + + vi.spyOn(mockEventsRegistry, "saveLastProcessedEvent").mockImplementation(() => { + return Promise.resolve(); + }); + + const dataLoaderSpy = vi.spyOn(orchestrator["dataLoader"], "applyChanges"); + const error = new Error("Failed to apply changesets"); + dataLoaderSpy.mockRejectedValue(error); runPromise = orchestrator.run(abortController.signal); await vi.waitFor(() => { - if (consoleSpy.mock.calls.length < 1) throw new Error("Not yet called"); + if (eventsProcessorSpy.mock.calls.length < 1) throw new Error("Not yet called"); }); - expect(consoleSpy).toHaveBeenCalledTimes(1); - expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining(`Strategy ${strategyId} unsupported`), - ); - expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining(stringify(mockEvent))); - expect(orchestrator["dataLoader"].applyChanges).not.toHaveBeenCalled(); - expect(mockEventsRegistry.saveLastProcessedEvent).not.toHaveBeenCalled(); + runPromise = orchestrator.run(abortController.signal); + + await vi.waitFor(() => { + if (eventsProcessorSpy.mock.calls.length < 1) throw new Error("Not yet called"); + }); + + expect(logger.error).toHaveBeenCalledWith(error, { + event: mockEvent, + className: Orchestrator.name, + chainId, + }); + expect(dataLoaderSpy).toHaveBeenCalledTimes(1); }); - it.skip("logs DataLoader errors", async () => { - const mockEvent = createMockEvent("Allo", "PoolCreated", 1); - const 
mockChangesets: Changeset[] = [ - { type: "UpdateProject", args: { chainId, projectId: "1", project: {} } }, - ]; + it("ignores TimestampsUpdated errors if PoolCreated is in the same block", async () => { + const strategyAddress = "0x123" as Address; + const strategyId = + "0x6f9291df02b2664139cec5703c124e4ebce32879c74b6297faa1468aa5ff9ebf" as Hex; + const timestampsUpdatedEvent = createMockEvent("Strategy", "TimestampsUpdated", 1); + timestampsUpdatedEvent.logIndex = 0; + const timestampUpdatedEvent2 = createMockEvent( + "Strategy", + "TimestampsUpdatedWithRegistrationAndAllocation", + 1, + ); + timestampUpdatedEvent2.logIndex = 1; + const poolCreatedEvent = createMockEvent("Allo", "PoolCreated", 1, { + strategy: strategyAddress, + poolId: "1", + profileId: "0x123", + token: "0x123", + amount: "100", + metadata: ["1", "1"], + }); + poolCreatedEvent.logIndex = 3; - const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {}); - const dataLoaderSpy = vi.spyOn(orchestrator["dataLoader"], "applyChanges"); + const eventsProcessorSpy = vi.spyOn(orchestrator["eventsProcessor"], "processEvent"); + vi.spyOn(mockEventsRegistry, "getLastProcessedEvent").mockResolvedValue(undefined); vi.spyOn(mockIndexerClient, "getEventsAfterBlockNumberAndLogIndex") - .mockResolvedValueOnce([mockEvent]) + .mockResolvedValueOnce([ + timestampsUpdatedEvent, + timestampUpdatedEvent2, + poolCreatedEvent, + ]) .mockResolvedValue([]); - vi.spyOn(orchestrator["eventsProcessor"], "processEvent").mockResolvedValue( - mockChangesets, - ); - dataLoaderSpy.mockResolvedValue({ - numFailed: 1, - errors: ["Failed to update project"], - changesets: ["UpdateProject"], - numExecuted: 1, - numSuccessful: 0, + + vi.spyOn(mockStrategyRegistry, "getStrategyId").mockResolvedValue(undefined); + vi.spyOn(mockEvmProvider, "readContract").mockResolvedValue(strategyId); + + eventsProcessorSpy + .mockRejectedValueOnce(new RoundNotFound(chainId, strategyAddress)) + .mockRejectedValueOnce(new 
RoundNotFoundForId(chainId, "1")) + .mockResolvedValueOnce([]); + + vi.spyOn(mockEventsRegistry, "saveLastProcessedEvent").mockImplementation(() => { + return Promise.resolve(); }); + vi.spyOn(orchestrator["dataLoader"], "applyChanges").mockResolvedValue( + await Promise.resolve(), + ); + runPromise = orchestrator.run(abortController.signal); await vi.waitFor(() => { - if (dataLoaderSpy.mock.calls.length < 1) throw new Error("Not yet called"); + if (eventsProcessorSpy.mock.calls.length < 3) throw new Error("Not yet called"); }); - expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining("Failed to apply changesets"), - ); - expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining(stringify(mockEvent))); - expect(dataLoaderSpy).toHaveBeenCalledTimes(1); - expect(mockEventsRegistry.saveLastProcessedEvent).not.toHaveBeenCalled(); + expect(orchestrator["eventsProcessor"].processEvent).toHaveBeenCalledTimes(3); + expect(logger.error).not.toHaveBeenCalled(); }); }); }); diff --git a/packages/data-flow/test/unit/retroactiveProcessor.spec.ts b/packages/data-flow/test/unit/retroactiveProcessor.spec.ts index cb52126..e9353ef 100644 --- a/packages/data-flow/test/unit/retroactiveProcessor.spec.ts +++ b/packages/data-flow/test/unit/retroactiveProcessor.spec.ts @@ -9,6 +9,7 @@ import { IProjectRepository, IRoundRepository, IStrategyProcessingCheckpointRepository, + ITransactionManager, Strategy, } from "@grants-stack-indexer/repository"; import { @@ -16,10 +17,12 @@ import { ContractToEventName, DeepPartial, EventParams, + ExponentialBackoff, Hex, ILogger, mergeDeep, ProcessorEvent, + RateLimitError, } from "@grants-stack-indexer/shared"; import { @@ -138,6 +141,7 @@ describe("RetroactiveProcessor", () => { applicationRepository: {} as IApplicationRepository, donationRepository: {} as IDonationRepository, applicationPayoutRepository: {} as IApplicationPayoutRepository, + transactionManager: {} as ITransactionManager, pricingProvider: { getTokenPrice: vi.fn(), }, 
@@ -156,6 +160,7 @@ describe("RetroactiveProcessor", () => { checkpointRepository: mockCheckpointRepository, }, mockFetchLimit, + new ExponentialBackoff({ baseDelay: 100, maxAttempts: 3, factor: 2 }), mockLogger, ); @@ -216,13 +221,7 @@ describe("RetroactiveProcessor", () => { .mockResolvedValueOnce([mockEvent]) .mockResolvedValue([]); vi.spyOn(mockEventsProcessor, "processEvent").mockResolvedValue([]); - vi.spyOn(mockDataLoader, "applyChanges").mockResolvedValue({ - numFailed: 0, - errors: [], - changesets: [], - numExecuted: 1, - numSuccessful: 1, - }); + vi.spyOn(mockDataLoader, "applyChanges").mockResolvedValue(await Promise.resolve()); vi.spyOn(mockStrategyRegistry, "saveStrategyId").mockResolvedValue(); await processor.processRetroactiveStrategies(); @@ -271,13 +270,7 @@ describe("RetroactiveProcessor", () => { return []; }); vi.spyOn(mockEventsProcessor, "processEvent").mockResolvedValue([]); - vi.spyOn(mockDataLoader, "applyChanges").mockResolvedValue({ - numFailed: 0, - errors: [], - changesets: [], - numExecuted: 1, - numSuccessful: 1, - }); + vi.spyOn(mockDataLoader, "applyChanges").mockResolvedValue(await Promise.resolve()); vi.spyOn(mockStrategyRegistry, "saveStrategyId").mockResolvedValue(); await processor.processRetroactiveStrategies(); @@ -325,13 +318,7 @@ describe("RetroactiveProcessor", () => { .mockResolvedValueOnce([mockEvent]) .mockResolvedValue([]); vi.spyOn(mockEventsProcessor, "processEvent").mockResolvedValue([]); - vi.spyOn(mockDataLoader, "applyChanges").mockResolvedValue({ - numFailed: 0, - errors: [], - changesets: [], - numExecuted: 1, - numSuccessful: 1, - }); + vi.spyOn(mockDataLoader, "applyChanges").mockResolvedValue(await Promise.resolve()); vi.spyOn(mockStrategyRegistry, "saveStrategyId").mockResolvedValue(); await processor.processRetroactiveStrategies(); @@ -372,13 +359,7 @@ describe("RetroactiveProcessor", () => { vi.spyOn(processor["eventsFetcher"], "fetchEvents").mockResolvedValueOnce([mockEvent]); 
vi.spyOn(mockEventsProcessor, "processEvent").mockResolvedValue([]); - vi.spyOn(mockDataLoader, "applyChanges").mockResolvedValue({ - numFailed: 0, - errors: [], - changesets: [], - numExecuted: 1, - numSuccessful: 1, - }); + vi.spyOn(mockDataLoader, "applyChanges").mockResolvedValue(await Promise.resolve()); vi.spyOn(mockStrategyRegistry, "saveStrategyId").mockResolvedValue(); await processor.processRetroactiveStrategies(); @@ -417,13 +398,7 @@ describe("RetroactiveProcessor", () => { .mockResolvedValue([]); vi.spyOn(mockEventsProcessor, "processEvent").mockResolvedValue([]); - vi.spyOn(mockDataLoader, "applyChanges").mockResolvedValue({ - numFailed: 0, - errors: [], - changesets: [], - numExecuted: 1, - numSuccessful: 1, - }); + vi.spyOn(mockDataLoader, "applyChanges").mockResolvedValue(await Promise.resolve()); vi.spyOn(mockStrategyRegistry, "saveStrategyId").mockResolvedValue(); await processor.processRetroactiveStrategies(); @@ -464,13 +439,7 @@ describe("RetroactiveProcessor", () => { .mockRejectedValueOnce(new InvalidEvent(mockEvent1)) .mockResolvedValueOnce([]); - vi.spyOn(mockDataLoader, "applyChanges").mockResolvedValue({ - numFailed: 0, - errors: [], - changesets: [], - numExecuted: 1, - numSuccessful: 1, - }); + vi.spyOn(mockDataLoader, "applyChanges").mockResolvedValue(await Promise.resolve()); await processor.processRetroactiveStrategies(); @@ -523,6 +492,41 @@ describe("RetroactiveProcessor", () => { }, ); }); + + it("retries on retriable errors", async () => { + const retriableError = new RateLimitError({ className: "ExternalProvider" }, 100); + vi.spyOn(mockEventsProcessor, "processEvent").mockRejectedValueOnce(retriableError); + + const mockEvent = createMockEvent(eventName, defaultParams, existentStrategyId, { + blockNumber: 50, + logIndex: 0, + }); + + vi.spyOn(mockStrategyRegistry, "getStrategies").mockResolvedValue( + mockValidStrategies, + ); + vi.spyOn(mockEventsRegistry, "getLastProcessedEvent").mockResolvedValue({ + blockNumber: 100, + 
logIndex: 1, + chainId, + blockTimestamp: 1234567890, + }); + + vi.spyOn(mockEventsFetcher, "fetchEvents") + .mockResolvedValueOnce([mockEvent]) + .mockResolvedValue([]); + + const processEventSpy = vi.spyOn(mockEventsProcessor, "processEvent"); + processEventSpy.mockRejectedValueOnce(retriableError).mockResolvedValue([]); + + vi.spyOn(mockDataLoader, "applyChanges").mockResolvedValue(await Promise.resolve()); + + await processor.processRetroactiveStrategies(); + + expect(processEventSpy).toHaveBeenCalledTimes(2); // 1st attempt failed, 2nd attempt succeeded + expect(mockLogger.error).not.toHaveBeenCalled(); + expect(mockDataLoader.applyChanges).toHaveBeenCalledTimes(1); + }); }); }); }); diff --git a/packages/indexer-client/src/exceptions/indexerClientError.exception.ts b/packages/indexer-client/src/exceptions/indexerClientError.exception.ts index 9a9a6a9..281d410 100644 --- a/packages/indexer-client/src/exceptions/indexerClientError.exception.ts +++ b/packages/indexer-client/src/exceptions/indexerClientError.exception.ts @@ -1,6 +1,7 @@ -export class IndexerClientError extends Error { - constructor(message: string) { - super(`Indexer client error - ${message}`); - this.name = "IndexerClientError"; +import { ErrorContext, NonRetriableError } from "@grants-stack-indexer/shared"; + +export class IndexerClientError extends NonRetriableError { + constructor(message: string, context?: ErrorContext, error?: Error) { + super(`Indexer client error - ${message}`, context, error); } } diff --git a/packages/indexer-client/src/exceptions/invalidIndexerResponse.exception.ts b/packages/indexer-client/src/exceptions/invalidIndexerResponse.exception.ts index 01b38ee..cc23497 100644 --- a/packages/indexer-client/src/exceptions/invalidIndexerResponse.exception.ts +++ b/packages/indexer-client/src/exceptions/invalidIndexerResponse.exception.ts @@ -1,6 +1,7 @@ -export class InvalidIndexerResponse extends Error { +import { NonRetriableError } from "@grants-stack-indexer/shared"; + 
+export class InvalidIndexerResponse extends NonRetriableError { constructor(response: string) { super(`Indexer response is invalid - ${response}`); - this.name = "InvalidIndexerResponse"; } } diff --git a/packages/indexer-client/src/external.ts b/packages/indexer-client/src/external.ts index b9a9716..27b9591 100644 --- a/packages/indexer-client/src/external.ts +++ b/packages/indexer-client/src/external.ts @@ -2,4 +2,4 @@ export type { IIndexerClient } from "./internal.js"; export { EnvioIndexerClient } from "./internal.js"; -export type { GetEventsFilters } from "./internal.js"; +export type { GetEventsFilters, GetEventsAfterBlockNumberAndLogIndexParams } from "./internal.js"; diff --git a/packages/indexer-client/src/interfaces/indexerClient.ts b/packages/indexer-client/src/interfaces/indexerClient.ts index 7051923..e2d7edc 100644 --- a/packages/indexer-client/src/interfaces/indexerClient.ts +++ b/packages/indexer-client/src/interfaces/indexerClient.ts @@ -2,6 +2,14 @@ import { AnyIndexerFetchedEvent, ChainId } from "@grants-stack-indexer/shared"; import { GetEventsFilters } from "../internal.js"; +export type GetEventsAfterBlockNumberAndLogIndexParams = { + chainId: ChainId; + blockNumber: number; + logIndex: number; + limit?: number; + allowPartialLastBlock?: boolean; +}; + /** * Interface for the indexer client */ @@ -9,15 +17,13 @@ export interface IIndexerClient { /** * Get the events by block number and log index from the indexer service * @param chainId Id of the chain - * @param fromBlock Block number to start fetching events from + * @param blockNumber Block number to start fetching events from * @param logIndex Log index in the block * @param limit Limit of events to fetch + * @param allowPartialLastBlock Whether last block is allowed to be partially fetched */ getEventsAfterBlockNumberAndLogIndex( - chainId: ChainId, - fromBlock: number, - logIndex: number, - limit?: number, + params: GetEventsAfterBlockNumberAndLogIndexParams, ): Promise; /** diff 
--git a/packages/indexer-client/src/providers/envioIndexerClient.ts b/packages/indexer-client/src/providers/envioIndexerClient.ts index cff4cd2..5156f93 100644 --- a/packages/indexer-client/src/providers/envioIndexerClient.ts +++ b/packages/indexer-client/src/providers/envioIndexerClient.ts @@ -3,7 +3,11 @@ import { gql, GraphQLClient } from "graphql-request"; import { AnyIndexerFetchedEvent, ChainId, stringify } from "@grants-stack-indexer/shared"; import { IndexerClientError, InvalidIndexerResponse } from "../exceptions/index.js"; -import { GetEventsFilters, IIndexerClient } from "../internal.js"; +import { + GetEventsAfterBlockNumberAndLogIndexParams, + GetEventsFilters, + IIndexerClient, +} from "../internal.js"; /** * Indexer client for the Envio indexer service @@ -16,12 +20,13 @@ export class EnvioIndexerClient implements IIndexerClient { this.client.setHeader("x-hasura-admin-secret", secret); } /* @inheritdoc */ - public async getEventsAfterBlockNumberAndLogIndex( - chainId: ChainId, - blockNumber: number, - logIndex: number, - limit: number = 100, - ): Promise { + public async getEventsAfterBlockNumberAndLogIndex({ + chainId, + blockNumber, + logIndex, + limit = 100, + allowPartialLastBlock = true, + }: GetEventsAfterBlockNumberAndLogIndexParams): Promise { try { const response = (await this.client.request( gql` @@ -61,16 +66,82 @@ export class EnvioIndexerClient implements IIndexerClient { `, { chainId, blockNumber, logIndex, limit }, )) as { raw_events: AnyIndexerFetchedEvent[] }; - if (response?.raw_events) { - return response.raw_events; + const events = response?.raw_events; + + if (events) { + if (allowPartialLastBlock || events.length === 0) { + return events; + } else { + const lastBlockNumber = events[events.length - 1]!.blockNumber; + const countLastBlockEvents = events.filter( + (e) => e.blockNumber === lastBlockNumber, + ).length; + const { lastBlockEvents } = await this.getTotalEventsInBlock( + chainId, + lastBlockNumber, + ); + + // If the 
last event's block has more events than what we fetched, + // we need to exclude that block's events + return lastBlockEvents.aggregate.count > countLastBlockEvents + ? events.filter((e) => e.blockNumber < lastBlockNumber) + : events; + } } else { throw new InvalidIndexerResponse(stringify(response)); } } catch (error) { - if (error instanceof InvalidIndexerResponse) { - throw error; - } - throw new IndexerClientError(stringify(error, Object.getOwnPropertyNames(error))); + throw this.handleError(error, "getEventsAfterBlockNumberAndLogIndex"); + } + } + + /** + * Get the total number of events in a block + * @param chainId - The chain ID + * @param blockNumber - The block number + * @returns The total number of events in the block + */ + private async getTotalEventsInBlock( + chainId: ChainId, + blockNumber: number, + ): Promise<{ + lastBlockEvents: { + aggregate: { count: number }; + nodes: { block_number: number }[]; + }; + }> { + try { + const response = (await this.client.request( + gql` + query getTotalEventsInBlock($chainId: Int!, $blockNumber: Int!) 
{ + last_block_events: raw_events_aggregate( + where: { + chain_id: { _eq: $chainId } + block_number: { _eq: $blockNumber } + } + ) { + aggregate { + count + } + nodes { + block_number + } + } + } + `, + { chainId, blockNumber }, + )) as { + last_block_events: { + aggregate: { count: number }; + nodes: { block_number: number }[]; + }; + }; + + return { + lastBlockEvents: response.last_block_events, + }; + } catch (error) { + throw this.handleError(error, "getTotalEventsInBlock"); } } @@ -167,10 +238,17 @@ export class EnvioIndexerClient implements IIndexerClient { throw new InvalidIndexerResponse(stringify(response)); } } catch (error) { - if (error instanceof InvalidIndexerResponse) { - throw error; - } - throw new IndexerClientError(stringify(error, Object.getOwnPropertyNames(error))); + throw this.handleError(error, "getEvents"); + } + } + + private handleError(error: unknown, methodName: string): Error { + if (error instanceof InvalidIndexerResponse) { + return error; } + return new IndexerClientError(stringify(error, Object.getOwnPropertyNames(error)), { + className: EnvioIndexerClient.name, + methodName, + }); } } diff --git a/packages/indexer-client/test/unit/envioIndexerClient.spec.ts b/packages/indexer-client/test/unit/envioIndexerClient.spec.ts index 10cd34a..3466851 100644 --- a/packages/indexer-client/test/unit/envioIndexerClient.spec.ts +++ b/packages/indexer-client/test/unit/envioIndexerClient.spec.ts @@ -148,12 +148,12 @@ describe("EnvioIndexerClient", () => { }); it("returns events after the specified block number", async () => { - const result = await envioIndexerClient.getEventsAfterBlockNumberAndLogIndex( - 1 as ChainId, - 100, - 0, - 100, - ); + const result = await envioIndexerClient.getEventsAfterBlockNumberAndLogIndex({ + chainId: 1 as ChainId, + blockNumber: 100, + logIndex: 0, + limit: 100, + }); expect(result).toHaveLength(3); expect(result).toEqual( @@ -166,12 +166,12 @@ describe("EnvioIndexerClient", () => { }); it("returns only events 
after the specified log index within the same block", async () => { - const result = await envioIndexerClient.getEventsAfterBlockNumberAndLogIndex( - 1 as ChainId, - 100, - 2, - 100, - ); + const result = await envioIndexerClient.getEventsAfterBlockNumberAndLogIndex({ + chainId: 1 as ChainId, + blockNumber: 100, + logIndex: 2, + limit: 100, + }); expect(result).toHaveLength(2); expect(result).toEqual( @@ -183,23 +183,23 @@ describe("EnvioIndexerClient", () => { }); it("respects the limit parameter", async () => { - const result = await envioIndexerClient.getEventsAfterBlockNumberAndLogIndex( - 1 as ChainId, - 100, - 0, - 2, - ); + const result = await envioIndexerClient.getEventsAfterBlockNumberAndLogIndex({ + chainId: 1 as ChainId, + blockNumber: 100, + logIndex: 0, + limit: 2, + }); expect(result).toHaveLength(2); }); it("returns empty array when no matching events found", async () => { - const result = await envioIndexerClient.getEventsAfterBlockNumberAndLogIndex( - 1 as ChainId, - 102, - 0, - 100, - ); + const result = await envioIndexerClient.getEventsAfterBlockNumberAndLogIndex({ + chainId: 1 as ChainId, + blockNumber: 102, + logIndex: 0, + limit: 100, + }); expect(result).toHaveLength(0); }); @@ -215,7 +215,12 @@ describe("EnvioIndexerClient", () => { graphqlClient.request.mockResolvedValue(mockedResponse); await expect( - envioIndexerClient.getEventsAfterBlockNumberAndLogIndex(1 as ChainId, 12345, 0), + envioIndexerClient.getEventsAfterBlockNumberAndLogIndex({ + chainId: 1 as ChainId, + blockNumber: 12345, + logIndex: 0, + limit: 100, + }), ).rejects.toThrow(InvalidIndexerResponse); }); @@ -224,7 +229,12 @@ describe("EnvioIndexerClient", () => { graphqlClient.request.mockRejectedValue(error); await expect( - envioIndexerClient.getEventsAfterBlockNumberAndLogIndex(1 as ChainId, 12345, 0), + envioIndexerClient.getEventsAfterBlockNumberAndLogIndex({ + chainId: 1 as ChainId, + blockNumber: 12345, + logIndex: 0, + limit: 100, + }), 
).rejects.toThrow(IndexerClientError); }); @@ -237,11 +247,11 @@ describe("EnvioIndexerClient", () => { graphqlClient.request.mockResolvedValue(mockedResponse); // Call the method without the limit argument - const result = await envioIndexerClient.getEventsAfterBlockNumberAndLogIndex( - 1 as ChainId, - 12345, - 0, - ); + const result = await envioIndexerClient.getEventsAfterBlockNumberAndLogIndex({ + chainId: 1 as ChainId, + blockNumber: 12345, + logIndex: 0, + }); expect(result).toEqual(testEvents); expect(graphqlClient.request).toHaveBeenCalledWith( @@ -256,13 +266,69 @@ describe("EnvioIndexerClient", () => { }); it("returns an empty array when no events are found", async () => { - const result = await envioIndexerClient.getEventsAfterBlockNumberAndLogIndex( - 1 as ChainId, - 10_000, - 0, - ); + const result = await envioIndexerClient.getEventsAfterBlockNumberAndLogIndex({ + chainId: 1 as ChainId, + blockNumber: 10_000, + logIndex: 0, + }); expect(result).toEqual([]); }); + + it("discards events from the last block if allowPartialLastBlock is false", async () => { + // Mock the request implementation to simulate database querying + graphqlClient.request + .mockImplementationOnce( + async ( + _document: RequestDocument | RequestOptions, + ...args: object[] + ) => { + const variables = args[0] as { + chainId: ChainId; + blockNumber: number; + logIndex: number; + limit: number; + }; + const { chainId, blockNumber, logIndex, limit } = variables; + + const filteredEvents = testEvents + .filter((event) => { + // Match chainId + if (event.chainId !== chainId) return false; + + // Implement the _or condition from the GraphQL query + return ( + event.blockNumber > blockNumber || + (event.blockNumber === blockNumber && event.logIndex > logIndex) + ); + }) + .slice(0, limit); // Apply limit + + return { raw_events: filteredEvents }; + }, + ) + .mockResolvedValueOnce({ + last_block_events: { + aggregate: { count: 5 }, + nodes: [], + }, + }); + + const result = await 
envioIndexerClient.getEventsAfterBlockNumberAndLogIndex({ + chainId: 1 as ChainId, + blockNumber: 100, + logIndex: 0, + limit: 3, + allowPartialLastBlock: false, + }); + + expect(result).toHaveLength(2); + expect(result).toEqual(testEvents.slice(0, 2)); + expect(graphqlClient.request).toHaveBeenCalledTimes(2); + expect(graphqlClient.request).toHaveBeenNthCalledWith(2, expect.any(String), { + chainId: 1, + blockNumber: 101, + }); + }); }); describe("getEvents (old test cases)", () => { diff --git a/packages/metadata/package.json b/packages/metadata/package.json index 57b0189..b5156e3 100644 --- a/packages/metadata/package.json +++ b/packages/metadata/package.json @@ -28,6 +28,7 @@ "test:cov": "vitest run --config vitest.config.ts --coverage" }, "dependencies": { + "@grants-stack-indexer/repository": "workspace:*", "@grants-stack-indexer/shared": "workspace:*", "axios": "1.7.7", "zod": "3.23.8" diff --git a/packages/metadata/src/exceptions/emptyGateways.exception.ts b/packages/metadata/src/exceptions/emptyGateways.exception.ts index d70b599..e30b10c 100644 --- a/packages/metadata/src/exceptions/emptyGateways.exception.ts +++ b/packages/metadata/src/exceptions/emptyGateways.exception.ts @@ -1,4 +1,6 @@ -export class EmptyGatewaysUrlsException extends Error { +import { NonRetriableError } from "@grants-stack-indexer/shared"; + +export class EmptyGatewaysUrlsException extends NonRetriableError { constructor() { super("Gateways array cannot be empty"); } diff --git a/packages/metadata/src/exceptions/invalidCid.exception.ts b/packages/metadata/src/exceptions/invalidCid.exception.ts index 437cd25..ba80776 100644 --- a/packages/metadata/src/exceptions/invalidCid.exception.ts +++ b/packages/metadata/src/exceptions/invalidCid.exception.ts @@ -1,6 +1,7 @@ -export class InvalidCidException extends Error { +import { NonRetriableError } from "@grants-stack-indexer/shared"; + +export class InvalidCidException extends NonRetriableError { constructor(cid: string) { super(`Invalid 
CID: ${cid}`); - this.name = "InvalidCidException"; } } diff --git a/packages/metadata/src/exceptions/invalidContent.exception.ts b/packages/metadata/src/exceptions/invalidContent.exception.ts index 8da5363..202d305 100644 --- a/packages/metadata/src/exceptions/invalidContent.exception.ts +++ b/packages/metadata/src/exceptions/invalidContent.exception.ts @@ -1,4 +1,6 @@ -export class InvalidContentException extends Error { +import { NonRetriableError } from "@grants-stack-indexer/shared"; + +export class InvalidContentException extends NonRetriableError { constructor(message: string) { super(message); } diff --git a/packages/metadata/src/external.ts b/packages/metadata/src/external.ts index 78fd970..6a7bccb 100644 --- a/packages/metadata/src/external.ts +++ b/packages/metadata/src/external.ts @@ -1,4 +1,4 @@ -export { IpfsProvider } from "./internal.js"; +export { IpfsProvider, CachingMetadataProvider } from "./internal.js"; export { EmptyGatewaysUrlsException, diff --git a/packages/metadata/src/providers/cachingProxy.provider.ts b/packages/metadata/src/providers/cachingProxy.provider.ts new file mode 100644 index 0000000..0824103 --- /dev/null +++ b/packages/metadata/src/providers/cachingProxy.provider.ts @@ -0,0 +1,53 @@ +import { z } from "zod"; + +import { ICache } from "@grants-stack-indexer/repository"; +import { ILogger } from "@grants-stack-indexer/shared"; + +import { IMetadataProvider } from "../internal.js"; + +/** + * A metadata provider that caches metadata lookups from the underlying provider. + * When a metadata is requested, it first checks the cache. If found, returns the cached metadata. + * If not found in cache, fetches from the underlying provider and caches the result before returning. + * Cache failures (both reads and writes) are logged but do not prevent the provider from functioning. 
+ */ +export class CachingMetadataProvider implements IMetadataProvider { + constructor( + private readonly provider: IMetadataProvider, + private readonly cache: ICache, + private readonly logger: ILogger, + ) {} + + /** @inheritdoc */ + async getMetadata( + ipfsCid: string, + validateContent?: z.ZodSchema, + ): Promise { + let cachedMetadata: T | undefined = undefined; + try { + cachedMetadata = (await this.cache.get(ipfsCid)) as T | undefined; + } catch (error) { + this.logger.debug(`Failed to get cached metadata for IPFS CID ${ipfsCid}`, { + error, + }); + } + + if (cachedMetadata) { + return cachedMetadata; + } + + const metadata = await this.provider.getMetadata(ipfsCid, validateContent); + + if (metadata) { + try { + await this.cache.set(ipfsCid, metadata); + } catch (error) { + this.logger.debug(`Failed to cache metadata for IPFS CID ${ipfsCid}`, { + error, + }); + } + } + + return metadata; + } +} diff --git a/packages/metadata/src/providers/index.ts b/packages/metadata/src/providers/index.ts index f575273..8831827 100644 --- a/packages/metadata/src/providers/index.ts +++ b/packages/metadata/src/providers/index.ts @@ -1 +1,2 @@ export * from "./ipfs.provider.js"; +export * from "./cachingProxy.provider.js"; diff --git a/packages/metadata/test/providers/cachingProxy.provider.spec.ts b/packages/metadata/test/providers/cachingProxy.provider.spec.ts new file mode 100644 index 0000000..cee7619 --- /dev/null +++ b/packages/metadata/test/providers/cachingProxy.provider.spec.ts @@ -0,0 +1,96 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { z } from "zod"; + +import { ICache } from "@grants-stack-indexer/repository"; +import { ILogger } from "@grants-stack-indexer/shared"; + +import { IMetadataProvider } from "../../src/internal.js"; +import { CachingMetadataProvider } from "../../src/providers/cachingProxy.provider.js"; + +describe("CachingMetadataProvider", () => { + const mockProvider = { + getMetadata: vi.fn(), + } as unknown as 
IMetadataProvider; + + const mockCache = { + get: vi.fn(), + set: vi.fn(), + } as unknown as ICache; + + const mockLogger = { + debug: vi.fn(), + } as unknown as ILogger; + + let provider: CachingMetadataProvider; + + beforeEach(() => { + vi.clearAllMocks(); + provider = new CachingMetadataProvider(mockProvider, mockCache, mockLogger); + }); + + describe("getMetadata", () => { + const testCid = "QmTest123"; + const testData = { foo: "bar" }; + const testSchema = z.object({ foo: z.string() }); + + it("returns cached metadata when available", async () => { + vi.spyOn(mockCache, "get").mockResolvedValue(testData); + + const result = await provider.getMetadata(testCid, testSchema); + + expect(result).toEqual(testData); + expect(mockCache.get).toHaveBeenCalledWith(testCid); + expect(mockProvider.getMetadata).not.toHaveBeenCalled(); + }); + + it("fetches and caches metadata when cache misses", async () => { + vi.spyOn(mockCache, "get").mockResolvedValue(undefined); + vi.spyOn(mockProvider, "getMetadata").mockResolvedValue(testData); + + const result = await provider.getMetadata(testCid, testSchema); + + expect(result).toEqual(testData); + expect(mockCache.get).toHaveBeenCalledWith(testCid); + expect(mockProvider.getMetadata).toHaveBeenCalledWith(testCid, testSchema); + expect(mockCache.set).toHaveBeenCalledWith(testCid, testData); + }); + + it("handles cache read failures gracefully", async () => { + vi.spyOn(mockCache, "get").mockRejectedValue(new Error("Cache read error")); + vi.spyOn(mockProvider, "getMetadata").mockResolvedValue(testData); + + const result = await provider.getMetadata(testCid, testSchema); + + expect(result).toEqual(testData); + expect(mockLogger.debug).toHaveBeenCalledWith( + `Failed to get cached metadata for IPFS CID ${testCid}`, + expect.any(Object), + ); + expect(mockProvider.getMetadata).toHaveBeenCalledWith(testCid, testSchema); + }); + + it("handles cache write failures gracefully", async () => { + vi.spyOn(mockCache, 
"get").mockResolvedValue(undefined); + vi.spyOn(mockCache, "set").mockRejectedValue(new Error("Cache write error")); + vi.spyOn(mockProvider, "getMetadata").mockResolvedValue(testData); + + const result = await provider.getMetadata(testCid, testSchema); + + expect(result).toEqual(testData); + expect(mockLogger.debug).toHaveBeenCalledWith( + `Failed to cache metadata for IPFS CID ${testCid}`, + expect.any(Object), + ); + }); + + it("returns undefined when metadata is not found", async () => { + vi.spyOn(mockCache, "get").mockResolvedValue(undefined); + vi.spyOn(mockProvider, "getMetadata").mockResolvedValue(undefined); + + const result = await provider.getMetadata(testCid, testSchema); + + expect(result).toBeUndefined(); + expect(mockCache.set).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/packages/pricing/package.json b/packages/pricing/package.json index 69e12af..8a581d5 100644 --- a/packages/pricing/package.json +++ b/packages/pricing/package.json @@ -28,6 +28,7 @@ "test:cov": "vitest run --config vitest.config.ts --coverage" }, "dependencies": { + "@grants-stack-indexer/repository": "workspace:*", "@grants-stack-indexer/shared": "workspace:*", "axios": "1.7.7" }, diff --git a/packages/pricing/src/exceptions/index.ts b/packages/pricing/src/exceptions/index.ts index c4c30df..6a39eb9 100644 --- a/packages/pricing/src/exceptions/index.ts +++ b/packages/pricing/src/exceptions/index.ts @@ -1,4 +1,3 @@ -export * from "./network.exception.js"; export * from "./unsupportedChain.exception.js"; export * from "./unknownPricing.exception.js"; export * from "./unsupportedToken.exception.js"; diff --git a/packages/pricing/src/exceptions/network.exception.ts b/packages/pricing/src/exceptions/network.exception.ts deleted file mode 100644 index b952e1a..0000000 --- a/packages/pricing/src/exceptions/network.exception.ts +++ /dev/null @@ -1,8 +0,0 @@ -export class NetworkException extends Error { - constructor( - message: string, - public readonly status: number, - ) { - 
super(message); - } -} diff --git a/packages/pricing/src/exceptions/unknownPricing.exception.ts b/packages/pricing/src/exceptions/unknownPricing.exception.ts index 5c77b95..c284f72 100644 --- a/packages/pricing/src/exceptions/unknownPricing.exception.ts +++ b/packages/pricing/src/exceptions/unknownPricing.exception.ts @@ -1,6 +1,7 @@ -export class UnknownPricingException extends Error { - constructor(message: string, stack?: string) { - super(message); - this.stack = stack; +import { ErrorContext, RetriableError } from "@grants-stack-indexer/shared"; + +export class UnknownPricingException extends RetriableError { + constructor(message: string, context: ErrorContext) { + super(message, context); } } diff --git a/packages/pricing/src/exceptions/unsupportedToken.exception.ts b/packages/pricing/src/exceptions/unsupportedToken.exception.ts index e071acb..52ee0e2 100644 --- a/packages/pricing/src/exceptions/unsupportedToken.exception.ts +++ b/packages/pricing/src/exceptions/unsupportedToken.exception.ts @@ -1,7 +1,7 @@ -import { TokenCode } from "@grants-stack-indexer/shared"; +import { ErrorContext, NonRetriableError, TokenCode } from "@grants-stack-indexer/shared"; -export class UnsupportedToken extends Error { - constructor(tokenCode: TokenCode) { - super(`Unsupported token: ${tokenCode}`); +export class UnsupportedToken extends NonRetriableError { + constructor(tokenCode: TokenCode, context: ErrorContext) { + super(`Unsupported token: ${tokenCode}`, context); } } diff --git a/packages/pricing/src/external.ts b/packages/pricing/src/external.ts index abbfedd..f1ee939 100644 --- a/packages/pricing/src/external.ts +++ b/packages/pricing/src/external.ts @@ -1,6 +1,6 @@ export type { TokenPrice, IPricingProvider } from "./internal.js"; -export { CoingeckoProvider, DummyPricingProvider } from "./internal.js"; +export { CoingeckoProvider, DummyPricingProvider, CachingPricingProvider } from "./internal.js"; export { PricingProviderFactory } from "./internal.js"; export type 
{ @@ -12,7 +12,6 @@ export type { export { UnsupportedChainException, - NetworkException, UnknownPricingException, UnsupportedToken, InvalidPricingSource, diff --git a/packages/pricing/src/providers/cachingProxy.provider.ts b/packages/pricing/src/providers/cachingProxy.provider.ts new file mode 100644 index 0000000..6aa4234 --- /dev/null +++ b/packages/pricing/src/providers/cachingProxy.provider.ts @@ -0,0 +1,67 @@ +import { ICache, PriceCacheKey } from "@grants-stack-indexer/repository"; +import { ILogger, TokenCode } from "@grants-stack-indexer/shared"; + +import { IPricingProvider, TokenPrice } from "../internal.js"; + +/** + * A pricing provider that caches token price lookups from the underlying provider. + * When a price is requested, it first checks the cache. If found, returns the cached price. + * If not found in cache, fetches from the underlying provider and caches the result before returning. + * Cache failures (both reads and writes) are logged but do not prevent the provider from functioning. 
+ */ +export class CachingPricingProvider implements IPricingProvider { + constructor( + private readonly provider: IPricingProvider, + private readonly cache: ICache, + private readonly logger: ILogger, + ) {} + + /** @inheritdoc */ + async getTokenPrice( + tokenCode: TokenCode, + startTimestampMs: number, + endTimestampMs?: number, + ): Promise { + let cachedPrice: TokenPrice | undefined = undefined; + try { + cachedPrice = await this.cache.get({ + tokenCode, + timestampMs: startTimestampMs, + }); + } catch (error) { + this.logger.debug( + `Failed to get cached price for token ${tokenCode} at ${startTimestampMs}`, + { error }, + ); + } + + if (cachedPrice) { + return cachedPrice; + } + + const price = await this.provider.getTokenPrice( + tokenCode, + startTimestampMs, + endTimestampMs, + ); + + if (price) { + try { + await this.cache.set( + { + tokenCode, + timestampMs: startTimestampMs, + }, + price, + ); + } catch (error) { + this.logger.debug( + `Failed to cache price for token ${tokenCode} at ${startTimestampMs}`, + { error }, + ); + } + } + + return price; + } +} diff --git a/packages/pricing/src/providers/coingecko.provider.ts b/packages/pricing/src/providers/coingecko.provider.ts index d488132..e1d5296 100644 --- a/packages/pricing/src/providers/coingecko.provider.ts +++ b/packages/pricing/src/providers/coingecko.provider.ts @@ -1,12 +1,18 @@ -import axios, { AxiosInstance, isAxiosError } from "axios"; +import axios, { AxiosError, AxiosInstance, isAxiosError } from "axios"; -import { ILogger, stringify, TokenCode } from "@grants-stack-indexer/shared"; +import { + ILogger, + NetworkError, + NonRetriableError, + RateLimitError, + stringify, + TokenCode, +} from "@grants-stack-indexer/shared"; import { IPricingProvider } from "../interfaces/index.js"; import { CoingeckoPriceChartData, CoingeckoTokenId, - NetworkException, TokenPrice, UnknownPricingException, UnsupportedToken, @@ -90,7 +96,10 @@ export class CoingeckoProvider implements IPricingProvider { ): 
Promise { const tokenId = TokenMapping[tokenCode]; if (!tokenId) { - throw new UnsupportedToken(tokenCode); + throw new UnsupportedToken(tokenCode, { + className: CoingeckoProvider.name, + methodName: "getTokenPrice", + }); } if (!endTimestampMs) { @@ -103,7 +112,6 @@ export class CoingeckoProvider implements IPricingProvider { const path = `/coins/${tokenId}/market_chart/range?vs_currency=usd&from=${startTimestampMs}&to=${endTimestampMs}&precision=full`; - //TODO: handle retries try { const { data } = await this.axios.get(path); @@ -117,25 +125,58 @@ export class CoingeckoProvider implements IPricingProvider { priceUsd: closestEntry[1], }; } catch (error: unknown) { - //TODO: notify if (isAxiosError(error)) { - if (error.status! >= 400 && error.status! < 500) { - this.logger.error(error, { - className: CoingeckoProvider.name, - path: `${error.response?.config.baseURL}${path}`, - }); - return undefined; - } - - if (error.status! >= 500 || error.message === "Network Error") { - throw new NetworkException(error.message, error.status!); - } + this.handleAxiosError(error, path); } + const errorMessage = - `Coingecko API error: failed to fetch token price ` + + `Unknown Coingecko API error: failed to fetch token price ` + stringify(error, Object.getOwnPropertyNames(error)); - this.logger.error(errorMessage); - throw new UnknownPricingException(errorMessage); + + throw new UnknownPricingException(errorMessage, { + className: CoingeckoProvider.name, + methodName: "getTokenPrice", + additionalData: { + path, + }, + }); + } + } + + private handleAxiosError(error: AxiosError, path: string): void { + const errorContext = { + className: CoingeckoProvider.name, + methodName: "getTokenPrice", + additionalData: { + path, + }, + }; + + if (error.status! >= 400 && error.status! 
< 500) { + if (error.status === 429) { + throw new RateLimitError( + errorContext, + error.response?.headers["retry-after"] * 1000 || 60000, + ); + } else { + throw new NonRetriableError( + `${error.status} ${error.code} - Coingecko API error: failed to fetch token price`, + errorContext, + error, + ); + } + } + + if (error.status! > 10000) { + throw new NonRetriableError( + `${error.status} Coingecko API error: please check your credentials or consider upgrading your plan`, + errorContext, + error, + ); + } + + if (error.status! >= 500 || error.message === "Network Error") { + throw new NetworkError(errorContext, { statusCode: error.status }, error); } } } diff --git a/packages/pricing/src/providers/index.ts b/packages/pricing/src/providers/index.ts index 9c05289..2829e17 100644 --- a/packages/pricing/src/providers/index.ts +++ b/packages/pricing/src/providers/index.ts @@ -1,2 +1,3 @@ export * from "./coingecko.provider.js"; export * from "./dummy.provider.js"; +export * from "./cachingProxy.provider.js"; diff --git a/packages/pricing/test/providers/cachingProxy.provider.spec.ts b/packages/pricing/test/providers/cachingProxy.provider.spec.ts new file mode 100644 index 0000000..1e80581 --- /dev/null +++ b/packages/pricing/test/providers/cachingProxy.provider.spec.ts @@ -0,0 +1,116 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; + +import { ICache, PriceCacheKey } from "@grants-stack-indexer/repository"; +import { ILogger, TokenCode } from "@grants-stack-indexer/shared"; + +import { IPricingProvider, TokenPrice } from "../../src/internal.js"; +import { CachingPricingProvider } from "../../src/providers/cachingProxy.provider.js"; + +describe("CachingPricingProvider", () => { + const mockProvider = { + getTokenPrice: vi.fn(), + } as unknown as IPricingProvider; + + const mockCache = { + get: vi.fn(), + set: vi.fn(), + } as unknown as ICache; + + const mockLogger = { + debug: vi.fn(), + } as unknown as ILogger; + + let provider: 
CachingPricingProvider; + + beforeEach(() => { + vi.clearAllMocks(); + provider = new CachingPricingProvider(mockProvider, mockCache, mockLogger); + }); + + describe("getTokenPrice", () => { + const testToken = { + code: "USDC" as TokenCode, + priceSourceCode: "USDC" as TokenCode, + address: "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", + decimals: 6, + }; + const testStartTime = 1234567890000; + const testEndTime = 1234567899999; + const testPrice: TokenPrice = { + priceUsd: 0.99, + timestampMs: testStartTime, + }; + + it("returns cached price when available", async () => { + vi.spyOn(mockCache, "get").mockResolvedValue(testPrice); + + const result = await provider.getTokenPrice(testToken.code, testStartTime); + + expect(result).toEqual(testPrice); + expect(mockCache.get).toHaveBeenCalledWith({ + tokenCode: testToken.code, + timestampMs: testStartTime, + }); + expect(mockProvider.getTokenPrice).not.toHaveBeenCalled(); + }); + + it("fetches and caches price when cache misses", async () => { + vi.spyOn(mockCache, "get").mockResolvedValue(undefined); + vi.spyOn(mockProvider, "getTokenPrice").mockResolvedValue(testPrice); + + const result = await provider.getTokenPrice(testToken.code, testStartTime, testEndTime); + + expect(result).toEqual(testPrice); + expect(mockProvider.getTokenPrice).toHaveBeenCalledWith( + testToken.code, + testStartTime, + testEndTime, + ); + expect(mockCache.set).toHaveBeenCalledWith( + { + tokenCode: testToken.code, + timestampMs: testStartTime, + }, + testPrice, + ); + }); + + it("handles cache read failures gracefully", async () => { + vi.spyOn(mockCache, "get").mockRejectedValue(new Error("Cache read error")); + vi.spyOn(mockProvider, "getTokenPrice").mockResolvedValue(testPrice); + + const result = await provider.getTokenPrice(testToken.code, testStartTime, testEndTime); + + expect(result).toEqual(testPrice); + expect(mockLogger.debug).toHaveBeenCalledWith( + `Failed to get cached price for token ${testToken.code} at ${testStartTime}`, + 
expect.any(Object), + ); + expect(mockProvider.getTokenPrice).toHaveBeenCalled(); + }); + + it("handles cache write failures gracefully", async () => { + vi.spyOn(mockCache, "get").mockResolvedValue(undefined); + vi.spyOn(mockCache, "set").mockRejectedValue(new Error("Cache write error")); + vi.spyOn(mockProvider, "getTokenPrice").mockResolvedValue(testPrice); + + const result = await provider.getTokenPrice(testToken.code, testStartTime, testEndTime); + + expect(result).toEqual(testPrice); + expect(mockLogger.debug).toHaveBeenCalledWith( + `Failed to cache price for token ${testToken.code} at ${testStartTime}`, + expect.any(Object), + ); + }); + + it("returns undefined when price is not found", async () => { + vi.spyOn(mockCache, "get").mockResolvedValue(undefined); + vi.spyOn(mockProvider, "getTokenPrice").mockResolvedValue(undefined); + + const result = await provider.getTokenPrice(testToken.code, testStartTime, testEndTime); + + expect(result).toBeUndefined(); + expect(mockCache.set).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/packages/pricing/test/providers/coingecko.provider.spec.ts b/packages/pricing/test/providers/coingecko.provider.spec.ts index 4525278..f72e595 100644 --- a/packages/pricing/test/providers/coingecko.provider.spec.ts +++ b/packages/pricing/test/providers/coingecko.provider.spec.ts @@ -1,9 +1,15 @@ import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; -import { ILogger, TokenCode } from "@grants-stack-indexer/shared"; +import { + ILogger, + NetworkError, + NonRetriableError, + RateLimitError, + TokenCode, +} from "@grants-stack-indexer/shared"; import type { TokenPrice } from "../../src/external.js"; -import { CoingeckoProvider, NetworkException, UnsupportedToken } from "../../src/external.js"; +import { CoingeckoProvider, UnsupportedToken } from "../../src/external.js"; const mock = vi.hoisted(() => ({ get: vi.fn(), @@ -117,20 +123,31 @@ describe("CoingeckoProvider", () => { expect(result).toBeUndefined(); }); - 
it("return undefined if 400 family error", async () => { + it("return RateLimitError if 429 error", async () => { + mock.get.mockRejectedValueOnce({ + status: 429, + data: "Rate limit exceeded", + isAxiosError: true, + headers: { + "retry-after": 60, + }, + }); + + await expect( + provider.getTokenPrice("ETH" as TokenCode, 1609459200000, 1609545600000), + ).rejects.toThrow(RateLimitError); + }); + + it("throw NonRetriableError for 400 family error", async () => { mock.get.mockRejectedValueOnce({ status: 400, data: "Bad Request", isAxiosError: true, }); - const result = await provider.getTokenPrice( - "ETH" as TokenCode, - 1609459200000, - 1609545600000, - ); - - expect(result).toBeUndefined(); + await expect( + provider.getTokenPrice("ETH" as TokenCode, 1609459200000, 1609545600000), + ).rejects.toThrow(NonRetriableError); }); it("throw UnsupportedTokenException for unsupported token", async () => { @@ -147,7 +164,7 @@ describe("CoingeckoProvider", () => { }); await expect( provider.getTokenPrice("ETH" as TokenCode, 1609459200000, 1609545600000), - ).rejects.toThrow(NetworkException); + ).rejects.toThrow(NetworkError); }); it("throw NetworkException for network errors", async () => { @@ -159,7 +176,7 @@ describe("CoingeckoProvider", () => { await expect( provider.getTokenPrice("ETH" as TokenCode, 1609459200000, 1609545600000), - ).rejects.toThrow(NetworkException); + ).rejects.toThrow(NetworkError); }); }); }); diff --git a/packages/processors/src/exceptions/invalidArgument.exception.ts b/packages/processors/src/exceptions/invalidArgument.exception.ts index 80ff703..92d07e6 100644 --- a/packages/processors/src/exceptions/invalidArgument.exception.ts +++ b/packages/processors/src/exceptions/invalidArgument.exception.ts @@ -1,4 +1,6 @@ -export class InvalidArgument extends Error { +import { NonRetriableError } from "@grants-stack-indexer/shared"; + +export class InvalidArgument extends NonRetriableError { constructor(message: string) { super(message); } diff --git 
a/packages/processors/src/exceptions/metadataNotFound.exception.ts b/packages/processors/src/exceptions/metadataNotFound.exception.ts index 04b4fd5..ab8b57b 100644 --- a/packages/processors/src/exceptions/metadataNotFound.exception.ts +++ b/packages/processors/src/exceptions/metadataNotFound.exception.ts @@ -1,6 +1,7 @@ -export class MetadataNotFound extends Error { - constructor(message: string) { - super(message); - this.name = "MetadataNotFoundError"; +import { ErrorContext, RetriableError } from "@grants-stack-indexer/shared"; + +export class MetadataNotFound extends RetriableError { + constructor(message: string, context: ErrorContext = {}, error?: Error) { + super(message, context, {}, error); } } diff --git a/packages/processors/src/exceptions/metadataParsingFailed.exception.ts b/packages/processors/src/exceptions/metadataParsingFailed.exception.ts index a62cce1..6309e08 100644 --- a/packages/processors/src/exceptions/metadataParsingFailed.exception.ts +++ b/packages/processors/src/exceptions/metadataParsingFailed.exception.ts @@ -1,4 +1,6 @@ -export class MetadataParsingFailed extends Error { +import { NonRetriableError } from "@grants-stack-indexer/shared"; + +export class MetadataParsingFailed extends NonRetriableError { constructor(additionalInfo?: string) { super(`Failed to parse application metadata: ${additionalInfo}`); } diff --git a/packages/processors/src/exceptions/unsupportedEvent.exception.ts b/packages/processors/src/exceptions/unsupportedEvent.exception.ts index 07533f8..1099ad7 100644 --- a/packages/processors/src/exceptions/unsupportedEvent.exception.ts +++ b/packages/processors/src/exceptions/unsupportedEvent.exception.ts @@ -1,6 +1,6 @@ -import { ContractName } from "@grants-stack-indexer/shared"; +import { ContractName, NonRetriableError } from "@grants-stack-indexer/shared"; -export class UnsupportedEventException extends Error { +export class UnsupportedEventException extends NonRetriableError { constructor( contract: ContractName, 
public readonly eventName: string, diff --git a/packages/processors/src/exceptions/unsupportedStrategy.exception.ts b/packages/processors/src/exceptions/unsupportedStrategy.exception.ts index c1ddbb1..eb24bfb 100644 --- a/packages/processors/src/exceptions/unsupportedStrategy.exception.ts +++ b/packages/processors/src/exceptions/unsupportedStrategy.exception.ts @@ -1,6 +1,8 @@ import { Hex } from "viem"; -export class UnsupportedStrategy extends Error { +import { NonRetriableError } from "@grants-stack-indexer/shared"; + +export class UnsupportedStrategy extends NonRetriableError { constructor(strategyId: Hex) { super(`Strategy ${strategyId} unsupported`); } diff --git a/packages/repository/src/db/connection.ts b/packages/repository/src/db/connection.ts index 01b29e9..9770e71 100644 --- a/packages/repository/src/db/connection.ts +++ b/packages/repository/src/db/connection.ts @@ -16,8 +16,10 @@ import { Donation as DonationTable, ProcessedEvent as EventRegistryTable, MatchingDistribution, + Metadata as MetadataCacheTable, PendingProjectRole as PendingProjectRoleTable, PendingRoundRole as PendingRoundRoleTable, + Price as PriceCacheTable, ProjectRole as ProjectRoleTable, Project as ProjectTable, Round, @@ -67,6 +69,8 @@ export interface Database { strategiesRegistry: StrategyRegistryTable; eventsRegistry: EventRegistryTable; strategyProcessingCheckpoints: StrategyProcessingCheckpointTable; + metadataCache: MetadataCacheTable; + priceCache: PriceCacheTable; } /** diff --git a/packages/repository/src/exceptions/applicationNotFound.exception.ts b/packages/repository/src/exceptions/applicationNotFound.exception.ts index b0bfc45..c48d78f 100644 --- a/packages/repository/src/exceptions/applicationNotFound.exception.ts +++ b/packages/repository/src/exceptions/applicationNotFound.exception.ts @@ -1,6 +1,6 @@ -import { ChainId } from "@grants-stack-indexer/shared"; +import { ChainId, NonRetriableError } from "@grants-stack-indexer/shared"; -export class ApplicationNotFound 
extends Error { +export class ApplicationNotFound extends NonRetriableError { constructor(chainId: ChainId, roundId: string, recipientId: string) { super( `Application not found on chain ${chainId} for round ${roundId} and recipient ${recipientId}`, diff --git a/packages/repository/src/exceptions/index.ts b/packages/repository/src/exceptions/index.ts index 710e180..341015e 100644 --- a/packages/repository/src/exceptions/index.ts +++ b/packages/repository/src/exceptions/index.ts @@ -1,3 +1,4 @@ export * from "./roundNotFound.exception.js"; export * from "./applicationNotFound.exception.js"; export * from "./projectNotFound.exception.js"; +export * from "./postgres.exception.js"; diff --git a/packages/repository/src/exceptions/postgres.exception.ts b/packages/repository/src/exceptions/postgres.exception.ts new file mode 100644 index 0000000..a626ab7 --- /dev/null +++ b/packages/repository/src/exceptions/postgres.exception.ts @@ -0,0 +1,39 @@ +import { ErrorContext, NonRetriableError, RetriableError } from "@grants-stack-indexer/shared"; + +import { PostgresError } from "../internal.js"; + +export class DatabaseException extends NonRetriableError { + constructor(message: string, context: ErrorContext, error: PostgresError) { + super(`Database error: ${message}`, context, error); + } +} + +export class DatabaseConnectionException extends NonRetriableError { + constructor(message: string, context: ErrorContext, error: PostgresError) { + super(`Database connection error: ${message}`, context, error); + } +} + +export class DatabaseDuplicateKeyException extends NonRetriableError { + constructor(key: string, value: string, context: ErrorContext, error: PostgresError) { + super(`Duplicate key violation: ${key}=${value}`, context, error); + } +} + +export class DatabaseConstraintViolationException extends NonRetriableError { + constructor(constraint: string, message: string, context: ErrorContext, error: PostgresError) { + super(`Constraint violation (${constraint}): 
${message}`, context, error); + } +} + +export class DatabaseInvalidParametersException extends NonRetriableError { + constructor(message: string, context: ErrorContext, error: PostgresError) { + super(`Invalid parameters: ${message}`, context, error); + } +} + +export class DatabaseQueryTimeoutException extends RetriableError { + constructor(message: string, context: ErrorContext, error: PostgresError) { + super(`Query timeout: ${message}`, context, undefined, error); + } +} diff --git a/packages/repository/src/exceptions/projectNotFound.exception.ts b/packages/repository/src/exceptions/projectNotFound.exception.ts index 9ff64cd..7bf6d77 100644 --- a/packages/repository/src/exceptions/projectNotFound.exception.ts +++ b/packages/repository/src/exceptions/projectNotFound.exception.ts @@ -1,12 +1,12 @@ -import { ChainId } from "@grants-stack-indexer/shared"; +import { ChainId, NonRetriableError } from "@grants-stack-indexer/shared"; -export class ProjectNotFound extends Error { +export class ProjectNotFound extends NonRetriableError { constructor(chainId: ChainId, anchorAddress: string) { super(`Project not found for chainId: ${chainId} and anchorAddress: ${anchorAddress}`); } } -export class ProjectByRoleNotFound extends Error { +export class ProjectByRoleNotFound extends NonRetriableError { constructor(chainId: ChainId, role: string) { super(`Project not found for chainId: ${chainId} and role: ${role}`); } diff --git a/packages/repository/src/exceptions/roundNotFound.exception.ts b/packages/repository/src/exceptions/roundNotFound.exception.ts index 5705522..a7ed6fd 100644 --- a/packages/repository/src/exceptions/roundNotFound.exception.ts +++ b/packages/repository/src/exceptions/roundNotFound.exception.ts @@ -1,12 +1,12 @@ -import { ChainId } from "@grants-stack-indexer/shared"; +import { ChainId, NonRetriableError } from "@grants-stack-indexer/shared"; -export class RoundNotFound extends Error { +export class RoundNotFound extends NonRetriableError { 
constructor(chainId: ChainId, strategyAddress: string) { super(`Round not found for chainId: ${chainId} and strategyAddress: ${strategyAddress}`); } } -export class RoundNotFoundForId extends Error { +export class RoundNotFoundForId extends NonRetriableError { constructor(chainId: ChainId, roundId: string) { super(`Round not found for chainId: ${chainId} and roundId: ${roundId}`); } diff --git a/packages/repository/src/external.ts b/packages/repository/src/external.ts index 9966150..04a2893 100644 --- a/packages/repository/src/external.ts +++ b/packages/repository/src/external.ts @@ -63,6 +63,7 @@ export { export { RoundNotFound, + RoundNotFoundForId, ApplicationNotFound, ProjectNotFound, ProjectByRoleNotFound, @@ -70,4 +71,12 @@ export { export type { StrategyProcessingCheckpoint, NewStrategyProcessingCheckpoint } from "./internal.js"; +export type { ITransactionManager, TransactionConnection } from "./internal.js"; +export { KyselyTransactionManager } from "./internal.js"; + +export type { ICache } from "./internal.js"; +export type { Metadata, NewMetadata, PartialMetadata } from "./internal.js"; +export type { Price, NewPrice, PartialPrice, PriceCacheKey } from "./internal.js"; +export { KyselyMetadataCache, KyselyPricingCache } from "./internal.js"; + export { createKyselyPostgresDb as createKyselyDatabase } from "./internal.js"; diff --git a/packages/repository/src/interfaces/applicationPayoutRepository.interface.ts b/packages/repository/src/interfaces/applicationPayoutRepository.interface.ts index 6499187..2c66693 100644 --- a/packages/repository/src/interfaces/applicationPayoutRepository.interface.ts +++ b/packages/repository/src/interfaces/applicationPayoutRepository.interface.ts @@ -1,10 +1,17 @@ import { NewApplicationPayout } from "../types/applicationPayout.types.js"; +import { TransactionConnection } from "../types/transaction.types.js"; -export interface IApplicationPayoutRepository { +export interface IApplicationPayoutRepository< + TxConnection 
extends TransactionConnection = TransactionConnection, +> { /** * Inserts a new application payout into the database. * @param applicationPayout - The new application payout to insert. + * @param tx Optional transaction connection * @returns A promise that resolves when the application payout is inserted. */ - insertApplicationPayout(applicationPayout: NewApplicationPayout): Promise; + insertApplicationPayout( + applicationPayout: NewApplicationPayout, + tx?: TxConnection, + ): Promise; } diff --git a/packages/repository/src/interfaces/applicationRepository.interface.ts b/packages/repository/src/interfaces/applicationRepository.interface.ts index 2eb2f60..69b3d09 100644 --- a/packages/repository/src/interfaces/applicationRepository.interface.ts +++ b/packages/repository/src/interfaces/applicationRepository.interface.ts @@ -1,6 +1,7 @@ import { Address, ChainId } from "@grants-stack-indexer/shared"; import { Application, NewApplication, PartialApplication } from "../types/application.types.js"; +import { TransactionConnection } from "../types/transaction.types.js"; export interface IApplicationReadRepository { /** @@ -65,22 +66,27 @@ export interface IApplicationReadRepository { getApplicationsByRoundId(chainId: ChainId, roundId: string): Promise; } -export interface IApplicationRepository extends IApplicationReadRepository { +export interface IApplicationRepository< + TxConnection extends TransactionConnection = TransactionConnection, +> extends IApplicationReadRepository { /** * Inserts a new application into the repository. * @param application The new application to insert. + * @param tx Optional transaction connection * @returns A promise that resolves when the insertion is complete. */ - insertApplication(application: NewApplication): Promise; + insertApplication(application: NewApplication, tx?: TxConnection): Promise; /** * Updates an existing application in the repository. 
* @param where An object containing the (id, chainId, and roundId) of the application to update. * @param application The partial application data to update. + * @param tx Optional transaction connection * @returns A promise that resolves when the update is complete. */ updateApplication( where: { id: string; chainId: ChainId; roundId: string }, application: PartialApplication, + tx?: TxConnection, ): Promise; } diff --git a/packages/repository/src/interfaces/cache.interface.ts b/packages/repository/src/interfaces/cache.interface.ts new file mode 100644 index 0000000..a026730 --- /dev/null +++ b/packages/repository/src/interfaces/cache.interface.ts @@ -0,0 +1,22 @@ +/** + * Interface for a cache. + * @template Key - The type of the key. + * @template Value - The type of the value. + */ +export interface ICache { + /** + * Get the value for a given key. + * @param key - The key to get the value for. + * @returns The value for the given key, or undefined if the key is not found. + * @throws If there is an error getting the value. + */ + get(key: Key): Promise; + + /** + * Set the value for a given key. + * @param key - The key to set the value for. + * @param value - The value to set for the given key. + * @throws If there is an error setting the value. 
+ */ + set(key: Key, value: Value): Promise; +} diff --git a/packages/repository/src/interfaces/donationRepository.interface.ts b/packages/repository/src/interfaces/donationRepository.interface.ts index 4ddccc5..45f2b00 100644 --- a/packages/repository/src/interfaces/donationRepository.interface.ts +++ b/packages/repository/src/interfaces/donationRepository.interface.ts @@ -1,17 +1,22 @@ import { NewDonation } from "../internal.js"; +import { TransactionConnection } from "../types/transaction.types.js"; -export interface IDonationRepository { +export interface IDonationRepository< + TxConnection extends TransactionConnection = TransactionConnection, +> { /** * Insert a single donation * @param donation The donation to insert + * @param tx Optional transaction connection * @returns A promise that resolves when the donation is inserted */ - insertDonation(donation: NewDonation): Promise; + insertDonation(donation: NewDonation, tx?: TxConnection): Promise; /** * Insert many donations * @param donations The donations to insert + * @param tx Optional transaction connection * @returns A promise that resolves when the donations are inserted */ - insertManyDonations(donations: NewDonation[]): Promise; + insertManyDonations(donations: NewDonation[], tx?: TxConnection): Promise; } diff --git a/packages/repository/src/interfaces/index.ts b/packages/repository/src/interfaces/index.ts index f8818a5..761d20b 100644 --- a/packages/repository/src/interfaces/index.ts +++ b/packages/repository/src/interfaces/index.ts @@ -6,3 +6,6 @@ export * from "./applicationPayoutRepository.interface.js"; export * from "./strategyRepository.interface.js"; export * from "./eventsRepository.interface.js"; export * from "./strategyProcessingCheckpointRepository.interface.js"; +export * from "./transactionManager.interface.js"; +export * from "./pgError.interface.js"; +export * from "./cache.interface.js"; diff --git a/packages/repository/src/interfaces/pgError.interface.ts 
b/packages/repository/src/interfaces/pgError.interface.ts new file mode 100644 index 0000000..5537ce4 --- /dev/null +++ b/packages/repository/src/interfaces/pgError.interface.ts @@ -0,0 +1,5 @@ +export interface PostgresError extends Error { + code?: string; + detail?: string; + constraint?: string; +} diff --git a/packages/repository/src/interfaces/projectRepository.interface.ts b/packages/repository/src/interfaces/projectRepository.interface.ts index 4e70c98..6f5732c 100644 --- a/packages/repository/src/interfaces/projectRepository.interface.ts +++ b/packages/repository/src/interfaces/projectRepository.interface.ts @@ -9,6 +9,7 @@ import { Project, ProjectRoleNames, } from "../types/project.types.js"; +import { TransactionConnection } from "../types/transaction.types.js"; export interface IProjectReadRepository { /** @@ -67,28 +68,37 @@ export interface IProjectReadRepository { getProjectByAnchorOrThrow(chainId: ChainId, anchorAddress: Address): Promise; } -export interface IProjectRepository extends IProjectReadRepository { +export interface IProjectRepository< + TxConnection extends TransactionConnection = TransactionConnection, +> extends IProjectReadRepository { /** * Inserts a new project into the repository. * @param project The new project to be inserted. + * @param tx Optional transaction connection * @returns A promise that resolves when the insertion is complete. */ - insertProject(project: NewProject): Promise; + insertProject(project: NewProject, tx?: TxConnection): Promise; /** * Updates an existing project in the repository. * @param where An object containing the id and chainId to identify the project to update. * @param project The partial project data to update. + * @param tx Optional transaction connection * @returns A promise that resolves when the update is complete. 
*/ - updateProject(where: { id: string; chainId: ChainId }, project: PartialProject): Promise; + updateProject( + where: { id: string; chainId: ChainId }, + project: PartialProject, + tx?: TxConnection, + ): Promise; /** * Inserts a new project role into the repository. * @param projectRole The new project role to be inserted. + * @param tx Optional transaction connection * @returns A promise that resolves when the insertion is complete. */ - insertProjectRole(projectRole: NewProjectRole): Promise; + insertProjectRole(projectRole: NewProjectRole, tx?: TxConnection): Promise; /** * Deletes multiple project roles based on the provided criteria. @@ -96,6 +106,7 @@ export interface IProjectRepository extends IProjectReadRepository { * @param projectId The project ID of the roles to delete. * @param role The role type to delete. * @param address Optional address to further filter the roles to delete. + * @param tx Optional transaction connection * @returns A promise that resolves when the deletion is complete. */ deleteManyProjectRoles( @@ -103,19 +114,25 @@ export interface IProjectRepository extends IProjectReadRepository { projectId: string, role: ProjectRoleNames, address?: Address, + tx?: TxConnection, ): Promise; /** * Inserts a new pending project role into the repository. * @param pendingProjectRole The new pending project role to be inserted. + * @param tx Optional transaction connection * @returns A promise that resolves when the insertion is complete. */ - insertPendingProjectRole(pendingProjectRole: NewPendingProjectRole): Promise; + insertPendingProjectRole( + pendingProjectRole: NewPendingProjectRole, + tx?: TxConnection, + ): Promise; /** * Deletes multiple pending project roles based on their IDs. * @param ids An array of IDs of the pending project roles to delete. + * @param tx Optional transaction connection * @returns A promise that resolves when the deletion is complete. 
*/ - deleteManyPendingProjectRoles(ids: number[]): Promise; + deleteManyPendingProjectRoles(ids: number[], tx?: TxConnection): Promise; } diff --git a/packages/repository/src/interfaces/roundRepository.interface.ts b/packages/repository/src/interfaces/roundRepository.interface.ts index e94fe2c..1284e86 100644 --- a/packages/repository/src/interfaces/roundRepository.interface.ts +++ b/packages/repository/src/interfaces/roundRepository.interface.ts @@ -10,6 +10,7 @@ import { RoundRole, RoundRoleNames, } from "../types/round.types.js"; +import { TransactionConnection } from "../types/transaction.types.js"; export interface IRoundReadRepository { /** @@ -94,23 +95,28 @@ export interface IRoundReadRepository { getPendingRoundRoles(chainId: ChainId, role: RoundRoleNames): Promise; } -export interface IRoundRepository extends IRoundReadRepository { +export interface IRoundRepository< + TxConnection extends TransactionConnection = TransactionConnection, +> extends IRoundReadRepository { /** * Inserts a new round into the repository. * @param round The new round to insert. + * @param tx Optional transaction connection * @returns A promise that resolves when the insertion is complete. */ - insertRound(round: NewRound): Promise; + insertRound(round: NewRound, tx?: TxConnection): Promise; /** * Updates an existing round in the repository. * @param where An object containing the id and chainId of the round to update. * @param round The partial round data to update. + * @param tx Optional transaction connection * @returns A promise that resolves when the update is complete. */ updateRound( where: { id: string; chainId: ChainId } | { chainId: ChainId; strategyAddress: Address }, round: PartialRound, + tx?: TxConnection, ): Promise; /** @@ -118,6 +124,7 @@ export interface IRoundRepository extends IRoundReadRepository { * @param where An object containing the chainId and roundId of the round to update. * @param amount The amount to increment by. 
* @param amountInUsd The amount in USD to increment by. + * @param tx Optional transaction connection * @returns A promise that resolves when the increment is complete. */ incrementRoundFunds( @@ -127,12 +134,14 @@ export interface IRoundRepository extends IRoundReadRepository { }, amount: bigint, amountInUsd: string, + tx?: TxConnection, ): Promise; /** * Increments the total distributed amount for a specific round. * @param where An object containing the chainId and roundId of the round to update. * @param amount The amount to increment by. + * @param tx Optional transaction connection * @returns A promise that resolves when the increment is complete. */ incrementRoundTotalDistributed( @@ -141,14 +150,16 @@ export interface IRoundRepository extends IRoundReadRepository { roundId: string; }, amount: bigint, + tx?: TxConnection, ): Promise; /** * Inserts a new round role into the repository. * @param roundRole The new round role to insert. + * @param tx Optional transaction connection * @returns A promise that resolves when the insertion is complete. */ - insertRoundRole(roundRole: NewRoundRole): Promise; + insertRoundRole(roundRole: NewRoundRole, tx?: TxConnection): Promise; /** * Deletes multiple round roles based on chain ID, round ID, role, and address. @@ -156,6 +167,7 @@ export interface IRoundRepository extends IRoundReadRepository { * @param roundId The round ID of the roles to delete. * @param role The role name of the roles to delete. * @param address The address associated with the roles to delete. + * @param tx Optional transaction connection * @returns A promise that resolves when the deletion is complete. */ deleteManyRoundRolesByRoleAndAddress( @@ -163,19 +175,22 @@ export interface IRoundRepository extends IRoundReadRepository { roundId: string, role: RoundRoleNames, address: Address, + tx?: TxConnection, ): Promise; /** * Inserts a new pending round role into the repository. * @param pendingRoundRole The new pending round role to insert. 
+ * @param tx Optional transaction connection * @returns A promise that resolves when the insertion is complete. */ - insertPendingRoundRole(pendingRoundRole: NewPendingRoundRole): Promise; + insertPendingRoundRole(pendingRoundRole: NewPendingRoundRole, tx?: TxConnection): Promise; /** * Deletes multiple pending round roles by their IDs. * @param ids An array of IDs of the pending round roles to delete. + * @param tx Optional transaction connection * @returns A promise that resolves when the deletion is complete. */ - deleteManyPendingRoundRoles(ids: number[]): Promise; + deleteManyPendingRoundRoles(ids: number[], tx?: TxConnection): Promise; } diff --git a/packages/repository/src/interfaces/transactionManager.interface.ts b/packages/repository/src/interfaces/transactionManager.interface.ts new file mode 100644 index 0000000..7da171f --- /dev/null +++ b/packages/repository/src/interfaces/transactionManager.interface.ts @@ -0,0 +1,20 @@ +import { TransactionConnection } from "../internal.js"; + +/** + * The ITransactionManager interface provides a generic transaction management solution using a callback pattern. + * + * The generic type parameter TxConnection extends TransactionConnection to allow for different transaction + * connection implementations while maintaining type safety. + */ +export interface ITransactionManager< + TxConnection extends TransactionConnection = TransactionConnection, +> { + /* + * Provides a transaction connection to the given function. + * If the function throws an error, the transaction will be rolled back. + * If the function returns a promise, the transaction will be committed after the promise is resolved. + * + * Note: only DB calls that use the provided transaction connection will be executed in the transaction. 
+ */ + runInTransaction(fn: (tx: TxConnection) => Promise): Promise; +} diff --git a/packages/repository/src/internal.ts b/packages/repository/src/internal.ts index bdbaa77..f8c7d74 100644 --- a/packages/repository/src/internal.ts +++ b/packages/repository/src/internal.ts @@ -1,5 +1,6 @@ export * from "./types/index.js"; export * from "./interfaces/index.js"; +export * from "./exceptions/index.js"; +export * from "./utils/index.js"; export * from "./db/connection.js"; export * from "./repositories/kysely/index.js"; -export * from "./exceptions/index.js"; diff --git a/packages/repository/src/repositories/kysely/application.repository.ts b/packages/repository/src/repositories/kysely/application.repository.ts index 48d6f8f..a4dae53 100644 --- a/packages/repository/src/repositories/kysely/application.repository.ts +++ b/packages/repository/src/repositories/kysely/application.repository.ts @@ -6,12 +6,14 @@ import { Application, ApplicationNotFound, Database, + handlePostgresError, IApplicationRepository, + KyselyTransaction, NewApplication, PartialApplication, } from "../../internal.js"; -export class KyselyApplicationRepository implements IApplicationRepository { +export class KyselyApplicationRepository implements IApplicationRepository { constructor( private readonly db: Kysely, private readonly schemaName: string, @@ -96,31 +98,54 @@ export class KyselyApplicationRepository implements IApplicationRepository { } /* @inheritdoc */ - async insertApplication(application: NewApplication): Promise { + async insertApplication(application: NewApplication, tx?: KyselyTransaction): Promise { const _application = this.formatApplication(application); + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); - await this.db - .withSchema(this.schemaName) - .insertInto("applications") - .values(_application) - .execute(); + await queryBuilder.insertInto("applications").values(_application).execute(); + } catch (error) { + throw handlePostgresError(error, { + 
className: KyselyApplicationRepository.name, + methodName: "insertApplication", + additionalData: { + application: _application, + }, + }); + } } /* @inheritdoc */ async updateApplication( where: { id: string; chainId: ChainId; roundId: string }, application: PartialApplication, + tx?: KyselyTransaction, ): Promise { const _application = this.formatApplication(application); + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); - await this.db - .withSchema(this.schemaName) - .updateTable("applications") - .set(_application) - .where("id", "=", where.id) - .where("chainId", "=", where.chainId) - .where("roundId", "=", where.roundId) - .execute(); + await queryBuilder + .updateTable("applications") + .set(_application) + .where("id", "=", where.id) + .where("chainId", "=", where.chainId) + .where("roundId", "=", where.roundId) + .execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyApplicationRepository.name, + methodName: "updateApplication", + additionalData: { + params: { + chainId: where.chainId, + roundId: where.roundId, + applicationId: where.id, + }, + application: _application, + }, + }); + } } /** diff --git a/packages/repository/src/repositories/kysely/applicationPayout.repository.ts b/packages/repository/src/repositories/kysely/applicationPayout.repository.ts index b5d0196..4772d9c 100644 --- a/packages/repository/src/repositories/kysely/applicationPayout.repository.ts +++ b/packages/repository/src/repositories/kysely/applicationPayout.repository.ts @@ -1,19 +1,40 @@ import { Kysely } from "kysely"; -import { Database, IApplicationPayoutRepository, NewApplicationPayout } from "../../internal.js"; +import { + Database, + handlePostgresError, + IApplicationPayoutRepository, + KyselyTransaction, + NewApplicationPayout, +} from "../../internal.js"; -export class KyselyApplicationPayoutRepository implements IApplicationPayoutRepository { +export class KyselyApplicationPayoutRepository + implements 
IApplicationPayoutRepository +{ constructor( private readonly db: Kysely, private readonly schemaName: string, ) {} /** @inheritdoc */ - async insertApplicationPayout(applicationPayout: NewApplicationPayout): Promise { - await this.db - .withSchema(this.schemaName) - .insertInto("applicationsPayouts") - .values(applicationPayout) - .execute(); + async insertApplicationPayout( + applicationPayout: NewApplicationPayout, + tx?: KyselyTransaction, + ): Promise { + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + await queryBuilder + .insertInto("applicationsPayouts") + .values(applicationPayout) + .execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyApplicationPayoutRepository.name, + methodName: "insertApplicationPayout", + additionalData: { + applicationPayout, + }, + }); + } } } diff --git a/packages/repository/src/repositories/kysely/donation.repository.ts b/packages/repository/src/repositories/kysely/donation.repository.ts index 85a762d..5ef7ba6 100644 --- a/packages/repository/src/repositories/kysely/donation.repository.ts +++ b/packages/repository/src/repositories/kysely/donation.repository.ts @@ -1,35 +1,61 @@ import { Kysely } from "kysely"; -import { IDonationRepository } from "../../interfaces/donationRepository.interface.js"; -import { Database, NewDonation } from "../../internal.js"; +import { + Database, + handlePostgresError, + IDonationRepository, + KyselyTransaction, + NewDonation, +} from "../../internal.js"; -export class KyselyDonationRepository implements IDonationRepository { +export class KyselyDonationRepository implements IDonationRepository { constructor( private readonly db: Kysely, private readonly schemaName: string, ) {} /** @inheritdoc */ - async insertDonation(donation: NewDonation): Promise { - await this.db - .withSchema(this.schemaName) - .insertInto("donations") - .values(donation) - .onConflict((c) => { - return c.column("id").doNothing(); - }) - .execute(); + async 
insertDonation(donation: NewDonation, tx?: KyselyTransaction): Promise { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + try { + await queryBuilder + .insertInto("donations") + .values(donation) + .onConflict((c) => { + return c.column("id").doNothing(); + }) + .execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyDonationRepository.name, + methodName: "insertDonation", + additionalData: { + donation, + }, + }); + } } /** @inheritdoc */ - async insertManyDonations(donations: NewDonation[]): Promise { - await this.db - .withSchema(this.schemaName) - .insertInto("donations") - .values(donations) - .onConflict((c) => { - return c.column("id").doNothing(); - }) - .execute(); + async insertManyDonations(donations: NewDonation[], tx?: KyselyTransaction): Promise { + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + + await queryBuilder + .insertInto("donations") + .values(donations) + .onConflict((c) => { + return c.column("id").doNothing(); + }) + .execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyDonationRepository.name, + methodName: "insertManyDonations", + additionalData: { + donations, + }, + }); + } } } diff --git a/packages/repository/src/repositories/kysely/eventRegistry.repository.ts b/packages/repository/src/repositories/kysely/eventRegistry.repository.ts index 5a88d8e..d9c228e 100644 --- a/packages/repository/src/repositories/kysely/eventRegistry.repository.ts +++ b/packages/repository/src/repositories/kysely/eventRegistry.repository.ts @@ -3,7 +3,12 @@ import { Kysely } from "kysely"; import { ChainId } from "@grants-stack-indexer/shared"; import { IEventRegistryRepository } from "../../interfaces/index.js"; -import { Database, NewProcessedEvent, ProcessedEvent } from "../../internal.js"; +import { + Database, + handlePostgresError, + NewProcessedEvent, + ProcessedEvent, +} from "../../internal.js"; export class KyselyEventRegistryRepository 
implements IEventRegistryRepository { constructor( @@ -24,19 +29,30 @@ export class KyselyEventRegistryRepository implements IEventRegistryRepository { /** @inheritdoc */ async saveLastProcessedEvent(chainId: ChainId, event: NewProcessedEvent): Promise { const { blockNumber, blockTimestamp, logIndex, rawEvent } = event; // Extract only the fields from NewProcessedEvent - await this.db - .withSchema(this.schemaName) - .insertInto("eventsRegistry") - .values({ blockNumber, blockTimestamp, logIndex, chainId, rawEvent }) - .onConflict((oc) => - oc.columns(["chainId"]).doUpdateSet({ - blockNumber, - blockTimestamp, - logIndex, - rawEvent, + try { + await this.db + .withSchema(this.schemaName) + .insertInto("eventsRegistry") + .values({ blockNumber, blockTimestamp, logIndex, chainId, rawEvent }) + .onConflict((oc) => + oc.columns(["chainId"]).doUpdateSet({ + blockNumber, + blockTimestamp, + logIndex, + rawEvent, + chainId, + }), + ) + .execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyEventRegistryRepository.name, + methodName: "saveLastProcessedEvent", + additionalData: { chainId, - }), - ) - .execute(); + event, + }, + }); + } } } diff --git a/packages/repository/src/repositories/kysely/index.ts b/packages/repository/src/repositories/kysely/index.ts index fcdd4e3..ea1e5ea 100644 --- a/packages/repository/src/repositories/kysely/index.ts +++ b/packages/repository/src/repositories/kysely/index.ts @@ -6,3 +6,6 @@ export * from "./applicationPayout.repository.js"; export * from "./strategyRegistry.repository.js"; export * from "./eventRegistry.repository.js"; export * from "./strategyProcessingCheckpoint.repository.js"; +export * from "./transactionManager.js"; +export * from "./prices.repository.js"; +export * from "./metadata.repository.js"; diff --git a/packages/repository/src/repositories/kysely/metadata.repository.ts b/packages/repository/src/repositories/kysely/metadata.repository.ts new file mode 100644 index 0000000..66fd5ea --- 
/dev/null +++ b/packages/repository/src/repositories/kysely/metadata.repository.ts @@ -0,0 +1,66 @@ +import { Kysely } from "kysely"; + +import { Database, handlePostgresError, ICache } from "../../internal.js"; + +export class KyselyMetadataCache implements ICache { + constructor( + private readonly db: Kysely, + private readonly schema: string, + ) {} + + /** @inheritdoc */ + async get(id: string): Promise { + try { + const result = await this.db + .withSchema(this.schema) + .selectFrom("metadataCache") + .select("metadata") + .where("id", "=", id) + .executeTakeFirst(); + + if (!result) { + return undefined; + } + + return result.metadata as T; + } catch (error) { + throw handlePostgresError(error, { + className: KyselyMetadataCache.name, + methodName: "get", + additionalData: { + id, + }, + }); + } + } + + /** @inheritdoc */ + async set(id: string, metadata: T): Promise { + try { + await this.db + .withSchema(this.schema) + .insertInto("metadataCache") + .values({ + id: id, + metadata: metadata as unknown, + createdAt: new Date(), + }) + .onConflict((oc) => + oc.column("id").doUpdateSet({ + metadata: metadata as unknown, + createdAt: new Date(), + }), + ) + .execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyMetadataCache.name, + methodName: "set", + additionalData: { + id, + metadata, + }, + }); + } + } +} diff --git a/packages/repository/src/repositories/kysely/prices.repository.ts b/packages/repository/src/repositories/kysely/prices.repository.ts new file mode 100644 index 0000000..5d1ca10 --- /dev/null +++ b/packages/repository/src/repositories/kysely/prices.repository.ts @@ -0,0 +1,89 @@ +import { Kysely } from "kysely"; + +import { TokenCode, TokenPrice } from "@grants-stack-indexer/shared"; + +import { Database, handlePostgresError, ICache } from "../../internal.js"; + +export type PriceCacheKey = { tokenCode: TokenCode; timestampMs: number }; + +/** + * A cache for token prices using Kysely. 
+ * This cache is used to store and retrieve token prices for a given token and timestamp. + * It uses the `priceCache` table in the database to store the prices. + * Note: no eviction strategy is implemented since it is not needed for the current use case. + */ +export class KyselyPricingCache implements ICache { + constructor( + private readonly db: Kysely, + private readonly schema: string, + ) {} + + /** @inheritdoc */ + async get(key: { tokenCode: TokenCode; timestampMs: number }): Promise { + const { tokenCode, timestampMs } = key; + + try { + const result = await this.db + .withSchema(this.schema) + .selectFrom("priceCache") + .select(["timestampMs", "priceUsd"]) + .where("tokenCode", "=", tokenCode) + .where("timestampMs", "=", timestampMs) + .executeTakeFirst(); + + if (!result) { + return undefined; + } + + return { + timestampMs: result.timestampMs, + priceUsd: result.priceUsd, + }; + } catch (error) { + throw handlePostgresError(error, { + className: KyselyPricingCache.name, + methodName: "get", + additionalData: { + key, + }, + }); + } + } + + /** @inheritdoc */ + async set( + key: { tokenCode: TokenCode; timestampMs: number }, + value: TokenPrice, + ): Promise { + const { tokenCode, timestampMs } = key; + const { priceUsd } = value; + + try { + await this.db + .withSchema(this.schema) + .insertInto("priceCache") + .values({ + tokenCode: tokenCode, + timestampMs: timestampMs, + priceUsd: priceUsd, + createdAt: new Date(), + }) + .onConflict((oc) => + oc.columns(["tokenCode", "timestampMs"]).doUpdateSet({ + priceUsd: priceUsd, + createdAt: new Date(), + }), + ) + .execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyPricingCache.name, + methodName: "set", + additionalData: { + key, + value, + }, + }); + } + } +} diff --git a/packages/repository/src/repositories/kysely/project.repository.ts b/packages/repository/src/repositories/kysely/project.repository.ts index abba90f..58ee32a 100644 ---
a/packages/repository/src/repositories/kysely/project.repository.ts +++ b/packages/repository/src/repositories/kysely/project.repository.ts @@ -5,6 +5,8 @@ import { Address, ChainId } from "@grants-stack-indexer/shared"; import { IProjectRepository } from "../../interfaces/projectRepository.interface.js"; import { Database, + handlePostgresError, + KyselyTransaction, NewPendingProjectRole, NewProject, NewProjectRole, @@ -15,7 +17,7 @@ import { ProjectRoleNames, } from "../../internal.js"; -export class KyselyProjectRepository implements IProjectRepository { +export class KyselyProjectRepository implements IProjectRepository { constructor( private readonly db: Kysely, private readonly schemaName: string, @@ -73,33 +75,63 @@ export class KyselyProjectRepository implements IProjectRepository { } /* @inheritdoc */ - async insertProject(project: NewProject): Promise { - await this.db.withSchema(this.schemaName).insertInto("projects").values(project).execute(); + async insertProject(project: NewProject, tx?: KyselyTransaction): Promise { + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + await queryBuilder.insertInto("projects").values(project).execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyProjectRepository.name, + methodName: "insertProject", + additionalData: { + project, + }, + }); + } } /* @inheritdoc */ async updateProject( where: { id: string; chainId: ChainId }, project: PartialProject, + tx?: KyselyTransaction, ): Promise { - await this.db - .withSchema(this.schemaName) - .updateTable("projects") - .set(project) - .where("id", "=", where.id) - .where("chainId", "=", where.chainId) - .execute(); + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + await queryBuilder + .updateTable("projects") + .set(project) + .where("id", "=", where.id) + .where("chainId", "=", where.chainId) + .execute(); + } catch (error) { + throw handlePostgresError(error, { + className: 
KyselyProjectRepository.name, + methodName: "updateProject", + additionalData: { + where, + project, + }, + }); + } } // ============================ PROJECT ROLES ============================ /* @inheritdoc */ - async insertProjectRole(projectRole: NewProjectRole): Promise { - await this.db - .withSchema(this.schemaName) - .insertInto("projectRoles") - .values(projectRole) - .execute(); + async insertProjectRole(projectRole: NewProjectRole, tx?: KyselyTransaction): Promise { + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + await queryBuilder.insertInto("projectRoles").values(projectRole).execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyProjectRepository.name, + methodName: "insertProjectRole", + additionalData: { + projectRole, + }, + }); + } } /* @inheritdoc */ @@ -108,19 +140,33 @@ export class KyselyProjectRepository implements IProjectRepository { projectId: string, role: ProjectRoleNames, address?: Address, + tx?: KyselyTransaction, ): Promise { - const query = this.db - .withSchema(this.schemaName) - .deleteFrom("projectRoles") - .where("chainId", "=", chainId) - .where("projectId", "=", projectId) - .where("role", "=", role); - - if (address) { - query.where("address", "=", address); + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + const query = queryBuilder + .deleteFrom("projectRoles") + .where("chainId", "=", chainId) + .where("projectId", "=", projectId) + .where("role", "=", role); + + if (address) { + query.where("address", "=", address); + } + + await query.execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyProjectRepository.name, + methodName: "deleteManyProjectRoles", + additionalData: { + chainId, + projectId, + role, + address, + }, + }); } - - await query.execute(); } // ============================ PENDING PROJECT ROLES ============================ @@ -149,20 +195,40 @@ export class KyselyProjectRepository 
implements IProjectRepository { } /* @inheritdoc */ - async insertPendingProjectRole(pendingProjectRole: NewPendingProjectRole): Promise { - await this.db - .withSchema(this.schemaName) - .insertInto("pendingProjectRoles") - .values(pendingProjectRole) - .execute(); + async insertPendingProjectRole( + pendingProjectRole: NewPendingProjectRole, + tx?: KyselyTransaction, + ): Promise { + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + await queryBuilder + .insertInto("pendingProjectRoles") + .values(pendingProjectRole) + .execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyProjectRepository.name, + methodName: "insertPendingProjectRole", + additionalData: { + pendingProjectRole, + }, + }); + } } /* @inheritdoc */ - async deleteManyPendingProjectRoles(ids: number[]): Promise { - await this.db - .withSchema(this.schemaName) - .deleteFrom("pendingProjectRoles") - .where("id", "in", ids) - .execute(); + async deleteManyPendingProjectRoles(ids: number[], tx?: KyselyTransaction): Promise { + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + await queryBuilder.deleteFrom("pendingProjectRoles").where("id", "in", ids).execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyProjectRepository.name, + methodName: "deleteManyPendingProjectRoles", + additionalData: { + ids, + }, + }); + } } } diff --git a/packages/repository/src/repositories/kysely/round.repository.ts b/packages/repository/src/repositories/kysely/round.repository.ts index 400ab72..0314821 100644 --- a/packages/repository/src/repositories/kysely/round.repository.ts +++ b/packages/repository/src/repositories/kysely/round.repository.ts @@ -4,7 +4,9 @@ import { Address, ChainId, stringify } from "@grants-stack-indexer/shared"; import { Database, + handlePostgresError, IRoundRepository, + KyselyTransaction, NewPendingRoundRole, NewRound, NewRoundRole, @@ -17,7 +19,7 @@ import { RoundRoleNames, } from 
"../../internal.js"; -export class KyselyRoundRepository implements IRoundRepository { +export class KyselyRoundRepository implements IRoundRepository { constructor( private readonly db: Kysely, private readonly schemaName: string, @@ -114,68 +116,110 @@ export class KyselyRoundRepository implements IRoundRepository { } /* @inheritdoc */ - async insertRound(round: NewRound): Promise { - await this.db.withSchema(this.schemaName).insertInto("rounds").values(round).execute(); + async insertRound(round: NewRound, tx?: KyselyTransaction): Promise { + const _round = this.formatRound(round); + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + await queryBuilder.insertInto("rounds").values(_round).execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyRoundRepository.name, + methodName: "insertRound", + additionalData: { + round: _round, + }, + }); + } } /* @inheritdoc */ async updateRound( where: { id: string; chainId: ChainId } | { chainId: ChainId; strategyAddress: Address }, round: PartialRound, + tx?: KyselyTransaction, ): Promise { const _round = this.formatRound(round); + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + const query = queryBuilder + .updateTable("rounds") + .set(_round) + .where("chainId", "=", where.chainId); - const query = this.db - .withSchema(this.schemaName) - .updateTable("rounds") - .set(_round) - .where("chainId", "=", where.chainId); - - if ("id" in where) { - await query.where("id", "=", where.id).execute(); - } else { - await query.where("strategyAddress", "=", where.strategyAddress).execute(); + if ("id" in where) { + await query.where("id", "=", where.id).execute(); + } else { + await query.where("strategyAddress", "=", where.strategyAddress).execute(); + } + } catch (error) { + throw handlePostgresError(error, { + className: KyselyRoundRepository.name, + methodName: "updateRound", + additionalData: { + where, + round: _round, + }, + }); } } /* 
@inheritdoc */ async incrementRoundFunds( - where: { - chainId: ChainId; - roundId: string; - }, + where: { chainId: ChainId; roundId: string }, amount: bigint, amountInUsd: string, + tx?: KyselyTransaction, ): Promise { - await this.db - .withSchema(this.schemaName) - .updateTable("rounds") - .set((eb) => ({ - fundedAmount: eb("fundedAmount", "+", amount), - fundedAmountInUsd: eb("fundedAmountInUsd", "+", amountInUsd), - })) - .where("chainId", "=", where.chainId) - .where("id", "=", where.roundId) - .execute(); + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + await queryBuilder + .updateTable("rounds") + .set((eb) => ({ + fundedAmount: eb("fundedAmount", "+", amount), + fundedAmountInUsd: eb("fundedAmountInUsd", "+", amountInUsd), + })) + .where("chainId", "=", where.chainId) + .where("id", "=", where.roundId) + .execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyRoundRepository.name, + methodName: "incrementRoundFunds", + additionalData: { + where, + amount, + amountInUsd, + }, + }); + } } /* @inheritdoc */ async incrementRoundTotalDistributed( - where: { - chainId: ChainId; - roundId: string; - }, + where: { chainId: ChainId; roundId: string }, amount: bigint, + tx?: KyselyTransaction, ): Promise { - await this.db - .withSchema(this.schemaName) - .updateTable("rounds") - .set((eb) => ({ - totalDistributed: eb("totalDistributed", "+", amount), - })) - .where("chainId", "=", where.chainId) - .where("id", "=", where.roundId) - .execute(); + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + await queryBuilder + .updateTable("rounds") + .set((eb) => ({ + totalDistributed: eb("totalDistributed", "+", amount), + })) + .where("chainId", "=", where.chainId) + .where("id", "=", where.roundId) + .execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyRoundRepository.name, + methodName: "incrementRoundTotalDistributed", + additionalData: { + where, + 
amount, + }, + }); + } } // ============================ ROUND ROLES ============================ @@ -186,12 +230,19 @@ export class KyselyRoundRepository implements IRoundRepository { } /* @inheritdoc */ - async insertRoundRole(roundRole: NewRoundRole): Promise { - await this.db - .withSchema(this.schemaName) - .insertInto("roundRoles") - .values(roundRole) - .execute(); + async insertRoundRole(roundRole: NewRoundRole, tx?: KyselyTransaction): Promise { + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + await queryBuilder.insertInto("roundRoles").values(roundRole).execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyRoundRepository.name, + methodName: "insertRoundRole", + additionalData: { + roundRole, + }, + }); + } } /* @inheritdoc */ @@ -200,15 +251,29 @@ export class KyselyRoundRepository implements IRoundRepository { roundId: string, role: RoundRoleNames, address: Address, + tx?: KyselyTransaction, ): Promise { - await this.db - .withSchema(this.schemaName) - .deleteFrom("roundRoles") - .where("chainId", "=", chainId) - .where("roundId", "=", roundId) - .where("role", "=", role) - .where("address", "=", address) - .execute(); + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + await queryBuilder + .deleteFrom("roundRoles") + .where("chainId", "=", chainId) + .where("roundId", "=", roundId) + .where("role", "=", role) + .where("address", "=", address) + .execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyRoundRepository.name, + methodName: "deleteManyRoundRolesByRoleAndAddress", + additionalData: { + chainId, + roundId, + role, + address, + }, + }); + } } // ============================ PENDING ROUND ROLES ============================ @@ -228,21 +293,38 @@ export class KyselyRoundRepository implements IRoundRepository { } /* @inheritdoc */ - async insertPendingRoundRole(pendingRoundRole: NewPendingRoundRole): Promise { - await this.db - 
.withSchema(this.schemaName) - .insertInto("pendingRoundRoles") - .values(pendingRoundRole) - .execute(); + async insertPendingRoundRole( + pendingRoundRole: NewPendingRoundRole, + tx?: KyselyTransaction, + ): Promise { + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + await queryBuilder.insertInto("pendingRoundRoles").values(pendingRoundRole).execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyRoundRepository.name, + methodName: "insertPendingRoundRole", + additionalData: { + pendingRoundRole, + }, + }); + } } /* @inheritdoc */ - async deleteManyPendingRoundRoles(ids: number[]): Promise { - await this.db - .withSchema(this.schemaName) - .deleteFrom("pendingRoundRoles") - .where("id", "in", ids) - .execute(); + async deleteManyPendingRoundRoles(ids: number[], tx?: KyselyTransaction): Promise { + try { + const queryBuilder = (tx || this.db).withSchema(this.schemaName); + await queryBuilder.deleteFrom("pendingRoundRoles").where("id", "in", ids).execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyRoundRepository.name, + methodName: "deleteManyPendingRoundRoles", + additionalData: { + ids, + }, + }); + } } /** diff --git a/packages/repository/src/repositories/kysely/strategyProcessingCheckpoint.repository.ts b/packages/repository/src/repositories/kysely/strategyProcessingCheckpoint.repository.ts index b4a6395..1f900c9 100644 --- a/packages/repository/src/repositories/kysely/strategyProcessingCheckpoint.repository.ts +++ b/packages/repository/src/repositories/kysely/strategyProcessingCheckpoint.repository.ts @@ -4,6 +4,7 @@ import { ChainId, Hex } from "@grants-stack-indexer/shared"; import { Database, + handlePostgresError, IStrategyProcessingCheckpointRepository, NewStrategyProcessingCheckpoint, StrategyProcessingCheckpoint, @@ -33,31 +34,52 @@ export class KyselyStrategyProcessingCheckpointRepository /** @inheritdoc */ async upsertCheckpoint(checkpoint: 
NewStrategyProcessingCheckpoint): Promise { - await this.db - .withSchema(this.schemaName) - .insertInto("strategyProcessingCheckpoints") - .values({ - ...checkpoint, - createdAt: new Date(), - updatedAt: new Date(), - }) - .onConflict((oc) => - oc.columns(["chainId", "strategyId"]).doUpdateSet({ - lastProcessedBlockNumber: checkpoint.lastProcessedBlockNumber, - lastProcessedLogIndex: checkpoint.lastProcessedLogIndex, + try { + await this.db + .withSchema(this.schemaName) + .insertInto("strategyProcessingCheckpoints") + .values({ + ...checkpoint, + createdAt: new Date(), updatedAt: new Date(), - }), - ) - .execute(); + }) + .onConflict((oc) => + oc.columns(["chainId", "strategyId"]).doUpdateSet({ + lastProcessedBlockNumber: checkpoint.lastProcessedBlockNumber, + lastProcessedLogIndex: checkpoint.lastProcessedLogIndex, + updatedAt: new Date(), + }), + ) + .execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyStrategyProcessingCheckpointRepository.name, + methodName: "upsertCheckpoint", + additionalData: { + checkpoint, + }, + }); + } } /** @inheritdoc */ async deleteCheckpoint(chainId: ChainId, strategyId: Hex): Promise { - await this.db - .withSchema(this.schemaName) - .deleteFrom("strategyProcessingCheckpoints") - .where("chainId", "=", chainId) - .where("strategyId", "=", strategyId) - .execute(); + try { + await this.db + .withSchema(this.schemaName) + .deleteFrom("strategyProcessingCheckpoints") + .where("chainId", "=", chainId) + .where("strategyId", "=", strategyId) + .execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyStrategyProcessingCheckpointRepository.name, + methodName: "deleteCheckpoint", + additionalData: { + chainId, + strategyId, + }, + }); + } } } diff --git a/packages/repository/src/repositories/kysely/strategyRegistry.repository.ts b/packages/repository/src/repositories/kysely/strategyRegistry.repository.ts index eef386c..e0c0ab9 100644 --- 
a/packages/repository/src/repositories/kysely/strategyRegistry.repository.ts +++ b/packages/repository/src/repositories/kysely/strategyRegistry.repository.ts @@ -3,7 +3,7 @@ import { Kysely } from "kysely"; import { Address, ChainId } from "@grants-stack-indexer/shared"; import { IStrategyRegistryRepository } from "../../interfaces/index.js"; -import { Database, Strategy } from "../../internal.js"; +import { Database, handlePostgresError, Strategy } from "../../internal.js"; export class KyselyStrategyRegistryRepository implements IStrategyRegistryRepository { constructor( @@ -27,12 +27,22 @@ export class KyselyStrategyRegistryRepository implements IStrategyRegistryReposi /** @inheritdoc */ async saveStrategy(strategy: Strategy): Promise { - await this.db - .withSchema(this.schemaName) - .insertInto("strategiesRegistry") - .values(strategy) - .onConflict((oc) => oc.columns(["chainId", "address"]).doUpdateSet(strategy)) - .execute(); + try { + await this.db + .withSchema(this.schemaName) + .insertInto("strategiesRegistry") + .values(strategy) + .onConflict((oc) => oc.columns(["chainId", "address"]).doUpdateSet(strategy)) + .execute(); + } catch (error) { + throw handlePostgresError(error, { + className: KyselyStrategyRegistryRepository.name, + methodName: "saveStrategy", + additionalData: { + strategy, + }, + }); + } } /** @inheritdoc */ diff --git a/packages/repository/src/repositories/kysely/transactionManager.ts b/packages/repository/src/repositories/kysely/transactionManager.ts new file mode 100644 index 0000000..6c4c810 --- /dev/null +++ b/packages/repository/src/repositories/kysely/transactionManager.ts @@ -0,0 +1,12 @@ +import { Kysely } from "kysely"; + +import { Database, ITransactionManager, KyselyTransaction } from "../../internal.js"; + +export class KyselyTransactionManager implements ITransactionManager { + constructor(private readonly db: Kysely) {} + + /** @inheritdoc */ + async runInTransaction(fn: (tx: KyselyTransaction) => Promise): Promise { + 
return this.db.transaction().execute(fn); + } +} diff --git a/packages/repository/src/types/index.ts b/packages/repository/src/types/index.ts index 64da8f9..d7e5272 100644 --- a/packages/repository/src/types/index.ts +++ b/packages/repository/src/types/index.ts @@ -7,3 +7,6 @@ export * from "./applicationPayout.types.js"; export * from "./strategy.types.js"; export * from "./event.types.js"; export * from "./strategyProcessingCheckpoint.types.js"; +export * from "./transaction.types.js"; +export * from "./metadata.types.js"; +export * from "./price.types.js"; diff --git a/packages/repository/src/types/metadata.types.ts b/packages/repository/src/types/metadata.types.ts new file mode 100644 index 0000000..7ac4d67 --- /dev/null +++ b/packages/repository/src/types/metadata.types.ts @@ -0,0 +1,8 @@ +export type Metadata = { + id: string; + metadata: unknown; + createdAt: Date; +}; + +export type NewMetadata = Omit; +export type PartialMetadata = Partial; diff --git a/packages/repository/src/types/price.types.ts b/packages/repository/src/types/price.types.ts new file mode 100644 index 0000000..644f05e --- /dev/null +++ b/packages/repository/src/types/price.types.ts @@ -0,0 +1,11 @@ +import { TokenCode } from "@grants-stack-indexer/shared"; + +export type Price = { + tokenCode: TokenCode; + timestampMs: number; + priceUsd: number; + createdAt: Date; +}; + +export type NewPrice = Omit; +export type PartialPrice = Partial; diff --git a/packages/repository/src/types/transaction.types.ts b/packages/repository/src/types/transaction.types.ts new file mode 100644 index 0000000..fddf619 --- /dev/null +++ b/packages/repository/src/types/transaction.types.ts @@ -0,0 +1,7 @@ +import { Transaction } from "kysely"; + +import { Database } from "../internal.js"; + +export type KyselyTransaction = Transaction; + +export type TransactionConnection = KyselyTransaction; diff --git a/packages/repository/src/utils/error.ts b/packages/repository/src/utils/error.ts new file mode 100644 index 
0000000..e66f6aa --- /dev/null +++ b/packages/repository/src/utils/error.ts @@ -0,0 +1,66 @@ +import { ErrorContext, NonRetriableError, RetriableError } from "@grants-stack-indexer/shared"; + +import { + DatabaseConnectionException, + DatabaseConstraintViolationException, + DatabaseDuplicateKeyException, + DatabaseException, + DatabaseInvalidParametersException, +} from "../exceptions/index.js"; +import { PostgresError } from "../internal.js"; + +/** + * Handles database errors by mapping them to our custom exception types + * @param error - The caught database error + * @returns A standardized DatabaseException + */ +export const handlePostgresError = ( + error: unknown, + context: ErrorContext, +): RetriableError | NonRetriableError => { + const pgError = error as PostgresError; + + if (pgError instanceof Error && "code" in pgError) { + // Connection errors + if (pgError.code === "57P01" || pgError.code === "57P03") { + return new DatabaseConnectionException(pgError.message, context, pgError); + } + + // Duplicate key violations + if (pgError.code === "23505") { + const key = pgError.detail?.split("=")[0] || "unknown"; + const value = pgError.detail?.split("=")[1] || "unknown"; + return new DatabaseDuplicateKeyException(key, value, context, pgError); + } + + // Constraint violations + if (pgError.code?.startsWith("23")) { + return new DatabaseConstraintViolationException( + pgError.constraint || "unknown", + pgError.message, + context, + pgError, + ); + } + + // Invalid parameters + if (pgError.code === "22P02" || pgError.code === "22003") { + return new DatabaseInvalidParametersException(pgError.message, context, pgError); + } + + // Default database error + return new DatabaseException(pgError.message, context, pgError); + } + + // If it's already one of our exceptions, return it as is + if (error instanceof DatabaseException) { + return error; + } + + // Unknown errors + return new DatabaseException( + error instanceof Error ? 
error.message : "An unknown database error occurred", + context, + pgError, + ); +}; diff --git a/packages/repository/src/utils/index.ts b/packages/repository/src/utils/index.ts new file mode 100644 index 0000000..5327826 --- /dev/null +++ b/packages/repository/src/utils/index.ts @@ -0,0 +1 @@ +export * from "./error.js"; diff --git a/packages/shared/src/exceptions/base.ts b/packages/shared/src/exceptions/base.ts new file mode 100644 index 0000000..ee2b749 --- /dev/null +++ b/packages/shared/src/exceptions/base.ts @@ -0,0 +1,40 @@ +export interface ErrorContext { + chainId?: string; + className?: string; + methodName?: string; + timestamp?: Date; + additionalData?: Record; +} + +export abstract class BaseError extends Error { + public readonly context: ErrorContext; + + constructor( + message: string, + context: ErrorContext, + public override readonly cause?: Error, + ) { + super(message); + this.context = { + ...context, + timestamp: new Date(), + }; + + Error.captureStackTrace(this, this.constructor); + } + + /** + * Get the full error chain as a string + */ + getFullStack(): string { + let stack = this.stack || ""; + let currentCause = this.cause; + + while (currentCause) { + stack += "\nCaused by: " + (currentCause.stack || currentCause); + currentCause = (currentCause as Error & { cause?: Error }).cause; + } + + return stack; + } +} diff --git a/packages/shared/src/exceptions/common.ts b/packages/shared/src/exceptions/common.ts new file mode 100644 index 0000000..3546336 --- /dev/null +++ b/packages/shared/src/exceptions/common.ts @@ -0,0 +1,17 @@ +import { ErrorContext, RetriableError, RetryMetadata } from "./index.js"; + +export class NetworkError extends RetriableError { + constructor(context: ErrorContext, metadata?: RetryMetadata, cause?: Error) { + super("Network request failed", context, metadata, cause); + } +} + +export class RateLimitError extends RetriableError { + constructor(context: ErrorContext, retryAfterInMs?: number) { + super("Rate limit 
exceeded", context, { + retryAfterInMs, + statusCode: 429, + failureReason: "Rate limit exceeded", + }); + } +} diff --git a/packages/shared/src/exceptions/index.ts b/packages/shared/src/exceptions/index.ts new file mode 100644 index 0000000..2bd2dd8 --- /dev/null +++ b/packages/shared/src/exceptions/index.ts @@ -0,0 +1,4 @@ +export * from "./base.js"; +export * from "./nonRetriable.js"; +export * from "./retriable.js"; +export * from "./common.js"; diff --git a/packages/shared/src/exceptions/nonRetriable.ts b/packages/shared/src/exceptions/nonRetriable.ts new file mode 100644 index 0000000..837cb01 --- /dev/null +++ b/packages/shared/src/exceptions/nonRetriable.ts @@ -0,0 +1,7 @@ +import { BaseError, ErrorContext } from "./index.js"; + +export class NonRetriableError extends BaseError { + constructor(message: string, context: ErrorContext = {}, cause?: Error) { + super(message, context, cause); + } +} diff --git a/packages/shared/src/exceptions/retriable.ts b/packages/shared/src/exceptions/retriable.ts new file mode 100644 index 0000000..0c8a2ef --- /dev/null +++ b/packages/shared/src/exceptions/retriable.ts @@ -0,0 +1,18 @@ +import { BaseError, ErrorContext } from "./index.js"; + +export interface RetryMetadata { + retryAfterInMs?: number; // Optional time in ms specified by service + statusCode?: number; // HTTP status code if applicable + failureReason?: string; // Specific reason for the failure +} + +export class RetriableError extends BaseError { + constructor( + message: string, + context: ErrorContext, + public readonly metadata?: RetryMetadata, + cause?: Error, + ) { + super(message, context, cause); + } +} diff --git a/packages/shared/src/external.ts b/packages/shared/src/external.ts index 01ba50e..dbd0d85 100644 --- a/packages/shared/src/external.ts +++ b/packages/shared/src/external.ts @@ -16,8 +16,14 @@ export { Logger } from "./logger/logger.js"; export { BigNumber } from "./internal.js"; export type { BigNumberType } from "./internal.js"; -export 
type { TokenCode, Token } from "./internal.js"; +export type { TokenCode, Token, TokenPrice } from "./internal.js"; export { TOKENS, getToken, getTokenOrThrow, UnknownToken } from "./internal.js"; export { isAlloEvent, isRegistryEvent, isStrategyEvent } from "./internal.js"; export { stringify } from "./internal.js"; + +export { RetriableError, NonRetriableError, RateLimitError, NetworkError } from "./internal.js"; +export type { RetryMetadata, ErrorContext } from "./internal.js"; + +export { ExponentialBackoff, RetryHandler } from "./internal.js"; +export type { RetryStrategy, RetryStrategyOptions } from "./internal.js"; diff --git a/packages/shared/src/internal.ts b/packages/shared/src/internal.ts index 1130d35..5229d88 100644 --- a/packages/shared/src/internal.ts +++ b/packages/shared/src/internal.ts @@ -6,3 +6,5 @@ export * from "./constants/index.js"; export * from "./utils/testing.js"; export * from "./logger/index.js"; export * from "./tokens/tokens.js"; +export * from "./exceptions/index.js"; +export * from "./retry/index.js"; diff --git a/packages/shared/src/retry/exponentialBackoff.strategy.ts b/packages/shared/src/retry/exponentialBackoff.strategy.ts new file mode 100644 index 0000000..115726e --- /dev/null +++ b/packages/shared/src/retry/exponentialBackoff.strategy.ts @@ -0,0 +1,57 @@ +import { RetryStrategy, RetryStrategyOptions } from "./index.js"; + +type ExponentialBackoffOptions = RetryStrategyOptions & { + factor: number; +}; + +/** + * Implements exponential backoff retry strategy with jitter + * + * Exponentially increases delay between retry attempts by multiplying base delay + * by a factor raised to the attempt count. Also adds random jitter to prevent + * thundering herd problems. 
+ */ +export class ExponentialBackoff implements RetryStrategy { + /** + * @param options - Configuration options + * @param options.baseDelay - Initial delay in milliseconds (default: 5000) + * @param options.factor - Multiplier for exponential increase (default: 2) + * @param options.maxAttempts - Maximum number of retry attempts (default: 3) + * @param options.maxDelay - Optional maximum delay cap in milliseconds + */ + constructor( + private readonly options: ExponentialBackoffOptions = { + baseDelay: 5000, + factor: 2, + maxAttempts: 3, + }, + ) {} + + /** @inheritdoc */ + getDelay(attemptCount: number, retryAfter?: number): number { + const calculatedDelay = + this.options.baseDelay * Math.pow(this.options.factor, attemptCount); + const targetDelay = this.options.maxDelay + ? Math.min(calculatedDelay, this.options.maxDelay) + : calculatedDelay; + + const delay = retryAfter ? Math.max(retryAfter, targetDelay) : targetDelay; + return this.addJitter(delay); + } + + /** @inheritdoc */ + shouldRetry(attemptCount: number): boolean { + return attemptCount < this.options.maxAttempts; + } + + /** + * Adds random jitter to delay value to prevent thundering herd + * @param delay - Base delay value in milliseconds + * @returns Delay with jitter applied (±20% of base delay) + */ + private addJitter(delay: number): number { + // Random value between 0.8 and 1.2 + const jitterFactor = 0.8 + Math.random() * 0.4; + return Math.floor(delay * jitterFactor); + } +} diff --git a/packages/shared/src/retry/index.ts b/packages/shared/src/retry/index.ts new file mode 100644 index 0000000..56602c4 --- /dev/null +++ b/packages/shared/src/retry/index.ts @@ -0,0 +1,3 @@ +export * from "./retryStrategy.interface.js"; +export * from "./exponentialBackoff.strategy.js"; +export * from "./retry.js"; diff --git a/packages/shared/src/retry/retry.ts b/packages/shared/src/retry/retry.ts new file mode 100644 index 0000000..7134e2b --- /dev/null +++ b/packages/shared/src/retry/retry.ts @@ -0,0 
+1,65 @@ +import { ILogger, RetriableError } from "../internal.js"; +import { ExponentialBackoff, RetryStrategy } from "./index.js"; + +/** + * Handles retrying operations with configurable retry strategies. + * Supports exponential backoff and other retry patterns through the RetryStrategy interface. + */ +export class RetryHandler { + /** + * Creates a new RetryHandler instance + * @param strategy - The retry strategy to use, defaults to ExponentialBackoff + * @param logger - Logger instance for debug messages + */ + constructor( + private readonly strategy: RetryStrategy = new ExponentialBackoff(), + private readonly logger: ILogger, + ) {} + + /** + * Executes an operation with retry logic + * @param operation - Async operation to execute and potentially retry + * @param params - Optional parameters + * @param params.abortSignal - Optional AbortSignal to cancel retries + * @returns Promise that resolves when operation succeeds or max retries exceeded + * @throws RetriableError if max retries exceeded + * @throws Error if operation aborted or non-retriable error occurs + */ + async execute( + operation: () => Promise, + params: { abortSignal?: AbortSignal } = {}, + ): Promise { + let attemptCount = 0; + while (!params.abortSignal?.aborted) { + try { + await operation(); + break; + } catch (error) { + if (!(error instanceof RetriableError)) { + throw error; + } + attemptCount++; + + if (!this.strategy.shouldRetry(attemptCount)) { + throw error; + } else { + const delay = this.strategy.getDelay( + attemptCount, + error.metadata?.retryAfterInMs, + ); + + this.logger.debug(`Retrying in ${delay}ms`, { + className: RetryHandler.name, + delay, + }); + + await new Promise((resolve) => setTimeout(resolve, delay)); + } + } + } + + if (params.abortSignal?.aborted) { + throw new Error("Operation aborted"); + } + } +} diff --git a/packages/shared/src/retry/retryStrategy.interface.ts b/packages/shared/src/retry/retryStrategy.interface.ts new file
mode 100644 index 0000000..b290913 --- /dev/null +++ b/packages/shared/src/retry/retryStrategy.interface.ts @@ -0,0 +1,23 @@ +export interface RetryStrategy { + /** + * Calculate delay for next retry attempt + * @param attemptCount - Current retry attempt number + * @param retryAfter - Optional minimum delay specified by service + * @returns Delay duration in milliseconds + */ + getDelay(attemptCount: number, retryAfter?: number): number; + + /** + * Determine if another retry should be attempted + * @param attemptCount - Current retry attempt number + * @returns True if retry should be attempted, false otherwise + */ + shouldRetry(attemptCount: number): boolean; +} + +export type RetryStrategyOptions = { + baseDelay: number; + maxDelay?: number; + factor?: number; + maxAttempts: number; +}; diff --git a/packages/shared/src/tokens/tokens.ts b/packages/shared/src/tokens/tokens.ts index dcb3961..29a8434 100644 --- a/packages/shared/src/tokens/tokens.ts +++ b/packages/shared/src/tokens/tokens.ts @@ -12,6 +12,11 @@ export type Token = { voteAmountCap?: bigint; }; +export type TokenPrice = { + timestampMs: number; + priceUsd: number; +}; + export const TOKENS: { [chainId: number]: { [tokenAddress: Address]: Token; diff --git a/packages/shared/test/retry/exponentialBackoff.strategy.spec.ts b/packages/shared/test/retry/exponentialBackoff.strategy.spec.ts new file mode 100644 index 0000000..6f0502c --- /dev/null +++ b/packages/shared/test/retry/exponentialBackoff.strategy.spec.ts @@ -0,0 +1,108 @@ +import { describe, expect, it, vi } from "vitest"; + +import { ExponentialBackoff } from "../../src/retry/exponentialBackoff.strategy.js"; + +describe("ExponentialBackoff", () => { + describe("constructor", () => { + it("uses default options when none provided", () => { + const strategy = new ExponentialBackoff(); + expect(strategy["options"].baseDelay).toBe(5000); + expect(strategy["options"].factor).toBe(2); + expect(strategy["options"].maxAttempts).toBe(3); + }); + + 
it("applies provided options", () => { + const strategy = new ExponentialBackoff({ + baseDelay: 1000, + factor: 3, + maxAttempts: 5, + maxDelay: 10000, + }); + expect(strategy["options"].baseDelay).toBe(1000); + expect(strategy["options"].factor).toBe(3); + expect(strategy["options"].maxAttempts).toBe(5); + expect(strategy["options"].maxDelay).toBe(10000); + }); + }); + + describe("getDelay", () => { + it("calculates exponential delay correctly", () => { + const strategy = new ExponentialBackoff({ + baseDelay: 1000, + factor: 2, + maxAttempts: 3, + }); + + // Mock Math.random to return 0.5 for consistent jitter testing + vi.spyOn(Math, "random").mockReturnValue(0.5); + + // With jitter factor of 1.0 (0.8 + 0.5 * 0.4): + // Attempt 1: 1000 * (2^1) * 1.0 = 2000 + // Attempt 2: 1000 * (2^2) * 1.0 = 4000 + expect(strategy.getDelay(1)).toBe(2000); + expect(strategy.getDelay(2)).toBe(4000); + }); + + it("respects maxDelay cap", () => { + const strategy = new ExponentialBackoff({ + baseDelay: 1000, + factor: 2, + maxAttempts: 3, + maxDelay: 3000, + }); + + vi.spyOn(Math, "random").mockReturnValue(0.5); + + // Should be capped at 3000 even though calculation would be 4000 + expect(strategy.getDelay(2)).toBe(3000); + }); + + it("uses retryAfter when larger than calculated delay", () => { + const strategy = new ExponentialBackoff({ + baseDelay: 1000, + factor: 2, + maxAttempts: 3, + }); + + vi.spyOn(Math, "random").mockReturnValue(0.5); + + // Calculated delay would be 2000, but retryAfter is 3000 + expect(strategy.getDelay(1, 3000)).toBe(3000); + }); + + it("adds jitter within expected range", () => { + const strategy = new ExponentialBackoff({ + baseDelay: 1000, + factor: 2, + maxAttempts: 3, + }); + + const delay = strategy.getDelay(1); // Base delay would be 2000 + expect(delay).toBeGreaterThanOrEqual(1600); // 2000 * 0.8 + expect(delay).toBeLessThanOrEqual(2400); // 2000 * 1.2 + }); + }); + + describe("shouldRetry", () => { + it("returns true when attempt count is below 
max attempts", () => { + const strategy = new ExponentialBackoff({ + baseDelay: 1000, + factor: 2, + maxAttempts: 3, + }); + expect(strategy.shouldRetry(0)).toBe(true); + expect(strategy.shouldRetry(1)).toBe(true); + expect(strategy.shouldRetry(2)).toBe(true); + }); + + it("returns false when attempt count reaches or exceeds max attempts", () => { + const strategy = new ExponentialBackoff({ + baseDelay: 1000, + factor: 2, + maxAttempts: 3, + }); + expect(strategy.shouldRetry(3)).toBe(false); + expect(strategy.shouldRetry(4)).toBe(false); + }); + }); +}); diff --git a/packages/shared/test/retry/retry.spec.ts b/packages/shared/test/retry/retry.spec.ts new file mode 100644 index 0000000..7e4ae6f --- /dev/null +++ b/packages/shared/test/retry/retry.spec.ts @@ -0,0 +1,87 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; + +import { NonRetriableError, RetriableError } from "../../src/internal.js"; +import { ExponentialBackoff } from "../../src/retry/exponentialBackoff.strategy.js"; +import { RetryHandler } from "../../src/retry/retry.js"; + +describe("RetryHandler", () => { + let retriableError: RetriableError; + const mockLogger = { + debug: vi.fn(), + error: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + }; + + beforeEach(() => { + retriableError = new RetriableError( + "Temporary error", + { className: "MyClass" }, + { retryAfterInMs: 5000 }, + ); + }); + + afterEach(() => { + vi.clearAllMocks(); + }); + + it("executes operation successfully on first try", async () => { + const handler = new RetryHandler(new ExponentialBackoff(), mockLogger); + const operation = vi.fn().mockResolvedValue("success"); + + await handler.execute(operation); + + expect(operation).toHaveBeenCalledTimes(1); + expect(mockLogger.debug).not.toHaveBeenCalled(); + }); + + it("retries on RetriableError and succeeds", async () => { + vi.useFakeTimers(); + const handler = new RetryHandler(new ExponentialBackoff(), mockLogger); + const operation = vi + .fn() + 
.mockRejectedValueOnce(retriableError) + .mockResolvedValueOnce("success"); + + const promise = handler.execute(operation); + + // Fast-forward through the delay + await vi.runAllTimersAsync(); + await promise; + + expect(operation).toHaveBeenCalledTimes(2); + expect(mockLogger.debug).toHaveBeenCalled(); + vi.useRealTimers(); + }); + + it("throws non-RetriableError immediately", async () => { + const handler = new RetryHandler(new ExponentialBackoff(), mockLogger); + const error = new NonRetriableError("Non-retriable error", { className: "MyClass" }); + const operation = vi.fn().mockRejectedValue(error); + + await expect(handler.execute(operation)).rejects.toThrow(error); + expect(operation).toHaveBeenCalledTimes(1); + expect(mockLogger.debug).not.toHaveBeenCalled(); + }); + + it("throws error after max attempts", async () => { + const strategy = new ExponentialBackoff({ maxAttempts: 2, baseDelay: 1, factor: 1 }); + const handler = new RetryHandler(strategy, mockLogger); + const error = new RetriableError("Temporary error", { className: "MyClass" }); + const error2 = new RetriableError("Temporary error 2", { className: "MyClass" }); + const operation = vi + .fn() + .mockImplementationOnce(() => { + throw error; + }) + .mockImplementation(() => { + throw error2; + }); + + const promise = handler.execute(operation); + + await expect(promise).rejects.toThrow(error2); + + expect(operation).toHaveBeenCalledTimes(2); + }); +}); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 74a274f..5b7d7ed 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -186,6 +186,9 @@ importers: packages/metadata: dependencies: + "@grants-stack-indexer/repository": + specifier: workspace:* + version: link:../repository "@grants-stack-indexer/shared": specifier: workspace:* version: link:../shared @@ -202,6 +205,9 @@ importers: packages/pricing: dependencies: + "@grants-stack-indexer/repository": + specifier: workspace:* + version: link:../repository "@grants-stack-indexer/shared": specifier:
workspace:* version: link:../shared diff --git a/scripts/migrations/src/migrations/20250127T000000_add_cache_tables.ts b/scripts/migrations/src/migrations/20250127T000000_add_cache_tables.ts new file mode 100644 index 0000000..ec78723 --- /dev/null +++ b/scripts/migrations/src/migrations/20250127T000000_add_cache_tables.ts @@ -0,0 +1,27 @@ +import { Kysely, sql } from "kysely"; + +export async function up(db: Kysely): Promise { + // Create pricing cache table + await db.schema + .createTable("priceCache") + .addColumn("tokenCode", "text", (col) => col.notNull()) + .addColumn("timestampMs", "integer", (col) => col.notNull()) + .addColumn("priceUsd", "decimal(36, 18)", (col) => col.notNull()) + .addColumn("createdAt", "timestamptz", (col) => col.defaultTo(sql`now()`)) + .addPrimaryKeyConstraint("pricing_cache_pkey", ["tokenCode", "timestampMs"]) + .execute(); + + // Create metadata cache table + await db.schema + .createTable("metadataCache") + .addColumn("id", "text", (col) => col.notNull()) + .addColumn("metadata", "jsonb", (col) => col.notNull()) + .addColumn("createdAt", "timestamptz", (col) => col.defaultTo(sql`now()`)) + .addPrimaryKeyConstraint("metadata_cache_pkey", ["id"]) + .execute(); +} + +export async function down(db: Kysely): Promise { + await db.schema.dropTable("priceCache").execute(); + await db.schema.dropTable("metadataCache").execute(); +}