diff --git a/packages/core/src/Ponder.ts b/packages/core/src/Ponder.ts index 9a179ace1..0b47762d7 100644 --- a/packages/core/src/Ponder.ts +++ b/packages/core/src/Ponder.ts @@ -7,21 +7,21 @@ import { buildDatabase } from "@/config/database"; import { type Network, buildNetwork } from "@/config/networks"; import { type Options } from "@/config/options"; import { UserErrorService } from "@/errors/service"; -import { EventAggregatorService } from "@/event-aggregator/service"; -import { PostgresEventStore } from "@/event-store/postgres/store"; -import { SqliteEventStore } from "@/event-store/sqlite/store"; -import { type EventStore } from "@/event-store/store"; -import { HistoricalSyncService } from "@/historical-sync/service"; import { IndexingService } from "@/indexing/service"; +import { PostgresIndexingStore } from "@/indexing-store/postgres/store"; +import { SqliteIndexingStore } from "@/indexing-store/sqlite/store"; +import { type IndexingStore } from "@/indexing-store/store"; import { LoggerService } from "@/logs/service"; import { MetricsService } from "@/metrics/service"; -import { RealtimeSyncService } from "@/realtime-sync/service"; import { ServerService } from "@/server/service"; +import { SyncGateway } from "@/sync-gateway/service"; +import { HistoricalSyncService } from "@/sync-historical/service"; +import { RealtimeSyncService } from "@/sync-realtime/service"; +import { PostgresSyncStore } from "@/sync-store/postgres/store"; +import { SqliteSyncStore } from "@/sync-store/sqlite/store"; +import { type SyncStore } from "@/sync-store/store"; import { TelemetryService } from "@/telemetry/service"; import { UiService } from "@/ui/service"; -import { PostgresUserStore } from "@/user-store/postgres/store"; -import { SqliteUserStore } from "@/user-store/sqlite/store"; -import { type UserStore } from "@/user-store/store"; import { hydrateIndexingFunctions } from "./build/functions"; import { buildSources, Source } from "./config/sources"; @@ -42,17 +42,17 @@ export class Ponder { sources: Source[] = undefined!; // Sync services - eventStore: EventStore = undefined!; + syncStore: SyncStore = undefined!; syncServices: { network: Network; sources: Source[]; historical: HistoricalSyncService; realtime: RealtimeSyncService; }[] = undefined!; - eventAggregatorService: EventAggregatorService = undefined!; + syncGatewayService: SyncGateway = undefined!; // Indexing services - userStore: UserStore = undefined!; + indexingStore: IndexingStore = undefined!; indexingService: IndexingService = undefined!; // Misc services @@ -75,12 +75,12 @@ export class Ponder { } async setup({ - eventStore, - userStore, + syncStore, + indexingStore, }: { // These options are only used for testing. - eventStore?: EventStore; - userStore?: UserStore; + syncStore?: SyncStore; + indexingStore?: IndexingStore; } = {}) { this.common.logger.debug({ service: "app", @@ -103,17 +103,17 @@ export class Ponder { } const database = buildDatabase({ common: this.common, config }); - this.eventStore = - eventStore ?? + this.syncStore = + syncStore ?? (database.kind === "sqlite" - ? new SqliteEventStore({ db: database.db }) - : new PostgresEventStore({ pool: database.pool })); + ? new SqliteSyncStore({ db: database.db }) + : new PostgresSyncStore({ pool: database.pool })); - this.userStore = - userStore ?? + this.indexingStore = + indexingStore ?? (database.kind === "sqlite" - ? new SqliteUserStore({ db: database.db }) - : new PostgresUserStore({ pool: database.pool })); + ? 
new SqliteIndexingStore({ db: database.db }) + : new PostgresIndexingStore({ pool: database.pool })); this.sources = buildSources({ config }); @@ -142,36 +142,36 @@ export class Ponder { sources: sourcesForNetwork, historical: new HistoricalSyncService({ common: this.common, - eventStore: this.eventStore, + syncStore: this.syncStore, network, sources: sourcesForNetwork, }), realtime: new RealtimeSyncService({ common: this.common, - eventStore: this.eventStore, + syncStore: this.syncStore, network, sources: sourcesForNetwork, }), }); }); - this.eventAggregatorService = new EventAggregatorService({ + this.syncGatewayService = new SyncGateway({ common: this.common, - eventStore: this.eventStore, + syncStore: this.syncStore, networks: networksToSync, sources: this.sources, }); this.indexingService = new IndexingService({ common: this.common, - eventStore: this.eventStore, - userStore: this.userStore, - eventAggregatorService: this.eventAggregatorService, + syncStore: this.syncStore, + indexingStore: this.indexingStore, + syncGatewayService: this.syncGatewayService, sources: this.sources, }); this.serverService = new ServerService({ common: this.common, - userStore: this.userStore, + indexingStore: this.indexingStore, }); this.codegenService = new CodegenService({ common: this.common, @@ -190,7 +190,7 @@ export class Ponder { this.codegenService.generateAppFile(); // One-time setup for some services. - await this.eventStore.migrateUp(); + await this.syncStore.migrateUp(); await this.serverService.start(); // Finally, load the schema + indexing functions which will trigger @@ -205,7 +205,7 @@ export class Ponder { properties: { command: "ponder dev", contractCount: this.sources.length, - databaseKind: this.eventStore.kind, + databaseKind: this.syncStore.kind, }, }); @@ -226,7 +226,7 @@ export class Ponder { properties: { command: "ponder start", contractCount: this.sources.length, - databaseKind: this.eventStore.kind, + databaseKind: this.syncStore.kind, }, }); @@ -254,7 +254,7 @@ export class Ponder { } async kill() { - this.eventAggregatorService.clearListeners(); + this.syncGatewayService.clearListeners(); this.common.telemetry.record({ event: "App Killed", @@ -276,8 +276,8 @@ export class Ponder { await this.serverService.kill(); await this.common.telemetry.kill(); - await this.userStore.kill(); - await this.eventStore.kill(); + await this.indexingStore.kill(); + await this.syncStore.kill(); this.common.logger.debug({ service: "app", @@ -326,50 +326,47 @@ export class Ponder { const { chainId } = network; historical.on("historicalCheckpoint", ({ blockTimestamp }) => { - this.eventAggregatorService.handleNewHistoricalCheckpoint({ + this.syncGatewayService.handleNewHistoricalCheckpoint({ chainId, timestamp: blockTimestamp, }); }); historical.on("syncComplete", () => { - this.eventAggregatorService.handleHistoricalSyncComplete({ + this.syncGatewayService.handleHistoricalSyncComplete({ chainId, }); }); realtime.on("realtimeCheckpoint", ({ blockTimestamp }) => { - this.eventAggregatorService.handleNewRealtimeCheckpoint({ + this.syncGatewayService.handleNewRealtimeCheckpoint({ chainId, timestamp: blockTimestamp, }); }); realtime.on("finalityCheckpoint", ({ blockTimestamp }) => { - this.eventAggregatorService.handleNewFinalityCheckpoint({ + this.syncGatewayService.handleNewFinalityCheckpoint({ chainId, timestamp: blockTimestamp, }); }); realtime.on("shallowReorg", ({ commonAncestorBlockTimestamp }) => { - this.eventAggregatorService.handleReorg({ + this.syncGatewayService.handleReorg({ 
commonAncestorTimestamp: commonAncestorBlockTimestamp, }); }); }); - this.eventAggregatorService.on("newCheckpoint", async () => { + this.syncGatewayService.on("newCheckpoint", async () => { await this.indexingService.processEvents(); }); - this.eventAggregatorService.on( - "reorg", - async ({ commonAncestorTimestamp }) => { - await this.indexingService.handleReorg({ commonAncestorTimestamp }); - await this.indexingService.processEvents(); - } - ); + this.syncGatewayService.on("reorg", async ({ commonAncestorTimestamp }) => { + await this.indexingService.handleReorg({ commonAncestorTimestamp }); + await this.indexingService.processEvents(); + }); this.indexingService.on("eventsProcessed", ({ toTimestamp }) => { if (this.serverService.isHistoricalIndexingComplete) return; @@ -378,8 +375,8 @@ export class Ponder { // the new toTimestamp is greater than the historical sync completion timestamp, // historical event processing is complete, and the server should begin responding as healthy. if ( - this.eventAggregatorService.historicalSyncCompletedAt && - toTimestamp >= this.eventAggregatorService.historicalSyncCompletedAt + this.syncGatewayService.historicalSyncCompletedAt && + toTimestamp >= this.syncGatewayService.historicalSyncCompletedAt ) { this.serverService.setIsHistoricalIndexingComplete(); } diff --git a/packages/core/src/_test/art-gobblers/app.test.ts b/packages/core/src/_test/art-gobblers/app.test.ts index 35327950f..7b63ab632 100644 --- a/packages/core/src/_test/art-gobblers/app.test.ts +++ b/packages/core/src/_test/art-gobblers/app.test.ts @@ -2,12 +2,12 @@ import { rmSync } from "node:fs"; import request from "supertest"; import { type TestContext, afterEach, beforeEach, expect, test } from "vitest"; -import { setupEventStore, setupUserStore } from "@/_test/setup"; +import { setupIndexingStore, setupSyncStore } from "@/_test/setup"; import { buildOptions } from "@/config/options"; import { Ponder } from "@/Ponder"; -beforeEach((context) => setupEventStore(context)); -beforeEach((context) => setupUserStore(context)); +beforeEach((context) => setupSyncStore(context)); +beforeEach((context) => setupIndexingStore(context)); const setup = async ({ context }: { context: TestContext }) => { const options = buildOptions({ @@ -25,8 +25,8 @@ const setup = async ({ context }: { context: TestContext }) => { const ponder = new Ponder({ options: testOptions }); await ponder.setup({ - eventStore: context.eventStore, - userStore: context.userStore, + syncStore: context.syncStore, + indexingStore: context.indexingStore, }); await ponder.start(); diff --git a/packages/core/src/_test/ens/app.test.ts b/packages/core/src/_test/ens/app.test.ts index 581fe7004..46a9726e9 100644 --- a/packages/core/src/_test/ens/app.test.ts +++ b/packages/core/src/_test/ens/app.test.ts @@ -2,12 +2,12 @@ import { rmSync } from "node:fs"; import request from "supertest"; import { type TestContext, afterEach, beforeEach, expect, test } from "vitest"; -import { setupEventStore, setupUserStore } from "@/_test/setup"; +import { setupIndexingStore, setupSyncStore } from "@/_test/setup"; import { buildOptions } from "@/config/options"; import { Ponder } from "@/Ponder"; -beforeEach((context) => setupEventStore(context)); -beforeEach((context) => setupUserStore(context)); +beforeEach((context) => setupSyncStore(context)); +beforeEach((context) => setupIndexingStore(context)); const setup = async ({ context }: { context: TestContext }) => { const options = buildOptions({ @@ -26,8 +26,8 @@ const setup = async ({ context }: { 
context: TestContext }) => { const ponder = new Ponder({ options: testOptions }); await ponder.setup({ - eventStore: context.eventStore, - userStore: context.userStore, + syncStore: context.syncStore, + indexingStore: context.indexingStore, }); await ponder.start(); diff --git a/packages/core/src/_test/setup.ts b/packages/core/src/_test/setup.ts index 6af8cddf1..779c96d11 100644 --- a/packages/core/src/_test/setup.ts +++ b/packages/core/src/_test/setup.ts @@ -7,16 +7,16 @@ import { type TestContext, beforeEach } from "vitest"; import { patchSqliteDatabase } from "@/config/database"; import { buildOptions } from "@/config/options"; import { UserErrorService } from "@/errors/service"; -import { PostgresEventStore } from "@/event-store/postgres/store"; -import { SqliteEventStore } from "@/event-store/sqlite/store"; -import type { EventStore } from "@/event-store/store"; +import { PostgresIndexingStore } from "@/indexing-store/postgres/store"; +import { SqliteIndexingStore } from "@/indexing-store/sqlite/store"; +import type { IndexingStore } from "@/indexing-store/store"; import { LoggerService } from "@/logs/service"; import { MetricsService } from "@/metrics/service"; import type { Common } from "@/Ponder"; +import { PostgresSyncStore } from "@/sync-store/postgres/store"; +import { SqliteSyncStore } from "@/sync-store/sqlite/store"; +import type { SyncStore } from "@/sync-store/store"; import { TelemetryService } from "@/telemetry/service"; -import { PostgresUserStore } from "@/user-store/postgres/store"; -import { SqliteUserStore } from "@/user-store/sqlite/store"; -import type { UserStore } from "@/user-store/store"; import { FORK_BLOCK_NUMBER, vitalik } from "./constants"; import { poolId, testClient } from "./utils"; @@ -28,18 +28,18 @@ const ponderCoreDir = path.resolve(__dirname, "../../"); moduleAlias.addAlias("@ponder/core", ponderCoreDir); /** - * Inject an isolated event store into the test context. + * Inject an isolated sync store into the test context. * * If `process.env.DATABASE_URL` is set, assume it's a Postgres connection string - * and run tests against it. If passed a `schema`, PostgresEventStore will create + * and run tests against it. If passed a `schema`, PostgresSyncStore will create * it if it doesn't exist, then use for all connections. We use the Vitest pool ID as * the schema key which enables test isolation (same approach as Anvil.js). */ declare module "vitest" { export interface TestContext { common: Common; - eventStore: EventStore; - userStore: UserStore; + syncStore: SyncStore; + indexingStore: IndexingStore; } } @@ -60,28 +60,28 @@ beforeEach((context) => { }); /** - * Sets up an isolated EventStore on the test context. + * Sets up an isolated SyncStore on the test context. * * ```ts * // Add this to any test suite that uses the test client. 
- * beforeEach((context) => setupEventStore(context)) + * beforeEach((context) => setupSyncStore(context)) * ``` */ -export async function setupEventStore( +export async function setupSyncStore( context: TestContext, options = { migrateUp: true } ) { if (process.env.DATABASE_URL) { const pool = new Pool({ connectionString: process.env.DATABASE_URL }); const databaseSchema = `vitest_pool_${process.pid}_${poolId}`; - context.eventStore = new PostgresEventStore({ pool, databaseSchema }); + context.syncStore = new PostgresSyncStore({ pool, databaseSchema }); - if (options.migrateUp) await context.eventStore.migrateUp(); + if (options.migrateUp) await context.syncStore.migrateUp(); return async () => { try { await pool.query(`DROP SCHEMA IF EXISTS "${databaseSchema}" CASCADE`); - await context.eventStore.kill(); + await context.syncStore.kill(); } catch (e) { // This fails in end-to-end tests where the pool has // already been shut down during the Ponder instance kill() method. @@ -91,38 +91,38 @@ export async function setupEventStore( } else { const rawSqliteDb = new SqliteDatabase(":memory:"); const db = patchSqliteDatabase({ db: rawSqliteDb }); - context.eventStore = new SqliteEventStore({ db }); + context.syncStore = new SqliteSyncStore({ db }); - if (options.migrateUp) await context.eventStore.migrateUp(); + if (options.migrateUp) await context.syncStore.migrateUp(); return async () => { - await context.eventStore.kill(); + await context.syncStore.kill(); }; } } /** - * Sets up an isolated UserStore on the test context. + * Sets up an isolated IndexingStore on the test context. * * ```ts * // Add this to any test suite that uses the test client. - * beforeEach((context) => setupUserStore(context)) + * beforeEach((context) => setupIndexingStore(context)) * ``` */ -export async function setupUserStore(context: TestContext) { +export async function setupIndexingStore(context: TestContext) { if (process.env.DATABASE_URL) { const pool = new Pool({ connectionString: process.env.DATABASE_URL }); const databaseSchema = `vitest_pool_${process.pid}_${poolId}`; - context.userStore = new PostgresUserStore({ pool, databaseSchema }); + context.indexingStore = new PostgresIndexingStore({ pool, databaseSchema }); } else { const rawSqliteDb = new SqliteDatabase(":memory:"); const db = patchSqliteDatabase({ db: rawSqliteDb }); - context.userStore = new SqliteUserStore({ db }); + context.indexingStore = new SqliteIndexingStore({ db }); } return async () => { try { - await context.userStore.kill(); + await context.indexingStore.kill(); } catch (e) { // This fails in end-to-end tests where the pool has // already been shut down during the Ponder instance kill() method. diff --git a/packages/core/src/build/functions.ts b/packages/core/src/build/functions.ts index bb2614ee1..90bee6770 100644 --- a/packages/core/src/build/functions.ts +++ b/packages/core/src/build/functions.ts @@ -86,7 +86,7 @@ export type IndexingFunctions = { }; eventSources: { [key: EventSourceName]: { - // This mapping is passed from the IndexingService to the EventAggregatorService, which uses + // This mapping is passed from the IndexingService to the SyncGatewayService, which uses // it to fetch from the store _only_ the events that the user has indexed. bySelector: { [key: Hex]: LogEventMetadata }; // This mapping is used by the IndexingService to fetch the user-provided `fn` before running it. 
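The test-suite changes above all follow one pattern. A minimal sketch of the new wiring, assuming the vitest context fixtures from `setup.ts` and using a stand-in for the `buildOptions(...)` result (its arguments are not shown in this diff):

```ts
import { beforeEach, test } from "vitest";

import { setupIndexingStore, setupSyncStore } from "@/_test/setup";
import { Ponder } from "@/Ponder";

// Renamed fixtures: setupEventStore -> setupSyncStore and
// setupUserStore -> setupIndexingStore. They attach `syncStore` and
// `indexingStore` to the vitest test context.
beforeEach((context) => setupSyncStore(context));
beforeEach((context) => setupIndexingStore(context));

test("runs against isolated stores", async (context) => {
  // Stand-in for the buildOptions(...) result used by the real app tests.
  const testOptions = {} as any;

  const ponder = new Ponder({ options: testOptions });

  // setup() now accepts `syncStore` and `indexingStore` in place of the old
  // `eventStore` and `userStore` injection points.
  await ponder.setup({
    syncStore: context.syncStore,
    indexingStore: context.indexingStore,
  });
  await ponder.start();

  // ... query the GraphQL server and assert, as in app.test.ts ...

  await ponder.kill();
});
```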
diff --git a/packages/core/src/user-store/postgres/store.ts b/packages/core/src/indexing-store/postgres/store.ts similarity index 99% rename from packages/core/src/user-store/postgres/store.ts rename to packages/core/src/indexing-store/postgres/store.ts index b67b7de5f..3b2dbe707 100644 --- a/packages/core/src/user-store/postgres/store.ts +++ b/packages/core/src/indexing-store/postgres/store.ts @@ -10,9 +10,9 @@ import { } from "@/schema/utils"; import type { + IndexingStore, ModelInstance, OrderByInput, - UserStore, WhereInput, } from "../store"; import { formatModelFieldValue, formatModelInstance } from "../utils/format"; @@ -34,7 +34,7 @@ const scalarToSqlType = { bytes: "text", } as const; -export class PostgresUserStore implements UserStore { +export class PostgresIndexingStore implements IndexingStore { db: Kysely; schema?: Schema; diff --git a/packages/core/src/user-store/sqlite/store.ts b/packages/core/src/indexing-store/sqlite/store.ts similarity index 99% rename from packages/core/src/user-store/sqlite/store.ts rename to packages/core/src/indexing-store/sqlite/store.ts index c3af54f65..5917261d9 100644 --- a/packages/core/src/user-store/sqlite/store.ts +++ b/packages/core/src/indexing-store/sqlite/store.ts @@ -11,9 +11,9 @@ import { import { decodeToBigInt } from "@/utils/encoding"; import type { + IndexingStore, ModelInstance, OrderByInput, - UserStore, WhereInput, } from "../store"; import { formatModelFieldValue, formatModelInstance } from "../utils/format"; @@ -35,7 +35,7 @@ const scalarToSqlType = { bytes: "text", } as const; -export class SqliteUserStore implements UserStore { +export class SqliteIndexingStore implements IndexingStore { db: Kysely; schema?: Schema; diff --git a/packages/core/src/user-store/store.test.ts b/packages/core/src/indexing-store/store.test.ts similarity index 65% rename from packages/core/src/user-store/store.test.ts rename to packages/core/src/indexing-store/store.test.ts index 2c48239a1..f5bb390d4 100644 --- a/packages/core/src/user-store/store.test.ts +++ b/packages/core/src/indexing-store/store.test.ts @@ -1,9 +1,9 @@ import { beforeEach, expect, test } from "vitest"; -import { setupUserStore } from "@/_test/setup"; +import { setupIndexingStore } from "@/_test/setup"; import * as p from "@/schema"; -beforeEach((context) => setupUserStore(context)); +beforeEach((context) => setupIndexingStore(context)); const schema = p.createSchema({ PetKind: p.createEnum(["CAT", "DOG"]), @@ -21,24 +21,24 @@ const schema = p.createSchema({ }); test("reload() binds the schema", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - expect(userStore.schema).toBe(schema); + expect(indexingStore.schema).toBe(schema); }); test("create() inserts a record that is effective after timestamp", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", age: 12 }, }); - const instance = await userStore.findUnique({ + const instance = await indexingStore.findUnique({ modelName: "Pet", timestamp: 25, id: "id1", @@ -47,17 +47,17 @@ test("create() inserts a record that is effective after timestamp", async (conte }); test("create() inserts a record that is effective at timestamp", async (context) => { - const { userStore } = 
context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", age: 12 }, }); - const instance = await userStore.findUnique({ + const instance = await indexingStore.findUnique({ modelName: "Pet", timestamp: 10, id: "id1", @@ -66,17 +66,17 @@ test("create() inserts a record that is effective at timestamp", async (context) }); test("create() inserts a record that is not effective before timestamp", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", age: 12 }, }); - const instance = await userStore.findUnique({ + const instance = await indexingStore.findUnique({ modelName: "Pet", timestamp: 8, id: "id1", @@ -85,10 +85,10 @@ test("create() inserts a record that is not effective before timestamp", async ( }); test("create() throws on unique constraint violation", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", @@ -96,7 +96,7 @@ test("create() throws on unique constraint violation", async (context) => { }); await expect(() => - userStore.create({ + indexingStore.create({ modelName: "Pet", timestamp: 15, id: "id1", @@ -106,17 +106,17 @@ test("create() throws on unique constraint violation", async (context) => { }); test("create() respects optional fields", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip" }, }); - const instance = await userStore.findUnique({ + const instance = await indexingStore.findUnique({ modelName: "Pet", timestamp: 11, id: "id1", @@ -126,17 +126,17 @@ test("create() respects optional fields", async (context) => { }); test("create() accepts enums", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", kind: "CAT" }, }); - const instance = await userStore.findUnique({ + const instance = await indexingStore.findUnique({ modelName: "Pet", timestamp: 11, id: "id1", @@ -146,11 +146,11 @@ test("create() accepts enums", async (context) => { }); test("create() throws on invalid enum value", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); await expect(() => - userStore.create({ + indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", @@ -160,17 +160,17 @@ test("create() throws on invalid enum value", async (context) => { }); test("create() accepts BigInt fields as bigint and returns as bigint", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await 
indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", bigAge: 100n }, }); - const instance = await userStore.findUnique({ + const instance = await indexingStore.findUnique({ modelName: "Pet", timestamp: 10, id: "id1", @@ -180,30 +180,30 @@ test("create() accepts BigInt fields as bigint and returns as bigint", async (co }); test("update() updates a record", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", bigAge: 100n }, }); - const instance = await userStore.findUnique({ + const instance = await indexingStore.findUnique({ modelName: "Pet", id: "id1", }); expect(instance).toMatchObject({ id: "id1", name: "Skip", bigAge: 100n }); - await userStore.update({ + await indexingStore.update({ modelName: "Pet", timestamp: 11, id: "id1", data: { name: "Peanut Butter" }, }); - const updatedInstance = await userStore.findUnique({ + const updatedInstance = await indexingStore.findUnique({ modelName: "Pet", id: "id1", }); @@ -211,23 +211,23 @@ test("update() updates a record", async (context) => { }); test("update() updates a record using an update function", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", bigAge: 100n }, }); - const instance = await userStore.findUnique({ + const instance = await indexingStore.findUnique({ modelName: "Pet", id: "id1", }); expect(instance).toMatchObject({ id: "id1", name: "Skip", bigAge: 100n }); - await userStore.update({ + await indexingStore.update({ modelName: "Pet", timestamp: 11, id: "id1", @@ -236,7 +236,7 @@ test("update() updates a record using an update function", async (context) => { }), }); - const updatedInstance = await userStore.findUnique({ + const updatedInstance = await indexingStore.findUnique({ modelName: "Pet", id: "id1", }); @@ -247,24 +247,24 @@ test("update() updates a record using an update function", async (context) => { }); test("update() updates a record and maintains older version", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", bigAge: 100n }, }); - await userStore.update({ + await indexingStore.update({ modelName: "Pet", timestamp: 11, id: "id1", data: { name: "Peanut Butter" }, }); - const originalInstance = await userStore.findUnique({ + const originalInstance = await indexingStore.findUnique({ modelName: "Pet", timestamp: 10, id: "id1", @@ -277,10 +277,10 @@ test("update() updates a record and maintains older version", async (context) => }); test("update() throws if trying to update an instance in the past", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", @@ -288,7 +288,7 @@ 
test("update() throws if trying to update an instance in the past", async (conte }); await expect(() => - userStore.update({ + indexingStore.update({ modelName: "Pet", timestamp: 8, id: "id1", @@ -298,24 +298,24 @@ test("update() throws if trying to update an instance in the past", async (conte }); test("update() updates a record in-place within the same timestamp", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip" }, }); - await userStore.update({ + await indexingStore.update({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Peanut Butter" }, }); - const updatedInstance = await userStore.findUnique({ + const updatedInstance = await indexingStore.findUnique({ modelName: "Pet", id: "id1", }); @@ -323,34 +323,40 @@ test("update() updates a record in-place within the same timestamp", async (cont }); test("upsert() inserts a new record", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.upsert({ + await indexingStore.upsert({ modelName: "Pet", timestamp: 10, id: "id1", create: { name: "Skip", age: 12 }, }); - const instance = await userStore.findUnique({ modelName: "Pet", id: "id1" }); + const instance = await indexingStore.findUnique({ + modelName: "Pet", + id: "id1", + }); expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 }); }); test("upsert() updates a record", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", age: 12 }, }); - const instance = await userStore.findUnique({ modelName: "Pet", id: "id1" }); + const instance = await indexingStore.findUnique({ + modelName: "Pet", + id: "id1", + }); expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 }); - await userStore.upsert({ + await indexingStore.upsert({ modelName: "Pet", timestamp: 12, id: "id1", @@ -358,7 +364,7 @@ test("upsert() updates a record", async (context) => { update: { name: "Jelly" }, }); - const updatedInstance = await userStore.findUnique({ + const updatedInstance = await indexingStore.findUnique({ modelName: "Pet", id: "id1", }); @@ -366,19 +372,22 @@ test("upsert() updates a record", async (context) => { }); test("upsert() updates a record using an update function", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", age: 12 }, }); - const instance = await userStore.findUnique({ modelName: "Pet", id: "id1" }); + const instance = await indexingStore.findUnique({ + modelName: "Pet", + id: "id1", + }); expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 }); - await userStore.upsert({ + await indexingStore.upsert({ modelName: "Pet", timestamp: 12, id: "id1", @@ -388,7 +397,7 @@ test("upsert() updates a record using an update function", async (context) => { }), }); - const updatedInstance = await userStore.findUnique({ 
+ const updatedInstance = await indexingStore.findUnique({ modelName: "Pet", id: "id1", }); @@ -396,10 +405,10 @@ test("upsert() updates a record using an update function", async (context) => { }); test("upsert() throws if trying to update an instance in the past", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", @@ -407,7 +416,7 @@ test("upsert() throws if trying to update an instance in the past", async (conte }); await expect(() => - userStore.upsert({ + indexingStore.upsert({ modelName: "Pet", timestamp: 8, id: "id1", @@ -418,17 +427,17 @@ test("upsert() throws if trying to update an instance in the past", async (conte }); test("upsert() updates a record in-place within the same timestamp", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip" }, }); - await userStore.upsert({ + await indexingStore.upsert({ modelName: "Pet", timestamp: 10, id: "id1", @@ -436,7 +445,7 @@ test("upsert() updates a record in-place within the same timestamp", async (cont update: { name: "Peanut Butter" }, }); - const updatedInstance = await userStore.findUnique({ + const updatedInstance = await indexingStore.findUnique({ modelName: "Pet", id: "id1", }); @@ -444,21 +453,24 @@ test("upsert() updates a record in-place within the same timestamp", async (cont }); test("delete() removes a record", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", age: 12 }, }); - const instance = await userStore.findUnique({ modelName: "Pet", id: "id1" }); + const instance = await indexingStore.findUnique({ + modelName: "Pet", + id: "id1", + }); expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 }); - await userStore.delete({ modelName: "Pet", timestamp: 15, id: "id1" }); + await indexingStore.delete({ modelName: "Pet", timestamp: 15, id: "id1" }); - const deletedInstance = await userStore.findUnique({ + const deletedInstance = await indexingStore.findUnique({ modelName: "Pet", id: "id1", }); @@ -466,19 +478,19 @@ test("delete() removes a record", async (context) => { }); test("delete() retains older version of record", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", age: 12 }, }); - await userStore.delete({ modelName: "Pet", timestamp: 15, id: "id1" }); + await indexingStore.delete({ modelName: "Pet", timestamp: 15, id: "id1" }); - const deletedInstance = await userStore.findUnique({ + const deletedInstance = await indexingStore.findUnique({ modelName: "Pet", timestamp: 12, id: "id1", @@ -487,21 +499,24 @@ test("delete() retains older version of record", async (context) => { }); test("delete() removes a record entirely if only present for one timestamp", async (context) => { - 
const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", age: 12 }, }); - const instance = await userStore.findUnique({ modelName: "Pet", id: "id1" }); + const instance = await indexingStore.findUnique({ + modelName: "Pet", + id: "id1", + }); expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 }); - await userStore.delete({ modelName: "Pet", timestamp: 10, id: "id1" }); + await indexingStore.delete({ modelName: "Pet", timestamp: 10, id: "id1" }); - const deletedInstance = await userStore.findUnique({ + const deletedInstance = await indexingStore.findUnique({ modelName: "Pet", timestamp: 10, id: "id1", @@ -510,25 +525,28 @@ test("delete() removes a record entirely if only present for one timestamp", asy }); test("delete() removes a record entirely if only present for one timestamp after update()", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", age: 12 }, }); - const instance = await userStore.findUnique({ modelName: "Pet", id: "id1" }); + const instance = await indexingStore.findUnique({ + modelName: "Pet", + id: "id1", + }); expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 }); - await userStore.update({ + await indexingStore.update({ modelName: "Pet", timestamp: 12, id: "id1", data: { name: "Skipper", age: 12 }, }); - const updatedInstance = await userStore.findUnique({ + const updatedInstance = await indexingStore.findUnique({ modelName: "Pet", id: "id1", }); @@ -538,9 +556,9 @@ test("delete() removes a record entirely if only present for one timestamp after age: 12, }); - await userStore.delete({ modelName: "Pet", timestamp: 12, id: "id1" }); + await indexingStore.delete({ modelName: "Pet", timestamp: 12, id: "id1" }); - const deletedInstance = await userStore.findUnique({ + const deletedInstance = await indexingStore.findUnique({ modelName: "Pet", timestamp: 12, id: "id1", @@ -549,26 +567,26 @@ test("delete() removes a record entirely if only present for one timestamp after }); test("delete() deletes versions effective in the delete timestamp", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", age: 12 }, }); - await userStore.delete({ modelName: "Pet", timestamp: 15, id: "id1" }); + await indexingStore.delete({ modelName: "Pet", timestamp: 15, id: "id1" }); - const instanceDuringDeleteTimestamp = await userStore.findUnique({ + const instanceDuringDeleteTimestamp = await indexingStore.findUnique({ modelName: "Pet", timestamp: 15, id: "id1", }); expect(instanceDuringDeleteTimestamp).toBe(null); - const instancePriorToDelete = await userStore.findUnique({ + const instancePriorToDelete = await indexingStore.findUnique({ modelName: "Pet", timestamp: 14, id: "id1", @@ -577,35 +595,35 @@ test("delete() deletes versions effective in the delete timestamp", async (conte }); test("findMany() returns current versions of all records", async (context) => { - const { userStore 
} = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 8, id: "id1", data: { name: "Skip", age: 12 }, }); - await userStore.update({ + await indexingStore.update({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "SkipUpdated" }, }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id2", data: { name: "Foo" }, }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id3", data: { name: "Bar", bigAge: 100n }, }); - const instances = await userStore.findMany({ modelName: "Pet" }); + const instances = await indexingStore.findMany({ modelName: "Pet" }); expect(instances).toHaveLength(3); expect(instances.map((i) => i.name)).toMatchObject([ "SkipUpdated", @@ -615,35 +633,35 @@ test("findMany() returns current versions of all records", async (context) => { }); test("findMany() sorts on bigint field", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", bigAge: 105n }, }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id2", data: { name: "Foo", bigAge: 10n }, }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id3", data: { name: "Bar", bigAge: 190n }, }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id4", data: { name: "Patch" }, }); - const instances = await userStore.findMany({ + const instances = await indexingStore.findMany({ modelName: "Pet", orderBy: { bigAge: "asc" }, }); @@ -651,35 +669,35 @@ test("findMany() sorts on bigint field", async (context) => { }); test("findMany() filters on bigint gt", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", bigAge: 105n }, }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id2", data: { name: "Foo", bigAge: 10n }, }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id3", data: { name: "Bar", bigAge: 190n }, }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id4", data: { name: "Patch" }, }); - const instances = await userStore.findMany({ + const instances = await indexingStore.findMany({ modelName: "Pet", where: { bigAge: { gt: 50n } }, }); @@ -688,35 +706,35 @@ test("findMany() filters on bigint gt", async (context) => { }); test("findMany() sorts and filters together", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip", bigAge: 105n }, }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id2", data: { name: "Foo", bigAge: 10n }, }); - await 
userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id3", data: { name: "Bar", bigAge: 190n }, }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id4", data: { name: "Zarbar" }, }); - const instances = await userStore.findMany({ + const instances = await indexingStore.findMany({ modelName: "Pet", where: { name: { endsWith: "ar" } }, orderBy: { name: "asc" }, @@ -726,11 +744,11 @@ test("findMany() sorts and filters together", async (context) => { }); test("findMany() errors on invalid filter condition", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); expect(() => - userStore.findMany({ + indexingStore.findMany({ modelName: "Pet", where: { name: { invalidWhereCondition: "ar" } }, }) @@ -738,11 +756,11 @@ test("findMany() errors on invalid filter condition", async (context) => { }); test("findMany() errors on orderBy object with multiple keys", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); expect(() => - userStore.findMany({ + indexingStore.findMany({ modelName: "Pet", orderBy: { name: "asc", bigAge: "desc" }, }) @@ -750,10 +768,10 @@ test("findMany() errors on orderBy object with multiple keys", async (context) = }); test("createMany() inserts multiple entities", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - const createdInstances = await userStore.createMany({ + const createdInstances = await indexingStore.createMany({ modelName: "Pet", timestamp: 10, data: [ @@ -764,17 +782,17 @@ test("createMany() inserts multiple entities", async (context) => { }); expect(createdInstances.length).toBe(3); - const instances = await userStore.findMany({ modelName: "Pet" }); + const instances = await indexingStore.findMany({ modelName: "Pet" }); expect(instances.length).toBe(3); }); test("createMany() inserts a large number of entities", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); const ENTITY_COUNT = 100_000; - const createdInstances = await userStore.createMany({ + const createdInstances = await indexingStore.createMany({ modelName: "Pet", timestamp: 10, data: [...Array(ENTITY_COUNT).keys()].map((i) => ({ @@ -785,15 +803,15 @@ test("createMany() inserts a large number of entities", async (context) => { }); expect(createdInstances.length).toBe(ENTITY_COUNT); - const instances = await userStore.findMany({ modelName: "Pet" }); + const instances = await indexingStore.findMany({ modelName: "Pet" }); expect(instances.length).toBe(ENTITY_COUNT); }); test("updateMany() updates multiple entities", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.createMany({ + await indexingStore.createMany({ modelName: "Pet", timestamp: 10, data: [ @@ -803,7 +821,7 @@ test("updateMany() updates multiple entities", async (context) => { ], }); - const updatedInstances = await userStore.updateMany({ + const updatedInstances = await indexingStore.updateMany({ modelName: "Pet", timestamp: 11, where: { bigAge: { gt: 50n } 
}, @@ -812,76 +830,76 @@ test("updateMany() updates multiple entities", async (context) => { expect(updatedInstances.length).toBe(2); - const instances = await userStore.findMany({ modelName: "Pet" }); + const instances = await indexingStore.findMany({ modelName: "Pet" }); expect(instances.map((i) => i.bigAge)).toMatchObject([10n, 300n, 300n]); }); test("revert() deletes versions newer than the safe timestamp", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip" }, }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 13, id: "id2", data: { name: "Foo" }, }); - await userStore.update({ + await indexingStore.update({ modelName: "Pet", timestamp: 15, id: "id1", data: { name: "SkipUpdated" }, }); - await userStore.create({ + await indexingStore.create({ modelName: "Person", timestamp: 10, id: "id1", data: { name: "Bob" }, }); - await userStore.update({ + await indexingStore.update({ modelName: "Person", timestamp: 11, id: "id1", data: { name: "Bobby" }, }); - await userStore.revert({ safeTimestamp: 12 }); + await indexingStore.revert({ safeTimestamp: 12 }); - const pets = await userStore.findMany({ modelName: "Pet" }); + const pets = await indexingStore.findMany({ modelName: "Pet" }); expect(pets.length).toBe(1); expect(pets[0].name).toBe("Skip"); - const persons = await userStore.findMany({ modelName: "Person" }); + const persons = await indexingStore.findMany({ modelName: "Person" }); expect(persons.length).toBe(1); expect(persons[0].name).toBe("Bobby"); }); test("revert() updates versions that only existed during the safe timestamp to latest", async (context) => { - const { userStore } = context; - await userStore.reload({ schema }); + const { indexingStore } = context; + await indexingStore.reload({ schema }); - await userStore.create({ + await indexingStore.create({ modelName: "Pet", timestamp: 10, id: "id1", data: { name: "Skip" }, }); - await userStore.delete({ + await indexingStore.delete({ modelName: "Pet", timestamp: 11, id: "id1", }); - await userStore.revert({ safeTimestamp: 10 }); + await indexingStore.revert({ safeTimestamp: 10 }); - const pets = await userStore.findMany({ modelName: "Pet" }); + const pets = await indexingStore.findMany({ modelName: "Pet" }); expect(pets.length).toBe(1); expect(pets[0].name).toBe("Skip"); }); diff --git a/packages/core/src/user-store/store.ts b/packages/core/src/indexing-store/store.ts similarity index 99% rename from packages/core/src/user-store/store.ts rename to packages/core/src/indexing-store/store.ts index c83361e1a..f2398dd1a 100644 --- a/packages/core/src/user-store/store.ts +++ b/packages/core/src/indexing-store/store.ts @@ -73,7 +73,7 @@ export type OrderByInput = [FieldName in keyof TModel]?: "asc" | "desc"; }[]; -export interface UserStore { +export interface IndexingStore { schema?: Schema; versionId?: string; diff --git a/packages/core/src/user-store/utils/format.ts b/packages/core/src/indexing-store/utils/format.ts similarity index 100% rename from packages/core/src/user-store/utils/format.ts rename to packages/core/src/indexing-store/utils/format.ts diff --git a/packages/core/src/user-store/utils/pagination.test.ts b/packages/core/src/indexing-store/utils/pagination.test.ts similarity index 100% rename from 
packages/core/src/user-store/utils/pagination.test.ts rename to packages/core/src/indexing-store/utils/pagination.test.ts diff --git a/packages/core/src/user-store/utils/pagination.ts b/packages/core/src/indexing-store/utils/pagination.ts similarity index 100% rename from packages/core/src/user-store/utils/pagination.ts rename to packages/core/src/indexing-store/utils/pagination.ts diff --git a/packages/core/src/user-store/utils/where.ts b/packages/core/src/indexing-store/utils/where.ts similarity index 100% rename from packages/core/src/user-store/utils/where.ts rename to packages/core/src/indexing-store/utils/where.ts diff --git a/packages/core/src/indexing/model.ts b/packages/core/src/indexing/model.ts index 2390167f6..0962c6f68 100644 --- a/packages/core/src/indexing/model.ts +++ b/packages/core/src/indexing/model.ts @@ -1,16 +1,16 @@ +import type { IndexingStore, ModelInstance } from "@/indexing-store/store"; import type { Common } from "@/Ponder"; import type { Schema } from "@/schema/types"; import type { Model } from "@/types/model"; -import type { ModelInstance, UserStore } from "@/user-store/store"; export function buildModels({ common, - userStore, + indexingStore, schema, getCurrentEventTimestamp, }: { common: Common; - userStore: UserStore; + indexingStore: IndexingStore; schema: Schema; getCurrentEventTimestamp: () => number; }) { @@ -23,7 +23,7 @@ export function buildModels({ service: "store", msg: `findUnique (model=${modelName}, id=${id})`, }); - return userStore.findUnique({ + return indexingStore.findUnique({ modelName, timestamp: getCurrentEventTimestamp(), id, @@ -34,7 +34,7 @@ export function buildModels({ service: "store", msg: `findMany (model=${modelName})`, }); - return userStore.findMany({ + return indexingStore.findMany({ modelName, timestamp: getCurrentEventTimestamp(), where, @@ -48,7 +48,7 @@ export function buildModels({ service: "store", msg: `create (model=${modelName}, id=${id})`, }); - return userStore.create({ + return indexingStore.create({ modelName, timestamp: getCurrentEventTimestamp(), id, @@ -60,7 +60,7 @@ export function buildModels({ service: "store", msg: `createMany (model=${modelName}, count=${data.length})`, }); - return userStore.createMany({ + return indexingStore.createMany({ modelName, timestamp: getCurrentEventTimestamp(), data, @@ -71,7 +71,7 @@ export function buildModels({ service: "store", msg: `update (model=${modelName}, id=${id})`, }); - return userStore.update({ + return indexingStore.update({ modelName, timestamp: getCurrentEventTimestamp(), id, @@ -83,7 +83,7 @@ export function buildModels({ service: "store", msg: `updateMany (model=${modelName})`, }); - return userStore.updateMany({ + return indexingStore.updateMany({ modelName, timestamp: getCurrentEventTimestamp(), where, @@ -95,7 +95,7 @@ export function buildModels({ service: "store", msg: `upsert (model=${modelName}, id=${id})`, }); - return userStore.upsert({ + return indexingStore.upsert({ modelName, timestamp: getCurrentEventTimestamp(), id, @@ -108,7 +108,7 @@ export function buildModels({ service: "store", msg: `delete (model=${modelName}, id=${id})`, }); - return userStore.delete({ + return indexingStore.delete({ modelName, timestamp: getCurrentEventTimestamp(), id, diff --git a/packages/core/src/indexing/service.test.ts b/packages/core/src/indexing/service.test.ts index 620b87a9b..87767fff3 100644 --- a/packages/core/src/indexing/service.test.ts +++ b/packages/core/src/indexing/service.test.ts @@ -1,18 +1,18 @@ import { beforeEach, expect, test, vi } from 
"vitest"; import { usdcContractConfig } from "@/_test/constants"; -import { setupEventStore, setupUserStore } from "@/_test/setup"; +import { setupIndexingStore, setupSyncStore } from "@/_test/setup"; import { publicClient } from "@/_test/utils"; import type { IndexingFunctions } from "@/build/functions"; import { LogEventMetadata } from "@/config/abi"; import { Source } from "@/config/sources"; -import { EventAggregatorService } from "@/event-aggregator/service"; import * as p from "@/schema"; +import { SyncGateway } from "@/sync-gateway/service"; import { IndexingService } from "./service"; -beforeEach((context) => setupEventStore(context)); -beforeEach((context) => setupUserStore(context)); +beforeEach((context) => setupSyncStore(context)); +beforeEach((context) => setupIndexingStore(context)); const network = { name: "mainnet", @@ -100,25 +100,25 @@ const getEvents = vi.fn(async function* getEvents({ }; }); -const eventAggregatorService = { +const syncGatewayService = { getEvents, checkpoint: 0, -} as unknown as EventAggregatorService; +} as unknown as SyncGateway; beforeEach(() => { // Restore getEvents to the initial implementation. vi.restoreAllMocks(); - eventAggregatorService.checkpoint = 0; + syncGatewayService.checkpoint = 0; }); test("processEvents() calls getEvents with sequential timestamp ranges", async (context) => { - const { common, eventStore, userStore } = context; + const { common, syncStore, indexingStore } = context; const service = new IndexingService({ common, - eventStore, - userStore, - eventAggregatorService, + syncStore, + indexingStore, + syncGatewayService, sources, }); @@ -126,7 +126,7 @@ test("processEvents() calls getEvents with sequential timestamp ranges", async ( expect(getEvents).not.toHaveBeenCalled(); - eventAggregatorService.checkpoint = 10; + syncGatewayService.checkpoint = 10; await service.processEvents(); expect(getEvents).toHaveBeenLastCalledWith( @@ -136,7 +136,7 @@ test("processEvents() calls getEvents with sequential timestamp ranges", async ( }) ); - eventAggregatorService.checkpoint = 50; + syncGatewayService.checkpoint = 50; await service.processEvents(); expect(getEvents).toHaveBeenLastCalledWith( @@ -150,19 +150,19 @@ test("processEvents() calls getEvents with sequential timestamp ranges", async ( }); test("processEvents() calls indexing functions with correct arguments", async (context) => { - const { common, eventStore, userStore } = context; + const { common, syncStore, indexingStore } = context; const service = new IndexingService({ common, - eventStore, - userStore, - eventAggregatorService, + syncStore, + indexingStore, + syncGatewayService, sources, }); await service.reset({ schema, indexingFunctions }); - eventAggregatorService.checkpoint = 10; + syncGatewayService.checkpoint = 10; await service.processEvents(); expect(transferIndexingFunction).toHaveBeenCalledWith( @@ -185,23 +185,23 @@ test("processEvents() calls indexing functions with correct arguments", async (c service.kill(); }); -test("processEvents() model methods insert data into the user store", async (context) => { - const { common, eventStore, userStore } = context; +test("processEvents() model methods insert data into the indexing store", async (context) => { + const { common, syncStore, indexingStore } = context; const service = new IndexingService({ common, - eventStore, - userStore, - eventAggregatorService, + syncStore, + indexingStore, + syncGatewayService, sources, }); await service.reset({ schema, indexingFunctions }); - eventAggregatorService.checkpoint = 
10; + syncGatewayService.checkpoint = 10; await service.processEvents(); - const transferEvents = await userStore.findMany({ + const transferEvents = await indexingStore.findMany({ modelName: "TransferEvent", }); expect(transferEvents.length).toBe(1); @@ -210,19 +210,19 @@ test("processEvents() model methods insert data into the user store", async (con }); test("processEvents() updates event count metrics", async (context) => { - const { common, eventStore, userStore } = context; + const { common, syncStore, indexingStore } = context; const service = new IndexingService({ common, - eventStore, - userStore, - eventAggregatorService, + syncStore, + indexingStore, + syncGatewayService, sources, }); await service.reset({ schema, indexingFunctions }); - eventAggregatorService.checkpoint = 10; + syncGatewayService.checkpoint = 10; await service.processEvents(); const matchedEventsMetric = ( @@ -250,34 +250,34 @@ test("processEvents() updates event count metrics", async (context) => { service.kill(); }); -test("reset() reloads the user store", async (context) => { - const { common, eventStore, userStore } = context; +test("reset() reloads the indexing store", async (context) => { + const { common, syncStore, indexingStore } = context; const service = new IndexingService({ common, - eventStore, - userStore, - eventAggregatorService, + syncStore, + indexingStore, + syncGatewayService, sources, }); await service.reset({ schema, indexingFunctions }); - eventAggregatorService.checkpoint = 10; + syncGatewayService.checkpoint = 10; await service.processEvents(); - const transferEvents = await userStore.findMany({ + const transferEvents = await indexingStore.findMany({ modelName: "TransferEvent", }); expect(transferEvents.length).toBe(1); - const versionIdBeforeReset = userStore.versionId; + const versionIdBeforeReset = indexingStore.versionId; await service.reset({ schema, indexingFunctions }); - expect(userStore.versionId).not.toBe(versionIdBeforeReset); + expect(indexingStore.versionId).not.toBe(versionIdBeforeReset); - const transferEventsAfterReset = await userStore.findMany({ + const transferEventsAfterReset = await indexingStore.findMany({ modelName: "TransferEvent", }); expect(transferEventsAfterReset.length).toBe(0); @@ -286,19 +286,19 @@ test("reset() reloads the user store", async (context) => { }); test("handleReorg() updates ponder_handlers_latest_processed_timestamp metric", async (context) => { - const { common, eventStore, userStore } = context; + const { common, syncStore, indexingStore } = context; const service = new IndexingService({ common, - eventStore, - userStore, - eventAggregatorService, + syncStore, + indexingStore, + syncGatewayService, sources, }); await service.reset({ schema, indexingFunctions }); - eventAggregatorService.checkpoint = 10; + syncGatewayService.checkpoint = 10; await service.processEvents(); const latestProcessedTimestampMetric = ( @@ -316,43 +316,43 @@ test("handleReorg() updates ponder_handlers_latest_processed_timestamp metric", service.kill(); }); -test("handleReorg() reverts the user store", async (context) => { - const { common, eventStore, userStore } = context; +test("handleReorg() reverts the indexing store", async (context) => { + const { common, syncStore, indexingStore } = context; const service = new IndexingService({ common, - eventStore, - userStore, - eventAggregatorService, + syncStore, + indexingStore, + syncGatewayService, sources, }); - const userStoreRevertSpy = vi.spyOn(userStore, "revert"); + const indexingStoreRevertSpy = 
vi.spyOn(indexingStore, "revert"); await service.reset({ schema, indexingFunctions }); - eventAggregatorService.checkpoint = 10; + syncGatewayService.checkpoint = 10; await service.processEvents(); await service.handleReorg({ commonAncestorTimestamp: 6 }); - expect(userStoreRevertSpy).toHaveBeenLastCalledWith({ safeTimestamp: 6 }); + expect(indexingStoreRevertSpy).toHaveBeenLastCalledWith({ safeTimestamp: 6 }); service.kill(); }); test("handleReorg() does nothing if there is a user error", async (context) => { - const { common, eventStore, userStore } = context; + const { common, syncStore, indexingStore } = context; const service = new IndexingService({ common, - eventStore, - userStore, - eventAggregatorService, + syncStore, + indexingStore, + syncGatewayService, sources, }); - const userStoreRevertSpy = vi.spyOn(userStore, "revert"); + const indexingStoreRevertSpy = vi.spyOn(indexingStore, "revert"); await service.reset({ schema, indexingFunctions }); @@ -360,30 +360,30 @@ test("handleReorg() does nothing if there is a user error", async (context) => { throw new Error("User error!"); }); - eventAggregatorService.checkpoint = 10; + syncGatewayService.checkpoint = 10; await service.processEvents(); await service.handleReorg({ commonAncestorTimestamp: 6 }); - expect(userStoreRevertSpy).not.toHaveBeenCalled(); + expect(indexingStoreRevertSpy).not.toHaveBeenCalled(); service.kill(); }); test("handleReorg() processes the correct range of events after a reorg", async (context) => { - const { common, eventStore, userStore } = context; + const { common, syncStore, indexingStore } = context; const service = new IndexingService({ common, - eventStore, - userStore, - eventAggregatorService, + syncStore, + indexingStore, + syncGatewayService, sources, }); await service.reset({ schema, indexingFunctions }); - eventAggregatorService.checkpoint = 10; + syncGatewayService.checkpoint = 10; await service.processEvents(); expect(getEvents).toHaveBeenLastCalledWith( @@ -395,7 +395,7 @@ test("handleReorg() processes the correct range of events after a reorg", async // This simulates a scenario where there was a reorg back to 6 // and the new latest block is 9. - eventAggregatorService.checkpoint = 9; + syncGatewayService.checkpoint = 9; await service.handleReorg({ commonAncestorTimestamp: 6 }); await service.processEvents(); @@ -410,19 +410,19 @@ test("handleReorg() processes the correct range of events after a reorg", async }); test("handleReorg() updates ponder_handlers_latest_processed_timestamp metric", async (context) => { - const { common, eventStore, userStore } = context; + const { common, syncStore, indexingStore } = context; const service = new IndexingService({ common, - eventStore, - userStore, - eventAggregatorService, + syncStore, + indexingStore, + syncGatewayService, sources, }); await service.reset({ schema, indexingFunctions }); - eventAggregatorService.checkpoint = 10; + syncGatewayService.checkpoint = 10; await service.processEvents(); const latestProcessedTimestampMetric = ( @@ -432,7 +432,7 @@ test("handleReorg() updates ponder_handlers_latest_processed_timestamp metric", // This simulates a scenario where there was a reorg back to 6 // and the new latest block is 9. 
- eventAggregatorService.checkpoint = 9; + syncGatewayService.checkpoint = 9; await service.handleReorg({ commonAncestorTimestamp: 6 }); const latestProcessedTimestampMetricAfterReorg = ( diff --git a/packages/core/src/indexing/service.ts b/packages/core/src/indexing/service.ts index 960536228..3bfdabb93 100644 --- a/packages/core/src/indexing/service.ts +++ b/packages/core/src/indexing/service.ts @@ -5,15 +5,12 @@ import type { IndexingFunctions } from "@/build/functions"; import { LogEventMetadata } from "@/config/abi"; import type { Source } from "@/config/sources"; import { UserError } from "@/errors/user"; -import type { - EventAggregatorService, - LogEvent, -} from "@/event-aggregator/service"; -import type { EventStore } from "@/event-store/store"; +import type { IndexingStore, ModelInstance } from "@/indexing-store/store"; import type { Common } from "@/Ponder"; import type { Schema } from "@/schema/types"; +import type { LogEvent, SyncGateway } from "@/sync-gateway/service"; +import type { SyncStore } from "@/sync-store/store"; import type { Model } from "@/types/model"; -import type { ModelInstance, UserStore } from "@/user-store/store"; import { formatShortDate } from "@/utils/date"; import { prettyPrint } from "@/utils/print"; import { type Queue, type Worker, createQueue } from "@/utils/queue"; @@ -33,8 +30,8 @@ type IndexingFunctionQueue = Queue; export class IndexingService extends Emittery { private common: Common; - private userStore: UserStore; - private eventAggregatorService: EventAggregatorService; + private indexingStore: IndexingStore; + private syncGatewayService: SyncGateway; private sources: Source[]; private schema?: Schema; @@ -54,22 +51,22 @@ export class IndexingService extends Emittery { constructor({ common, - // eventStore, - userStore, - eventAggregatorService, + // syncStore, + indexingStore, + syncGatewayService, sources = [], }: { common: Common; - eventStore: EventStore; - userStore: UserStore; - eventAggregatorService: EventAggregatorService; + syncStore: SyncStore; + indexingStore: IndexingStore; + syncGatewayService: SyncGateway; sources?: Source[]; }) { super(); this.common = common; - this.userStore = userStore; - this.eventAggregatorService = eventAggregatorService; + this.indexingStore = indexingStore; + this.syncGatewayService = syncGatewayService; this.sources = sources; this.eventProcessingMutex = new Mutex(); @@ -88,7 +85,7 @@ export class IndexingService extends Emittery { /** * Registers a new set of indexing functions and/or a new schema, cancels * the current event processing mutex & event queue, drops and re-creates - * all tables from the user store, and resets eventsProcessedToTimestamp to zero. + * all tables from the indexing store, and resets eventsProcessedToTimestamp to zero. * * Note: Caller should (probably) immediately call processEvents after this method. 
*/ @@ -103,7 +100,7 @@ export class IndexingService extends Emittery { this.schema = newSchema; this.models = buildModels({ common: this.common, - userStore: this.userStore, + indexingStore: this.indexingStore, schema: this.schema, getCurrentEventTimestamp: () => this.currentEventTimestamp, }); @@ -132,7 +129,7 @@ export class IndexingService extends Emittery { }); this.common.logger.debug({ service: "indexing", - msg: `Paused event queue (versionId=${this.userStore.versionId})`, + msg: `Paused event queue (versionId=${this.indexingStore.versionId})`, }); this.hasError = false; @@ -142,13 +139,13 @@ export class IndexingService extends Emittery { this.common.metrics.ponder_indexing_handled_events.reset(); this.common.metrics.ponder_indexing_processed_events.reset(); - await this.userStore.reload({ schema: this.schema }); + await this.indexingStore.reload({ schema: this.schema }); this.common.logger.debug({ service: "indexing", - msg: `Reset user store (versionId=${this.userStore.versionId})`, + msg: `Reset indexing store (versionId=${this.indexingStore.versionId})`, }); - // When we call userStore.reload() above, the user store is dropped. + // When we call indexingStore.reload() above, the indexing store is dropped. // Set the latest processed timestamp to zero accordingly. this.eventsProcessedToTimestamp = 0; this.common.metrics.ponder_indexing_latest_processed_timestamp.set(0); @@ -156,7 +153,7 @@ export class IndexingService extends Emittery { /** * This method is triggered by the realtime sync service detecting a reorg, - * which can happen at any time. The event queue and the user store can be + * which can happen at any time. The event queue and the indexing store can be * in one of several different states that we need to keep in mind: * * 1) No events have been added to the queue yet. @@ -166,7 +163,7 @@ export class IndexingService extends Emittery { * * Note: It's crucial that we acquire a mutex lock while handling the reorg. * This will only ever run while the queue is idle, so we can be confident - * that eventsProcessedToTimestamp matches the current state of the user store, + * that eventsProcessedToTimestamp matches the current state of the indexing store, * and that no unsafe events will get processed after handling the reorg. * * Note: Caller should (probably) immediately call processEvents after this method. @@ -178,7 +175,7 @@ export class IndexingService extends Emittery { }) => { try { await this.eventProcessingMutex.runExclusive(async () => { - // If there is a user error, the queue & user store will be wiped on reload (case 4). + // If there is a user error, the queue & indexing store will be wiped on reload (case 4). if (this.hasError) return; if (this.eventsProcessedToTimestamp <= commonAncestorTimestamp) { @@ -188,9 +185,9 @@ export class IndexingService extends Emittery { msg: `No unsafe events were detected while reconciling a reorg, no-op`, }); } else { - // Unsafe events have been processed, must revert the user store and update + // Unsafe events have been processed, must revert the indexing store and update // eventsProcessedToTimestamp accordingly (case 3). 
- await this.userStore.revert({ + await this.indexingStore.revert({ safeTimestamp: commonAncestorTimestamp, }); @@ -205,7 +202,7 @@ export class IndexingService extends Emittery { this.common.logger.debug({ service: "indexing", - msg: `Reverted user store to safe timestamp ${commonAncestorTimestamp}`, + msg: `Reverted indexing store to safe timestamp ${commonAncestorTimestamp}`, }); } }); @@ -220,7 +217,7 @@ export class IndexingService extends Emittery { * Processes all newly available events. * * Acquires a lock on the event processing mutex, then gets the latest checkpoint - * from the event aggregator service. Fetches events between previous checkpoint + * from the sync gateway service. Fetches events between previous checkpoint * and the new checkpoint, adds them to the queue, then processes them. */ processEvents = async () => { @@ -228,12 +225,12 @@ export class IndexingService extends Emittery { await this.eventProcessingMutex.runExclusive(async () => { if (this.hasError || !this.queue) return; - const eventsAvailableTo = this.eventAggregatorService.checkpoint; + const eventsAvailableTo = this.syncGatewayService.checkpoint; // If we have already added events to the queue for the current checkpoint, // do nothing and return. This can happen if a number of calls to processEvents // "stack up" while one is being processed, and then they all run sequentially - // but the event aggregator service checkpoint has not moved. + // but the sync gateway service checkpoint has not moved. if (this.eventsProcessedToTimestamp >= eventsAvailableTo) { return; } @@ -260,7 +257,7 @@ export class IndexingService extends Emittery { } } - const iterator = this.eventAggregatorService.getEvents({ + const iterator = this.syncGatewayService.getEvents({ fromTimestamp, toTimestamp, indexingMetadata: this.indexingMetadata, diff --git a/packages/core/src/indexing/transport.test.ts b/packages/core/src/indexing/transport.test.ts index a3ffeab3e..cfce59482 100644 --- a/packages/core/src/indexing/transport.test.ts +++ b/packages/core/src/indexing/transport.test.ts @@ -2,17 +2,17 @@ import { getFunctionSelector, http, toHex, Transport } from "viem"; import { assertType, beforeEach, expect, test, vi } from "vitest"; import { usdcContractConfig } from "@/_test/constants"; -import { setupEventStore } from "@/_test/setup"; +import { setupSyncStore } from "@/_test/setup"; import { anvil } from "@/_test/utils"; import { ponderTransport } from "./transport"; -beforeEach((context) => setupEventStore(context)); +beforeEach((context) => setupSyncStore(context)); -test("default", ({ eventStore }) => { +test("default", ({ syncStore }) => { const transport = ponderTransport({ transport: http("https://mockapi.com/rpc"), - eventStore, + syncStore, }); assertType(transport); @@ -34,10 +34,10 @@ test("default", ({ eventStore }) => { `); }); -test("eth_call", async ({ eventStore }) => { +test("eth_call", async ({ syncStore }) => { const transport = ponderTransport({ transport: http(), - eventStore, + syncStore, })({ chain: anvil, }); @@ -55,7 +55,7 @@ test("eth_call", async ({ eventStore }) => { expect(response1).toBeDefined(); - const callSpy = vi.spyOn(eventStore, "insertRpcRequestResult"); + const callSpy = vi.spyOn(syncStore, "insertRpcRequestResult"); const response2 = await transport.request({ method: "eth_call", @@ -73,10 +73,10 @@ test("eth_call", async ({ eventStore }) => { expect(callSpy).toHaveBeenCalledTimes(0); }); -test("eth_getBalance", async ({ eventStore }) => { +test("eth_getBalance", async ({ syncStore }) => { 
const transport = ponderTransport({ transport: http(), - eventStore, + syncStore, })({ chain: anvil, }); @@ -88,7 +88,7 @@ test("eth_getBalance", async ({ eventStore }) => { expect(response1).toBeDefined(); - const callSpy = vi.spyOn(eventStore, "insertRpcRequestResult"); + const callSpy = vi.spyOn(syncStore, "insertRpcRequestResult"); const response2 = await transport.request({ method: "eth_getBalance", @@ -100,10 +100,10 @@ test("eth_getBalance", async ({ eventStore }) => { expect(callSpy).toHaveBeenCalledTimes(0); }); -test("eth_getStorageAt", async ({ eventStore }) => { +test("eth_getStorageAt", async ({ syncStore }) => { const transport = ponderTransport({ transport: http(), - eventStore, + syncStore, })({ chain: anvil, }); @@ -115,7 +115,7 @@ test("eth_getStorageAt", async ({ eventStore }) => { expect(response1).toBeDefined(); - const callSpy = vi.spyOn(eventStore, "insertRpcRequestResult"); + const callSpy = vi.spyOn(syncStore, "insertRpcRequestResult"); const response2 = await transport.request({ method: "eth_getStorageAt", @@ -127,10 +127,10 @@ test("eth_getStorageAt", async ({ eventStore }) => { expect(callSpy).toHaveBeenCalledTimes(0); }); -test("eth_getCode", async ({ eventStore }) => { +test("eth_getCode", async ({ syncStore }) => { const transport = ponderTransport({ transport: http(), - eventStore, + syncStore, })({ chain: anvil, }); @@ -142,7 +142,7 @@ test("eth_getCode", async ({ eventStore }) => { expect(response1).toBeDefined(); - const callSpy = vi.spyOn(eventStore, "insertRpcRequestResult"); + const callSpy = vi.spyOn(syncStore, "insertRpcRequestResult"); const response2 = await transport.request({ method: "eth_getCode", @@ -154,10 +154,10 @@ test("eth_getCode", async ({ eventStore }) => { expect(callSpy).toHaveBeenCalledTimes(0); }); -test("fallback method", async ({ eventStore }) => { +test("fallback method", async ({ syncStore }) => { const transport = ponderTransport({ transport: http(), - eventStore, + syncStore, })({ chain: anvil, }); diff --git a/packages/core/src/indexing/transport.ts b/packages/core/src/indexing/transport.ts index 4941b3dba..a86957ff9 100644 --- a/packages/core/src/indexing/transport.ts +++ b/packages/core/src/indexing/transport.ts @@ -1,14 +1,14 @@ import { Address, custom, Hex, Transport } from "viem"; -import { EventStore } from "@/event-store/store"; +import { SyncStore } from "@/sync-store/store"; import { toLowerCase } from "@/utils/lowercase"; export const ponderTransport = ({ transport, - eventStore, + syncStore, }: { transport: Transport; - eventStore: EventStore; + syncStore: SyncStore; }): Transport => { return ({ chain }) => { const underlyingTransport = transport({ chain }); @@ -49,7 +49,7 @@ export const ponderTransport = ({ } if (request !== null && blockNumber !== null) { - const cachedResult = await eventStore.getRpcRequestResult({ + const cachedResult = await syncStore.getRpcRequestResult({ blockNumber, chainId: chain!.id, request, @@ -58,7 +58,7 @@ export const ponderTransport = ({ if (cachedResult?.result) return cachedResult.result; else { const response = await underlyingTransport.request(body); - await eventStore.insertRpcRequestResult({ + await syncStore.insertRpcRequestResult({ blockNumber: BigInt(blockNumber), chainId: chain!.id, request, diff --git a/packages/core/src/server/graphql/schema.ts b/packages/core/src/server/graphql/schema.ts index 8c41f7029..54e72621d 100644 --- a/packages/core/src/server/graphql/schema.ts +++ b/packages/core/src/server/graphql/schema.ts @@ -9,8 +9,8 @@ import { GraphQLString, } from 
"graphql"; +import type { IndexingStore } from "@/indexing-store/store"; import type { Scalar, Schema } from "@/schema/types"; -import type { UserStore } from "@/user-store/store"; import { buildEntityTypes } from "./entity"; import { buildPluralField } from "./plural"; @@ -41,7 +41,7 @@ export const tsTypeToGqlScalar: { [type in Scalar]: GraphQLScalarType } = { }; export type Source = { request: unknown }; -export type Context = { store: UserStore }; +export type Context = { store: IndexingStore }; export const buildGqlSchema = (schema: Schema): GraphQLSchema => { const queryFields: Record> = {}; diff --git a/packages/core/src/server/service.test.ts b/packages/core/src/server/service.test.ts index 31a7c482d..8e4828a51 100644 --- a/packages/core/src/server/service.test.ts +++ b/packages/core/src/server/service.test.ts @@ -1,16 +1,16 @@ import request from "supertest"; import { beforeEach, expect, test } from "vitest"; -import { setupUserStore } from "@/_test/setup"; +import { setupIndexingStore } from "@/_test/setup"; +import type { IndexingStore } from "@/indexing-store/store"; import type { Common } from "@/Ponder"; import * as p from "@/schema"; -import type { UserStore } from "@/user-store/store"; import { range } from "@/utils/range"; import { buildGqlSchema } from "./graphql/schema"; import { ServerService } from "./service"; -beforeEach((context) => setupUserStore(context)); +beforeEach((context) => setupIndexingStore(context)); const s = p.createSchema({ TestEnum: p.createEnum(["ZERO", "ONE", "TWO"]), @@ -42,20 +42,20 @@ const graphqlSchema = buildGqlSchema(s); const setup = async ({ common, - userStore, + indexingStore, options = { hasCompletedHistoricalIndexing: true, }, }: { common: Common; - userStore: UserStore; + indexingStore: IndexingStore; options?: { hasCompletedHistoricalIndexing?: boolean; }; }) => { - await userStore.reload({ schema: s }); + await indexingStore.reload({ schema: s }); - const service = new ServerService({ common, userStore }); + const service = new ServerService({ common, indexingStore }); await service.start(); service.reload({ graphqlSchema }); @@ -69,7 +69,7 @@ const setup = async ({ .send({ query: `query { ${query} }` }); const createTestEntity = async ({ id }: { id: number }) => { - await userStore.create({ + await indexingStore.create({ modelName: "TestEntity", timestamp: 0, id: String(id), @@ -97,7 +97,7 @@ const setup = async ({ id: bigint; testEntityId: string; }) => { - await userStore.create({ + await indexingStore.create({ modelName: "EntityWithBigIntId", timestamp: 0, id, @@ -108,7 +108,7 @@ const setup = async ({ }; const createEntityWithIntId = async ({ id }: { id: number }) => { - await userStore.create({ + await indexingStore.create({ modelName: "EntityWithIntId", timestamp: 0, id, @@ -125,10 +125,10 @@ const setup = async ({ }; test("serves all scalar types correctly", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -184,10 +184,10 @@ test("serves all scalar types correctly", async (context) => { }); test("serves all scalar list types correctly", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -239,10 +239,10 @@ test("serves all scalar list types 
correctly", async (context) => { }); test("serves enum types correctly", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -278,9 +278,9 @@ test("serves enum types correctly", async (context) => { }); test("serves derived types correctly", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity, createEntityWithBigIntId } = - await setup({ common, userStore }); + await setup({ common, indexingStore }); await createTestEntity({ id: 0 }); await createEntityWithBigIntId({ id: BigInt(0), testEntityId: "0" }); @@ -309,9 +309,9 @@ test("serves derived types correctly", async (context) => { }); test("serves relationship types correctly", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity, createEntityWithBigIntId } = - await setup({ common, userStore }); + await setup({ common, indexingStore }); await createTestEntity({ id: 0 }); await createEntityWithBigIntId({ id: BigInt(0), testEntityId: "0" }); @@ -353,10 +353,10 @@ test("serves relationship types correctly", async (context) => { }); test("finds unique entity by bigint id", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createEntityWithBigIntId } = await setup({ common, - userStore, + indexingStore, }); await createEntityWithBigIntId({ id: BigInt(0), testEntityId: "0" }); @@ -377,10 +377,10 @@ test("finds unique entity by bigint id", async (context) => { }); test("finds unique entity with id: 0", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createEntityWithIntId } = await setup({ common, - userStore, + indexingStore, }); await createEntityWithIntId({ id: 0 }); @@ -401,10 +401,10 @@ test("finds unique entity with id: 0", async (context) => { }); test("filters on string field equals", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 123 }); @@ -428,10 +428,10 @@ test("filters on string field equals", async (context) => { }); test("filters on string field in", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 123 }); @@ -456,10 +456,10 @@ test("filters on string field in", async (context) => { }); test("filters on string field contains", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 123 }); @@ -483,10 +483,10 @@ test("filters on string field contains", async (context) => { }); test("filters on string field starts with", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 123 }); @@ 
-511,10 +511,10 @@ test("filters on string field starts with", async (context) => { }); test("filters on string field not ends with", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 123 }); @@ -539,10 +539,10 @@ test("filters on string field not ends with", async (context) => { }); test("filters on integer field equals", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -566,10 +566,10 @@ test("filters on integer field equals", async (context) => { }); test("filters on integer field greater than", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -593,10 +593,10 @@ test("filters on integer field greater than", async (context) => { }); test("filters on integer field less than or equal to", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -621,10 +621,10 @@ test("filters on integer field less than or equal to", async (context) => { }); test("filters on integer field in", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -649,10 +649,10 @@ test("filters on integer field in", async (context) => { }); test("filters on float field equals", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -678,10 +678,10 @@ test("filters on float field equals", async (context) => { }); test("filters on float field greater than", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -705,10 +705,10 @@ test("filters on float field greater than", async (context) => { }); test("filters on float field less than or equal to", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -734,10 +734,10 @@ test("filters on float field less than or equal to", async (context) => { }); test("filters on float field in", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -763,10 +763,10 @@ test("filters on float field in", async (context) => { }); test("filters on bigInt field equals", async (context) => { - const { common, userStore 
} = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -792,10 +792,10 @@ test("filters on bigInt field equals", async (context) => { }); test("filters on bigInt field greater than", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -819,10 +819,10 @@ test("filters on bigInt field greater than", async (context) => { }); test("filters on bigInt field less than or equal to", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -848,10 +848,10 @@ test("filters on bigInt field less than or equal to", async (context) => { }); test("filters on bigInt field in", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -877,10 +877,10 @@ test("filters on bigInt field in", async (context) => { }); test("filters on string list field equals", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -904,10 +904,10 @@ test("filters on string list field equals", async (context) => { }); test("filters on string list field has", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -931,10 +931,10 @@ test("filters on string list field has", async (context) => { }); test("filters on enum field equals", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -958,10 +958,10 @@ test("filters on enum field equals", async (context) => { }); test("filters on enum field in", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -986,9 +986,9 @@ test("filters on enum field in", async (context) => { }); test("filters on relationship field equals", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity, createEntityWithBigIntId } = - await setup({ common, userStore }); + await setup({ common, indexingStore }); await createTestEntity({ id: 0 }); await createEntityWithBigIntId({ id: BigInt(0), testEntityId: "0" }); @@ -1019,9 +1019,9 @@ test("filters on relationship field equals", async (context) => { }); test("filters on relationship field in", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, 
createTestEntity, createEntityWithBigIntId } = - await setup({ common, userStore }); + await setup({ common, indexingStore }); await createTestEntity({ id: 0 }); await createEntityWithBigIntId({ id: BigInt(0), testEntityId: "0" }); @@ -1046,9 +1046,9 @@ test("filters on relationship field in", async (context) => { }); test("filters on relationship field in", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity, createEntityWithBigIntId } = - await setup({ common, userStore }); + await setup({ common, indexingStore }); await createTestEntity({ id: 0 }); await createEntityWithBigIntId({ id: BigInt(0), testEntityId: "0" }); @@ -1073,10 +1073,10 @@ test("filters on relationship field in", async (context) => { }); test("orders by on int field ascending", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 1 }); @@ -1102,10 +1102,10 @@ test("orders by on int field ascending", async (context) => { }); test("orders by on int field descending", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 1 }); @@ -1131,10 +1131,10 @@ test("orders by on int field descending", async (context) => { }); test("orders by on bigInt field ascending including negative values", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 1 }); @@ -1162,10 +1162,10 @@ test("orders by on bigInt field ascending including negative values", async (con }); test("orders by on bigInt field descending", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 1 }); @@ -1191,10 +1191,10 @@ test("orders by on bigInt field descending", async (context) => { }); test("limits to the first 100 by default", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await Promise.all(range(0, 105).map((n) => createTestEntity({ id: n }))); @@ -1216,10 +1216,10 @@ test("limits to the first 100 by default", async (context) => { }); test("limits as expected if less than 1000", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await Promise.all(range(0, 105).map((n) => createTestEntity({ id: n }))); @@ -1241,10 +1241,10 @@ test("limits as expected if less than 1000", async (context) => { }); test("throws if limit is greater than 1000", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -1266,10 +1266,10 @@ test("throws if limit is greater than 1000", async 
(context) => { }); test("skips as expected", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await Promise.all(range(0, 105).map((n) => createTestEntity({ id: n }))); @@ -1291,10 +1291,10 @@ test("skips as expected", async (context) => { }); test("throws if skip is greater than 5000", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await Promise.all(range(0, 105).map((n) => createTestEntity({ id: n }))); @@ -1314,10 +1314,10 @@ test("throws if skip is greater than 5000", async (context) => { }); test("limits and skips together as expected", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await Promise.all(range(0, 105).map((n) => createTestEntity({ id: n }))); @@ -1340,14 +1340,14 @@ test("limits and skips together as expected", async (context) => { }); test("serves singular entity versioned at specified timestamp", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 1 }); - await userStore.update({ + await indexingStore.update({ modelName: "TestEntity", timestamp: 10, id: String(1), @@ -1382,16 +1382,16 @@ test("serves singular entity versioned at specified timestamp", async (context) }); test("serves plural entities versioned at specified timestamp", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 1 }); await createTestEntity({ id: 2 }); - await userStore.update({ + await indexingStore.update({ modelName: "TestEntity", timestamp: 10, id: String(1), @@ -1399,7 +1399,7 @@ test("serves plural entities versioned at specified timestamp", async (context) string: "updated", }, }); - await userStore.update({ + await indexingStore.update({ modelName: "TestEntity", timestamp: 15, id: String(2), @@ -1440,11 +1440,11 @@ test("serves plural entities versioned at specified timestamp", async (context) }); test("derived field respects skip argument", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity, createEntityWithBigIntId } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); @@ -1472,10 +1472,10 @@ test("derived field respects skip argument", async (context) => { }); test("responds with appropriate status code pre and post historical sync", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity } = await setup({ common, - userStore, + indexingStore, options: { hasCompletedHistoricalIndexing: false, }, @@ -1520,17 +1520,17 @@ test("responds with appropriate status code pre and post historical sync", async // So, if you want to use time-travel queries with derived fields, you need to manually // include the desired timestamp at every level of the 
query. test.skip("serves derived entities versioned at provided timestamp", async (context) => { - const { common, userStore } = context; + const { common, indexingStore } = context; const { service, gql, createTestEntity, createEntityWithBigIntId } = await setup({ common, - userStore, + indexingStore, }); await createTestEntity({ id: 0 }); await createEntityWithBigIntId({ id: BigInt(0), testEntityId: "0" }); - await userStore.update({ + await indexingStore.update({ modelName: "EntityWithBigIntId", timestamp: 10, id: BigInt(0), diff --git a/packages/core/src/server/service.ts b/packages/core/src/server/service.ts index 47d912cfe..f74580036 100644 --- a/packages/core/src/server/service.ts +++ b/packages/core/src/server/service.ts @@ -6,14 +6,14 @@ import { createHandler } from "graphql-http/lib/use/express"; import { createHttpTerminator } from "http-terminator"; import { createServer, Server } from "node:http"; +import type { IndexingStore } from "@/indexing-store/store"; import type { Common } from "@/Ponder"; import { graphiQLHtml } from "@/ui/graphiql.html"; -import type { UserStore } from "@/user-store/store"; import { startClock } from "@/utils/timer"; export class ServerService { private common: Common; - private userStore: UserStore; + private indexingStore: IndexingStore; private port: number; app?: express.Express; @@ -22,9 +22,15 @@ export class ServerService { isHistoricalIndexingComplete = false; - constructor({ common, userStore }: { common: Common; userStore: UserStore }) { + constructor({ + common, + indexingStore, + }: { + common: Common; + indexingStore: IndexingStore; + }) { this.common = common; - this.userStore = userStore; + this.indexingStore = indexingStore; this.port = this.common.options.port; } @@ -143,7 +149,7 @@ export class ServerService { reload({ graphqlSchema }: { graphqlSchema: GraphQLSchema }) { const graphqlMiddleware = createHandler({ schema: graphqlSchema, - context: { store: this.userStore }, + context: { store: this.indexingStore }, }); /** diff --git a/packages/core/src/event-aggregator/service.test.ts b/packages/core/src/sync-gateway/service.test.ts similarity index 86% rename from packages/core/src/event-aggregator/service.test.ts rename to packages/core/src/sync-gateway/service.test.ts index e77aeb684..e013be24b 100644 --- a/packages/core/src/event-aggregator/service.test.ts +++ b/packages/core/src/sync-gateway/service.test.ts @@ -1,14 +1,14 @@ import { beforeEach, expect, test, vi } from "vitest"; import { usdcContractConfig } from "@/_test/constants"; -import { setupEventStore } from "@/_test/setup"; +import { setupSyncStore } from "@/_test/setup"; import { publicClient } from "@/_test/utils"; import type { Network } from "@/config/networks"; import type { Source } from "@/config/sources"; -import { EventAggregatorService } from "./service"; +import { SyncGateway } from "./service"; -beforeEach((context) => setupEventStore(context)); +beforeEach((context) => setupSyncStore(context)); const mainnet: Network = { name: "mainnet", @@ -49,11 +49,11 @@ const sources: Source[] = [ ]; test("handleNewHistoricalCheckpoint emits new checkpoint", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; - const service = new EventAggregatorService({ + const service = new SyncGateway({ common, - eventStore, + syncStore, networks, sources, }); @@ -72,11 +72,11 @@ test("handleNewHistoricalCheckpoint emits new checkpoint", async (context) => { }); test("handleNewHistoricalCheckpoint does not emit new checkpoint if 
not best", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; - const service = new EventAggregatorService({ + const service = new SyncGateway({ common, - eventStore, + syncStore, sources, networks, }); @@ -102,11 +102,11 @@ test("handleNewHistoricalCheckpoint does not emit new checkpoint if not best", a }); test("handleHistoricalSyncComplete sets historicalSyncCompletedAt if final historical sync is complete", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; - const service = new EventAggregatorService({ + const service = new SyncGateway({ common, - eventStore, + syncStore, sources, networks, }); @@ -130,11 +130,11 @@ test("handleHistoricalSyncComplete sets historicalSyncCompletedAt if final histo }); test("handleNewRealtimeCheckpoint does not emit new checkpoint if historical sync is not complete", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; - const service = new EventAggregatorService({ + const service = new SyncGateway({ common, - eventStore, + syncStore, sources, networks, }); @@ -159,11 +159,11 @@ test("handleNewRealtimeCheckpoint does not emit new checkpoint if historical syn }); test("handleNewRealtimeCheckpoint emits new checkpoint if historical sync is complete", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; - const service = new EventAggregatorService({ + const service = new SyncGateway({ common, - eventStore, + syncStore, sources, networks, }); @@ -197,11 +197,11 @@ test("handleNewRealtimeCheckpoint emits new checkpoint if historical sync is com }); test("handleNewFinalityCheckpoint emits newFinalityCheckpoint", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; - const service = new EventAggregatorService({ + const service = new SyncGateway({ common, - eventStore, + syncStore, sources, networks, }); @@ -223,11 +223,11 @@ test("handleNewFinalityCheckpoint emits newFinalityCheckpoint", async (context) }); test("handleNewFinalityCheckpoint does not emit newFinalityCheckpoint if subsequent event is earlier", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; - const service = new EventAggregatorService({ + const service = new SyncGateway({ common, - eventStore, + syncStore, sources, networks, }); @@ -253,11 +253,11 @@ test("handleNewFinalityCheckpoint does not emit newFinalityCheckpoint if subsequ }); test("handleNewFinalityCheckpoint emits newFinalityCheckpoint if subsequent event is later", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; - const service = new EventAggregatorService({ + const service = new SyncGateway({ common, - eventStore, + syncStore, sources, networks, }); diff --git a/packages/core/src/event-aggregator/service.ts b/packages/core/src/sync-gateway/service.ts similarity index 94% rename from packages/core/src/event-aggregator/service.ts rename to packages/core/src/sync-gateway/service.ts index 190b00cad..5f146d4ed 100644 --- a/packages/core/src/event-aggregator/service.ts +++ b/packages/core/src/sync-gateway/service.ts @@ -4,8 +4,8 @@ import { type Hex, decodeEventLog } from "viem"; import { LogEventMetadata } from "@/config/abi"; import type { Network } from "@/config/networks"; import { Source, sourceIsFactory, sourceIsLogFilter } from "@/config/sources"; -import type { 
EventStore } from "@/event-store/store"; import type { Common } from "@/Ponder"; +import type { SyncStore } from "@/sync-store/store"; import type { Block } from "@/types/block"; import type { Log } from "@/types/log"; import type { Transaction } from "@/types/transaction"; @@ -20,7 +20,7 @@ export type LogEvent = { transaction: Transaction; }; -type EventAggregatorEvents = { +type SyncGatewayEvents = { /** * Emitted when a new event checkpoint is reached. This is the minimum timestamp * at which events are available across all registered networks. @@ -37,11 +37,11 @@ type EventAggregatorEvents = { reorg: { commonAncestorTimestamp: number }; }; -type EventAggregatorMetrics = {}; +type SyncGatewayMetrics = {}; -export class EventAggregatorService extends Emittery { +export class SyncGateway extends Emittery { private common: Common; - private eventStore: EventStore; + private syncStore: SyncStore; private networks: Network[]; private sources: Source[]; @@ -64,23 +64,23 @@ export class EventAggregatorService extends Emittery { } >; - metrics: EventAggregatorMetrics; + metrics: SyncGatewayMetrics; constructor({ common, - eventStore, + syncStore, networks, sources = [], }: { common: Common; - eventStore: EventStore; + syncStore: SyncStore; networks: Network[]; sources?: Source[]; }) { super(); this.common = common; - this.eventStore = eventStore; + this.syncStore = syncStore; this.networks = networks; this.sources = sources; this.metrics = {}; @@ -121,7 +121,7 @@ export class EventAggregatorService extends Emittery { | undefined; }; }) { - const iterator = this.eventStore.getLogEvents({ + const iterator = this.syncStore.getLogEvents({ fromTimestamp, toTimestamp, logFilters: this.sources.filter(sourceIsLogFilter).map((logFilter) => ({ @@ -207,7 +207,7 @@ export class EventAggregatorService extends Emittery { this.networkCheckpoints[chainId].historicalCheckpoint = timestamp; this.common.logger.trace({ - service: "aggregator", + service: "gateway", msg: `New historical checkpoint at ${timestamp} [${formatShortDate( timestamp )}] (chainId=${chainId})`, @@ -229,7 +229,7 @@ export class EventAggregatorService extends Emittery { this.historicalSyncCompletedAt = maxHistoricalCheckpoint; this.common.logger.debug({ - service: "aggregator", + service: "gateway", msg: `Completed historical sync across all networks`, }); } @@ -245,7 +245,7 @@ export class EventAggregatorService extends Emittery { this.networkCheckpoints[chainId].realtimeCheckpoint = timestamp; this.common.logger.trace({ - service: "aggregator", + service: "gateway", msg: `New realtime checkpoint at ${timestamp} [${formatShortDate( timestamp )}] (chainId=${chainId})`, @@ -285,7 +285,7 @@ export class EventAggregatorService extends Emittery { this.checkpoint = newCheckpoint; this.common.logger.trace({ - service: "aggregator", + service: "gateway", msg: `New event checkpoint at ${this.checkpoint} [${formatShortDate( this.checkpoint )}]`, @@ -304,7 +304,7 @@ export class EventAggregatorService extends Emittery { this.finalityCheckpoint = newFinalityCheckpoint; this.common.logger.trace({ - service: "aggregator", + service: "gateway", msg: `New finality checkpoint at ${ this.finalityCheckpoint } [${formatShortDate(this.finalityCheckpoint)}]`, diff --git a/packages/core/src/historical-sync/README.md b/packages/core/src/sync-historical/README.md similarity index 88% rename from packages/core/src/historical-sync/README.md rename to packages/core/src/sync-historical/README.md index b1e0d8778..07abe5d69 100644 --- 
a/packages/core/src/historical-sync/README.md
+++ b/packages/core/src/sync-historical/README.md
@@ -6,7 +6,7 @@
 This README aims to document Ponder's historical sync approach. Please refer to the architecture diagram to see where the historical sync service sits. During startup, the Ponder constructor creates one instance of the historical sync service for each network, passing the event sources (log filter + factories) and user-provided Transport for that network.
 
-The purpose of the historical sync service is to fetch raw blockchain data (blocks, transactions, and logs) from an RPC endpoint and insert that data into the event store. Most of the complexity comes from the decision to aggressively cache raw blockchain data, which avoids unnecessary RPC requests and enables ~instant sync times when restarting a Ponder app during development.
+The purpose of the historical sync service is to fetch raw blockchain data (blocks, transactions, and logs) from an RPC endpoint and insert that data into the sync store. Most of the complexity comes from the decision to aggressively cache raw blockchain data, which avoids unnecessary RPC requests and enables ~instant sync times when restarting a Ponder app during development.
 
 The historical sync service is responsible for handling blocks up to and including the finalized block for the network. The realtime sync service is responsible for the finalized block through latest.
@@ -19,7 +19,7 @@ The historical sync service has a small public API.
 - `start()` method: Starts processing tasks from the queue.
 - `onIdle()` method: Returns a promise that resolves when the historical sync is complete.
 - `kill()` method: Kills the service. Must clean up any resources that would block the process from exiting.
-- `"historicalCheckpoint"` event: Emitted when the minimum completed block among all registered event sources has progressed. This indicates to consumers that the event store now contains a complete history of events for all registered event sources between their start block and this block (inclusive).
+- `"historicalCheckpoint"` event: Emitted when the minimum completed block among all registered event sources has progressed. This indicates to consumers that the sync store now contains a complete history of events for all registered event sources between their start block and this block (inclusive).
 - `"syncComplete"` event: Emitted when the service has finished processing all historical sync tasks.
 
 ## Background
@@ -35,7 +35,7 @@ This hare-brained service design won't make sense if you don't first understand
 Here are a few rough requirements for the service. These follow from our desired user/developer experience.
 
-1. The historical sync procedure should progress iteratively, starting from the first required block and progressing forward. This unlocks the "dev mode" - users can start writing & getting feedback on indexing function code before the entire sync is complete. Ideally, there will be at least a few events in the event store in the time it takes a user to move from the terminal where they ran `pnpm dev` to their editor.
+1. The historical sync procedure should progress iteratively, starting from the first required block and progressing forward. This unlocks the "dev mode" - users can start writing & getting feedback on indexing function code before the entire sync is complete. Ideally, there will be at least a few events in the sync store in the time it takes a user to move from the terminal where they ran `pnpm dev` to their editor.
 2. If a user kills the process and starts it again, the sync progress bar should pick up exactly where it left off.
 3. If a user has fully synced an app, then adds a new contract to `ponder.config.ts`, the service should only sync the new contract - the other contracts should be fully cached.
 4. The service should handle errors. This includes rate-limiting, `eth_getLogs` block range + response size limits, and random/incidental RPC errors.
@@ -49,7 +49,7 @@ The historical sync service is organized around a few components:
 3. A block callback registry
 4. A block progress tracker
 
-The progress trackers are basically an in-memory mirror of the event store cache metadata. Whenever the block progress tracker checkpoint moves forward, the service emits a `"historicalCheckpoint"` event.
+The progress trackers are basically an in-memory mirror of the sync store cache metadata. Whenever the block progress tracker checkpoint moves forward, the service emits a `"historicalCheckpoint"` event.
 
 ## Task types
 
 There are currently 4 kinds of tasks that can be added to the queue.
@@ -60,7 +60,7 @@ Parameters: `fromBlock`, `toBlock`, `LogFilterCriteria` (this includes `address` and `topics`)
 1. Call `eth_getLogs(fromBlock, toBlock, address, topics)` to get all logs matching this filter.
-2. For each unique block number among logs.map(log => log.blockNumber), register a block callback. Each block callback inserts raw logs + cache metadata into the event store.
+2. For each unique block number among logs.map(log => log.blockNumber), register a block callback. Each block callback inserts raw logs + cache metadata into the sync store.
 3. Update the progress tracker for this log filter. Then, if the overall checkpoint across all log filters & child contracts has moved forward, schedule any block tasks that are now ready to be processed.
 
 ### Factory contract task
 
@@ -68,16 +68,16 @@ Parameters: `fromBlock`, `toBlock`, `FactoryCriteria` (includes `factoryAddress`, `factoryEventSelector`, `childAddressLocation`)
 1. Call `eth_getLogs(fromBlock, toBlock, address: factoryAddress, topics: [factoryEventSelector])` to get all new child contracts in this block range.
-2. Add new child contracts to the event store and update the cache metadata.
+2. Add new child contracts to the sync store and update the cache metadata.
 3. Update the progress tracker for this factory contract. Then, if the checkpoint for this factory contract has moved forward, schedule new child contract tasks accordingly.
 
 ### Child contract task
 
 Parameters: `fromBlock`, `toBlock`, `FactoryCriteria`
 
-1. Query `childContractAddresses` from the event store up to and including `toBlock`.
+1. Query `childContractAddresses` from the sync store up to and including `toBlock`.
 2. Call `eth_getLogs(fromBlock, toBlock, address: [childContractAddresses])`.
-3. For each unique block number among logs.map(log => log.blockNumber), register a block callback. Each block callback inserts raw logs + cache metadata into the event store.
+3. For each unique block number among logs.map(log => log.blockNumber), register a block callback. Each block callback inserts raw logs + cache metadata into the sync store.
 4. Update the progress tracker for this child contract. Then, if the overall checkpoint across all log filters & child contracts has moved forward, schedule any block tasks that are now ready to be processed.
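The task recipes above share one shape: fetch logs, register block callbacks, advance a progress tracker. Here is a compressed sketch of the log filter task under that reading; `rpc`, `registerBlockCallback`, and `progressTracker` are hypothetical stand-ins for the service's internals, and only `syncStore.insertLogFilterInterval` is a name that appears in this patch.

```ts
// Hypothetical sketch of the log filter task flow described above; the
// declared helpers are simplified stand-ins, not real Ponder internals.
type RawLog = { blockNumber: string };

declare const rpc: {
  request(args: { method: "eth_getLogs"; params: unknown[] }): Promise<RawLog[]>;
};
declare function registerBlockCallback(blockNumber: string, cb: () => Promise<void>): void;
declare const progressTracker: { advance(toBlock: number): void };
declare const syncStore: { insertLogFilterInterval(args: unknown): Promise<void> };

async function logFilterTask(
  fromBlock: number,
  toBlock: number,
  criteria: { address?: string; topics?: (string | null)[] }
) {
  // 1. Fetch every log matching this filter in the block range.
  const logs = await rpc.request({
    method: "eth_getLogs",
    params: [{ fromBlock, toBlock, ...criteria }],
  });

  // 2. Register a block callback per unique block number; each callback
  //    writes raw logs + cache metadata to the sync store.
  for (const blockNumber of new Set(logs.map((log) => log.blockNumber))) {
    registerBlockCallback(blockNumber, () =>
      syncStore.insertLogFilterInterval({ blockNumber /* + raw logs, metadata */ })
    );
  }

  // 3. Advance this filter's progress tracker; if the overall checkpoint
  //    moved forward, newly ready block tasks get scheduled.
  progressTracker.advance(toBlock);
}
```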
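The lifecycle this README describes is easiest to see end to end in code. The following is a minimal consumer sketch, not part of this patch: the constructor options and the `start()`/`onIdle()`/`kill()` calls mirror the renamed tests in `sync-historical/service.test.ts`, while the Emittery-style `.on()` handlers and the `blockTimestamp` payload are assumptions.

```ts
// Hypothetical wiring sketch; only the names taken from this patch are real.
import { HistoricalSyncService } from "@/sync-historical/service";

// Stand-ins for values the app (or test setup) builds elsewhere; typed
// loosely so the sketch stays self-contained.
declare const common: any;
declare const syncStore: any;
declare const network: any;
declare const sources: any[];

const service = new HistoricalSyncService({ common, syncStore, network, sources });

// Assumed Emittery-style events, as named in the public API list above.
service.on("historicalCheckpoint", ({ blockTimestamp }: any) => {
  // The sync store now holds a complete event history for every registered
  // source from its start block through this checkpoint.
  console.log(`historical sync progressed to ${blockTimestamp}`);
});
service.on("syncComplete", () => {
  console.log("all historical sync tasks processed");
});

service.start(); // begin processing tasks from the queue
await service.onIdle(); // resolves once the historical sync is complete
await service.kill(); // clean up resources so the process can exit
```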
### Block task diff --git a/packages/core/src/historical-sync/service.test.ts b/packages/core/src/sync-historical/service.test.ts similarity index 85% rename from packages/core/src/historical-sync/service.test.ts rename to packages/core/src/sync-historical/service.test.ts index 2971b650f..54673c15b 100644 --- a/packages/core/src/historical-sync/service.test.ts +++ b/packages/core/src/sync-historical/service.test.ts @@ -9,14 +9,14 @@ import { uniswapV3PoolFactoryConfig, usdcContractConfig, } from "@/_test/constants"; -import { setupEventStore } from "@/_test/setup"; +import { setupSyncStore } from "@/_test/setup"; import { publicClient } from "@/_test/utils"; import type { Network } from "@/config/networks"; import type { Source } from "@/config/sources"; import { HistoricalSyncService } from "./service"; -beforeEach((context) => setupEventStore(context)); +beforeEach((context) => setupSyncStore(context)); const network: Network = { name: "mainnet", @@ -56,11 +56,11 @@ const uniswapV3Factory = { } satisfies Source; test("start() with log filter inserts log filter interval records", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new HistoricalSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -68,7 +68,7 @@ test("start() with log filter inserts log filter interval records", async (conte service.start(); await service.onIdle(); - const logFilterIntervals = await eventStore.getLogFilterIntervals({ + const logFilterIntervals = await syncStore.getLogFilterIntervals({ chainId: network.chainId, logFilter: usdcLogFilter.criteria, }); @@ -79,11 +79,11 @@ test("start() with log filter inserts log filter interval records", async (conte }); test("start() with factory contract inserts log filter and factory log filter interval records", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new HistoricalSyncService({ common, - eventStore, + syncStore, network, sources: [uniswapV3Factory], }); @@ -91,19 +91,17 @@ test("start() with factory contract inserts log filter and factory log filter in service.start(); await service.onIdle(); - const childAddressLogFilterIntervals = await eventStore.getLogFilterIntervals( - { - chainId: network.chainId, - logFilter: { - address: uniswapV3Factory.criteria.address, - topics: [uniswapV3Factory.criteria.eventSelector], - }, - } - ); + const childAddressLogFilterIntervals = await syncStore.getLogFilterIntervals({ + chainId: network.chainId, + logFilter: { + address: uniswapV3Factory.criteria.address, + topics: [uniswapV3Factory.criteria.eventSelector], + }, + }); expect(childAddressLogFilterIntervals).toMatchObject([[16369500, 16370000]]); - const childContractIntervals = await eventStore.getFactoryLogFilterIntervals({ + const childContractIntervals = await syncStore.getFactoryLogFilterIntervals({ chainId: uniswapV3Factory.chainId, factory: uniswapV3Factory.criteria, }); @@ -113,11 +111,11 @@ test("start() with factory contract inserts log filter and factory log filter in }); test("start() with factory contract inserts child contract addresses", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new HistoricalSyncService({ common, - eventStore, + syncStore, network, sources: [uniswapV3Factory], }); @@ -125,7 +123,7 @@ test("start() with factory contract inserts child contract addresses", async (co service.start(); await 
service.onIdle(); - const iterator = eventStore.getFactoryChildAddresses({ + const iterator = syncStore.getFactoryChildAddresses({ chainId: uniswapV3Factory.chainId, factory: uniswapV3Factory.criteria, upToBlockNumber: 16370000n, @@ -147,11 +145,11 @@ test("start() with factory contract inserts child contract addresses", async (co }); test("setup() with log filter and factory contract updates block metrics", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new HistoricalSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter, uniswapV3Factory], }); @@ -188,11 +186,11 @@ test("setup() with log filter and factory contract updates block metrics", async }); test("start() with log filter and factory contract updates completed blocks metrics", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new HistoricalSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter, uniswapV3Factory], }); @@ -219,11 +217,11 @@ test("start() with log filter and factory contract updates completed blocks metr }); test("start() with log filter and factory contract updates rpc request duration metrics", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new HistoricalSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -256,12 +254,12 @@ test("start() with log filter and factory contract updates rpc request duration await service.kill(); }); -test("start() adds log filter events to event store", async (context) => { - const { common, eventStore } = context; +test("start() adds log filter events to sync store", async (context) => { + const { common, syncStore } = context; const service = new HistoricalSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -269,7 +267,7 @@ test("start() adds log filter events to event store", async (context) => { service.start(); await service.onIdle(); - const iterator = eventStore.getLogEvents({ + const iterator = syncStore.getLogEvents({ fromTimestamp: 0, toTimestamp: Number.MAX_SAFE_INTEGER, logFilters: [ @@ -310,12 +308,12 @@ test("start() adds log filter events to event store", async (context) => { await service.kill(); }); -test("start() adds log filter and factory contract events to event store", async (context) => { - const { common, eventStore } = context; +test("start() adds log filter and factory contract events to sync store", async (context) => { + const { common, syncStore } = context; const service = new HistoricalSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter, uniswapV3Factory], }); @@ -323,7 +321,7 @@ test("start() adds log filter and factory contract events to event store", async service.start(); await service.onIdle(); - const iterator = eventStore.getLogEvents({ + const iterator = syncStore.getLogEvents({ fromTimestamp: 0, toTimestamp: Number.MAX_SAFE_INTEGER, logFilters: [ @@ -353,13 +351,13 @@ test("start() adds log filter and factory contract events to event store", async }); test("start() retries unexpected error in log filter task", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; rpcRequestSpy.mockRejectedValueOnce(new Error("Unexpected error!")); const service = new HistoricalSyncService({ common, - eventStore, + syncStore, network, sources: 
[usdcLogFilter], }); @@ -367,7 +365,7 @@ test("start() retries unexpected error in log filter task", async (context) => { service.start(); await service.onIdle(); - const logFilterIntervals = await eventStore.getLogFilterIntervals({ + const logFilterIntervals = await syncStore.getLogFilterIntervals({ chainId: network.chainId, logFilter: usdcLogFilter.criteria, }); @@ -378,14 +376,14 @@ test("start() retries unexpected error in log filter task", async (context) => { }); test("start() retries unexpected error in block task", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; - const spy = vi.spyOn(eventStore, "insertLogFilterInterval"); + const spy = vi.spyOn(syncStore, "insertLogFilterInterval"); spy.mockRejectedValueOnce(new Error("Unexpected error!")); const service = new HistoricalSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -393,7 +391,7 @@ test("start() retries unexpected error in block task", async (context) => { service.start(); await service.onIdle(); - const logFilterIntervals = await eventStore.getLogFilterIntervals({ + const logFilterIntervals = await syncStore.getLogFilterIntervals({ chainId: network.chainId, logFilter: usdcLogFilter.criteria, }); @@ -404,7 +402,7 @@ test("start() retries unexpected error in block task", async (context) => { }); test("start() handles Alchemy 'Log response size exceeded' error", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; rpcRequestSpy.mockRejectedValueOnce( new InvalidParamsRpcError( @@ -417,7 +415,7 @@ test("start() handles Alchemy 'Log response size exceeded' error", async (contex const service = new HistoricalSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -425,7 +423,7 @@ test("start() handles Alchemy 'Log response size exceeded' error", async (contex service.start(); await service.onIdle(); - const logFilterIntervals = await eventStore.getLogFilterIntervals({ + const logFilterIntervals = await syncStore.getLogFilterIntervals({ chainId: network.chainId, logFilter: usdcLogFilter.criteria, }); @@ -435,7 +433,7 @@ test("start() handles Alchemy 'Log response size exceeded' error", async (contex }); test("start() handles Quicknode 'eth_getLogs and eth_newFilter are limited to a 10,000 blocks range' error", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; rpcRequestSpy.mockRejectedValueOnce( new HttpRequestError({ @@ -447,7 +445,7 @@ test("start() handles Quicknode 'eth_getLogs and eth_newFilter are limited to a const service = new HistoricalSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -455,7 +453,7 @@ test("start() handles Quicknode 'eth_getLogs and eth_newFilter are limited to a service.start(); await service.onIdle(); - const logFilterIntervals = await eventStore.getLogFilterIntervals({ + const logFilterIntervals = await syncStore.getLogFilterIntervals({ chainId: network.chainId, logFilter: usdcLogFilter.criteria, }); @@ -465,11 +463,11 @@ test("start() handles Quicknode 'eth_getLogs and eth_newFilter are limited to a }); test("start() emits sync completed event", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new HistoricalSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -485,11 +483,11 @@ test("start() emits sync completed event", 
async (context) => { }); test("start() emits checkpoint and sync completed event if 100% cached", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; let service = new HistoricalSyncService({ common, - eventStore, + syncStore, sources: [usdcLogFilter], network, }); @@ -501,7 +499,7 @@ test("start() emits checkpoint and sync completed event if 100% cached", async ( service = new HistoricalSyncService({ common, - eventStore, + syncStore, sources: [usdcLogFilter], network, }); @@ -523,11 +521,11 @@ test("start() emits checkpoint and sync completed event if 100% cached", async ( }); test("start() emits historicalCheckpoint event", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new HistoricalSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); diff --git a/packages/core/src/historical-sync/service.ts b/packages/core/src/sync-historical/service.ts similarity index 97% rename from packages/core/src/historical-sync/service.ts rename to packages/core/src/sync-historical/service.ts index 8a8abc28e..579430224 100644 --- a/packages/core/src/historical-sync/service.ts +++ b/packages/core/src/sync-historical/service.ts @@ -21,8 +21,8 @@ import { type Source, sourceIsLogFilter, } from "@/config/sources"; -import type { EventStore } from "@/event-store/store"; import type { Common } from "@/Ponder"; +import type { SyncStore } from "@/sync-store/store"; import { formatEta, formatPercentage } from "@/utils/format"; import { BlockProgressTracker, @@ -43,7 +43,7 @@ type HistoricalSyncEvents = { syncComplete: undefined; /** * Emitted when the minimum cached timestamp among all registered event sources moves forward. - * This indicates to consumers that the connected event store now contains a complete history + * This indicates to consumers that the connected sync store now contains a complete history * of events for all registered event sources between their start block and this timestamp (inclusive). */ historicalCheckpoint: { blockNumber: number; blockTimestamp: number }; @@ -86,7 +86,7 @@ type HistoricalSyncTask = export class HistoricalSyncService extends Emittery { private common: Common; - private eventStore: EventStore; + private syncStore: SyncStore; private network: Network; /** @@ -109,7 +109,7 @@ export class HistoricalSyncService extends Emittery { /** * Functions registered by log filter + child contract tasks. These functions accept * a raw block object, get required data from it, then insert data and cache metadata - * into the event store. The keys of this object are used to keep track of which blocks + * into the sync store. The keys of this object are used to keep track of which blocks * must be fetched. 
*/ private blockCallbacks: Record< @@ -129,19 +129,19 @@ export class HistoricalSyncService extends Emittery { constructor({ common, - eventStore, + syncStore, network, sources = [], }: { common: Common; - eventStore: EventStore; + syncStore: SyncStore; network: Network; sources?: Source[]; }) { super(); this.common = common; - this.eventStore = eventStore; + this.syncStore = syncStore; this.network = network; this.sources = sources; @@ -189,7 +189,7 @@ export class HistoricalSyncService extends Emittery { } const completedLogFilterIntervals = - await this.eventStore.getLogFilterIntervals({ + await this.syncStore.getLogFilterIntervals({ chainId: source.chainId, logFilter: { address: source.criteria.address, @@ -274,7 +274,7 @@ export class HistoricalSyncService extends Emittery { // Note that factory child address progress is stored using // log intervals for the factory log. const completedFactoryChildAddressIntervals = - await this.eventStore.getLogFilterIntervals({ + await this.syncStore.getLogFilterIntervals({ chainId: source.chainId, logFilter: { address: source.criteria.address, @@ -336,7 +336,7 @@ export class HistoricalSyncService extends Emittery { ); const completedFactoryLogFilterIntervals = - await this.eventStore.getFactoryLogFilterIntervals({ + await this.syncStore.getFactoryLogFilterIntervals({ chainId: source.chainId, factory: source.criteria, }); @@ -586,7 +586,7 @@ export class HistoricalSyncService extends Emittery { for (const logInterval of logIntervals) { const { startBlock, endBlock, logs, transactionHashes } = logInterval; (this.blockCallbacks[endBlock] ||= []).push(async (block) => { - await this.eventStore.insertLogFilterInterval({ + await this.syncStore.insertLogFilterInterval({ chainId: logFilter.chainId, block, transactions: block.transactions.filter((tx) => @@ -634,7 +634,7 @@ export class HistoricalSyncService extends Emittery { }); // Insert the new child address logs into the store. 
- await this.eventStore.insertFactoryChildAddressLogs({ + await this.syncStore.insertFactoryChildAddressLogs({ chainId: factory.chainId, logs, }); @@ -646,7 +646,7 @@ export class HistoricalSyncService extends Emittery { for (const logInterval of logIntervals) { const { startBlock, endBlock, logs, transactionHashes } = logInterval; (this.blockCallbacks[endBlock] ||= []).push(async (block) => { - await this.eventStore.insertLogFilterInterval({ + await this.syncStore.insertLogFilterInterval({ chainId: factory.chainId, logFilter: { address: factory.criteria.address, @@ -700,7 +700,7 @@ export class HistoricalSyncService extends Emittery { }: { task: FactoryLogFilterTask; }) => { - const iterator = this.eventStore.getFactoryChildAddresses({ + const iterator = this.syncStore.getFactoryChildAddresses({ chainId: factory.chainId, factory: factory.criteria, upToBlockNumber: BigInt(toBlock), @@ -724,7 +724,7 @@ export class HistoricalSyncService extends Emittery { const { startBlock, endBlock, logs, transactionHashes } = logInterval; (this.blockCallbacks[endBlock] ||= []).push(async (block) => { - await this.eventStore.insertFactoryLogFilterInterval({ + await this.syncStore.insertFactoryLogFilterInterval({ chainId: factory.chainId, factory: factory.criteria, block, diff --git a/packages/core/src/historical-sync/utils.test.ts b/packages/core/src/sync-historical/utils.test.ts similarity index 100% rename from packages/core/src/historical-sync/utils.test.ts rename to packages/core/src/sync-historical/utils.test.ts diff --git a/packages/core/src/historical-sync/utils.ts b/packages/core/src/sync-historical/utils.ts similarity index 100% rename from packages/core/src/historical-sync/utils.ts rename to packages/core/src/sync-historical/utils.ts diff --git a/packages/core/src/realtime-sync/bloom.test.ts b/packages/core/src/sync-realtime/bloom.test.ts similarity index 100% rename from packages/core/src/realtime-sync/bloom.test.ts rename to packages/core/src/sync-realtime/bloom.test.ts diff --git a/packages/core/src/realtime-sync/bloom.ts b/packages/core/src/sync-realtime/bloom.ts similarity index 100% rename from packages/core/src/realtime-sync/bloom.ts rename to packages/core/src/sync-realtime/bloom.ts diff --git a/packages/core/src/realtime-sync/filter.test.ts b/packages/core/src/sync-realtime/filter.test.ts similarity index 100% rename from packages/core/src/realtime-sync/filter.test.ts rename to packages/core/src/sync-realtime/filter.test.ts diff --git a/packages/core/src/realtime-sync/filter.ts b/packages/core/src/sync-realtime/filter.ts similarity index 100% rename from packages/core/src/realtime-sync/filter.ts rename to packages/core/src/sync-realtime/filter.ts diff --git a/packages/core/src/realtime-sync/format.ts b/packages/core/src/sync-realtime/format.ts similarity index 100% rename from packages/core/src/realtime-sync/format.ts rename to packages/core/src/sync-realtime/format.ts diff --git a/packages/core/src/realtime-sync/service.test.ts b/packages/core/src/sync-realtime/service.test.ts similarity index 90% rename from packages/core/src/realtime-sync/service.test.ts rename to packages/core/src/sync-realtime/service.test.ts index 1139cdd9a..55f77bba8 100644 --- a/packages/core/src/realtime-sync/service.test.ts +++ b/packages/core/src/sync-realtime/service.test.ts @@ -8,7 +8,7 @@ import { usdcContractConfig, vitalik, } from "@/_test/constants"; -import { resetTestClient, setupEventStore } from "@/_test/setup"; +import { resetTestClient, setupSyncStore } from "@/_test/setup"; import { publicClient, 
testClient, walletClient } from "@/_test/utils"; import type { Network } from "@/config/networks"; import type { Source } from "@/config/sources"; @@ -17,7 +17,7 @@ import { range } from "@/utils/range"; import { RealtimeSyncService } from "./service"; -beforeEach((context) => setupEventStore(context)); +beforeEach((context) => setupSyncStore(context)); beforeEach(resetTestClient); const network: Network = { @@ -100,11 +100,11 @@ const createAndInitializeUniswapV3Pool = async () => { }; test("setup() returns block numbers", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new RealtimeSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -118,11 +118,11 @@ test("setup() returns block numbers", async (context) => { }); test("start() adds blocks to the store from finalized to latest", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new RealtimeSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -131,7 +131,7 @@ test("start() adds blocks to the store from finalized to latest", async (context await service.start(); await service.onIdle(); - const blocks = await eventStore.db.selectFrom("blocks").selectAll().execute(); + const blocks = await syncStore.db.selectFrom("blocks").selectAll().execute(); expect(blocks).toHaveLength(5); expect(blocks.map((block) => decodeToBigInt(block.number))).toMatchObject([ 16379996n, @@ -145,11 +145,11 @@ test("start() adds blocks to the store from finalized to latest", async (context }); test("start() adds all required transactions to the store", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new RealtimeSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -158,10 +158,10 @@ test("start() adds all required transactions to the store", async (context) => { await service.start(); await service.onIdle(); - const logs = await eventStore.db.selectFrom("logs").selectAll().execute(); + const logs = await syncStore.db.selectFrom("logs").selectAll().execute(); const requiredTransactionHashes = new Set(logs.map((l) => l.transactionHash)); - const transactions = await eventStore.db + const transactions = await syncStore.db .selectFrom("transactions") .selectAll() .execute(); @@ -175,11 +175,11 @@ test("start() adds all required transactions to the store", async (context) => { }); test("start() adds all matched logs to the store", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new RealtimeSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -188,7 +188,7 @@ test("start() adds all matched logs to the store", async (context) => { await service.start(); await service.onIdle(); - const logs = await eventStore.db.selectFrom("logs").selectAll().execute(); + const logs = await syncStore.db.selectFrom("logs").selectAll().execute(); expect(logs).toHaveLength(79); logs.forEach((log) => { expect(log.address).toEqual(usdcContractConfig.address); @@ -198,11 +198,11 @@ test("start() adds all matched logs to the store", async (context) => { }); test("start() handles new blocks", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new RealtimeSyncService({ common, - eventStore, + syncStore, 
network, sources: [usdcLogFilter], }); @@ -227,7 +227,7 @@ test("start() handles new blocks", async (context) => { await service.onIdle(); - const blocks = await eventStore.db.selectFrom("blocks").selectAll().execute(); + const blocks = await syncStore.db.selectFrom("blocks").selectAll().execute(); expect(blocks).toHaveLength(7); expect(blocks.map((block) => decodeToBigInt(block.number))).toMatchObject([ @@ -245,11 +245,11 @@ test("start() handles new blocks", async (context) => { }); test("start() handles error while fetching new latest block gracefully", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new RealtimeSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -271,7 +271,7 @@ test("start() handles error while fetching new latest block gracefully", async ( await service.onIdle(); - const blocks = await eventStore.db.selectFrom("blocks").selectAll().execute(); + const blocks = await syncStore.db.selectFrom("blocks").selectAll().execute(); expect(blocks).toHaveLength(6); expect(blocks.map((block) => decodeToBigInt(block.number))).toMatchObject([ 16379996n, @@ -286,11 +286,11 @@ test("start() handles error while fetching new latest block gracefully", async ( }); test("start() emits realtimeCheckpoint events", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new RealtimeSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -327,11 +327,11 @@ test("start() emits realtimeCheckpoint events", async (context) => { }); test("start() inserts log filter interval records for finalized blocks", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new RealtimeSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -350,7 +350,7 @@ test("start() inserts log filter interval records for finalized blocks", async ( await service.addNewLatestBlock(); await service.onIdle(); - const logFilterIntervals = await eventStore.getLogFilterIntervals({ + const logFilterIntervals = await syncStore.getLogFilterIntervals({ chainId: network.chainId, logFilter: usdcLogFilter.criteria, }); @@ -365,11 +365,11 @@ test("start() inserts log filter interval records for finalized blocks", async ( }); test("start() deletes data from the store after 3 block shallow reorg", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new RealtimeSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -389,7 +389,7 @@ test("start() deletes data from the store after 3 block shallow reorg", async (c await service.addNewLatestBlock(); await service.onIdle(); - const blocks = await eventStore.db.selectFrom("blocks").selectAll().execute(); + const blocks = await syncStore.db.selectFrom("blocks").selectAll().execute(); expect(blocks.map((block) => decodeToBigInt(block.number))).toMatchObject([ 16379996n, 16379997n, @@ -415,7 +415,7 @@ test("start() deletes data from the store after 3 block shallow reorg", async (c await service.addNewLatestBlock(); await service.onIdle(); - const blocksAfterReorg = await eventStore.db + const blocksAfterReorg = await syncStore.db .selectFrom("blocks") .selectAll() .execute(); @@ -435,11 +435,11 @@ test("start() deletes data from the store after 3 block shallow reorg", async (c }); test("start() 
emits shallowReorg event after 3 block shallow reorg", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new RealtimeSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -477,11 +477,11 @@ test("start() emits shallowReorg event after 3 block shallow reorg", async (cont }); test("emits deepReorg event after deep reorg", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new RealtimeSyncService({ common, - eventStore, + syncStore, network, sources: [usdcLogFilter], }); @@ -530,11 +530,11 @@ test("emits deepReorg event after deep reorg", async (context) => { }); test("start() with factory contract inserts new child contracts records and child contract events", async (context) => { - const { common, eventStore } = context; + const { common, syncStore } = context; const service = new RealtimeSyncService({ common, - eventStore, + syncStore, network, sources: [uniswapV3Factory], }); @@ -548,7 +548,7 @@ test("start() with factory contract inserts new child contracts records and chil await service.addNewLatestBlock(); await service.onIdle(); - const iterator = eventStore.getFactoryChildAddresses({ + const iterator = syncStore.getFactoryChildAddresses({ chainId: uniswapV3Factory.chainId, factory: uniswapV3Factory.criteria, upToBlockNumber: 16380010n, @@ -561,7 +561,7 @@ test("start() with factory contract inserts new child contracts records and chil "0x25e0870d42b6cef90b6dc8216588fad55d5f55c4", ]); - const eventIterator = eventStore.getLogEvents({ + const eventIterator = syncStore.getLogEvents({ fromTimestamp: 0, toTimestamp: Number.MAX_SAFE_INTEGER, factories: [ diff --git a/packages/core/src/realtime-sync/service.ts b/packages/core/src/sync-realtime/service.ts similarity index 97% rename from packages/core/src/realtime-sync/service.ts rename to packages/core/src/sync-realtime/service.ts index bafd6344e..b8bc94943 100644 --- a/packages/core/src/realtime-sync/service.ts +++ b/packages/core/src/sync-realtime/service.ts @@ -14,8 +14,8 @@ import { sourceIsFactory, sourceIsLogFilter, } from "@/config/sources"; -import type { EventStore } from "@/event-store/store"; import type { Common } from "@/Ponder"; +import type { SyncStore } from "@/sync-store/store"; import { poll } from "@/utils/poll"; import { type Queue, createQueue } from "@/utils/queue"; import { range } from "@/utils/range"; @@ -41,7 +41,7 @@ type RealtimeSyncQueue = Queue; export class RealtimeSyncService extends Emittery { private common: Common; - private eventStore: EventStore; + private syncStore: SyncStore; private network: Network; private sources: Source[]; @@ -56,19 +56,19 @@ export class RealtimeSyncService extends Emittery { constructor({ common, - eventStore, + syncStore, network, sources = [], }: { common: Common; - eventStore: EventStore; + syncStore: SyncStore; network: Network; sources?: Source[]; }) { super(); this.common = common; - this.eventStore = eventStore; + this.syncStore = syncStore; this.network = network; this.sources = sources; @@ -329,7 +329,7 @@ export class RealtimeSyncService extends Emittery { ], }); - await this.eventStore.insertFactoryChildAddressLogs({ + await this.syncStore.insertFactoryChildAddressLogs({ chainId: this.network.chainId, logs: matchedFactoryLogs, }); @@ -342,7 +342,7 @@ export class RealtimeSyncService extends Emittery { // latency and database growth. 
const factoryLogFilters = await Promise.all( this.sources.filter(sourceIsFactory).map(async (factory) => { - const iterator = this.eventStore.getFactoryChildAddresses({ + const iterator = this.syncStore.getFactoryChildAddresses({ chainId: this.network.chainId, factory: factory.criteria, upToBlockNumber: hexToBigInt(block.number!), @@ -390,7 +390,7 @@ export class RealtimeSyncService extends Emittery { ); // TODO: Maybe rename or at least document behavior - await this.eventStore.insertRealtimeBlock({ + await this.syncStore.insertRealtimeBlock({ chainId: this.network.chainId, block: newBlockWithTransactions, transactions: filteredTransactions, @@ -441,7 +441,7 @@ export class RealtimeSyncService extends Emittery { // 1) Log filter intervals // 2) Factory contract intervals // 3) Child filter intervals - await this.eventStore.insertRealtimeInterval({ + await this.syncStore.insertRealtimeInterval({ chainId: this.network.chainId, logFilters: this.sources .filter(sourceIsLogFilter) @@ -565,7 +565,7 @@ export class RealtimeSyncService extends Emittery { (block) => block.number <= commonAncestorBlock.number ); - await this.eventStore.deleteRealtimeData({ + await this.syncStore.deleteRealtimeData({ chainId: this.network.chainId, fromBlock: BigInt(commonAncestorBlock.number), }); diff --git a/packages/core/src/event-store/postgres/format.ts b/packages/core/src/sync-store/postgres/format.ts similarity index 99% rename from packages/core/src/event-store/postgres/format.ts rename to packages/core/src/sync-store/postgres/format.ts index bdd12a591..4627674f6 100644 --- a/packages/core/src/event-store/postgres/format.ts +++ b/packages/core/src/sync-store/postgres/format.ts @@ -198,7 +198,7 @@ type FactoryLogFilterIntervalsTable = { endBlock: bigint; }; -export type EventStoreTables = { +export type SyncStoreTables = { blocks: BlocksTable; transactions: TransactionsTable; logs: LogsTable; diff --git a/packages/core/src/event-store/postgres/migrations.test.ts b/packages/core/src/sync-store/postgres/migrations.test.ts similarity index 76% rename from packages/core/src/event-store/postgres/migrations.test.ts rename to packages/core/src/sync-store/postgres/migrations.test.ts index af6d9e8fb..edf654658 100644 --- a/packages/core/src/event-store/postgres/migrations.test.ts +++ b/packages/core/src/sync-store/postgres/migrations.test.ts @@ -7,7 +7,7 @@ import { blockOneTransactions, contractReadResultOne, } from "@/_test/constants"; -import { setupEventStore } from "@/_test/setup"; +import { setupSyncStore } from "@/_test/setup"; import { rpcToPostgresBlock, @@ -15,7 +15,7 @@ import { rpcToPostgresTransaction, } from "./format"; -beforeEach((context) => setupEventStore(context, { migrateUp: false })); +beforeEach((context) => setupSyncStore(context, { migrateUp: false })); const seed_2023_09_19_0_new_sync_design = async (db: Kysely) => { await db @@ -50,18 +50,18 @@ const seed_2023_09_19_0_new_sync_design = async (db: Kysely) => { }; test("2023_09_19_0_new_sync_design -> 2023_11_06_0_new_rpc_cache_design succeeds", async (context) => { - const { eventStore } = context; + const { syncStore } = context; - if (eventStore.kind !== "postgres") return; + if (syncStore.kind !== "postgres") return; - const { error } = await eventStore.migrator.migrateTo( + const { error } = await syncStore.migrator.migrateTo( "2023_09_19_0_new_sync_design" ); expect(error).toBeFalsy(); - await seed_2023_09_19_0_new_sync_design(eventStore.db); + await seed_2023_09_19_0_new_sync_design(syncStore.db); - const { error: latestError } = await 
eventStore.migrator.migrateTo(
+  const { error: latestError } = await syncStore.migrator.migrateTo(
     "2023_11_06_0_new_rpc_cache_design"
   );
   expect(latestError).toBeFalsy();
diff --git a/packages/core/src/event-store/postgres/migrations.ts b/packages/core/src/sync-store/postgres/migrations.ts
similarity index 100%
rename from packages/core/src/event-store/postgres/migrations.ts
rename to packages/core/src/sync-store/postgres/migrations.ts
diff --git a/packages/core/src/event-store/postgres/store.ts b/packages/core/src/sync-store/postgres/store.ts
similarity index 99%
rename from packages/core/src/event-store/postgres/store.ts
rename to packages/core/src/sync-store/postgres/store.ts
index 6964e2b0f..35c838e04 100644
--- a/packages/core/src/event-store/postgres/store.ts
+++ b/packages/core/src/sync-store/postgres/store.ts
@@ -22,18 +22,18 @@ import {
 import { intervalIntersectionMany, intervalUnion } from "@/utils/interval";
 import { range } from "@/utils/range";

-import type { EventStore } from "../store";
+import type { SyncStore } from "../store";
 import {
-  type EventStoreTables,
+  type SyncStoreTables,
   rpcToPostgresBlock,
   rpcToPostgresLog,
   rpcToPostgresTransaction,
 } from "./format";
 import { migrationProvider } from "./migrations";

-export class PostgresEventStore implements EventStore {
+export class PostgresSyncStore implements SyncStore {
   kind = "postgres" as const;
-  db: Kysely<EventStoreTables>;
+  db: Kysely<SyncStoreTables>;
   migrator: Migrator;

   constructor({
@@ -43,7 +43,7 @@
     pool: Pool;
     databaseSchema?: string;
   }) {
-    this.db = new Kysely<EventStoreTables>({
+    this.db = new Kysely<SyncStoreTables>({
       dialect: new PostgresDialect({
         pool,
         onCreateConnection: databaseSchema
@@ -665,7 +665,7 @@
     logFilters,
     interval: { startBlock, endBlock },
   }: {
-    tx: KyselyTransaction<EventStoreTables>;
+    tx: KyselyTransaction<SyncStoreTables>;
     chainId: number;
     logFilters: LogFilterCriteria[];
     interval: { startBlock: bigint; endBlock: bigint };
@@ -697,7 +697,7 @@
     factories,
     interval: { startBlock, endBlock },
   }: {
-    tx: KyselyTransaction<EventStoreTables>;
+    tx: KyselyTransaction<SyncStoreTables>;
     chainId: number;
     factories: FactoryCriteria[];
     interval: { startBlock: bigint; endBlock: bigint };
diff --git a/packages/core/src/event-store/sqlite/format.ts b/packages/core/src/sync-store/sqlite/format.ts
similarity index 99%
rename from packages/core/src/event-store/sqlite/format.ts
rename to packages/core/src/sync-store/sqlite/format.ts
index 1e47b0dd3..8ff6efc3c 100644
--- a/packages/core/src/event-store/sqlite/format.ts
+++ b/packages/core/src/sync-store/sqlite/format.ts
@@ -203,7 +203,7 @@ type FactoryLogFilterIntervalsTable = {
   endBlock: BigIntText;
 };

-export type EventStoreTables = {
+export type SyncStoreTables = {
   blocks: BlocksTable;
   transactions: TransactionsTable;
   logs: LogsTable;
diff --git a/packages/core/src/event-store/sqlite/migrations.test.ts b/packages/core/src/sync-store/sqlite/migrations.test.ts
similarity index 76%
rename from packages/core/src/event-store/sqlite/migrations.test.ts
rename to packages/core/src/sync-store/sqlite/migrations.test.ts
index aab2f8846..9e8f216ce 100644
--- a/packages/core/src/event-store/sqlite/migrations.test.ts
+++ b/packages/core/src/sync-store/sqlite/migrations.test.ts
@@ -7,7 +7,7 @@ import {
   blockOneTransactions,
   contractReadResultOne,
 } from "@/_test/constants";
-import { setupEventStore } from "@/_test/setup";
+import { setupSyncStore } from "@/_test/setup";

 import {
   rpcToSqliteBlock,
@@ -15,7 +15,7 @@
   rpcToSqliteTransaction,
 } from "./format";

-beforeEach((context) => setupEventStore(context, { migrateUp: false }));
+beforeEach((context) => setupSyncStore(context, { migrateUp: false }));

 const seed_2023_09_19_0_new_sync_design = async (db: Kysely) => {
   await db
@@ -52,18 +52,18 @@
 };

 test(
   "2023_09_19_0_new_sync_design -> 2023_11_06_0_new_rpc_cache_design succeeds",
   async (context) => {
-    const { eventStore } = context;
+    const { syncStore } = context;

-    if (eventStore.kind !== "sqlite") return;
+    if (syncStore.kind !== "sqlite") return;

-    const { error } = await eventStore.migrator.migrateTo(
+    const { error } = await syncStore.migrator.migrateTo(
       "2023_09_19_0_new_sync_design"
     );
     expect(error).toBeFalsy();

-    await seed_2023_09_19_0_new_sync_design(eventStore.db);
+    await seed_2023_09_19_0_new_sync_design(syncStore.db);

-    const { error: latestError } = await eventStore.migrator.migrateTo(
+    const { error: latestError } = await syncStore.migrator.migrateTo(
       "2023_11_06_0_new_rpc_cache_design"
     );
     expect(latestError).toBeFalsy();
diff --git a/packages/core/src/event-store/sqlite/migrations.ts b/packages/core/src/sync-store/sqlite/migrations.ts
similarity index 100%
rename from packages/core/src/event-store/sqlite/migrations.ts
rename to packages/core/src/sync-store/sqlite/migrations.ts
diff --git a/packages/core/src/event-store/sqlite/store.ts b/packages/core/src/sync-store/sqlite/store.ts
similarity index 99%
rename from packages/core/src/event-store/sqlite/store.ts
rename to packages/core/src/sync-store/sqlite/store.ts
index 871a1b15e..b37853501 100644
--- a/packages/core/src/event-store/sqlite/store.ts
+++ b/packages/core/src/sync-store/sqlite/store.ts
@@ -22,9 +22,9 @@ import {
 import { intervalIntersectionMany, intervalUnion } from "@/utils/interval";
 import { range } from "@/utils/range";

-import type { EventStore } from "../store";
+import type { SyncStore } from "../store";
 import {
-  type EventStoreTables,
+  type SyncStoreTables,
   BigIntText,
   rpcToSqliteBlock,
   rpcToSqliteLog,
@@ -32,13 +32,13 @@ import {
 } from "./format";
 import { migrationProvider } from "./migrations";

-export class SqliteEventStore implements EventStore {
+export class SqliteSyncStore implements SyncStore {
   kind = "sqlite" as const;
-  db: Kysely<EventStoreTables>;
+  db: Kysely<SyncStoreTables>;
   migrator: Migrator;

   constructor({ db }: { db: Sqlite.Database }) {
-    this.db = new Kysely<EventStoreTables>({
+    this.db = new Kysely<SyncStoreTables>({
       dialect: new SqliteDialect({ database: db }),
     });

@@ -635,7 +635,7 @@
     logFilters,
     interval: { startBlock, endBlock },
   }: {
-    tx: KyselyTransaction<EventStoreTables>;
+    tx: KyselyTransaction<SyncStoreTables>;
     chainId: number;
     logFilters: LogFilterCriteria[];
     interval: { startBlock: bigint; endBlock: bigint };
@@ -671,7 +671,7 @@
     factories,
     interval: { startBlock, endBlock },
   }: {
-    tx: KyselyTransaction<EventStoreTables>;
+    tx: KyselyTransaction<SyncStoreTables>;
     chainId: number;
     factories: FactoryCriteria[];
     interval: { startBlock: bigint; endBlock: bigint };
diff --git a/packages/core/src/event-store/store.test.ts b/packages/core/src/sync-store/store.test.ts
similarity index 83%
rename from packages/core/src/event-store/store.test.ts
rename to packages/core/src/sync-store/store.test.ts
index 624802936..665edd828 100644
--- a/packages/core/src/event-store/store.test.ts
+++ b/packages/core/src/sync-store/store.test.ts
@@ -11,15 +11,15 @@ import {
   blockTwoTransactions,
   usdcContractConfig,
 } from "@/_test/constants";
-import { setupEventStore 
} from "@/_test/setup"; +import { setupSyncStore } from "@/_test/setup"; import type { FactoryCriteria, LogFilterCriteria } from "@/config/sources"; -beforeEach((context) => setupEventStore(context)); +beforeEach((context) => setupSyncStore(context)); test("setup creates tables", async (context) => { - const { eventStore } = context; + const { syncStore } = context; - const tables = await eventStore.db.introspection.getTables(); + const tables = await syncStore.db.introspection.getTables(); const tableNames = tables.map((t) => t.name); expect(tableNames).toContain("blocks"); expect(tableNames).toContain("logs"); @@ -34,9 +34,9 @@ test("setup creates tables", async (context) => { }); test("insertLogFilterInterval inserts block, transactions, and logs", async (context) => { - const { eventStore } = context; + const { syncStore } = context; - await eventStore.insertLogFilterInterval({ + await syncStore.insertLogFilterInterval({ chainId: usdcContractConfig.chainId, logFilter: { address: usdcContractConfig.address }, block: blockOne, @@ -48,23 +48,23 @@ test("insertLogFilterInterval inserts block, transactions, and logs", async (con }, }); - const blocks = await eventStore.db.selectFrom("blocks").selectAll().execute(); + const blocks = await syncStore.db.selectFrom("blocks").selectAll().execute(); expect(blocks).toHaveLength(1); - const transactions = await eventStore.db + const transactions = await syncStore.db .selectFrom("transactions") .selectAll() .execute(); expect(transactions).toHaveLength(2); - const logs = await eventStore.db.selectFrom("logs").selectAll().execute(); + const logs = await syncStore.db.selectFrom("logs").selectAll().execute(); expect(logs).toHaveLength(2); }); test("insertLogFilterInterval inserts log filter intervals", async (context) => { - const { eventStore } = context; + const { syncStore } = context; - await eventStore.insertLogFilterInterval({ + await syncStore.insertLogFilterInterval({ chainId: 1, logFilter: { address: ["0xa", "0xb"], @@ -76,7 +76,7 @@ test("insertLogFilterInterval inserts log filter intervals", async (context) => interval: { startBlock: 0n, endBlock: 100n }, }); - const logFilterRanges = await eventStore.getLogFilterIntervals({ + const logFilterRanges = await syncStore.getLogFilterIntervals({ chainId: 1, logFilter: { address: ["0xa", "0xb"], @@ -88,9 +88,9 @@ test("insertLogFilterInterval inserts log filter intervals", async (context) => }); test("insertLogFilterInterval merges ranges on insertion", async (context) => { - const { eventStore } = context; + const { syncStore } = context; - await eventStore.insertLogFilterInterval({ + await syncStore.insertLogFilterInterval({ chainId: usdcContractConfig.chainId, logFilter: { address: usdcContractConfig.address }, block: blockOne, @@ -102,7 +102,7 @@ test("insertLogFilterInterval merges ranges on insertion", async (context) => { }, }); - await eventStore.insertLogFilterInterval({ + await syncStore.insertLogFilterInterval({ chainId: usdcContractConfig.chainId, logFilter: { address: usdcContractConfig.address }, block: blockThree, @@ -114,7 +114,7 @@ test("insertLogFilterInterval merges ranges on insertion", async (context) => { }, }); - let logFilterRanges = await eventStore.getLogFilterIntervals({ + let logFilterRanges = await syncStore.getLogFilterIntervals({ chainId: 1, logFilter: { address: usdcContractConfig.address }, }); @@ -124,7 +124,7 @@ test("insertLogFilterInterval merges ranges on insertion", async (context) => { [15495112, 15495112], ]); - await eventStore.insertLogFilterInterval({ + 
await syncStore.insertLogFilterInterval({ chainId: usdcContractConfig.chainId, logFilter: { address: usdcContractConfig.address }, block: blockTwo, @@ -136,7 +136,7 @@ test("insertLogFilterInterval merges ranges on insertion", async (context) => { }, }); - logFilterRanges = await eventStore.getLogFilterIntervals({ + logFilterRanges = await syncStore.getLogFilterIntervals({ chainId: 1, logFilter: { address: usdcContractConfig.address }, }); @@ -145,10 +145,10 @@ test("insertLogFilterInterval merges ranges on insertion", async (context) => { }); test("insertLogFilterInterval merges log intervals inserted concurrently", async (context) => { - const { eventStore } = context; + const { syncStore } = context; await Promise.all([ - eventStore.insertLogFilterInterval({ + syncStore.insertLogFilterInterval({ chainId: usdcContractConfig.chainId, logFilter: { address: usdcContractConfig.address }, block: blockOne, @@ -159,7 +159,7 @@ test("insertLogFilterInterval merges log intervals inserted concurrently", async endBlock: hexToBigInt(blockOne.number!), }, }), - eventStore.insertLogFilterInterval({ + syncStore.insertLogFilterInterval({ chainId: usdcContractConfig.chainId, logFilter: { address: usdcContractConfig.address }, block: blockTwo, @@ -170,7 +170,7 @@ test("insertLogFilterInterval merges log intervals inserted concurrently", async endBlock: hexToBigInt(blockTwo.number!), }, }), - eventStore.insertLogFilterInterval({ + syncStore.insertLogFilterInterval({ chainId: usdcContractConfig.chainId, logFilter: { address: usdcContractConfig.address }, block: blockThree, @@ -183,7 +183,7 @@ test("insertLogFilterInterval merges log intervals inserted concurrently", async }), ]); - const logFilterRanges = await eventStore.getLogFilterIntervals({ + const logFilterRanges = await syncStore.getLogFilterIntervals({ chainId: 1, logFilter: { address: usdcContractConfig.address }, }); @@ -192,9 +192,9 @@ test("insertLogFilterInterval merges log intervals inserted concurrently", async }); test("getLogFilterIntervals respects log filter inclusivity rules", async (context) => { - const { eventStore } = context; + const { syncStore } = context; - await eventStore.insertLogFilterInterval({ + await syncStore.insertLogFilterInterval({ chainId: 1, logFilter: { address: ["0xa", "0xb"], @@ -207,7 +207,7 @@ test("getLogFilterIntervals respects log filter inclusivity rules", async (conte }); // This is a narrower inclusion criteria on `address` and `topic0`. Full range is available. - let logFilterRanges = await eventStore.getLogFilterIntervals({ + let logFilterRanges = await syncStore.getLogFilterIntervals({ chainId: 1, logFilter: { address: ["0xa"], topics: [["0xc"], null, "0xe", null] }, }); @@ -215,7 +215,7 @@ test("getLogFilterIntervals respects log filter inclusivity rules", async (conte expect(logFilterRanges).toMatchObject([[0, 100]]); // This is a broader inclusion criteria on `address`. No ranges available. - logFilterRanges = await eventStore.getLogFilterIntervals({ + logFilterRanges = await syncStore.getLogFilterIntervals({ chainId: 1, logFilter: { address: undefined, topics: [["0xc"], null, "0xe", null] }, }); @@ -223,7 +223,7 @@ test("getLogFilterIntervals respects log filter inclusivity rules", async (conte expect(logFilterRanges).toMatchObject([]); // This is a narrower inclusion criteria on `topic1`. Full range available. 
- logFilterRanges = await eventStore.getLogFilterIntervals({ + logFilterRanges = await syncStore.getLogFilterIntervals({ chainId: 1, logFilter: { address: ["0xa"], topics: [["0xc"], "0xd", "0xe", null] }, }); @@ -232,9 +232,9 @@ test("getLogFilterIntervals respects log filter inclusivity rules", async (conte }); test("getLogFilterRanges handles complex log filter inclusivity rules", async (context) => { - const { eventStore } = context; + const { syncStore } = context; - await eventStore.insertLogFilterInterval({ + await syncStore.insertLogFilterInterval({ chainId: 1, logFilter: {}, block: blockOne, @@ -243,7 +243,7 @@ test("getLogFilterRanges handles complex log filter inclusivity rules", async (c interval: { startBlock: 0n, endBlock: 100n }, }); - await eventStore.insertLogFilterInterval({ + await syncStore.insertLogFilterInterval({ chainId: 1, logFilter: { topics: [null, ["0xc", "0xd"]] }, block: blockOne, @@ -253,14 +253,14 @@ test("getLogFilterRanges handles complex log filter inclusivity rules", async (c }); // Broad criteria only includes broad intervals. - let logFilterIntervals = await eventStore.getLogFilterIntervals({ + let logFilterIntervals = await syncStore.getLogFilterIntervals({ chainId: 1, logFilter: {}, }); expect(logFilterIntervals).toMatchObject([[0, 100]]); // Narrower criteria includes both broad and specific intervals. - logFilterIntervals = await eventStore.getLogFilterIntervals({ + logFilterIntervals = await syncStore.getLogFilterIntervals({ chainId: 1, logFilter: { topics: [null, "0xc"] }, }); @@ -271,19 +271,19 @@ test("getLogFilterRanges handles complex log filter inclusivity rules", async (c }); test("insertFactoryChildAddressLogs inserts logs", async (context) => { - const { eventStore } = context; + const { syncStore } = context; - await eventStore.insertFactoryChildAddressLogs({ + await syncStore.insertFactoryChildAddressLogs({ chainId: 1, logs: blockOneLogs, }); - const logs = await eventStore.db.selectFrom("logs").selectAll().execute(); + const logs = await syncStore.db.selectFrom("logs").selectAll().execute(); expect(logs).toHaveLength(2); }); test("getFactoryChildAddresses gets child addresses for topic location", async (context) => { - const { eventStore } = context; + const { syncStore } = context; const factoryCriteria = { address: "0xfactory", @@ -292,7 +292,7 @@ test("getFactoryChildAddresses gets child addresses for topic location", async ( childAddressLocation: "topic1", } satisfies FactoryCriteria; - await eventStore.insertFactoryChildAddressLogs({ + await syncStore.insertFactoryChildAddressLogs({ chainId: 1, logs: [ { @@ -318,7 +318,7 @@ test("getFactoryChildAddresses gets child addresses for topic location", async ( ], }); - let iterator = eventStore.getFactoryChildAddresses({ + let iterator = syncStore.getFactoryChildAddresses({ chainId: 1, factory: factoryCriteria, upToBlockNumber: 150n, @@ -332,7 +332,7 @@ test("getFactoryChildAddresses gets child addresses for topic location", async ( "0xchild30000000000000000000000000000000000", ]); - iterator = eventStore.getFactoryChildAddresses({ + iterator = syncStore.getFactoryChildAddresses({ chainId: 1, factory: { ...factoryCriteria, childAddressLocation: "topic2" }, upToBlockNumber: 150n, @@ -348,7 +348,7 @@ test("getFactoryChildAddresses gets child addresses for topic location", async ( }); test("getFactoryChildAddresses gets child addresses for offset location", async (context) => { - const { eventStore } = context; + const { syncStore } = context; const factoryCriteria = { address: "0xfactory", 
@@ -357,7 +357,7 @@ test("getFactoryChildAddresses gets child addresses for offset location", async childAddressLocation: "offset32", } satisfies FactoryCriteria; - await eventStore.insertFactoryChildAddressLogs({ + await syncStore.insertFactoryChildAddressLogs({ chainId: 1, logs: [ { @@ -381,7 +381,7 @@ test("getFactoryChildAddresses gets child addresses for offset location", async ], }); - const iterator = eventStore.getFactoryChildAddresses({ + const iterator = syncStore.getFactoryChildAddresses({ chainId: 1, factory: factoryCriteria, upToBlockNumber: 150n, @@ -397,7 +397,7 @@ test("getFactoryChildAddresses gets child addresses for offset location", async }); test("getFactoryChildAddresses respects upToBlockNumber argument", async (context) => { - const { eventStore } = context; + const { syncStore } = context; const factoryCriteria = { address: "0xfactory", @@ -406,7 +406,7 @@ test("getFactoryChildAddresses respects upToBlockNumber argument", async (contex childAddressLocation: "topic1", } satisfies FactoryCriteria; - await eventStore.insertFactoryChildAddressLogs({ + await syncStore.insertFactoryChildAddressLogs({ chainId: 1, logs: [ { @@ -430,7 +430,7 @@ test("getFactoryChildAddresses respects upToBlockNumber argument", async (contex ], }); - let iterator = eventStore.getFactoryChildAddresses({ + let iterator = syncStore.getFactoryChildAddresses({ chainId: 1, factory: factoryCriteria, upToBlockNumber: 150n, @@ -441,7 +441,7 @@ test("getFactoryChildAddresses respects upToBlockNumber argument", async (contex expect(results).toMatchObject(["0xchild10000000000000000000000000000000000"]); - iterator = eventStore.getFactoryChildAddresses({ + iterator = syncStore.getFactoryChildAddresses({ chainId: 1, factory: factoryCriteria, upToBlockNumber: 250n, @@ -457,7 +457,7 @@ test("getFactoryChildAddresses respects upToBlockNumber argument", async (contex }); test("getFactoryChildAddresses paginates correctly", async (context) => { - const { eventStore } = context; + const { syncStore } = context; const factoryCriteria = { address: "0xfactory", @@ -466,7 +466,7 @@ test("getFactoryChildAddresses paginates correctly", async (context) => { childAddressLocation: "topic1", } satisfies FactoryCriteria; - await eventStore.insertFactoryChildAddressLogs({ + await syncStore.insertFactoryChildAddressLogs({ chainId: 1, logs: [ { @@ -499,7 +499,7 @@ test("getFactoryChildAddresses paginates correctly", async (context) => { ], }); - const iterator = eventStore.getFactoryChildAddresses({ + const iterator = syncStore.getFactoryChildAddresses({ chainId: 1, factory: factoryCriteria, upToBlockNumber: 1000n, @@ -527,7 +527,7 @@ test("getFactoryChildAddresses paginates correctly", async (context) => { }); test("getFactoryChildAddresses does not yield empty list", async (context) => { - const { eventStore } = context; + const { syncStore } = context; const factoryCriteria = { address: "0xfactory", @@ -536,7 +536,7 @@ test("getFactoryChildAddresses does not yield empty list", async (context) => { childAddressLocation: "topic1", } satisfies FactoryCriteria; - const iterator = eventStore.getFactoryChildAddresses({ + const iterator = syncStore.getFactoryChildAddresses({ chainId: 1, factory: factoryCriteria, upToBlockNumber: 1000n, @@ -552,7 +552,7 @@ test("getFactoryChildAddresses does not yield empty list", async (context) => { }); test("insertFactoryLogFilterInterval inserts block, transactions, and logs", async (context) => { - const { eventStore } = context; + const { syncStore } = context; const factoryCriteria = { 
     address: "0xfactory",
@@ -561,7 +561,7 @@ test("insertFactoryLogFilterInterval inserts block, transactions, and logs", asy
     childAddressLocation: "topic1",
   } satisfies FactoryCriteria;
 
-  await eventStore.insertFactoryLogFilterInterval({
+  await syncStore.insertFactoryLogFilterInterval({
     chainId: 1,
     factory: factoryCriteria,
     block: blockOne,
@@ -570,21 +570,21 @@ test("insertFactoryLogFilterInterval inserts block, transactions, and logs", asy
     interval: { startBlock: 0n, endBlock: 500n },
   });
 
-  const blocks = await eventStore.db.selectFrom("blocks").selectAll().execute();
+  const blocks = await syncStore.db.selectFrom("blocks").selectAll().execute();
   expect(blocks).toHaveLength(1);
 
-  const transactions = await eventStore.db
+  const transactions = await syncStore.db
     .selectFrom("transactions")
    .selectAll()
     .execute();
   expect(transactions).toHaveLength(2);
 
-  const logs = await eventStore.db.selectFrom("logs").selectAll().execute();
+  const logs = await syncStore.db.selectFrom("logs").selectAll().execute();
   expect(logs).toHaveLength(2);
 });
 
 test("insertFactoryLogFilterInterval inserts and merges child contract intervals", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
   const factoryCriteria = {
     address: "0xfactory",
@@ -593,7 +593,7 @@ test("insertFactoryLogFilterInterval inserts and merges child contract intervals
     childAddressLocation: "topic1",
   } satisfies FactoryCriteria;
 
-  await eventStore.insertFactoryLogFilterInterval({
+  await syncStore.insertFactoryLogFilterInterval({
     chainId: 1,
     factory: factoryCriteria,
     block: blockOne,
@@ -602,7 +602,7 @@ test("insertFactoryLogFilterInterval inserts and merges child contract intervals
     interval: { startBlock: 0n, endBlock: 500n },
   });
 
-  await eventStore.insertFactoryLogFilterInterval({
+  await syncStore.insertFactoryLogFilterInterval({
     chainId: 1,
     factory: factoryCriteria,
     block: blockThree,
@@ -611,7 +611,7 @@ test("insertFactoryLogFilterInterval inserts and merges child contract intervals
     interval: { startBlock: 750n, endBlock: 1000n },
   });
 
-  let intervals = await eventStore.getFactoryLogFilterIntervals({
+  let intervals = await syncStore.getFactoryLogFilterIntervals({
     chainId: 1,
     factory: factoryCriteria,
   });
@@ -621,7 +621,7 @@ test("insertFactoryLogFilterInterval inserts and merges child contract intervals
     [750, 1000],
   ]);
 
-  await eventStore.insertFactoryLogFilterInterval({
+  await syncStore.insertFactoryLogFilterInterval({
     chainId: 1,
     factory: factoryCriteria,
     block: blockTwo,
@@ -630,7 +630,7 @@ test("insertFactoryLogFilterInterval inserts and merges child contract intervals
     interval: { startBlock: 501n, endBlock: 800n },
   });
 
-  intervals = await eventStore.getFactoryLogFilterIntervals({
+  intervals = await syncStore.getFactoryLogFilterIntervals({
     chainId: 1,
     factory: factoryCriteria,
   });
@@ -639,7 +639,7 @@ test("insertFactoryLogFilterInterval inserts and merges child contract intervals
 });
 
 test("getFactoryLogFilterIntervals handles topic filtering rules", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
   const factoryCriteria = {
     address: "0xfactory",
@@ -648,7 +648,7 @@ test("getFactoryLogFilterIntervals handles topic filtering rules", async (contex
     childAddressLocation: "topic1",
   } satisfies FactoryCriteria;
 
-  await eventStore.insertFactoryLogFilterInterval({
+  await syncStore.insertFactoryLogFilterInterval({
     chainId: 1,
     factory: factoryCriteria,
     block: blockOne,
@@ -657,14 +657,14 @@ test("getFactoryLogFilterIntervals handles topic filtering rules", async (contex
     interval: { startBlock: 0n, endBlock: 500n },
   });
 
-  let intervals = await eventStore.getFactoryLogFilterIntervals({
+  let intervals = await syncStore.getFactoryLogFilterIntervals({
     chainId: 1,
     factory: factoryCriteria,
   });
 
   expect(intervals).toMatchObject([[0, 500]]);
 
-  intervals = await eventStore.getFactoryLogFilterIntervals({
+  intervals = await syncStore.getFactoryLogFilterIntervals({
     chainId: 1,
     factory: {
       ...factoryCriteria,
@@ -678,30 +678,30 @@ test("getFactoryLogFilterIntervals handles topic filtering rules", async (contex
 });
 
 test("insertRealtimeBlock inserts data", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockOne,
     transactions: blockOneTransactions,
     logs: blockOneLogs,
   });
 
-  const blocks = await eventStore.db.selectFrom("blocks").selectAll().execute();
+  const blocks = await syncStore.db.selectFrom("blocks").selectAll().execute();
   expect(blocks).toHaveLength(1);
 
-  const transactions = await eventStore.db
+  const transactions = await syncStore.db
     .selectFrom("transactions")
     .selectAll()
     .execute();
   expect(transactions).toHaveLength(2);
 
-  const logs = await eventStore.db.selectFrom("logs").selectAll().execute();
+  const logs = await syncStore.db.selectFrom("logs").selectAll().execute();
   expect(logs).toHaveLength(2);
 });
 
 test("insertRealtimeInterval inserts log filter intervals", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
   const logFilterCriteria = {
     address: usdcContractConfig.address,
@@ -719,7 +719,7 @@ test("insertRealtimeInterval inserts log filter intervals", async (context) => {
     childAddressLocation: "offset64",
   } satisfies FactoryCriteria;
 
-  await eventStore.insertRealtimeInterval({
+  await syncStore.insertRealtimeInterval({
     chainId: 1,
     logFilters: [logFilterCriteria],
     factories: [factoryCriteriaOne, factoryCriteriaTwo],
@@ -727,7 +727,7 @@ test("insertRealtimeInterval inserts log filter intervals", async (context) => {
   });
 
   expect(
-    await eventStore.getLogFilterIntervals({
+    await syncStore.getLogFilterIntervals({
       chainId: 1,
       logFilter: logFilterCriteria,
    })
@@ -735,7 +735,7 @@ test("insertRealtimeInterval inserts log filter intervals", async (context) => {
 
   // Confirm log filters have been inserted for factory child address logs.
   expect(
-    await eventStore.getLogFilterIntervals({
+    await syncStore.getLogFilterIntervals({
       chainId: 1,
       logFilter: {
         address: factoryCriteriaOne.address,
@@ -744,7 +744,7 @@ test("insertRealtimeInterval inserts log filter intervals", async (context) => {
     })
   ).toMatchObject([[500, 550]]);
   expect(
-    await eventStore.getLogFilterIntervals({
+    await syncStore.getLogFilterIntervals({
      chainId: 1,
       logFilter: {
         address: factoryCriteriaOne.address,
@@ -755,13 +755,13 @@ test("insertRealtimeInterval inserts log filter intervals", async (context) => {
 
   // Also confirm factory log filters have been inserted.
   expect(
-    await eventStore.getFactoryLogFilterIntervals({
+    await syncStore.getFactoryLogFilterIntervals({
       chainId: 1,
      factory: factoryCriteriaOne,
     })
   ).toMatchObject([[500, 550]]);
   expect(
-    await eventStore.getFactoryLogFilterIntervals({
+    await syncStore.getFactoryLogFilterIntervals({
       chainId: 1,
       factory: factoryCriteriaTwo,
     })
@@ -769,9 +769,9 @@ test("insertRealtimeInterval inserts log filter intervals", async (context) => {
 });
 
 test("deleteRealtimeData deletes blocks, transactions and logs", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertLogFilterInterval({
+  await syncStore.insertLogFilterInterval({
     chainId: usdcContractConfig.chainId,
     logFilter: { address: usdcContractConfig.address },
     block: blockOne,
@@ -783,7 +783,7 @@ test("deleteRealtimeData deletes blocks, transactions and logs", async (context)
     },
   });
 
-  await eventStore.insertLogFilterInterval({
+  await syncStore.insertLogFilterInterval({
     chainId: usdcContractConfig.chainId,
     logFilter: { address: usdcContractConfig.address },
     block: blockTwo,
@@ -795,38 +795,38 @@ test("deleteRealtimeData deletes blocks, transactions and logs", async (context)
     },
   });
 
-  let blocks = await eventStore.db.selectFrom("blocks").selectAll().execute();
+  let blocks = await syncStore.db.selectFrom("blocks").selectAll().execute();
   expect(blocks).toHaveLength(2);
 
-  let transactions = await eventStore.db
+  let transactions = await syncStore.db
     .selectFrom("transactions")
     .selectAll()
     .execute();
   expect(transactions).toHaveLength(3);
 
-  let logs = await eventStore.db.selectFrom("logs").selectAll().execute();
+  let logs = await syncStore.db.selectFrom("logs").selectAll().execute();
   expect(logs).toHaveLength(3);
 
-  await eventStore.deleteRealtimeData({
+  await syncStore.deleteRealtimeData({
     chainId: usdcContractConfig.chainId,
     fromBlock: hexToBigInt(blockOne.number!),
   });
 
-  blocks = await eventStore.db.selectFrom("blocks").selectAll().execute();
+  blocks = await syncStore.db.selectFrom("blocks").selectAll().execute();
   expect(blocks).toHaveLength(1);
 
-  transactions = await eventStore.db
+  transactions = await syncStore.db
     .selectFrom("transactions")
     .selectAll()
     .execute();
   expect(transactions).toHaveLength(2);
 
-  logs = await eventStore.db.selectFrom("logs").selectAll().execute();
+  logs = await syncStore.db.selectFrom("logs").selectAll().execute();
   expect(logs).toHaveLength(2);
 });
 
 test("deleteRealtimeData updates interval data", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
   const logFilterCriteria = {
     address: usdcContractConfig.address,
@@ -838,7 +838,7 @@ test("deleteRealtimeData updates interval data", async (context) => {
     childAddressLocation: "topic1",
   } satisfies FactoryCriteria;
 
-  await eventStore.insertLogFilterInterval({
+  await syncStore.insertLogFilterInterval({
     chainId: usdcContractConfig.chainId,
     logFilter: logFilterCriteria,
     block: blockTwo,
@@ -850,7 +850,7 @@ test("deleteRealtimeData updates interval data", async (context) => {
     },
   });
 
-  await eventStore.insertFactoryLogFilterInterval({
+  await syncStore.insertFactoryLogFilterInterval({
     chainId: 1,
     factory: factoryCriteria,
     block: blockTwo,
@@ -863,33 +863,33 @@ test("deleteRealtimeData updates interval data", async (context) => {
   });
 
   expect(
-    await eventStore.getLogFilterIntervals({
+    await syncStore.getLogFilterIntervals({
      chainId: 1,
       logFilter: logFilterCriteria,
     })
  ).toMatchObject([[15495110, 15495111]]);
 
   expect(
-    await eventStore.getFactoryLogFilterIntervals({
+    await syncStore.getFactoryLogFilterIntervals({
       chainId: 1,
       factory: factoryCriteria,
     })
   ).toMatchObject([[15495110, 15495111]]);
 
-  await eventStore.deleteRealtimeData({
+  await syncStore.deleteRealtimeData({
     chainId: usdcContractConfig.chainId,
     fromBlock: hexToBigInt(blockOne.number!),
   });
 
   expect(
-    await eventStore.getLogFilterIntervals({
+    await syncStore.getLogFilterIntervals({
       chainId: 1,
      logFilter: logFilterCriteria,
     })
   ).toMatchObject([[15495110, 15495110]]);
 
   expect(
-    await eventStore.getFactoryLogFilterIntervals({
+    await syncStore.getFactoryLogFilterIntervals({
       chainId: 1,
       factory: factoryCriteria,
     })
@@ -897,16 +897,16 @@ test("deleteRealtimeData updates interval data", async (context) => {
 });
 
 test("insertRpcRequestResult inserts a request result", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertRpcRequestResult({
+  await syncStore.insertRpcRequestResult({
     chainId: 1,
     request: "0x123",
     blockNumber: 100n,
     result: "0x789",
   });
 
-  const rpcRequestResults = await eventStore.db
+  const rpcRequestResults = await syncStore.db
     .selectFrom("rpcRequestResults")
     .selectAll()
     .execute();
@@ -920,16 +920,16 @@ test("insertRpcRequestResult inserts a request result", async (context) => {
 });
 
 test("insertRpcRequestResult upserts on conflict", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertRpcRequestResult({
+  await syncStore.insertRpcRequestResult({
     chainId: 1,
     request: "0x123",
     blockNumber: 100n,
     result: "0x789",
   });
 
-  const rpcRequestResult = await eventStore.db
+  const rpcRequestResult = await syncStore.db
     .selectFrom("rpcRequestResults")
     .selectAll()
     .execute();
@@ -940,14 +940,14 @@ test("insertRpcRequestResult upserts on conflict", async (context) => {
     result: "0x789",
   });
 
-  await eventStore.insertRpcRequestResult({
+  await syncStore.insertRpcRequestResult({
     chainId: 1,
     request: "0x123",
     blockNumber: 100n,
     result: "0x789123",
   });
 
-  const rpcRequestResultsUpdated = await eventStore.db
+  const rpcRequestResultsUpdated = await syncStore.db
     .selectFrom("rpcRequestResults")
     .selectAll()
     .execute();
@@ -960,16 +960,16 @@ test("insertRpcRequestResult upserts on conflict", async (context) => {
 });
 
 test("getRpcRequestResult returns data", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertRpcRequestResult({
+  await syncStore.insertRpcRequestResult({
     chainId: 1,
     request: "0x123",
     blockNumber: 100n,
     result: "0x789",
   });
 
-  const rpcRequestResult = await eventStore.getRpcRequestResult({
+  const rpcRequestResult = await syncStore.getRpcRequestResult({
     chainId: 1,
     request: "0x123",
     blockNumber: 100n,
@@ -984,16 +984,16 @@ test("getRpcRequestResult returns data", async (context) => {
 });
 
 test("getRpcRequestResult returns null if not found", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertRpcRequestResult({
+  await syncStore.insertRpcRequestResult({
     chainId: 1,
     request: "0x123",
     blockNumber: 100n,
     result: "0x789",
   });
 
-  const rpcRequestResult = await eventStore.getRpcRequestResult({
+  const rpcRequestResult = await syncStore.getRpcRequestResult({
     request: "0x125",
     chainId: 1,
     blockNumber: 100n,
@@ -1003,16 +1003,16 @@ test("getRpcRequestResult returns null if not found", async (context) => {
 });
 
 test("getLogEvents returns log events", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockOne,
     transactions: blockOneTransactions,
     logs: blockOneLogs,
   });
 
-  const iterator = eventStore.getLogEvents({
+  const iterator = syncStore.getLogEvents({
     fromTimestamp: 0,
     toTimestamp: Number.MAX_SAFE_INTEGER,
     logFilters: [{ name: "noFilter", chainId: 1, criteria: {} }],
@@ -1154,16 +1154,16 @@ test("getLogEvents returns log events", async (context) => {
 });
 
 test("getLogEvents filters on log filter with one address", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockOne,
     transactions: blockOneTransactions,
     logs: blockOneLogs,
   });
 
-  const iterator = eventStore.getLogEvents({
+  const iterator = syncStore.getLogEvents({
     fromTimestamp: 0,
     toTimestamp: Number.MAX_SAFE_INTEGER,
     logFilters: [
@@ -1182,23 +1182,23 @@ test("getLogEvents filters on log filter with one address", async (context) => {
 });
 
 test("getLogEvents filters on log filter with multiple addresses", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockOne,
     transactions: blockOneTransactions,
     logs: blockOneLogs,
   });
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockTwo,
     transactions: blockTwoTransactions,
     logs: blockTwoLogs,
   });
 
-  const iterator = eventStore.getLogEvents({
+  const iterator = syncStore.getLogEvents({
     fromTimestamp: 0,
     toTimestamp: Number.MAX_SAFE_INTEGER,
     logFilters: [
@@ -1230,23 +1230,23 @@ test("getLogEvents filters on log filter with multiple addresses", async (contex
 });
 
 test("getLogEvents filters on log filter with single topic", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockOne,
     transactions: blockOneTransactions,
     logs: blockOneLogs,
   });
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockTwo,
     transactions: blockTwoTransactions,
     logs: blockTwoLogs,
   });
 
-  const iterator = eventStore.getLogEvents({
+  const iterator = syncStore.getLogEvents({
     fromTimestamp: 0,
     toTimestamp: Number.MAX_SAFE_INTEGER,
     logFilters: [
@@ -1278,23 +1278,23 @@ test("getLogEvents filters on log filter with single topic", async (context) =>
 });
 
 test("getLogEvents filters on log filter with multiple topics", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockOne,
     transactions: blockOneTransactions,
     logs: blockOneLogs,
   });
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockTwo,
     transactions: blockTwoTransactions,
     logs: blockTwoLogs,
   });
 
-  const iterator = eventStore.getLogEvents({
+  const iterator = syncStore.getLogEvents({
     fromTimestamp: 0,
     toTimestamp: Number.MAX_SAFE_INTEGER,
     logFilters: [
@@ -1323,9 +1323,9 @@ test("getLogEvents filters on multiple topics", async (context)
 });
 
 test("getLogEvents filters on simple factory", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertFactoryChildAddressLogs({
+  await syncStore.insertFactoryChildAddressLogs({
     chainId: 1,
     logs: [
       {
@@ -1340,7 +1340,7 @@ test("getLogEvents filters on simple factory", async (context) => {
     ],
   });
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockTwo,
     transactions: blockTwoTransactions,
@@ -1350,7 +1350,7 @@ test("getLogEvents filters on simple factory", async (context) => {
     })),
   });
 
-  const iterator = eventStore.getLogEvents({
+  const iterator = syncStore.getLogEvents({
     fromTimestamp: 0,
     toTimestamp: Number.MAX_SAFE_INTEGER,
     factories: [
@@ -1377,23 +1377,23 @@ test("getLogEvents filters on simple factory", async (context) => {
 });
 
 test("getLogEvents filters on fromBlock", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockOne,
     transactions: blockOneTransactions,
     logs: blockOneLogs,
   });
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockTwo,
     transactions: blockTwoTransactions,
     logs: blockTwoLogs,
   });
 
-  const iterator = eventStore.getLogEvents({
+  const iterator = syncStore.getLogEvents({
     fromTimestamp: 0,
     toTimestamp: Number.MAX_SAFE_INTEGER,
     logFilters: [
@@ -1418,23 +1418,23 @@ test("getLogEvents filters on fromBlock", async (context) => {
 });
 
 test("getLogEvents filters on multiple filters", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockOne,
     transactions: blockOneTransactions,
     logs: blockOneLogs,
   });
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockTwo,
     transactions: blockTwoTransactions,
     logs: blockTwoLogs,
   });
 
-  const iterator = eventStore.getLogEvents({
+  const iterator = syncStore.getLogEvents({
     fromTimestamp: 0,
     toTimestamp: Number.MAX_SAFE_INTEGER,
     logFilters: [
@@ -1476,23 +1476,23 @@ test("getLogEvents filters on multiple filters", async (context) => {
 });
 
 test("getLogEvents filters on fromTimestamp (inclusive)", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockOne,
     transactions: blockOneTransactions,
     logs: blockOneLogs,
   });
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockTwo,
     transactions: blockTwoTransactions,
     logs: blockTwoLogs,
   });
 
-  const iterator = eventStore.getLogEvents({
+  const iterator = syncStore.getLogEvents({
     fromTimestamp: hexToNumber(blockTwo.timestamp!),
     toTimestamp: Number.MAX_SAFE_INTEGER,
     logFilters: [{ name: "noFilter", chainId: 1, criteria: {} }],
@@ -1505,23 +1505,23 @@ test("getLogEvents filters on fromTimestamp (inclusive)", async (context) => {
 });
 
 test("getLogEvents filters on toTimestamp (inclusive)", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockOne,
     transactions: blockOneTransactions,
     logs: blockOneLogs,
   });
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockTwo,
     transactions: blockTwoTransactions,
     logs: blockTwoLogs,
   });
 
-  const iterator = eventStore.getLogEvents({
+  const iterator = syncStore.getLogEvents({
     fromTimestamp: 0,
     toTimestamp: hexToNumber(blockOne.timestamp!),
     logFilters: [{ name: "noFilter", chainId: 1, criteria: {} }],
@@ -1537,23 +1537,23 @@ test("getLogEvents filters on toTimestamp (inclusive)", async (context) => {
 });
 
 test("getLogEvents returns no events if includeEventSelectors is an empty array", async (context) => {
-  const { eventStore } = context;
+  const { syncStore } = context;
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockOne,
     transactions: blockOneTransactions,
     logs: blockOneLogs,
   });
 
-  await eventStore.insertRealtimeBlock({
+  await syncStore.insertRealtimeBlock({
     chainId: 1,
     block: blockTwo,
     transactions: blockTwoTransactions,
     logs: blockTwoLogs,
   });
 
-  const iterator = eventStore.getLogEvents({
+  const iterator = syncStore.getLogEvents({
     fromTimestamp: 0,
     toTimestamp: Number.MAX_SAFE_INTEGER,
     logFilters: [
diff --git a/packages/core/src/event-store/store.ts b/packages/core/src/sync-store/store.ts
similarity index 99%
rename from packages/core/src/event-store/store.ts
rename to packages/core/src/sync-store/store.ts
index 63cc4f8e1..1ad383094 100644
--- a/packages/core/src/event-store/store.ts
+++ b/packages/core/src/sync-store/store.ts
@@ -6,7 +6,7 @@ import type { Block } from "@/types/block";
 import type { Log } from "@/types/log";
 import type { Transaction } from "@/types/transaction";
 
-export interface EventStore {
+export interface SyncStore {
   kind: "sqlite" | "postgres";
   db: Kysely;
   migrator: Migrator;
diff --git a/packages/core/src/types/model.ts b/packages/core/src/types/model.ts
index b2ac7cebc..44d81acf2 100644
--- a/packages/core/src/types/model.ts
+++ b/packages/core/src/types/model.ts
@@ -1,4 +1,4 @@
-import type { OrderByInput, WhereInput } from "@/user-store/store";
+import type { OrderByInput, WhereInput } from "@/indexing-store/store";
 import type {
   HasOnlyIdProperty,