diff --git a/.changeset/curvy-hats-rest.md b/.changeset/curvy-hats-rest.md
new file mode 100644
index 000000000..b009f610f
--- /dev/null
+++ b/.changeset/curvy-hats-rest.md
@@ -0,0 +1,5 @@
+---
+"@ponder/core": minor
+---
+
+BREAKING: Migrated `ponder.schema.ts` to use Drizzle table definitions. Migrated indexing store API to be compatible with Drizzle table objects. Read the [migration guide](https://ponder-docs-git-kjs-offchain-ponder-sh.vercel.app/docs/migration-guide#070) for more details.
diff --git a/.changeset/pre.json b/.changeset/pre.json
new file mode 100644
index 000000000..62454315d
--- /dev/null
+++ b/.changeset/pre.json
@@ -0,0 +1,12 @@
+{
+ "mode": "pre",
+ "tag": "next",
+ "initialVersions": {
+ "@ponder/common": "0.0.0",
+ "@ponder/core": "0.6.7",
+ "create-ponder": "0.6.7",
+ "eslint-config-ponder": "0.6.7",
+ "@ponder/utils": "0.2.1"
+ },
+ "changesets": []
+}
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index 17a4d6f10..26769b79d 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -119,7 +119,7 @@ When adding new features or fixing bugs, it's important to add test cases to cov
### Run tests against Postgres
-By default, the test suite runs against in-memory SQLite databases which mimic Ponder development environments. Unless you are specifically testing Postgres behavior, you don't need to run tests against Postgres locally and can instead rely on CI to catch any regressions.
+By default, the test suite runs against in-memory PGlite databases which mimic Ponder development environments. Unless you are specifically testing Postgres behavior, you don't need to run tests against Postgres locally and can instead rely on CI to catch any regressions.
To run the test suite against Postgres, set the `DATABASE_URL` env var in `packages/core/.env.local`.
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index d8bf7cdea..405dea166 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -53,7 +53,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- database: [Postgres, SQLite]
+ database: [Postgres, PGlite]
steps:
- name: Clone repository
uses: actions/checkout@v4
@@ -112,7 +112,7 @@ jobs:
with:
cache: pnpm
node-version: 20
-
+
- name: Install Bun
if: ${{ matrix.runtime-version == 'Bun' }}
uses: oven-sh/setup-bun@v1
diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml
index 4e70bfb1e..d2e69d275 100644
--- a/.github/workflows/windows.yml
+++ b/.github/workflows/windows.yml
@@ -13,7 +13,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- database: [Postgres, SQLite]
+        database: [Postgres, PGlite]
steps:
- name: Clone repository
uses: actions/checkout@v4
@@ -48,4 +48,4 @@ jobs:
- name: Test
run: pnpm --filter create-ponder test
env:
- ETHERSCAN_API_KEY: ${{ secrets.ETHERSCAN_API_KEY }}
\ No newline at end of file
+ ETHERSCAN_API_KEY: ${{ secrets.ETHERSCAN_API_KEY }}
diff --git a/docs/pages/docs/migration-guide.mdx b/docs/pages/docs/migration-guide.mdx
index 4a3ca06eb..207ea584b 100644
--- a/docs/pages/docs/migration-guide.mdx
+++ b/docs/pages/docs/migration-guide.mdx
@@ -8,6 +8,468 @@ import Architecture from "../../public/architecture.svg";
# Migration guide
+## 0.7.0-next.1
+
+This release migrates to a new schema definition and database API. It also adds native support for offchain data.
+
+The `0.7.0` release contains several breaking changes. We recommend creating a new git branch for the migration.
+
+### Install & run codegen
+
+{/* prettier-ignore */}
+
+
+```bash filename="shell"
+pnpm add @ponder/core@0.7.0-next.1
+```
+
+
+```bash filename="shell"
+yarn add @ponder/core@0.7.0-next.1
+```
+
+
+```bash filename="shell"
+npm add @ponder/core@0.7.0-next.1
+```
+
+
+
+To ensure strong type safety during the migration, regenerate `ponder-env.d.ts`.
+
+{/* prettier-ignore */}
+
+
+```bash filename="shell"
+pnpm codegen
+```
+
+
+```bash filename="shell"
+yarn codegen
+```
+
+
+```bash filename="shell"
+npm run codegen
+```
+
+
+
+### Migrate `ponder.schema.ts`
+
+Here's a table defined with the new schema definition API, which uses [Drizzle](https://orm.drizzle.team/docs/overview) under the hood.
+
+```ts filename="ponder.schema.ts (after)"
+import { onchainTable } from "@ponder/core/db";
+
+export const accounts = onchainTable("account", (p) => ({
+ address: p.evmHex().primaryKey(),
+ daiBalance: p.evmBigint().notNull(),
+ isAdmin: p.boolean().notNull(),
+ graffiti: p.string(),
+}));
+```
+
+Key changes:
+
+1. Declare tables with the `onchainTable` function exported from `@ponder/core`
+2. Prefer pluralized table names
+3. Export all table objects from `ponder.schema.ts`
+4. Use `.primaryKey()` to mark the primary key column
+5. Columns are nullable by default, use `.notNull()` to add the constraint
+6. `p.hex()` and `p.bigint()` renamed to `p.evmHex()` and `p.evmBigint()`
+
+The new `onchainTable` function adds support for several new capabilities.
+
+- Custom primary key column name (other than `id`)
+- Composite primary keys
+- Default column values
+- Serial (autoincrementing) column types
+
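+For example, default column values follow the familiar Drizzle column API. Here is a minimal sketch, assuming the builder exposes the standard Drizzle `.default()` modifier (the `token` table and its columns are hypothetical):
+
+```ts filename="ponder.schema.ts"
+import { onchainTable } from "@ponder/core/db";
+
+export const token = onchainTable("token", (t) => ({
+  address: t.evmHex().primaryKey(),
+  // Assumption: defaults use the standard Drizzle `.default()` modifier.
+  totalSupply: t.evmBigint().notNull().default(0n),
+  decimals: t.integer().notNull().default(18),
+}));
+```
+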
+Here's a more advanced example with indexes, a serial column, and a composite primary key.
+
+```ts filename="ponder.schema.ts"
+import { onchainTable, index, primaryKey } from "@ponder/core/db";
+
+export const transferEvents = onchainTable(
+ "transfer_event",
+ (t) => ({
+ id: t.serial().primaryKey(),
+ amount: t.evmBigint().notNull(),
+ timestamp: t.integer().notNull(),
+ from: t.evmHex().notNull(),
+ to: t.evmHex().notNull(),
+ }),
+ (table) => ({
+ fromIdx: index().on(table.from),
+ })
+);
+
+export const allowance = onchainTable(
+ "allowance",
+ (t) => ({
+ owner: t.evmHex(),
+ spender: t.evmHex(),
+ amount: t.evmBigint().notNull(),
+ }),
+ (table) => ({
+ pk: primaryKey({ columns: [table.owner, table.spender] }),
+ })
+);
+
+export const approvalEvent = onchainTable("approval_event", (t) => ({
+ id: t.serial().primaryKey(),
+ amount: t.evmBigint().notNull(),
+ timestamp: t.integer().notNull(),
+ owner: t.evmHex().notNull(),
+ spender: t.evmHex().notNull(),
+}));
+```
+
+### Migrate indexing functions
+
+This release updates the indexing function database API to offer a unified SQL experience based on Drizzle.
+
+Here's an indexing function defined with the new API, which uses the table objects exported from `ponder.schema.ts`.
+
+{/* prettier-ignore */}
+```ts filename="src/index.ts"
+import { ponder } from "@/generated";
+import { account } from "../ponder.schema";
+
+ponder.on("ERC20:Transfer", async ({ event, context }) => {
+ await context.db
+ .upsert(account, { address: event.args.from })
+ .insert({ balance: 0n, isOwner: false })
+ .update((row) => ({
+ balance: row.balance - event.args.amount,
+ }));
+});
+```
+
+Key changes:
+
+1. Transition from the ORM pattern `db.Account.create({ ... }){:ts}` to the query builder pattern `db.insert(account).values({ ... }){:ts}`
+2. Import table objects from `ponder.schema.ts`
+3. Replace `findMany` with `db.sql.select(...)` or `db.sql.query(...)`
+
+Here is a simple migration example to familiarize yourself with the API.
+
+
+
+```ts filename="src/index.ts (<=0.6)"
+// Create a single allowance
+await context.db.Allowance.create({
+ id: event.log.id,
+ data: {
+ owner: event.args.owner,
+ spender: event.args.spender,
+ amount: event.args.amount,
+ },
+});
+```
+
+{/* prettier-ignore */}
+```ts filename="src/index.ts (0.7)"
+import { allowance } from "../ponder.schema";
+
+// Create a single allowance
+await context.db
+ .insert(allowance)
+ .values({
+ owner: event.args.owner,
+ spender: event.args.spender,
+ amount: event.args.amount,
+ });
+```
+
+
+
+Here is a reference for how to migrate each method.
+
+```ts filename="src/index.ts"
+// create -> insert
+await context.db.Account.create({
+ id: event.args.from,
+ data: { balance: 0n },
+});
+await context.db.insert(account).values({ id: event.args.from, balance: 0n });
+
+// createMany -> insert
+await context.db.Account.createMany({
+ data: [
+ { id: event.args.from, balance: 0n },
+ { id: event.args.to, balance: 0n },
+ ],
+});
+await context.db.insert(account).values([
+ { id: event.args.from, balance: 0n },
+ { id: event.args.to, balance: 0n },
+]);
+
+// findUnique -> find
+await context.db.Account.findUnique({ id: event.args.from });
+await context.db.find(account, { address: event.args.from });
+
+// update
+await context.db.Account.update({
+ id: event.args.from,
+ data: ({ current }) => ({ balance: current.balance + 100n }),
+});
+await context.db
+ .update(account, { address: event.args.from })
+ .set((row) => ({ balance: row.balance + 100n }));
+
+// upsert
+await context.db.Account.upsert({
+ id: event.args.from,
+ create: { balance: 0n },
+ update: ({ current }) => ({ balance: current.balance + 100n }),
+});
+await context.db
+ .upsert(account, { address: event.args.from })
+ .insert({ balance: 0n })
+ .update((row) => ({ balance: row.balance + 100n }));
+
+// delete
+await context.db.Account.delete({ id: event.args.from });
+await context.db.delete(account, { address: event.args.from });
+
+// findMany -> select
+await context.db.Account.findMany({ where: { balance: { gt: 100n } } });
+await context.db.sql.select().from(account).where(gt(account.balance, 100n));
+```
+
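+The SQL operators used with `db.sql` (`eq`, `desc`, and friends) are imported from `@ponder/core/db`. Here is a minimal sketch of the `findMany` migration above with its imports included, assuming `gt` is re-exported alongside the other operators:
+
+{/* prettier-ignore */}
+```ts filename="src/index.ts"
+import { ponder } from "@/generated";
+import { gt } from "@ponder/core/db";
+import { account } from "../ponder.schema";
+
+ponder.on("ERC20:Transfer", async ({ event, context }) => {
+  // Equivalent of the old `findMany({ where: { balance: { gt: 100n } } })`.
+  const rows = await context.db.sql
+    .select()
+    .from(account)
+    .where(gt(account.balance, 100n));
+});
+```
+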
+Finally, here is another migration example, this time for an ERC20 Transfer indexing function using `upsert`.
+
+
+
+{/* prettier-ignore */}
+```ts filename="src/index.ts"
+import { ponder } from "@/generated";
+
+ponder.on("ERC20:Transfer", async ({ event, context }) => {
+ const { Account, TransferEvent } = context.db;
+
+ await Account.upsert({
+ id: event.args.from,
+ create: {
+ balance: BigInt(0),
+ isOwner: false,
+ },
+ update: ({ current }) => ({
+ balance: current.balance - event.args.amount,
+ }),
+ });
+});
+```
+
+{/* prettier-ignore */}
+```ts filename="src/index.ts"
+import { ponder } from "@/generated";
+import { account } from "../ponder.schema";
+
+ponder.on("ERC20:Transfer", async ({ event, context }) => {
+ await context.db
+ .upsert(account, { address: event.args.from })
+ .insert({ balance: 0n, isOwner: false })
+ .update((row) => ({
+ balance: row.balance - event.args.amount,
+ }));
+});
+```
+
+
+
+**Direct SQL API**
+
+The `context.db.sql` interface replaces the rigid `findMany` method and supports any valid SQL `select` query.
+
+```ts filename="src/index.ts"
+import { desc } from "@ponder/core/db";
+import { account } from "../ponder.schema";
+
+ponder.on("...", ({ event, context }) => {
+ const result = await context.db.sql
+ .select()
+ .from(account)
+ .orderBy(desc(account.balance))
+ .limit(1);
+});
+```
+
+### Offchain Tables
+
+This release adds support for offchain tables. These are "normal" tables that persist across reloads and redeployments and have no special reorg handling properties.
+
+You can read and write offchain tables in API functions, and read (but not write) them in indexing functions.
+
+| | onchain tables | offchain tables |
+| ------------------ | -------------- | --------------- |
+| indexing functions | read + write | read |
+| api functions | read | read + write |
+
+Use the `offchainTable()` function to define offchain tables in `ponder.schema.ts`.
+
+
+
+#### Add an offchain table
+
+```ts filename="ponder.schema.ts"
+import { offchainTable, text, integer } from "@ponder/core/db";
+
+export const author = offchainTable("author", {
+ name: text("name").primaryKey(),
+ age: integer("age").notNull(),
+});
+```
+
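+Indexing functions can read (but not write) offchain tables, presumably through the same `context.db.sql` interface shown earlier. Here is a minimal sketch that reads the `author` table defined above (the event name and filter are illustrative):
+
+{/* prettier-ignore */}
+```ts filename="src/index.ts"
+import { ponder } from "@/generated";
+import { eq } from "@ponder/core/db";
+import { author } from "../ponder.schema";
+
+ponder.on("ERC20:Transfer", async ({ event, context }) => {
+  // Offchain tables are readable from indexing functions, but not writable.
+  const rows = await context.db.sql
+    .select()
+    .from(author)
+    .where(eq(author.name, "Bob"));
+});
+```
+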
+#### Install `drizzle-kit`
+
+Unlike onchain tables, offchain tables are not created automatically. To create them, install `drizzle-kit`, add a `generate` script to your `package.json`, and run it to generate the migration files.
+
+{/* prettier-ignore */}
+
+
+```bash filename="shell"
+pnpm add drizzle-kit@latest
+```
+
+
+```bash filename="shell"
+yarn add drizzle-kit@latest
+```
+
+
+```bash filename="shell"
+npm add drizzle-kit@latest
+```
+
+
+
+```json filename="package.json"
+{
+ "scripts": {
+ "generate": "drizzle-kit generate --dialect postgresql --schema ./ponder.schema.ts --out migrations"
+ }
+}
+```
+
+{/* prettier-ignore */}
+
+
+```bash filename="shell"
+pnpm generate
+```
+
+
+```bash filename="shell"
+yarn generate
+```
+
+
+```bash filename="shell"
+npm run generate
+```
+
+
+
+#### Run migrations
+
+Start the dev server to apply any pending migrations found in the `migrations/` directory.
+
+{/* prettier-ignore */}
+
+
+```bash filename="shell"
+pnpm dev
+```
+
+
+```bash filename="shell"
+yarn dev
+```
+
+
+```bash filename="shell"
+npm run dev
+```
+
+
+
+
+
+By default, offchain tables are created in the `public` schema. You can specify a different schema using the `offchainSchema()` function.
+
+```ts filename="ponder.schema.ts" {3}
+import { offchainSchema, serial, evmHex } from "@ponder/core/db";
+
+export const offchain = offchainSchema("offchain");
+
+export const metadata = offchain.table("metadata", {
+ id: serial("id").primaryKey(),
+ account: evmHex("account").notNull(),
+});
+```
+
+Here's a complete example that uses an offchain table to store user metadata.
+
+
+```ts filename="ponder.schema.ts"
+import {
+ boolean,
+ evmBigint,
+ evmHex,
+ offchainTable,
+ onchainTable,
+ serial,
+} from "@ponder/core/db";
+
+export const account = onchainTable("account", {
+ address: evmHex("address").primaryKey(),
+ balance: evmBigint("balance").notNull(),
+ isOwner: boolean("is_owner").notNull(),
+});
+
+export const metadata = offchainTable("metadata", {
+ id: serial("id").primaryKey(),
+ account: evmHex("account").notNull(),
+});
+```
+
+```ts filename="src/api/index.ts"
+import { ponder } from "@/generated";
+import { replaceBigInts } from "@ponder/core";
+import { desc, eq } from "@ponder/core/db";
+import { formatEther, getAddress } from "viem";
+import { account, metadata } from "../../ponder.schema";
+
+ponder.get("/register/:address", async (c) => {
+  const account = getAddress(c.req.param("address"));
+ await c.db.insert(metadata).values({ account });
+ return c.text("Success", 200);
+});
+
+ponder.get("/user-balances", async (c) => {
+ const result = await c.db
+ .select({
+ address: account.address,
+ balance: account.balance,
+ })
+ .from(account)
+ .innerJoin(metadata, eq(account.address, metadata.account))
+ .orderBy(desc(account.balance))
+ .limit(10);
+
+ return c.json(replaceBigInts(result, (b) => formatEther(b)));
+});
+```
+
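+To try the example, hit the two endpoints with `curl`. This sketch assumes the default local server address (port 42069) and a placeholder user address:
+
+```bash filename="shell"
+# Register a user, then fetch the top balances for registered users.
+curl http://localhost:42069/register/0x0000000000000000000000000000000000000001
+curl http://localhost:42069/user-balances
+```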
+
+
## 0.6.0
### Updated `viem` to `>=2`
@@ -99,6 +561,7 @@ Ponder now creates a table in the `public` schema for each table in `ponder.sche
Isolation while running multiple Ponder instances against the same database also works differently. Before, Ponder used a schema with a pseudorandom name if the desired schema was in use. Now, Ponder will fail on startup with an error if it cannot acquire a lock on the desired schema.
This also changes the zero-downtime behavior on platforms like Railway. For more information on how this works in `0.4`, please reference:
+
- [Direct SQL](/docs/query/direct-sql)
- [Zero-downtime deployments](/docs/production/zero-downtime)
diff --git a/docs/styles.css b/docs/styles.css
index 985a8ecc4..fb2999f7e 100644
--- a/docs/styles.css
+++ b/docs/styles.css
@@ -127,3 +127,8 @@
),
url("/hero.png") lightgray 50% / cover no-repeat;
}
+
+/* MISC NEXTRA OVERRIDES */
+._list-decimal, ._list-disc {
+ margin-top: 12px;
+}
\ No newline at end of file
diff --git a/examples/feature-api-functions/migrations/0000_glamorous_fallen_one.sql b/examples/feature-api-functions/migrations/0000_glamorous_fallen_one.sql
new file mode 100644
index 000000000..27aad0c5f
--- /dev/null
+++ b/examples/feature-api-functions/migrations/0000_glamorous_fallen_one.sql
@@ -0,0 +1,6 @@
+CREATE SCHEMA "offchain";
+--> statement-breakpoint
+CREATE TABLE IF NOT EXISTS "offchain"."metadata" (
+ "id" serial PRIMARY KEY NOT NULL,
+ "account" text NOT NULL
+);
diff --git a/examples/feature-api-functions/migrations/meta/0000_snapshot.json b/examples/feature-api-functions/migrations/meta/0000_snapshot.json
new file mode 100644
index 000000000..2f4cad901
--- /dev/null
+++ b/examples/feature-api-functions/migrations/meta/0000_snapshot.json
@@ -0,0 +1,40 @@
+{
+ "id": "6011cb22-cfd9-474d-a02b-ba6addfea155",
+ "prevId": "00000000-0000-0000-0000-000000000000",
+ "version": "7",
+ "dialect": "postgresql",
+ "tables": {
+ "offchain.metadata": {
+ "name": "metadata",
+ "schema": "offchain",
+ "columns": {
+ "id": {
+ "name": "id",
+ "type": "serial",
+ "primaryKey": true,
+ "notNull": true
+ },
+ "account": {
+ "name": "account",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": true
+ }
+ },
+ "indexes": {},
+ "foreignKeys": {},
+ "compositePrimaryKeys": {},
+ "uniqueConstraints": {}
+ }
+ },
+ "enums": {},
+ "schemas": {
+ "offchain": "offchain"
+ },
+ "sequences": {},
+ "_meta": {
+ "columns": {},
+ "schemas": {},
+ "tables": {}
+ }
+}
diff --git a/examples/feature-api-functions/migrations/meta/_journal.json b/examples/feature-api-functions/migrations/meta/_journal.json
new file mode 100644
index 000000000..c7751ec29
--- /dev/null
+++ b/examples/feature-api-functions/migrations/meta/_journal.json
@@ -0,0 +1,13 @@
+{
+ "version": "7",
+ "dialect": "postgresql",
+ "entries": [
+ {
+ "idx": 0,
+ "version": "7",
+ "when": 1728509311024,
+ "tag": "0000_glamorous_fallen_one",
+ "breakpoints": true
+ }
+ ]
+}
diff --git a/examples/feature-api-functions/package.json b/examples/feature-api-functions/package.json
index 6ec614428..436f16671 100644
--- a/examples/feature-api-functions/package.json
+++ b/examples/feature-api-functions/package.json
@@ -7,11 +7,13 @@
"start": "ponder start",
"codegen": "ponder codegen",
"serve": "ponder serve",
+ "generate": "drizzle-kit generate --dialect postgresql --schema ./ponder.schema.ts --out migrations",
"lint": "eslint .",
"typecheck": "tsc"
},
"dependencies": {
"@ponder/core": "workspace:*",
+ "drizzle-kit": "0.25.0",
"hono": "^4.5.0",
"viem": "^2.21.3"
},
diff --git a/examples/feature-api-functions/ponder-env.d.ts b/examples/feature-api-functions/ponder-env.d.ts
index 03126bf92..919cf2f8a 100644
--- a/examples/feature-api-functions/ponder-env.d.ts
+++ b/examples/feature-api-functions/ponder-env.d.ts
@@ -7,7 +7,7 @@ declare module "@/generated" {
import type { Virtual } from "@ponder/core";
type config = typeof import("./ponder.config.ts").default;
- type schema = typeof import("./ponder.schema.ts").default;
+ type schema = typeof import("./ponder.schema.ts");
export const ponder: Virtual.Registry;
@@ -21,8 +21,6 @@ declare module "@/generated" {
schema,
name
>;
- export type ApiContext = Virtual.Drizzle;
export type IndexingFunctionArgs =
Virtual.IndexingFunctionArgs;
- export type Schema = Virtual.Schema;
}
diff --git a/examples/feature-api-functions/ponder.schema.ts b/examples/feature-api-functions/ponder.schema.ts
index a722500c6..d779855e0 100644
--- a/examples/feature-api-functions/ponder.schema.ts
+++ b/examples/feature-api-functions/ponder.schema.ts
@@ -1,50 +1,53 @@
-import { createSchema } from "@ponder/core";
-
-export default createSchema((p) => ({
- Account: p.createTable({
- id: p.hex(),
- balance: p.bigint(),
- isOwner: p.boolean(),
+import {
+ index,
+ offchainSchema,
+ onchainTable,
+ primaryKey,
+} from "@ponder/core/db";
+
+export const account = onchainTable("account", (t) => ({
+ address: t.evmHex().primaryKey(),
+ balance: t.evmBigint().notNull(),
+ isOwner: t.boolean().notNull(),
+}));
- allowances: p.many("Allowance.ownerId"),
- approvalOwnerEvents: p.many("ApprovalEvent.ownerId"),
- approvalSpenderEvents: p.many("ApprovalEvent.spenderId"),
- transferFromEvents: p.many("TransferEvent.fromId"),
- transferToEvents: p.many("TransferEvent.toId"),
+export const allowance = onchainTable(
+ "allowance",
+ (t) => ({
+ owner: t.evmHex(),
+ spender: t.evmHex(),
+ amount: t.evmBigint().notNull(),
}),
- Allowance: p.createTable({
- id: p.string(),
- amount: p.bigint(),
-
- ownerId: p.hex().references("Account.id"),
- spenderId: p.hex().references("Account.id"),
-
- owner: p.one("ownerId"),
- spender: p.one("spenderId"),
+ (table) => ({
+ pk: primaryKey({ columns: [table.owner, table.spender] }),
}),
- TransferEvent: p.createTable(
- {
- id: p.string(),
- amount: p.bigint(),
- timestamp: p.int(),
-
- fromId: p.hex().references("Account.id"),
- toId: p.hex().references("Account.id"),
-
- from: p.one("fromId"),
- to: p.one("toId"),
- },
- { fromIdIndex: p.index("fromId") },
- ),
- ApprovalEvent: p.createTable({
- id: p.string(),
- amount: p.bigint(),
- timestamp: p.int(),
+);
+
+export const transferEvent = onchainTable(
+ "transfer_event",
+ (t) => ({
+ id: t.serial().primaryKey(),
+ amount: t.evmBigint().notNull(),
+ timestamp: t.integer().notNull(),
+ from: t.evmHex().notNull(),
+ to: t.evmHex().notNull(),
+ }),
+ (table) => ({
+ fromIdx: index("from_index").on(table.from),
+ }),
+);
+
+export const approvalEvent = onchainTable("approval_event", (t) => ({
+ id: t.serial().primaryKey(),
+ amount: t.evmBigint().notNull(),
+ timestamp: t.integer().notNull(),
+ owner: t.evmHex().notNull(),
+ spender: t.evmHex().notNull(),
+}));
- ownerId: p.hex().references("Account.id"),
- spenderId: p.hex().references("Account.id"),
+export const schema = offchainSchema("offchain");
- owner: p.one("ownerId"),
- spender: p.one("spenderId"),
- }),
+export const metadata = schema.table("metadata", (t) => ({
+ id: t.serial().primaryKey(),
+ account: t.evmHex().notNull(),
}));
diff --git a/examples/feature-api-functions/src/api/index.ts b/examples/feature-api-functions/src/api/index.ts
index 219dbde3b..62cb8600b 100644
--- a/examples/feature-api-functions/src/api/index.ts
+++ b/examples/feature-api-functions/src/api/index.ts
@@ -1,13 +1,13 @@
import { ponder } from "@/generated";
-import { count, desc, eq, graphql, or, replaceBigInts } from "@ponder/core";
+import { replaceBigInts } from "@ponder/core";
+import { count, desc, eq, or } from "@ponder/core/db";
import { formatEther, getAddress } from "viem";
+import { account, metadata, transferEvent } from "../../ponder.schema";
-ponder.use("/graphql", graphql());
+// ponder.use("/graphql", graphql());
ponder.get("/count", async (c) => {
- const result = await c.db
- .select({ count: count() })
- .from(c.tables.TransferEvent);
+ const result = await c.db.select({ count: count() }).from(transferEvent);
if (result.length === 0) return c.text("0");
return c.text(String(result[0]!.count));
@@ -15,33 +15,51 @@ ponder.get("/count", async (c) => {
ponder.get("/count/:address", async (c) => {
const account = getAddress(c.req.param("address"));
- const { TransferEvent } = c.tables;
const result = await c.db
.select({ count: count() })
- .from(c.tables.TransferEvent)
- .where(
- or(eq(TransferEvent.fromId, account), eq(TransferEvent.toId, account)),
- );
+ .from(transferEvent)
+ .where(or(eq(transferEvent.from, account), eq(transferEvent.to, account)));
if (result.length === 0) return c.text("0");
return c.text(String(result[0]!.count));
});
ponder.get("/whale-transfers", async (c) => {
- const { TransferEvent, Account } = c.tables;
-
// Top 10 transfers from whale accounts
const result = await c.db
.select({
- amount: TransferEvent.amount,
- senderBalance: Account.balance,
+ sender: account.address,
+ senderBalance: account.balance,
+ amount: transferEvent.amount,
})
- .from(TransferEvent)
- .innerJoin(Account, eq(TransferEvent.fromId, Account.id))
- .orderBy(desc(Account.balance))
+ .from(transferEvent)
+ .innerJoin(account, eq(transferEvent.from, account.address))
+ .orderBy(desc(account.balance))
.limit(10);
if (result.length === 0) return c.text("Not found", 500);
return c.json(replaceBigInts(result, (b) => formatEther(b)));
});
+
+ponder.get("/register/:address", async (c) => {
+ const account = getAddress(c.req.param("address"));
+ await c.db.insert(metadata).values({ account });
+
+ return c.text("Success", 200);
+});
+
+ponder.get("/user-transfers", async (c) => {
+ // Top 20 largest transfers to registered users
+ const result = await c.db
+ .select({
+ amount: transferEvent.amount,
+ account: metadata.account,
+ })
+ .from(transferEvent)
+ .innerJoin(metadata, eq(transferEvent.to, metadata.account))
+ .orderBy(desc(transferEvent.amount))
+ .limit(20);
+
+ return c.json(replaceBigInts(result, (b) => formatEther(b)));
+});
diff --git a/examples/feature-api-functions/src/index.ts b/examples/feature-api-functions/src/index.ts
index 16bf33aa7..552d35b68 100644
--- a/examples/feature-api-functions/src/index.ts
+++ b/examples/feature-api-functions/src/index.ts
@@ -1,70 +1,50 @@
import { ponder } from "@/generated";
+import {
+ account,
+ allowance,
+ approvalEvent,
+ transferEvent,
+} from "../ponder.schema";
ponder.on("ERC20:Transfer", async ({ event, context }) => {
- const { Account, TransferEvent } = context.db;
-
- // Create an Account for the sender, or update the balance if it already exists.
- await Account.upsert({
- id: event.args.from,
- create: {
- balance: BigInt(0),
- isOwner: false,
- },
- update: ({ current }) => ({
- balance: current.balance - event.args.amount,
- }),
- });
-
- // Create an Account for the recipient, or update the balance if it already exists.
- await Account.upsert({
- id: event.args.to,
- create: {
- balance: event.args.amount,
- isOwner: false,
- },
- update: ({ current }) => ({
- balance: current.balance + event.args.amount,
- }),
- });
-
- // Create a TransferEvent.
- await TransferEvent.create({
- id: event.log.id,
- data: {
- fromId: event.args.from,
- toId: event.args.to,
- amount: event.args.amount,
- timestamp: Number(event.block.timestamp),
- },
+ await context.db
+ .upsert(account, { address: event.args.from })
+ .insert({ balance: 0n, isOwner: false })
+ .update((row) => ({
+ balance: row.balance - event.args.amount,
+ }));
+
+ await context.db
+ .upsert(account, { address: event.args.to })
+ .insert({ balance: 0n, isOwner: false })
+ .update((row) => ({
+ balance: row.balance + event.args.amount,
+ }));
+
+ // add row to "transfer_event".
+ await context.db.insert(transferEvent).values({
+ amount: event.args.amount,
+ timestamp: Number(event.block.timestamp),
+ from: event.args.from,
+ to: event.args.to,
});
});
ponder.on("ERC20:Approval", async ({ event, context }) => {
- const { Allowance, ApprovalEvent } = context.db;
-
- const allowanceId = `${event.args.owner}-${event.args.spender}`;
-
- // Create or update the Allowance.
- await Allowance.upsert({
- id: allowanceId,
- create: {
- ownerId: event.args.owner,
- spenderId: event.args.spender,
- amount: event.args.amount,
- },
- update: {
- amount: event.args.amount,
- },
- });
-
- // Create an ApprovalEvent.
- await ApprovalEvent.create({
- id: event.log.id,
- data: {
- ownerId: event.args.owner,
- spenderId: event.args.spender,
- amount: event.args.amount,
- timestamp: Number(event.block.timestamp),
- },
+ // upsert "allowance".
+ await context.db
+ .upsert(allowance, {
+ spender: event.args.spender,
+ owner: event.args.owner,
+ })
+ .insert({ amount: event.args.amount })
+ .update({ amount: event.args.amount });
+
+ // add row to "approval_event".
+ await context.db.insert(approvalEvent).values({
+ amount: event.args.amount,
+ timestamp: Number(event.block.timestamp),
+ owner: event.args.owner,
+ spender: event.args.spender,
});
});
diff --git a/examples/feature-multichain/ponder.schema.ts b/examples/feature-multichain/ponder.schema.ts
index c13f8a8b6..cb9645757 100644
--- a/examples/feature-multichain/ponder.schema.ts
+++ b/examples/feature-multichain/ponder.schema.ts
@@ -1,8 +1,6 @@
-import { createSchema } from "@ponder/core";
+import { onchainTable } from "@ponder/core";
-export default createSchema((p) => ({
- Account: p.createTable({
- id: p.hex(),
- balance: p.bigint(),
- }),
+export const account = onchainTable("account", (t) => ({
+ address: t.evmHex().primaryKey(),
+ balance: t.evmBigint().notNull(),
}));
diff --git a/examples/feature-multichain/src/index.ts b/examples/feature-multichain/src/index.ts
index 7cf369419..9e45cef20 100644
--- a/examples/feature-multichain/src/index.ts
+++ b/examples/feature-multichain/src/index.ts
@@ -1,15 +1,9 @@
import { ponder } from "@/generated";
+import { account } from "../ponder.schema";
ponder.on("weth9:Deposit", async ({ event, context }) => {
- const { Account } = context.db;
-
- await Account.upsert({
- id: event.args.dst,
- create: {
- balance: event.args.wad,
- },
- update: ({ current }) => ({
- balance: current.balance + event.args.wad,
- }),
- });
+ await context.db
+ .upsert(account, { address: event.args.dst })
+ .insert({ balance: event.args.wad })
+ .update((row) => ({ balance: row.balance + event.args.wad }));
});
diff --git a/examples/reference-erc20/package.json b/examples/reference-erc20/package.json
index 0b3d3d6da..b5819ad48 100644
--- a/examples/reference-erc20/package.json
+++ b/examples/reference-erc20/package.json
@@ -12,6 +12,7 @@
},
"dependencies": {
"@ponder/core": "workspace:*",
+ "drizzle-kit": "0.22.8",
"hono": "^4.5.0",
"viem": "^2.21.3"
},
diff --git a/examples/reference-erc20/ponder-env.d.ts b/examples/reference-erc20/ponder-env.d.ts
index 03126bf92..e7f300973 100644
--- a/examples/reference-erc20/ponder-env.d.ts
+++ b/examples/reference-erc20/ponder-env.d.ts
@@ -7,7 +7,7 @@ declare module "@/generated" {
import type { Virtual } from "@ponder/core";
type config = typeof import("./ponder.config.ts").default;
- type schema = typeof import("./ponder.schema.ts").default;
+ type schema = typeof import("./ponder.schema.ts");
export const ponder: Virtual.Registry;
@@ -21,8 +21,7 @@ declare module "@/generated" {
schema,
name
>;
- export type ApiContext = Virtual.Drizzle;
+ export type ApiContext = Virtual.ApiContext;
export type IndexingFunctionArgs =
Virtual.IndexingFunctionArgs;
- export type Schema = Virtual.Schema;
}
diff --git a/examples/reference-erc20/ponder.schema.ts b/examples/reference-erc20/ponder.schema.ts
index a722500c6..64386b24d 100644
--- a/examples/reference-erc20/ponder.schema.ts
+++ b/examples/reference-erc20/ponder.schema.ts
@@ -1,50 +1,41 @@
-import { createSchema } from "@ponder/core";
+import { index, onchainTable, primaryKey } from "@ponder/core/db";
-export default createSchema((p) => ({
- Account: p.createTable({
- id: p.hex(),
- balance: p.bigint(),
- isOwner: p.boolean(),
+export const account = onchainTable("account", (t) => ({
+ address: t.evmHex().primaryKey(),
+ balance: t.evmBigint().notNull(),
+ isOwner: t.boolean().notNull(),
+}));
- allowances: p.many("Allowance.ownerId"),
- approvalOwnerEvents: p.many("ApprovalEvent.ownerId"),
- approvalSpenderEvents: p.many("ApprovalEvent.spenderId"),
- transferFromEvents: p.many("TransferEvent.fromId"),
- transferToEvents: p.many("TransferEvent.toId"),
+export const allowance = onchainTable(
+ "allowance",
+ (t) => ({
+ owner: t.evmHex(),
+ spender: t.evmHex(),
+ amount: t.evmBigint().notNull(),
}),
- Allowance: p.createTable({
- id: p.string(),
- amount: p.bigint(),
-
- ownerId: p.hex().references("Account.id"),
- spenderId: p.hex().references("Account.id"),
-
- owner: p.one("ownerId"),
- spender: p.one("spenderId"),
+ (table) => ({
+ pk: primaryKey({ columns: [table.owner, table.spender] }),
}),
- TransferEvent: p.createTable(
- {
- id: p.string(),
- amount: p.bigint(),
- timestamp: p.int(),
-
- fromId: p.hex().references("Account.id"),
- toId: p.hex().references("Account.id"),
-
- from: p.one("fromId"),
- to: p.one("toId"),
- },
- { fromIdIndex: p.index("fromId") },
- ),
- ApprovalEvent: p.createTable({
- id: p.string(),
- amount: p.bigint(),
- timestamp: p.int(),
-
- ownerId: p.hex().references("Account.id"),
- spenderId: p.hex().references("Account.id"),
-
- owner: p.one("ownerId"),
- spender: p.one("spenderId"),
+);
+
+export const transferEvent = onchainTable(
+ "transfer_event",
+ (t) => ({
+ id: t.serial().primaryKey(),
+ amount: t.evmBigint().notNull(),
+ timestamp: t.integer().notNull(),
+ from: t.evmHex().notNull(),
+ to: t.evmHex().notNull(),
+ }),
+ (table) => ({
+ fromIdx: index("from_index").on(table.from),
}),
+);
+
+export const approvalEvent = onchainTable("approval_event", (t) => ({
+ id: t.serial().primaryKey(),
+ amount: t.evmBigint().notNull(),
+ timestamp: t.integer().notNull(),
+ owner: t.evmHex().notNull(),
+ spender: t.evmHex().notNull(),
}));
diff --git a/examples/reference-erc20/src/index.ts b/examples/reference-erc20/src/index.ts
index 16bf33aa7..552d35b68 100644
--- a/examples/reference-erc20/src/index.ts
+++ b/examples/reference-erc20/src/index.ts
@@ -1,70 +1,50 @@
import { ponder } from "@/generated";
+import {
+ account,
+ allowance,
+ approvalEvent,
+ transferEvent,
+} from "../ponder.schema";
ponder.on("ERC20:Transfer", async ({ event, context }) => {
- const { Account, TransferEvent } = context.db;
-
- // Create an Account for the sender, or update the balance if it already exists.
- await Account.upsert({
- id: event.args.from,
- create: {
- balance: BigInt(0),
- isOwner: false,
- },
- update: ({ current }) => ({
- balance: current.balance - event.args.amount,
- }),
- });
-
- // Create an Account for the recipient, or update the balance if it already exists.
- await Account.upsert({
- id: event.args.to,
- create: {
- balance: event.args.amount,
- isOwner: false,
- },
- update: ({ current }) => ({
- balance: current.balance + event.args.amount,
- }),
- });
-
- // Create a TransferEvent.
- await TransferEvent.create({
- id: event.log.id,
- data: {
- fromId: event.args.from,
- toId: event.args.to,
- amount: event.args.amount,
- timestamp: Number(event.block.timestamp),
- },
+ await context.db
+ .upsert(account, { address: event.args.from })
+ .insert({ balance: 0n, isOwner: false })
+ .update((row) => ({
+ balance: row.balance - event.args.amount,
+ }));
+
+ await context.db
+ .upsert(account, { address: event.args.to })
+ .insert({ balance: 0n, isOwner: false })
+ .update((row) => ({
+ balance: row.balance + event.args.amount,
+ }));
+
+ // add row to "transfer_event".
+ await context.db.insert(transferEvent).values({
+ amount: event.args.amount,
+ timestamp: Number(event.block.timestamp),
+ from: event.args.from,
+ to: event.args.to,
});
});
ponder.on("ERC20:Approval", async ({ event, context }) => {
- const { Allowance, ApprovalEvent } = context.db;
-
- const allowanceId = `${event.args.owner}-${event.args.spender}`;
-
- // Create or update the Allowance.
- await Allowance.upsert({
- id: allowanceId,
- create: {
- ownerId: event.args.owner,
- spenderId: event.args.spender,
- amount: event.args.amount,
- },
- update: {
- amount: event.args.amount,
- },
- });
-
- // Create an ApprovalEvent.
- await ApprovalEvent.create({
- id: event.log.id,
- data: {
- ownerId: event.args.owner,
- spenderId: event.args.spender,
- amount: event.args.amount,
- timestamp: Number(event.block.timestamp),
- },
+ // upsert "allowance".
+ await context.db
+ .upsert(allowance, {
+ spender: event.args.spender,
+ owner: event.args.owner,
+ })
+ .insert({ amount: event.args.amount })
+ .update({ amount: event.args.amount });
+
+ // add row to "approval_event".
+ await context.db.insert(approvalEvent).values({
+ amount: event.args.amount,
+ timestamp: Number(event.block.timestamp),
+ owner: event.args.owner,
+ spender: event.args.spender,
});
});
diff --git a/examples/with-trpc/ponder/ponder-env.d.ts b/examples/with-trpc/ponder/ponder-env.d.ts
index 03126bf92..e7f300973 100644
--- a/examples/with-trpc/ponder/ponder-env.d.ts
+++ b/examples/with-trpc/ponder/ponder-env.d.ts
@@ -7,7 +7,7 @@ declare module "@/generated" {
import type { Virtual } from "@ponder/core";
type config = typeof import("./ponder.config.ts").default;
- type schema = typeof import("./ponder.schema.ts").default;
+ type schema = typeof import("./ponder.schema.ts");
export const ponder: Virtual.Registry;
@@ -21,8 +21,7 @@ declare module "@/generated" {
schema,
name
>;
- export type ApiContext = Virtual.Drizzle;
+ export type ApiContext = Virtual.ApiContext;
export type IndexingFunctionArgs =
Virtual.IndexingFunctionArgs;
- export type Schema = Virtual.Schema;
}
diff --git a/examples/with-trpc/ponder/ponder.schema.ts b/examples/with-trpc/ponder/ponder.schema.ts
index a722500c6..64386b24d 100644
--- a/examples/with-trpc/ponder/ponder.schema.ts
+++ b/examples/with-trpc/ponder/ponder.schema.ts
@@ -1,50 +1,41 @@
-import { createSchema } from "@ponder/core";
+import { index, onchainTable, primaryKey } from "@ponder/core/db";
-export default createSchema((p) => ({
- Account: p.createTable({
- id: p.hex(),
- balance: p.bigint(),
- isOwner: p.boolean(),
+export const account = onchainTable("account", (t) => ({
+ address: t.evmHex().primaryKey(),
+ balance: t.evmBigint().notNull(),
+ isOwner: t.boolean().notNull(),
+}));
- allowances: p.many("Allowance.ownerId"),
- approvalOwnerEvents: p.many("ApprovalEvent.ownerId"),
- approvalSpenderEvents: p.many("ApprovalEvent.spenderId"),
- transferFromEvents: p.many("TransferEvent.fromId"),
- transferToEvents: p.many("TransferEvent.toId"),
+export const allowance = onchainTable(
+ "allowance",
+ (t) => ({
+ owner: t.evmHex(),
+ spender: t.evmHex(),
+ amount: t.evmBigint().notNull(),
}),
- Allowance: p.createTable({
- id: p.string(),
- amount: p.bigint(),
-
- ownerId: p.hex().references("Account.id"),
- spenderId: p.hex().references("Account.id"),
-
- owner: p.one("ownerId"),
- spender: p.one("spenderId"),
+ (table) => ({
+ pk: primaryKey({ columns: [table.owner, table.spender] }),
}),
- TransferEvent: p.createTable(
- {
- id: p.string(),
- amount: p.bigint(),
- timestamp: p.int(),
-
- fromId: p.hex().references("Account.id"),
- toId: p.hex().references("Account.id"),
-
- from: p.one("fromId"),
- to: p.one("toId"),
- },
- { fromIdIndex: p.index("fromId") },
- ),
- ApprovalEvent: p.createTable({
- id: p.string(),
- amount: p.bigint(),
- timestamp: p.int(),
-
- ownerId: p.hex().references("Account.id"),
- spenderId: p.hex().references("Account.id"),
-
- owner: p.one("ownerId"),
- spender: p.one("spenderId"),
+);
+
+export const transferEvent = onchainTable(
+ "transfer_event",
+ (t) => ({
+ id: t.serial().primaryKey(),
+ amount: t.evmBigint().notNull(),
+ timestamp: t.integer().notNull(),
+ from: t.evmHex().notNull(),
+ to: t.evmHex().notNull(),
+ }),
+ (table) => ({
+ fromIdx: index("from_index").on(table.from),
}),
+);
+
+export const approvalEvent = onchainTable("approval_event", (t) => ({
+ id: t.serial().primaryKey(),
+ amount: t.evmBigint().notNull(),
+ timestamp: t.integer().notNull(),
+ owner: t.evmHex().notNull(),
+ spender: t.evmHex().notNull(),
}));
diff --git a/examples/with-trpc/ponder/src/api/index.ts b/examples/with-trpc/ponder/src/api/index.ts
index 709808d87..4da13b854 100644
--- a/examples/with-trpc/ponder/src/api/index.ts
+++ b/examples/with-trpc/ponder/src/api/index.ts
@@ -1,20 +1,19 @@
import { type ApiContext, ponder } from "@/generated";
import { trpcServer } from "@hono/trpc-server";
-import { eq } from "@ponder/core";
+import { eq } from "@ponder/core/db";
import { initTRPC } from "@trpc/server";
import type { Address } from "viem";
import { z } from "zod";
+import * as schema from "../../ponder.schema";
const t = initTRPC.context().create();
const appRouter = t.router({
hello: t.procedure.input(z.string()).query(async ({ input, ctx }) => {
- const { Account } = ctx.tables;
-
const account = await ctx.db
- .select({ balance: Account.balance })
- .from(Account)
- .where(eq(Account.id, input as Address))
+ .select({ balance: schema.account.balance })
+ .from(schema.account)
+ .where(eq(schema.account.address, input as Address))
.limit(1);
if (account.length === 0) return null;
diff --git a/examples/with-trpc/ponder/src/index.ts b/examples/with-trpc/ponder/src/index.ts
index 16bf33aa7..552d35b68 100644
--- a/examples/with-trpc/ponder/src/index.ts
+++ b/examples/with-trpc/ponder/src/index.ts
@@ -1,70 +1,50 @@
import { ponder } from "@/generated";
+import {
+ account,
+ allowance,
+ approvalEvent,
+ transferEvent,
+} from "../ponder.schema";
ponder.on("ERC20:Transfer", async ({ event, context }) => {
- const { Account, TransferEvent } = context.db;
-
- // Create an Account for the sender, or update the balance if it already exists.
- await Account.upsert({
- id: event.args.from,
- create: {
- balance: BigInt(0),
- isOwner: false,
- },
- update: ({ current }) => ({
- balance: current.balance - event.args.amount,
- }),
- });
-
- // Create an Account for the recipient, or update the balance if it already exists.
- await Account.upsert({
- id: event.args.to,
- create: {
- balance: event.args.amount,
- isOwner: false,
- },
- update: ({ current }) => ({
- balance: current.balance + event.args.amount,
- }),
- });
-
- // Create a TransferEvent.
- await TransferEvent.create({
- id: event.log.id,
- data: {
- fromId: event.args.from,
- toId: event.args.to,
- amount: event.args.amount,
- timestamp: Number(event.block.timestamp),
- },
+ await context.db
+ .upsert(account, { address: event.args.from })
+ .insert({ balance: 0n, isOwner: false })
+ .update((row) => ({
+ balance: row.balance - event.args.amount,
+ }));
+
+ await context.db
+ .upsert(account, { address: event.args.to })
+ .insert({ balance: 0n, isOwner: false })
+ .update((row) => ({
+ balance: row.balance + event.args.amount,
+ }));
+
+ // add row to "transfer_event".
+ await context.db.insert(transferEvent).values({
+ amount: event.args.amount,
+ timestamp: Number(event.block.timestamp),
+ from: event.args.from,
+ to: event.args.to,
});
});
ponder.on("ERC20:Approval", async ({ event, context }) => {
- const { Allowance, ApprovalEvent } = context.db;
-
- const allowanceId = `${event.args.owner}-${event.args.spender}`;
-
- // Create or update the Allowance.
- await Allowance.upsert({
- id: allowanceId,
- create: {
- ownerId: event.args.owner,
- spenderId: event.args.spender,
- amount: event.args.amount,
- },
- update: {
- amount: event.args.amount,
- },
- });
-
- // Create an ApprovalEvent.
- await ApprovalEvent.create({
- id: event.log.id,
- data: {
- ownerId: event.args.owner,
- spenderId: event.args.spender,
- amount: event.args.amount,
- timestamp: Number(event.block.timestamp),
- },
+ // upsert "allowance".
+ await context.db
+ .upsert(allowance, {
+ spender: event.args.spender,
+ owner: event.args.owner,
+ })
+ .insert({ amount: event.args.amount })
+ .update({ amount: event.args.amount });
+
+ // add row to "approval_event".
+ await context.db.insert(approvalEvent).values({
+ amount: event.args.amount,
+ timestamp: Number(event.block.timestamp),
+ owner: event.args.owner,
+ spender: event.args.spender,
});
});
diff --git a/packages/core/package.json b/packages/core/package.json
index 97fdb6d2e..edca4180b 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -1,6 +1,6 @@
{
"name": "@ponder/core",
- "version": "0.6.15",
+ "version": "0.7.0-next.2",
"description": "An open-source framework for crypto application backends",
"license": "MIT",
"type": "module",
@@ -20,11 +20,20 @@
"ponder": "./dist/bin/ponder.js"
},
"module": "./dist/index.js",
+ "main": "./dist/index.cjs",
"types": "./dist/index.d.ts",
"typings": "./dist/index.d.ts",
"exports": {
- "import": "./dist/index.js",
- "types": "./dist/index.d.ts"
+ ".": {
+ "types": "./dist/index.d.ts",
+ "import": "./dist/index.js",
+ "require": "./dist/index.cjs"
+ },
+ "./db": {
+ "types": "./dist/drizzle/db.d.ts",
+ "import": "./dist/drizzle/db.js",
+ "require": "./dist/drizzle/db.cjs"
+ }
},
"scripts": {
"build": "tsup",
@@ -45,25 +54,26 @@
"dependencies": {
"@babel/code-frame": "^7.23.4",
"@commander-js/extra-typings": "^12.0.1",
+ "@electric-sql/pglite": "^0.2.5",
"@escape.tech/graphql-armor-max-aliases": "^2.3.0",
"@escape.tech/graphql-armor-max-depth": "^2.2.0",
"@escape.tech/graphql-armor-max-tokens": "^2.3.0",
"@hono/node-server": "^1.13.2",
"@ponder/utils": "workspace:*",
"abitype": "^0.10.2",
- "better-sqlite3": "^11.1.2",
"commander": "^12.0.0",
"conf": "^12.0.0",
"dataloader": "^2.2.2",
"detect-package-manager": "^3.0.2",
"dotenv": "^16.3.1",
- "drizzle-orm": "^0.31.2",
+ "drizzle-orm": "0.34.1",
"glob": "^10.3.10",
"graphql": "^16.8.1",
"graphql-yoga": "^5.3.0",
"http-terminator": "^3.2.0",
"ink": "^4.4.1",
"kysely": "^0.26.3",
+ "kysely-pglite": "^0.6.0",
"pg": "^8.11.3",
"pg-connection-string": "^2.6.2",
"picocolors": "^1.0.0",
@@ -77,7 +87,6 @@
},
"devDependencies": {
"@types/babel__code-frame": "^7.0.6",
- "@types/better-sqlite3": "^7.6.10",
"@types/glob": "^8.1.0",
"@types/node": "^20.10.0",
"@types/pg": "^8.10.9",
diff --git a/packages/core/src/_test/e2e/erc20/ponder.config.ts b/packages/core/src/_test/e2e/erc20/ponder.config.ts
index d641e01b3..ce217d77e 100644
--- a/packages/core/src/_test/e2e/erc20/ponder.config.ts
+++ b/packages/core/src/_test/e2e/erc20/ponder.config.ts
@@ -13,7 +13,7 @@ function getDatabase() {
const connectionString = databaseUrl.toString();
return { kind: "postgres", connectionString } as const;
} else {
- return { kind: "sqlite" } as const;
+ return { kind: "pglite" } as const;
}
}
diff --git a/packages/core/src/_test/e2e/factory/ponder.config.ts b/packages/core/src/_test/e2e/factory/ponder.config.ts
index 6438a8608..875fc0392 100644
--- a/packages/core/src/_test/e2e/factory/ponder.config.ts
+++ b/packages/core/src/_test/e2e/factory/ponder.config.ts
@@ -13,7 +13,7 @@ function getDatabase() {
const connectionString = databaseUrl.toString();
return { kind: "postgres", connectionString } as const;
} else {
- return { kind: "sqlite" } as const;
+ return { kind: "pglite" } as const;
}
}
diff --git a/packages/core/src/_test/setup.ts b/packages/core/src/_test/setup.ts
index 7db7aa8ac..820d50e11 100644
--- a/packages/core/src/_test/setup.ts
+++ b/packages/core/src/_test/setup.ts
@@ -19,13 +19,19 @@ import type { IndexingStore, ReadonlyStore } from "@/indexing-store/store.js";
import type { Schema } from "@/schema/common.js";
import { type SyncStore, createSyncStore } from "@/sync-store/index.js";
import type { BlockSource, ContractSource, LogFactory } from "@/sync/source.js";
+import { createPglite } from "@/utils/pglite.js";
import type { RequestQueue } from "@/utils/requestQueue.js";
import pg from "pg";
import { rimrafSync } from "rimraf";
import type { Address } from "viem";
-import type { TestContext } from "vitest";
+import { type TestContext, afterAll } from "vitest";
import { deploy, simulate } from "./simulate.js";
-import { getConfig, getNetworkAndSources, testClient } from "./utils.js";
+import {
+ getConfig,
+ getNetworkAndSources,
+ poolId,
+ testClient,
+} from "./utils.js";
declare module "vitest" {
export interface TestContext {
@@ -66,50 +72,51 @@ export function setupCommon(context: TestContext) {
};
}
+const pgliteDataDirs = new Map();
+afterAll(() => pgliteDataDirs.forEach((dataDir) => rimrafSync(dataDir)));
+
/**
* Sets up an isolated database on the test context.
*
- * If `process.env.DATABASE_URL` is set, creates a new database and drops
- * it in the cleanup function. If it's not set, creates a temporary directory
- * for SQLite and removes it in the cleanup function.
- *
* ```ts
* // Add this to any test suite that uses the database.
- * beforeEach((context) => setupIsolatedDatabase(context))
+ * beforeEach(setupIsolatedDatabase)
* ```
*/
export async function setupIsolatedDatabase(context: TestContext) {
- if (process.env.DATABASE_URL) {
- const databaseName = `vitest_${process.env.VITEST_POOL_ID ?? 1}`;
- const databaseUrl = new URL(process.env.DATABASE_URL);
- databaseUrl.pathname = `/${databaseName}`;
-
- const poolConfig = { max: 30, connectionString: databaseUrl.toString() };
+ const connectionString = process.env.DATABASE_URL;
+ if (connectionString !== undefined) {
+ const databaseName = `vitest_${poolId}`;
- const client = new pg.Client({
- connectionString: process.env.DATABASE_URL,
- });
+ const client = new pg.Client({ connectionString });
await client.connect();
await client.query(`DROP DATABASE IF EXISTS "${databaseName}"`);
await client.query(`CREATE DATABASE "${databaseName}"`);
await client.end();
- context.databaseConfig = {
- kind: "postgres",
- poolConfig,
- schema: "public",
- };
+ const databaseUrl = new URL(connectionString);
+ databaseUrl.pathname = `/${databaseName}`;
+ const poolConfig = { max: 30, connectionString: databaseUrl.toString() };
- return () => {};
+ context.databaseConfig = { kind: "postgres", poolConfig, schema: "public" };
} else {
- const tempDir = path.join(os.tmpdir(), randomUUID());
- mkdirSync(tempDir, { recursive: true });
+ let dataDir = pgliteDataDirs.get(poolId);
+ if (dataDir === undefined) {
+ dataDir = path.join(os.tmpdir(), randomUUID());
+ mkdirSync(dataDir, { recursive: true });
+ pgliteDataDirs.set(poolId, dataDir);
+ }
+
+ const databaseName = `vitest_${poolId}`;
- context.databaseConfig = { kind: "sqlite", directory: tempDir };
+ const parent = createPglite({ dataDir });
+ await parent.exec(`DROP DATABASE IF EXISTS "${databaseName}"`);
+ await parent.exec(`CREATE DATABASE "${databaseName}"`);
+ await parent.close();
- return () => {
- rimrafSync(tempDir);
- };
+ const options = { dataDir, database: databaseName };
+
+ context.databaseConfig = { kind: "pglite", options };
}
}
@@ -135,7 +142,7 @@ export async function setupDatabaseServices(
cleanup: () => Promise;
}> {
const config = { ...defaultDatabaseServiceSetup, ...overrides };
- const database = createDatabase({
+ const database = await createDatabase({
common: context.common,
databaseConfig: context.databaseConfig,
schema: config.schema,
@@ -143,16 +150,17 @@ export async function setupDatabaseServices(
await database.setup(config);
- await database.migrateSync();
+ await database.migrateSync().catch((err) => {
+ console.log(err);
+ throw err;
+ });
const syncStore = createSyncStore({
common: context.common,
- dialect: database.dialect,
db: database.qb.sync,
});
const readonlyStore = getReadonlyStore({
- dialect: database.dialect,
schema: config.schema,
db: database.qb.user,
common: context.common,
@@ -161,7 +169,6 @@ export async function setupDatabaseServices(
const indexingStore =
config.indexing === "historical"
? getHistoricalStore({
- dialect: database.dialect,
schema: config.schema,
readonlyStore,
db: database.qb.user,
@@ -171,7 +178,6 @@ export async function setupDatabaseServices(
: {
...readonlyStore,
...getRealtimeStore({
- dialect: database.dialect,
schema: config.schema,
db: database.qb.user,
common: context.common,
diff --git a/packages/core/src/bin/commands/codegen.ts b/packages/core/src/bin/commands/codegen.ts
index ad49b0864..e63b0cf53 100644
--- a/packages/core/src/bin/commands/codegen.ts
+++ b/packages/core/src/bin/commands/codegen.ts
@@ -57,7 +57,7 @@ export async function codegen({ cliOptions }: { cliOptions: CliOptions }) {
properties: { cli_command: "codegen" },
});
- runCodegen({ common, graphqlSchema: indexing.build.graphqlSchema });
+ runCodegen({ common });
logger.info({ service: "codegen", msg: "Wrote ponder-env.d.ts" });
logger.info({ service: "codegen", msg: "Wrote schema.graphql" });
diff --git a/packages/core/src/bin/commands/serve.ts b/packages/core/src/bin/commands/serve.ts
index 543362b01..6d8a626fe 100644
--- a/packages/core/src/bin/commands/serve.ts
+++ b/packages/core/src/bin/commands/serve.ts
@@ -69,15 +69,15 @@ export async function serve({ cliOptions }: { cliOptions: CliOptions }) {
const { databaseConfig, schema } = api.build;
- if (databaseConfig.kind === "sqlite") {
+ if (databaseConfig.kind === "pglite") {
await shutdown({
- reason: "The 'ponder serve' command does not support SQLite",
+ reason: "The 'ponder serve' command does not support PGlite",
code: 1,
});
return cleanup;
}
- const database = createDatabase({
+ const database = await createDatabase({
common,
schema,
databaseConfig,
diff --git a/packages/core/src/bin/utils/run.ts b/packages/core/src/bin/utils/run.ts
index 05ec1e965..d54c207c3 100644
--- a/packages/core/src/bin/utils/run.ts
+++ b/packages/core/src/bin/utils/run.ts
@@ -2,11 +2,7 @@ import type { IndexingBuild } from "@/build/index.js";
import { runCodegen } from "@/common/codegen.js";
import type { Common } from "@/common/common.js";
import { createDatabase } from "@/database/index.js";
-import { getHistoricalStore } from "@/indexing-store/historical.js";
import { getMetadataStore } from "@/indexing-store/metadata.js";
-import { getReadonlyStore } from "@/indexing-store/readonly.js";
-import { getRealtimeStore } from "@/indexing-store/realtime.js";
-import type { IndexingStore } from "@/indexing-store/store.js";
import { createIndexingService } from "@/indexing/index.js";
import { createSyncStore } from "@/sync-store/index.js";
import type { Event } from "@/sync/events.js";
@@ -39,14 +35,13 @@ export async function run({
databaseConfig,
networks,
sources,
- graphqlSchema,
schema,
indexingFunctions,
} = build;
let isKilled = false;
- const database = createDatabase({
+ const database = await createDatabase({
common,
schema,
databaseConfig,
@@ -58,11 +53,9 @@ export async function run({
const syncStore = createSyncStore({
common,
db: database.qb.sync,
- dialect: database.dialect,
});
const metadataStore = getMetadataStore({
- dialect: database.dialect,
db: database.qb.user,
});
@@ -70,7 +63,7 @@ export async function run({
// starting the server so the app can become responsive more quickly.
await database.migrateSync();
- runCodegen({ common, graphqlSchema });
+ runCodegen({ common });
// Note: can throw
const sync = await createSync({
@@ -109,6 +102,9 @@ export async function run({
if (result.status === "error") onReloadableError(result.error);
+ // overwrite the temporary "checkpoint" value in reorg tables
+ await database.complete({ checkpoint: event.checkpoint });
+
await metadataStore.setStatus(event.status);
break;
@@ -127,32 +123,13 @@ export async function run({
},
});
- const readonlyStore = getReadonlyStore({
- dialect: database.dialect,
- schema,
- db: database.qb.user,
- common,
- });
-
- const historicalStore = getHistoricalStore({
- dialect: database.dialect,
- schema,
- readonlyStore,
- db: database.qb.user,
- common,
- isCacheExhaustive: encodeCheckpoint(zeroCheckpoint) === initialCheckpoint,
- });
-
- let indexingStore: IndexingStore = historicalStore;
-
const indexingService = createIndexingService({
indexingFunctions,
common,
- indexingStore,
sources,
networks,
sync,
- schema,
+ database,
});
await metadataStore.setStatus(sync.getStatus());
@@ -194,7 +171,7 @@ export async function run({
if (isKilled) return;
- await historicalStore.flush({ isFullFlush: true });
+ // await historicalStore.flush({ isFullFlush: true });
// Manually update metrics to fix a UI bug that occurs when the end
// checkpoint is between the last processed event and the finalized
@@ -220,19 +197,8 @@ export async function run({
await database.finalize({ checkpoint: sync.getFinalizedCheckpoint() });
- await database.createIndexes({ schema });
-
- indexingStore = {
- ...readonlyStore,
- ...getRealtimeStore({
- dialect: database.dialect,
- schema,
- db: database.qb.user,
- common,
- }),
- };
-
- indexingService.updateIndexingStore({ indexingStore, schema });
+ // await database.createIndexes({ schema });
+ await database.createTriggers();
await sync.startRealtime();
diff --git a/packages/core/src/bin/utils/runServer.ts b/packages/core/src/bin/utils/runServer.ts
index f6a35c417..291df0821 100644
--- a/packages/core/src/bin/utils/runServer.ts
+++ b/packages/core/src/bin/utils/runServer.ts
@@ -15,7 +15,7 @@ export async function runServer({
}) {
const { databaseConfig, schema } = build;
- const database = createDatabase({
+ const database = await createDatabase({
common,
schema,
databaseConfig,
diff --git a/packages/core/src/build/configAndIndexingFunctions.test.ts b/packages/core/src/build/configAndIndexingFunctions.test.ts
index 6b5c01391..6cea50883 100644
--- a/packages/core/src/build/configAndIndexingFunctions.test.ts
+++ b/packages/core/src/build/configAndIndexingFunctions.test.ts
@@ -403,10 +403,10 @@ test("buildConfigAndIndexingFunctions() validates address empty string", async (
address: "" as Address,
},
},
- }) as Config;
+ });
const result = await safeBuildConfigAndIndexingFunctions({
- config,
+ config: config as unknown as Config,
rawIndexingFunctions: [{ name: "a:Event0", fn: () => {} }],
options,
});
@@ -430,10 +430,10 @@ test("buildConfigAndIndexingFunctions() validates address prefix", async () => {
address: "0b0000000000000000000000000000000000000001" as Address,
},
},
- }) as Config;
+ });
const result = await safeBuildConfigAndIndexingFunctions({
- config,
+ config: config as unknown as Config,
rawIndexingFunctions: [{ name: "a:Event0", fn: () => {} }],
options,
});
@@ -630,7 +630,7 @@ test("buildConfigAndIndexingFunctions() coerces NaN endBlock to undefined", asyn
expect(sources[0]!.filter.toBlock).toBe(undefined);
});
-test("buildConfigAndIndexingFunctions() database uses sqlite by default", async () => {
+test("buildConfigAndIndexingFunctions() database uses pglite by default", async () => {
const config = createConfig({
networks: { mainnet: { chainId: 1, transport: http() } },
contracts: { a: { network: "mainnet", abi: [event0] } },
@@ -646,16 +646,18 @@ test("buildConfigAndIndexingFunctions() database uses sqlite by default", async
options,
});
expect(databaseConfig).toMatchObject({
- kind: "sqlite",
- directory: expect.stringContaining(path.join(".ponder", "sqlite")),
+ kind: "pglite",
+ options: {
+ dataDir: expect.stringContaining(path.join(".ponder", "pglite")),
+ },
});
process.env.DATABASE_URL = prev;
});
-test("buildConfigAndIndexingFunctions() database respects custom sqlite path", async () => {
+test("buildConfigAndIndexingFunctions() database respects custom pglite path", async () => {
const config = createConfig({
- database: { kind: "sqlite", directory: "custom-sqlite/directory" },
+ database: { kind: "pglite", directory: "custom-pglite/directory" },
networks: { mainnet: { chainId: 1, transport: http() } },
contracts: { a: { network: "mainnet", abi: [event0] } },
});
@@ -667,14 +669,16 @@ test("buildConfigAndIndexingFunctions() database respects custom sqlite path", a
});
expect(databaseConfig).toMatchObject({
- kind: "sqlite",
- directory: expect.stringContaining(path.join("custom-sqlite", "directory")),
+ kind: "pglite",
+ options: {
+ dataDir: expect.stringContaining(path.join("custom-pglite", "directory")),
+ },
});
});
-test("buildConfigAndIndexingFunctions() database uses sqlite if specified even if DATABASE_URL env var present", async () => {
+test("buildConfigAndIndexingFunctions() database uses pglite if specified even if DATABASE_URL env var present", async () => {
const config = createConfig({
- database: { kind: "sqlite" },
+ database: { kind: "pglite" },
networks: { mainnet: { chainId: 1, transport: http() } },
contracts: { a: { network: "mainnet", abi: [event0] } },
});
@@ -687,8 +691,8 @@ test("buildConfigAndIndexingFunctions() database uses sqlite if specified even i
options,
});
expect(databaseConfig).toMatchObject({
- kind: "sqlite",
- directory: expect.stringContaining(path.join(".ponder", "sqlite")),
+ kind: "pglite",
+ directory: expect.stringContaining(path.join(".ponder", "pglite")),
});
vi.unstubAllEnvs();
diff --git a/packages/core/src/build/configAndIndexingFunctions.ts b/packages/core/src/build/configAndIndexingFunctions.ts
index 67a0d9a94..061f8d269 100644
--- a/packages/core/src/build/configAndIndexingFunctions.ts
+++ b/packages/core/src/build/configAndIndexingFunctions.ts
@@ -41,13 +41,16 @@ export async function buildConfigAndIndexingFunctions({
// Build database.
let databaseConfig: DatabaseConfig;
- // Determine SQLite directory, preferring config.database.directory if available
- const sqliteDir =
- config.database?.kind === "sqlite" && config.database.directory
- ? path.resolve(config.database.directory)
- : path.join(ponderDir, "sqlite");
+ // Determine PGlite directory, preferring config.database.directory if available
+ const pgliteDir =
+ config.database?.kind === "pglite" && config.database.directory
+ ? config.database.directory === "memory://"
+ ? "memory://"
+ : path.resolve(config.database.directory)
+ : path.join(ponderDir, "pglite");
- const sqlitePrintPath = path.relative(rootDir, sqliteDir);
+ const pglitePrintPath =
+ pgliteDir === "memory://" ? "memory://" : path.relative(rootDir, pgliteDir);
if (config.database?.kind) {
if (config.database.kind === "postgres") {
@@ -111,10 +114,10 @@ export async function buildConfigAndIndexingFunctions({
} else {
logs.push({
level: "info",
- msg: `Using SQLite database in '${sqlitePrintPath}' (from ponder.config.ts)`,
+ msg: `Using PGlite database in '${pglitePrintPath}' (from ponder.config.ts)`,
});
- databaseConfig = { kind: "sqlite", directory: sqliteDir };
+ databaseConfig = { kind: "pglite", options: { dataDir: pgliteDir } };
}
} else {
let connectionString: string | undefined = undefined;
@@ -164,13 +167,13 @@ export async function buildConfigAndIndexingFunctions({
schema,
};
} else {
- // Fall back to SQLite.
+ // Fall back to PGlite.
logs.push({
level: "info",
- msg: `Using SQLite database at ${sqlitePrintPath} (default)`,
+ msg: `Using PGlite database at ${pglitePrintPath} (default)`,
});
- databaseConfig = { kind: "sqlite", directory: sqliteDir };
+ databaseConfig = { kind: "pglite", options: { dataDir: pgliteDir } };
}
}
diff --git a/packages/core/src/build/schema.test.ts b/packages/core/src/build/schema.test.ts
deleted file mode 100644
index 97bc96893..000000000
--- a/packages/core/src/build/schema.test.ts
+++ /dev/null
@@ -1,411 +0,0 @@
-import { createSchema } from "@/schema/schema.js";
-import { expect, test } from "vitest";
-import { safeBuildSchema } from "./schema.js";
-
-test("safeBuildSchema() returns error for duplicate enum values", () => {
- const schema = createSchema((p) => ({
- myEnum: p.createEnum(["duplicate", "duplicate"]),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toBe(
- "Validation failed: Enum 'myEnum' contains duplicate value 'duplicate'.",
- );
-});
-
-test("safeBuildSchema() returns error for table without ID column", () => {
- const schema = createSchema((p) => ({
- // @ts-expect-error
- myTable: p.createTable({}),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toBe(
- "Validation failed: Table 'myTable' does not have an 'id' column.",
- );
-});
-
-test("safeBuildSchema() returns error for ID column typed as an enum", () => {
- const schema = createSchema((p) => ({
- myEnum: p.createEnum(["value1", "value2"]),
- // @ts-expect-error
- myTable: p.createTable({
- // @ts-expect-error
- id: p.enum("myEnum"),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toBe(
- "Validation failed: Invalid type for ID column 'myTable.id'. Got 'enum', expected one of ['string', 'hex', 'bigint', 'int'].",
- );
-});
-
-test("safeBuildSchema() returns error for ID column typed as a 'one' relationship", () => {
- const schema = createSchema((p) => ({
- // @ts-expect-error
- myTable: p.createTable({
- // @ts-expect-error
- id: p.one("refTableId"),
- refTableId: p.string().references("refTable.id"),
- }),
- refTable: p.createTable({
- id: p.string(),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toBe(
- "Validation failed: Invalid type for ID column 'myTable.id'. Got 'one', expected one of ['string', 'hex', 'bigint', 'int'].",
- );
-});
-
-test("safeBuildSchema() returns error for ID column typed as a 'many' relationship", () => {
- const schema = createSchema((p) => ({
- // @ts-expect-error
- myTable: p.createTable({
- // @ts-expect-error
- id: p.many("refTable.myTableId"),
- }),
- // @ts-expect-error
- refTable: p.createTable({
- id: p.string(),
- myTableId: p.string().references("myTable.id"),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toBe(
- "Validation failed: Invalid type for ID column 'myTable.id'. Got 'many', expected one of ['string', 'hex', 'bigint', 'int'].",
- );
-});
-
-test("safeBuildSchema() returns error for ID column with the references modifier", () => {
- const schema = createSchema((p) => ({
- // @ts-expect-error
- myTable: p.createTable({
- // @ts-expect-error
- id: p.string().references("refTable.id"),
- }),
- refTable: p.createTable({
- id: p.string(),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toBe(
- "Validation failed: Invalid type for ID column 'myTable.id'. ID columns cannot use the '.references' modifier.",
- );
-});
-
-test("safeBuildSchema() returns error for invalid ID column type boolean", () => {
- const schema = createSchema((p) => ({
- // @ts-expect-error
- myTable: p.createTable({
- // @ts-expect-error
- id: p.boolean(),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toBe(
- "Validation failed: Invalid type for ID column 'myTable.id'. Got 'boolean', expected one of ['string', 'hex', 'bigint', 'int'].",
- );
-});
-
-test("safeBuildSchema() returns error for invalid ID column type float", () => {
- const schema = createSchema((p) => ({
- // @ts-expect-error
- myTable: p.createTable({
- // @ts-expect-error
- id: p.float(),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toBe(
- "Validation failed: Invalid type for ID column 'myTable.id'. Got 'float', expected one of ['string', 'hex', 'bigint', 'int'].",
- );
-});
-
-test("safeBuildSchema() returns error for ID column with optional modifier", () => {
- const schema = createSchema((p) => ({
- // @ts-expect-error
- myTable: p.createTable({
- // @ts-expect-error
- id: p.string().optional(),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toBe(
- "Validation failed: Invalid type for ID column 'myTable.id'. ID columns cannot be optional.",
- );
-});
-
-test("safeBuildSchema() returns error for ID column with list modifier", () => {
- const schema = createSchema((p) => ({
- // @ts-expect-error
- myTable: p.createTable({
- // @ts-expect-error
- id: p.string().list(),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toBe(
- "Validation failed: Invalid type for ID column 'myTable.id'. ID columns cannot be a list.",
- );
-});
-
-test("safeBuildSchema() returns error for empty table or enum name", () => {
- const schema = createSchema((p) => ({
- "": p.createEnum(["value1", "value2"]),
- myTable: p.createTable({
- id: p.string(),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toBe(
- "Validation failed: Enum name can't be an empty string.",
- );
-});
-
-test("safeBuildSchema() returns error for table or enum name with invalid characters", () => {
- const schema = createSchema((p) => ({
- "invalid-name": p.createEnum(["value1", "value2"]),
- myTable: p.createTable({
- id: p.string(),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toBe(
- "Validation failed: Enum name 'invalid-name' contains an invalid character.",
- );
-});
-
-test("safeBuildSchema() returns error for 'one' relationship with non-existent reference column", () => {
- const schema = createSchema((p) => ({
- // @ts-expect-error
- myTable: p.createTable({
- id: p.string(),
- // @ts-expect-error
- refColumn: p.one("nonExistentColumn"),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toContain("uses a column that does not exist");
-});
-
-test("safeBuildSchema() returns error for 'one' relationship with reference to non-foreign key column", () => {
- const schema = createSchema((p) => ({
- myTable: p.createTable({
- id: p.string(),
- refColumn: p.one("nonForeignKeyColumn"),
- nonForeignKeyColumn: p.string(),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toContain(
- "uses a column that is not foreign key column",
- );
-});
-
-test("safeBuildSchema() returns error for 'many' relationship with non-existent reference table", () => {
- const schema = createSchema((p) => ({
- // @ts-expect-error
- myTable: p.createTable({
- id: p.string(),
- refColumn: p.many("nonExistentTable.nonExistentColumn"),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toContain("uses a table that does not exist");
-});
-
-test("safeBuildSchema() returns error for 'many' relationship with non-existent reference column", () => {
- const schema = createSchema((p) => ({
- // @ts-expect-error
- myTable: p.createTable({
- id: p.string(),
- refColumn: p.many("otherTable.nonExistentColumn"),
- }),
- otherTable: p.createTable({
- id: p.string(),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toContain("uses a column that does not exist");
-});
-
-test("safeBuildSchema() returns error for 'many' relationship with reference to non-foreign key column", () => {
- const schema = createSchema((p) => ({
- // @ts-expect-error
- myTable: p.createTable({
- id: p.string(),
- refColumn: p.many("otherTable.nonForeignKeyColumn"),
- }),
- otherTable: p.createTable({
- id: p.string(),
- nonForeignKeyColumn: p.string(),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toContain(
- "uses a column that is not foreign key column",
- );
-});
-
-test("safeBuildSchema() returns error for enum column referencing non-existent enum", () => {
- const schema = createSchema((p) => ({
- // @ts-expect-error
- myTable: p.createTable({
- id: p.string(),
- enumColumn: p.enum("nonExistentEnum"),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toContain("doesn't reference a valid enum");
-});
-
-test("safeBuildSchema() returns error for foreign key column referencing non-existent ID column", () => {
- const schema = createSchema((p) => ({
- // @ts-expect-error
- myTable: p.createTable({
- id: p.string(),
- fkColumn: p.string().references("nonExistentTable.id"),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toContain(
- "does not reference a valid ID column",
- );
-});
-
-test("safeBuildSchema() returns error for foreign key column type mismatch", () => {
- const schema = createSchema((p) => ({
- myTable: p.createTable({
- id: p.string(),
- fkColumn: p.bigint().references("otherTable.id"),
- }),
- otherTable: p.createTable({
- id: p.string(),
- }),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toContain(
- "type does not match the referenced table's ID column type",
- );
-});
-
-test("safeBuildSchema() returns error for empty index", () => {
- const schema = createSchema((p) => ({
- myTable: p.createTable(
- {
- id: p.string(),
- col: p.int(),
- },
- {
- colIndex: p.index([]),
- },
- ),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toContain("Index 'colIndex' cannot be empty.");
-});
-
-test("safeBuildSchema() returns error for duplicate index", () => {
- const schema = createSchema((p) => ({
- myTable: p.createTable(
- {
- id: p.string(),
- col: p.int(),
- },
- {
- colIndex: p.index(["col", "col"]),
- },
- ),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toContain(
- "Index 'colIndex' cannot contain duplicate columns.",
- );
-});
-
-test("safeBuildSchema() returns error for invalid multi-column index", () => {
- const schema = createSchema((p) => ({
- // @ts-expect-error
- myTable: p.createTable(
- {
- id: p.string(),
- col: p.int(),
- },
- {
- // @ts-expect-error
- colIndex: p.index(["coll"]),
- },
- ),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toContain(
- "Index 'colIndex' does not reference a valid column.",
- );
-});
-
-test("safeBuildSchema() returns error for invalid index", () => {
- const schema = createSchema((p) => ({
- // @ts-expect-error
- myTable: p.createTable(
- {
- id: p.string(),
- col: p.int(),
- },
- {
- // @ts-expect-error
- colIndex: p.index("col1"),
- },
- ),
- }));
-
- const result = safeBuildSchema({ schema });
- expect(result.status).toBe("error");
- expect(result.error?.message).toContain(
- "Index 'colIndex' does not reference a valid column.",
- );
-});
diff --git a/packages/core/src/build/schema.ts b/packages/core/src/build/schema.ts
deleted file mode 100644
index e56fefa1b..000000000
--- a/packages/core/src/build/schema.ts
+++ /dev/null
@@ -1,338 +0,0 @@
-import { BuildError } from "@/common/errors.js";
-import type { Schema } from "@/schema/common.js";
-import {
- extractReferenceTable,
- getEnums,
- getTables,
- isEnumColumn,
- isJSONColumn,
- isListColumn,
- isManyColumn,
- isOneColumn,
- isOptionalColumn,
- isReferenceColumn,
-} from "@/schema/utils.js";
-import { dedupe } from "@ponder/common";
-
-export const buildSchema = ({ schema }: { schema: Schema }) => {
- const logs: { level: "warn" | "info" | "debug"; msg: string }[] = [];
-
- // Validate enums
- Object.entries(getEnums(schema)).forEach(([name, _enum]) => {
- validateTableOrColumnName(name, "Enum");
-
- const enumValues = new Set();
- for (const enumValue of _enum) {
- if (enumValues.has(enumValue)) {
- throw new Error(
- `Validation failed: Enum '${name}' contains duplicate value '${enumValue}'.`,
- );
- }
- enumValues.add(enumValue);
- }
- });
-
- // Validate tables
- Object.entries(getTables(schema)).forEach(
- ([tableName, { table, constraints }]) => {
- validateTableOrColumnName(tableName, "Table");
-
- // Validate the id column
- if (table.id === undefined)
- throw new Error(
- `Validation failed: Table '${tableName}' does not have an 'id' column.`,
- );
-
- if (isJSONColumn(table.id))
- throw new Error(
- `Validation failed: Invalid type for ID column '${tableName}.id'. Got 'json', expected one of ['string', 'hex', 'bigint', 'int'].`,
- );
- if (isEnumColumn(table.id))
- throw new Error(
- `Validation failed: Invalid type for ID column '${tableName}.id'. Got 'enum', expected one of ['string', 'hex', 'bigint', 'int'].`,
- );
- if (isOneColumn(table.id))
- throw new Error(
- `Validation failed: Invalid type for ID column '${tableName}.id'. Got 'one', expected one of ['string', 'hex', 'bigint', 'int'].`,
- );
- if (isManyColumn(table.id))
- throw new Error(
- `Validation failed: Invalid type for ID column '${tableName}.id'. Got 'many', expected one of ['string', 'hex', 'bigint', 'int'].`,
- );
- if (isReferenceColumn(table.id))
- throw new Error(
- `Validation failed: Invalid type for ID column '${tableName}.id'. ID columns cannot use the '.references' modifier.`,
- );
-
- if (
- table.id[" scalar"] !== "bigint" &&
- table.id[" scalar"] !== "string" &&
- table.id[" scalar"] !== "hex" &&
- table.id[" scalar"] !== "int"
- )
- throw new Error(
- `Validation failed: Invalid type for ID column '${tableName}.id'. Got '${table.id[" scalar"]}', expected one of ['string', 'hex', 'bigint', 'int'].`,
- );
-
- if (isOptionalColumn(table.id))
- throw new Error(
- `Validation failed: Invalid type for ID column '${tableName}.id'. ID columns cannot be optional.`,
- );
- if (isListColumn(table.id))
- throw new Error(
- `Validation failed: Invalid type for ID column '${tableName}.id'. ID columns cannot be a list.`,
- );
-
- // Validate all other columns
- Object.entries(table).forEach(([columnName, column]) => {
- if (columnName === "id") return;
-
- validateTableOrColumnName(columnName, "Column");
-
- if (isOneColumn(column)) {
- const usedColumn = Object.entries(table).find(
- ([c]) => c === column[" reference"],
- );
-
- if (usedColumn === undefined) {
- const otherColumns = Object.keys(table).filter(
- (c) => c !== columnName,
- );
- throw new Error(
- `Validation failed. Relationship column '${tableName}.${columnName}' uses a column that does not exist. Got '${
- column[" reference"]
- }', expected one of [${otherColumns.map((c) => `'${c}'`).join(", ")}].`,
- );
- }
-
- if (!isReferenceColumn(usedColumn[1])) {
- const foreignKeyColumns = Object.keys(table).filter(
- (c) => c !== columnName && isReferenceColumn(table[c]!),
- );
- throw new Error(
- `Validation failed. Relationship column '${tableName}.${columnName}' uses a column that is not foreign key column. Got '${
- column[" reference"]
- }', expected one of [${foreignKeyColumns.map((c) => `'${c}'`).join(", ")}].`,
- );
- }
- }
-
- if (isManyColumn(column)) {
- const usedTable = Object.entries(getTables(schema)).find(
- ([name]) => name === column[" referenceTable"],
- );
-
- if (usedTable === undefined) {
- const otherTables = Object.keys(getTables(schema)).filter(
- (t) => t !== tableName,
- );
-
- throw new Error(
- `Validation failed. Relationship column '${tableName}.${columnName}' uses a table that does not exist. Got '${
- column[" referenceTable"]
- }', expected one of [${otherTables.map((t) => `'${t}'`).join(", ")}].`,
- );
- }
-
- const usedTableColumns = Object.entries(usedTable[1].table);
- const usedColumn = usedTableColumns.find(
- ([columnName]) => columnName === column[" referenceColumn"],
- );
-
- if (usedColumn === undefined) {
- throw new Error(
- `Validation failed. Relationship column '${tableName}.${columnName}' uses a column that does not exist. Got '${
- column[" referenceTable"]
- }.${column[" referenceTable"]}', expected one of [${usedTableColumns
- .map((c) => `'${usedTable[0]}.${c}'`)
- .join(", ")}].`,
- );
- }
-
- if (!isReferenceColumn(usedColumn[1])) {
- const foreignKeyColumnNames = usedTableColumns.filter(([, c]) =>
- isReferenceColumn(c),
- );
- throw new Error(
- `Validation failed. Relationship column '${tableName}.${columnName}' uses a column that is not foreign key column. Got '${
- column[" referenceTable"]
- }.${column[" referenceTable"]}', expected one of [${foreignKeyColumnNames
- .map((c) => `'${usedTable[0]}.${c}'`)
- .join(", ")}].`,
- );
- }
- }
-
- if (isEnumColumn(column)) {
- const referencedEnum = Object.entries(getEnums(schema)).find(
- ([enumName]) => enumName === column[" enum"],
- );
- if (referencedEnum === undefined) {
- throw new Error(
- `Validation failed: Enum column '${tableName}.${columnName}' doesn't reference a valid enum. Got '${
- column[" enum"]
- }', expected one of [${Object.keys(getEnums(schema))
- .map((e) => `'${e}'`)
- .join(", ")}].`,
- );
- }
- }
-
- if (isReferenceColumn(column)) {
- const referencedTable = Object.entries(getTables(schema)).find(
- ([tableName]) => tableName === extractReferenceTable(column),
- );
-
- if (referencedTable === undefined) {
- throw new Error(
- `Validation failed: Foreign key column '${tableName}.${columnName}' does not reference a valid ID column. Got '${extractReferenceTable(
- column,
- )}', expected one of [${Object.keys(getTables(schema))
- .map((t) => `'${t}.id'`)
- .join(", ")}].`,
- );
- }
-
- if (referencedTable[1].table.id[" scalar"] !== column[" scalar"]) {
- throw new Error(
- `Validation failed: Foreign key column '${tableName}.${columnName}' type does not match the referenced table's ID column type. Got '${column[" scalar"]}', expected '${referencedTable[1].table.id[" scalar"]}'.`,
- );
- }
-
- // NOTE: This is unreachable, but worth documenting here.
- // if (column.list) {
- // throw new Error(
- // `Validation failed: Foreign key column '${tableName}.${columnName}' cannot use the 'list' modifier.`,
- // );
- // }
- }
-
- // NOTE: This is unreachable, but worth documenting here.
- // if (isPrimitiveColumn(column)) {
- // if (
- // column.type !== "bigint" &&
- // column.type !== "string" &&
- // column.type !== "boolean" &&
- // column.type !== "int" &&
- // column.type !== "float" &&
- // column.type !== "hex"
- // ) {
- // throw new Error(
- // `Validation failed: Primitive column '${tableName}.${columnName}' type is invalid. Got '${column.type}', expected one of ['bigint', 'string', 'boolean', 'int', 'float', 'hex'].`,
- // );
- // }
- // }
- });
-
- // Validate constraints
- if (constraints === undefined) return;
-
- for (const [name, index] of Object.entries(constraints)) {
- validateTableOrColumnName(name, "index");
- const column = index[" column"];
-
- if (Array.isArray(column)) {
- if (column.length === 0)
- throw new Error(
- `Validation failed: Index '${name}' cannot be empty.`,
- );
-
- if (column.length !== dedupe(column as string[]).length)
- throw new Error(
- `Validation failed: Index '${name}' cannot contain duplicate columns.`,
- );
-
- for (const c of column) {
- if (table[c] === undefined)
- throw new Error(
- `Validation failed: Index '${name}' does not reference a valid column. Got '${c}', expected one of [${Object.keys(
- table,
- ).join(", ")}].`,
- );
-
- if (isJSONColumn(table[c]!))
- throw new Error(
- `Validation failed: Invalid type for column '${column}' referenced by index '${name}'. Got 'json', expected one of ['string', 'hex', 'bigint', 'int', 'boolean', 'float'].`,
- );
-
- if (isOneColumn(table[c]!))
- throw new Error(
- `Validation failed: Invalid type for column '${column}' referenced by index '${name}'. Got 'one', expected one of ['string', 'hex', 'bigint', 'int', 'boolean', 'float'].`,
- );
-
- if (isManyColumn(table[c]!))
- throw new Error(
- `Validation failed: Invalid type for column '${column}' referenced by index '${name}'. Got 'many', expected one of ['string', 'hex', 'bigint', 'int', 'boolean', 'float'].`,
- );
- }
- } else {
- if (column === "id") {
- logs.push({
- level: "warn",
- msg: `Ignoring index '${name}'. Column 'id' has a primary key constraint by default.`,
- });
- delete constraints[name];
- continue;
- }
-
- if (table[column] === undefined)
- throw new Error(
- `Validation failed: Index '${name}' does not reference a valid column. Got '${column}', expected one of [${Object.entries(
- table,
- )
- .filter(
- ([_, column]) =>
- !isOneColumn(column) && !isManyColumn(column),
- )
- .map(([columnName]) => columnName)
- .join(", ")}].`,
- );
-
- if (isJSONColumn(table[column]!))
- throw new Error(
- `Validation failed: Invalid type for column '${column}' referenced by index '${name}'. Got 'json', expected one of ['string', 'hex', 'bigint', 'int', 'boolean', 'float'].`,
- );
-
- if (isOneColumn(table[column]!))
- throw new Error(
- `Validation failed: Invalid type for column '${column}' referenced by index '${name}'. Got 'one', expected one of ['string', 'hex', 'bigint', 'int', 'boolean', 'float'].`,
- );
-
- if (isManyColumn(table[column]!))
- throw new Error(
- `Validation failed: Invalid type for column '${column}' referenced by index '${name}'. Got 'many', expected one of ['string', 'hex', 'bigint', 'int', 'boolean', 'float'].`,
- );
- }
- }
- },
- );
-
- return { schema, logs };
-};
-
-const validateTableOrColumnName = (key: string, type: string) => {
- if (key === "")
- throw new Error(
- `Validation failed: ${type} name can't be an empty string.`,
- );
-
- if (!/^[a-z|A-Z|0-9]+$/.test(key))
- throw new Error(
- `Validation failed: ${type} name '${key}' contains an invalid character.`,
- );
-};
-
-export function safeBuildSchema({ schema }: { schema: Schema }) {
- try {
- const result = buildSchema({ schema });
- return {
- status: "success",
- schema: result.schema,
- logs: result.logs,
- } as const;
- } catch (_error) {
- const buildError = new BuildError((_error as Error).message);
- buildError.stack = undefined;
- return { status: "error", error: buildError } as const;
- }
-}
diff --git a/packages/core/src/build/service.ts b/packages/core/src/build/service.ts
index f37b94285..387e5d5f0 100644
--- a/packages/core/src/build/service.ts
+++ b/packages/core/src/build/service.ts
@@ -6,13 +6,11 @@ import { BuildError } from "@/common/errors.js";
import type { Config } from "@/config/config.js";
import type { DatabaseConfig } from "@/config/database.js";
import type { Network } from "@/config/networks.js";
-import { buildGraphQLSchema } from "@/graphql/buildGraphqlSchema.js";
+import type { Schema } from "@/drizzle/index.js";
import type { PonderRoutes } from "@/hono/index.js";
-import type { Schema } from "@/schema/common.js";
import type { Source } from "@/sync/source.js";
import { serialize } from "@/utils/serialize.js";
import { glob } from "glob";
-import type { GraphQLSchema } from "graphql";
import type { Hono } from "hono";
import { type ViteDevServer, createServer } from "vite";
import { ViteNodeRunner } from "vite-node/client";
@@ -26,7 +24,6 @@ import {
safeBuildConfigAndIndexingFunctions,
} from "./configAndIndexingFunctions.js";
import { vitePluginPonder } from "./plugin.js";
-import { safeBuildSchema } from "./schema.js";
import { parseViteNodeError } from "./stacktrace.js";
const BUILD_ID_VERSION = "1";
@@ -54,7 +51,6 @@ type BaseBuild = {
networks: Network[];
// Schema
schema: Schema;
- graphqlSchema: GraphQLSchema;
};
export type IndexingBuild = BaseBuild & {
@@ -269,6 +265,7 @@ export const start = async (
const hasSchemaUpdate = invalidated.includes(
common.options.schemaFile.replace(/\\/g, "/"),
);
+
const hasIndexingUpdate = invalidated.some(
(file) =>
buildService.indexingRegex.test(file) &&
@@ -484,10 +481,11 @@ const executeSchema = async (
return executeResult;
}
- const schema = executeResult.exports.default as Schema;
+ const schema = executeResult.exports;
+ // TODO(kyle) hash the contents
const contentHash = createHash("sha256")
- .update(serialize(schema))
+ .update(fs.readFileSync(buildService.common.options.schemaFile))
.digest("hex");
return { status: "success", schema, contentHash };
@@ -605,26 +603,6 @@ const validateAndBuild = async (
contentHash: string;
},
): Promise => {
- // Validate and build the schema
- const buildSchemaResult = safeBuildSchema({
- schema: schema.schema,
- });
- if (buildSchemaResult.status === "error") {
- common.logger.error({
- service: "build",
- msg: "Error while building schema:",
- error: buildSchemaResult.error,
- });
-
- return buildSchemaResult;
- }
-
- for (const log of buildSchemaResult.logs) {
- common.logger[log.level]({ service: "build", msg: log.msg });
- }
-
- const graphqlSchema = buildGraphQLSchema(buildSchemaResult.schema);
-
// Validates and build the config
const buildConfigAndIndexingFunctionsResult =
await safeBuildConfigAndIndexingFunctions({
@@ -666,8 +644,7 @@ const validateAndBuild = async (
databaseConfig: buildConfigAndIndexingFunctionsResult.databaseConfig,
networks: buildConfigAndIndexingFunctionsResult.networks,
sources: buildConfigAndIndexingFunctionsResult.sources,
- schema: buildSchemaResult.schema,
- graphqlSchema,
+ schema: schema.schema,
indexingFunctions:
buildConfigAndIndexingFunctionsResult.indexingFunctions,
},
diff --git a/packages/core/src/common/codegen.ts b/packages/core/src/common/codegen.ts
index 59b5d6baa..f5ed59d34 100644
--- a/packages/core/src/common/codegen.ts
+++ b/packages/core/src/common/codegen.ts
@@ -1,7 +1,6 @@
import { mkdirSync, writeFileSync } from "node:fs";
import path from "node:path";
import type { Common } from "@/common/common.js";
-import { type GraphQLSchema, printSchema } from "graphql";
export const ponderEnv = `// This file enables type checking and editor autocomplete for this Ponder project.
// After upgrading, you may find that changes have been made to this file.
@@ -12,7 +11,7 @@ declare module "@/generated" {
import type { Virtual } from "@ponder/core";
type config = typeof import("./ponder.config.ts").default;
- type schema = typeof import("./ponder.schema.ts").default;
+ type schema = typeof import("./ponder.schema.ts");
export const ponder: Virtual.Registry<config, schema>;
@@ -26,19 +25,18 @@ declare module "@/generated" {
schema,
name
>;
- export type ApiContext = Virtual.Drizzle<schema>;
+ export type ApiContext = Virtual.ApiContext<schema>;
export type IndexingFunctionArgs<name extends EventNames = EventNames> =
Virtual.IndexingFunctionArgs<config, schema, name>;
- export type Schema = Virtual.Schema<schema>;
}
`;
export function runCodegen({
common,
- graphqlSchema,
+ // graphqlSchema,
}: {
common: Common;
- graphqlSchema: GraphQLSchema;
+ // graphqlSchema: GraphQLSchema;
}) {
writeFileSync(
path.join(common.options.rootDir, "ponder-env.d.ts"),
@@ -52,11 +50,11 @@ export function runCodegen({
});
mkdirSync(common.options.generatedDir, { recursive: true });
- writeFileSync(
- path.join(common.options.generatedDir, "schema.graphql"),
- printSchema(graphqlSchema),
- "utf-8",
- );
+ // writeFileSync(
+ // path.join(common.options.generatedDir, "schema.graphql"),
+ // printSchema(graphqlSchema),
+ // "utf-8",
+ // );
common.logger.debug({
service: "codegen",
diff --git a/packages/core/src/common/errors.ts b/packages/core/src/common/errors.ts
index f0b13bc75..cf70aa2bd 100644
--- a/packages/core/src/common/errors.ts
+++ b/packages/core/src/common/errors.ts
@@ -91,6 +91,15 @@ export class CheckConstraintError extends NonRetryableError {
}
}
+export class InvalidStoreMethodError extends NonRetryableError {
+ override name = "InvalidStoreMethodError";
+
+ constructor(message?: string | undefined) {
+ super(message);
+ Object.setPrototypeOf(this, InvalidStoreMethodError.prototype);
+ }
+}
+
export class BigIntSerializationError extends NonRetryableError {
override name = "BigIntSerializationError";
diff --git a/packages/core/src/common/metrics.ts b/packages/core/src/common/metrics.ts
index 663111e38..083714ef8 100644
--- a/packages/core/src/common/metrics.ts
+++ b/packages/core/src/common/metrics.ts
@@ -57,11 +57,9 @@ export class MetricsService {
ponder_rpc_request_duration: prometheus.Histogram<"network" | "method">;
ponder_rpc_request_lag: prometheus.Histogram<"network" | "method">;
- ponder_postgres_pool_connections: prometheus.Gauge<"pool" | "kind"> = null!;
+ ponder_postgres_query_total: prometheus.Counter<"pool">;
ponder_postgres_query_queue_size: prometheus.Gauge<"pool"> = null!;
- ponder_postgres_query_total: prometheus.Counter<"pool"> = null!;
-
- ponder_sqlite_query_total: prometheus.Counter<"database"> = null!;
+ ponder_postgres_pool_connections: prometheus.Gauge<"pool" | "kind"> = null!;
constructor() {
this.registry = new prometheus.Registry();
@@ -220,6 +218,25 @@ export class MetricsService {
registers: [this.registry],
});
+ this.ponder_postgres_query_total = new prometheus.Counter({
+ name: "ponder_postgres_query_total",
+ help: "Total number of queries submitted to the database",
+ labelNames: ["pool"] as const,
+ registers: [this.registry],
+ });
+ this.ponder_postgres_pool_connections = new prometheus.Gauge({
+ name: "ponder_postgres_pool_connections",
+ help: "Number of postgres database connections",
+ labelNames: ["pool", "kind"] as const,
+ registers: [this.registry],
+ });
+ this.ponder_postgres_query_queue_size = new prometheus.Gauge({
+ name: "ponder_postgres_query_queue_size",
+ help: "Size of postgres queries",
+ labelNames: ["pool"] as const,
+ registers: [this.registry],
+ });
+
prometheus.collectDefaultMetrics({ register: this.registry });
}
@@ -280,11 +297,12 @@ export async function getSyncProgress(metrics: MetricsService): Promise<
const requestCount: { [network: string]: number } = {};
const rpcRequestMetrics = await metrics.ponder_rpc_request_duration.get();
for (const m of rpcRequestMetrics.values) {
+ const network = m.labels.network!;
if (m.metricName === "ponder_rpc_request_duration_count") {
- if (requestCount[m.labels.network!] === undefined) {
- requestCount[m.labels.network!] = 0;
+ if (requestCount[network] === undefined) {
+ requestCount[network] = 0;
}
- requestCount[m.labels.network!] += m.value;
+ requestCount[m.labels.network!]! += m.value;
}
}
diff --git a/packages/core/src/common/options.ts b/packages/core/src/common/options.ts
index fd9322d84..1f293a861 100644
--- a/packages/core/src/common/options.ts
+++ b/packages/core/src/common/options.ts
@@ -14,6 +14,7 @@ export type Options = {
generatedDir: string;
ponderDir: string;
logDir: string;
+ migrationsDir: string;
port: number;
hostname?: string;
@@ -86,6 +87,7 @@ export const buildOptions = ({ cliOptions }: { cliOptions: CliOptions }) => {
generatedDir: path.join(rootDir, "generated"),
ponderDir: path.join(rootDir, ".ponder"),
logDir: path.join(rootDir, ".ponder", "logs"),
+ migrationsDir: path.join(rootDir, "migrations"),
port,
hostname,
@@ -99,7 +101,7 @@ export const buildOptions = ({ cliOptions }: { cliOptions: CliOptions }) => {
databaseHeartbeatInterval: 10 * 1000,
databaseHeartbeatTimeout: 25 * 1000,
- // Half of the max query parameters for SQLite
+ // Half of the max query parameters for PGlite
databaseMaxQueryParameters: 16_000,
databaseMaxRowLimit: 1_000,
diff --git a/packages/core/src/common/telemetry.ts b/packages/core/src/common/telemetry.ts
index 3a0c0788a..164a57cf7 100644
--- a/packages/core/src/common/telemetry.ts
+++ b/packages/core/src/common/telemetry.ts
@@ -6,7 +6,6 @@ import path from "node:path";
import { promisify } from "node:util";
import type { IndexingBuild } from "@/build/service.js";
import type { Options } from "@/common/options.js";
-import { getTables } from "@/schema/utils.js";
import { startClock } from "@/utils/timer.js";
import { wait } from "@/utils/wait.js";
import { createQueue } from "@ponder/common";
@@ -279,7 +278,7 @@ function getPackageJson(rootDir: string) {
}
export function buildPayload(build: IndexingBuild) {
- const table_count = Object.keys(getTables(build.schema)).length;
+ const table_count = Object.keys(build.schema).length;
const indexing_function_count = Object.values(build.indexingFunctions).reduce(
(acc, f) => acc + Object.keys(f).length,
0,
diff --git a/packages/core/src/config/config.ts b/packages/core/src/config/config.ts
index c4b2f0b77..a9b4fdbc9 100644
--- a/packages/core/src/config/config.ts
+++ b/packages/core/src/config/config.ts
@@ -14,8 +14,8 @@ export type BlockConfig = {
type DatabaseConfig =
| {
- kind: "sqlite";
- /** Directory path to use for SQLite database files. Default: `".ponder/sqlite"`. */
+ kind: "pglite";
+ /** Directory path to use for PGlite database files. Default: `".ponder/pglite"`. */
directory?: string;
}
| {
diff --git a/packages/core/src/config/database.ts b/packages/core/src/config/database.ts
index 241ab01ff..18514a275 100644
--- a/packages/core/src/config/database.ts
+++ b/packages/core/src/config/database.ts
@@ -1,12 +1,9 @@
import type { Prettify } from "@/types/utils.js";
+import type { PGliteOptions } from "@/utils/pglite.js";
import type { PoolConfig as RawPoolConfig } from "pg";
export type PoolConfig = Prettify;
export type DatabaseConfig =
- | { kind: "sqlite"; directory: string }
- | {
- kind: "postgres";
- poolConfig: PoolConfig;
- schema: string;
- };
+ | { kind: "pglite"; options: PGliteOptions }
+ | { kind: "postgres"; poolConfig: PoolConfig; schema: string };
diff --git a/packages/core/src/database/index.test.ts b/packages/core/src/database/index.test.ts
index 8d5e3576b..642d4f6f4 100644
--- a/packages/core/src/database/index.test.ts
+++ b/packages/core/src/database/index.test.ts
@@ -174,7 +174,6 @@ test("setup with the same build ID and namespace reverts to and returns the fina
await database.setup({ buildId: "abc" });
const realtimeIndexingStore = getRealtimeStore({
- dialect: context.databaseConfig.kind,
schema,
db: database.qb.user,
common: context.common,
@@ -231,7 +230,6 @@ test("setup with the same build ID and namespace reverts to and returns the fina
});
const readonlyIndexingStore = getReadonlyStore({
- dialect: context.databaseConfig.kind,
schema,
db: databaseTwo.qb.user,
common: context.common,
@@ -278,17 +276,11 @@ test("setup succeeds if the lock expires after waiting to expire", async (contex
.updateTable("_ponder_meta")
.where("key", "=", "app")
.set({
- value:
- database.dialect === "sqlite"
- ? JSON.stringify({
- ...JSON.parse(row!.value!),
- is_locked: true,
- })
- : {
- // @ts-ignore
- ...row!.value!,
- is_locked: true,
- },
+ value: {
+ // @ts-ignore
+ ...row!.value!,
+ // is_locked: true,
+ },
})
.execute();
@@ -339,7 +331,7 @@ test("setup throws if there is a table name collision", async (context) => {
});
await database.qb.internal.executeQuery(
- sql`CREATE TABLE "Pet" (id TEXT)`.compile(database.qb.internal),
+ sql`CREATE TABLE "public"."Pet" (id TEXT)`.compile(database.qb.internal),
);
expect(await getUserTableNames(database)).toStrictEqual(["Pet"]);
@@ -377,18 +369,8 @@ test("heartbeat updates the heartbeat_at value", async (context) => {
.executeTakeFirst();
expect(
- BigInt(
- database.dialect === "sqlite"
- ? JSON.parse(rowAfterHeartbeat!.value!).heartbeat_at
- : // @ts-ignore
- rowAfterHeartbeat!.value!.heartbeat_at,
- ),
- ).toBeGreaterThan(
- database.dialect === "sqlite"
- ? JSON.parse(row!.value!).heartbeat_at
- : // @ts-ignore
- row!.value!.heartbeat_at,
- );
+ BigInt(rowAfterHeartbeat!.value!.heartbeat_at as number),
+ ).toBeGreaterThan(row!.value!.heartbeat_at as number);
await database.kill();
});
@@ -414,12 +396,7 @@ test("finalize updates lock table", async (context) => {
.select("value")
.executeTakeFirst();
- expect(
- database.dialect === "sqlite"
- ? JSON.parse(row!.value!).checkpoint
- : // @ts-ignore
- row!.value!.checkpoint,
- ).toStrictEqual(encodeCheckpoint(maxCheckpoint));
+ expect(row!.value!.checkpoint).toStrictEqual(encodeCheckpoint(maxCheckpoint));
await database.kill();
});
@@ -434,7 +411,6 @@ test("finalize delete reorg table rows", async (context) => {
await database.setup({ buildId: "abc" });
const realtimeIndexingStore = getRealtimeStore({
- dialect: context.databaseConfig.kind,
schema,
db: database.qb.user,
common: context.common,
@@ -522,18 +498,8 @@ test("kill releases the namespace lock", async (context) => {
.select("value")
.executeTakeFirst();
- expect(
- database.dialect === "sqlite"
- ? JSON.parse(row!.value!).is_locked
- : // @ts-ignore
- row!.value!.is_locked,
- ).toBe(1);
- expect(
- database.dialect === "sqlite"
- ? JSON.parse(rowAfterKill!.value!).is_locked
- : // @ts-ignore
- rowAfterKill!.value!.is_locked,
- ).toBe(0);
+ expect(row!.value!.is_locked).toBe(1);
+ expect(rowAfterKill!.value!.is_locked).toBe(0);
await databaseTwo.kill();
});
@@ -633,9 +599,7 @@ test("setup with the same build ID drops indexes", async (context) => {
const indexes = await getUserIndexNames(databaseTwo, "Person");
- expect(indexes).toStrictEqual([
- database.dialect === "sqlite" ? "sqlite_autoindex_Person_1" : "Person_pkey",
- ]);
+ expect(indexes).toStrictEqual(["Person_pkey"]);
await databaseTwo.kill();
});
@@ -643,10 +607,7 @@ test("setup with the same build ID drops indexes", async (context) => {
test("revert() deletes versions newer than the safe timestamp", async (context) => {
const { indexingStore, database, cleanup } = await setupDatabaseServices(
context,
- {
- schema,
- indexing: "realtime",
- },
+ { schema, indexing: "realtime" },
);
await indexingStore.create({
@@ -722,10 +683,7 @@ test("revert() deletes versions newer than the safe timestamp", async (context)
test("revert() updates versions with intermediate logs", async (context) => {
const { indexingStore, database, cleanup } = await setupDatabaseServices(
context,
- {
- schema,
- indexing: "realtime",
- },
+ { schema, indexing: "realtime" },
);
await indexingStore.create({
@@ -761,16 +719,12 @@ test("revert() updates versions with intermediate logs", async (context) => {
async function getUserTableNames(database: Database) {
const { rows } = await database.qb.internal.executeQuery<{ name: string }>(
- database.dialect === "sqlite"
- ? sql`SELECT name FROM sqlite_master WHERE type='table'`.compile(
- database.qb.internal,
- )
- : sql`
- SELECT table_name as name
- FROM information_schema.tables
- WHERE table_schema = '${sql.raw(database.namespace)}'
- AND table_type = 'BASE TABLE'
- `.compile(database.qb.internal),
+ sql`
+ SELECT table_name as name
+ FROM information_schema.tables
+ WHERE table_schema = '${sql.raw(database.namespace)}'
+ AND table_type = 'BASE TABLE'
+ `.compile(database.qb.internal),
);
return rows.map(({ name }) => name);
}
@@ -780,16 +734,12 @@ async function getUserIndexNames(database: Database, tableName: string) {
name: string;
tbl_name: string;
}>(
- database.dialect === "sqlite"
- ? sql`SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='${sql.raw(tableName)}'`.compile(
- database.qb.internal,
- )
- : sql`
- SELECT indexname as name
- FROM pg_indexes
- WHERE schemaname = '${sql.raw(database.namespace)}'
- AND tablename = '${sql.raw(tableName)}'
- `.compile(database.qb.internal),
+ sql`
+ SELECT indexname as name
+ FROM pg_indexes
+ WHERE schemaname = '${sql.raw(database.namespace)}'
+ AND tablename = '${sql.raw(tableName)}'
+ `.compile(database.qb.internal),
);
return rows.map((r) => r.name);
}
diff --git a/packages/core/src/database/index.ts b/packages/core/src/database/index.ts
index 2dec76b88..5d5754b0d 100644
--- a/packages/core/src/database/index.ts
+++ b/packages/core/src/database/index.ts
@@ -1,39 +1,36 @@
-import fs from "node:fs";
-import path from "node:path";
import type { Common } from "@/common/common.js";
import { NonRetryableError } from "@/common/errors.js";
import type { DatabaseConfig } from "@/config/database.js";
-import type { Schema } from "@/schema/common.js";
-import {
- getEnums,
- getTables,
- isEnumColumn,
- isJSONColumn,
- isListColumn,
- isManyColumn,
- isOneColumn,
- isOptionalColumn,
-} from "@/schema/utils.js";
+import { type Drizzle, type Schema, onchain } from "@/drizzle/index.js";
+import { generateTableSQL, getPrimaryKeyColumns } from "@/drizzle/sql.js";
import type { PonderSyncSchema } from "@/sync-store/encoding.js";
import {
moveLegacyTables,
migrationProvider as postgresMigrationProvider,
-} from "@/sync-store/postgres/migrations.js";
-import { migrationProvider as sqliteMigrationProvider } from "@/sync-store/sqlite/migrations.js";
-import type { UserTable } from "@/types/schema.js";
+} from "@/sync-store/migrations.js";
+import type { Status } from "@/sync/index.js";
import {
decodeCheckpoint,
encodeCheckpoint,
+ maxCheckpoint,
zeroCheckpoint,
} from "@/utils/checkpoint.js";
import { formatEta } from "@/utils/format.js";
-import { createPool, createReadonlyPool } from "@/utils/pg.js";
-import {
- type SqliteDatabase,
- createReadonlySqliteDatabase,
- createSqliteDatabase,
-} from "@/utils/sqlite.js";
+import { createPool } from "@/utils/pg.js";
+import { createPglite } from "@/utils/pglite.js";
import { wait } from "@/utils/wait.js";
+import type { PGlite } from "@electric-sql/pglite";
+import { getTableColumns, getTableName, is } from "drizzle-orm";
+import { drizzle as drizzleNodePg } from "drizzle-orm/node-postgres";
+import {
+ PgTable,
+ getTableConfig,
+ integer,
+ pgTable,
+ serial,
+ varchar,
+} from "drizzle-orm/pg-core";
+import { drizzle as drizzlePglite } from "drizzle-orm/pglite";
import {
Migrator,
PostgresDialect,
@@ -41,18 +38,19 @@ import {
WithSchemaPlugin,
sql,
} from "kysely";
-import { SqliteDialect } from "kysely";
+import { KyselyPGlite } from "kysely-pglite";
import type { Pool } from "pg";
import prometheus from "prom-client";
import { HeadlessKysely } from "./kysely.js";
export type Database<
- dialect extends "sqlite" | "postgres" = "sqlite" | "postgres",
+ dialect extends "pglite" | "postgres" = "pglite" | "postgres",
> = {
dialect: dialect;
namespace: string;
driver: Driver<dialect>;
qb: QueryBuilder;
+ drizzle: Drizzle;
migrateSync(): Promise<void>;
/**
* Prepare the database environment for a Ponder app.
@@ -70,9 +68,11 @@ export type Database<
* - Else, start
*/
setup(args: { buildId: string }): Promise<{ checkpoint: string }>;
+ createTriggers(): Promise<void>;
revert(args: { checkpoint: string }): Promise<void>;
finalize(args: { checkpoint: string }): Promise<void>;
- createIndexes(args: { schema: Schema }): Promise<void>;
+ // createIndexes(args: { schema: Schema }): Promise<void>;
+ complete(args: { checkpoint: string }): Promise<void>;
kill(): Promise<void>;
};
@@ -88,31 +88,30 @@ type PonderApp = {
type PonderInternalSchema = {
_ponder_meta: {
key: "status" | "app";
- value: string | null;
+ value: PonderApp | Status | null;
};
} & {
[_: `_ponder_reorg__${string}`]: {
- id: unknown;
operation_id: number;
checkpoint: string;
operation: 0 | 1 | 2;
};
-} & {
- [tableName: string]: UserTable;
};
-type Driver = dialect extends "sqlite"
- ? {
- user: SqliteDatabase;
- readonly: SqliteDatabase;
- sync: SqliteDatabase;
- }
- : {
- internal: Pool;
- user: Pool;
- readonly: Pool;
- sync: Pool;
- };
+type PGliteDriver = {
+ instance: PGlite;
+};
+
+type PostgresDriver = {
+ internal: Pool;
+ user: Pool;
+ readonly: Pool;
+ sync: Pool;
+};
+
+type Driver = dialect extends "pglite"
+ ? PGliteDriver
+ : PostgresDriver;
type QueryBuilder = {
/** For updating metadata and handling reorgs */
@@ -125,29 +124,11 @@ type QueryBuilder = {
sync: HeadlessKysely<PonderSyncSchema>;
};
-const scalarToSqliteType = {
- boolean: "integer",
- int: "integer",
- float: "real",
- string: "text",
- bigint: "varchar(79)",
- hex: "blob",
-} as const;
-
-const scalarToPostgresType = {
- boolean: "integer",
- int: "integer",
- float: "float8",
- string: "text",
- bigint: "numeric(78, 0)",
- hex: "bytea",
-} as const;
-
-export const createDatabase = (args: {
+export const createDatabase = async (args: {
common: Common;
schema: Schema;
databaseConfig: DatabaseConfig;
-}): Database => {
+}): Promise<Database> => {
let heartbeatInterval: NodeJS.Timeout | undefined;
let namespace: string;
@@ -155,32 +136,29 @@ export const createDatabase = (args: {
// Create drivers and orms
////////
- let dialect: Database["dialect"];
let driver: Database["driver"];
let qb: Database["qb"];
- if (args.databaseConfig.kind === "sqlite") {
- dialect = "sqlite";
- namespace = "public";
+ const dialect = args.databaseConfig.kind;
- const userFile = path.join(args.databaseConfig.directory, "public.db");
- const syncFile = path.join(args.databaseConfig.directory, "ponder_sync.db");
+ if (args.databaseConfig.kind === "pglite") {
+ namespace = "public";
driver = {
- user: createSqliteDatabase(userFile),
- readonly: createReadonlySqliteDatabase(userFile),
- sync: createSqliteDatabase(syncFile),
+ instance: createPglite(args.databaseConfig.options),
};
+ const kyselyDialect = new KyselyPGlite(driver.instance).dialect;
+
qb = {
internal: new HeadlessKysely({
name: "internal",
common: args.common,
- dialect: new SqliteDialect({ database: driver.user }),
+ dialect: kyselyDialect,
log(event) {
if (event.level === "query") {
- args.common.metrics.ponder_sqlite_query_total.inc({
- database: "internal",
+ args.common.metrics.ponder_postgres_query_total.inc({
+ pool: "internal",
});
}
},
@@ -188,11 +166,11 @@ export const createDatabase = (args: {
user: new HeadlessKysely({
name: "user",
common: args.common,
- dialect: new SqliteDialect({ database: driver.user }),
+ dialect: kyselyDialect,
log(event) {
if (event.level === "query") {
- args.common.metrics.ponder_sqlite_query_total.inc({
- database: "user",
+ args.common.metrics.ponder_postgres_query_total.inc({
+ pool: "user",
});
}
},
@@ -200,11 +178,11 @@ export const createDatabase = (args: {
readonly: new HeadlessKysely({
name: "readonly",
common: args.common,
- dialect: new SqliteDialect({ database: driver.readonly }),
+ dialect: kyselyDialect,
log(event) {
if (event.level === "query") {
- args.common.metrics.ponder_sqlite_query_total.inc({
- database: "readonly",
+ args.common.metrics.ponder_postgres_query_total.inc({
+ pool: "readonly",
});
}
},
@@ -212,18 +190,18 @@ export const createDatabase = (args: {
sync: new HeadlessKysely({
name: "sync",
common: args.common,
- dialect: new SqliteDialect({ database: driver.sync }),
+ dialect: kyselyDialect,
log(event) {
if (event.level === "query") {
- args.common.metrics.ponder_sqlite_query_total.inc({
- database: "sync",
+ args.common.metrics.ponder_postgres_query_total.inc({
+ pool: "sync",
});
}
},
+ plugins: [new WithSchemaPlugin("ponder_sync")],
}),
};
} else {
- dialect = "postgres";
namespace = args.databaseConfig.schema;
const internalMax = 2;
@@ -247,7 +225,7 @@ export const createDatabase = (args: {
application_name: `${namespace}_user`,
max: userMax,
}),
- readonly: createReadonlyPool({
+ readonly: createPool({
...args.databaseConfig.poolConfig,
application_name: `${namespace}_readonly`,
max: readonlyMax,
@@ -315,120 +293,92 @@ export const createDatabase = (args: {
};
}
- // Register metrics
- if (dialect === "sqlite") {
- args.common.metrics.registry.removeSingleMetric(
- "ponder_sqlite_query_total",
- );
- args.common.metrics.ponder_sqlite_query_total = new prometheus.Counter({
- name: "ponder_sqlite_query_total",
- help: "Number of queries submitted to the database",
- labelNames: ["database"] as const,
- registers: [args.common.metrics.registry],
- });
- } else {
- args.common.metrics.registry.removeSingleMetric(
- "ponder_postgres_query_total",
- );
- args.common.metrics.ponder_postgres_query_total = new prometheus.Counter({
- name: "ponder_postgres_query_total",
- help: "Total number of queries submitted to the database",
- labelNames: ["pool"] as const,
- registers: [args.common.metrics.registry],
- });
-
- args.common.metrics.registry.removeSingleMetric(
- "ponder_postgres_pool_connections",
- );
- args.common.metrics.ponder_postgres_pool_connections = new prometheus.Gauge(
- {
- name: "ponder_postgres_pool_connections",
- help: "Number of connections in the pool",
- labelNames: ["pool", "kind"] as const,
- registers: [args.common.metrics.registry],
- collect() {
- this.set(
- { pool: "internal", kind: "idle" },
- // @ts-ignore
- driver.internal.idleCount,
- );
- this.set(
- { pool: "internal", kind: "total" },
- // @ts-ignore
- driver.internal.totalCount,
- );
+ /**
+ * Reset the prototype so `table instanceof PgTable` evaluates to true.
+ */
+ for (const table of Object.values(args.schema)) {
+ // @ts-ignore
+ if (onchain in table) {
+ Object.setPrototypeOf(table, PgTable.prototype);
+ }
+ }
- this.set(
- { pool: "sync", kind: "idle" },
- (driver.sync as Pool).idleCount,
- );
- this.set(
- { pool: "sync", kind: "total" },
- (driver.sync as Pool).totalCount,
- );
+ const drizzle =
+ dialect === "pglite"
+ ? drizzlePglite((driver as PGliteDriver).instance, args.schema)
+ : drizzleNodePg((driver as PostgresDriver).user, args.schema);
- this.set(
- { pool: "user", kind: "idle" },
- (driver.user as Pool).idleCount,
- );
- this.set(
- { pool: "user", kind: "total" },
- (driver.user as Pool).totalCount,
- );
+ // if (fs.existsSync(args.common.options.migrationsDir)) {
+ // await migrate(drizzle, {
+ // migrationsFolder: args.common.options.migrationsDir,
+ // });
+ // }
- this.set(
- { pool: "readonly", kind: "idle" },
- (driver.readonly as Pool).idleCount,
- );
- this.set(
- { pool: "readonly", kind: "total" },
- (driver.readonly as Pool).totalCount,
- );
- },
- },
- );
+ // Register metrics
+ const d = driver as PostgresDriver;
+ args.common.metrics.registry.removeSingleMetric(
+ "ponder_postgres_pool_connections",
+ );
+ args.common.metrics.ponder_postgres_pool_connections = new prometheus.Gauge({
+ name: "ponder_postgres_pool_connections",
+ help: "Number of connections in the pool",
+ labelNames: ["pool", "kind"] as const,
+ registers: [args.common.metrics.registry],
+ collect() {
+ this.set({ pool: "internal", kind: "idle" }, d.internal.idleCount);
+ this.set({ pool: "internal", kind: "total" }, d.internal.totalCount);
+ this.set({ pool: "sync", kind: "idle" }, d.sync.idleCount);
+ this.set({ pool: "sync", kind: "total" }, d.sync.totalCount);
+ this.set({ pool: "user", kind: "idle" }, d.user.idleCount);
+ this.set({ pool: "user", kind: "total" }, d.user.totalCount);
+ this.set({ pool: "readonly", kind: "idle" }, d.readonly.idleCount);
+ this.set({ pool: "readonly", kind: "total" }, d.readonly.totalCount);
+ },
+ });
+
+ args.common.metrics.registry.removeSingleMetric(
+ "ponder_postgres_query_queue_size",
+ );
+ args.common.metrics.ponder_postgres_query_queue_size = new prometheus.Gauge({
+ name: "ponder_postgres_query_queue_size",
+ help: "Number of query requests waiting for an available connection",
+ labelNames: ["pool"] as const,
+ registers: [args.common.metrics.registry],
+ collect() {
+ this.set({ pool: "internal" }, d.internal.waitingCount);
+ this.set({ pool: "sync" }, d.sync.waitingCount);
+ this.set({ pool: "user" }, d.user.waitingCount);
+ this.set({ pool: "readonly" }, d.readonly.waitingCount);
+ },
+ });
- args.common.metrics.registry.removeSingleMetric(
- "ponder_postgres_query_queue_size",
- );
- args.common.metrics.ponder_postgres_query_queue_size = new prometheus.Gauge(
- {
- name: "ponder_postgres_query_queue_size",
- help: "Number of query requests waiting for an available connection",
- labelNames: ["pool"] as const,
- registers: [args.common.metrics.registry],
- collect() {
- // @ts-ignore
- this.set({ pool: "internal" }, driver.internal.waitingCount);
- this.set({ pool: "sync" }, (driver.sync as Pool).waitingCount);
- this.set({ pool: "user" }, (driver.user as Pool).waitingCount);
- this.set(
- { pool: "readonly" },
- (driver.readonly as Pool).waitingCount,
- );
- },
- },
- );
- }
////////
// Helpers
////////
- const encodeApp = (app: PonderApp) => {
- return dialect === "sqlite" ? JSON.stringify(app) : (app as any);
- };
-
+ /**
+ * Undo operations in user tables by using the `_ponder_reorg` metadata.
+ *
+ * Note: `_ponder_reorg` tables may contain operations that have not yet been applied to the
+ * underlying tables, but at most one such operation.
+ */
const revert = async ({
- tableName,
+ sqlTableName,
+ jsTableName,
checkpoint,
tx,
}: {
- tableName: string;
+ sqlTableName: string;
+ jsTableName: string;
checkpoint: string;
tx: Transaction;
}) => {
+ const primaryKeyColumns = getPrimaryKeyColumns(
+ args.schema[jsTableName] as PgTable,
+ );
+
const rows = await tx
- .deleteFrom(`_ponder_reorg__${tableName}`)
+ .deleteFrom(`_ponder_reorg__${sqlTableName}`)
.returningAll()
.where("checkpoint", ">", checkpoint)
.execute();
@@ -440,8 +390,15 @@ export const createDatabase = (args: {
if (log.operation === 0) {
// Create
await tx
+ // @ts-ignore
.deleteFrom(tableName)
- .where("id", "=", log.id as any)
+ .$call((qb) => {
+ for (const name of primaryKeyColumns) {
+ // @ts-ignore
+ qb = qb.where(name, "=", log[name]);
+ }
+ return qb;
+ })
.execute();
} else if (log.operation === 1) {
// Update
@@ -453,9 +410,16 @@ export const createDatabase = (args: {
// @ts-ignore
log.operation = undefined;
await tx
+ // @ts-ignore
.updateTable(tableName)
.set(log as any)
- .where("id", "=", log.id as any)
+ .$call((qb) => {
+ for (const name of primaryKeyColumns) {
+ // @ts-ignore
+ qb = qb.where(name, "=", log[name]);
+ }
+ return qb;
+ })
.execute();
} else {
// Delete
@@ -467,49 +431,59 @@ export const createDatabase = (args: {
// @ts-ignore
log.operation = undefined;
await tx
+ // @ts-ignore
.insertInto(tableName)
.values(log as any)
+ // @ts-ignore
+ .onConflict((oc) => oc.columns(primaryKeyColumns).doNothing())
.execute();
}
}
args.common.logger.info({
service: "database",
- msg: `Reverted ${rows.length} unfinalized operations from '${tableName}' table`,
+ msg: `Reverted ${rows.length} unfinalized operations from '${sqlTableName}' table`,
});
};
+ const getJsTableNames = () => {
+ const tableNames = Object.entries(args.schema)
+ .filter(([, table]) => is(table, PgTable))
+ .map(([tableName]) => tableName);
+
+ return tableNames;
+ };
+
+ const getSQLTableNames = () => {
+ const tableNames = Object.values(args.schema)
+ .filter((table): table is PgTable => is(table, PgTable))
+ .map((table) => getTableConfig(table).name);
+
+ return tableNames;
+ };
+
return {
dialect,
namespace,
driver,
qb,
+ drizzle,
async migrateSync() {
await qb.sync.wrap({ method: "migrateSyncStore" }, async () => {
// TODO: Probably remove this at 1.0 to speed up startup time.
// TODO(kevin) is the `WithSchemaPlugin` going to break this?
- if (dialect === "postgres") {
- await moveLegacyTables({
- common: args.common,
- db: qb.internal,
- newSchemaName: "ponder_sync",
- });
- }
-
- let migrator: Migrator;
+ await moveLegacyTables({
+ common: args.common,
+ // @ts-expect-error
+ db: qb.internal,
+ newSchemaName: "ponder_sync",
+ });
- if (dialect === "sqlite") {
- migrator = new Migrator({
- db: qb.sync as any,
- provider: sqliteMigrationProvider,
- });
- } else {
- migrator = new Migrator({
- db: qb.sync as any,
- provider: postgresMigrationProvider,
- migrationTableSchema: "ponder_sync",
- });
- }
+ const migrator = new Migrator({
+ db: qb.sync as any,
+ provider: postgresMigrationProvider,
+ migrationTableSchema: "ponder_sync",
+ });
const { error } = await migrator.migrateToLatest();
if (error) throw error;
@@ -524,136 +498,66 @@ export const createDatabase = (args: {
// v0.6 migration
- if (args.databaseConfig.kind === "sqlite") {
- const ponderFile = path.join(
- args.databaseConfig.directory,
- "ponder.db",
- );
- if (fs.existsSync(ponderFile)) {
- const _driver = createSqliteDatabase(ponderFile);
- const _orm = new HeadlessKysely({
- name: "user",
- common: args.common,
- dialect: new SqliteDialect({ database: _driver }),
- });
- await qb.internal.wrap({ method: "setup" }, async () => {
- const namespaceCount = await _orm
- .selectFrom("namespace_lock")
- .select(sql`count(*)`.as("count"))
- .executeTakeFirst();
-
- const tableNames = await _orm
- .selectFrom("namespace_lock")
- .select("schema")
- .where("namespace", "=", namespace)
- .executeTakeFirst()
- .then((schema) =>
- schema === undefined
- ? undefined
- : Object.keys(JSON.parse(schema.schema).tables),
- );
- if (tableNames) {
- for (const tableName of tableNames) {
- await qb.internal.schema
- .dropTable(tableName)
- .ifExists()
- .execute();
- }
+ const hasPonderSchema = await qb.internal
+ // @ts-ignore
+ .selectFrom("information_schema.schemata")
+ // @ts-ignore
+ .select("schema_name")
+ // @ts-ignore
+ .where("schema_name", "=", "ponder")
+ .executeTakeFirst()
+ .then((schema) => schema?.schema_name === "ponder");
+
+ if (hasPonderSchema) {
+ await qb.internal.wrap({ method: "setup" }, async () => {
+ const namespaceCount = await qb.internal
+ .withSchema("ponder")
+ // @ts-ignore
+ .selectFrom("namespace_lock")
+ .select(sql`count(*)`.as("count"))
+ .executeTakeFirst();
- await _orm
- .deleteFrom("namespace_lock")
- .where("namespace", "=", namespace)
+ const tableNames = await qb.internal
+ .withSchema("ponder")
+ // @ts-ignore
+ .selectFrom("namespace_lock")
+ // @ts-ignore
+ .select("schema")
+ // @ts-ignore
+ .where("namespace", "=", namespace)
+ .executeTakeFirst()
+ .then((schema: any | undefined) =>
+ schema === undefined
+ ? undefined
+ : Object.keys(schema.schema.tables),
+ );
+ if (tableNames) {
+ for (const tableName of tableNames) {
+ await qb.internal.schema
+ .dropTable(tableName)
+ .ifExists()
+ .cascade()
.execute();
-
- await _orm.destroy();
- _driver.close();
-
- if (namespaceCount!.count === 1) {
- fs.rmSync(
- // @ts-ignore
- path.join(args.databaseConfig.directory, "ponder.db"),
- {
- force: true,
- },
- );
- fs.rmSync(
- // @ts-ignore
- path.join(args.databaseConfig.directory, "ponder.db-shm"),
- {
- force: true,
- },
- );
- fs.rmSync(
- // @ts-ignore
- path.join(args.databaseConfig.directory, "ponder.db-wal"),
- {
- force: true,
- },
- );
- args.common.logger.debug({
- service: "database",
- msg: `Removed '.ponder/sqlite/ponder.db' file`,
- });
- }
}
- });
- }
- } else {
- const hasPonderSchema = await qb.internal
- .selectFrom("information_schema.schemata")
- .select("schema_name")
- .where("schema_name", "=", "ponder")
- .executeTakeFirst()
- .then((schema) => schema?.schema_name === "ponder");
-
- if (hasPonderSchema) {
- await qb.internal.wrap({ method: "setup" }, async () => {
- const namespaceCount = await qb.internal
- .withSchema("ponder")
- .selectFrom("namespace_lock")
- .select(sql`count(*)`.as("count"))
- .executeTakeFirst();
- const tableNames = await qb.internal
+ await qb.internal
.withSchema("ponder")
- .selectFrom("namespace_lock")
- .select("schema")
+ // @ts-ignore
+ .deleteFrom("namespace_lock")
+ // @ts-ignore
.where("namespace", "=", namespace)
- .executeTakeFirst()
- .then((schema: any | undefined) =>
- schema === undefined
- ? undefined
- : Object.keys(schema.schema.tables),
- );
- if (tableNames) {
- for (const tableName of tableNames) {
- await qb.internal.schema
- .dropTable(tableName)
- .ifExists()
- .cascade()
- .execute();
- }
-
- await qb.internal
- .withSchema("ponder")
- .deleteFrom("namespace_lock")
- .where("namespace", "=", namespace)
- .execute();
+ .execute();
- if (namespaceCount!.count === 1) {
- await qb.internal.schema
- .dropSchema("ponder")
- .cascade()
- .execute();
+ if (namespaceCount!.count === 1) {
+ await qb.internal.schema.dropSchema("ponder").cascade().execute();
- args.common.logger.debug({
- service: "database",
- msg: `Removed 'ponder' schema`,
- });
- }
+ args.common.logger.debug({
+ service: "database",
+ msg: `Removed 'ponder' schema`,
+ });
}
- });
- }
+ }
+ });
}
await qb.internal.wrap({ method: "setup" }, async () => {
@@ -681,147 +585,46 @@ export const createDatabase = (args: {
////////
const createUserTables = async () => {
- for (const [tableName, table] of Object.entries(
- getTables(args.schema),
- )) {
- await tx.schema
- .createTable(tableName)
- .$call((builder) => {
- for (const [columnName, column] of Object.entries(
- table.table,
- )) {
- if (isOneColumn(column)) continue;
- if (isManyColumn(column)) continue;
- if (isEnumColumn(column)) {
- // Handle enum types
- builder = builder.addColumn(
- columnName,
- "text",
- (col) => {
- if (isOptionalColumn(column) === false)
- col = col.notNull();
- if (isListColumn(column) === false) {
- col = col.check(
- sql`${sql.ref(columnName)} in (${sql.join(
- getEnums(args.schema)[column[" enum"]]!.map(
- (v) => sql.lit(v),
- ),
- )})`,
- );
- }
- return col;
- },
- );
- } else if (isListColumn(column)) {
- // Handle scalar list columns
- builder = builder.addColumn(
- columnName,
- "text",
- (col) => {
- if (isOptionalColumn(column) === false)
- col = col.notNull();
- return col;
- },
- );
- } else if (isJSONColumn(column)) {
- // Handle json columns
- builder = builder.addColumn(
- columnName,
- "jsonb",
- (col) => {
- if (isOptionalColumn(column) === false)
- col = col.notNull();
- return col;
- },
- );
- } else {
- // Non-list base columns
- builder = builder.addColumn(
- columnName,
- (dialect === "sqlite"
- ? scalarToSqliteType
- : scalarToPostgresType)[column[" scalar"]],
- (col) => {
- if (isOptionalColumn(column) === false)
- col = col.notNull();
- if (columnName === "id") col = col.primaryKey();
- return col;
- },
- );
- }
- }
-
- return builder;
- })
- .execute()
- .catch((_error) => {
- const error = _error as Error;
- if (!error.message.includes("already exists")) throw error;
- throw new NonRetryableError(
- `Unable to create table '${namespace}'.'${tableName}' because a table with that name already exists. Is there another application using the '${namespace}' database schema?`,
- );
- });
+ for (const table of Object.values(args.schema)) {
+ if (is(table, PgTable)) {
+ await sql
+ .raw(generateTableSQL({ table, namespace }))
+ .execute(tx);
- args.common.logger.info({
- service: "database",
- msg: `Created table '${namespace}'.'${tableName}'`,
- });
+ args.common.logger.info({
+ service: "database",
+ msg: `Created table '${namespace}'.'${getTableName(table)}'`,
+ });
+ }
}
};
const createReorgTables = async () => {
- for (const [tableName, table] of Object.entries(
- getTables(args.schema),
- )) {
- await tx.schema
- .createTable(`_ponder_reorg__${tableName}`)
- .$call((builder) => {
- for (const [columnName, column] of Object.entries(
- table.table,
- )) {
- if (isOneColumn(column)) continue;
- if (isManyColumn(column)) continue;
- if (isEnumColumn(column)) {
- // Handle enum types
- // Omit the CHECK constraint because its included in the user table
- builder = builder.addColumn(columnName, "text");
- } else if (isListColumn(column)) {
- // Handle scalar list columns
- builder = builder.addColumn(columnName, "text");
- } else if (isJSONColumn(column)) {
- // Handle json columns
- builder = builder.addColumn(columnName, "jsonb");
- } else {
- // Non-list base columns
- builder = builder.addColumn(
- columnName,
- (dialect === "sqlite"
- ? scalarToSqliteType
- : scalarToPostgresType)[column[" scalar"]],
- (col) => {
- if (columnName === "id") col = col.notNull();
- return col;
- },
- );
- }
- }
-
- builder = builder
- .addColumn(
- "operation_id",
- dialect === "sqlite" ? "integer" : "serial",
- (col) => col.notNull().primaryKey(),
- )
- .addColumn("checkpoint", "varchar(75)", (col) =>
- col.notNull(),
- )
- .addColumn("operation", "integer", (col) =>
- col.notNull(),
- );
-
- return builder;
- })
- .execute();
+ for (const table of Object.values(args.schema)) {
+ if (is(table, PgTable)) {
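+          // Reorg tables mirror the user table's columns and add bookkeeping columns
+          // (operation_id, operation, checkpoint) used to undo unfinalized operations.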
+ const extraColumns = Object.values(
+ pgTable("", {
+ operation_id: serial("operation_id")
+ .notNull()
+ .primaryKey(),
+ operation: integer("operation").notNull(),
+ checkpoint: varchar("checkpoint", {
+ length: 75,
+ }).notNull(),
+ }),
+ );
+
+ await sql
+ .raw(
+ generateTableSQL({
+ table,
+ namespace,
+ extraColumns,
+ namePrefix: "_ponder_reorg__",
+ }),
+ )
+ .execute(tx);
+ }
}
};
@@ -831,12 +634,9 @@ export const createDatabase = (args: {
.select("value")
.executeTakeFirst();
- const previousApp: PonderApp | undefined =
- row === undefined
- ? undefined
- : dialect === "sqlite"
- ? JSON.parse(row.value!)
- : row.value;
+ const previousApp = (row?.value ?? undefined) as
+ | PonderApp
+ | undefined;
const newApp = {
is_locked: 1,
@@ -844,7 +644,7 @@ export const createDatabase = (args: {
heartbeat_at: Date.now(),
build_id: buildId,
checkpoint: encodeCheckpoint(zeroCheckpoint),
- table_names: Object.keys(getTables(args.schema)),
+ table_names: getSQLTableNames(),
} satisfies PonderApp;
/**
@@ -860,7 +660,7 @@ export const createDatabase = (args: {
.execute();
await tx
.insertInto("_ponder_meta")
- .values({ key: "app", value: encodeApp(newApp) })
+ .values({ key: "app", value: newApp })
.execute();
args.common.logger.debug({
service: "database",
@@ -908,12 +708,12 @@ export const createDatabase = (args: {
await tx
.updateTable("_ponder_meta")
.set({
- value: encodeApp({
+ value: {
...previousApp,
is_locked: 1,
is_dev: 0,
heartbeat_at: Date.now(),
- }),
+ },
})
.where("key", "=", "app")
.execute();
@@ -928,22 +728,35 @@ export const createDatabase = (args: {
});
// Remove indexes
- for (const [tableName, table] of Object.entries(
- getTables(args.schema),
- )) {
- if (table.constraints === undefined) continue;
-
- for (const name of Object.keys(table.constraints)) {
- await tx.schema
- .dropIndex(`${tableName}_${name}`)
- .ifExists()
- .execute();
-
- args.common.logger.info({
- service: "database",
- msg: `Dropped index '${tableName}_${name}' in schema '${namespace}'`,
- });
- }
+ // for (const [tableName, table] of Object.entries(
+ // getTables(args.schema),
+ // )) {
+ // if (table.constraints === undefined) continue;
+
+ // for (const name of Object.keys(table.constraints)) {
+ // await tx.schema
+ // .dropIndex(`${tableName}_${name}`)
+ // .ifExists()
+ // .execute();
+
+ // args.common.logger.info({
+ // service: "database",
+ // msg: `Dropped index '${tableName}_${name}' in schema '${namespace}'`,
+ // });
+ // }
+ // }
+
+ // Remove triggers
+
+ const sqlTableNames = getSQLTableNames();
+ const jsTableNames = getJsTableNames();
+
+ for (const tableName of sqlTableNames) {
+ await sql
+ .ref(
+ `DROP TRIGGER IF EXISTS "${tableName}_reorg" ON "${namespace}"."${tableName}"`,
+ )
+ .execute(tx);
}
// Revert unfinalized data
@@ -956,9 +769,10 @@ export const createDatabase = (args: {
msg: `Reverting operations after finalized checkpoint (timestamp=${blockTimestamp} chainId=${chainId} block=${blockNumber})`,
});
- for (const tableName of Object.keys(getTables(args.schema))) {
+ for (let i = 0; i < sqlTableNames.length; i++) {
await revert({
- tableName,
+ sqlTableName: sqlTableNames[i]!,
+ jsTableName: jsTableNames[i]!,
checkpoint: previousApp.checkpoint,
tx,
});
@@ -978,7 +792,7 @@ export const createDatabase = (args: {
await tx
.updateTable("_ponder_meta")
- .set({ value: encodeApp(newApp) })
+ .set({ value: newApp })
.where("key", "=", "app")
.execute();
@@ -995,7 +809,11 @@ export const createDatabase = (args: {
.ifExists()
.execute();
- await tx.schema.dropTable(tableName).ifExists().execute();
+ await tx.schema
+ .dropTable(tableName)
+ .ifExists()
+ .cascade()
+ .execute();
args.common.logger.debug({
service: "database",
@@ -1043,10 +861,7 @@ export const createDatabase = (args: {
.updateTable("_ponder_meta")
.where("key", "=", "app")
.set({
- value:
- dialect === "sqlite"
- ? sql`json_set(value, '$.heartbeat_at', ${heartbeat})`
- : sql`jsonb_set(value, '{heartbeat_at}', ${heartbeat})`,
+ value: sql`jsonb_set(value, '{heartbeat_at}', ${heartbeat})`,
})
.execute();
@@ -1056,7 +871,6 @@ export const createDatabase = (args: {
});
} catch (err) {
const error = err as Error;
- console.log(error);
args.common.logger.error({
service: "database",
msg: `Failed to update heartbeat timestamp, retrying in ${formatEta(
@@ -1069,69 +883,123 @@ export const createDatabase = (args: {
return { checkpoint: result.checkpoint };
},
- async createIndexes() {
- await Promise.all(
- Object.entries(getTables(args.schema)).flatMap(([tableName, table]) => {
- if (table.constraints === undefined) return [];
-
- return Object.entries(table.constraints).map(
- async ([name, index]) => {
- await qb.internal.wrap({ method: "createIndexes" }, async () => {
- const indexName = `${tableName}_${name}`;
-
- const indexColumn = index[" column"];
- const order = index[" order"];
- const nulls = index[" nulls"];
-
- if (dialect === "sqlite") {
- const columns = Array.isArray(indexColumn)
- ? indexColumn.map((ic) => `"${ic}"`).join(", ")
- : `"${indexColumn}" ${order === "asc" ? "ASC" : order === "desc" ? "DESC" : ""}`;
-
- await qb.internal.executeQuery(
- sql`CREATE INDEX ${sql.ref(indexName)} ON ${sql.table(
- tableName,
- )} (${sql.raw(columns)})`.compile(qb.internal),
- );
- } else {
- const columns = Array.isArray(indexColumn)
- ? indexColumn.map((ic) => `"${ic}"`).join(", ")
- : `"${indexColumn}" ${order === "asc" ? "ASC" : order === "desc" ? "DESC" : ""} ${
- nulls === "first"
- ? "NULLS FIRST"
- : nulls === "last"
- ? "NULLS LAST"
- : ""
- }`;
-
- await qb.internal.executeQuery(
- sql`CREATE INDEX ${sql.ref(indexName)} ON ${sql.table(
- `${namespace}.${tableName}`,
- )} (${sql.raw(columns)})`.compile(qb.internal),
- );
- }
- });
-
- args.common.logger.info({
- service: "database",
- msg: `Created index '${tableName}_${name}' on columns (${
- Array.isArray(index[" column"])
- ? index[" column"].join(", ")
- : index[" column"]
- }) in schema '${namespace}'`,
- });
- },
+ // async createIndexes() {
+ // await Promise.all(
+ // Object.entries(getTables(args.schema)).flatMap(([tableName, table]) => {
+ // if (table.constraints === undefined) return [];
+
+ // return Object.entries(table.constraints).map(
+ // async ([name, index]) => {
+ // await qb.internal.wrap({ method: "createIndexes" }, async () => {
+ // const indexName = `${tableName}_${name}`;
+
+ // const indexColumn = index[" column"];
+ // const order = index[" order"];
+ // const nulls = index[" nulls"];
+
+ // if (dialect === "sqlite") {
+ // const columns = Array.isArray(indexColumn)
+ // ? indexColumn.map((ic) => `"${ic}"`).join(", ")
+ // : `"${indexColumn}" ${order === "asc" ? "ASC" : order === "desc" ? "DESC" : ""}`;
+
+ // await qb.internal.executeQuery(
+ // sql`CREATE INDEX ${sql.ref(indexName)} ON ${sql.table(
+ // tableName,
+ // )} (${sql.raw(columns)})`.compile(qb.internal),
+ // );
+ // } else {
+ // const columns = Array.isArray(indexColumn)
+ // ? indexColumn.map((ic) => `"${ic}"`).join(", ")
+ // : `"${indexColumn}" ${order === "asc" ? "ASC" : order === "desc" ? "DESC" : ""} ${
+ // nulls === "first"
+ // ? "NULLS FIRST"
+ // : nulls === "last"
+ // ? "NULLS LAST"
+ // : ""
+ // }`;
+
+ // await qb.internal.executeQuery(
+ // sql`CREATE INDEX ${sql.ref(indexName)} ON ${sql.table(
+ // `${namespace}.${tableName}`,
+ // )} (${sql.raw(columns)})`.compile(qb.internal),
+ // );
+ // }
+ // });
+
+ // args.common.logger.info({
+ // service: "database",
+ // msg: `Created index '${tableName}_${name}' on columns (${
+ // Array.isArray(index[" column"])
+ // ? index[" column"].join(", ")
+ // : index[" column"]
+ // }) in schema '${namespace}'`,
+ // });
+ // },
+ // );
+ // }),
+ // );
+ // },
+ async createTriggers() {
+ await qb.internal.wrap({ method: "createTriggers" }, async () => {
+ const sqlTableNames = getSQLTableNames();
+ const jsTableNames = getJsTableNames();
+
+ for (let i = 0; i < sqlTableNames.length; i++) {
+ const jsTableName = jsTableNames[i]!;
+ const sqlTableName = sqlTableNames[i]!;
+
+ const columns = getTableColumns(args.schema[jsTableName]! as PgTable);
+
+ const columnNames = Object.values(columns).map(
+ (column) => `"${column.name}"`,
);
- }),
- );
+
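+        // The trigger records every row mutation in "_ponder_reorg__<table>": INSERT stores the
+        // NEW row with operation 0, UPDATE and DELETE store the OLD row with operations 1 and 2.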
+ await sql
+ .raw(`
+CREATE OR REPLACE FUNCTION ${sqlTableName}_reorg_operation()
+RETURNS TRIGGER AS $$
+BEGIN
+ IF TG_OP = 'INSERT' THEN
+ INSERT INTO "_ponder_reorg__${sqlTableName}" (${columnNames.join(",")}, operation, checkpoint)
+ VALUES (${columnNames.map((name) => `NEW.${name}`).join(",")}, 0, '${encodeCheckpoint(maxCheckpoint)}');
+ ELSIF TG_OP = 'UPDATE' THEN
+ INSERT INTO "_ponder_reorg__${sqlTableName}" (${columnNames.join(",")}, operation, checkpoint)
+ VALUES (${columnNames.map((name) => `OLD.${name}`).join(",")}, 1, '${encodeCheckpoint(maxCheckpoint)}');
+ ELSIF TG_OP = 'DELETE' THEN
+ INSERT INTO "_ponder_reorg__${sqlTableName}" (${columnNames.join(",")}, operation, checkpoint)
+ VALUES (${columnNames.map((name) => `OLD.${name}`).join(",")}, 2, '${encodeCheckpoint(maxCheckpoint)}');
+ END IF;
+ RETURN NULL;
+END;
+$$ LANGUAGE plpgsql
+ `)
+ .execute(qb.internal);
+
+ await sql
+ .raw(`
+CREATE TRIGGER "${sqlTableName}_reorg"
+AFTER INSERT OR UPDATE OR DELETE ON "${namespace}"."${sqlTableName}"
+FOR EACH ROW EXECUTE FUNCTION ${sqlTableName}_reorg_operation();
+ `)
+ .execute(qb.internal);
+ }
+ });
},
async revert({ checkpoint }) {
+ const sqlTableNames = getSQLTableNames();
+ const jsTableNames = getJsTableNames();
+
await qb.internal.wrap({ method: "revert" }, () =>
Promise.all(
- Object.keys(getTables(args.schema)).map((tableName) =>
- qb.internal
- .transaction()
- .execute((tx) => revert({ tableName, checkpoint, tx })),
+ sqlTableNames.map((sqlTableName, i) =>
+ qb.internal.transaction().execute((tx) =>
+ revert({
+ sqlTableName,
+ jsTableName: jsTableNames[i]!,
+ checkpoint,
+ tx,
+ }),
+ ),
),
),
);
@@ -1142,15 +1010,14 @@ export const createDatabase = (args: {
.updateTable("_ponder_meta")
.where("key", "=", "app")
.set({
- value:
- dialect === "sqlite"
- ? sql`json_set(value, '$.checkpoint', ${checkpoint})`
- : sql`jsonb_set(value, '{checkpoint}', to_jsonb(${checkpoint}::varchar(75)))`,
+ value: sql`jsonb_set(value, '{checkpoint}', to_jsonb(${checkpoint}::varchar(75)))`,
})
.execute();
+ const tableNames = getSQLTableNames();
+
await Promise.all(
- Object.keys(getTables(args.schema)).map((tableName) =>
+ tableNames.map((tableName) =>
qb.internal
.deleteFrom(`_ponder_reorg__${tableName}`)
.where("checkpoint", "<=", checkpoint)
@@ -1166,6 +1033,21 @@ export const createDatabase = (args: {
msg: `Updated finalized checkpoint to (timestamp=${decoded.blockTimestamp} chainId=${decoded.chainId} block=${decoded.blockNumber})`,
});
},
+ async complete({ checkpoint }) {
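+      // Stamp reorg-log rows still at the max checkpoint (as written by the triggers) with the
+      // provided checkpoint, so they can later be pruned once that checkpoint is finalized.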
+ const tableNames = getSQLTableNames();
+
+ await Promise.all(
+ tableNames.map((tableName) =>
+ qb.internal.wrap({ method: "complete" }, async () => {
+ await qb.internal
+ .updateTable(`_ponder_reorg__${tableName}`)
+ .set({ checkpoint })
+ .where("checkpoint", "=", encodeCheckpoint(maxCheckpoint))
+ .execute();
+ }),
+ ),
+ );
+ },
async kill() {
clearInterval(heartbeatInterval);
@@ -1173,10 +1055,7 @@ export const createDatabase = (args: {
.updateTable("_ponder_meta")
.where("key", "=", "app")
.set({
- value:
- dialect === "sqlite"
- ? sql`json_set(value, '$.is_locked', 0)`
- : sql`jsonb_set(value, '{is_locked}', to_jsonb(0))`,
+ value: sql`jsonb_set(value, '{is_locked}', to_jsonb(0))`,
})
.execute();
@@ -1190,22 +1069,17 @@ export const createDatabase = (args: {
await qb.readonly.destroy();
await qb.sync.destroy();
- if (dialect === "sqlite") {
- // @ts-ignore
- driver.user.close();
- // @ts-ignore
- driver.readonly.close();
- // @ts-ignore
- driver.sync.close();
- } else {
- // @ts-ignore
- await driver.internal.end();
- // @ts-ignore
- await driver.user.end();
- // @ts-ignore
- await driver.readonly.end();
- // @ts-ignore
- await driver.sync.end();
+ if (dialect === "pglite") {
+ const d = driver as PGliteDriver;
+ await d.instance.close();
+ }
+
+ if (dialect === "postgres") {
+ const d = driver as PostgresDriver;
+ await d.internal.end();
+ await d.user.end();
+ await d.readonly.end();
+ await d.sync.end();
}
args.common.logger.debug({
diff --git a/packages/core/src/drizzle/bigint.ts b/packages/core/src/drizzle/bigint.ts
deleted file mode 100644
index 0e499df67..000000000
--- a/packages/core/src/drizzle/bigint.ts
+++ /dev/null
@@ -1,35 +0,0 @@
-import { decodeToBigInt, encodeAsText } from "@/utils/encoding.js";
-import { entityKind } from "drizzle-orm";
-import {
- type AnySQLiteTable,
- SQLiteColumn,
- SQLiteColumnBuilder,
-} from "drizzle-orm/sqlite-core";
-
-export class SQLiteBigintBuilder extends SQLiteColumnBuilder {
- static readonly [entityKind]: string = "SQliteBigintBuilder";
-
- constructor(columnName: string) {
- super(columnName, "string", "SQLiteBigint");
- }
-
- build(table: AnySQLiteTable) {
- return new SQLiteBigint(table, this.config);
- }
-}
-
-export class SQLiteBigint extends SQLiteColumn {
- static readonly [entityKind]: string = "SQLiteBigint";
-
- getSQLType(): string {
- return "varchar(79)";
- }
-
- override mapFromDriverValue(value: string) {
- return decodeToBigInt(value);
- }
-
- override mapToDriverValue(value: bigint): string {
- return encodeAsText(value as bigint);
- }
-}
diff --git a/packages/core/src/drizzle/db.ts b/packages/core/src/drizzle/db.ts
index 76b87d550..95f471e27 100644
--- a/packages/core/src/drizzle/db.ts
+++ b/packages/core/src/drizzle/db.ts
@@ -1,29 +1,313 @@
-import type { Column, SQLWrapper, SelectedFields, Table } from "drizzle-orm";
-import type { SelectBuilder } from "./select.js";
-
-export type DrizzleDb = {
- select(): SelectBuilder<undefined, "async", void>;
- select<TSelection extends SelectedFields<Column, Table>>(
-   fields: TSelection,
- ): SelectBuilder<TSelection, "async", void>;
- select(
-   fields?: SelectedFields<Column, Table>,
- ): SelectBuilder<SelectedFields<Column, Table> | undefined, "async", void>;
+import { type BuildColumns, type ColumnBuilderBase, Table } from "drizzle-orm";
+import {
+ type AnyPgColumn,
+ type PrimaryKeyBuilder as DrizzlePrimaryKeyBuilder,
+ type ExtraConfigColumn,
+ type PgColumnBuilder,
+ type PgColumnBuilderBase,
+ type PgNumericBuilderInitial,
+ PgSchema,
+ PgTable,
+ type PgTableExtraConfig,
+ type PgTableWithColumns,
+ type TableConfig,
+ primaryKey as drizzlePrimaryKey,
+ numeric,
+} from "drizzle-orm/pg-core";
+import {
+ type PgColumnsBuilders as _PgColumnsBuilders,
+ getPgColumnBuilders,
+} from "drizzle-orm/pg-core/columns/all";
+import { PgHexBuilder, type PgHexBuilderInitial } from "./hex.js";
+import { onchain } from "./index.js";
+
+type $Type<T, TType> = T & {
+ _: {
+ $type: TType;
+ };
+};
+
+// @ts-ignore
+export function evmHex(): PgHexBuilderInitial<"">;
+export function evmHex<name extends string>(
+ columnName: name,
+): PgHexBuilderInitial<name>;
+export function evmHex(columnName?: string) {
+ return new PgHexBuilder(columnName ?? "");
+}
+
+// @ts-ignore
+export function evmBigint(): $Type<PgNumericBuilderInitial<"">, bigint>;
+export function evmBigint<name extends string>(
+ columnName: name,
+): $Type<PgNumericBuilderInitial<name>, bigint>;
+export function evmBigint(columnName?: string) {
+ return numeric(columnName ?? "", { precision: 78 });
+}
+
+export {
+ sql,
+ eq,
+ gt,
+ gte,
+ lt,
+ lte,
+ ne,
+ isNull,
+ isNotNull,
+ inArray,
+ notInArray,
+ exists,
+ notExists,
+ between,
+ notBetween,
+ like,
+ notIlike,
+ not,
+ asc,
+ desc,
+ and,
+ or,
+ count,
+ countDistinct,
+ avg,
+ avgDistinct,
+ sum,
+ sumDistinct,
+ max,
+ min,
+ relations,
+} from "drizzle-orm";
+
+export {
+ bigserial,
+ boolean,
+ char,
+ cidr,
+ date,
+ doublePrecision,
+ pgEnum,
+ inet,
+ integer,
+ interval,
+ json,
+ jsonb,
+ line,
+ macaddr,
+ macaddr8,
+ numeric,
+ point,
+ real,
+ serial,
+ smallint,
+ smallserial,
+ text,
+ time,
+ timestamp,
+ uuid,
+ varchar,
+ index,
+ uniqueIndex,
+ alias,
+ foreignKey,
+ union,
+ unionAll,
+ intersect,
+ intersectAll,
+ except,
+ exceptAll,
+} from "drizzle-orm/pg-core";
+
+export type PrimaryKeyBuilder<columnNames extends string = string> =
+ DrizzlePrimaryKeyBuilder & { columnNames: columnNames };
+
+export const primaryKey = <
+ tableName extends string,
+ column extends AnyPgColumn<{ tableName: tableName }> & { " name": string },
+ columns extends (AnyPgColumn<{ tableName: tableName }> & {
+ " name": string;
+ })[],
+>({
+ name,
+ columns,
+}: { name?: string; columns: [column, ...columns] }) =>
+ drizzlePrimaryKey({ name, columns }) as PrimaryKeyBuilder<
+ column[" name"] | columns[number][" name"]
+ >;
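+
+// Illustrative sketch (hypothetical columns): declare a composite primary key from a table's
+// extra config, e.g. `(table) => ({ pk: primaryKey({ columns: [table.owner, table.spender] }) })`.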
+
+export type OnchainTable<
+ T extends TableConfig & {
+ extra: PgTableExtraConfig | undefined;
+ } = TableConfig & { extra: PgTableExtraConfig | undefined },
+> = PgTable<T> & {
+ [Key in keyof T["columns"]]: T["columns"][Key];
+} & { [onchain]: true };
+
+export type OffchainTable<T extends TableConfig = TableConfig> = PgTable<T> & {
+ [Key in keyof T["columns"]]: T["columns"][Key];
+};
+
+type BuildExtraConfigColumns<
+ columns extends Record<string, ColumnBuilderBase>,
+> = {
+ [key in keyof columns]: ExtraConfigColumn & {
+ " name": key;
+ };
+};
+
+type PgColumnsBuilders = _PgColumnsBuilders & {
+ evmHex: typeof evmHex;
+ evmBigint: typeof evmBigint;
+};
+
+// TODO(kyle) add objects at runtime
+
+/**
+ * Create an onchain table
+ *
+ * @returns The onchain table.
+ */
+export const onchainTable = <
+ name extends string,
+ columns extends Record<string, PgColumnBuilderBase>,
+ extra extends PgTableExtraConfig | undefined = undefined,
+>(
+ name: name,
+ columns: columns | ((columnTypes: PgColumnsBuilders) => columns),
+ extraConfig?: (self: BuildExtraConfigColumns<columns>) => extra,
+): OnchainTable<{
+ name: name;
+ schema: undefined;
+ columns: BuildColumns<name, columns, "pg">;
+ extra: extra;
+ dialect: "pg";
+}> => {
+ const table = pgTableWithSchema(name, columns, extraConfig as any, undefined);
+
/**
- * Execute a raw read-only SQL query..
- *
- * @example
- * import { ponder } from "@/generated";
- * import { sql } from "@ponder/core";
- *
- * ponder.get("/", async (c) => {
- * const result = await c.db.execute(sql`SELECT * from "Accounts"`);
- * return c.json(result);
- * });
- *
- * @see https://orm.drizzle.team/docs/sql
+ * This trick is used to make `table instanceof PgTable` evaluate to false.
+ * This is necessary to avoid generating migrations for onchain tables.
*/
- execute: >(
- query: SQLWrapper,
- ) => Promise;
+ Object.setPrototypeOf(table, Object.prototype);
+
+ // @ts-ignore
+ table[onchain] = true;
+
+ // @ts-ignore
+ return table;
};
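+
+// Minimal usage sketch (illustrative only; the table and column names are hypothetical):
+//
+//   export const accounts = onchainTable("accounts", (t) => ({
+//     address: t.evmHex("address").primaryKey(),
+//     balance: t.evmBigint("balance").notNull(),
+//   }));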
+
+export class OffchainSchema<schema extends string> extends PgSchema<schema> {
+ override table = <
+ name extends string,
+   columns extends Record<string, PgColumnBuilderBase>,
+ >(
+ name: name,
+ columns: columns | ((columnTypes: PgColumnsBuilders) => columns),
+ extraConfig?: (
+     self: BuildExtraConfigColumns<columns>,
+ ) => PgTableExtraConfig,
+ ): OffchainTable<{
+ name: name;
+ schema: schema;
+   columns: BuildColumns<name, columns, "pg">;
+ dialect: "pg";
+ }> => pgTableWithSchema(name, columns, extraConfig, this.schemaName);
+}
+
+export const offchainSchema = <T extends string>(name: T) =>
+ new OffchainSchema(name);
+
+/**
+ * Create an offchain table
+ *
+ * @returns The offchain table.
+ */
+export const offchainTable = <
+ name extends string,
+ columns extends Record<string, PgColumnBuilderBase>,
+>(
+ name: name,
+ columns: columns | ((columnTypes: PgColumnsBuilders) => columns),
+ extraConfig?: (self: BuildExtraConfigColumns<columns>) => PgTableExtraConfig,
+): OffchainTable<{
+ name: name;
+ schema: undefined;
+ columns: BuildColumns<name, columns, "pg">;
+ dialect: "pg";
+}> => pgTableWithSchema(name, columns, extraConfig, undefined);
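+
+// Minimal usage sketch (illustrative only; names are hypothetical). Unlike `onchainTable`,
+// the result keeps its `PgTable` prototype, so it is not excluded from generated migrations:
+//
+//   export const tokenMetadata = offchainTable("token_metadata", (t) => ({
+//     id: t.text("id").primaryKey(),
+//     uri: t.text("uri"),
+//   }));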
+
+const InlineForeignKeys = Symbol.for("drizzle:PgInlineForeignKeys");
+
+/** @see https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/table.ts#L51 */
+function pgTableWithSchema<
+ name extends string,
+ schema extends string | undefined,
+ columns extends Record<string, PgColumnBuilderBase>,
+>(
+ name: name,
+ columns: columns | ((columnTypes: PgColumnsBuilders) => columns),
+ extraConfig:
+   | ((self: BuildExtraConfigColumns<columns>) => PgTableExtraConfig)
+ | undefined,
+ schema: schema,
+ baseName = name,
+): PgTableWithColumns<{
+ name: name;
+ schema: schema;
+ columns: BuildColumns<name, columns, "pg">;
+ dialect: "pg";
+}> {
+ const rawTable = new PgTable<{
+ name: name;
+ schema: schema;
+   columns: BuildColumns<name, columns, "pg">;
+ dialect: "pg";
+ }>(name, schema, baseName);
+
+ const parsedColumns: columns =
+ typeof columns === "function"
+ ? columns({ ...getPgColumnBuilders(), evmHex, evmBigint })
+ : columns;
+
+ const builtColumns = Object.fromEntries(
+ Object.entries(parsedColumns).map(([name, colBuilderBase]) => {
+ const colBuilder = colBuilderBase;
+ //@ts-ignore
+ colBuilder.setName(name);
+ //@ts-ignore
+ const column = colBuilder.build(rawTable);
+ // @ts-ignore
+ rawTable[InlineForeignKeys].push(
+ //@ts-ignore
+ ...colBuilder.buildForeignKeys(column, rawTable),
+ );
+ return [name, column];
+ }),
+ ) as unknown as BuildColumns<name, columns, "pg">;
+
+ const builtColumnsForExtraConfig = Object.fromEntries(
+ Object.entries(parsedColumns).map(([name, colBuilderBase]) => {
+ const colBuilder = colBuilderBase as PgColumnBuilder;
+ //@ts-ignore
+ colBuilder.setName(name);
+ //@ts-ignore
+ const column = colBuilder.buildExtraConfigColumn(rawTable);
+ return [name, column];
+ }),
+ ) as unknown as BuildExtraConfigColumns<columns>;
+
+ const table = Object.assign(rawTable, builtColumns);
+
+ //@ts-ignore
+ table[Table.Symbol.Columns] = builtColumns;
+ //@ts-ignore
+ table[Table.Symbol.ExtraConfigColumns] = builtColumnsForExtraConfig;
+
+ if (extraConfig) {
+ //@ts-ignore
+ table[PgTable.Symbol.ExtraConfigBuilder] = extraConfig as any;
+ }
+
+ return table;
+}
diff --git a/packages/core/src/drizzle/hex.ts b/packages/core/src/drizzle/hex.ts
index 40708fd7a..bf2520fca 100644
--- a/packages/core/src/drizzle/hex.ts
+++ b/packages/core/src/drizzle/hex.ts
@@ -1,68 +1,58 @@
-import { entityKind } from "drizzle-orm";
+import {
+ type ColumnBaseConfig,
+ type ColumnBuilderBaseConfig,
+ type ColumnBuilderRuntimeConfig,
+ type MakeColumnConfig,
+ entityKind,
+} from "drizzle-orm";
import {
type AnyPgTable,
PgColumn,
PgColumnBuilder,
} from "drizzle-orm/pg-core";
-import {
- type AnySQLiteTable,
- SQLiteColumn,
- SQLiteColumnBuilder,
-} from "drizzle-orm/sqlite-core";
-import { bytesToHex, hexToBytes } from "viem";
-export class PgHexBuilder extends PgColumnBuilder {
+export type PgHexBuilderInitial<TName extends string> = PgHexBuilder<{
+ name: TName;
+ dataType: "string";
+ columnType: "PgHex";
+ data: `0x${string}`;
+ driverParam: string;
+ enumValues: undefined;
+ generated: undefined;
+}>;
+
+export class PgHexBuilder<
+ T extends ColumnBuilderBaseConfig<"string", "PgHex">,
+> extends PgColumnBuilder<T> {
static readonly [entityKind]: string = "PgHexBuilder";
- constructor(columnName: string) {
- super(columnName, "buffer", "PgHex");
+ constructor(name: T["name"]) {
+ super(name, "string", "PgHex");
}
- build(table: AnyPgTable) {
- return new PgHex(table, this.config);
+ /** @internal */
+ // @ts-ignore
+ override build<TTableName extends string>(
+   table: AnyPgTable<{ name: TTableName }>,
+ ): PgHex<MakeColumnConfig<T, TTableName>> {
+   return new PgHex<MakeColumnConfig<T, TTableName>>(
+     table,
+     this.config as ColumnBuilderRuntimeConfig<any, any>,
+ );
}
}
-export class PgHex extends PgColumn {
+export class PgHex<
+ T extends ColumnBaseConfig<"string", "PgHex">,
+> extends PgColumn<T> {
static readonly [entityKind]: string = "PgHex";
getSQLType(): string {
- return "bytea";
- }
-
- override mapFromDriverValue(value: Buffer) {
- return bytesToHex(value);
- }
-
- override mapToDriverValue(value: `0x${string}`): Buffer {
- return Buffer.from(hexToBytes(value));
- }
-}
-
-export class SQLiteHexBuilder extends SQLiteColumnBuilder {
- static readonly [entityKind]: string = "SQliteHexBuilder";
-
- constructor(columnName: string) {
- super(columnName, "buffer", "SQLiteHex");
- }
-
- build(table: AnySQLiteTable) {
- return new SQLiteHex(table, this.config);
- }
-}
-
-export class SQLiteHex extends SQLiteColumn {
- static readonly [entityKind]: string = "SQLiteHex";
-
- getSQLType(): string {
- return "blob";
- }
-
- override mapFromDriverValue(value: Buffer) {
- return bytesToHex(value);
+ return "text";
}
- override mapToDriverValue(value: `0x${string}`): Buffer {
- return Buffer.from(hexToBytes(value));
+ override mapToDriverValue(value: `0x${string}`) {
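+   // Normalize hex values: lowercase, and left-pad odd-length values with a zero digit.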
+ if (value.length % 2 === 0) return value.toLowerCase() as `0x${string}`;
+ return `0x0${value.slice(2)}`.toLowerCase() as `0x${string}`;
}
}
diff --git a/packages/core/src/drizzle/index.ts b/packages/core/src/drizzle/index.ts
new file mode 100644
index 000000000..ab4170d48
--- /dev/null
+++ b/packages/core/src/drizzle/index.ts
@@ -0,0 +1,11 @@
+import type { NodePgDatabase } from "drizzle-orm/node-postgres";
+import type { PgliteDatabase } from "drizzle-orm/pglite";
+
+export const onchain = Symbol.for("ponder:onchain");
+
+export type Drizzle<TSchema extends Schema = NoSchema> =
+ | NodePgDatabase<TSchema>
+ | PgliteDatabase<TSchema>;
+
+export type Schema = { [name: string]: unknown };
+type NoSchema = { [name: string]: never };
diff --git a/packages/core/src/drizzle/json.ts b/packages/core/src/drizzle/json.ts
deleted file mode 100644
index 1503ce56f..000000000
--- a/packages/core/src/drizzle/json.ts
+++ /dev/null
@@ -1,34 +0,0 @@
-import { entityKind } from "drizzle-orm";
-import {
- type AnySQLiteTable,
- SQLiteColumn,
- SQLiteColumnBuilder,
-} from "drizzle-orm/sqlite-core";
-
-export class SQLiteJsonBuilder extends SQLiteColumnBuilder {
- static readonly [entityKind]: string = "SQliteJsonBuilder";
-
- constructor(columnName: string) {
- super(columnName, "json", "SQLiteJson");
- }
-
- build(table: AnySQLiteTable) {
- return new SQLiteJson(table, this.config);
- }
-}
-
-export class SQLiteJson extends SQLiteColumn {
- static readonly [entityKind]: string = "SQLiteJson";
-
- getSQLType(): string {
- return "jsonb";
- }
-
- override mapFromDriverValue(value: string) {
- return JSON.parse(value);
- }
-
- override mapToDriverValue(value: object): string {
- return JSON.stringify(value);
- }
-}
diff --git a/packages/core/src/drizzle/list.ts b/packages/core/src/drizzle/list.ts
deleted file mode 100644
index 2139c96ce..000000000
--- a/packages/core/src/drizzle/list.ts
+++ /dev/null
@@ -1,100 +0,0 @@
-import type { Scalar } from "@/schema/common.js";
-import { entityKind } from "drizzle-orm";
-import {
- type AnyPgTable,
- PgColumn,
- PgColumnBuilder,
-} from "drizzle-orm/pg-core";
-import {
- type AnySQLiteTable,
- SQLiteColumn,
- SQLiteColumnBuilder,
-} from "drizzle-orm/sqlite-core";
-
-export class PgListBuilder extends PgColumnBuilder {
- static readonly [entityKind]: string = "PgListBuilder";
- element: Scalar;
-
- constructor(columnName: string, element: Scalar) {
- super(columnName, "string", "PgList");
- this.element = element;
- }
-
- build(table: AnyPgTable) {
- return new PgList(table, this.config, this.element);
- }
-}
-
-export class PgList extends PgColumn {
- static readonly [entityKind]: string = "PgList";
- element: Scalar;
-
- constructor(
- table: AnyPgTable,
- config: PgListBuilder["config"],
- element: Scalar,
- ) {
- super(table, config);
- this.element = element;
- }
-
- getSQLType(): string {
- return "text";
- }
-
- override mapFromDriverValue(value: string) {
- return this.element === "bigint"
- ? JSON.parse(value).map(BigInt)
- : JSON.parse(value);
- }
-
- override mapToDriverValue(value: Array): string {
- return this.element === "bigint"
- ? JSON.stringify(value.map(String))
- : JSON.stringify(value);
- }
-}
-
-export class SQLiteListBuilder extends SQLiteColumnBuilder {
- static readonly [entityKind]: string = "SQliteListBuilder";
- element: Scalar;
-
- constructor(columnName: string, element: Scalar) {
- super(columnName, "string", "PgList");
- this.element = element;
- }
-
- build(table: AnySQLiteTable) {
- return new SQLiteList(table, this.config, this.element);
- }
-}
-
-export class SQLiteList extends SQLiteColumn {
- static readonly [entityKind]: string = "SQLiteList";
- element: Scalar;
-
- constructor(
- table: AnyPgTable,
- config: SQLiteListBuilder["config"],
- element: Scalar,
- ) {
- super(table, config);
- this.element = element;
- }
-
- getSQLType(): string {
- return "text";
- }
-
- override mapFromDriverValue(value: string) {
- return this.element === "bigint"
- ? JSON.parse(value).map(BigInt)
- : JSON.parse(value);
- }
-
- override mapToDriverValue(value: Array): string {
- return this.element === "bigint"
- ? JSON.stringify(value.map(String))
- : JSON.stringify(value);
- }
-}
diff --git a/packages/core/src/drizzle/runtime.test.ts b/packages/core/src/drizzle/runtime.test.ts
deleted file mode 100644
index f56017052..000000000
--- a/packages/core/src/drizzle/runtime.test.ts
+++ /dev/null
@@ -1,267 +0,0 @@
-import {
- setupCommon,
- setupDatabaseServices,
- setupIsolatedDatabase,
-} from "@/_test/setup.js";
-import type { Context } from "@/hono/context.js";
-import type { HistoricalStore } from "@/indexing-store/store.js";
-import { createSchema } from "@/schema/schema.js";
-import { eq } from "drizzle-orm";
-import { beforeEach, expect, test } from "vitest";
-import type { DrizzleDb } from "./db.js";
-import { createDrizzleDb, createDrizzleTables } from "./runtime.js";
-
-beforeEach(setupCommon);
-beforeEach(setupIsolatedDatabase);
-
-test("runtime select", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- }),
- }));
-
- const { database, cleanup, indexingStore } = await setupDatabaseServices(
- context,
- { schema },
- );
-
- await indexingStore.create({ tableName: "table", id: "kyle" });
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const db = createDrizzleDb(database) as unknown as DrizzleDb;
-
- const drizzleTables = createDrizzleTables(schema, database) as Context<
- typeof schema
- >["tables"];
-
- const rows = await db.select().from(drizzleTables.table);
-
- expect(rows).toHaveLength(1);
- expect(rows[0]).toMatchObject({ id: "kyle" });
-
- await cleanup();
-});
-
-test("select hex", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.hex(),
- }),
- }));
-
- const { database, cleanup, indexingStore } = await setupDatabaseServices(
- context,
- { schema },
- );
-
- await indexingStore.create({ tableName: "table", id: "0x1" });
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const db = createDrizzleDb(database) as unknown as DrizzleDb;
-
- const drizzleTables = createDrizzleTables(schema, database) as Context<
- typeof schema
- >["tables"];
-
- const rows = await db.select().from(drizzleTables.table);
-
- expect(rows).toHaveLength(1);
- expect(rows[0]).toMatchObject({ id: "0x01" });
-
- await cleanup();
-});
-
-test("select bigint", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.bigint(),
- }),
- }));
-
- const { database, cleanup, indexingStore } = await setupDatabaseServices(
- context,
- { schema },
- );
-
- await indexingStore.create({ tableName: "table", id: 1n });
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const db = createDrizzleDb(database) as unknown as DrizzleDb;
-
- const drizzleTables = createDrizzleTables(schema, database) as Context<
- typeof schema
- >["tables"];
-
- const rows = await db.select().from(drizzleTables.table);
-
- expect(rows).toHaveLength(1);
- expect(rows[0]).toMatchObject({ id: 1n });
-
- await cleanup();
-});
-
-test("select json", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- json: p.json(),
- }),
- }));
-
- const { database, cleanup, indexingStore } = await setupDatabaseServices(
- context,
- { schema },
- );
-
- await indexingStore.create({
- tableName: "table",
- id: "1",
- data: {
- json: {
- prop: 52,
- },
- },
- });
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const db = createDrizzleDb(database) as unknown as DrizzleDb;
-
- const drizzleTables = createDrizzleTables(schema, database) as Context<
- typeof schema
- >["tables"];
-
- const rows = await db.select().from(drizzleTables.table);
-
- expect(rows).toHaveLength(1);
- expect(rows[0]).toMatchObject({ id: "1", json: { prop: 52 } });
-
- await cleanup();
-});
-
-test("select enum", async (context) => {
- const schema = createSchema((p) => ({
- en: p.createEnum(["hi", "low"]),
- table: p.createTable({
- id: p.string(),
- en: p.enum("en"),
- }),
- }));
-
- const { database, cleanup, indexingStore } = await setupDatabaseServices(
- context,
- { schema },
- );
-
- await indexingStore.create({
- tableName: "table",
- id: "1",
- data: { en: "hi" },
- });
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const db = createDrizzleDb(database) as unknown as DrizzleDb;
-
- const drizzleTables = createDrizzleTables(schema, database) as Context<
- typeof schema
- >["tables"];
-
- const rows = await db.select().from(drizzleTables.table);
-
- expect(rows).toHaveLength(1);
- expect(rows[0]).toMatchObject({ id: "1", en: "hi" });
-
- await cleanup();
-});
-
-test("select list", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- list: p.string().list(),
- }),
- }));
-
- const { database, cleanup, indexingStore } = await setupDatabaseServices(
- context,
- { schema },
- );
-
- await indexingStore.create({
- tableName: "table",
- id: "1",
- data: {
- list: ["big", "dog"],
- },
- });
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const db = createDrizzleDb(database) as unknown as DrizzleDb;
-
- const drizzleTables = createDrizzleTables(schema, database) as Context<
- typeof schema
- >["tables"];
-
- const rows = await db.select().from(drizzleTables.table);
-
- expect(rows).toHaveLength(1);
- expect(rows[0]).toMatchObject({ id: "1", list: ["big", "dog"] });
-
- await cleanup();
-});
-
-test("select with join", async (context) => {
- const schema = createSchema((p) => ({
- account: p.createTable({
- id: p.hex(),
- name: p.string(),
- age: p.int(),
- }),
- nft: p.createTable({
- id: p.bigint(),
- owner: p.hex().references("account.id"),
- }),
- }));
-
- const { database, cleanup, indexingStore } = await setupDatabaseServices(
- context,
- { schema },
- );
-
- await indexingStore.create({
- tableName: "account",
- id: "0x1",
- data: {
- name: "kyle",
- age: 52,
- },
- });
- await indexingStore.create({
- tableName: "nft",
- id: 10n,
- data: { owner: "0x1" },
- });
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const db = createDrizzleDb(database) as unknown as DrizzleDb;
-
- const drizzleTables = createDrizzleTables(schema, database) as Context<
- typeof schema
- >["tables"];
-
- const rows = await db
- .select()
- .from(drizzleTables.account)
- .fullJoin(
- drizzleTables.nft,
- eq(drizzleTables.account.id, drizzleTables.nft.owner),
- );
-
- expect(rows).toHaveLength(1);
- expect(rows[0]).toMatchObject({
- account: { id: "0x01", name: "kyle", age: 52 },
- nft: { id: 10n, owner: "0x01" },
- });
-
- await cleanup();
-});
diff --git a/packages/core/src/drizzle/runtime.ts b/packages/core/src/drizzle/runtime.ts
deleted file mode 100644
index cbd5ab6e8..000000000
--- a/packages/core/src/drizzle/runtime.ts
+++ /dev/null
@@ -1,264 +0,0 @@
-import type { Database } from "@/database/index.js";
-import type { Scalar, Schema } from "@/schema/common.js";
-import {
- isEnumColumn,
- isJSONColumn,
- isListColumn,
- isMaterialColumn,
- isOptionalColumn,
- isReferenceColumn,
- isScalarColumn,
-} from "@/schema/utils.js";
-import { getTables } from "@/schema/utils.js";
-import type { SqliteDatabase } from "@/utils/sqlite.js";
-import { type Table, TableAliasProxyHandler } from "drizzle-orm";
-import { drizzle as drizzleSQLite } from "drizzle-orm/better-sqlite3";
-import { drizzle as drizzlePg } from "drizzle-orm/node-postgres";
-import { pgSchema, pgTable } from "drizzle-orm/pg-core";
-import {
- doublePrecision as PgDoublePrecision,
- integer as PgInteger,
- jsonb as PgJsonb,
- numeric as PgNumeric,
- text as PgText,
-} from "drizzle-orm/pg-core";
-import type { View } from "drizzle-orm/sql";
-import {
- integer as SQLiteInteger,
- real as SQLiteReal,
- text as SQLiteText,
- sqliteTable,
-} from "drizzle-orm/sqlite-core";
-import type { Pool } from "pg";
-import { SQLiteBigintBuilder } from "./bigint.js";
-import { PgHexBuilder, SQLiteHexBuilder } from "./hex.js";
-import { SQLiteJsonBuilder } from "./json.js";
-import { PgListBuilder, SQLiteListBuilder } from "./list.js";
-import type { BuildAliasTable } from "./select.js";
-
-export const createDrizzleDb = (database: Database) => {
- if (database.dialect === "postgres") {
- const drizzle = drizzlePg(database.driver.readonly as Pool);
- return {
- // @ts-ignore
- select: (...args: any[]) => drizzle.select(...args),
- execute: (query: any) => drizzle.execute(query),
- };
- } else {
- const drizzle = drizzleSQLite(database.driver.readonly as SqliteDatabase);
- return {
- // @ts-ignore
- select: (...args: any[]) => drizzle.select(...args),
- execute: (query: any) => {
- try {
- try {
- return drizzle.all(query);
- } catch (e) {
- const error = e as Error;
- if (
- error.name === "SqliteError" &&
- error.message ===
- "This statement does not return data. Use run() instead"
- ) {
- return drizzle.run(query);
- } else {
- throw error;
- }
- }
- } catch (e) {
- const error = e as Error;
- if (error.cause) throw error.cause;
- throw error;
- }
- },
- };
- }
-};
-
-export function alias<tableOrView extends Table | View, alias extends string>(
- table: tableOrView,
- alias: alias,
-): BuildAliasTable<tableOrView, alias> {
- return new Proxy(table, new TableAliasProxyHandler(alias, false)) as any;
-}
-
-type SQLiteTable = Parameters<typeof sqliteTable>[1];
-type PostgresTable = Parameters<typeof pgTable>[1];
-type DrizzleTable = { [tableName: string]: any };
-
-export const createDrizzleTables = (schema: Schema, database: Database) => {
- const drizzleTables: { [tableName: string]: DrizzleTable } = {};
-
- for (const [tableName, { table }] of Object.entries(getTables(schema))) {
- const drizzleColumns: DrizzleTable = {};
-
- for (const [columnName, column] of Object.entries(table)) {
- if (isMaterialColumn(column)) {
- if (isJSONColumn(column)) {
- drizzleColumns[columnName] = convertJsonColumn(
- columnName,
- database.dialect,
- );
- } else if (isEnumColumn(column)) {
- if (isListColumn(column)) {
- drizzleColumns[columnName] = convertListColumn(
- columnName,
- database.dialect,
- "string",
- );
- } else {
- drizzleColumns[columnName] = convertEnumColumn(
- columnName,
- database.dialect,
- );
- }
- } else if (isScalarColumn(column) || isReferenceColumn(column)) {
- if (isListColumn(column)) {
- drizzleColumns[columnName] = convertListColumn(
- columnName,
- database.dialect,
- column[" scalar"],
- );
- } else {
- switch (column[" scalar"]) {
- case "string":
- drizzleColumns[columnName] = convertStringColumn(
- columnName,
- database.dialect,
- );
- break;
-
- case "int":
- drizzleColumns[columnName] = convertIntColumn(
- columnName,
- database.dialect,
- );
- break;
-
- case "boolean":
- drizzleColumns[columnName] = convertBooleanColumn(
- columnName,
- database.dialect,
- );
- break;
-
- case "float":
- drizzleColumns[columnName] = convertFloatColumn(
- columnName,
- database.dialect,
- );
- break;
-
- case "hex":
- drizzleColumns[columnName] = convertHexColumn(
- columnName,
- database.dialect,
- );
- break;
-
- case "bigint":
- drizzleColumns[columnName] = convertBigintColumn(
- columnName,
- database.dialect,
- );
- break;
- }
- }
-
- // apply column constraints
- if (columnName === "id") {
- drizzleColumns[columnName] =
- drizzleColumns[columnName]!.primaryKey();
- } else if (isOptionalColumn(column) === false) {
- drizzleColumns[columnName] = drizzleColumns[columnName]!.notNull();
- }
- }
- }
- }
-
- if (database.dialect === "postgres") {
- // Note: this is to avoid an error thrown by drizzle when
- // setting schema to "public".
- if (database.namespace === "public") {
- drizzleTables[tableName] = pgTable(
- tableName,
- drizzleColumns as PostgresTable,
- );
- } else {
- drizzleTables[tableName] = pgSchema(database.namespace).table(
- tableName,
- drizzleColumns as PostgresTable,
- );
- }
- } else {
- drizzleTables[tableName] = sqliteTable(
- tableName,
- drizzleColumns as SQLiteTable,
- );
- }
- }
-
- return drizzleTables;
-};
-
-const convertStringColumn = (
- columnName: string,
- kind: "sqlite" | "postgres",
-) => {
- return kind === "sqlite" ? SQLiteText(columnName) : PgText(columnName);
-};
-
-const convertIntColumn = (columnName: string, kind: "sqlite" | "postgres") => {
- return kind === "sqlite" ? SQLiteInteger(columnName) : PgInteger(columnName);
-};
-
-const convertFloatColumn = (
- columnName: string,
- kind: "sqlite" | "postgres",
-) => {
- return kind === "sqlite"
- ? SQLiteReal(columnName)
- : PgDoublePrecision(columnName);
-};
-
-const convertBooleanColumn = (
- columnName: string,
- kind: "sqlite" | "postgres",
-) => {
- return kind === "sqlite" ? SQLiteInteger(columnName) : PgInteger(columnName);
-};
-
-const convertHexColumn = (columnName: string, kind: "sqlite" | "postgres") => {
- return kind === "sqlite"
- ? new SQLiteHexBuilder(columnName)
- : new PgHexBuilder(columnName);
-};
-
-const convertBigintColumn = (
- columnName: string,
- kind: "sqlite" | "postgres",
-) => {
- return kind === "sqlite"
- ? new SQLiteBigintBuilder(columnName)
- : PgNumeric(columnName, { precision: 78 });
-};
-
-const convertListColumn = (
- columnName: string,
- kind: "sqlite" | "postgres",
- element: Scalar,
-) => {
- return kind === "sqlite"
- ? new SQLiteListBuilder(columnName, element)
- : new PgListBuilder(columnName, element);
-};
-
-const convertJsonColumn = (columnName: string, kind: "sqlite" | "postgres") => {
- return kind === "sqlite"
- ? new SQLiteJsonBuilder(columnName)
- : PgJsonb(columnName);
-};
-
-const convertEnumColumn = (columnName: string, kind: "sqlite" | "postgres") => {
- return kind === "sqlite" ? SQLiteText(columnName) : PgText(columnName);
-};
diff --git a/packages/core/src/drizzle/select.ts b/packages/core/src/drizzle/select.ts
deleted file mode 100644
index dc6a592d2..000000000
--- a/packages/core/src/drizzle/select.ts
+++ /dev/null
@@ -1,709 +0,0 @@
-import type {
- Assume,
- Column,
- MakeColumnConfig,
- QueryPromise,
- SelectedFields,
- SelectedFieldsOrdered,
- Subquery,
- Table,
- TableConfig,
- UpdateTableConfig,
- ValidateShape,
- entityKind,
-} from "drizzle-orm";
-import { TypedQueryBuilder } from "drizzle-orm/query-builders/query-builder";
-import type {
- AddAliasToSelection,
- AppendToNullabilityMap,
- AppendToResult,
- BuildSubquerySelection,
- GetSelectTableName,
- GetSelectTableSelection,
- JoinNullability,
- JoinType,
- SelectMode,
- SelectResult,
- SetOperator,
-} from "drizzle-orm/query-builders/select.types";
-import type {
- ColumnsSelection,
- Placeholder,
- Query,
- SQL,
- View,
-} from "drizzle-orm/sql";
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.ts#L54
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.ts#L50
- */
-export type SelectBuilder<
- TSelection extends SelectedFields | undefined,
- TResultType extends "sync" | "async",
- TRunResult,
- TBuilderMode extends "db" | "qb" = "db",
-> = {
- from: (
- source: TFrom,
- ) => CreateSelectFromBuilderMode<
- TBuilderMode,
- GetSelectTableName,
- TResultType,
- TRunResult,
- TSelection extends undefined ? GetSelectTableSelection : TSelection,
- TSelection extends undefined ? "single" : "partial"
- >;
-};
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.ts#L126
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.ts#L130
- */
-export abstract class SelectQueryBuilderBase<
- THKT extends SelectHKTBase,
- TTableName extends string | undefined,
- TResultType extends "sync" | "async",
- TRunResult,
- TSelection extends ColumnsSelection,
- TSelectMode extends SelectMode,
- TNullabilityMap extends Record<
- string,
- JoinNullability
- > = TTableName extends string ? Record : {},
- TDynamic extends boolean = false,
- TExcludedMethods extends string = never,
- TResult extends any[] = SelectResult<
- TSelection,
- TSelectMode,
- TNullabilityMap
- >[],
- TSelectedFields extends ColumnsSelection = BuildSubquerySelection<
- TSelection,
- TNullabilityMap
- >,
-> extends TypedQueryBuilder {
- declare [entityKind]: string;
- declare _: {
- readonly hkt: THKT;
- readonly tableName: TTableName;
- readonly resultType: TResultType;
- readonly runResult: TRunResult;
- readonly selection: TSelection;
- readonly selectMode: TSelectMode;
- readonly nullabilityMap: TNullabilityMap;
- readonly dynamic: TDynamic;
- readonly excludedMethods: TExcludedMethods;
- readonly result: TResult;
- readonly selectedFields: TSelectedFields;
- };
-
- declare leftJoin: JoinFn;
- declare rightJoin: JoinFn;
- declare innerJoin: JoinFn;
- declare fullJoin: JoinFn;
-
- private declare setOperator: >(
- rightSelection:
- | ((
- setOperators: GetSetOperators,
- ) => SetOperatorRightSelect)
- | SetOperatorRightSelect,
- ) => SelectWithout;
-
- declare union: typeof this.setOperator;
- declare unionAll: typeof this.setOperator;
- declare intersect: typeof this.setOperator;
- declare intersectAll: typeof this.setOperator;
- declare except: typeof this.setOperator;
- declare exceptAll: typeof this.setOperator;
-
- declare where: (
- where: ((aliases: TSelection) => SQL | undefined) | SQL | undefined,
- ) => SelectWithout;
-
- declare having: (
- having:
- | ((aliases: this["_"]["selection"]) => SQL | undefined)
- | SQL
- | undefined,
- ) => SelectWithout;
-
- declare groupBy: (
- ...columns: (Column | SQL)[]
- ) => SelectWithout;
-
- declare orderBy: (
- ...columns: (Column | SQL)[]
- ) => SelectWithout;
-
- declare limit: (
- limit: number | Placeholder,
- ) => SelectWithout;
-
- declare offset: (
- offset: number | Placeholder,
- ) => SelectWithout;
-
- declare toSQL: () => Query;
-
- declare as: (
- alias: TAlias,
- ) => SubqueryWithSelection;
-
- declare $dynamic: () => SelectDynamic;
-}
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.ts#L803
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.ts#L903
- */
-export type SelectBase<
- TTableName extends string | undefined,
- TResultType extends "sync" | "async",
- TRunResult,
- TSelection extends ColumnsSelection,
- TSelectMode extends SelectMode = "single",
- TNullabilityMap extends Record<
- string,
- JoinNullability
- > = TTableName extends string ? Record : {},
- TDynamic extends boolean = false,
- TExcludedMethods extends string = never,
- TResult = SelectResult[],
- TSelectedFields extends ColumnsSelection = BuildSubquerySelection<
- TSelection,
- TNullabilityMap
- >,
-> = SelectQueryBuilderBase<
- SelectHKT,
- TTableName,
- TResultType,
- TRunResult,
- TSelection,
- TSelectMode,
- TNullabilityMap,
- TDynamic,
- TExcludedMethods,
- // @ts-ignore
- TResult,
- TSelectedFields
-> &
- QueryPromise;
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L31
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L30
- */
-export type SelectJoinConfig = {
- on: SQL;
- table: Table | Subquery | View | SQL;
- alias: string | undefined;
- joinType: JoinType;
-};
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L38
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L30
- */
-export type BuildAliasTable<
- tableOrView extends Table | View,
- alias extends string,
-> = tableOrView extends Table
- ? TableWithColumns<
- UpdateTableConfig<
- tableOrView["_"]["config"],
- {
- name: alias;
- columns: MapColumnsToTableAlias;
- }
- >
- >
- : tableOrView extends View
- ? ViewWithSelection<
- alias,
- tableOrView["_"]["existing"],
- MapColumnsToTableAlias
- >
- : never;
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L52
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L52
- */
-export type SelectConfig = {
- withList?: Subquery[];
- fields: Record;
- fieldsFlat?: SelectedFieldsOrdered;
- where?: SQL;
- having?: SQL;
- table: Table | Subquery | View | SQL;
- limit?: number | Placeholder;
- offset?: number | Placeholder;
- joins?: SelectJoinConfig[];
- orderBy?: (Column | SQL | SQL.Aliased)[];
- groupBy?: (Column | SQL | SQL.Aliased)[];
- distinct?: boolean;
- setOperators: {
- rightSelect: TypedQueryBuilder;
- type: SetOperator;
- isAll: boolean;
- orderBy?: (Column | SQL | SQL.Aliased)[];
- limit?: number | Placeholder;
- offset?: number | Placeholder;
- }[];
-};
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L75
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L82
- */
-export type Join<
- T extends AnySelectQueryBuilder,
- TDynamic extends boolean,
- TJoinType extends JoinType,
- TJoinedTable extends Table | Subquery | View | SQL,
- TJoinedName extends
- GetSelectTableName = GetSelectTableName,
-> = T extends any
- ? SelectWithout<
- SelectKind<
- T["_"]["hkt"],
- T["_"]["tableName"],
- T["_"]["resultType"],
- T["_"]["runResult"],
- AppendToResult<
- T["_"]["tableName"],
- T["_"]["selection"],
- TJoinedName,
- TJoinedTable extends Table
- ? TJoinedTable["_"]["columns"]
- : TJoinedTable extends Subquery | View
- ? Assume<
- TJoinedTable["_"]["selectedFields"],
- SelectedFields
- >
- : never,
- T["_"]["selectMode"]
- >,
- T["_"]["selectMode"] extends "partial"
- ? T["_"]["selectMode"]
- : "multiple",
- AppendToNullabilityMap<
- T["_"]["nullabilityMap"],
- TJoinedName,
- TJoinType
- >,
- T["_"]["dynamic"],
- T["_"]["excludedMethods"]
- >,
- TDynamic,
- T["_"]["excludedMethods"]
- >
- : never;
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L106
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L111
- */
-export type JoinFn<
- T extends AnySelectQueryBuilder,
- TDynamic extends boolean,
- TJoinType extends JoinType,
-> = <
- TJoinedTable extends Table | Subquery | View | SQL,
- TJoinedName extends
- GetSelectTableName = GetSelectTableName,
->(
- table: TJoinedTable,
- on: ((aliases: T["_"]["selection"]) => SQL | undefined) | SQL | undefined,
-) => Join;
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/query-builders/select.types.ts#L75
- */
-type MapColumnsToTableAlias<
- TColumns extends ColumnsSelection,
- TAlias extends string,
-> = {
- [Key in keyof TColumns]: TColumns[Key] extends Column
- ? Column["_"], TAlias>>
- : TColumns[Key];
-} & {};
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L124
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L146
- */
-export type SelectHKTBase = {
- tableName: string | undefined;
- resultType: "sync" | "async";
- runResult: unknown;
- selection: unknown;
- selectMode: SelectMode;
- nullabilityMap: unknown;
- dynamic: boolean;
- excludedMethods: string;
- result: unknown;
- selectedFields: unknown;
- _type: unknown;
-};
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L138
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L158
- */
-export type SelectKind<
- T extends SelectHKTBase,
- TTableName extends string | undefined,
- TResultType extends "sync" | "async",
- TRunResult,
- TSelection extends ColumnsSelection,
- TSelectMode extends SelectMode,
- TNullabilityMap extends Record,
- TDynamic extends boolean,
- TExcludedMethods extends string,
- TResult = SelectResult[],
- TSelectedFields = BuildSubquerySelection,
-> = (T & {
- tableName: TTableName;
- resultType: TResultType;
- runResult: TRunResult;
- selection: TSelection;
- selectMode: TSelectMode;
- nullabilityMap: TNullabilityMap;
- dynamic: TDynamic;
- excludedMethods: TExcludedMethods;
- result: TResult;
- selectedFields: TSelectedFields;
-})["_type"];
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L163
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L179
- */
-export interface SelectQueryBuilderHKT extends SelectHKTBase {
- _type: SelectQueryBuilderBase<
- SelectQueryBuilderHKT,
- this["tableName"],
- this["resultType"],
- this["runResult"],
- Assume,
- this["selectMode"],
- Assume>,
- this["dynamic"],
- this["excludedMethods"],
- Assume,
- Assume
- >;
-}
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L179
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L193
- */
-export interface SelectHKT extends SelectHKTBase {
- _type: SelectBase<
- this["tableName"],
- this["resultType"],
- this["runResult"],
- Assume,
- this["selectMode"],
- Assume>,
- this["dynamic"],
- this["excludedMethods"],
- Assume,
- Assume
- >;
-}
-
-export type SetOperatorExcludedMethods =
- | "leftJoin"
- | "rightJoin"
- | "innerJoin"
- | "fullJoin"
- | "where"
- | "having"
- | "groupBy";
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L204
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L206
- */
-export type CreateSelectFromBuilderMode<
- TBuilderMode extends "db" | "qb",
- TTableName extends string | undefined,
- TResultType extends "sync" | "async",
- TRunResult,
- TSelection extends ColumnsSelection,
- TSelectMode extends SelectMode,
-> = TBuilderMode extends "db"
- ? SelectBase
- : SelectQueryBuilderBase<
- SelectQueryBuilderHKT,
- TTableName,
- TResultType,
- TRunResult,
- TSelection,
- TSelectMode
- >;
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/query-builders/select.types.ts#L227
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/query-builders/select.types.ts#L224
- */
-type SelectWithout<
- T extends AnySelectQueryBuilder,
- TDynamic extends boolean,
- K extends keyof T & string,
- TResetExcluded extends boolean = false,
-> = TDynamic extends true
- ? T
- : Omit<
- SelectKind<
- T["_"]["hkt"],
- T["_"]["tableName"],
- T["_"]["resultType"],
- T["_"]["runResult"],
- T["_"]["selection"],
- T["_"]["selectMode"],
- T["_"]["nullabilityMap"],
- TDynamic,
- TResetExcluded extends true ? K : T["_"]["excludedMethods"] | K,
- T["_"]["result"],
- T["_"]["selectedFields"]
- >,
- TResetExcluded extends true ? K : T["_"]["excludedMethods"] | K
- >;
-
-export type SelectDynamic = SelectKind<
- T["_"]["hkt"],
- T["_"]["tableName"],
- T["_"]["resultType"],
- T["_"]["runResult"],
- T["_"]["selection"],
- T["_"]["selectMode"],
- T["_"]["nullabilityMap"],
- true,
- never,
- T["_"]["result"],
- T["_"]["selectedFields"]
->;
-
-export type AnySelectQueryBuilder = SelectQueryBuilderBase<
- any,
- any,
- any,
- any,
- any,
- any,
- any,
- any,
- any,
- any,
- any
->;
-
-export type AnySetOperatorInterface = SetOperatorInterface<
- any,
- any,
- any,
- any,
- any,
- any,
- any,
- any,
- any
->;
-
-export interface SetOperatorInterface<
- TTableName extends string | undefined,
- TResultType extends "sync" | "async",
- TRunResult,
- TSelection extends ColumnsSelection,
- TSelectMode extends SelectMode = "single",
- TNullabilityMap extends Record<
- string,
- JoinNullability
- > = TTableName extends string ? Record : {},
- TDynamic extends boolean = false,
- TExcludedMethods extends string = never,
- TResult extends any[] = SelectResult<
- TSelection,
- TSelectMode,
- TNullabilityMap
- >[],
- TSelectedFields extends ColumnsSelection = BuildSubquerySelection<
- TSelection,
- TNullabilityMap
- >,
-> {
- _: {
- readonly hkt: SelectHKTBase;
- readonly tableName: TTableName;
- readonly resultType: TResultType;
- readonly runResult: TRunResult;
- readonly selection: TSelection;
- readonly selectMode: TSelectMode;
- readonly nullabilityMap: TNullabilityMap;
- readonly dynamic: TDynamic;
- readonly excludedMethods: TExcludedMethods;
- readonly result: TResult;
- readonly selectedFields: TSelectedFields;
- };
-}
-
-export type SetOperatorWithResult = SetOperatorInterface<
- any,
- any,
- any,
- any,
- any,
- any,
- any,
- any,
- TResult,
- any
->;
-
-export type SetOperatorRightSelect<
- TValue extends SetOperatorWithResult,
- TResult extends any[],
-> = TValue extends SetOperatorInterface<
- any,
- any,
- any,
- any,
- any,
- any,
- any,
- any,
- infer TValueResult,
- any
->
- ? ValidateShape<
- TValueResult[number],
- TResult[number],
- TypedQueryBuilder
- >
- : TValue;
-
-export type SetOperatorRestSelect<
- TValue extends readonly SetOperatorWithResult[],
- TResult extends any[],
-> = TValue extends [infer First, ...infer Rest]
- ? First extends SetOperatorInterface<
- any,
- any,
- any,
- any,
- any,
- any,
- any,
- any,
- infer TValueResult,
- any
- >
- ? Rest extends AnySetOperatorInterface[]
- ? [
- ValidateShape<
- TValueResult[number],
- TResult[number],
- TypedQueryBuilder
- >,
- ...SetOperatorRestSelect,
- ]
- : ValidateShape<
- TValueResult[number],
- TResult[number],
- TypedQueryBuilder[]
- >
- : never
- : TValue;
-
-export type CreateSetOperatorFn = <
- TTableName extends string | undefined,
- TResultType extends "sync" | "async",
- TRunResult,
- TSelection extends ColumnsSelection,
- TValue extends SetOperatorWithResult,
- TRest extends SetOperatorWithResult[],
- TSelectMode extends SelectMode = "single",
- TNullabilityMap extends Record<
- string,
- JoinNullability
- > = TTableName extends string ? Record : {},
- TDynamic extends boolean = false,
- TExcludedMethods extends string = never,
- TResult extends any[] = SelectResult<
- TSelection,
- TSelectMode,
- TNullabilityMap
- >[],
- TSelectedFields extends ColumnsSelection = BuildSubquerySelection<
- TSelection,
- TNullabilityMap
- >,
->(
- leftSelect: SetOperatorInterface<
- TTableName,
- TResultType,
- TRunResult,
- TSelection,
- TSelectMode,
- TNullabilityMap,
- TDynamic,
- TExcludedMethods,
- TResult,
- TSelectedFields
- >,
- rightSelect: SetOperatorRightSelect,
- ...restSelects: SetOperatorRestSelect
-) => SelectWithout<
- SelectBase<
- TTableName,
- TResultType,
- TRunResult,
- TSelection,
- TSelectMode,
- TNullabilityMap,
- TDynamic,
- TExcludedMethods,
- TResult,
- TSelectedFields
- >,
- false,
- SetOperatorExcludedMethods,
- true
->;
-
-export type GetSetOperators = {
- union: CreateSetOperatorFn;
- intersect: CreateSetOperatorFn;
- except: CreateSetOperatorFn;
- unionAll: CreateSetOperatorFn;
-};
-
-export type SubqueryWithSelection<
- TSelection extends ColumnsSelection,
- TAlias extends string,
-> = Subquery> &
- AddAliasToSelection;
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/table.ts#L49
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/table.ts#L43
- */
-export type TableWithColumns = Table & {
- [key in keyof T["columns"]]: T["columns"][key];
-};
-
-/**
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/sqlite-core/view.ts#L154
- * https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/pg-core/view.ts#L305
- */
-export type ViewWithSelection<
- TName extends string,
- TExisting extends boolean,
- TSelection extends ColumnsSelection,
-> = View & TSelection;
diff --git a/packages/core/src/drizzle/sql.ts b/packages/core/src/drizzle/sql.ts
new file mode 100644
index 000000000..83ebcc0a1
--- /dev/null
+++ b/packages/core/src/drizzle/sql.ts
@@ -0,0 +1,252 @@
+import { Table, is } from "drizzle-orm";
+import {
+ PgColumn,
+ PgEnumColumn,
+ type PgTable,
+ type TableConfig,
+ getTableConfig,
+ integer,
+ pgTable,
+ serial,
+ varchar,
+} from "drizzle-orm/pg-core";
+
+export const pgNativeTypes = new Set([
+ "uuid",
+ "smallint",
+ "integer",
+ "bigint",
+ "boolean",
+ "text",
+ "varchar",
+ "serial",
+ "bigserial",
+ "decimal",
+ "numeric",
+ "real",
+ "json",
+ "jsonb",
+ "time",
+ "time with time zone",
+ "time without time zone",
+ "time",
+ "timestamp",
+ "timestamp with time zone",
+ "timestamp without time zone",
+ "date",
+ "interval",
+ "bigint",
+ "bigserial",
+ "double precision",
+ "interval year",
+ "interval month",
+ "interval day",
+ "interval hour",
+ "interval minute",
+ "interval second",
+ "interval year to month",
+ "interval day to hour",
+ "interval day to minute",
+ "interval day to second",
+ "interval hour to minute",
+ "interval hour to second",
+ "interval minute to second",
+]);
+
+const isPgNativeType = (it: string) => {
+ if (pgNativeTypes.has(it)) return true;
+ const toCheck = it.replace(/ /g, "");
+ return (
+ toCheck.startsWith("varchar(") ||
+ toCheck.startsWith("char(") ||
+ toCheck.startsWith("numeric(") ||
+ toCheck.startsWith("timestamp(") ||
+ toCheck.startsWith("doubleprecision[") ||
+ toCheck.startsWith("intervalyear(") ||
+ toCheck.startsWith("intervalmonth(") ||
+ toCheck.startsWith("intervalday(") ||
+ toCheck.startsWith("intervalhour(") ||
+ toCheck.startsWith("intervalminute(") ||
+ toCheck.startsWith("intervalsecond(") ||
+ toCheck.startsWith("intervalyeartomonth(") ||
+ toCheck.startsWith("intervaldaytohour(") ||
+ toCheck.startsWith("intervaldaytominute(") ||
+ toCheck.startsWith("intervaldaytosecond(") ||
+ toCheck.startsWith("intervalhourtominute(") ||
+ toCheck.startsWith("intervalhourtosecond(") ||
+ toCheck.startsWith("intervalminutetosecond(") ||
+ toCheck.startsWith("vector(") ||
+ toCheck.startsWith("geometry(") ||
+ /^(\w+)(\[\d*])+$/.test(it)
+ );
+};
+
+/** @see https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-kit/src/sqlgenerator.ts#L134 */
+
+export const generateTableSQL = ({
+ table,
+ namespace,
+ namePrefix,
+ extraColumns,
+}: {
+ table: PgTable;
+ namespace: string;
+ namePrefix?: string;
+ extraColumns?: PgColumn[];
+}) => {
+ const config = getTableConfig(table);
+ const tableName = config.name;
+ const columns = config.columns;
+ const primaryKeys = config.primaryKeys;
+
+ let statement = "";
+ const name = namespace
+ ? `"${namespace}"."${namePrefix ?? ""}${tableName}"`
+ : `"${namePrefix ?? ""}${tableName}"`;
+
+ statement += `CREATE TABLE IF NOT EXISTS ${name} (\n`;
+ for (let i = 0; i < columns.length; i++) {
+ const column = columns[i]!;
+
+ const primaryKeyStatement =
+ column.primary && extraColumns === undefined ? " PRIMARY KEY" : "";
+ const notNullStatement =
+ column.notNull && !column.generatedIdentity ? " NOT NULL" : "";
+ const defaultStatement =
+ column.default !== undefined ? ` DEFAULT ${column.default}` : "";
+
+ const uniqueConstraint = column.isUnique
+ ? ` CONSTRAINT "${column.uniqueName}" UNIQUE${column.uniqueType === "not distinct" ? " NULLS NOT DISTINCT" : ""}`
+ : "";
+
+ const typeSchema = is(column, PgEnumColumn)
+ ? column.enum.schema || "public"
+ : undefined;
+
+ const schemaPrefix =
+ typeSchema && typeSchema !== "public" ? `"${typeSchema}".` : "";
+
+ const type = isPgNativeType(column.getSQLType())
+ ? column.getSQLType()
+ : `${schemaPrefix}"${column.getSQLType()}"`;
+ const generated = column.generated;
+
+ const generatedStatement = generated
+ ? ` GENERATED ALWAYS AS (${generated?.as}) STORED`
+ : "";
+
+ // const unsquashedIdentity = column.generatedIdentity
+ // ? PgSquasher.unsquashIdentity(column.identity)
+ // : undefined;
+
+ // const identityWithSchema = schema
+ // ? `"${schema}"."${unsquashedIdentity?.name}"`
+ // : `"${unsquashedIdentity?.name}"`;
+
+ // const identity = unsquashedIdentity
+ // ? ` GENERATED ${
+ // unsquashedIdentity.type === "always" ? "ALWAYS" : "BY DEFAULT"
+ // } AS IDENTITY (sequence name ${identityWithSchema}${
+ // unsquashedIdentity.increment
+ // ? ` INCREMENT BY ${unsquashedIdentity.increment}`
+ // : ""
+ // }${
+ // unsquashedIdentity.minValue
+ // ? ` MINVALUE ${unsquashedIdentity.minValue}`
+ // : ""
+ // }${
+ // unsquashedIdentity.maxValue
+ // ? ` MAXVALUE ${unsquashedIdentity.maxValue}`
+ // : ""
+ // }${
+ // unsquashedIdentity.startWith
+ // ? ` START WITH ${unsquashedIdentity.startWith}`
+ // : ""
+ // }${unsquashedIdentity.cache ? ` CACHE ${unsquashedIdentity.cache}` : ""}${
+ // unsquashedIdentity.cycle ? ` CYCLE` : ""
+ // })`
+ // : "";
+
+ statement += `\t"${column.name}" ${type}${primaryKeyStatement}${defaultStatement}${generatedStatement}${notNullStatement}${uniqueConstraint}`;
+ statement +=
+ i === columns.length - 1 && extraColumns === undefined ? "" : ",\n";
+ }
+
+ if (extraColumns) {
+ for (let i = 0; i < extraColumns.length; i++) {
+ const column = extraColumns[i]!;
+
+ const primaryKeyStatement = column.primary ? " PRIMARY KEY" : "";
+ const notNullStatement =
+ column.notNull && !column.generatedIdentity ? " NOT NULL" : "";
+
+ const type = column.getSQLType();
+
+ statement += `\t"${column.name}" ${type}${primaryKeyStatement}${notNullStatement}`;
+ statement += i === extraColumns.length - 1 ? "" : ",\n";
+ }
+ }
+
+ // TODO(kyle) indexes
+
+ if (
+ extraColumns === undefined &&
+ typeof primaryKeys !== "undefined" &&
+ primaryKeys.length > 0
+ ) {
+ statement += ",\n";
+
+ statement += `\tCONSTRAINT "${primaryKeys[0]!.getName()}" PRIMARY KEY(\"${primaryKeys[0]!.columns.map((c) => c.name).join(`","`)}\")`;
+ // statement += `\n`;
+ }
+
+ // if (
+ // typeof uniqueConstraints !== "undefined" &&
+ // uniqueConstraints.length > 0
+ // ) {
+ // for (const uniqueConstraint of uniqueConstraints) {
+ // statement += ",\n";
+ // const unsquashedUnique = PgSquasher.unsquashUnique(uniqueConstraint);
+ // statement += `\tCONSTRAINT "${unsquashedUnique.name}" UNIQUE${
+ // unsquashedUnique.nullsNotDistinct ? " NULLS NOT DISTINCT" : ""
+ // }(\"${unsquashedUnique.columns.join(`","`)}\")`;
+ // // statement += `\n`;
+ // }
+ // }
+ statement += "\n);";
+ statement += "\n";
+
+ return statement;
+};
+
+export const getPrimaryKeyColumns = (table: PgTable): string[] => {
+ const tableConfig = getTableConfig(table);
+ if (tableConfig.primaryKeys.length > 0) {
+ return tableConfig.primaryKeys[0]!.columns.map((c) => c.name);
+ }
+
+ const pkColumn = tableConfig.columns.find((c) => c.primary)!;
+
+ return [pkColumn.name];
+};
+
+export const getReorgTable = (table: PgTable) => {
+ const config = getTableConfig(table);
+
+ const t = pgTable(`_ponder_reorg__${config.name}`, {
+ operation_id: serial("operation_id").notNull().primaryKey(),
+ operation: integer("operation").notNull(),
+ checkpoint: varchar("checkpoint", {
+ length: 75,
+ }).notNull(),
+ });
+
+ for (const [field, col] of Object.entries(table)) {
+ if (is(col, PgColumn)) {
+ // @ts-ignore
+ t[Table.Symbol.Columns][field] = col;
+ }
+ }
+
+ return t;
+};
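
The new `sql.ts` helpers above turn a Drizzle pg-core table into a raw `CREATE TABLE IF NOT EXISTS` statement and expose its primary-key columns. A minimal sketch of how they might be exercised, assuming a hypothetical `accounts` table and a `ponder` namespace (neither is part of this diff):

```ts
// Sketch only, not part of the diff: exercising generateTableSQL and
// getPrimaryKeyColumns against a hypothetical Drizzle table.
import { bigint, pgTable, varchar } from "drizzle-orm/pg-core";
import { generateTableSQL, getPrimaryKeyColumns } from "./sql.js";

// Hypothetical table, for illustration only.
const accounts = pgTable("accounts", {
  address: varchar("address", { length: 42 }).primaryKey(),
  balance: bigint("balance", { mode: "bigint" }).notNull(),
});

// Expected to produce roughly:
// CREATE TABLE IF NOT EXISTS "ponder"."accounts" (
//   "address" varchar(42) PRIMARY KEY NOT NULL,
//   "balance" bigint NOT NULL
// );
console.log(generateTableSQL({ table: accounts, namespace: "ponder" }));

// No composite primary key is defined, so this falls back to the
// column-level primary key: ["address"]
console.log(getPrimaryKeyColumns(accounts));
```

`getReorgTable` follows the same pattern: it builds a `_ponder_reorg__`-prefixed shadow table with `operation_id`, `operation`, and `checkpoint` columns, then copies the source table's columns onto it.
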
diff --git a/packages/core/src/drizzle/table.test-d.ts b/packages/core/src/drizzle/table.test-d.ts
deleted file mode 100644
index 7c57393be..000000000
--- a/packages/core/src/drizzle/table.test-d.ts
+++ /dev/null
@@ -1,152 +0,0 @@
-import { createSchema } from "@/index.js";
-import { eq } from "drizzle-orm";
-import type { Hex } from "viem";
-import { expectTypeOf, test } from "vitest";
-import type { DrizzleDb } from "./db.js";
-import type { DrizzleTable } from "./table.js";
-
-test("select query promise", async () => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- name: p.int().optional(),
- }),
- }));
-
- const table = {} as DrizzleTable<
- "table",
- (typeof schema)["table"]["table"],
- typeof schema
- >;
-
- const result = await ({} as DrizzleDb).select({ id: table.id }).from(table);
- // ^?
-
- expectTypeOf<{ id: string }[]>(result);
-});
-
-test("select optional column", async () => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- name: p.int().optional(),
- }),
- }));
-
- const table = {} as DrizzleTable<
- "table",
- (typeof schema)["table"]["table"],
- typeof schema
- >;
-
- const result = await ({} as DrizzleDb).select().from(table);
- // ^?
-
- expectTypeOf<{ id: string; name: number | null }[]>(result);
-});
-
-test("select enum", async () => {
- const schema = createSchema((p) => ({
- e: p.createEnum(["yes", "no"]),
- table: p.createTable({
- id: p.string(),
- e: p.enum("e"),
- }),
- }));
-
- const table = {} as DrizzleTable<
- "table",
- (typeof schema)["table"]["table"],
- typeof schema
- >;
-
- const result = await ({} as DrizzleDb).select().from(table);
- // ^?
-
- expectTypeOf<{ id: string; e: "yes" | "no" }[]>(result);
-});
-
-test("select json", async () => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- json: p.json<{ a: number; b: string }>(),
- }),
- }));
-
- const table = {} as DrizzleTable<
- "table",
- (typeof schema)["table"]["table"],
- typeof schema
- >;
-
- const result = await ({} as DrizzleDb).select().from(table);
- // ^?
-
- expectTypeOf<{ id: string; json: { a: number; b: string } }[]>(result);
-});
-
-test("select list", async () => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- list: p.string().list(),
- }),
- }));
-
- const table = {} as DrizzleTable<
- "table",
- (typeof schema)["table"]["table"],
- typeof schema
- >;
-
- const result = await ({} as DrizzleDb).select().from(table);
- // ^?
-
- expectTypeOf<{ id: string; list: string[] }[]>(result);
-});
-
-test("select join", async () => {
- const schema = createSchema((p) => ({
- account: p.createTable({
- id: p.hex(),
- name: p.string(),
- age: p.int(),
- }),
- nft: p.createTable({
- id: p.bigint(),
- owner: p.hex().references("account.id"),
- }),
- }));
-
- const account = {} as DrizzleTable<
- "account",
- (typeof schema)["account"]["table"],
- typeof schema
- >;
- const nft = {} as DrizzleTable<
- "nft",
- (typeof schema)["nft"]["table"],
- typeof schema
- >;
-
- const result = await ({} as DrizzleDb)
- // ^?
- .select()
- .from(account)
- .fullJoin(nft, eq(account.id, nft.owner));
-
- expectTypeOf<
- {
- account: {
- id: Hex;
- name: string;
- age: number;
- } | null;
- nft: {
- id: bigint;
- owner: Hex;
- } | null;
- }[]
- >(result);
-});
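
The deleted type tests above asserted that Ponder's `DrizzleTable` shim produced correct row types for `db.select()`. With tables now defined directly as Drizzle tables, the equivalent checks can lean on Drizzle's own inference. A minimal sketch, assuming a hypothetical table (not part of this diff):

```ts
// Sketch only, not part of the diff: with native Drizzle tables, row types
// come from drizzle-orm's own inference helpers rather than a custom shim.
import type { InferSelectModel } from "drizzle-orm";
import { integer, pgTable, text } from "drizzle-orm/pg-core";

// Hypothetical table, for illustration only.
const table = pgTable("table", {
  id: text("id").primaryKey(),
  name: integer("name"),
});

// Mirrors the old expectation: { id: string; name: number | null }
type Row = InferSelectModel<typeof table>;
```
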
diff --git a/packages/core/src/drizzle/table.ts b/packages/core/src/drizzle/table.ts
deleted file mode 100644
index 6fe78fc6a..000000000
--- a/packages/core/src/drizzle/table.ts
+++ /dev/null
@@ -1,51 +0,0 @@
-import type {
- EnumColumn,
- ExtractNonVirtualColumnNames,
- JSONColumn,
- Schema as PonderSchema,
- Table as PonderTable,
- ReferenceColumn,
- ScalarColumn,
-} from "@/schema/common.js";
-import type { InferColumnType } from "@/schema/infer.js";
-import type { BuildColumns, ColumnBuilderBase } from "drizzle-orm";
-import type { TableWithColumns } from "./select.js";
-
-/**
- * Performs type transformation between Ponder and Drizzle column representation.
- *
- * @returns TableWithColumns
- */
-export type DrizzleTable<
- tableName extends string,
- table extends PonderTable,
- schema extends PonderSchema,
-> = TableWithColumns<{
- name: tableName;
- schema: undefined;
- columns: BuildColumns<
- tableName,
- {
- [columnName in ExtractNonVirtualColumnNames]: ColumnBuilderBase<{
- name: columnName & string;
- dataType: "custom";
- columnType: "ponder";
- data: InferColumnType;
- driverParam: unknown;
- enumValues: undefined;
- notNull: (table[columnName] &
- (
- | ScalarColumn
- | ReferenceColumn
- | EnumColumn
- | JSONColumn
- ))[" optional"] extends true
- ? false
- : true;
- primaryKey: columnName extends "id" ? true : false;
- }>;
- },
- "common"
- >;
- dialect: "common";
-}>;
diff --git a/packages/core/src/graphql/buildGraphqlSchema.test.ts b/packages/core/src/graphql/buildGraphqlSchema.test.ts
deleted file mode 100644
index bfc2af2d1..000000000
--- a/packages/core/src/graphql/buildGraphqlSchema.test.ts
+++ /dev/null
@@ -1,3097 +0,0 @@
-import {
- setupCommon,
- setupDatabaseServices,
- setupIsolatedDatabase,
-} from "@/_test/setup.js";
-import { getMetadataStore } from "@/indexing-store/metadata.js";
-import type { IndexingStore } from "@/indexing-store/store.js";
-import { createSchema } from "@/schema/schema.js";
-import { encodeCheckpoint, zeroCheckpoint } from "@/utils/checkpoint.js";
-import { type GraphQLType, execute, parse } from "graphql";
-import { beforeEach, expect, test } from "vitest";
-import { buildGraphQLSchema } from "./buildGraphqlSchema.js";
-import { buildLoaderCache } from "./buildLoaderCache.js";
-
-beforeEach(setupCommon);
-beforeEach(setupIsolatedDatabase);
-
-const create = async (id: string, indexingStore: IndexingStore) => {
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id,
- data: {
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x0",
- bigint: 0n,
- },
- });
-};
-
-test("scalar", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await create("0", indexingStore);
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- table(id: "0") {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- table: {
- id: "0",
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- });
-
- await cleanup();
-});
-
-test("scalar list", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string().list(),
- int: p.int().list(),
- float: p.float().list(),
- boolean: p.boolean().list(),
- hex: p.hex().list(),
- bigint: p.bigint().list(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- string: ["0"],
- int: [0],
- float: [0],
- boolean: [false],
- hex: ["0x0"],
- bigint: [0n],
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- table(id: "0") {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- table: {
- id: "0",
- string: ["0"],
- int: [0],
- float: [0],
- boolean: [false],
- hex: ["0x0"],
- bigint: ["0"],
- },
- });
-
- await cleanup();
-});
-
-test("scalar optional", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string().optional(),
- int: p.int().optional(),
- float: p.float().optional(),
- boolean: p.boolean().optional(),
- hex: p.hex().optional(),
- bigint: p.bigint().optional(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- string: null,
- int: null,
- float: null,
- boolean: null,
- hex: null,
- bigint: null,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- table(id: "0") {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- table: {
- id: "0",
- string: null,
- int: null,
- float: null,
- boolean: null,
- hex: null,
- bigint: null,
- },
- });
-
- await cleanup();
-});
-
-test("scalar optional list", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string().optional().list(),
- int: p.int().optional().list(),
- float: p.float().optional().list(),
- boolean: p.boolean().optional().list(),
- hex: p.hex().optional().list(),
- bigint: p.bigint().optional().list(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- string: null,
- int: null,
- float: null,
- boolean: null,
- hex: null,
- bigint: null,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- table(id: "0") {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- table: {
- id: "0",
- string: null,
- int: null,
- float: null,
- boolean: null,
- hex: null,
- bigint: null,
- },
- });
-
- await cleanup();
-});
-
-test("json", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- json: p.json(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- json: { kevin: 52 },
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- table(id: "0") {
- id
- json
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- table: {
- id: "0",
- json: { kevin: 52 },
- },
- });
-
- await cleanup();
-});
-
-test("enum", async (context) => {
- const schema = createSchema((p) => ({
- enum: p.createEnum(["A", "B"]),
- table: p.createTable({
- id: p.string(),
- enum: p.enum("enum"),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- enum: "A",
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- table(id: "0") {
- id
- enum
- }
- }
-`);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- table: {
- id: "0",
- enum: "A",
- },
- });
-
- await cleanup();
-});
-
-test("enum optional", async (context) => {
- const schema = createSchema((p) => ({
- enum: p.createEnum(["A", "B"]),
- table: p.createTable({
- id: p.string(),
- enum: p.enum("enum").optional(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- enum: null,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- table(id: "0") {
- id
- enum
- }
-}
-`);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- table: {
- id: "0",
- enum: null,
- },
- });
-
- await cleanup();
-});
-
-test("enum list", async (context) => {
- const schema = createSchema((p) => ({
- enum: p.createEnum(["A", "B"]),
- table: p.createTable({
- id: p.string(),
- enum: p.enum("enum").list(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- enum: ["A"],
- },
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "1",
- data: {
- enum: ["B"],
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- table(id: "0") {
- id
- enum
- }
- }
-`);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- table: {
- id: "0",
- enum: ["A"],
- },
- });
-
- await cleanup();
-});
-
-test("enum optional list", async (context) => {
- const schema = createSchema((p) => ({
- enum: p.createEnum(["A", "B"]),
- table: p.createTable({
- id: p.string(),
- enum: p.enum("enum").optional().list(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- enum: null,
- },
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "1",
- data: {
- enum: null,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- table(id: "0") {
- id
- enum
- }
- }
-`);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- table: {
- id: "0",
- enum: null,
- },
- });
-
- await cleanup();
-});
-
-test("one", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
-
- ref: p.string().references("table.id"),
- one: p.one("ref"),
-
- refNull: p.string().references("table.id").optional(),
- oneNull: p.one("refNull"),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- ref: "0",
- refNull: null,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- table(id: "0") {
- one {
- id
- }
- oneNull {
- id
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: {
- readonlyStore: indexingStore,
- getLoader: buildLoaderCache({ store: indexingStore }),
- },
- });
-
- expect(result.data).toMatchObject({
- table: {
- one: {
- id: "0",
- },
- oneNull: null,
- },
- });
-
- await cleanup();
-});
-
-test("many", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
-
- ref: p.string().references("many.id"),
- // refNull: p.string().references("many.id").optional(),
- }),
- many: p.createTable({
- id: p.string(),
- manyCol: p.many("table.ref"),
- // manyNull: p.many("table.refNull"),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- ref: "0",
- // refNull: null,
- },
- });
-
- await indexingStore.create({
- tableName: "many",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- many(id: "0") {
- manyCol {
- items {
- id
- }
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: {
- readonlyStore: indexingStore,
- getLoader: buildLoaderCache({ store: indexingStore }),
- },
- });
-
- expect(result.data).toMatchObject({
- many: {
- manyCol: {
- items: [
- {
- id: "0",
- },
- ],
- },
- },
- });
-
- await cleanup();
-});
-
-test("many w/ filter", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- col: p.string(),
- ref: p.string().references("many.id"),
- }),
- many: p.createTable({
- id: p.string(),
- manyCol: p.many("table.ref"),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- col: "kevin",
- ref: "0",
- },
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "1",
- data: {
- col: "kyle",
- ref: "0",
- },
- });
-
- await indexingStore.create({
- tableName: "many",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- many(id: "0") {
- manyCol (where: {col: "kevin"}) {
- items {
- id
- }
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: {
- readonlyStore: indexingStore,
- getLoader: buildLoaderCache({ store: indexingStore }),
- },
- });
-
- expect(result.data).toMatchObject({
- many: {
- manyCol: {
- items: [
- {
- id: "0",
- },
- ],
- },
- },
- });
-
- await cleanup();
-});
-
-test("bigint id", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: 0n,
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- table(id: "0") {
- id
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- table: {
- id: "0",
- },
- });
-
- await cleanup();
-});
-
-test("hex id", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.hex(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0x00",
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- table(id: "0x00") {
- id
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- table: {
- id: "0x00",
- },
- });
-
- await cleanup();
-});
-
-test("filter string eq", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await create("0", indexingStore);
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { string: "0" }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter string in", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await create("0", indexingStore);
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { string_in: ["0", "2"] }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter string contains", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- string: "string",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x0",
- bigint: 0n,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { string_contains: "tr" }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "string",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter string starts with", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- string: "string",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x0",
- bigint: 0n,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { string_starts_with: "str" }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "string",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter string not ends with", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- string: "string",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x0",
- bigint: 0n,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { string_not_ends_with: "str" }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "string",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter int eq", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await create("0", indexingStore);
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { int: 0 }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter int gt", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- string: "0",
- int: 1,
- float: 0,
- boolean: false,
- hex: "0x0",
- bigint: 0n,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { int_gt: 0 }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "0",
- int: 1,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter int lte", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await create("0", indexingStore);
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { int_lte: 0 }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter int in", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await create("0", indexingStore);
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { int_in: [0, 2] }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter float eq", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await create("0", indexingStore);
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { float: 0 }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter float gt", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- string: "0",
- int: 0,
- float: 1,
- boolean: false,
- hex: "0x0",
- bigint: 0n,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { float_gt: 0 }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "0",
- int: 0,
- float: 1,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter float lte", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await create("0", indexingStore);
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { float_lte: 0 }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter float in", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await create("0", indexingStore);
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { float_in: [0, 2] }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter bigint eq", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await create("0", indexingStore);
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { bigint: "0" }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter bigint gt", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x0",
- bigint: 1n,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { bigint_gt: "0" }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "1",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter bigint lte", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await create("0", indexingStore);
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { bigint_lte: "0" }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter bigint in", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await create("0", indexingStore);
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { bigint_in: ["0", "2"] }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filer hex eq", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await create("0", indexingStore);
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { hex: "0x00" }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter hex gt", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x1",
- bigint: 0n,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (where: { hex_gt: "0x00" }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x01",
- bigint: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter string list eq", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string().list(),
- int: p.int().list(),
- float: p.float().list(),
- boolean: p.boolean().list(),
- hex: p.hex().list(),
- bigint: p.bigint().list(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- string: ["0"],
- int: [0],
- float: [0],
- boolean: [false],
- hex: ["0x0"],
- bigint: [0n],
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables(where: { string: ["0"] }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: ["0"],
- int: [0],
- float: [0],
- boolean: [false],
- hex: ["0x0"],
- bigint: ["0"],
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter string list has", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string().list(),
- int: p.int().list(),
- float: p.float().list(),
- boolean: p.boolean().list(),
- hex: p.hex().list(),
- bigint: p.bigint().list(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- string: ["0"],
- int: [0],
- float: [0],
- boolean: [false],
- hex: ["0x0"],
- bigint: [0n],
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables(where: { string_has: "0" }) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- string: ["0"],
- int: [0],
- float: [0],
- boolean: [false],
- hex: ["0x0"],
- bigint: ["0"],
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter enum eq", async (context) => {
- const schema = createSchema((p) => ({
- enum: p.createEnum(["A", "B"]),
- table: p.createTable({
- id: p.string(),
- enum: p.enum("enum"),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- enum: "A",
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
-      tables(where: { enum: "A" }) {
-        items {
- id
- enum
- }
- }
- }
-`);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- enum: "A",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter enum in", async (context) => {
- const schema = createSchema((p) => ({
- enum: p.createEnum(["A", "B"]),
- table: p.createTable({
- id: p.string(),
- enum: p.enum("enum"),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- enum: "A",
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
-      tables(where: { enum_in: ["A"] }) {
-        items {
- id
- enum
- }
- }
- }
-`);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- enum: "A",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter ref eq", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
-
- ref: p.string().references("table.id"),
- one: p.one("ref"),
-
- refNull: p.string().references("table.id").optional(),
- oneNull: p.one("refNull"),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- ref: "0",
- refNull: null,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables(where: { ref: "0" }) {
- items {
- one {
- id
- }
- oneNull
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: {
- readonlyStore: indexingStore,
- getLoader: buildLoaderCache({ store: indexingStore }),
- },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- one: {
- id: "0",
- },
- oneNull: null,
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("filter ref in", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
-
- ref: p.string().references("table.id"),
- one: p.one("ref"),
-
- refNull: p.string().references("table.id").optional(),
- oneNull: p.one("refNull"),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- ref: "0",
- refNull: null,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables(where: { ref_in: ["0", "2"] }) {
- items {
- one {
- id
- }
-
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: {
- readonlyStore: indexingStore,
- getLoader: buildLoaderCache({ store: indexingStore }),
- },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- one: {
- id: "0",
- },
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("order int asc", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await create("0", indexingStore);
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "1",
- data: {
- string: "0",
- int: 1_000,
- float: 0,
- boolean: false,
- hex: "0x0",
- bigint: 0n,
- },
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "2",
- data: {
- string: "0",
- int: 5,
- float: 0,
- boolean: false,
- hex: "0x0",
- bigint: 0n,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables(orderBy: "int", orderDirection: "asc") {
- items {
- id
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- },
- {
- id: "2",
- },
- {
- id: "1",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("order bigint asc", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await create("0", indexingStore);
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "1",
- data: {
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x0",
- bigint: 1_000n,
- },
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "2",
- data: {
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x0",
- bigint: 5n,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables(orderBy: "bigint", orderDirection: "asc") {
- items {
- id
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "0",
- },
- {
- id: "2",
- },
- {
- id: "1",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("order bigint desc", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await create("0", indexingStore);
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "1",
- data: {
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x0",
- bigint: 1_000n,
- },
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "2",
- data: {
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x0",
- bigint: 5n,
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables(orderBy: "bigint", orderDirection: "desc") {
- items {
- id
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- expect(result.data).toMatchObject({
- tables: {
- items: [
- {
- id: "1",
- },
- {
- id: "2",
- },
- {
- id: "0",
- },
- ],
- },
- });
-
- await cleanup();
-});
-
-test("limit default", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- for (let i = 0; i < 100; i++) {
- await create(String(i), indexingStore);
- }
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- // @ts-ignore
- expect(result.data.tables.items).toHaveLength(50);
-
- await cleanup();
-});
-
-test("limit", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- for (let i = 0; i < 100; i++) {
- await create(String(i), indexingStore);
- }
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (limit: 15) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- // @ts-ignore
- expect(result.data.tables.items).toHaveLength(15);
-
- await cleanup();
-});
-
-test("limit error", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- // for (let i = 0; i < 100; i++) {
- // await create(String(i), indexingStore);
- // }
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- tables (limit: 1005) {
- items {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore },
- });
-
- // @ts-ignore
- expect(result.errors[0].message).toBe(
- "Invalid limit. Got 1005, expected <=1000.",
- );
-
- await cleanup();
-});
-
-test("filter type has correct suffixes and types", () => {
- const s = createSchema((p) => ({
- SimpleEnum: p.createEnum(["VALUE", "ANOTHER_VALUE"]),
- RelatedTableStringId: p.createTable({ id: p.string() }),
- RelatedTableBigIntId: p.createTable({ id: p.bigint() }),
- Table: p.createTable({
- id: p.string(),
- int: p.int(),
- float: p.float(),
- bool: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- enum: p.enum("SimpleEnum"),
- listString: p.string().list(),
- listBigInt: p.bigint().list(),
- listEnum: p.enum("SimpleEnum").list(),
- relatedTableStringId: p.string().references("RelatedTableStringId.id"),
- relatedTableBigIntId: p.bigint().references("RelatedTableBigIntId.id"),
- relatedTableString: p.one("relatedTableStringId"),
- }),
- }));
-
- const serverSchema = buildGraphQLSchema(s);
-
- const typeMap = serverSchema.getTypeMap();
-
- const tableFilterType = typeMap.TableFilter!;
- const fields = (tableFilterType.toConfig() as any).fields as Record<
- string,
- { name: string; type: GraphQLType }
- >;
-
-  const fieldsPretty = Object.entries(fields).reduce<Record<string, string>>(
- (acc, [key, value]) => {
- acc[key] = value.type.toString();
- return acc;
- },
- {},
- );
-
- expect(fieldsPretty).toMatchObject({
- id: "String",
- id_not: "String",
- id_in: "[String]",
- id_not_in: "[String]",
- id_contains: "String",
- id_not_contains: "String",
- id_starts_with: "String",
- id_ends_with: "String",
- id_not_starts_with: "String",
- id_not_ends_with: "String",
- int: "Int",
- int_not: "Int",
- int_in: "[Int]",
- int_not_in: "[Int]",
- int_gt: "Int",
- int_lt: "Int",
- int_gte: "Int",
- int_lte: "Int",
- float: "Float",
- float_not: "Float",
- float_in: "[Float]",
- float_not_in: "[Float]",
- float_gt: "Float",
- float_lt: "Float",
- float_gte: "Float",
- float_lte: "Float",
- bool: "Boolean",
- bool_not: "Boolean",
- bool_in: "[Boolean]",
- bool_not_in: "[Boolean]",
- hex: "String",
- hex_gt: "String",
- hex_lt: "String",
- hex_gte: "String",
- hex_lte: "String",
- hex_not: "String",
- hex_in: "[String]",
- hex_not_in: "[String]",
- bigint: "BigInt",
- bigint_not: "BigInt",
- bigint_in: "[BigInt]",
- bigint_not_in: "[BigInt]",
- bigint_gt: "BigInt",
- bigint_lt: "BigInt",
- bigint_gte: "BigInt",
- bigint_lte: "BigInt",
- enum: "SimpleEnum",
- enum_not: "SimpleEnum",
- enum_in: "[SimpleEnum]",
- enum_not_in: "[SimpleEnum]",
- listString: "[String]",
- listString_not: "[String]",
- listString_has: "String",
- listString_not_has: "String",
- listBigInt: "[BigInt]",
- listBigInt_not: "[BigInt]",
- listBigInt_has: "BigInt",
- listBigInt_not_has: "BigInt",
- listEnum: "[SimpleEnum]",
- listEnum_not: "[SimpleEnum]",
- listEnum_has: "SimpleEnum",
- listEnum_not_has: "SimpleEnum",
- relatedTableStringId: "String",
- relatedTableStringId_not: "String",
- relatedTableStringId_in: "[String]",
- relatedTableStringId_not_in: "[String]",
- relatedTableStringId_contains: "String",
- relatedTableStringId_not_contains: "String",
- relatedTableStringId_starts_with: "String",
- relatedTableStringId_ends_with: "String",
- relatedTableStringId_not_starts_with: "String",
- relatedTableStringId_not_ends_with: "String",
- relatedTableBigIntId: "BigInt",
- relatedTableBigIntId_not: "BigInt",
- relatedTableBigIntId_in: "[BigInt]",
- relatedTableBigIntId_not_in: "[BigInt]",
- relatedTableBigIntId_gt: "BigInt",
- relatedTableBigIntId_lt: "BigInt",
- relatedTableBigIntId_gte: "BigInt",
- relatedTableBigIntId_lte: "BigInt",
- });
-});
-
-test("metadata", async (context) => {
- const schema = createSchema(() => ({}));
-
- const { indexingStore, cleanup, database } = await setupDatabaseServices(
- context,
- {
- schema,
- },
- );
-
- const metadataStore = getMetadataStore({
- dialect: database.dialect,
- db: database.qb.user,
- });
-
- await metadataStore.setStatus({
- mainnet: {
- ready: true,
- block: {
- number: 10,
- timestamp: 20,
- },
- },
- });
-
- const graphqlSchema = buildGraphQLSchema(schema);
-
- const document = parse(`
- query {
- _meta {
- status
- }
- }
- `);
-
- const result = await execute({
- schema: graphqlSchema,
- document,
- contextValue: { readonlyStore: indexingStore, metadataStore },
- });
-
- expect(result.data).toMatchObject({
- _meta: {
- status: {
- mainnet: {
- ready: true,
- block: {
- number: 10,
- timestamp: 20,
- },
- },
- },
- },
- });
-
- await cleanup();
-});
diff --git a/packages/core/src/graphql/buildGraphqlSchema.ts b/packages/core/src/graphql/buildGraphqlSchema.ts
deleted file mode 100644
index 1cc918e9b..000000000
--- a/packages/core/src/graphql/buildGraphqlSchema.ts
+++ /dev/null
@@ -1,71 +0,0 @@
-import type { MetadataStore, ReadonlyStore } from "@/indexing-store/store.js";
-import type { Schema } from "@/schema/common.js";
-import { getTables } from "@/schema/utils.js";
-import {
- type GraphQLFieldConfig,
- GraphQLObjectType,
- GraphQLSchema,
-} from "graphql";
-import type { GetLoader } from "./buildLoaderCache.js";
-import { buildEntityTypes } from "./entity.js";
-import { buildEnumTypes } from "./enum.js";
-import { buildEntityFilterTypes } from "./filter.js";
-import { metadataEntity } from "./metadata.js";
-import { buildPluralField } from "./plural.js";
-import { buildSingularField } from "./singular.js";
-
-// TODO(kyle) stricter type
-export type Parent = Record<string, any>;
-export type Context = {
- getLoader: GetLoader;
- readonlyStore: ReadonlyStore;
- metadataStore: MetadataStore;
-};
-
-export const buildGraphQLSchema = (schema: Schema): GraphQLSchema => {
-  const queryFields: Record<string, GraphQLFieldConfig<Parent, Context>> = {};
-
- const { enumTypes } = buildEnumTypes({ schema });
- const { entityFilterTypes } = buildEntityFilterTypes({ schema, enumTypes });
- const { entityTypes, entityPageTypes } = buildEntityTypes({
- schema,
- enumTypes,
- entityFilterTypes,
- });
-
- for (const [tableName, { table }] of Object.entries(getTables(schema))) {
- const entityType = entityTypes[tableName]!;
- const entityPageType = entityPageTypes[tableName]!;
- const entityFilterType = entityFilterTypes[tableName]!;
-
- const singularFieldName =
- tableName.charAt(0).toLowerCase() + tableName.slice(1);
- queryFields[singularFieldName] = buildSingularField({
- tableName,
- table,
- entityType,
- });
-
- const pluralFieldName = `${singularFieldName}s`;
- queryFields[pluralFieldName] = buildPluralField({
- tableName,
- entityPageType,
- entityFilterType,
- });
- }
-
- queryFields._meta = {
- type: metadataEntity,
- resolve: async (_source, _args, context) => {
- const status = await context.metadataStore.getStatus();
- return { status };
- },
- };
-
- return new GraphQLSchema({
- query: new GraphQLObjectType({
- name: "Query",
- fields: queryFields,
- }),
- });
-};
diff --git a/packages/core/src/graphql/buildLoaderCache.ts b/packages/core/src/graphql/buildLoaderCache.ts
deleted file mode 100644
index bfb6fbeb0..000000000
--- a/packages/core/src/graphql/buildLoaderCache.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-import type { ReadonlyStore } from "@/indexing-store/store.js";
-import DataLoader from "dataloader";
-
-export type GetLoader = ReturnType<typeof buildLoaderCache>;
-
-export function buildLoaderCache({ store }: { store: ReadonlyStore }) {
- const loaderCache: Record<
- string,
-    DataLoader<string | number | bigint, any> | undefined
- > = {};
-
- return ({ tableName }: { tableName: string }) => {
- const loader = (loaderCache[tableName] ??= new DataLoader(
- async (ids) => {
- const rows = await store.findMany({
- tableName,
- where: { id: { in: ids } },
- limit: ids.length,
- });
-
- return ids.map((id) => rows.items.find((row) => row.id === id));
- },
- { maxBatchSize: 1_000 },
- ));
-
- return loader;
- };
-}
diff --git a/packages/core/src/graphql/entity.ts b/packages/core/src/graphql/entity.ts
deleted file mode 100644
index ef5697248..000000000
--- a/packages/core/src/graphql/entity.ts
+++ /dev/null
@@ -1,187 +0,0 @@
-import type { ReferenceColumn, Schema } from "@/schema/common.js";
-import {
- extractReferenceTable,
- getTables,
- isEnumColumn,
- isJSONColumn,
- isListColumn,
- isManyColumn,
- isOneColumn,
- isOptionalColumn,
-} from "@/schema/utils.js";
-import {
- GraphQLBoolean,
- type GraphQLFieldResolver,
- type GraphQLInputObjectType,
-} from "graphql";
-import {
- type GraphQLEnumType,
- type GraphQLFieldConfigMap,
- GraphQLInt,
- GraphQLList,
- GraphQLNonNull,
- GraphQLObjectType,
- GraphQLString,
-} from "graphql";
-import type { Context, Parent } from "./buildGraphqlSchema.js";
-import { buildWhereObject } from "./filter.js";
-import { GraphQLJSON } from "./graphQLJson.js";
-import type { PluralResolver } from "./plural.js";
-import { SCALARS } from "./scalar.js";
-
-const GraphQLPageInfo = new GraphQLObjectType({
- name: "PageInfo",
- fields: {
- hasNextPage: { type: new GraphQLNonNull(GraphQLBoolean) },
- hasPreviousPage: { type: new GraphQLNonNull(GraphQLBoolean) },
- startCursor: { type: GraphQLString },
- endCursor: { type: GraphQLString },
- },
-});
-
-export const buildEntityTypes = ({
- schema,
- enumTypes,
- entityFilterTypes,
-}: {
- schema: Schema;
-  enumTypes: Record<string, GraphQLEnumType>;
-  entityFilterTypes: Record<string, GraphQLInputObjectType>;
-}) => {
-  const entityTypes: Record<string, GraphQLObjectType<Parent, Context>> = {};
-  const entityPageTypes: Record<string, GraphQLObjectType> = {};
-
- for (const [tableName, { table }] of Object.entries(getTables(schema))) {
- entityTypes[tableName] = new GraphQLObjectType({
- name: tableName,
- fields: () => {
-        const fieldConfigMap: GraphQLFieldConfigMap<Parent, Context> = {};
-
- Object.entries(table).forEach(([columnName, column]) => {
- if (isOneColumn(column)) {
- // Column must resolve the foreign key of the referenced column
- // Note: this relies on the fact that reference columns can't be lists.
- const referenceColumn = table[
- column[" reference"]
- ] as ReferenceColumn;
- const referencedTable = extractReferenceTable(referenceColumn);
-
-            const resolver: GraphQLFieldResolver<Parent, Context> = async (
- parent,
- _args,
- context,
- ) => {
- // The parent object gets passed in here containing reference column values.
- const relatedRecordId = parent[column[" reference"]];
- // Note: Don't query with a null or undefined id, indexing store will throw error.
- if (relatedRecordId === null || relatedRecordId === undefined)
- return null;
-
- const loader = context.getLoader({
- tableName: referencedTable,
- });
-
- return await loader.load(relatedRecordId);
- };
-
- fieldConfigMap[columnName] = {
- type: isOptionalColumn(referenceColumn)
- ? entityTypes[referencedTable]!
- : new GraphQLNonNull(entityTypes[referencedTable]!),
- resolve: resolver,
- };
- } else if (isManyColumn(column)) {
- const resolver: PluralResolver = async (parent, args, context) => {
- const { where, orderBy, orderDirection, limit, after, before } =
- args;
-
- const whereObject = where ? buildWhereObject(where) : {};
- // Add the parent record ID to the where object.
- // Note that this overrides any existing equals condition.
- (whereObject[column[" referenceColumn"]] ??= {}).equals =
- parent.id;
-
- const orderByObject = orderBy
- ? { [orderBy]: orderDirection ?? "asc" }
- : undefined;
-
- // Query for the IDs of the matching records.
- // TODO: Update query to only fetch IDs, not entire records.
- const result = await context.readonlyStore.findMany({
- tableName: column[" referenceTable"],
- where: whereObject,
- orderBy: orderByObject,
- limit,
- before,
- after,
- });
-
- // Load entire records objects using the loader.
- const loader = context.getLoader({
- tableName: column[" referenceTable"],
- });
-
- const ids = result.items.map((item) => item.id);
- const items = await loader.loadMany(ids);
-
- return { items, pageInfo: result.pageInfo };
- };
-
- fieldConfigMap[columnName] = {
- type: entityPageTypes[column[" referenceTable"]]!,
- args: {
- where: { type: entityFilterTypes[column[" referenceTable"]]! },
- orderBy: { type: GraphQLString },
- orderDirection: { type: GraphQLString },
- before: { type: GraphQLString },
- after: { type: GraphQLString },
- limit: { type: GraphQLInt },
- },
- resolve: resolver,
- };
- } else if (isJSONColumn(column)) {
- fieldConfigMap[columnName] = {
- type: isOptionalColumn(column)
- ? GraphQLJSON
- : new GraphQLNonNull(GraphQLJSON),
- };
- } else {
- const type = isEnumColumn(column)
- ? enumTypes[column[" enum"]]!
- : SCALARS[column[" scalar"]];
- if (isListColumn(column)) {
- const listType = new GraphQLList(new GraphQLNonNull(type));
- fieldConfigMap[columnName] = {
- type: isOptionalColumn(column)
- ? listType
- : new GraphQLNonNull(listType),
- };
- } else {
- fieldConfigMap[columnName] = {
- type: isOptionalColumn(column)
- ? type
- : new GraphQLNonNull(type),
- };
- }
- }
- });
-
- return fieldConfigMap;
- },
- });
-
- entityPageTypes[tableName] = new GraphQLObjectType({
- name: `${tableName}Page`,
- fields: () => ({
- items: {
- type: new GraphQLNonNull(
- new GraphQLList(new GraphQLNonNull(entityTypes[tableName]!)),
- ),
- },
- pageInfo: { type: new GraphQLNonNull(GraphQLPageInfo) },
- }),
- });
- }
-
- return { entityTypes, entityPageTypes };
-};
diff --git a/packages/core/src/graphql/enum.ts b/packages/core/src/graphql/enum.ts
deleted file mode 100644
index c3bf5a334..000000000
--- a/packages/core/src/graphql/enum.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-import type { Schema } from "@/schema/common.js";
-import { getEnums } from "@/schema/utils.js";
-import { GraphQLEnumType } from "graphql";
-
-export function buildEnumTypes({ schema }: { schema: Schema }) {
-  const enumTypes: Record<string, GraphQLEnumType> = {};
-
- for (const [enumName, _enum] of Object.entries(getEnums(schema))) {
- enumTypes[enumName] = new GraphQLEnumType({
- name: enumName,
- values: _enum.reduce(
-      (acc: Record<string, {}>, cur) => ({ ...acc, [cur]: {} }),
- {},
- ),
- });
- }
-
- return { enumTypes };
-}
diff --git a/packages/core/src/graphql/filter.test.ts b/packages/core/src/graphql/filter.test.ts
deleted file mode 100644
index a2c001250..000000000
--- a/packages/core/src/graphql/filter.test.ts
+++ /dev/null
@@ -1,69 +0,0 @@
-import { expect, test } from "vitest";
-import { buildWhereObject } from "./filter.js";
-
-test("buildWhereObject transforms equals condition correctly", () => {
- const where = { name: "John" };
- const expected = { name: { equals: "John" } };
-
- expect(buildWhereObject(where)).toEqual(expected);
-});
-
-test("buildWhereObject transforms not condition correctly", () => {
- const where = { age_not: 30 };
- const expected = { age: { not: 30 } };
-
- expect(buildWhereObject(where)).toEqual(expected);
-});
-
-test("buildWhereObject transforms in condition correctly", () => {
- const where = { category_in: ["books", "electronics"] };
- const expected = { category: { in: ["books", "electronics"] } };
-
- expect(buildWhereObject(where)).toEqual(expected);
-});
-
-test("buildWhereObject handles complex conditions with and/or correctly", () => {
- const where = {
- name_not: "Peter",
- AND: [{ name_contains: "John" }, { age_gt: 20 }],
- OR: [{ country: "USA" }, { country: "Canada" }],
- };
- const expected = {
- name: { not: "Peter" },
- AND: [{ name: { contains: "John" } }, { age: { gt: 20 } }],
- OR: [{ country: { equals: "USA" } }, { country: { equals: "Canada" } }],
- };
-
- expect(buildWhereObject(where)).toEqual(expected);
-});
-
-test("buildWhereObject transforms has condition correctly", () => {
- const where = {
- list_has: "0x0",
- };
- const expected = {
- list: { has: "0x0" },
- };
-
- expect(buildWhereObject(where)).toEqual(expected);
-});
-
-test.skip("buildWhereObject handles two conditions for the same field", () => {
- const where = { timestamp_gte: 1630608704, timestamp_lte: 1630605241 };
- const expected = { timestamp: { gte: 1630608704, lte: 1630605241 } };
-
- expect(buildWhereObject(where)).toEqual(expected);
-});
-
-test("buildWhereObject throws error on unknown condition", () => {
- const where = { name_like: "John" };
- expect(() => buildWhereObject(where)).toThrow(
- "Invalid query: Unknown where condition: name_like",
- );
-});
-
-test("buildWhereObject returns an empty object when where is empty", () => {
- const where = {};
- const expected = {};
- expect(buildWhereObject(where)).toEqual(expected);
-});
diff --git a/packages/core/src/graphql/filter.ts b/packages/core/src/graphql/filter.ts
deleted file mode 100644
index 4febd0ed6..000000000
--- a/packages/core/src/graphql/filter.ts
+++ /dev/null
@@ -1,177 +0,0 @@
-import { BuildError } from "@/common/errors.js";
-import type { Schema } from "@/schema/common.js";
-import {
- getTables,
- isEnumColumn,
- isJSONColumn,
- isListColumn,
- isManyColumn,
- isOneColumn,
- isReferenceColumn,
- isScalarColumn,
-} from "@/schema/utils.js";
-import {
- type GraphQLEnumType,
- type GraphQLInputFieldConfigMap,
- GraphQLInputObjectType,
-} from "graphql";
-import { GraphQLList } from "graphql";
-import { SCALARS } from "./scalar.js";
-
-const filterOperators = {
- universal: ["", "_not"],
- singular: ["_in", "_not_in"],
- plural: ["_has", "_not_has"],
- numeric: ["_gt", "_lt", "_gte", "_lte"],
- string: [
- "_contains",
- "_not_contains",
- "_starts_with",
- "_ends_with",
- "_not_starts_with",
- "_not_ends_with",
- ],
-} as const;
-
-export const buildEntityFilterTypes = ({
- schema,
- enumTypes,
-}: { schema: Schema; enumTypes: Record<string, GraphQLEnumType> }) => {
-  const entityFilterTypes: Record<string, GraphQLInputObjectType> = {};
-
- for (const [tableName, { table }] of Object.entries(getTables(schema))) {
- const filterType = new GraphQLInputObjectType({
- name: `${tableName}Filter`,
- fields: () => {
- const filterFields: GraphQLInputFieldConfigMap = {
- // Logical operators
- AND: { type: new GraphQLList(filterType) },
- OR: { type: new GraphQLList(filterType) },
- };
-
- Object.entries(table).forEach(([columnName, column]) => {
- // Note: Only include non-virtual columns in plural fields
- if (isOneColumn(column)) return;
- if (isManyColumn(column)) return;
- if (isJSONColumn(column)) return;
-
- const type = isEnumColumn(column)
- ? enumTypes[column[" enum"]]!
- : SCALARS[column[" scalar"]];
-
- if (isListColumn(column)) {
- // List fields => universal, plural
- filterOperators.universal.forEach((suffix) => {
- filterFields[`${columnName}${suffix}`] = {
- type: new GraphQLList(type),
- };
- });
-
- filterOperators.plural.forEach((suffix) => {
- filterFields[`${columnName}${suffix}`] = {
- type,
- };
- });
- } else {
- // Scalar fields => universal, singular, numeric OR string depending on base type
- // Note: Booleans => universal and singular only.
- filterOperators.universal.forEach((suffix) => {
- filterFields[`${columnName}${suffix}`] = {
- type,
- };
- });
-
- filterOperators.singular.forEach((suffix) => {
- filterFields[`${columnName}${suffix}`] = {
- type: new GraphQLList(type),
- };
- });
-
- if (
- (isScalarColumn(column) || isReferenceColumn(column)) &&
- ["int", "bigint", "float", "hex"].includes(column[" scalar"])
- ) {
- filterOperators.numeric.forEach((suffix) => {
- filterFields[`${columnName}${suffix}`] = {
- type: type,
- };
- });
- }
-
- if (
- (isScalarColumn(column) || isReferenceColumn(column)) &&
- "string" === column[" scalar"]
- ) {
- filterOperators.string.forEach((suffix) => {
- filterFields[`${columnName}${suffix}`] = {
- type: type,
- };
- });
- }
- }
- });
-
- return filterFields;
- },
- });
-
- entityFilterTypes[tableName] = filterType;
- }
-
- return { entityFilterTypes };
-};
-
-const graphqlFilterToStoreCondition = {
- "": "equals",
- not: "not",
- in: "in",
- not_in: "notIn",
- has: "has",
- not_has: "notHas",
- gt: "gt",
- lt: "lt",
- gte: "gte",
- lte: "lte",
- contains: "contains",
- not_contains: "notContains",
- starts_with: "startsWith",
- not_starts_with: "notStartsWith",
- ends_with: "endsWith",
- not_ends_with: "notEndsWith",
-} as const;
-
-export function buildWhereObject(where: Record<string, any>) {
-  const whereObject: Record<string, any> = {};
-
- for (const [whereKey, rawValue] of Object.entries(where)) {
- // Handle the `and` and `or` operators.
- if (whereKey === "AND" || whereKey === "OR") {
- if (!Array.isArray(rawValue)) {
- throw new BuildError(
- `Invalid query: Expected an array for the ${whereKey} operator. Got: ${rawValue}`,
- );
- }
-
- whereObject[whereKey] = rawValue.map(buildWhereObject);
- continue;
- }
-
- const [fieldName, condition_] = whereKey.split(/_(.*)/s);
- // This is a hack to handle the "" operator, which the regex above doesn't handle
- const condition = (
- condition_ === undefined ? "" : condition_
- ) as keyof typeof graphqlFilterToStoreCondition;
-
- const storeCondition = graphqlFilterToStoreCondition[condition];
- if (!storeCondition) {
- throw new BuildError(
- `Invalid query: Unknown where condition: ${fieldName}_${condition}`,
- );
- }
-
- whereObject[fieldName!] ||= {};
- whereObject[fieldName!][storeCondition] = rawValue;
- }
-
- return whereObject;
-}
diff --git a/packages/core/src/graphql/graphQLJson.ts b/packages/core/src/graphql/graphQLJson.ts
deleted file mode 100644
index 083d559dc..000000000
--- a/packages/core/src/graphql/graphQLJson.ts
+++ /dev/null
@@ -1,63 +0,0 @@
-import {
- type GraphQLScalarLiteralParser,
- GraphQLScalarType,
- Kind,
- type ObjectValueNode,
- type ValueNode,
- print,
-} from "graphql";
-
-// Modified from https://github.com/taion/graphql-type-json/blob/master/src/index.js
-
-export const GraphQLJSON = new GraphQLScalarType({
- name: "JSON",
- description:
- "The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf).",
- serialize: (x) => x,
- parseValue: (x) => x,
- parseLiteral: (ast, variables) => {
- if (ast.kind !== Kind.OBJECT) {
- throw new TypeError(
- `JSONObject cannot represent non-object value: ${print(ast)}`,
- );
- }
-
- return parseObject(ast, variables);
- },
-});
-
-const parseLiteral = (
- ast: ValueNode,
-  variables: Parameters<GraphQLScalarLiteralParser<any>>[1],
-): ReturnType<GraphQLScalarLiteralParser<any>> => {
- switch (ast.kind) {
- case Kind.STRING:
- case Kind.BOOLEAN:
- return ast.value;
- case Kind.INT:
- case Kind.FLOAT:
- return Number.parseFloat(ast.value);
- case Kind.OBJECT:
- return parseObject(ast, variables);
- case Kind.LIST:
- return ast.values.map((n) => parseLiteral(n, variables));
- case Kind.NULL:
- return null;
- case Kind.VARIABLE:
- return variables ? variables[ast.name.value] : undefined;
- default:
- throw new TypeError(`JSON cannot represent value: ${print(ast)}`);
- }
-};
-
-const parseObject = (
- ast: ObjectValueNode,
-  variables: Parameters<GraphQLScalarLiteralParser<any>>[1],
-) => {
- const value = Object.create(null);
- ast.fields.forEach((field) => {
- value[field.name.value] = parseLiteral(field.value, variables);
- });
-
- return value;
-};
diff --git a/packages/core/src/graphql/index.test.ts b/packages/core/src/graphql/index.test.ts
deleted file mode 100644
index a6e80ea11..000000000
--- a/packages/core/src/graphql/index.test.ts
+++ /dev/null
@@ -1,316 +0,0 @@
-import {
- setupCommon,
- setupDatabaseServices,
- setupIsolatedDatabase,
-} from "@/_test/setup.js";
-import type { HistoricalStore, ReadonlyStore } from "@/indexing-store/store.js";
-import type { Schema } from "@/schema/common.js";
-import { createSchema } from "@/schema/schema.js";
-import { encodeCheckpoint, zeroCheckpoint } from "@/utils/checkpoint.js";
-import { Hono } from "hono";
-import { createMiddleware } from "hono/factory";
-import { beforeEach, expect, test } from "vitest";
-import { graphql } from "./index.js";
-
-beforeEach(setupCommon);
-beforeEach(setupIsolatedDatabase);
-
-const contextMiddleware = (schema: Schema, readonlyStore: ReadonlyStore) =>
- createMiddleware(async (c, next) => {
- c.set("readonlyStore", readonlyStore);
- c.set("schema", schema);
- await next();
- });
-
-test("graphQLMiddleware serves request", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { indexingStore, readonlyStore, cleanup } = await setupDatabaseServices(
- context,
- { schema },
- );
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(zeroCheckpoint),
- id: "0",
- data: {
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x0",
- bigint: 0n,
- },
- });
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const app = new Hono()
- .use(contextMiddleware(schema, readonlyStore))
- .use("/graphql", graphql());
-
- const response = await app.request("/graphql", {
- method: "POST",
- headers: {
- "Content-Type": "application/json",
- },
- body: JSON.stringify({
- query: `
- query {
- table(id: "0") {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- `,
- }),
- });
-
- expect(response.status).toBe(200);
-
- expect(await response.json()).toMatchObject({
- data: {
- table: {
- id: "0",
- string: "0",
- int: 0,
- float: 0,
- boolean: false,
- hex: "0x00",
- bigint: "0",
- },
- },
- });
-
- await cleanup();
-});
-
-test("graphQLMiddleware throws error when extra filter is applied", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- string: p.string(),
- int: p.int(),
- float: p.float(),
- boolean: p.boolean(),
- hex: p.hex(),
- bigint: p.bigint(),
- }),
- }));
-
- const { readonlyStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- const app = new Hono()
- .use(contextMiddleware(schema, readonlyStore))
- .use("/graphql", graphql());
-
- const response = await app.request("/graphql", {
- method: "POST",
- headers: { "Content-Type": "application/json" },
- body: JSON.stringify({
- query: `
- {
- table(id: "0", doesntExist: "kevin") {
- id
- string
- int
- float
- boolean
- hex
- bigint
- }
- }
- `,
- }),
- });
-
- expect(response.status).toBe(200);
- const body = await response.json();
- expect(body.errors[0].message).toBe(
- 'Unknown argument "doesntExist" on field "Query.table".',
- );
-
- await cleanup();
-});
-
-test("graphQLMiddleware throws error for token limit", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({ id: p.string() }),
- }));
-
- const { readonlyStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- const app = new Hono()
- .use(contextMiddleware(schema, readonlyStore))
- .use("/graphql", graphql({ maxOperationTokens: 3 }));
-
- const response = await app.request("/graphql", {
- method: "POST",
- headers: { "Content-Type": "application/json" },
- body: JSON.stringify({
- query: `
- {
- __schema {
- types {
- fields {
- type {
- fields {
- type {
- description
- }
- }
- }
- }
- }
- }
- }
- `,
- }),
- });
-
- expect(response.status).toBe(200);
- const body = await response.json();
- expect(body.errors[0].message).toBe(
- "Syntax Error: Token limit of 3 exceeded.",
- );
-
- await cleanup();
-});
-
-test("graphQLMiddleware throws error for depth limit", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({ id: p.string() }),
- }));
-
- const { readonlyStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- const app = new Hono()
- .use(contextMiddleware(schema, readonlyStore))
- .use("/graphql", graphql({ maxOperationDepth: 5 }));
-
- const response = await app.request("/graphql", {
- method: "POST",
- headers: { "Content-Type": "application/json" },
- body: JSON.stringify({
- query: `
- {
- __schema {
- types {
- fields {
- type {
- fields {
- type {
- description
- }
- }
- }
- }
- }
- }
- }
- `,
- }),
- });
-
- expect(response.status).toBe(200);
- const body = await response.json();
- expect(body.errors[0].message).toBe(
- "Syntax Error: Query depth limit of 5 exceeded, found 7.",
- );
-
- await cleanup();
-});
-
-test("graphQLMiddleware throws error for max aliases", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({ id: p.string() }),
- }));
-
- const { readonlyStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- const app = new Hono()
- .use(contextMiddleware(schema, readonlyStore))
- .use("/graphql", graphql({ maxOperationAliases: 2 }));
-
- const response = await app.request("/graphql", {
- method: "POST",
- headers: { "Content-Type": "application/json" },
- body: JSON.stringify({
- query: `
- {
- __schema {
- types {
- fields {
- type {
- alias1: fields {
- type {
- description
- }
- }
- alias2: fields {
- type {
- description
- }
- }
- alias3: fields {
- type {
- description
- }
- }
- }
- }
- }
- }
- }
- `,
- }),
- });
-
- expect(response.status).toBe(200);
- const body = await response.json();
- expect(body.errors[0].message).toBe(
- "Syntax Error: Aliases limit of 2 exceeded, found 3.",
- );
-
- await cleanup();
-});
-
-test("graphQLMiddleware interactive", async (context) => {
- const { readonlyStore, cleanup } = await setupDatabaseServices(context, {
- schema: {},
- });
-
- const app = new Hono()
- .use(contextMiddleware({}, readonlyStore))
- .use("/graphql", graphql({ maxOperationAliases: 2 }));
-
- const response = await app.request("/graphql");
-
- expect(response.status).toBe(200);
-
- await cleanup();
-});
diff --git a/packages/core/src/graphql/index.ts b/packages/core/src/graphql/index.ts
deleted file mode 100644
index bdabd7650..000000000
--- a/packages/core/src/graphql/index.ts
+++ /dev/null
@@ -1,86 +0,0 @@
-import { graphiQLHtml } from "@/ui/graphiql.html.js";
-import { maxAliasesPlugin } from "@escape.tech/graphql-armor-max-aliases";
-import { maxDepthPlugin } from "@escape.tech/graphql-armor-max-depth";
-import { maxTokensPlugin } from "@escape.tech/graphql-armor-max-tokens";
-import { type YogaServerInstance, createYoga } from "graphql-yoga";
-import { createMiddleware } from "hono/factory";
-import { buildGraphQLSchema } from "./buildGraphqlSchema.js";
-import { buildLoaderCache } from "./buildLoaderCache.js";
-
-/**
- * Middleware for GraphQL with an interactive web view.
- *
- * - Docs: https://ponder.sh/docs/query/api-functions#register-graphql-middleware
- *
- * @example
- * import { ponder } from "@/generated";
- * import { graphql } from "@ponder/core";
- *
- * ponder.use("/graphql", graphql());
- *
- */
-export const graphql = (
- {
- maxOperationTokens = 1000,
- maxOperationDepth = 100,
- maxOperationAliases = 30,
- }: {
- maxOperationTokens?: number;
- maxOperationDepth?: number;
- maxOperationAliases?: number;
- } = {
- // Default limits are from Apollo:
- // https://www.apollographql.com/blog/prevent-graph-misuse-with-operation-size-and-complexity-limit
- maxOperationTokens: 1000,
- maxOperationDepth: 100,
- maxOperationAliases: 30,
- },
-) => {
-  let yoga: YogaServerInstance<any, any> | undefined = undefined;
-
- return createMiddleware(async (c) => {
- if (c.req.method === "GET") {
- return c.html(graphiQLHtml(c.req.path));
- }
-
- if (yoga === undefined) {
- const readonlyStore = c.get("readonlyStore");
- const metadataStore = c.get("metadataStore");
- const schema = c.get("schema");
- const graphqlSchema = buildGraphQLSchema(schema);
-
- yoga = createYoga({
- schema: graphqlSchema,
- context: () => {
- const getLoader = buildLoaderCache({ store: readonlyStore });
- return { readonlyStore, metadataStore, getLoader };
- },
- graphqlEndpoint: c.req.path,
- maskedErrors: process.env.NODE_ENV === "production",
- logging: false,
- graphiql: false,
- parserAndValidationCache: false,
- plugins: [
- maxTokensPlugin({ n: maxOperationTokens }),
- maxDepthPlugin({
- n: maxOperationDepth,
- ignoreIntrospection: false,
- }),
- maxAliasesPlugin({
- n: maxOperationAliases,
- allowList: [],
- }),
- ],
- });
- }
-
- const response = await yoga.handle(c.req.raw);
- // TODO: Figure out why Yoga is returning 500 status codes for GraphQL errors.
- // @ts-expect-error
- response.status = 200;
- // @ts-expect-error
- response.statusText = "OK";
-
- return response;
- });
-};
diff --git a/packages/core/src/graphql/metadata.ts b/packages/core/src/graphql/metadata.ts
deleted file mode 100644
index cdf527dbd..000000000
--- a/packages/core/src/graphql/metadata.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-import { GraphQLObjectType } from "graphql";
-import { GraphQLJSON } from "./graphQLJson.js";
-
-export const metadataEntity = new GraphQLObjectType({
- name: "_meta",
- fields: { status: { type: GraphQLJSON } },
-});
diff --git a/packages/core/src/graphql/plural.ts b/packages/core/src/graphql/plural.ts
deleted file mode 100644
index a10161759..000000000
--- a/packages/core/src/graphql/plural.ts
+++ /dev/null
@@ -1,64 +0,0 @@
-import {
- type GraphQLFieldConfig,
- type GraphQLFieldResolver,
- type GraphQLInputObjectType,
- GraphQLInt,
- GraphQLNonNull,
- type GraphQLObjectType,
- GraphQLString,
-} from "graphql";
-import type { Context, Parent } from "./buildGraphqlSchema.js";
-import { buildWhereObject } from "./filter.js";
-
-type PluralArgs = {
- where?: { [key: string]: number | string };
- after?: string;
- before?: string;
- limit?: number;
- orderBy?: string;
- orderDirection?: "asc" | "desc";
-};
-
-export type PluralResolver = GraphQLFieldResolver<Parent, Context, PluralArgs>;
-
-export const buildPluralField = ({
- tableName,
- entityPageType,
- entityFilterType,
-}: {
- tableName: string;
- entityPageType: GraphQLObjectType;
- entityFilterType: GraphQLInputObjectType;
-}): GraphQLFieldConfig<Parent, Context> => {
- const resolver: PluralResolver = async (_, args, context) => {
- const { where, orderBy, orderDirection, before, limit, after } = args;
-
- const whereObject = where ? buildWhereObject(where) : {};
-
- const orderByObject = orderBy
- ? { [orderBy]: orderDirection || "asc" }
- : undefined;
-
- return await context.readonlyStore.findMany({
- tableName,
- where: whereObject,
- orderBy: orderByObject,
- limit,
- before,
- after,
- });
- };
-
- return {
- type: new GraphQLNonNull(entityPageType),
- args: {
- where: { type: entityFilterType },
- orderBy: { type: GraphQLString },
- orderDirection: { type: GraphQLString },
- before: { type: GraphQLString },
- after: { type: GraphQLString },
- limit: { type: GraphQLInt },
- },
- resolve: resolver,
- };
-};
diff --git a/packages/core/src/graphql/scalar.ts b/packages/core/src/graphql/scalar.ts
deleted file mode 100644
index 46b7f7d97..000000000
--- a/packages/core/src/graphql/scalar.ts
+++ /dev/null
@@ -1,33 +0,0 @@
-import { BuildError } from "@/common/errors.js";
-import type { Scalar } from "@/schema/common.js";
-import {
- GraphQLBoolean,
- GraphQLFloat,
- GraphQLInt,
- GraphQLScalarType,
- GraphQLString,
-} from "graphql";
-
-const GraphQLBigInt = new GraphQLScalarType({
- name: "BigInt",
- serialize: (value) => String(value),
- parseValue: (value) => BigInt(value as any),
- parseLiteral: (value) => {
- if (value.kind === "StringValue") {
- return BigInt(value.value);
- } else {
- throw new BuildError(
- `Invalid value kind provided for field of type BigInt: ${value.kind}. Expected: StringValue`,
- );
- }
- },
-});
-
-export const SCALARS: { [type in Scalar]: GraphQLScalarType } = {
- int: GraphQLInt,
- float: GraphQLFloat,
- string: GraphQLString,
- boolean: GraphQLBoolean,
- bigint: GraphQLBigInt,
- hex: GraphQLString,
-};
diff --git a/packages/core/src/graphql/singular.ts b/packages/core/src/graphql/singular.ts
deleted file mode 100644
index 1f9076846..000000000
--- a/packages/core/src/graphql/singular.ts
+++ /dev/null
@@ -1,45 +0,0 @@
-import type { Table } from "@/schema/common.js";
-import type { GraphQLObjectType } from "graphql";
-import {
- type GraphQLFieldConfig,
- type GraphQLFieldResolver,
- GraphQLNonNull,
-} from "graphql";
-import type { Context, Parent } from "./buildGraphqlSchema.js";
-import { SCALARS } from "./scalar.js";
-
-type SingularArgs = {
- id?: string;
-};
-type SingularResolver = GraphQLFieldResolver<Parent, Context, SingularArgs>;
-
-export const buildSingularField = ({
- tableName,
- table,
- entityType,
-}: {
- tableName: string;
- table: Table;
- entityType: GraphQLObjectType;
-}): GraphQLFieldConfig<Parent, Context> => {
- const resolver: SingularResolver = async (_, args, context) => {
- const { id } = args;
-
- if (id === undefined) return null;
-
- const entityInstance = await context.readonlyStore.findUnique({
- tableName,
- id,
- });
-
- return entityInstance;
- };
-
- return {
- type: entityType,
- args: {
- id: { type: new GraphQLNonNull(SCALARS[table.id[" scalar"]]) },
- },
- resolve: resolver,
- };
-};
diff --git a/packages/core/src/hono/context.ts b/packages/core/src/hono/context.ts
index 067b2dfc7..556d2eac2 100644
--- a/packages/core/src/hono/context.ts
+++ b/packages/core/src/hono/context.ts
@@ -1,12 +1,13 @@
-import type { Schema } from "@/schema/common.js";
-import type { ApiContext } from "@/types/api.js";
+import type { Drizzle, Schema } from "@/drizzle/index.js";
import type { Env, Context as HonoContext, Input } from "hono";
export type Context<
schema extends Schema = Schema,
path extends string = string,
input extends Input = {},
-> = ApiContext & {
+> = {
+ db: Drizzle;
+} & {
/**
* Hono request object.
*
@@ -49,4 +50,6 @@ export type MiddlewareContext<
schema extends Schema = Schema,
path extends string = string,
input extends Input = {},
-> = ApiContext & HonoContext;
+> = {
+ db: Drizzle;
+} & HonoContext;
diff --git a/packages/core/src/hono/handler.ts b/packages/core/src/hono/handler.ts
index 725a95764..43c945a3b 100644
--- a/packages/core/src/hono/handler.ts
+++ b/packages/core/src/hono/handler.ts
@@ -1,4 +1,4 @@
-import type { Schema } from "@/schema/common.js";
+import type { Schema } from "@/drizzle/index.js";
import type { ApiRegistry } from "@/types/api.js";
import type { BlankInput, HandlerResponse, Input, Next } from "hono/types";
import type { Context, MiddlewareContext } from "./context.js";
diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts
index abbfcd890..47f31f682 100644
--- a/packages/core/src/index.ts
+++ b/packages/core/src/index.ts
@@ -1,5 +1,4 @@
export { createConfig } from "@/config/config.js";
-export { createSchema } from "@/schema/schema.js";
export type {
Block,
Log,
@@ -24,39 +23,6 @@ export type NetworkConfig = Prettify;
export type BlockConfig = Prettify;
export type DatabaseConfig = Prettify;
-export { graphql } from "@/graphql/index.js";
+export { onchainTable, offchainTable } from "@/drizzle/db.js";
-export {
- sql,
- eq,
- gt,
- gte,
- lt,
- lte,
- ne,
- isNull,
- isNotNull,
- inArray,
- notInArray,
- exists,
- notExists,
- between,
- notBetween,
- like,
- notIlike,
- not,
- asc,
- desc,
- and,
- or,
- count,
- countDistinct,
- avg,
- avgDistinct,
- sum,
- sumDistinct,
- max,
- min,
-} from "drizzle-orm";
-
-export { alias } from "@/drizzle/runtime.js";
+// export { graphql } from "@/graphql/index.js";
diff --git a/packages/core/src/indexing-store/historical.test.ts b/packages/core/src/indexing-store/historical.test.ts
deleted file mode 100644
index f430c3ea4..000000000
--- a/packages/core/src/indexing-store/historical.test.ts
+++ /dev/null
@@ -1,1184 +0,0 @@
-import {
- setupCommon,
- setupDatabaseServices,
- setupIsolatedDatabase,
-} from "@/_test/setup.js";
-import {
- BigIntSerializationError,
- CheckConstraintError,
- RecordNotFoundError,
- UniqueConstraintError,
-} from "@/common/errors.js";
-import { createSchema } from "@/schema/schema.js";
-import { beforeEach, expect, test } from "vitest";
-import type { HistoricalStore } from "./store.js";
-
-beforeEach(setupCommon);
-beforeEach(setupIsolatedDatabase);
-
-const schema = createSchema((p) => ({
- PetKind: p.createEnum(["CAT", "DOG"]),
- Pet: p.createTable({
- id: p.string(),
- name: p.string(),
- age: p.int().optional(),
- bigAge: p.bigint().optional(),
- kind: p.enum("PetKind").optional(),
- rating: p.float().optional(),
- json: p.json().optional(),
- }),
- Person: p.createTable({
- id: p.string(),
- name: p.string(),
- }),
-}));
-
-const hexSchema = createSchema((p) => ({
- table: p.createTable({
- id: p.hex(),
- n: p.int(),
- }),
-}));
-
-test("findUnique()", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await cleanup();
-});
-
-test("findUnique() w/ cache miss", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await cleanup();
-});
-
-test("findMany()", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
-
- const instance = await indexingStore.findMany({
- tableName: "Pet",
- });
- expect(instance.items).toHaveLength(1);
- expect(instance.items[0]).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await cleanup();
-});
-
-test("create() inserts a record", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await cleanup();
-});
-
-test("create() throws UniqueConstraintError", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip" },
- });
-
- const error = await indexingStore
- .create({
- tableName: "Pet",
-
- id: "id1",
- data: { name: "Skip", age: 13 },
- })
- .catch((_error) => _error);
-
- expect(error).instanceOf(UniqueConstraintError);
-
- await cleanup();
-});
-
-test("create() respects optional fields", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", kind: "CAT" },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
-
- expect(instance).toMatchObject({ id: "id1", name: "Skip", age: null });
-
- await cleanup();
-});
-
-test("create() throws on invalid json", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- const error = await indexingStore
- .create({
- tableName: "Pet",
-
- id: "id1",
- data: {
- name: "Skip",
- age: 12,
- json: {
- kevin: 52n,
- },
- },
- })
- .catch((_error) => _error);
-
- expect(error).instanceOf(BigIntSerializationError);
-
- expect(error.message?.includes("Do not know how to serialize a BigInt")).toBe(
- true,
- );
-
- await cleanup();
-});
-
-test("create() accepts enums", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", kind: "CAT" },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
-
- expect(instance).toMatchObject({ id: "id1", name: "Skip", kind: "CAT" });
-
- await cleanup();
-});
-
-test("create() throws on invalid enum value", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- const error = await indexingStore
- .create({
- tableName: "Pet",
-
- id: "id1",
- data: { name: "Skip", kind: "NOTACAT" },
- })
- .catch((error) => error);
-
- expect(error).toBeInstanceOf(CheckConstraintError);
-
- await cleanup();
-});
-
-test("create() accepts BigInt fields as bigint and returns as bigint", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", bigAge: 100n },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
-
- expect(instance).toMatchObject({ id: "id1", name: "Skip", bigAge: 100n });
-
- await cleanup();
-});
-
-test("create() accepts float fields as float and returns as float", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", rating: 1.0 },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
-
- expect(instance).toMatchObject({ id: "id1", name: "Skip", rating: 1.0 });
-
- await cleanup();
-});
-
-test("create() works with hex case sensitivity", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema: hexSchema,
- });
-
- const instance = await indexingStore.create({
- tableName: "table",
- id: "0xa",
- data: { n: 1 },
- });
-
- expect(instance).toMatchObject({ id: "0x0a", n: 1 });
-
- await cleanup();
-});
-
-test("update() updates a record", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", bigAge: 100n },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", bigAge: 100n });
-
- await indexingStore.update({
- tableName: "Pet",
- id: "id1",
- data: { name: "Peanut Butter" },
- });
-
- const updatedInstance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(updatedInstance).toMatchObject({ id: "id1", name: "Peanut Butter" });
-
- await cleanup();
-});
-
-test("update() updates a record using an update function", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", bigAge: 100n },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", bigAge: 100n });
-
- await indexingStore.update({
- tableName: "Pet",
- id: "id1",
- data: ({ current }) => ({
- name: `${current.name} and Skipper`,
- }),
- });
-
- const updatedInstance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(updatedInstance).toMatchObject({
- id: "id1",
- name: "Skip and Skipper",
- });
-
- await cleanup();
-});
-
-test("update() with an empty update object returns the original record", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
-
- const record = await indexingStore.update({
- tableName: "Pet",
- id: "id1",
- data: {},
- });
-
- expect(record).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await cleanup();
-});
-
-test("update() with an update function that returns an empty object returns the record", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
-
- const record = await indexingStore.update({
- tableName: "Pet",
- id: "id1",
- data: ({ current }) => {
- if (current.name === "blah") return { name: "newBlah" };
- return {};
- },
- });
-
- expect(record).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await cleanup();
-});
-
-test("update() works with hex case sensitivity", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema: hexSchema,
- });
-
- await indexingStore.create({
- tableName: "table",
- id: "0x0a",
- data: { n: 1 },
- });
-
- await indexingStore.update({
- tableName: "table",
- id: "0x0A",
- data: { n: 2 },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "table",
- id: "0x0A",
- });
- expect(instance).toMatchObject({ id: "0x0a", n: 2 });
-
- await cleanup();
-});
-
-test("update() throws RecordNotFoundError", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
- const error = await indexingStore
- .update({
- tableName: "Pet",
- id: "id1",
- data: { name: "Peanut Butter" },
- })
- .catch((err) => err);
-
- expect(error).instanceOf(RecordNotFoundError);
-
- await cleanup();
-});
-
-test("update() w/ cache miss", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", bigAge: 100n },
- });
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const updatedInstance = await indexingStore.update({
- tableName: "Pet",
- id: "id1",
- data: { name: "Peanut Butter" },
- });
-
- expect(updatedInstance).toMatchObject({ id: "id1", name: "Peanut Butter" });
-
- await cleanup();
-});
-
-test("update() w/ find cache", async (context) => {
- const { indexingStore, database, cleanup } = await setupDatabaseServices(
- context,
- {
- schema,
- },
- );
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", bigAge: 100n },
- });
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- await indexingStore.findUnique({ tableName: "Pet", id: "id1" });
-
- const updatedInstance = await indexingStore.update({
- tableName: "Pet",
- id: "id1",
- data: { name: "Peanut Butter" },
- });
-
- expect(updatedInstance).toMatchObject({ id: "id1", name: "Peanut Butter" });
-
- const findInstance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
-
- expect(findInstance).toMatchObject({ id: "id1", name: "Peanut Butter" });
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const rows = await database.qb.user.selectFrom("Pet").selectAll().execute();
-
- expect(rows).toHaveLength(1);
- expect(rows[0]).toMatchObject({
- id: "id1",
- name: "Peanut Butter",
- });
-
- await cleanup();
-});
-
-test("upsert() inserts a new record", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.upsert({
- tableName: "Pet",
- id: "id1",
- create: { name: "Skip", age: 12 },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await cleanup();
-});
-
-test("upsert() updates a record", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await indexingStore.upsert({
- tableName: "Pet",
- id: "id1",
- create: { name: "Skip", age: 24 },
- update: { name: "Jelly" },
- });
-
- const updatedInstance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(updatedInstance).toMatchObject({ id: "id1", name: "Jelly", age: 12 });
-
- await cleanup();
-});
-
-test("upsert() with an empty update object returns the original record", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
-
- const record = await indexingStore.upsert({
- tableName: "Pet",
- id: "id1",
- create: { name: "Yellow", age: 14 },
- update: {},
- });
-
- expect(record).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await cleanup();
-});
-
-test("upsert() with an update function that returns an empty object returns the record", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
-
- const record = await indexingStore.upsert({
- tableName: "Pet",
- id: "id1",
- create: { name: "Yellow", age: 14 },
- update: ({ current }) => {
- if (current.name === "blah") return { name: "newBlah" };
- return {};
- },
- });
-
- expect(record).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await cleanup();
-});
-
-test("upsert() updates a record using an update function", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await indexingStore.upsert({
- tableName: "Pet",
- id: "id1",
- create: { name: "Skip", age: 24 },
- update: ({ current }) => ({
- age: (current.age as number) - 5,
- }),
- });
-
- const updatedInstance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(updatedInstance).toMatchObject({ id: "id1", name: "Skip", age: 7 });
-
- await cleanup();
-});
-
-test("upsert() works with hex case sensitivity", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema: hexSchema,
- });
-
- await indexingStore.create({
- tableName: "table",
- id: "0x0a",
- data: { n: 1 },
- });
-
- await indexingStore.upsert({
- tableName: "table",
- id: "0xA",
- create: { n: 0 },
- update: { n: 2 },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "table",
- id: "0xA",
- });
- expect(instance).toMatchObject({ id: "0x0a", n: 2 });
-
- await cleanup();
-});
-
-test("upsert() w/ cache miss", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const updatedInstance = await indexingStore.upsert({
- tableName: "Pet",
- id: "id1",
- create: { name: "Skip", age: 24 },
- update: { name: "Jelly" },
- });
-
- expect(updatedInstance).toMatchObject({ id: "id1", name: "Jelly", age: 12 });
-
- await cleanup();
-});
-
-test("upsert() w/ find cache", async (context) => {
- const { indexingStore, database, cleanup } = await setupDatabaseServices(
- context,
- {
- schema,
- },
- );
-
- // add pet.id1 to find cache
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
-
- const createInstance = await indexingStore.upsert({
- tableName: "Pet",
- id: "id1",
- create: { name: "Peanut Butter" },
- update: {},
- });
-
- expect(createInstance).toMatchObject({ id: "id1", name: "Peanut Butter" });
-
- let findInstance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
-
- expect(findInstance).toMatchObject({ id: "id1", name: "Peanut Butter" });
-
- // add pet.id1 to find cache, remove from create cache
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
-
- const updateInstance = await indexingStore.upsert({
- tableName: "Pet",
- id: "id1",
- create: {},
- update: { name: "Kevin" },
- });
-
- expect(updateInstance).toMatchObject({ id: "id1", name: "Kevin" });
-
- findInstance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
-
- expect(findInstance).toMatchObject({ id: "id1", name: "Kevin" });
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const rows = await database.qb.user.selectFrom("Pet").selectAll().execute();
-
- expect(rows).toHaveLength(1);
- expect(rows[0]).toMatchObject({
- id: "id1",
- name: "Kevin",
- });
-
- await cleanup();
-});
-
-test("delete() removes a record", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await indexingStore.delete({
- tableName: "Pet",
- id: "id1",
- });
-
- const deletedInstance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(deletedInstance).toBe(null);
-
- await cleanup();
-});
-
-test("delete() w/ find cache", async (context) => {
- const { indexingStore, database, cleanup } = await setupDatabaseServices(
- context,
- {
- schema,
- },
- );
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- await indexingStore.findUnique({ tableName: "Pet", id: "id1" });
-
- const _delete = await indexingStore.delete({
- tableName: "Pet",
- id: "id1",
- });
-
- expect(_delete).toBe(true);
-
- const deletedInstance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(deletedInstance).toBe(null);
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const rows = await database.qb.user.selectFrom("Pet").selectAll().execute();
-
- expect(rows).toHaveLength(0);
-
- await cleanup();
-});
-
-test("delete() works with hex case sensitivity", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema: hexSchema,
- });
-
- await indexingStore.create({
- tableName: "table",
- id: "0xa",
- data: { n: 1 },
- });
-
- await indexingStore.delete({
- tableName: "table",
- id: "0xA",
- });
-
- const deletedInstance = await indexingStore.findUnique({
- tableName: "table",
- id: "0xa",
- });
-
- expect(deletedInstance).toBe(null);
-
- await cleanup();
-});
-
-test("createMany() inserts multiple entities", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- const createdItems = await indexingStore.createMany({
- tableName: "Pet",
- data: [
- { id: "id1", name: "Skip", bigAge: 105n },
- { id: "id2", name: "Foo", bigAge: 10n },
- { id: "id3", name: "Bar", bigAge: 190n },
- ],
- });
- expect(createdItems.length).toBe(3);
-
- const { items } = await indexingStore.findMany({ tableName: "Pet" });
- expect(items.length).toBe(3);
-
- await cleanup();
-});
-
-test("createMany() inserts a large number of entities", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- const RECORD_COUNT = 10_000;
-
- const createdItems = await indexingStore.createMany({
- tableName: "Pet",
- data: [...Array(RECORD_COUNT).keys()].map((i) => ({
- id: `id${i}`,
- name: "Alice",
- bigAge: BigInt(i),
- })),
- });
- expect(createdItems.length).toBe(RECORD_COUNT);
-
- const { pageInfo } = await indexingStore.findMany({
- tableName: "Pet",
- limit: 1_000,
- });
- const { items } = await indexingStore.findMany({
- tableName: "Pet",
- after: pageInfo.endCursor,
- limit: 1_000,
- });
- expect(items.length).toBe(1_000);
-
- await cleanup();
-});
-
-test("updateMany() updates multiple entities", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.createMany({
- tableName: "Pet",
- data: [
- { id: "id1", name: "Skip", bigAge: 105n },
- { id: "id2", name: "Foo", bigAge: 10n },
- { id: "id3", name: "Bar", bigAge: 190n },
- ],
- });
-
- const updateditems = await indexingStore.updateMany({
- tableName: "Pet",
- where: { bigAge: { gt: 50n } },
- data: { bigAge: 300n },
- });
-
- expect(updateditems.length).toBe(2);
-
- const { items } = await indexingStore.findMany({ tableName: "Pet" });
-
- expect(items.map((i) => i.bigAge)).toMatchObject([300n, 10n, 300n]);
-
- await cleanup();
-});
-
-test("updateMany() updates using a function", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.createMany({
- tableName: "Pet",
- data: [
- { id: "id1", name: "Skip", bigAge: 105n },
- { id: "id2", name: "Foo", bigAge: 10n },
- { id: "id3", name: "Bar", bigAge: 190n },
- ],
- });
-
- const updateditems = await indexingStore.updateMany({
- tableName: "Pet",
- where: { bigAge: { gt: 50n } },
- data: () => ({ bigAge: 300n }),
- });
-
- expect(updateditems.length).toBe(2);
-
- const { items } = await indexingStore.findMany({ tableName: "Pet" });
-
- expect(items.map((i) => i.bigAge)).toMatchObject([300n, 10n, 300n]);
-
- await cleanup();
-});
-
-test("updateMany() works with hex case sensitivity", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema: hexSchema,
- });
-
- await indexingStore.create({
- tableName: "table",
- id: "0x0a",
- data: { n: 1 },
- });
-
- await indexingStore.updateMany({
- tableName: "table",
- where: { n: { gt: 0 } },
- data: { n: 2 },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "table",
- id: "0x0a",
- });
- expect(instance).toMatchObject({ id: "0x0a", n: 2 });
-
- await cleanup();
-});
-
-test("updateMany() updates a large number of entities", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- const RECORD_COUNT = 1_000;
-
- await indexingStore.createMany({
- tableName: "Pet",
- data: [...Array(RECORD_COUNT).keys()].map((i) => ({
- id: `id${i}`,
- name: "Alice",
- bigAge: BigInt(i),
- })),
- });
-
- const updatedItems = await indexingStore.updateMany({
- tableName: "Pet",
- where: {},
- data: ({ current }) => ({
- bigAge: (current.bigAge as bigint) + 1n,
- }),
- });
- expect(updatedItems.length).toBe(RECORD_COUNT);
-
- await cleanup();
-});
-
-test("flush() insert", async (context) => {
- const { indexingStore, cleanup, database } = await setupDatabaseServices(
- context,
- {
- schema,
- },
- );
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const rows = await database.qb.user.selectFrom("Pet").selectAll().execute();
-
- expect(rows).toHaveLength(1);
- expect(rows[0]).toMatchObject({
- id: "id1",
- name: "Skip",
- age: 12,
- });
-
- await cleanup();
-});
-
-test("flush() update", async (context) => {
- const { indexingStore, cleanup, database } = await setupDatabaseServices(
- context,
- {
- schema,
- },
- );
-
- await indexingStore.create({
- tableName: "Pet",
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- await indexingStore.update({
- tableName: "Pet",
- id: "id1",
- data: { name: "Peanut Butter" },
- });
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const rows = await database.qb.user.selectFrom("Pet").selectAll().execute();
-
- expect(rows).toHaveLength(1);
- expect(rows[0]).toMatchObject({
- id: "id1",
- name: "Peanut Butter",
- age: 12,
- });
-
- await cleanup();
-});
-
-test("flush() partial", async (context) => {
- const { indexingStore, cleanup, database } = await setupDatabaseServices(
- context,
- {
- schema,
- },
- );
-
- await indexingStore.createMany({
- tableName: "Pet",
- data: [
- { id: "id0", name: "Skip" },
- { id: "id1", name: "Skip" },
- { id: "id2", name: "Foo" },
- { id: "id3", name: "Bar" },
- { id: "id4", name: "Skip" },
- { id: "id5", name: "Foo" },
- { id: "id6", name: "Bar" },
- { id: "id7", name: "Skip" },
- { id: "id8", name: "Foo" },
- { id: "id9", name: "Bar" },
- ],
- });
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: false });
-
- const rows = await database.qb.user.selectFrom("Pet").selectAll().execute();
-
- expect(rows).toHaveLength(4);
- expect(rows[0]).toMatchObject({
- id: "id0",
- name: "Skip",
- });
-
- await cleanup();
-});
-
-test("flush() skips update w/ no data", async (context) => {
- const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- }),
- }));
-
- const { indexingStore, database, cleanup } = await setupDatabaseServices(
- context,
- {
- schema,
- },
- );
-
- await indexingStore.create({
- tableName: "table",
- id: "id",
- });
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const instance = await indexingStore.upsert({
- tableName: "table",
- id: "id",
- });
-
- expect(instance).toMatchObject({ id: "id" });
-
- await (indexingStore as HistoricalStore).flush({ isFullFlush: true });
-
- const rows = await database.qb.user.selectFrom("table").selectAll().execute();
-
- expect(rows).toHaveLength(1);
- expect(rows[0]).toMatchObject({
- id: "id",
- });
-
- await cleanup();
-});
diff --git a/packages/core/src/indexing-store/historical.ts b/packages/core/src/indexing-store/historical.ts
deleted file mode 100644
index ca77bb53e..000000000
--- a/packages/core/src/indexing-store/historical.ts
+++ /dev/null
@@ -1,877 +0,0 @@
-import type { Common } from "@/common/common.js";
-import {
- FlushError,
- RecordNotFoundError,
- UniqueConstraintError,
-} from "@/common/errors.js";
-import type { HeadlessKysely } from "@/database/kysely.js";
-import type { Schema, Table } from "@/schema/common.js";
-import {
- getTables,
- isMaterialColumn,
- isReferenceColumn,
- isScalarColumn,
-} from "@/schema/utils.js";
-import type {
- DatabaseRecord,
- DatabaseValue,
- UserId,
- UserRecord,
- UserValue,
-} from "@/types/schema.js";
-import { createQueue } from "@ponder/common";
-import { sql } from "kysely";
-import { type Hex, padHex } from "viem";
-import type {
- HistoricalStore,
- OrderByInput,
- ReadonlyStore,
- WhereInput,
-} from "./store.js";
-import {
- decodeRecord,
- encodeRecord,
- encodeValue,
- validateRecord,
-} from "./utils/encoding.js";
-import { parseStoreError } from "./utils/errors.js";
-import { buildWhereConditions } from "./utils/filter.js";
-
-/** Cache entries that need to be created in the database. */
-type InsertEntry = {
- type: "insert";
- opIndex: number;
- bytes: number;
- record: UserRecord;
-};
-
-/** Cache entries that need to be updated in the database. */
-type UpdateEntry = {
- type: "update";
- opIndex: number;
- bytes: number;
- record: UserRecord;
-};
-
-/**
- * Cache entries that mirror the database. Can be `null`,
- * meaning the entry doesn't exist in the cache.
- */
-export type FindEntry = {
- type: "find";
- opIndex: number;
- bytes: number;
- record: UserRecord | null;
-};
-
-type Entry = InsertEntry | UpdateEntry | FindEntry;
-
-export type Key = string | number;
-
-/**
- * An in-memory representation of the indexing store. Every entry is
- * normalized, validated, and guaranteed to not share any references
- * with user-land.
- */
-type StoreCache = {
- [tableName: string]: { [key: Key]: Entry };
-};
-
-export const getHistoricalStore = ({
- dialect,
- schema,
- readonlyStore,
- db,
- common,
- isCacheExhaustive: _isCacheExhaustive,
-}: {
- dialect: "sqlite" | "postgres";
- schema: Schema;
- readonlyStore: ReadonlyStore;
- db: HeadlessKysely;
- common: Common;
- isCacheExhaustive: boolean;
-}): HistoricalStore => {
- const maxSizeBytes = common.options.indexingCacheMaxBytes;
- const storeCache: StoreCache = {};
- const tables = getTables(schema);
-
- common.logger.debug({
- service: "indexing",
- msg: `Using a ${Math.round(maxSizeBytes / (1024 * 1024))} MB indexing cache`,
- });
-
- /** True if the cache contains the complete state of the store. */
- let isCacheExhaustive = _isCacheExhaustive;
-
- /** Number of rows in cache. */
- let cacheSize = 0;
- /** Estimated number of bytes used by cache. */
- let cacheSizeBytes = 0;
- /** LRU counter. */
- let totalCacheOps = 0;
-
- for (const tableName of Object.keys(tables)) {
- storeCache[tableName] = {};
- }
-
- /**
- * Hex values must be normalized to mirror the `UInt8Array`
- * encoding. i.e. "0xa", "0xA", "0x0a", "0x0A" are all equivalent.
- */
- const normalizeHex = (hex: Hex) =>
- padHex(hex, {
- size: Math.ceil((hex.length - 2) / 2),
- dir: "left",
- }).toLowerCase();
-
- const getCacheKey = (id: UserId, tableName: string): Key => {
- if (tables[tableName]!.table.id[" scalar"] === "hex")
- return normalizeHex(id as Hex);
- if (typeof id === "bigint") return `#Bigint.${id}`;
- return id;
- };
-
- /**
- * Updates a record as if it had been encoded, stored in the database,
- * and then decoded. This is required to normalize p.hex() column values
- * and nullable column values.
- */
- const normalizeRecord = (record: UserRecord, tableName: string) => {
- for (const [columnName, column] of Object.entries(
- tables[tableName]!.table,
- )) {
- // optional columns are null
- if (isMaterialColumn(column) && record[columnName] === undefined) {
- record[columnName] = null;
- }
- // hex is lowercase byte encoded
- if (
- (isScalarColumn(column) || isReferenceColumn(column)) &&
- column[" scalar"] === "hex" &&
- typeof record[columnName] === "string"
- ) {
- record[columnName] = normalizeHex(record[columnName] as Hex);
- }
- }
- };
-
- const shouldFlush = () => cacheSizeBytes > maxSizeBytes;
-
- const flush = createQueue({
- concurrency: 1,
- initialStart: true,
- browser: false,
- worker: async ({ isFullFlush }: { isFullFlush: boolean }) => {
- const flushIndex =
- totalCacheOps -
- cacheSize * (1 - common.options.indexingCacheFlushRatio);
-
- await Promise.all(
- Object.entries(storeCache).map(async ([tableName, tableStoreCache]) => {
- const table = (schema[tableName] as { table: Table }).table;
- const cacheEntries = Object.values(tableStoreCache);
- const batchSize = Math.round(
- common.options.databaseMaxQueryParameters /
- Object.keys(table).length,
- );
-
- let insertRecords: UserRecord[];
-
- if (isFullFlush) {
- insertRecords = cacheEntries
- .filter(({ type }) => type === "insert")
- .map(({ record }) => record!);
- } else {
- insertRecords = cacheEntries
- .filter(
- ({ type, opIndex }) =>
- type === "insert" && opIndex < flushIndex,
- )
- .map(({ record }) => record!);
- }
-
- if (insertRecords.length !== 0) {
- common.logger.debug({
- service: "indexing",
- msg: `Inserting ${insertRecords.length} cached '${tableName}' records into the database`,
- });
-
- for (
- let i = 0, len = insertRecords.length;
- i < len;
- i += batchSize
- ) {
- await db.wrap({ method: `${tableName}.flush` }, async () => {
- const _insertRecords = insertRecords
- .slice(i, i + batchSize)
- // skip validation because it's already occurred in the store method
- .map((record) =>
- encodeRecord({
- record,
- table,
- schema,
- dialect,
- skipValidation: true,
- }),
- );
-
- await db
- .insertInto(tableName)
- .values(_insertRecords)
- .execute()
- .catch((_error) => {
- const error = _error as Error;
- common.logger.error({
- service: "indexing",
- msg: "Internal error occurred while flushing cache. Please report this error here: https://github.com/ponder-sh/ponder/issues",
- });
- throw new FlushError(error.message);
- });
- });
- }
- }
-
- // Exit early if the table only has an "id" column.
- if (Object.values(table).filter(isMaterialColumn).length === 1) {
- return;
- }
-
- let updateRecords: UserRecord[];
-
- if (isFullFlush) {
- updateRecords = cacheEntries
- .filter(({ type }) => type === "update")
- .map(({ record }) => record!);
- } else {
- updateRecords = cacheEntries
- .filter(
- ({ type, opIndex }) =>
- type === "update" && opIndex < flushIndex,
- )
- .map(({ record }) => record!);
- }
-
- if (updateRecords.length !== 0) {
- common.logger.debug({
- service: "indexing",
- msg: `Updating ${updateRecords.length} cached '${tableName}' records in the database`,
- });
-
- for (
- let i = 0, len = updateRecords.length;
- i < len;
- i += batchSize
- ) {
- await db.wrap({ method: `${tableName}.flush` }, async () => {
- const _updateRecords = updateRecords
- .slice(i, i + batchSize)
- // skip validation because it's already occurred in the store method
- .map((record) =>
- encodeRecord({
- record,
- table,
- schema,
- dialect,
- skipValidation: true,
- }),
- );
-
- await db
- .insertInto(tableName)
- .values(_updateRecords)
- .onConflict((oc) =>
- oc.column("id").doUpdateSet((eb) =>
- Object.entries(table).reduce(
- (acc, [colName, column]) => {
- if (colName !== "id") {
- if (isMaterialColumn(column)) {
- acc[colName] = eb.ref(`excluded.${colName}`);
- }
- }
- return acc;
- },
- {},
- ),
- ),
- )
- .execute()
- .catch((_error) => {
- const error = _error as Error;
- common.logger.error({
- service: "indexing",
- msg: "Internal error occurred while flushing cache. Please report this error here: https://github.com/ponder-sh/ponder/issues",
- });
- throw new FlushError(error.message);
- });
- });
- }
- }
- }),
- );
-
- if (isFullFlush) {
- for (const tableName of Object.keys(tables)) {
- storeCache[tableName] = {};
- }
- cacheSize = 0;
- cacheSizeBytes = 0;
- } else {
- for (const [tableName, tableStoreCache] of Object.entries(storeCache)) {
- for (const [key, { opIndex }] of Object.entries(tableStoreCache)) {
- if (opIndex < flushIndex) {
- const bytes = storeCache[tableName]![key]!.bytes;
- delete storeCache[tableName]![key];
-
- cacheSize--;
- cacheSizeBytes -= bytes;
- }
- }
- }
- }
-
- isCacheExhaustive = false;
- },
- }).add;
-
- const _findUnique = async ({
- tableName,
- id,
- }: {
- tableName: string;
- id: UserId;
- }) => {
- const table = tables[tableName]!.table;
-
- const encodedId = encodeValue({
- value: id,
- column: table.id,
- dialect,
- });
-
- const record = await db
- .selectFrom(tableName)
- .selectAll()
- .where("id", "=", encodedId)
- .executeTakeFirst();
-
- if (record === undefined) return null;
-
- return decodeRecord({ record, table, dialect });
- };
-
- return {
- findUnique: async ({
- tableName,
- id: _id,
- }: {
- tableName: string;
- id: UserId;
- }) => {
- if (shouldFlush()) await flush({ isFullFlush: false });
-
- return db.wrap({ method: `${tableName}.findUnique` }, async () => {
- const id = structuredClone(_id);
- const cacheKey = getCacheKey(id, tableName);
-
- const cacheEntry = storeCache[tableName]![cacheKey];
- if (cacheEntry !== undefined) {
- cacheEntry.opIndex = totalCacheOps++;
- return structuredClone(cacheEntry.record);
- }
-
- // At this point if cache is exhaustive, findUnique will always return null
- const record = isCacheExhaustive
- ? null
- : await _findUnique({ tableName, id });
-
- const bytes = getBytesSize(record);
-
- // add "find" entry to cache
- storeCache[tableName]![cacheKey] = {
- type: "find",
- opIndex: totalCacheOps++,
- bytes,
- record,
- };
-
- cacheSizeBytes += bytes;
- cacheSize++;
-
- return structuredClone(record);
- });
- },
- findMany: async (arg: {
- tableName: string;
- where?: WhereInput;
- orderBy?: OrderByInput;
- before?: string | null;
- after?: string | null;
- limit?: number;
- }) => {
- await flush({ isFullFlush: true });
- return readonlyStore.findMany(arg);
- },
- create: async ({
- tableName,
- id: _id,
- data = {},
- }: {
- tableName: string;
- id: UserId;
- data?: Omit<UserRecord, "id">;
- }) => {
- if (shouldFlush()) await flush({ isFullFlush: false });
-
- return db.wrap({ method: `${tableName}.create` }, async () => {
- const id = structuredClone(_id);
- const cacheKey = getCacheKey(id, tableName);
-
- // Check cache truthiness, will be false if record is null.
- if (storeCache[tableName]![cacheKey]?.record) {
- throw new UniqueConstraintError(
- `Unique constraint failed for '${tableName}.id'.`,
- );
- }
-
- // copy user-land record
- const record = structuredClone(data) as UserRecord;
- record.id = id;
-
- normalizeRecord(record, tableName);
-
- validateRecord({ record, table: tables[tableName]!.table, schema });
-
- const bytes = getBytesSize(record);
-
- storeCache[tableName]![cacheKey] = {
- type: "insert",
- opIndex: totalCacheOps++,
- bytes,
- record,
- };
-
- cacheSizeBytes += bytes;
- cacheSize++;
-
- return structuredClone(record);
- });
- },
- createMany: async ({
- tableName,
- data,
- }: {
- tableName: string;
- data: UserRecord[];
- }) => {
- if (shouldFlush()) await flush({ isFullFlush: false });
-
- return db.wrap({ method: `${tableName}.createMany` }, async () => {
- for (const _record of data) {
- const cacheKey = getCacheKey(_record.id, tableName);
-
- // Check cache truthiness, will be false if record is null.
- if (storeCache[tableName]![cacheKey]?.record) {
- throw new UniqueConstraintError(
- `Unique constraint failed for '${tableName}.id'.`,
- );
- }
-
- // copy user-land record
- const record = structuredClone(_record);
-
- normalizeRecord(record, tableName);
-
- validateRecord({ record, table: tables[tableName]!.table, schema });
-
- const bytes = getBytesSize(record);
-
- storeCache[tableName]![cacheKey] = {
- type: "insert",
- opIndex: totalCacheOps++,
- bytes,
- record,
- };
-
- cacheSizeBytes += bytes;
- }
-
- cacheSize += data.length;
-
- const returnData = structuredClone(data);
- for (const record of data) {
- normalizeRecord(record, tableName);
- }
- return returnData;
- });
- },
- update: async ({
- tableName,
- id: _id,
- data = {},
- }: {
- tableName: string;
- id: UserId;
- data?:
- | Partial<Omit<UserRecord, "id">>
- | ((args: { current: UserRecord }) => Partial<Omit<UserRecord, "id">>);
- }) => {
- if (shouldFlush()) await flush({ isFullFlush: false });
-
- return db.wrap({ method: `${tableName}.findUnique` }, async () => {
- const id = structuredClone(_id);
- const cacheKey = getCacheKey(id, tableName);
-
- let cacheEntry = storeCache[tableName]![cacheKey];
-
- if (cacheEntry === undefined) {
- const record = isCacheExhaustive
- ? null
- : await _findUnique({ tableName, id });
-
- if (record === null) {
- throw new RecordNotFoundError(
- "No existing record was found with the specified ID",
- );
- }
-
- // Note: a "spoof" cache entry is created
- cacheEntry = { type: "update", opIndex: 0, bytes: 0, record };
-
- storeCache[tableName]![cacheKey] = cacheEntry;
- } else {
- if (cacheEntry.record === null) {
- throw new RecordNotFoundError(
- "No existing record was found with the specified ID",
- );
- }
-
- if (cacheEntry.type === "find") {
- // move cache entry to "update"
- (cacheEntry.type as Entry["type"]) = "update";
- }
- }
-
- const update =
- typeof data === "function"
- ? data({ current: structuredClone(cacheEntry.record!) })
- : data;
-
- // copy user-land record
- const record = cacheEntry.record!;
- for (const [key, value] of Object.entries(structuredClone(update))) {
- record[key] = value;
- }
-
- normalizeRecord(record, tableName);
-
- validateRecord({ record, table: tables[tableName]!.table, schema });
-
- const bytes = getBytesSize(record);
-
- cacheEntry.record = record;
- cacheEntry.opIndex = totalCacheOps++;
- cacheEntry.bytes = bytes;
-
- return structuredClone(record);
- });
- },
- updateMany: async ({
- tableName,
- where,
- data = {},
- }: {
- tableName: string;
- where: WhereInput;
- data?:
- | Partial<Omit<UserRecord, "id">>
- | ((args: { current: UserRecord }) => Partial<Omit<UserRecord, "id">>);
- }) => {
- await flush({ isFullFlush: true });
-
- const table = (schema[tableName] as { table: Table }).table;
-
- if (typeof data === "function") {
- const query = db
- .selectFrom(tableName)
- .selectAll()
- .where((eb) => buildWhereConditions({ eb, where, table, dialect }))
- .orderBy("id", "asc");
-
- const records: UserRecord[] = [];
- let cursor: DatabaseValue = null;
-
- while (true) {
- const _records = await db.wrap(
- { method: `${tableName}.updateMany` },
- async () => {
- const latestRecords: DatabaseRecord[] = await query
- .limit(common.options.databaseMaxRowLimit)
- .$if(cursor !== null, (qb) => qb.where("id", ">", cursor))
- .execute();
-
- const records: DatabaseRecord[] = [];
-
- for (const latestRecord of latestRecords) {
- const current = decodeRecord({
- record: latestRecord,
- table,
- dialect,
- });
- const updateObject = data({ current });
- // Here, `latestRecord` is already encoded, so we need to exclude it from `encodeRecord`.
- const updateRecord = {
- id: latestRecord.id,
- ...encodeRecord({
- record: updateObject,
- table,
- schema,
- dialect,
- skipValidation: false,
- }),
- };
-
- const record = await db
- .updateTable(tableName)
- .set(updateRecord)
- .where("id", "=", latestRecord.id)
- .returningAll()
- .executeTakeFirstOrThrow()
- .catch((err) => {
- throw parseStoreError(err, updateObject);
- });
- records.push(record);
- }
-
- return records.map((record) =>
- decodeRecord({ record, table, dialect }),
- );
- },
- );
-
- records.push(..._records);
-
- if (_records.length === 0) {
- break;
- } else {
- cursor = encodeValue({
- value: _records[_records.length - 1]!.id,
- column: table.id,
- dialect,
- });
- }
- }
-
- return records;
- } else {
- return db.wrap({ method: `${tableName}.updateMany` }, async () => {
- const updateRecord = encodeRecord({
- record: data,
- table,
- schema,
- dialect,
- skipValidation: false,
- });
-
- const records = await db
- .with("latestRows(id)", (db) =>
- db
- .selectFrom(tableName)
- .select("id")
- .where((eb) =>
- buildWhereConditions({ eb, where, table, dialect }),
- ),
- )
- .updateTable(tableName)
- .set(updateRecord)
- .from("latestRows")
- .where(`${tableName}.id`, "=", sql.ref("latestRows.id"))
- .returningAll()
- .execute()
- .catch((err) => {
- throw parseStoreError(err, data);
- });
-
- return records.map((record) =>
- decodeRecord({ record, table, dialect }),
- );
- });
- }
- },
- upsert: async ({
- tableName,
- id: _id,
- create = {},
- update = {},
- }: {
- tableName: string;
- id: UserId;
- create?: Omit<UserRecord, "id">;
- update?:
- | Partial<Omit<UserRecord, "id">>
- | ((args: { current: UserRecord }) => Partial<Omit<UserRecord, "id">>);
- }) => {
- if (shouldFlush()) await flush({ isFullFlush: false });
-
- return db.wrap({ method: `${tableName}.upsert` }, async () => {
- const id = structuredClone(_id);
- const cacheKey = getCacheKey(id, tableName);
-
- let cacheEntry = storeCache[tableName]![cacheKey];
-
- if (cacheEntry === undefined) {
- if (isCacheExhaustive === false) {
- const record = await _findUnique({ tableName, id });
-
- if (record !== null) {
- // Note: a "spoof" cache entry is created
- cacheEntry = { type: "update", opIndex: 0, bytes: 0, record };
- storeCache[tableName]![cacheKey] = cacheEntry;
- }
-
- // Note: an "insert" cache entry will be created if the record is null,
- // so don't need to create it here.
- }
- } else {
- if (cacheEntry.type === "find") {
- if (cacheEntry.record === null) {
- // cache entry will be moved to "insert"
- (cacheEntry.type as Entry["type"]) = "insert";
- } else {
- // move cache entry to "update"
- (cacheEntry.type as Entry["type"]) = "update";
- }
- }
- }
-
- // Check cache truthiness, will be false if record is null.
- if (cacheEntry?.record) {
- // update branch
- const _update =
- typeof update === "function"
- ? update({ current: structuredClone(cacheEntry.record) })
- : update;
-
- // copy user-land record
- const record = cacheEntry.record;
- for (const [key, value] of Object.entries(structuredClone(_update))) {
- record[key] = value;
- }
-
- normalizeRecord(record, tableName);
-
- validateRecord({ record, table: tables[tableName]!.table, schema });
-
- const bytes = getBytesSize(record);
-
- cacheEntry.record = record;
- cacheEntry.opIndex = totalCacheOps++;
- cacheEntry.bytes = bytes;
-
- return structuredClone(record);
- } else {
- // insert/create branch
-
- // copy user-land record
- const record = structuredClone(create) as UserRecord;
- record.id = id;
-
- normalizeRecord(record, tableName);
-
- validateRecord({ record, table: tables[tableName]!.table, schema });
-
- const bytes = getBytesSize(record);
-
- storeCache[tableName]![cacheKey] = {
- type: "insert",
- opIndex: totalCacheOps++,
- bytes,
- record,
- };
-
- cacheSize++;
- cacheSizeBytes += bytes;
-
- return structuredClone(record);
- }
- });
- },
- delete: async ({
- tableName,
- id: _id,
- }: {
- tableName: string;
- id: UserId;
- }) => {
- if (shouldFlush()) await flush({ isFullFlush: false });
-
- return db.wrap({ method: `${tableName}.delete` }, async () => {
- const id = structuredClone(_id);
- const cacheKey = getCacheKey(id, tableName);
-
- const cacheEntry = storeCache[tableName]![cacheKey];
-
- if (cacheEntry !== undefined) {
- // delete from cache
- const bytes = cacheEntry.bytes;
- delete storeCache[tableName]![cacheKey];
- cacheSize--;
- cacheSizeBytes -= bytes;
- }
-
- if (isCacheExhaustive || cacheEntry?.record === null) {
- return false;
- } else {
- const table = (schema[tableName] as { table: Table }).table;
-
- const deletedRecord = await db
- .deleteFrom(tableName)
- .where(
- "id",
- "=",
- encodeValue({ value: id, column: table.id, dialect }),
- )
- .returning(["id"])
- .executeTakeFirst()
- .catch((err) => {
- throw parseStoreError(err, { id });
- });
-
- return !!deletedRecord;
- }
- });
- },
- flush,
- };
-};
-
-export const getBytesSize = (value: UserRecord | UserValue) => {
- // size of metadata
- let size = 16;
-
- if (typeof value === "number") {
- // p.float, p.int
- size += 8;
- } else if (typeof value === "string") {
- // p.hex, p.string, p.enum
- size += 2 * value.length;
- } else if (typeof value === "boolean") {
- // p.boolean
- size += 4;
- } else if (typeof value === "bigint") {
- // p.bigint
- size += 48;
- } else if (value === null || value === undefined) {
- size += 8;
- } else if (Array.isArray(value)) {
- for (const e of value) {
- size += getBytesSize(e);
- }
- } else {
- for (const col of Object.values(value)) {
- size += getBytesSize(col);
- }
- }
-
- return size;
-};
diff --git a/packages/core/src/indexing-store/index.ts b/packages/core/src/indexing-store/index.ts
new file mode 100644
index 000000000..33224f37f
--- /dev/null
+++ b/packages/core/src/indexing-store/index.ts
@@ -0,0 +1,199 @@
+import {
+ InvalidStoreMethodError,
+ RecordNotFoundError,
+} from "@/common/errors.js";
+import type { Database } from "@/database/index.js";
+import { type Schema, onchain } from "@/drizzle/index.js";
+import type { Db } from "@/types/db.js";
+import { type SQL, type Table, and, eq } from "drizzle-orm";
+import { type PgTable, getTableConfig } from "drizzle-orm/pg-core";
+
+export type IndexingStore = Db<Schema>;
+
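+// Builds a WHERE clause that matches every column/value pair in the key object,
+// so composite primary keys work with the store methods below.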
+const getKeyConditional = (table: Table, key: Object): SQL => {
+ // @ts-ignore
+ return and(
+ // @ts-ignore
+ ...Object.entries(key).map(([column, value]) => eq(table[column], value)),
+ );
+};
+
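+// Store methods may only touch onchain tables; throws if `table` lacks the
+// `onchain` marker (i.e. it was defined as an offchain table).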
+const checkOnchainTable = (
+ table: Table,
+ method: "find" | "insert" | "update" | "upsert" | "delete",
+) => {
+ if (onchain in table) return;
+
+ throw new InvalidStoreMethodError(
+ method === "find"
+ ? `db.find() can only be used with onchain tables, and '${getTableConfig(table).name}' is an offchain table.`
+ : `Indexing functions can only write to onchain tables, and '${getTableConfig(table).name}' is an offchain table.`,
+ );
+};
+
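+// Creates the store exposed to indexing functions. Each method wraps a Drizzle
+// query with the query builder's `wrap()` helper under a "<table>.<method>()" label.
+// Illustrative usage (table and column names are hypothetical):
+//   await db.insert(accounts).values({ address, balance: 0n });
+//   await db.update(accounts, { address }).set((row) => ({ balance: row.balance + 1n }));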
+export const createIndexingStore = ({
+ database,
+}: { database: Database }): IndexingStore => {
+ const wrap = database.qb.user.wrap;
+
+ const indexingStore = {
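+ // find(): SELECT the row matching `key`, resolving to the row or undefined.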
+ find: (table, key) =>
+ // @ts-ignore
+ wrap({ method: `${getTableConfig(table).name}.find()` }, () => {
+ checkOnchainTable(table as Table, "find");
+ return database.drizzle
+ .select()
+ .from(table as PgTable)
+ .where(getKeyConditional(table as PgTable, key))
+ .then((res) => (res.length === 0 ? undefined : res[0]));
+ }),
+ insert(table) {
+ return {
+ values: (values: any) =>
+ wrap(
+ { method: `${getTableConfig(table as PgTable).name}.insert()` },
+ async () => {
+ checkOnchainTable(table as Table, "insert");
+ await database.drizzle.insert(table as PgTable).values(values);
+ },
+ ),
+ };
+ },
+ // @ts-ignore
+ update(table, key) {
+ return {
+ set: (values: any) =>
+ wrap(
+ { method: `${getTableConfig(table as PgTable).name}.update()` },
+ async () => {
+ checkOnchainTable(table as Table, "update");
+ if (typeof values === "function") {
+ // @ts-ignore
+ const row = await indexingStore.find(table, key);
+
+ if (row === undefined) {
+ throw new RecordNotFoundError(
+ "No existing record was found with the specified ID",
+ );
+ }
+
+ await indexingStore.update(table, key).set(values(row));
+ } else {
+ await database.drizzle
+ .update(table as PgTable)
+ .set(values)
+ .where(getKeyConditional(table as PgTable, key));
+ }
+ },
+ ),
+ };
+ },
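+ // upsert() branches are thenable: awaiting `.insert(values)` inserts only when
+ // no row matches `key`, awaiting `.update(values)` updates only when one exists,
+ // and chaining both supplies the values for each branch.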
+ // @ts-ignore
+ upsert(table, key) {
+ return {
+ insert(valuesI: any) {
+ return {
+ update: (valuesU: any) =>
+ wrap(
+ { method: `${getTableConfig(table as PgTable).name}.upsert()` },
+ async () => {
+ checkOnchainTable(table as Table, "upsert");
+ // @ts-ignore
+ const row = await indexingStore.find(table, key);
+
+ if (row === undefined) {
+ await indexingStore
+ .insert(table)
+ .values({ ...key, ...valuesI });
+ } else {
+ if (typeof valuesU === "function") {
+ const values = valuesU(row);
+ await indexingStore.update(table, key).set(values);
+ } else {
+ await indexingStore.update(table, key).set(valuesU);
+ }
+ }
+ },
+ ),
+ // biome-ignore lint/suspicious/noThenProperty:
+ then: () =>
+ wrap(
+ { method: `${getTableConfig(table as PgTable).name}.upsert()` },
+ async () => {
+ checkOnchainTable(table as Table, "upsert");
+ // @ts-ignore
+ const row = await indexingStore.find(table, key);
+ if (row === undefined) {
+ await indexingStore
+ .insert(table)
+ .values({ ...key, ...valuesI });
+ }
+ },
+ ),
+ };
+ },
+ update(valuesU: any) {
+ return {
+ insert: (valuesI: any) =>
+ wrap(
+ { method: `${getTableConfig(table as PgTable).name}.upsert()` },
+ async () => {
+ checkOnchainTable(table as Table, "upsert");
+ // @ts-ignore
+ const row = await indexingStore.find(table, key);
+
+ if (row === undefined) {
+ await indexingStore
+ .insert(table)
+ .values({ ...key, ...valuesI });
+ } else {
+ if (typeof valuesU === "function") {
+ const values = valuesU(row);
+ await indexingStore.update(table, key).set(values);
+ } else {
+ await indexingStore.update(table, key).set(valuesU);
+ }
+ }
+ },
+ ),
+ // biome-ignore lint/suspicious/noThenProperty:
+ then: () =>
+ wrap(
+ { method: `${getTableConfig(table as PgTable).name}.upsert()` },
+ async () => {
+ checkOnchainTable(table as Table, "upsert");
+ const row = await indexingStore.find(
+ // @ts-ignore
+ table as Table & { [onchain]: true },
+ key,
+ );
+ if (row !== undefined) {
+ if (typeof valuesU === "function") {
+ const values = valuesU(row);
+ await indexingStore.update(table, key).set(values);
+ } else {
+ await indexingStore.update(table, key).set(valuesU);
+ }
+ }
+ },
+ ),
+ };
+ },
+ };
+ },
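+ // delete(): issue a DELETE for the row matching `key`.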
+ delete: (table, key) =>
+ wrap(
+ { method: `${getTableConfig(table as PgTable).name}.delete()` },
+ async () => {
+ checkOnchainTable(table as Table, "upsert");
+ await database.drizzle
+ .delete(table as Table)
+ .where(getKeyConditional(table as Table, key));
+ },
+ ),
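+ // Expose the underlying Drizzle instance as `db.sql` for raw queries.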
+ sql: database.drizzle,
+ } satisfies IndexingStore;
+
+ // @ts-ignore
+ return indexingStore;
+};
diff --git a/packages/core/src/indexing-store/metadata.test.ts b/packages/core/src/indexing-store/metadata.test.ts
index c67d9c095..e2a37ce10 100644
--- a/packages/core/src/indexing-store/metadata.test.ts
+++ b/packages/core/src/indexing-store/metadata.test.ts
@@ -16,10 +16,7 @@ test("getMetadata() empty", async (context) => {
const { database, cleanup } = await setupDatabaseServices(context, {
schema,
});
- const metadataStore = getMetadataStore({
- dialect: database.dialect,
- db: database.qb.user,
- });
+ const metadataStore = getMetadataStore({ db: database.qb.user });
const status = await metadataStore.getStatus();
@@ -32,10 +29,7 @@ test("setMetadata()", async (context) => {
const { database, cleanup } = await setupDatabaseServices(context, {
schema,
});
- const metadataStore = getMetadataStore({
- dialect: database.dialect,
- db: database.qb.user,
- });
+ const metadataStore = getMetadataStore({ db: database.qb.user });
await metadataStore.setStatus({
mainnet: { block: { number: 10, timestamp: 10 }, ready: false },
diff --git a/packages/core/src/indexing-store/metadata.ts b/packages/core/src/indexing-store/metadata.ts
index 757f23809..bdf8e2fd6 100644
--- a/packages/core/src/indexing-store/metadata.ts
+++ b/packages/core/src/indexing-store/metadata.ts
@@ -1,12 +1,14 @@
import type { HeadlessKysely } from "@/database/kysely.js";
import type { Status } from "@/sync/index.js";
-import type { MetadataStore } from "./store.js";
+
+export type MetadataStore = {
+ setStatus: (status: Status) => Promise<void>;
+ getStatus: () => Promise<Status | null>;
+};
export const getMetadataStore = ({
- dialect,
db,
}: {
- dialect: "sqlite" | "postgres";
db: HeadlessKysely;
}): MetadataStore => ({
getStatus: async () => {
@@ -19,9 +21,7 @@ export const getMetadataStore = ({
if (metadata!.value === null) return null;
- return dialect === "sqlite"
- ? (JSON.parse(metadata!.value) as Status)
- : (metadata!.value as Status);
+ return metadata!.value as Status;
});
},
setStatus: (status: Status) => {
@@ -30,11 +30,11 @@ export const getMetadataStore = ({
.insertInto("_ponder_meta")
.values({
key: "status",
- value: dialect === "sqlite" ? JSON.stringify(status) : status,
+ value: status,
})
.onConflict((oc) =>
oc.column("key").doUpdateSet({
- value: dialect === "sqlite" ? JSON.stringify(status) : status,
+ value: status,
}),
)
.execute();
diff --git a/packages/core/src/indexing-store/readonly.test.ts b/packages/core/src/indexing-store/readonly.test.ts
deleted file mode 100644
index 73d85ebc0..000000000
--- a/packages/core/src/indexing-store/readonly.test.ts
+++ /dev/null
@@ -1,679 +0,0 @@
-import {
- setupCommon,
- setupDatabaseServices,
- setupIsolatedDatabase,
-} from "@/_test/setup.js";
-import { createSchema } from "@/schema/schema.js";
-import {
- type Checkpoint,
- encodeCheckpoint,
- zeroCheckpoint,
-} from "@/utils/checkpoint.js";
-import { beforeEach, expect, test } from "vitest";
-
-beforeEach(setupCommon);
-beforeEach(setupIsolatedDatabase);
-
-const schema = createSchema((p) => ({
- PetKind: p.createEnum(["CAT", "DOG"]),
- Pet: p.createTable({
- id: p.string(),
- name: p.string(),
- age: p.int().optional(),
- bigAge: p.bigint().optional(),
- list: p.string().list().optional(),
- kind: p.enum("PetKind").optional(),
- rating: p.float().optional(),
- json: p.json().optional(),
- }),
- Person: p.createTable({
- id: p.string(),
- name: p.string(),
- }),
-}));
-
-const hexSchema = createSchema((p) => ({
- table: p.createTable({
- id: p.hex(),
- n: p.int(),
- }),
-}));
-
-function createCheckpoint(index: number): Checkpoint {
- return { ...zeroCheckpoint, blockTimestamp: index };
-}
-
-test("findUnique() works with hex case sensitivity", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema: hexSchema,
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "0x0a",
- data: { n: 1 },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "table",
- id: "0x0A",
- });
- expect(instance).toMatchObject({ id: "0x0a", n: 1 });
-
- await cleanup();
-});
-
-test("findUnique() deserializes json", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: {
- name: "Skip",
- age: 12,
- json: {
- kevin: 52,
- },
- },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
-
- expect(instance).toMatchObject({
- name: "Skip",
- age: 12,
- json: {
- kevin: 52,
- },
- });
-
- await cleanup();
-});
-
-test("findMany() returns current versions of all records", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(8)),
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
- await indexingStore.update({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "SkipUpdated" },
- });
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id2",
- data: { name: "Foo" },
- });
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id3",
- data: { name: "Bar", bigAge: 100n },
- });
-
- const { items } = await indexingStore.findMany({ tableName: "Pet" });
- expect(items).toHaveLength(3);
- expect(items.map((i) => i.name)).toMatchObject(["SkipUpdated", "Foo", "Bar"]);
-
- await cleanup();
-});
-
-test("findMany() orders by bigint field", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", bigAge: 105n },
- });
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id2",
- data: { name: "Foo", bigAge: 10n },
- });
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id3",
- data: { name: "Bar", bigAge: 190n },
- });
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id4",
- data: { name: "Patch" },
- });
-
- const { items } = await indexingStore.findMany({
- tableName: "Pet",
- orderBy: { bigAge: "asc" },
- });
- expect(items.map((i) => i.bigAge)).toMatchObject([null, 10n, 105n, 190n]);
-
- await cleanup();
-});
-
-test("findMany() filters on bigint gt", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", bigAge: 105n },
- });
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id2",
- data: { name: "Foo", bigAge: 10n },
- });
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id3",
- data: { name: "Bar", bigAge: 190n },
- });
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id4",
- data: { name: "Patch" },
- });
-
- const { items } = await indexingStore.findMany({
- tableName: "Pet",
- where: { bigAge: { gt: 50n } },
- });
-
- expect(items.map((i) => i.bigAge)).toMatchObject([105n, 190n]);
-
- await cleanup();
-});
-
-test("findMany() filters with complex OR condition", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.createMany({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- data: [
- { id: "id1", name: "Skip", bigAge: 105n },
- { id: "id2", name: "Foo", bigAge: 10n },
- { id: "id3", name: "Bar", bigAge: 190n },
- { id: "id4", name: "Zarbar" },
- { id: "id5", name: "Winston", age: 12 },
- ],
- });
-
- const { items } = await indexingStore.findMany({
- tableName: "Pet",
- where: {
- OR: [
- { bigAge: { gt: 50n } },
- { AND: [{ name: "Foo" }, { bigAge: { lt: 20n } }] },
- ],
- },
- });
-
- expect(items).toMatchObject([
- { id: "id1", name: "Skip", bigAge: 105n },
- { id: "id2", name: "Foo", bigAge: 10n },
- { id: "id3", name: "Bar", bigAge: 190n },
- ]);
-
- await cleanup();
-});
-
-test("findMany() sorts and filters together", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", bigAge: 105n },
- });
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id2",
- data: { name: "Foo", bigAge: 10n },
- });
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id3",
- data: { name: "Bar", bigAge: 190n },
- });
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id4",
- data: { name: "Zarbar" },
- });
-
- const { items } = await indexingStore.findMany({
- tableName: "Pet",
- where: { name: { endsWith: "ar" } },
- orderBy: { name: "asc" },
- });
-
- expect(items.map((i) => i.name)).toMatchObject(["Bar", "Zarbar"]);
-
- await cleanup();
-});
-
-test("findMany() errors on invalid filter condition", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- expect(() =>
- indexingStore.findMany({
- tableName: "Pet",
- where: { name: { invalidWhereCondition: "ar" } },
- }),
- ).rejects.toThrow(
- "Invalid filter condition for column 'name'. Got 'invalidWhereCondition', expected one of ['equals', 'not', 'in', 'notIn', 'contains', 'notContains', 'startsWith', 'notStartsWith', 'endsWith', 'notEndsWith']",
- );
-
- await cleanup();
-});
-
-test("findMany() cursor pagination ascending", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.createMany({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- data: [
- { id: "id1", name: "Skip" },
- { id: "id2", name: "Foo" },
- { id: "id3", name: "Bar" },
- { id: "id4", name: "Zarbar" },
- { id: "id5", name: "Winston" },
- { id: "id6", name: "Book" },
- { id: "id7", name: "Shea" },
- { id: "id8", name: "Snack" },
- { id: "id9", name: "Last" },
- ],
- });
-
- const resultOne = await indexingStore.findMany({
- tableName: "Pet",
- orderBy: { id: "asc" },
- limit: 5,
- });
-
- expect(
- resultOne.items.map((i) => ({ id: i.id, name: i.name })),
- ).toMatchObject([
- { id: "id1", name: "Skip" },
- { id: "id2", name: "Foo" },
- { id: "id3", name: "Bar" },
- { id: "id4", name: "Zarbar" },
- { id: "id5", name: "Winston" },
- ]);
- expect(resultOne.pageInfo).toMatchObject({
- startCursor: expect.any(String),
- endCursor: expect.any(String),
- hasPreviousPage: false,
- hasNextPage: true,
- });
-
- const resultTwo = await indexingStore.findMany({
- tableName: "Pet",
- orderBy: { id: "asc" },
- after: resultOne.pageInfo.endCursor,
- });
-
- expect(
- resultTwo.items.map((i) => ({ id: i.id, name: i.name })),
- ).toMatchObject([
- { id: "id6", name: "Book" },
- { id: "id7", name: "Shea" },
- { id: "id8", name: "Snack" },
- { id: "id9", name: "Last" },
- ]);
- expect(resultTwo.pageInfo).toMatchObject({
- startCursor: expect.any(String),
- endCursor: expect.any(String),
- hasPreviousPage: true,
- hasNextPage: false,
- });
-
- const resultThree = await indexingStore.findMany({
- tableName: "Pet",
- orderBy: { id: "asc" },
- before: resultTwo.pageInfo.startCursor,
- limit: 2,
- });
-
- expect(
- resultThree.items.map((i) => ({ id: i.id, name: i.name })),
- ).toMatchObject([
- { id: "id4", name: "Zarbar" },
- { id: "id5", name: "Winston" },
- ]);
- expect(resultThree.pageInfo).toMatchObject({
- startCursor: expect.any(String),
- endCursor: expect.any(String),
- hasPreviousPage: true,
- hasNextPage: true,
- });
-
- await cleanup();
-});
-
-test("findMany() cursor pagination descending", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.createMany({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- data: [
- { id: "id1", name: "Skip", bigAge: 105n },
- { id: "id2", name: "Foo", bigAge: 10n },
- { id: "id3", name: "Bar", bigAge: 190n },
- { id: "id4", name: "Zarbar" },
- { id: "id5", name: "Winston", age: 12 },
- ],
- });
-
- const resultOne = await indexingStore.findMany({
- tableName: "Pet",
- orderBy: { name: "desc" },
- limit: 2,
- });
-
- expect(
- resultOne.items.map((i) => ({ id: i.id, name: i.name })),
- ).toMatchObject([
- { id: "id4", name: "Zarbar" },
- { id: "id5", name: "Winston" },
- ]);
- expect(resultOne.pageInfo).toMatchObject({
- startCursor: expect.any(String),
- endCursor: expect.any(String),
- hasPreviousPage: false,
- hasNextPage: true,
- });
-
- const resultTwo = await indexingStore.findMany({
- tableName: "Pet",
- orderBy: { name: "desc" },
- after: resultOne.pageInfo.endCursor,
- });
-
- expect(
- resultTwo.items.map((i) => ({ id: i.id, name: i.name })),
- ).toMatchObject([
- { id: "id1", name: "Skip" },
- { id: "id2", name: "Foo" },
- { id: "id3", name: "Bar" },
- ]);
- expect(resultTwo.pageInfo).toMatchObject({
- startCursor: expect.any(String),
- endCursor: expect.any(String),
- hasPreviousPage: true,
- hasNextPage: false,
- });
-
- const resultThree = await indexingStore.findMany({
- tableName: "Pet",
- orderBy: { name: "desc" },
- before: resultTwo.pageInfo.startCursor,
- limit: 1,
- });
-
- expect(
- resultThree.items.map((i) => ({ id: i.id, name: i.name })),
- ).toMatchObject([{ id: "id5", name: "Winston" }]);
- expect(resultThree.pageInfo).toMatchObject({
- startCursor: expect.any(String),
- endCursor: expect.any(String),
- hasPreviousPage: true,
- hasNextPage: true,
- });
-
- await cleanup();
-});
-
-test("findMany() returns start and end cursor if limited", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.createMany({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- data: [
- { id: "id1", name: "Skip", bigAge: 105n },
- { id: "id2", name: "Foo", bigAge: 10n },
- { id: "id3", name: "Bar", bigAge: 190n },
- { id: "id4", name: "Zarbar" },
- { id: "id5", name: "Winston", age: 12 },
- ],
- });
-
- const resultOne = await indexingStore.findMany({
- tableName: "Pet",
- orderBy: { name: "asc" },
- });
-
- expect(
- resultOne.items.map((i) => ({ id: i.id, name: i.name })),
- ).toMatchObject([
- { id: "id3", name: "Bar" },
- { id: "id2", name: "Foo" },
- { id: "id1", name: "Skip" },
- { id: "id5", name: "Winston" },
- { id: "id4", name: "Zarbar" },
- ]);
- expect(resultOne.pageInfo).toMatchObject({
- startCursor: expect.any(String),
- endCursor: expect.any(String),
- hasPreviousPage: false,
- hasNextPage: false,
- });
-
- await cleanup();
-});
-
-test("findMany() returns hasPreviousPage if no results", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.createMany({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- data: [
- { id: "id1", name: "Skip", bigAge: 105n },
- { id: "id2", name: "Foo", bigAge: 10n },
- { id: "id3", name: "Bar", bigAge: 190n },
- { id: "id4", name: "Zarbar" },
- { id: "id5", name: "Winston", age: 12 },
- ],
- });
-
- const resultOne = await indexingStore.findMany({
- tableName: "Pet",
- orderBy: { name: "asc" },
- });
-
- const resultTwo = await indexingStore.findMany({
- tableName: "Pet",
- orderBy: { name: "asc" },
- after: resultOne.pageInfo.endCursor,
- });
-
- expect(resultTwo.items).toHaveLength(0);
- expect(resultTwo.pageInfo).toMatchObject({
- startCursor: null,
- endCursor: null,
- hasPreviousPage: true,
- hasNextPage: false,
- });
-
- await cleanup();
-});
-
-test("findMany() errors on orderBy object with multiple keys", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- expect(() =>
- indexingStore.findMany({
- tableName: "Pet",
- orderBy: { name: "asc", bigAge: "desc" },
- }),
- ).rejects.toThrow("Invalid sort. Cannot sort by multiple columns.");
-
- await cleanup();
-});
-
-test("findMany() ordering secondary sort inherits primary", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.createMany({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- data: [
- { id: "id1", name: "Skip", bigAge: 105n },
- { id: "id2", name: "Foo", bigAge: 10n },
- { id: "id3", name: "Bar", bigAge: 190n },
- { id: "id4", name: "Zarbar", bigAge: 10n },
- ],
- });
-
- const resultOne = await indexingStore.findMany({
- tableName: "Pet",
- orderBy: { bigAge: "desc" },
- });
-
- expect(resultOne.items).toMatchObject([
- { id: "id3", name: "Bar", bigAge: 190n },
- { id: "id1", name: "Skip", bigAge: 105n },
- { id: "id4", name: "Zarbar", bigAge: 10n }, // secondary sort by ID is descending
- { id: "id2", name: "Foo", bigAge: 10n },
- ]);
-
- const resultTwo = await indexingStore.findMany({
- tableName: "Pet",
- orderBy: { bigAge: "asc" },
- });
-
- expect(resultTwo.items).toMatchObject([
- { id: "id2", name: "Foo", bigAge: 10n },
- { id: "id4", name: "Zarbar", bigAge: 10n }, // secondary sort by ID is ascending
- { id: "id1", name: "Skip", bigAge: 105n },
- { id: "id3", name: "Bar", bigAge: 190n },
- ]);
-
- await cleanup();
-});
-
-test("findMany() where list", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- await indexingStore.createMany({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- data: [
- { id: "id1", name: "Skip", list: ["kevin", "kyle", "jay"] },
- { id: "id2", name: "Foo", list: ["widget", "gadget"] },
- ],
- });
-
- const resultOne = await indexingStore.findMany({
- tableName: "Pet",
- where: { list: { has: "kevin" } },
- });
-
- expect(resultOne.items).toMatchObject([
- { id: "id1", name: "Skip", list: ["kevin", "kyle", "jay"] },
- ]);
-
- await cleanup();
-});
-
-test("findMany() where hex list", async (context) => {
- const hexSchema = createSchema((p) => ({
- table: p.createTable({
- id: p.hex(),
- list: p.hex().list(),
- }),
- }));
-
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema: hexSchema,
- });
-
- await indexingStore.createMany({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- data: [
- { id: "0x00", list: ["0x0A", "0x0B"] },
- { id: "0x01", list: ["0x0a", "0x0b", "0x0c"] },
- ],
- });
-
- const resultOne = await indexingStore.findMany({
- tableName: "table",
- where: { list: { has: "0x0a" } },
- });
-
- expect(resultOne.items).toMatchObject([
- { id: "0x00", list: ["0x0a", "0x0b"] },
- { id: "0x01", list: ["0x0a", "0x0b", "0x0c"] },
- ]);
-
- const resultTwo = await indexingStore.findMany({
- tableName: "table",
- where: { list: { has: "0x0c" } },
- });
-
- expect(resultTwo.items).toMatchObject([
- { id: "0x01", list: ["0x0a", "0x0b", "0x0c"] },
- ]);
-
- await cleanup();
-});
diff --git a/packages/core/src/indexing-store/readonly.ts b/packages/core/src/indexing-store/readonly.ts
deleted file mode 100644
index 87a4b4f8c..000000000
--- a/packages/core/src/indexing-store/readonly.ts
+++ /dev/null
@@ -1,290 +0,0 @@
-import type { Common } from "@/common/common.js";
-import { StoreError } from "@/common/errors.js";
-import type { HeadlessKysely } from "@/database/kysely.js";
-import type { MaterialColumn, Schema, Table } from "@/schema/common.js";
-import type { UserId } from "@/types/schema.js";
-import { sql } from "kysely";
-import type { OrderByInput, ReadonlyStore, WhereInput } from "./store.js";
-import {
- buildCursorConditions,
- decodeCursor,
- encodeCursor,
-} from "./utils/cursor.js";
-import { decodeRecord, encodeValue } from "./utils/encoding.js";
-import { buildWhereConditions } from "./utils/filter.js";
-import {
- buildOrderByConditions,
- reverseOrderByConditions,
-} from "./utils/sort.js";
-
-const DEFAULT_LIMIT = 50 as const;
-
-export const getReadonlyStore = ({
- dialect,
- schema,
- db,
- common,
-}: {
- dialect: "sqlite" | "postgres";
- schema: Schema;
- db: HeadlessKysely<any>;
- common: Common;
-}): ReadonlyStore => ({
- findUnique: async ({
- tableName,
- id,
- }: {
- tableName: string;
- id: UserId;
- }) => {
- const table = (schema[tableName] as { table: Table }).table;
-
- return db.wrap({ method: `${tableName}.findUnique` }, async () => {
- const encodedId = encodeValue({
- value: id,
- column: table.id,
- dialect,
- });
-
- const record = await db
- .selectFrom(tableName)
- .selectAll()
- .where("id", "=", encodedId)
- .executeTakeFirst();
-
- if (record === undefined) return null;
-
- return decodeRecord({ record, table, dialect });
- });
- },
- findMany: async ({
- tableName,
- where,
- orderBy,
- before = null,
- after = null,
- limit = DEFAULT_LIMIT,
- }: {
- tableName: string;
- where?: WhereInput;
- orderBy?: OrderByInput;
- before?: string | null;
- after?: string | null;
- limit?: number;
- }) => {
- const table = (schema[tableName] as { table: Table }).table;
-
- return db.wrap({ method: `${tableName}.findMany` }, async () => {
- let query = db.selectFrom(tableName).selectAll();
-
- if (where) {
- query = query.where((eb) =>
- buildWhereConditions({ eb, where, table, dialect }),
- );
- }
-
- const orderByConditions = buildOrderByConditions({ orderBy, table });
- for (const [column, direction] of orderByConditions) {
- query = query.orderBy(
- column,
- dialect === "sqlite"
- ? direction
- : direction === "asc"
- ? sql`asc nulls first`
- : sql`desc nulls last`,
- );
- }
- const orderDirection = orderByConditions[0]![1];
-
- if (limit > common.options.databaseMaxRowLimit) {
- throw new StoreError(
- `Invalid limit. Got ${limit}, expected <=${common.options.databaseMaxRowLimit}.`,
- );
- }
-
- if (after !== null && before !== null) {
- throw new StoreError("Cannot specify both before and after cursors.");
- }
-
- let startCursor = null;
- let endCursor = null;
- let hasPreviousPage = false;
- let hasNextPage = false;
-
- // Neither cursor is specified; apply the order conditions and execute.
- if (after === null && before === null) {
- query = query.limit(limit + 1);
- const records = await query
- .execute()
- .then((records) =>
- records.map((record) => decodeRecord({ record, table, dialect })),
- );
-
- if (records.length === limit + 1) {
- records.pop();
- hasNextPage = true;
- }
-
- startCursor =
- records.length > 0
- ? encodeCursor(records[0]!, orderByConditions)
- : null;
- endCursor =
- records.length > 0
- ? encodeCursor(records[records.length - 1]!, orderByConditions)
- : null;
-
- return {
- items: records,
- pageInfo: { hasNextPage, hasPreviousPage, startCursor, endCursor },
- };
- }
-
- if (after !== null) {
- // User specified an 'after' cursor.
- const rawCursorValues = decodeCursor(after, orderByConditions);
- const cursorValues = rawCursorValues.map(([columnName, value]) => [
- columnName,
- encodeValue({
- value,
- column: table[columnName] as MaterialColumn,
- dialect,
- }),
- ]) satisfies [string, any][];
- query = query
- .where((eb) =>
- buildCursorConditions(cursorValues, "after", orderDirection, eb),
- )
- .limit(limit + 2);
-
- const records = await query
- .execute()
- .then((records) =>
- records.map((record) => decodeRecord({ record, table, dialect })),
- );
-
- if (records.length === 0) {
- return {
- items: records,
- pageInfo: {
- hasNextPage,
- hasPreviousPage,
- startCursor,
- endCursor,
- },
- };
- }
-
- // If the cursor of the first returned record equals the `after` cursor,
- // `hasPreviousPage` is true. Remove that record.
- if (encodeCursor(records[0]!, orderByConditions) === after) {
- records.shift();
- hasPreviousPage = true;
- } else {
- // Otherwise, remove the last record.
- records.pop();
- }
-
- // Now if the length of the records is still equal to limit + 1,
- // there is a next page.
- if (records.length === limit + 1) {
- records.pop();
- hasNextPage = true;
- }
-
- // Now calculate the cursors.
- startCursor =
- records.length > 0
- ? encodeCursor(records[0]!, orderByConditions)
- : null;
- endCursor =
- records.length > 0
- ? encodeCursor(records[records.length - 1]!, orderByConditions)
- : null;
-
- return {
- items: records,
- pageInfo: { hasNextPage, hasPreviousPage, startCursor, endCursor },
- };
- } else {
- // User specified a 'before' cursor.
- const rawCursorValues = decodeCursor(before!, orderByConditions);
- const cursorValues = rawCursorValues.map(([columnName, value]) => [
- columnName,
- encodeValue({
- value,
- column: table[columnName] as MaterialColumn,
- dialect,
- }),
- ]) satisfies [string, any][];
- query = query
- .where((eb) =>
- buildCursorConditions(cursorValues, "before", orderDirection, eb),
- )
- .limit(limit + 2);
-
- // Reverse the order by conditions to get the previous page.
- query = query.clearOrderBy();
- const reversedOrderByConditions =
- reverseOrderByConditions(orderByConditions);
- for (const [column, direction] of reversedOrderByConditions) {
- query = query.orderBy(column, direction);
- }
-
- const records = await query.execute().then((records) =>
- records
- .map((record) => decodeRecord({ record, table, dialect }))
- // Reverse the records again, back to the original order.
- .reverse(),
- );
-
- if (records.length === 0) {
- return {
- items: records,
- pageInfo: {
- hasNextPage,
- hasPreviousPage,
- startCursor,
- endCursor,
- },
- };
- }
-
- // If the cursor of the last returned record equals the `before` cursor,
- // `hasNextPage` is true. Remove that record.
- if (
- encodeCursor(records[records.length - 1]!, orderByConditions) ===
- before
- ) {
- records.pop();
- hasNextPage = true;
- } else {
- // Otherwise, remove the first record.
- records.shift();
- }
-
- // Now if the length of the records is equal to limit + 1, we know
- // there is a previous page.
- if (records.length === limit + 1) {
- records.shift();
- hasPreviousPage = true;
- }
-
- // Now calculate the cursors.
- startCursor =
- records.length > 0
- ? encodeCursor(records[0]!, orderByConditions)
- : null;
- endCursor =
- records.length > 0
- ? encodeCursor(records[records.length - 1]!, orderByConditions)
- : null;
-
- return {
- items: records,
- pageInfo: { hasNextPage, hasPreviousPage, startCursor, endCursor },
- };
- }
- });
- },
-});
diff --git a/packages/core/src/indexing-store/realtime.test.ts b/packages/core/src/indexing-store/realtime.test.ts
deleted file mode 100644
index 9b7d78c9c..000000000
--- a/packages/core/src/indexing-store/realtime.test.ts
+++ /dev/null
@@ -1,897 +0,0 @@
-import {
- setupCommon,
- setupDatabaseServices,
- setupIsolatedDatabase,
-} from "@/_test/setup.js";
-import {
- CheckConstraintError,
- UniqueConstraintError,
-} from "@/common/errors.js";
-import { createSchema } from "@/schema/schema.js";
-import {
- type Checkpoint,
- encodeCheckpoint,
- zeroCheckpoint,
-} from "@/utils/checkpoint.js";
-import { beforeEach, expect, test } from "vitest";
-
-beforeEach(setupCommon);
-beforeEach(setupIsolatedDatabase);
-
-const schema = createSchema((p) => ({
- PetKind: p.createEnum(["CAT", "DOG"]),
- Pet: p.createTable({
- id: p.string(),
- name: p.string(),
- age: p.int().optional(),
- bigAge: p.bigint().optional(),
- kind: p.enum("PetKind").optional(),
- rating: p.float().optional(),
- }),
- Person: p.createTable({
- id: p.string(),
- name: p.string(),
- }),
-}));
-
-const hexSchema = createSchema((p) => ({
- table: p.createTable({
- id: p.hex(),
- n: p.int(),
- }),
-}));
-
-function createCheckpoint(index: number): Checkpoint {
- return { ...zeroCheckpoint, blockTimestamp: index };
-}
-
-test("create() inserts a record", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- indexing: "realtime",
- });
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await cleanup();
-});
-
-test("create() throws on unique constraint violation", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- indexing: "realtime",
- });
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip" },
- });
-
- const error = await indexingStore
- .create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", age: 13 },
- })
- .catch((_error) => _error);
-
- expect(error).instanceOf(UniqueConstraintError);
-
- await cleanup();
-});
-
-test("create() respects optional fields", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- indexing: "realtime",
- });
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", kind: "CAT" },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
-
- expect(instance).toMatchObject({ id: "id1", name: "Skip", age: null });
-
- await cleanup();
-});
-
-test("create() accepts enums", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- indexing: "realtime",
- });
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", kind: "CAT" },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
-
- expect(instance).toMatchObject({ id: "id1", name: "Skip", kind: "CAT" });
-
- await cleanup();
-});
-
-test("create() throws on invalid enum value", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- indexing: "realtime",
- });
-
- const error = await indexingStore
- .create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", kind: "NOTACAT" },
- })
- .catch((error) => error);
-
- expect(error).toBeInstanceOf(CheckConstraintError);
-
- await cleanup();
-});
-
-test("create() accepts BigInt fields as bigint and returns as bigint", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- indexing: "realtime",
- });
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", bigAge: 100n },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
-
- expect(instance).toMatchObject({ id: "id1", name: "Skip", bigAge: 100n });
-
- await cleanup();
-});
-
-test("create() accepts float fields as float and returns as float", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- indexing: "realtime",
- });
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", rating: 1.0 },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
-
- expect(instance).toMatchObject({ id: "id1", name: "Skip", rating: 1.0 });
-
- await cleanup();
-});
-
-test("create() inserts into the log table", async (context) => {
- const { indexingStore, database, cleanup } = await setupDatabaseServices(
- context,
- {
- schema,
- indexing: "realtime",
- },
- );
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
-
- const logs = await database.qb.user
- .selectFrom("_ponder_reorg__Pet")
- .selectAll()
- .execute();
-
- expect(logs).toMatchObject([
- {
- id: "id1",
- checkpoint: encodeCheckpoint(createCheckpoint(10)),
- operation: 0,
- },
- ]);
-
- await cleanup();
-});
-
-test("update() updates a record", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- indexing: "realtime",
- });
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", bigAge: 100n },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", bigAge: 100n });
-
- await indexingStore.update({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(11)),
- id: "id1",
- data: { name: "Peanut Butter" },
- });
-
- const updatedInstance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(updatedInstance).toMatchObject({ id: "id1", name: "Peanut Butter" });
-
- await cleanup();
-});
-
-test("update() updates a record using an update function", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- indexing: "realtime",
- });
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", bigAge: 100n },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", bigAge: 100n });
-
- await indexingStore.update({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(11)),
- id: "id1",
- data: ({ current }) => ({
- name: `${current.name} and Skipper`,
- }),
- });
-
- const updatedInstance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(updatedInstance).toMatchObject({
- id: "id1",
- name: "Skip and Skipper",
- });
-
- await cleanup();
-});
-
-test("update() inserts into the log table", async (context) => {
- const { indexingStore, database, cleanup } = await setupDatabaseServices(
- context,
- {
- schema,
- indexing: "realtime",
- },
- );
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", bigAge: 100n },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", bigAge: 100n });
-
- await indexingStore.update({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(11)),
- id: "id1",
- data: { name: "Peanut Butter" },
- });
-
- const logs = await database.qb.user
- .selectFrom("_ponder_reorg__Pet")
- .selectAll()
- .execute();
-
- expect(logs).toHaveLength(2);
- expect(logs[1]).toMatchObject({
- id: "id1",
- checkpoint: encodeCheckpoint(createCheckpoint(11)),
- operation: 1,
- });
-
- await cleanup();
-});
-
-test("upsert() inserts a new record", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- indexing: "realtime",
- });
-
- await indexingStore.upsert({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- create: { name: "Skip", age: 12 },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await cleanup();
-});
-
-test("upsert() updates a record", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- indexing: "realtime",
- });
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await indexingStore.upsert({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(12)),
- id: "id1",
- create: { name: "Skip", age: 24 },
- update: { name: "Jelly" },
- });
-
- const updatedInstance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(updatedInstance).toMatchObject({ id: "id1", name: "Jelly", age: 12 });
-
- await cleanup();
-});
-
-test("upsert() updates a record using an update function", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- indexing: "realtime",
- });
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await indexingStore.upsert({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(12)),
- id: "id1",
- create: { name: "Skip", age: 24 },
- update: ({ current }) => ({
- age: (current.age as number) - 5,
- }),
- });
-
- const updatedInstance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(updatedInstance).toMatchObject({ id: "id1", name: "Skip", age: 7 });
-
- await cleanup();
-});
-
-test("upsert() inserts into the log table", async (context) => {
- const { indexingStore, database, cleanup } = await setupDatabaseServices(
- context,
- {
- schema,
- indexing: "realtime",
- },
- );
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await indexingStore.upsert({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(12)),
- id: "id1",
- create: { name: "Skip", age: 24 },
- update: { name: "Jelly" },
- });
-
- const logs = await database.qb.user
- .selectFrom("_ponder_reorg__Pet")
- .selectAll()
- .execute();
-
- expect(logs).toHaveLength(2);
- expect(logs[1]).toMatchObject({
- id: "id1",
- checkpoint: encodeCheckpoint(createCheckpoint(12)),
- operation: 1,
- });
-
- await cleanup();
-});
-
-test("delete() removes a record", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- indexing: "realtime",
- });
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await indexingStore.delete({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(15)),
- id: "id1",
- });
-
- const deletedInstance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(deletedInstance).toBe(null);
-
- await cleanup();
-});
-
-test("delete() inserts into the log table", async (context) => {
- const { indexingStore, database, cleanup } = await setupDatabaseServices(
- context,
- {
- schema,
- indexing: "realtime",
- },
- );
-
- await indexingStore.create({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "id1",
- data: { name: "Skip", age: 12 },
- });
- const instance = await indexingStore.findUnique({
- tableName: "Pet",
- id: "id1",
- });
- expect(instance).toMatchObject({ id: "id1", name: "Skip", age: 12 });
-
- await indexingStore.delete({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(15)),
- id: "id1",
- });
-
- const logs = await database.qb.user
- .selectFrom("_ponder_reorg__Pet")
- .selectAll()
- .execute();
-
- expect(logs).toHaveLength(2);
- expect(logs[1]).toMatchObject({
- id: "id1",
- checkpoint: encodeCheckpoint(createCheckpoint(15)),
- operation: 2,
- });
-
- await cleanup();
-});
-
-test("createMany() inserts multiple entities", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- indexing: "realtime",
- });
-
- const createdItems = await indexingStore.createMany({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- data: [
- { id: "id1", name: "Skip", bigAge: 105n },
- { id: "id2", name: "Foo", bigAge: 10n },
- { id: "id3", name: "Bar", bigAge: 190n },
- ],
- });
- expect(createdItems.length).toBe(3);
-
- const { items } = await indexingStore.findMany({ tableName: "Pet" });
- expect(items.length).toBe(3);
-
- await cleanup();
-});
-
-test("createMany() inserts a large number of entities", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- indexing: "realtime",
- });
-
- const RECORD_COUNT = 100_000;
-
- const createdItems = await indexingStore.createMany({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- data: [...Array(RECORD_COUNT).keys()].map((i) => ({
- id: `id${i}`,
- name: "Alice",
- bigAge: BigInt(i),
- })),
- });
- expect(createdItems.length).toBe(RECORD_COUNT);
-
- const { pageInfo } = await indexingStore.findMany({
- tableName: "Pet",
- limit: 1_000,
- });
- const { items } = await indexingStore.findMany({
- tableName: "Pet",
- after: pageInfo.endCursor,
- limit: 1_000,
- });
- expect(items.length).toBe(1_000);
-
- await cleanup();
-});
-
-test("createMany() inserts into the log table", async (context) => {
- const { indexingStore, database, cleanup } = await setupDatabaseServices(
- context,
- {
- schema,
- indexing: "realtime",
- },
- );
-
- await indexingStore.createMany({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- data: [
- { id: "id1", name: "Skip", bigAge: 105n },
- { id: "id2", name: "Foo", bigAge: 10n },
- { id: "id3", name: "Bar", bigAge: 190n },
- ],
- });
-
- const logs = await database.qb.user
- .selectFrom("_ponder_reorg__Pet")
- .selectAll()
- .execute();
-
- expect(logs).toHaveLength(3);
- expect(logs).toMatchObject([
- {
- id: "id1",
- checkpoint: encodeCheckpoint(createCheckpoint(10)),
- operation: 0,
- },
- {
- id: "id2",
- checkpoint: encodeCheckpoint(createCheckpoint(10)),
- operation: 0,
- },
- {
- id: "id3",
- checkpoint: encodeCheckpoint(createCheckpoint(10)),
- operation: 0,
- },
- ]);
-
- await cleanup();
-});
-
-test("updateMany() updates multiple entities", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- indexing: "realtime",
- });
-
- await indexingStore.createMany({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- data: [
- { id: "id1", name: "Skip", bigAge: 105n },
- { id: "id2", name: "Foo", bigAge: 10n },
- { id: "id3", name: "Bar", bigAge: 190n },
- ],
- });
-
- const updateditems = await indexingStore.updateMany({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(11)),
- where: { bigAge: { gt: 50n } },
- data: { bigAge: 300n },
- });
-
- expect(updateditems.length).toBe(2);
-
- const { items } = await indexingStore.findMany({ tableName: "Pet" });
-
- expect(items.map((i) => i.bigAge)).toMatchObject([300n, 10n, 300n]);
-
- await cleanup();
-});
-
-test("updateMany() inserts into the log table", async (context) => {
- const { indexingStore, database, cleanup } = await setupDatabaseServices(
- context,
- {
- schema,
- indexing: "realtime",
- },
- );
-
- await indexingStore.createMany({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- data: [
- { id: "id1", name: "Skip", bigAge: 105n },
- { id: "id2", name: "Foo", bigAge: 10n },
- { id: "id3", name: "Bar", bigAge: 190n },
- ],
- });
-
- await indexingStore.updateMany({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(11)),
- where: { bigAge: { gt: 50n } },
- data: { bigAge: 300n },
- });
-
- const logs = await database.qb.user
- .selectFrom("_ponder_reorg__Pet")
- .selectAll()
- .execute();
-
- expect(logs).toHaveLength(5);
- expect(logs[3]).toMatchObject({
- id: "id1",
- checkpoint: encodeCheckpoint(createCheckpoint(11)),
- operation: 1,
- });
- expect(logs[4]).toMatchObject({
- id: "id3",
- checkpoint: encodeCheckpoint(createCheckpoint(11)),
- operation: 1,
- });
-
- await cleanup();
-});
-
-test("updateMany() updates a large number of entities", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema,
- });
-
- const RECORD_COUNT = 1_000;
-
- await indexingStore.createMany({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- data: [...Array(RECORD_COUNT).keys()].map((i) => ({
- id: `id${i}`,
- name: "Alice",
- bigAge: BigInt(i),
- })),
- });
-
- const updatedItems = await indexingStore.updateMany({
- tableName: "Pet",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- where: {},
- data: ({ current }) => ({
- bigAge: (current.bigAge as bigint) + 1n,
- }),
- });
- expect(updatedItems.length).toBe(RECORD_COUNT);
-
- await cleanup();
-});
-
-test("update() works with hex case sensitivity", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema: hexSchema,
- indexing: "realtime",
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "0x0a",
- data: { n: 1 },
- });
-
- await indexingStore.update({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "0x0A",
- data: { n: 2 },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "table",
- id: "0x0A",
- });
- expect(instance).toMatchObject({ id: "0x0a", n: 2 });
-
- await cleanup();
-});
-
-test("updateMany() works with hex case sensitivity", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema: hexSchema,
- indexing: "realtime",
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "0x0a",
- data: { n: 1 },
- });
-
- await indexingStore.updateMany({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- where: { n: { gt: 0 } },
- data: { n: 2 },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "table",
- id: "0x0a",
- });
- expect(instance).toMatchObject({ id: "0x0a", n: 2 });
-
- await cleanup();
-});
-
-test("upsert() works with hex case sensitivity", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema: hexSchema,
- indexing: "realtime",
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "0x0a",
- data: { n: 1 },
- });
-
- await indexingStore.upsert({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "0xA",
- update: { n: 2 },
- });
-
- const instance = await indexingStore.findUnique({
- tableName: "table",
- id: "0xA",
- });
- expect(instance).toMatchObject({ id: "0x0a", n: 2 });
-
- await cleanup();
-});
-
-test("delete() works with hex case sensitivity", async (context) => {
- const { indexingStore, cleanup } = await setupDatabaseServices(context, {
- schema: hexSchema,
- indexing: "realtime",
- });
-
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(10)),
- id: "0xa",
- data: { n: 1 },
- });
-
- await indexingStore.delete({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint(createCheckpoint(25)),
- id: "0xA",
- });
-
- const deletedInstance = await indexingStore.findUnique({
- tableName: "table",
- id: "0xa",
- });
-
- expect(deletedInstance).toBe(null);
-
- await cleanup();
-});
diff --git a/packages/core/src/indexing-store/realtime.ts b/packages/core/src/indexing-store/realtime.ts
deleted file mode 100644
index 406907137..000000000
--- a/packages/core/src/indexing-store/realtime.ts
+++ /dev/null
@@ -1,469 +0,0 @@
-import type { Common } from "@/common/common.js";
-import type { HeadlessKysely } from "@/database/kysely.js";
-import type { Schema, Table } from "@/schema/common.js";
-import type {
- DatabaseRecord,
- DatabaseValue,
- UserId,
- UserRecord,
-} from "@/types/schema.js";
-import type { WhereInput, WriteStore } from "./store.js";
-import { decodeRecord, encodeRecord, encodeValue } from "./utils/encoding.js";
-import { parseStoreError } from "./utils/errors.js";
-import { buildWhereConditions } from "./utils/filter.js";
-
-export const getRealtimeStore = ({
- dialect,
- schema,
- db,
- common,
-}: {
- dialect: "sqlite" | "postgres";
- schema: Schema;
- db: HeadlessKysely<any>;
- common: Common;
-}): WriteStore<"realtime"> => ({
- create: ({
- tableName,
- encodedCheckpoint,
- id,
- data = {},
- }: {
- tableName: string;
- encodedCheckpoint: string;
- id: UserId;
- data?: Omit<UserRecord, "id">;
- }) => {
- const table = (schema[tableName] as { table: Table }).table;
-
- return db.wrap({ method: `${tableName}.create` }, async () => {
- const createRecord = encodeRecord({
- record: { id, ...data },
- table,
- dialect,
- schema,
- skipValidation: false,
- });
-
- return await db.transaction().execute(async (tx) => {
- const record = await tx
- .insertInto(tableName)
- .values(createRecord)
- .returningAll()
- .executeTakeFirstOrThrow()
- .catch((err) => {
- throw parseStoreError(err, { id, ...data });
- });
-
- await tx
- .insertInto(`_ponder_reorg__${tableName}`)
- .values({
- operation: 0,
- id: createRecord.id,
- checkpoint: encodedCheckpoint,
- })
- .execute();
-
- return decodeRecord({ record: record, table, dialect });
- });
- });
- },
- createMany: ({
- tableName,
- encodedCheckpoint,
- data,
- }: {
- tableName: string;
- encodedCheckpoint: string;
- data: UserRecord[];
- }) => {
- const table = (schema[tableName] as { table: Table }).table;
-
- return db.wrap({ method: `${tableName}.createMany` }, async () => {
- const records: DatabaseRecord[] = [];
- await db.transaction().execute(async (tx) => {
- const batchSize = Math.round(
- common.options.databaseMaxQueryParameters / Object.keys(table).length,
- );
- for (let i = 0, len = data.length; i < len; i += batchSize) {
- const createRecords = data.slice(i, i + batchSize).map((d) =>
- encodeRecord({
- record: d,
- table,
- dialect,
- schema,
- skipValidation: false,
- }),
- );
-
- const _records = await tx
- .insertInto(tableName)
- .values(createRecords)
- .returningAll()
- .execute()
- .catch((err) => {
- throw parseStoreError(err, data.length > 0 ? data[0]! : {});
- });
-
- records.push(..._records);
-
- await tx
- .insertInto(`_ponder_reorg__${tableName}`)
- .values(
- createRecords.map((record) => ({
- operation: 0,
- id: record.id,
- checkpoint: encodedCheckpoint,
- })),
- )
- .execute();
- }
- });
-
- return records.map((record) => decodeRecord({ record, table, dialect }));
- });
- },
- update: ({
- tableName,
- encodedCheckpoint,
- id,
- data = {},
- }: {
- tableName: string;
- encodedCheckpoint: string;
- id: UserId;
- data?:
- | Partial<Omit<UserRecord, "id">>
- | ((args: { current: UserRecord }) => Partial<Omit<UserRecord, "id">>);
- }) => {
- const table = (schema[tableName] as { table: Table }).table;
-
- return db.wrap({ method: `${tableName}.update` }, async () => {
- const encodedId = encodeValue({ value: id, column: table.id, dialect });
-
- const record = await db.transaction().execute(async (tx) => {
- const latestRecord = await tx
- .selectFrom(tableName)
- .selectAll()
- .where("id", "=", encodedId)
- .executeTakeFirstOrThrow()
- .catch((err) => {
- throw parseStoreError(err, { id, data: "(function)" });
- });
-
- const updateObject =
- typeof data === "function"
- ? data({
- current: decodeRecord({
- record: latestRecord,
- table,
- dialect,
- }),
- })
- : data;
- const updateRecord = encodeRecord({
- record: { id, ...updateObject },
- table,
- dialect,
- schema,
- skipValidation: false,
- });
-
- const updateResult = await tx
- .updateTable(tableName)
- .set(updateRecord)
- .where("id", "=", encodedId)
- .returningAll()
- .executeTakeFirstOrThrow()
- .catch((err) => {
- throw parseStoreError(err, { id, ...updateObject });
- });
-
- await tx
- .insertInto(`_ponder_reorg__${tableName}`)
- .values({
- operation: 1,
- checkpoint: encodedCheckpoint,
- ...latestRecord,
- })
- .execute();
-
- return updateResult;
- });
-
- const result = decodeRecord({ record: record, table, dialect });
-
- return result;
- });
- },
- updateMany: async ({
- tableName,
- encodedCheckpoint,
- where,
- data = {},
- }: {
- tableName: string;
- encodedCheckpoint: string;
- where: WhereInput;
- data?:
- | Partial<Omit<UserRecord, "id">>
- | ((args: { current: UserRecord }) => Partial<Omit<UserRecord, "id">>);
- }) => {
- const table = (schema[tableName] as { table: Table }).table;
-
- const records: UserRecord[] = [];
- let cursor: DatabaseValue = null;
-
- while (true) {
- const _records = await db.wrap(
- { method: `${tableName}.updateMany` },
- () =>
- db.transaction().execute(async (tx) => {
- const latestRecords: DatabaseRecord[] = await tx
- .selectFrom(tableName)
- .selectAll()
- .where((eb) =>
- buildWhereConditions({
- eb,
- where,
- table,
- dialect,
- }),
- )
- .orderBy("id", "asc")
- .limit(common.options.databaseMaxRowLimit)
- .$if(cursor !== null, (qb) => qb.where("id", ">", cursor))
- .execute();
-
- const records: DatabaseRecord[] = [];
-
- for (const latestRecord of latestRecords) {
- const updateObject =
- typeof data === "function"
- ? data({
- current: decodeRecord({
- record: latestRecord,
- table,
- dialect,
- }),
- })
- : data;
-
- // Here, `latestRecord` is already encoded, so we need to exclude it from `encodeRecord`.
- const updateRecord = {
- id: latestRecord.id,
- ...encodeRecord({
- record: updateObject,
- table,
- dialect,
- schema,
- skipValidation: false,
- }),
- };
-
- const record = await tx
- .updateTable(tableName)
- .set(updateRecord)
- .where("id", "=", latestRecord.id)
- .returningAll()
- .executeTakeFirstOrThrow()
- .catch((err) => {
- throw parseStoreError(err, updateObject);
- });
-
- records.push(record);
-
- await tx
- .insertInto(`_ponder_reorg__${tableName}`)
- .values({
- operation: 1,
- checkpoint: encodedCheckpoint,
- ...latestRecord,
- })
- .execute();
- }
-
- return records.map((record) =>
- decodeRecord({ record, table, dialect }),
- );
- }),
- );
-
- records.push(..._records);
-
- if (_records.length === 0) {
- break;
- } else {
- cursor = encodeValue({
- value: _records[_records.length - 1]!.id,
- column: table.id,
- dialect,
- });
- }
- }
-
- return records;
- },
- upsert: ({
- tableName,
- encodedCheckpoint,
- id,
- create = {},
- update = {},
- }: {
- tableName: string;
- encodedCheckpoint: string;
- id: UserId;
- create?: Omit<UserRecord, "id">;
- update?:
- | Partial<Omit<UserRecord, "id">>
- | ((args: { current: UserRecord }) => Partial<Omit<UserRecord, "id">>);
- }) => {
- const table = (schema[tableName] as { table: Table }).table;
-
- return db.wrap({ method: `${tableName}.upsert` }, async () => {
- const encodedId = encodeValue({ value: id, column: table.id, dialect });
- const createRecord = encodeRecord({
- record: { id, ...create },
- table,
- dialect,
- schema,
- skipValidation: false,
- });
-
- const record = await db.transaction().execute(async (tx) => {
- // Find the latest version of this instance.
- const latestRecord = await tx
- .selectFrom(tableName)
- .selectAll()
- .where("id", "=", encodedId)
- .executeTakeFirst();
-
- // If there is no latest version, insert a new version using the create data.
- if (latestRecord === undefined) {
- const record = await tx
- .insertInto(tableName)
- .values(createRecord)
- .returningAll()
- .executeTakeFirstOrThrow()
- .catch((err) => {
- const prettyObject: any = { id };
- for (const [key, value] of Object.entries(create))
- prettyObject[`create.${key}`] = value;
- if (typeof update === "function") {
- prettyObject.update = "(function)";
- } else {
- for (const [key, value] of Object.entries(update))
- prettyObject[`update.${key}`] = value;
- }
- throw parseStoreError(err, prettyObject);
- });
-
- await tx
- .insertInto(`_ponder_reorg__${tableName}`)
- .values({
- operation: 0,
- id: createRecord.id,
- checkpoint: encodedCheckpoint,
- })
- .execute();
-
- return record;
- }
-
- const updateObject =
- typeof update === "function"
- ? update({
- current: decodeRecord({
- record: latestRecord,
- table,
- dialect,
- }),
- })
- : update;
- const updateRecord = encodeRecord({
- record: { id, ...updateObject },
- table,
- dialect,
- schema,
- skipValidation: false,
- });
-
- const record = await tx
- .updateTable(tableName)
- .set(updateRecord)
- .where("id", "=", encodedId)
- .returningAll()
- .executeTakeFirstOrThrow()
- .catch((err) => {
- const prettyObject: any = { id };
- for (const [key, value] of Object.entries(create))
- prettyObject[`create.${key}`] = value;
- for (const [key, value] of Object.entries(updateObject))
- prettyObject[`update.${key}`] = value;
- throw parseStoreError(err, prettyObject);
- });
-
- await tx
- .insertInto(`_ponder_reorg__${tableName}`)
- .values({
- operation: 1,
- checkpoint: encodedCheckpoint,
- ...latestRecord,
- })
- .execute();
-
- return record;
- });
-
- return decodeRecord({ record, table, dialect });
- });
- },
- delete: ({
- tableName,
- encodedCheckpoint,
- id,
- }: {
- tableName: string;
- encodedCheckpoint: string;
- id: UserId;
- }) => {
- const table = (schema[tableName] as { table: Table }).table;
-
- return db.wrap({ method: `${tableName}.delete` }, async () => {
- const encodedId = encodeValue({ value: id, column: table.id, dialect });
-
- const isDeleted = await db.transaction().execute(async (tx) => {
- const record = await tx
- .selectFrom(tableName)
- .selectAll()
- .where("id", "=", encodedId)
- .executeTakeFirst();
-
- const deletedRecord = await tx
- .deleteFrom(tableName)
- .where("id", "=", encodedId)
- .returning(["id"])
- .executeTakeFirst()
- .catch((err) => {
- throw parseStoreError(err, { id });
- });
-
- if (record !== undefined) {
- await tx
- .insertInto(`_ponder_reorg__${tableName}`)
- .values({
- operation: 2,
- checkpoint: encodedCheckpoint,
- ...record,
- })
- .execute();
- }
-
- return !!deletedRecord;
- });
-
- return isDeleted;
- });
- },
-});
diff --git a/packages/core/src/indexing-store/store.bench.ts b/packages/core/src/indexing-store/store.bench.ts
deleted file mode 100644
index 9c839df89..000000000
--- a/packages/core/src/indexing-store/store.bench.ts
+++ /dev/null
@@ -1,178 +0,0 @@
-import {
- setupCommon,
- setupDatabaseServices,
- setupIsolatedDatabase,
-} from "@/_test/setup.js";
-import { createSchema } from "@/schema/schema.js";
-import { encodeCheckpoint, zeroCheckpoint } from "@/utils/checkpoint.js";
-import { range } from "@/utils/range.js";
-import { type TestContext, bench } from "vitest";
-import type { IndexingStore } from "./store.js";
-
-let context: TestContext;
-let indexingStore: IndexingStore;
-let cleanup: () => Promise<void>;
-
-let count = 50_000;
-
-const schema = createSchema((p) => ({
- table: p.createTable({
- id: p.string(),
- name: p.string(),
- bigAge: p.bigint(),
- }),
-}));
-
-const setup = async () => {
- context = {} as TestContext;
-
- setupCommon(context);
- const cleanupDatabase = await setupIsolatedDatabase(context);
- const { indexingStore: indexingStore_, cleanup: cleanupIndexingStore } =
- await setupDatabaseServices(context, {
- schema,
- });
-
- indexingStore = indexingStore_;
- cleanup = async () => {
- await cleanupIndexingStore();
- await cleanupDatabase();
- };
-
- await indexingStore.createMany({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint({
- ...zeroCheckpoint,
- blockTimestamp: count,
- }),
- data: range(0, count).map((i) => ({
- id: `${i}`,
- name: "Kevin",
- bigAge: 22n,
- })),
- });
-};
-
-const teardown = async () => {
- await cleanup();
-};
-
-bench(
- "create",
- async () => {
- await indexingStore.create({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint({
- ...zeroCheckpoint,
- blockTimestamp: count,
- }),
- id: (count++).toString(),
- data: { name: "Kyle", bigAge: 10n },
- });
- },
- { setup, teardown },
-);
-
-bench(
- "update",
- async () => {
- await indexingStore.update({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint({
- ...zeroCheckpoint,
- blockTimestamp: count,
- }),
- id: "500",
- data: { name: "Kyle" },
- });
- },
- { setup, teardown },
-);
-
-bench(
- "upsert",
- async () => {
- await indexingStore.upsert({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint({
- ...zeroCheckpoint,
- blockTimestamp: count,
- }),
- id: (count++).toString(),
- create: { name: "Kyle", bigAge: 23n },
- update: { name: "Kyle" },
- });
- },
- { setup, teardown },
-);
-
-bench(
- "delete",
- async () => {
- await indexingStore.delete({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint({
- ...zeroCheckpoint,
- blockTimestamp: count,
- }),
- id: (count--).toString(),
- });
- },
- { setup, teardown },
-);
-
-bench(
- "findUnique",
- async () => {
- await indexingStore.findUnique({
- tableName: "table",
- id: "500",
- });
- },
- { setup, teardown },
-);
-
-bench(
- "findMany",
- async () => {
- await indexingStore.findMany({
- tableName: "table",
- });
- },
- { setup, teardown },
-);
-
-bench(
- "createMany",
- async () => {
- await indexingStore.createMany({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint({
- ...zeroCheckpoint,
- blockTimestamp: count,
- }),
- data: [
- { id: (count++).toString(), name: "Kevin", bigAge: 22n },
- { id: (count++).toString(), name: "Kevin", bigAge: 22n },
- { id: (count++).toString(), name: "Kevin", bigAge: 22n },
- ],
- });
- },
- { setup, teardown },
-);
-
-bench(
- "updateMany",
- async () => {
- await indexingStore.updateMany({
- tableName: "table",
- encodedCheckpoint: encodeCheckpoint({
- ...zeroCheckpoint,
- blockTimestamp: count,
- }),
- data: { name: "Kevin", bigAge: 22n },
- where: { id: { equals: "500" } },
- });
- },
- { setup, teardown },
-);
diff --git a/packages/core/src/indexing-store/store.ts b/packages/core/src/indexing-store/store.ts
deleted file mode 100644
index 0bc54e6b5..000000000
--- a/packages/core/src/indexing-store/store.ts
+++ /dev/null
@@ -1,156 +0,0 @@
-import type { Status } from "@/sync/index.js";
-import type {
- UserId,
- UserRecord,
- UserTable,
- UserValue,
-} from "@/types/schema.js";
-import type { Prettify } from "@/types/utils.js";
-import type { Hex } from "viem";
-
-export type ReadonlyStore = {
- findUnique(options: {
- tableName: string;
- id: UserId;
- }): Promise<UserRecord | null>;
-
- findMany(options: {
- tableName: string;
- where?: WhereInput;
- orderBy?: OrderByInput;
- before?: string | null;
- after?: string | null;
- limit?: number;
- }): Promise<{
- items: UserRecord[];
- pageInfo: {
- startCursor: string | null;
- endCursor: string | null;
- hasNextPage: boolean;
- hasPreviousPage: boolean;
- };
- }>;
-};
-
-export type WriteStore<
- env extends "historical" | "realtime",
- ///
- checkpointProp = env extends "realtime"
- ? { encodedCheckpoint: string }
- : { encodedCheckpoint?: never },
-> = {
- create(
- options: {
- tableName: string;
- id: UserId;
- data?: Omit<UserRecord, "id">;
- } & checkpointProp,
- ): Promise<UserRecord>;
-
- createMany(
- options: {
- tableName: string;
- data: UserRecord[];
- } & checkpointProp,
- ): Promise<UserRecord[]>;
-
- update(
- options: {
- tableName: string;
- id: UserId;
- data?:
- | Partial<Omit<UserRecord, "id">>
- | ((args: { current: UserRecord }) => Partial<Omit<UserRecord, "id">>);
- } & checkpointProp,
- ): Promise<UserRecord>;
-
- updateMany(
- options: {
- tableName: string;
- where?: WhereInput;
- data?:
- | Partial<Omit<UserRecord, "id">>
- | ((args: { current: UserRecord }) => Partial<Omit<UserRecord, "id">>);
- } & checkpointProp,
- ): Promise<UserRecord[]>;
-
- upsert(
- options: {
- tableName: string;
- id: UserId;
- create?: Omit