more meaningful template saving blackhole burns
dzhelezov committed Jan 27, 2023
1 parent 589c4e1 commit ff452b9
Showing 8 changed files with 73 additions and 37 deletions.
4 changes: 2 additions & 2 deletions commands.json
@@ -24,7 +24,7 @@
},
"migration:generate": {
"description": "Generate a DB migration matching the TypeORM entities",
"deps": ["build"],
"deps": ["build", "migration:clean"],
"cmd": ["squid-typeorm-migration", "generate"],
},
"migration:clean": {
@@ -46,7 +46,7 @@
},
"process": {
"description": "Load .env and start the squid processor",
"deps": ["migration:apply"],
"deps": ["build", "migration:apply"],
"cmd": ["node", "--require=dotenv/config", "lib/processor.js"]
},
"process:prod": {
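The dependency changes above rely on how commands.json entries are consumed: a command's deps run before its own cmd, so migration:generate now clears stale migration files via migration:clean before writing a new one, and process rebuilds before applying migrations and starting the processor. Below is a minimal TypeScript sketch of that resolution order; it is a conceptual illustration only, not the actual @subsquid/commands runner, and the CommandDef type and runCommand helper are hypothetical names.

import { execFileSync } from 'child_process'

// Shape of one entry in commands.json (hypothetical, for illustration)
type CommandDef = { description?: string; deps?: string[]; cmd?: string[] }

// Run a command's deps first (each at most once), then the command itself
function runCommand(
  commands: Record<string, CommandDef>,
  name: string,
  done: Set<string> = new Set()
): void {
  if (done.has(name)) return
  done.add(name)
  const def = commands[name]
  if (!def) throw new Error(`Unknown command: ${name}`)
  for (const dep of def.deps ?? []) {
    runCommand(commands, dep, done)
  }
  if (def.cmd) execFileSync(def.cmd[0], def.cmd.slice(1), { stdio: 'inherit' })
}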
11 changes: 0 additions & 11 deletions db/migrations/1666363113857-Data.js

This file was deleted.

13 changes: 13 additions & 0 deletions db/migrations/1674844779131-Data.js
@@ -0,0 +1,13 @@
module.exports = class Data1674844779131 {
name = 'Data1674844779131'

async up(db) {
await db.query(`CREATE TABLE "burn" ("id" character varying NOT NULL, "block" integer NOT NULL, "address" text NOT NULL, "value" numeric NOT NULL, "tx_hash" text NOT NULL, CONSTRAINT "PK_dcb4f14ee4534154b31116553f0" PRIMARY KEY ("id"))`)
await db.query(`CREATE INDEX "IDX_fc3726cbc7f5d4edf4340ae298" ON "burn" ("address") `)
}

async down(db) {
await db.query(`DROP TABLE "burn"`)
await db.query(`DROP INDEX "public"."IDX_fc3726cbc7f5d4edf4340ae298"`)
}
}
8 changes: 5 additions & 3 deletions schema.graphql
@@ -1,5 +1,7 @@
- type MyEntity @entity {
+ type Burn @entity {
id: ID!
-   foo: String!
+   block: Int!
+   address: String! @index
+   value: BigInt!
+   txHash: String!
}
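The @index directive on address is what produces the CREATE INDEX statement in the generated migration above; it is there to keep per-address lookups cheap once burns are indexed. A usage sketch follows (not part of this commit): Store.findBy is assumed from @subsquid/typeorm-store, and the address value is a placeholder.

import { Store } from '@subsquid/typeorm-store'
import { Burn } from './model'

// Fetch all burns recorded for a given address (backed by the index on "address")
async function burnsByAddress(store: Store, address: string): Promise<Burn[]> {
  return store.findBy(Burn, { address })
}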

25 changes: 25 additions & 0 deletions src/model/generated/burn.model.ts
@@ -0,0 +1,25 @@
import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm"
import * as marshal from "./marshal"

@Entity_()
export class Burn {
constructor(props?: Partial<Burn>) {
Object.assign(this, props)
}

@PrimaryColumn_()
id!: string

@Column_("int4", {nullable: false})
block!: number

@Index_()
@Column_("text", {nullable: false})
address!: string

@Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false})
value!: bigint

@Column_("text", {nullable: false})
txHash!: string
}
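The generated constructor accepts Partial<Burn>, and value is exposed as a native bigint that the numeric column's transformer handles on the Postgres side. A construction sketch with placeholder values (not part of the commit; in the processor these fields come from the block header and transaction item):

import { Burn } from './model'

// All values below are placeholders for illustration only
const exampleBurn = new Burn({
  id: '0016500000-bcdef',
  block: 16_500_000,
  address: '0x0000000000000000000000000000000000000000',
  value: 123_456_789n,    // stored via the bigint/numeric transformer
  txHash: '0xabcdef0123'  // placeholder hash
})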
2 changes: 1 addition & 1 deletion src/model/generated/index.ts
@@ -1 +1 @@
- export * from "./myEntity.model"
+ export * from "./burn.model"
14 changes: 0 additions & 14 deletions src/model/generated/myEntity.model.ts

This file was deleted.

33 changes: 27 additions & 6 deletions src/processor.ts
@@ -1,6 +1,8 @@
import { TypeormDatabase } from '@subsquid/typeorm-store';
import {EvmBatchProcessor} from '@subsquid/evm-processor'
import { lookupArchive } from '@subsquid/archive-registry'
+ import assert from 'assert';
+ import { Burn } from './model';

const processor = new EvmBatchProcessor()
.setDataSource({
@@ -24,20 +26,39 @@ const processor = new EvmBatchProcessor()
data: {
transaction: {
from: true,
- input: true,
- to: true
+ value: true,
+ hash: true
}
}
});

+ function formatID(height:any, hash:string) {
+   return `${String(height).padStart(10, '0')}-${hash.slice(3,8)}`
+ }

processor.run(new TypeormDatabase(), async (ctx) => {
+ const burns: Burn[] = []
for (let c of ctx.blocks) {
for (let i of c.items) {
- // apply arbitrary data transformation logic here
- // use ctx.store to persist the data
- ctx.log.info(i, "Next item:")
+ assert(i.kind == 'transaction')
+ // decode and normalize the tx data
+ burns.push(new Burn({
+   id: formatID(c.header.height, i.transaction.hash),
+   block: c.header.height,
+   address: i.transaction.from,
+   value: i.transaction.value,
+   txHash: i.transaction.hash
+ }))
}
}
}
+ // apply vectorized transformations and aggregations
+ const burned = burns.reduce((acc, b) => acc + b.value, 0n)/1_000_000_000n
+ const startBlock = ctx.blocks.at(0)?.header.height
+ const endBlock = ctx.blocks.at(-1)?.header.height
+ ctx.log.info(`Burned ${burned} Gwei from ${startBlock} to ${endBlock}`)

+ // upsert batches of entities with batch-optimized ctx.store.save
+ await ctx.store.save(burns)
});
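Two notes on the processor logic above: formatID zero-pads the block height to ten characters and appends characters 3 through 7 of the transaction hash, giving compact, sortable ids; and the reduce over value divides by 1_000_000_000n to convert wei to Gwei with integer division. A self-contained check of the id scheme (the function body is copied from the diff; the sample hash is illustrative):

function formatID(height: any, hash: string) {
  return `${String(height).padStart(10, '0')}-${hash.slice(3, 8)}`
}

// Zero-padded height plus a 5-character slice of the hash
console.log(formatID(16_500_000, '0xabcdef0123'))  // -> 0016500000-bcdef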
