From 16dc40cc8cac1cac83edeec8d8e823d7ebe661ba Mon Sep 17 00:00:00 2001 From: Jan Kwiatkowski Date: Fri, 19 Apr 2024 12:15:47 +0200 Subject: [PATCH 1/2] added SpartaDEX support --- adapters/sparta/package.json | 28 ++++ adapters/sparta/src/index.ts | 246 +++++++++++++++++++++++++++++ adapters/sparta/src/sdk/config.ts | 17 ++ adapters/sparta/src/sdk/queries.ts | 28 ++++ adapters/sparta/src/sdk/types.ts | 22 +++ adapters/sparta/src/sdk/utils.ts | 57 +++++++ adapters/sparta/tsconfig.json | 101 ++++++++++++ 7 files changed, 499 insertions(+) create mode 100644 adapters/sparta/package.json create mode 100644 adapters/sparta/src/index.ts create mode 100644 adapters/sparta/src/sdk/config.ts create mode 100644 adapters/sparta/src/sdk/queries.ts create mode 100644 adapters/sparta/src/sdk/types.ts create mode 100644 adapters/sparta/src/sdk/utils.ts create mode 100644 adapters/sparta/tsconfig.json diff --git a/adapters/sparta/package.json b/adapters/sparta/package.json new file mode 100644 index 00000000..3d7d8e54 --- /dev/null +++ b/adapters/sparta/package.json @@ -0,0 +1,28 @@ +{ + "name": "sparta-dex", + "version": "1.0.0", + "description": "", + "main": "index.js", + "type": "commonjs", + "scripts": { + "start": "node dist/index.js", + "compile": "tsc", + "debug": "ts-node src/index.ts", + "watch": "tsc -w", + "clear": "rm -rf dist", + "test": "node " + }, + "keywords": [], + "author": "", + "license": "UNLICENSED", + "dependencies": { + "@apollo/client": "^3.9.11", + "csv-writer": "^1.6.0", + "react": "^18.2.0", + "ts-node": "^10.9.2" + }, + "devDependencies": { + "@types/node": "^20.11.17", + "typescript": "^5.3.3" + } +} \ No newline at end of file diff --git a/adapters/sparta/src/index.ts b/adapters/sparta/src/index.ts new file mode 100644 index 00000000..385b7ab7 --- /dev/null +++ b/adapters/sparta/src/index.ts @@ -0,0 +1,246 @@ +import { createObjectCsvWriter } from "csv-writer"; +import { + client, + PROTOCOL_DEPLOY_BLOCK, + SNAPSHOT_PERIOD_BLOCKS, + FIRST_TIME, +} from "./sdk/config"; +import { OutputDataSchemaRow, BlockData, UserPosition } from "./sdk/types"; +import { LIQUIDITY_QUERY, TOKEN_TRANSFERS_QUERY } from "./sdk/queries"; +import { + getLatestBlockNumberAndTimestamp, + getTimestampAtBlock, + readLastProcessedBlock, + saveLastProcessedBlock, +} from "./sdk/utils"; +import fs from "fs"; + +// Helper function to create a unique key +function createKey(user: string, tokenAddress: string, block: number): string { + return `${user}-${tokenAddress}-${block}`; +} + +// Processes a block range to calculate user positions for mints and burns +async function processBlockData( + block: number +): Promise<[UserPosition[], UserPosition[]]> { + const mintsDict: UserPosition[] = []; + const burnsDict: UserPosition[] = []; + + const liquidityData = await fetchTransfersForMintsAndBurnsTillBlock(block); + if (!liquidityData) { + console.error(`Failed to fetch liquidity data for block ${block}`); + return [[], []]; // Return empty arrays if data is not available + } + const blockTimestamp = await getTimestampAtBlock(block); + await processTransfers( + liquidityData.mints, + block, + blockTimestamp, + mintsDict, + "mint" + ); + await processTransfers( + liquidityData.burns, + block, + blockTimestamp, + burnsDict, + "burn" + ); + + return [mintsDict, burnsDict]; +} + +// Fetches sender information from a transaction +async function fetchSenderFromTransaction(tx: string) { + const { data } = await client.query({ + query: TOKEN_TRANSFERS_QUERY, + variables: { tx }, + fetchPolicy: "no-cache", + }); + 
return { + user: data.transfer1S[0].from, + token0: data.transfer1S[0].contractId_, + token1: data.transfer1S[1].contractId_, + }; +} + +// General function to process either mints or burns +async function processTransfers( + transfers: any[], + block: number, + blockTimestamp: number, + dictionary: UserPosition[], + type: "mint" | "burn" +) { + for (const transfer of transfers) { + const txId = transfer.transactionHash_; + const txInfo = await fetchSenderFromTransaction(txId); + if (!txInfo) { + console.error(`Failed to fetch sender for transaction ${txId}`); + continue; + } + + let user; + if (type === "mint") { + user = txInfo.user; + } else { + user = transfer.to; + } + + dictionary.push({ + block_number: block, + timestamp: blockTimestamp, + user: user, + token: txInfo.token0, + balance: transfer.amount0, + }); + + dictionary.push({ + block_number: block, + timestamp: blockTimestamp, + user: user, + token: txInfo.token1, + balance: transfer.amount1, + }); + } +} + +// Fetches transactions related to liquidity events +async function fetchTransfersForMintsAndBurnsTillBlock(blockNumber: number) { + const { data } = await client.query({ + query: LIQUIDITY_QUERY, + variables: { blockNumber }, + fetchPolicy: "no-cache", + }); + return data; +} + +function calculateUserPositions( + deposits: UserPosition[], + withdrawals: UserPosition[] +): UserPosition[] { + const userPositionsMap: Map = new Map(); + + // Helper function to process both deposits and withdrawals + const processPosition = (position: UserPosition, isDeposit: boolean) => { + const key = createKey(position.user, position.token, position.block_number); + const amountChange = + BigInt(position.balance) * (isDeposit ? BigInt(1) : BigInt(-1)); + + const existing = userPositionsMap.get(key); + if (existing) { + existing.balance += amountChange; + } else { + userPositionsMap.set(key, { + block_number: position.block_number, + timestamp: position.timestamp, + user: position.user, + token: position.token, + balance: amountChange, + }); + } + }; + + // Process each deposit and withdrawal + deposits.forEach((deposit) => processPosition(deposit, true)); + withdrawals.forEach((withdrawal) => processPosition(withdrawal, false)); + + return Array.from(userPositionsMap.values()); +} + +function convertToOutputDataSchema( + userPositions: UserPosition[] +): OutputDataSchemaRow[] { + return userPositions.flatMap((userPosition) => [ + { + block_number: userPosition.block_number, + timestamp: userPosition.timestamp, + user_address: userPosition.user, + token_address: userPosition.token, + token_balance: userPosition.balance, // Keep as bigint + token_symbol: "", // Adjust accordingly if you have the data + usd_price: 0, // Adjust if you need to calculate this value + }, + ]); +} + +// Get block ranges for processing +async function getBlockRangesToFetch() { + const startBlock = FIRST_TIME + ? 
PROTOCOL_DEPLOY_BLOCK
+    : readLastProcessedBlock();
+
+  if (!startBlock) {
+    console.error("Failed to read last processed block");
+    return [];
+  }
+
+  const { blockNumber } = await getLatestBlockNumberAndTimestamp();
+
+  console.log("Fetching blocks from", startBlock, "to", blockNumber);
+
+  const blocks = [];
+  for (let i = startBlock; i <= blockNumber; i += SNAPSHOT_PERIOD_BLOCKS) {
+    blocks.push(i);
+  }
+  return blocks;
+}
+
+// Saves processed data to a CSV file
+async function saveToCSV(outputData: OutputDataSchemaRow[]) {
+  const csvPath = "output.csv";
+  const fileExists = fs.existsSync(csvPath);
+
+  const csvWriter = createObjectCsvWriter({
+    path: csvPath,
+    header: [
+      { id: "block_number", title: "Block Number" },
+      { id: "timestamp", title: "Timestamp" },
+      { id: "user_address", title: "User Address" },
+      { id: "token_address", title: "Token Address" },
+      { id: "token_balance", title: "Token Balance" },
+      { id: "token_symbol", title: "Token Symbol" },
+      { id: "usd_price", title: "USD Price" },
+    ],
+    append: fileExists,
+  });
+
+  await csvWriter.writeRecords(outputData);
+  console.log("CSV file has been written successfully");
+}
+
+export const getUserTVLByBlock = async (blocks: BlockData) => {
+  const [deposits, withdrawals] = await processBlockData(blocks.blockNumber);
+  const userPositions = calculateUserPositions(deposits, withdrawals);
+  return convertToOutputDataSchema(userPositions);
+};
+async function main() {
+  console.log(`Starting data fetching process mode: ${FIRST_TIME}`);
+  const blocks = await getBlockRangesToFetch();
+
+  const userData: OutputDataSchemaRow[] = [];
+
+  let lastblock = 0;
+  try {
+    for (const block of blocks) {
+      const blockData = await getUserTVLByBlock({
+        blockNumber: block,
+        blockTimestamp: 0,
+      });
+      userData.push(...blockData);
+      console.log("Processed block", block);
+      lastblock = block;
+    }
+  } catch (error: any) {
+    console.error("Error processing block", lastblock, error.message);
+  } finally {
+    saveLastProcessedBlock(lastblock);
+  }
+
+  await saveToCSV(userData);
+}
+
+// IMPORTANT: config::FIRST_TIME is set to true by default
+// after initial fetch set it to false
+main().catch(console.error);
diff --git a/adapters/sparta/src/sdk/config.ts b/adapters/sparta/src/sdk/config.ts
new file mode 100644
index 00000000..5d38c418
--- /dev/null
+++ b/adapters/sparta/src/sdk/config.ts
@@ -0,0 +1,17 @@
+import { ApolloClient, InMemoryCache } from "@apollo/client";
+
+export const SPARTA_SUBGRAPH_QUERY_URL =
+  "https://api.goldsky.com/api/public/project_clv137yzf5wmt01w2bv2f4cgk/subgraphs/sparta-linea/1/gn";
+
+export const LINEA_RPC = "https://rpc.linea.build";
+
+export const client = new ApolloClient({
+  uri: SPARTA_SUBGRAPH_QUERY_URL,
+  cache: new InMemoryCache(),
+});
+
+// snapshot should be taken every 1 hour, average block time on linea is 11.5 seconds
+export const SNAPSHOT_PERIOD_BLOCKS = 311;
+export const PROTOCOL_DEPLOY_BLOCK = 3811977;
+
+export const FIRST_TIME = true;
diff --git a/adapters/sparta/src/sdk/queries.ts b/adapters/sparta/src/sdk/queries.ts
new file mode 100644
index 00000000..554d1ac4
--- /dev/null
+++ b/adapters/sparta/src/sdk/queries.ts
@@ -0,0 +1,28 @@
+import { gql } from "@apollo/client";
+
+export const LIQUIDITY_QUERY = gql`
+  query GetLiquidityBalances($blockNumber: Int!)
{
+    mints(where: { block_number_lte: $blockNumber }) {
+      amount0
+      amount1
+      contractId_
+      transactionHash_
+    }
+    burns(where: { block_number_lte: $blockNumber }) {
+      to
+      amount0
+      amount1
+      contractId_
+      transactionHash_
+    }
+  }
+`;
+
+export const TOKEN_TRANSFERS_QUERY = gql`
+  query GetLiquidityTransfers($tx: String!) {
+    transfer1S(where: { transactionHash__contains: $tx }) {
+      from
+      contractId_
+    }
+  }
+`;
diff --git a/adapters/sparta/src/sdk/types.ts b/adapters/sparta/src/sdk/types.ts
new file mode 100644
index 00000000..a04f8432
--- /dev/null
+++ b/adapters/sparta/src/sdk/types.ts
@@ -0,0 +1,22 @@
+export interface BlockData {
+  blockNumber: number;
+  blockTimestamp: number;
+}
+
+export type OutputDataSchemaRow = {
+  block_number: number;
+  timestamp: number;
+  user_address: string;
+  token_address: string;
+  token_balance: bigint;
+  token_symbol: string;
+  usd_price: number;
+};
+
+export type UserPosition = {
+  block_number: number;
+  timestamp: number;
+  user: string;
+  token: string;
+  balance: bigint;
+};
diff --git a/adapters/sparta/src/sdk/utils.ts b/adapters/sparta/src/sdk/utils.ts
new file mode 100644
index 00000000..7a3639d5
--- /dev/null
+++ b/adapters/sparta/src/sdk/utils.ts
@@ -0,0 +1,57 @@
+import fs from "fs";
+import path from "path";
+
+import { LINEA_RPC } from "./config";
+
+const LAST_BLOCK_FILE = path.join(__dirname, "lastBlock.txt");
+
+export const post = async (url: string, data: any): Promise<any> => {
+  const response = await fetch(url, {
+    method: "POST",
+    headers: {
+      "Content-Type": "application/json",
+      Accept: "application/json",
+    },
+    body: JSON.stringify(data),
+  });
+  return await response.json();
+};
+
+export const getLatestBlockNumberAndTimestamp = async () => {
+  const data = await post(LINEA_RPC, {
+    jsonrpc: "2.0",
+    method: "eth_getBlockByNumber",
+    params: ["latest", false],
+    id: 1,
+  });
+  const blockNumber = parseInt(data.result.number);
+  const blockTimestamp = parseInt(data.result.timestamp);
+  return { blockNumber, blockTimestamp };
+};
+
+export const getTimestampAtBlock = async (blockNumber: number) => {
+  const data = await post(LINEA_RPC, {
+    jsonrpc: "2.0",
+    method: "eth_getBlockByNumber",
+    params: ["0x" + blockNumber.toString(16), true],
+    id: 1,
+  });
+  return parseInt(data.result.timestamp);
+};
+
+export function readLastProcessedBlock(): number | null {
+  try {
+    if (fs.existsSync(LAST_BLOCK_FILE)) {
+      const content = fs.readFileSync(LAST_BLOCK_FILE, "utf8");
+      return parseInt(content, 10);
+    }
+  } catch (error) {
+    console.error("Failed to read last processed block:", error);
+  }
+  return null;
+}
+
+export function saveLastProcessedBlock(blockNumber: number) {
+  console.log("Saving last processed block:", blockNumber);
+  fs.writeFileSync(LAST_BLOCK_FILE, blockNumber.toString(), "utf8");
+}
diff --git a/adapters/sparta/tsconfig.json b/adapters/sparta/tsconfig.json
new file mode 100644
index 00000000..8df9e19a
--- /dev/null
+++ b/adapters/sparta/tsconfig.json
@@ -0,0 +1,101 @@
+{
+  "compilerOptions": {
+    /* Visit https://aka.ms/tsconfig to read more about this file */
+    /* Projects */
+    // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
+    // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
+    // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file.
*/ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + /* Language and Environment */ + "target": "es2022", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ + // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ + // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + /* Modules */ + "module": "commonjs", /* Specify what module code is generated. */ + "rootDir": "src/", /* Specify the root folder within your source files. */ + // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ + // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ + // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ + // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ + // "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ + // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ + // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ + // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ + // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ + // "resolveJsonModule": true, /* Enable importing .json files. */ + // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. 
*/ + // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ + /* JavaScript Support */ + // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ + // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + /* Emit */ + // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + // "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ + // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ + "outDir": "dist/", /* Specify an output folder for all emitted files. */ + // "removeComments": true, /* Disable emitting comments. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ + // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ + "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. 
*/ + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ + /* Type Checking */ + "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/ + } +} \ No newline at end of file From f07765b10884ed1d92f5fccf98cf6bf61351ef7f Mon Sep 17 00:00:00 2001 From: Jan Kwiatkowski Date: Mon, 22 Apr 2024 18:09:40 +0200 Subject: [PATCH 2/2] tvl calulation from reserves --- adapters/sparta/src/index.ts | 335 ++++++++++++++++++----------- adapters/sparta/src/sdk/config.ts | 24 +++ adapters/sparta/src/sdk/queries.ts | 29 ++- adapters/sparta/src/sdk/types.ts | 54 +++++ 4 files changed, 300 insertions(+), 142 deletions(-) diff --git a/adapters/sparta/src/index.ts b/adapters/sparta/src/index.ts index 385b7ab7..e7e735f4 100644 --- a/adapters/sparta/src/index.ts +++ b/adapters/sparta/src/index.ts @@ -4,9 +4,20 @@ import { PROTOCOL_DEPLOY_BLOCK, SNAPSHOT_PERIOD_BLOCKS, FIRST_TIME, + POOL_TOKENS, } from "./sdk/config"; -import { OutputDataSchemaRow, BlockData, UserPosition } from "./sdk/types"; -import { LIQUIDITY_QUERY, TOKEN_TRANSFERS_QUERY } from "./sdk/queries"; +import { + OutputDataSchemaRow, + BlockData, + UserPositions, + Sync, + Transaction, + CumulativePositions, + Reserves, + UserReserves, + UserPosition, +} from "./sdk/types"; +import { TRANSFERS_QUERY, SYNCS_QUERY } from "./sdk/queries"; import { getLatestBlockNumberAndTimestamp, getTimestampAtBlock, @@ -15,154 +26,229 @@ import { } from "./sdk/utils"; import fs from "fs"; -// Helper function to create a unique key -function createKey(user: string, tokenAddress: string, block: number): string { - return `${user}-${tokenAddress}-${block}`; -} - // Processes a block range to calculate user positions for mints and burns -async function processBlockData( - block: number -): Promise<[UserPosition[], UserPosition[]]> { - const mintsDict: UserPosition[] = []; - const burnsDict: UserPosition[] = []; - - const liquidityData = await fetchTransfersForMintsAndBurnsTillBlock(block); - if (!liquidityData) { - console.error(`Failed to fetch liquidity data for block ${block}`); - return [[], []]; // Return empty arrays if data is not available - } - const blockTimestamp = await getTimestampAtBlock(block); - await processTransfers( - liquidityData.mints, - block, - blockTimestamp, - mintsDict, - "mint" - ); - await processTransfers( - liquidityData.burns, - block, - blockTimestamp, - burnsDict, - "burn" +async function processBlockData(block: number): Promise { + // fetch lp transfers up to block + const liquidityData = await fetchTransfers(block); + const { userPositions, cumulativePositions } = + processTransactions(liquidityData); + + // get reserves at block + const reservesSnapshotAtBlock = await fetchReserves(block); + + // calculate tokens based on reserves + const userReserves = calculateUserReservePortion( + userPositions, + cumulativePositions, + reservesSnapshotAtBlock ); - return [mintsDict, burnsDict]; + const timestamp = await getTimestampAtBlock(block); + + // convert userReserves to userPositions + return convertToUserPositions(userReserves, block, timestamp); } -// Fetches sender information from a transaction -async function fetchSenderFromTransaction(tx: string) { - const { data } = await client.query({ - query: TOKEN_TRANSFERS_QUERY, - variables: { tx }, - fetchPolicy: "no-cache", +function convertToUserPositions( + userData: UserReserves, + block_number: number, + timestamp: number +): UserPosition[] { + const tempResults: Record = {}; + + Object.keys(userData).forEach((user) => { + const contracts = userData[user]; + Object.keys(contracts).forEach((contractId) => { + const details = contracts[contractId]; + + // Process token0 + const key0 = `${user}-${details.token0}`; + if 
(!tempResults[key0]) { + tempResults[key0] = { + block_number, + timestamp, + user, + token: details.token0, + balance: details.amount0, + }; + } else { + tempResults[key0].balance += details.amount0; + } + + // Process token1 + const key1 = `${user}-${details.token1}`; + if (!tempResults[key1]) { + tempResults[key1] = { + block_number, + timestamp, + user, + token: details.token1, + balance: details.amount1, + }; + } else { + tempResults[key1].balance += details.amount1; + } + }); }); - return { - user: data.transfer1S[0].from, - token0: data.transfer1S[0].contractId_, - token1: data.transfer1S[1].contractId_, - }; + + // Convert the map to an array of UserPosition + return Object.values(tempResults); } +function calculateUserReservePortion( + userPositions: UserPositions, + totalSupply: CumulativePositions, + reserves: Reserves +): UserReserves { + const userReserves: UserReserves = {}; -// General function to process either mints or burns -async function processTransfers( - transfers: any[], - block: number, - blockTimestamp: number, - dictionary: UserPosition[], - type: "mint" | "burn" -) { - for (const transfer of transfers) { - const txId = transfer.transactionHash_; - const txInfo = await fetchSenderFromTransaction(txId); - if (!txInfo) { - console.error(`Failed to fetch sender for transaction ${txId}`); - continue; + Object.keys(userPositions).forEach((contractId) => { + if ( + !totalSupply[contractId] || + !reserves[contractId] || + !POOL_TOKENS[contractId] + ) { + console.log(`Missing data for contract ID: ${contractId}`); + return; } - let user; - if (type === "mint") { - user = txInfo.user; - } else { - user = transfer.to; - } + Object.keys(userPositions[contractId]).forEach((user) => { + const userPosition = userPositions[contractId][user]; + const total = totalSupply[contractId]; - dictionary.push({ - block_number: block, - timestamp: blockTimestamp, - user: user, - token: txInfo.token0, - balance: transfer.amount0, - }); + const share = userPosition / total; + const reserve0 = parseInt(reserves[contractId].reserve0.toString()); + const reserve1 = parseInt(reserves[contractId].reserve1.toString()); + const token0 = POOL_TOKENS[contractId].token0; + const token1 = POOL_TOKENS[contractId].token1; + + if (!userReserves[user]) { + userReserves[user] = {}; + } - dictionary.push({ - block_number: block, - timestamp: blockTimestamp, - user: user, - token: txInfo.token1, - balance: transfer.amount1, + userReserves[user][contractId] = { + amount0: BigInt(share * reserve0), + amount1: BigInt(share * reserve1), + token0: token0, + token1: token1, + }; }); - } + }); + + return userReserves; } -// Fetches transactions related to liquidity events -async function fetchTransfersForMintsAndBurnsTillBlock(blockNumber: number) { +function processTransactions(transactions: Transaction[]): { + userPositions: UserPositions; + cumulativePositions: CumulativePositions; +} { + const userPositions: UserPositions = {}; + const cumulativePositions: CumulativePositions = {}; + + transactions.forEach((transaction) => { + // Normalize addresses for case-insensitive comparison + const fromAddress = transaction.from.toLowerCase(); + const toAddress = transaction.to.toLowerCase(); + const contractId = transaction.contractId_.toLowerCase(); + + // Skip transactions where 'from' or 'to' match the contract ID, or both 'from' and 'to' are zero addresses + if ( + fromAddress === contractId || + toAddress === contractId || + (fromAddress === "0x0000000000000000000000000000000000000000" && + toAddress === 
"0x0000000000000000000000000000000000000000") + ) { + return; + } + + // Initialize cumulativePositions if not already set + if (!cumulativePositions[contractId]) { + cumulativePositions[contractId] = 0; + } + + // Convert the transaction value from string to integer. + let value = parseInt(transaction.value.toString()); + + // Process transactions that increase liquidity (to address isn't zero) + if (toAddress !== "0x0000000000000000000000000000000000000000") { + if (!userPositions[contractId]) { + userPositions[contractId] = {}; + } + if (!userPositions[contractId][toAddress]) { + userPositions[contractId][toAddress] = 0; + } + userPositions[contractId][toAddress] += value; + cumulativePositions[contractId] += value; + } + + // Process transactions that decrease liquidity (from address isn't zero) + if (fromAddress !== "0x0000000000000000000000000000000000000000") { + if (!userPositions[contractId]) { + userPositions[contractId] = {}; + } + if (!userPositions[contractId][fromAddress]) { + userPositions[contractId][fromAddress] = 0; + } + userPositions[contractId][fromAddress] -= value; + cumulativePositions[contractId] -= value; + } + }); + + return { userPositions, cumulativePositions }; +} + +async function fetchTransfers(blockNumber: number) { const { data } = await client.query({ - query: LIQUIDITY_QUERY, + query: TRANSFERS_QUERY, variables: { blockNumber }, fetchPolicy: "no-cache", }); - return data; + return data.transfers; } -function calculateUserPositions( - deposits: UserPosition[], - withdrawals: UserPosition[] -): UserPosition[] { - const userPositionsMap: Map = new Map(); - - // Helper function to process both deposits and withdrawals - const processPosition = (position: UserPosition, isDeposit: boolean) => { - const key = createKey(position.user, position.token, position.block_number); - const amountChange = - BigInt(position.balance) * (isDeposit ? 
BigInt(1) : BigInt(-1)); - - const existing = userPositionsMap.get(key); - if (existing) { - existing.balance += amountChange; - } else { - userPositionsMap.set(key, { - block_number: position.block_number, - timestamp: position.timestamp, - user: position.user, - token: position.token, - balance: amountChange, - }); +async function fetchReserves(blockNumber: number): Promise { + const { data } = await client.query({ + query: SYNCS_QUERY, + variables: { blockNumber }, + fetchPolicy: "no-cache", + }); + + const latestPerContractId: Record = {}; + const reserves: Reserves = {}; + + data.syncs.forEach((sync: Sync) => { + const existingEntry = latestPerContractId[sync.contractId_]; + if ( + !existingEntry || + new Date(sync.timestamp_) > new Date(existingEntry.timestamp_) + ) { + latestPerContractId[sync.contractId_] = sync; } - }; + }); - // Process each deposit and withdrawal - deposits.forEach((deposit) => processPosition(deposit, true)); - withdrawals.forEach((withdrawal) => processPosition(withdrawal, false)); + Object.values(latestPerContractId).forEach((sync) => { + reserves[sync.contractId_] = { + reserve0: sync.reserve0, + reserve1: sync.reserve1, + }; + }); - return Array.from(userPositionsMap.values()); + return reserves; } function convertToOutputDataSchema( userPositions: UserPosition[] ): OutputDataSchemaRow[] { - return userPositions.flatMap((userPosition) => [ - { + return userPositions.map((userPosition) => { + return { block_number: userPosition.block_number, timestamp: userPosition.timestamp, user_address: userPosition.user, token_address: userPosition.token, - token_balance: userPosition.balance, // Keep as bigint - token_symbol: "", // Adjust accordingly if you have the data - usd_price: 0, // Adjust if you need to calculate this value - }, - ]); + token_balance: BigInt(userPosition.balance), // Ensure balance is treated as bigint + token_symbol: "", // You may want to fill this based on additional token info you might have + usd_price: 0, // Adjust if you need to calculate this value or pull from another source + }; + }); } // Get block ranges for processing @@ -178,8 +264,6 @@ async function getBlockRangesToFetch() { const { blockNumber } = await getLatestBlockNumberAndTimestamp(); - console.log("Fetching blocks from", startBlock, "to", blockNumber); - const blocks = []; for (let i = startBlock; i <= blockNumber; i += SNAPSHOT_PERIOD_BLOCKS) { blocks.push(i); @@ -211,16 +295,14 @@ async function saveToCSV(outputData: OutputDataSchemaRow[]) { } export const getUserTVLByBlock = async (blocks: BlockData) => { - const [deposits, withdrawals] = await processBlockData(blocks.blockNumber); - const userPositions = calculateUserPositions(deposits, withdrawals); - return convertToOutputDataSchema(userPositions); + const data: UserPosition[] = await processBlockData(blocks.blockNumber); + return convertToOutputDataSchema(data); }; + async function main() { console.log(`Starting data fetching process mode: ${FIRST_TIME}`); const blocks = await getBlockRangesToFetch(); - const userData: OutputDataSchemaRow[] = []; - let lastblock = 0; try { for (const block of blocks) { @@ -228,8 +310,9 @@ async function main() { blockNumber: block, blockTimestamp: 0, }); - userData.push(...blockData); + // userData.push(...blockData); console.log("Processed block", block); + await saveToCSV(blockData); lastblock = block; } } catch (error: any) { @@ -237,8 +320,6 @@ async function main() { } finally { saveLastProcessedBlock(lastblock); } - - await saveToCSV(userData); } // IMPORTANT: 
config::FIRST_TIME is set to true be default diff --git a/adapters/sparta/src/sdk/config.ts b/adapters/sparta/src/sdk/config.ts index 5d38c418..0beb7038 100644 --- a/adapters/sparta/src/sdk/config.ts +++ b/adapters/sparta/src/sdk/config.ts @@ -1,4 +1,5 @@ import { ApolloClient, InMemoryCache } from "@apollo/client"; +import { PoolTokens } from "./types"; export const SPARTA_SUBGRAPH_QUERY_URL = "https://api.goldsky.com/api/public/project_clv137yzf5wmt01w2bv2f4cgk/subgraphs/sparta-linea/1/gn"; @@ -15,3 +16,26 @@ export const SNAPSHOT_PERIOD_BLOCKS = 311; export const PROTOCOL_DEPLOY_BLOCK = 3811977; export const FIRST_TIME = true; + +export const POOL_TOKENS: PoolTokens = { + "0x0460c78bd496ca8e9483e4f0655a28be1e90a89b": { + token0: "0x176211869ca2b568f2a7d4ee941e073a821ee1ff", + token1: "0xa219439258ca9da29e9cc4ce5596924745e12b93", + }, + "0x30cc8a4f62f1c89bf4246196901e27982be4fd30": { + token0: "0x11F98c7E42A367DaB4f200d2fdc460fb445CE9a8", + token1: "0x176211869ca2b568f2a7d4ee941e073a821ee1ff", + }, + "0x51a056cc4eb7d1feb896554f97aa01805d41f190": { + token0: "0x176211869ca2b568f2a7d4ee941e073a821ee1ff", + token1: "0xe5d7c2a44ffddf6b295a15c148167daaaf5cf34f", + }, + "0x38d4b2627ff87911410129849246a1a19f586873": { + token0: "0x3aab2285ddcddad8edf438c1bab47e1a9d05a9b4", + token1: "0xe5d7c2a44ffddf6b295a15c148167daaaf5cf34f", + }, + "0x6a4d34cea32ecc5be2fc3ec97ce629f2b4c72334": { + token0: "0x176211869ca2b568f2a7d4ee941e073a821ee1ff", + token1: "0x580e933d90091b9ce380740e3a4a39c67eb85b4c", + }, +}; diff --git a/adapters/sparta/src/sdk/queries.ts b/adapters/sparta/src/sdk/queries.ts index 554d1ac4..0813e58c 100644 --- a/adapters/sparta/src/sdk/queries.ts +++ b/adapters/sparta/src/sdk/queries.ts @@ -1,28 +1,27 @@ import { gql } from "@apollo/client"; -export const LIQUIDITY_QUERY = gql` - query GetLiquidityBalances($blockNumber: Int!) { - mints(where: { block_number_lte: $blockNumber }) { - amount0 - amount1 - contractId_ - transactionHash_ - } - burns(where: { block_number_lte: $blockNumber }) { +export const TRANSFERS_QUERY = gql` + query GetLiquidityTransfers($blockNumber: Int!) { + transfers(where: { block_number_lte: $blockNumber }) { + from to - amount0 - amount1 + value contractId_ transactionHash_ } } `; -export const TOKEN_TRANSFERS_QUERY = gql` - query GetLiquidityTransfers($tx: String!) { - transfer1S(where: { transactionHash__contains: $tx }) { - from +export const SYNCS_QUERY = gql` + query GetSyncs($blockNumber: Int!) 
{ + syncs( + where: { block_number_lte: $blockNumber } + orderBy: timestamp_ + orderDirection: desc + ) { contractId_ + reserve0 + reserve1 } } `; diff --git a/adapters/sparta/src/sdk/types.ts b/adapters/sparta/src/sdk/types.ts index a04f8432..6eb20083 100644 --- a/adapters/sparta/src/sdk/types.ts +++ b/adapters/sparta/src/sdk/types.ts @@ -13,6 +13,29 @@ export type OutputDataSchemaRow = { usd_price: number; }; +export interface UserPositions { + [contractId: string]: { + [user: string]: number; + }; +} + +interface UserReserve { + amount0: bigint; + amount1: bigint; + token0: string; + token1: string; +} + +export interface UserReserves { + [user: string]: { + [contractId: string]: UserReserve; + }; +} + +export interface CumulativePositions { + [contractId: string]: number; +} + export type UserPosition = { block_number: number; timestamp: number; @@ -20,3 +43,34 @@ export type UserPosition = { token: string; balance: bigint; }; + +export interface Sync { + contractId_: string; + reserve0: number; + reserve1: number; + timestamp_: string; +} + +export interface Transaction { + from: string; + to: string; + value: number; + contractId_: string; + transactionHash_: string; +} + +export interface Reserves { + [key: string]: { + reserve0: number; + reserve1: number; + }; +} + +interface TokenInfo { + token0: string; + token1: string; +} + +export interface PoolTokens { + [contractId: string]: TokenInfo; +}
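
Usage sketch (not part of the patches above): a minimal, hypothetical driver showing how the exported getUserTVLByBlock entry point could be invoked for a single snapshot block. It assumes the adapter is imported from adapters/sparta/src/index.ts and reuses getLatestBlockNumberAndTimestamp from src/sdk/utils.ts; the script name and import paths are illustrative only.

// Hypothetical driver script; assumes the module layout added by the patches above.
import { getUserTVLByBlock } from "./src/index";
import { getLatestBlockNumberAndTimestamp } from "./src/sdk/utils";

async function snapshotLatestBlock() {
  // Resolve the latest Linea block and its timestamp via the configured RPC.
  const { blockNumber, blockTimestamp } = await getLatestBlockNumberAndTimestamp();

  // getUserTVLByBlock returns OutputDataSchemaRow[]; token_balance is a bigint.
  const rows = await getUserTVLByBlock({ blockNumber, blockTimestamp });

  for (const row of rows) {
    console.log(row.user_address, row.token_address, row.token_balance.toString());
  }
}

snapshotLatestBlock().catch(console.error);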