Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/main' into develop
Browse files Browse the repository at this point in the history
  • Loading branch information
Apegurus committed May 8, 2024
2 parents cd0bb52 + 06caa25 commit 426c957
Show file tree
Hide file tree
Showing 8 changed files with 301 additions and 241 deletions.
11 changes: 7 additions & 4 deletions adapters/connext/src/utils/getUserTvlByBlock.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { getCompositeBalances, getLpAccountBalanceAtBlock } from "./subgraph";
import { getBlock, getCompositeBalances, getLpAccountBalanceAtBlock } from "./subgraph";
import { BlockData, OutputDataSchemaRow } from "./types";

export const getUserTVLByBlock = async (blocks: BlockData): Promise<OutputDataSchemaRow[]> => {
Expand All @@ -9,13 +9,16 @@ export const getUserTVLByBlock = async (blocks: BlockData): Promise<OutputDataSc
// get the composite balances
const composite = await getCompositeBalances(data);

// get block info
const { timestamp } = await getBlock(blockNumber);

// format into output
const results: OutputDataSchemaRow[] = [];
composite.forEach(({ block, modified, account, underlyingBalances, underlyingTokens }) => {
composite.forEach(({ account, underlyingBalances, underlyingTokens }) => {
results.push(...underlyingBalances.map((b, i) => {
const formatted: OutputDataSchemaRow = {
timestamp: +modified,
block_number: +block,
timestamp: +timestamp.toString(),
block_number: blockNumber,
user_address: account.id,
token_address: underlyingTokens[i],
token_balance: BigInt(b),
Expand Down
7 changes: 6 additions & 1 deletion adapters/connext/src/utils/subgraph.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { PoolInformation, getPoolInformationFromLpToken } from "./cartographer";
import { LpAccountBalanceHourly, SubgraphResult } from "./types";
import { linea } from "viem/chains";
import { createPublicClient, formatUnits, http, parseUnits } from "viem";
import { createPublicClient, http, parseUnits } from "viem";

export const CONNEXT_SUBGRAPH_QUERY_URL = "https://api.goldsky.com/api/public/project_clssc64y57n5r010yeoly05up/subgraphs/amarok-stableswap-analytics/1.0/gn";
export const LINEA_CHAIN_ID = 59144;
Expand Down Expand Up @@ -209,3 +209,8 @@ const appendSubgraphData = (data: LpAccountBalanceHourly[], existing: Map<string
})
}

/**
 * Fetches the full block data for a given block height from the Linea chain.
 *
 * @param blockNumber - the block height to look up
 * @returns the block object as returned by the viem public client
 *          (includes the `timestamp` field used by callers)
 */
export const getBlock = async (blockNumber: number) => {
    const lineaClient = createPublicClient({ chain: linea, transport: http() });
    return await lineaClient.getBlock({ blockNumber: BigInt(blockNumber) });
}
78 changes: 68 additions & 10 deletions adapters/interport/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
import {client} from "./utils/client";
import {searchStartBlock, stablecoinFarmAddress, vaultsAddresses, zeroAddress} from "./utils/constants";
import {vaultAbi} from "./utils/vault-abi"
import fs from "fs";
import { write } from "fast-csv";
import csv from 'csv-parser';


interface BlockData {
blockNumber: number;
Expand All @@ -17,22 +21,22 @@ type OutputDataSchemaRow = {
usd_price: number; //assign 0 if not available
};

const getBlockTimestamp = async (blockNumber: bigint) => {
const data = await client.getBlock({
blockNumber: blockNumber
})
return Number(data.timestamp);
}
// const getBlockTimestamp = async (blockNumber: bigint) => {
// const data = await client.getBlock({
// blockNumber: blockNumber
// })
// return Number(data.timestamp);
// }

const collectTransferEvents = async (events: any[], token_symbol: string) => {
const collectTransferEvents = async (events: any[], token_symbol: string, block_timestamp: number) => {
const csvRows: OutputDataSchemaRow[] = [];
for (let i = 0; i < events.length; i++) {
const {
args: {from: senderAddress_address, to: receiver_address, amount: token_balance},
blockNumber,
address: token_address
} = events[i]
const timestamp = await getBlockTimestamp(blockNumber)
const timestamp = block_timestamp
if(senderAddress_address !== stablecoinFarmAddress && senderAddress_address !== zeroAddress) {
csvRows.push({
block_number: Number(blockNumber),
Expand Down Expand Up @@ -62,7 +66,7 @@ const collectTransferEvents = async (events: any[], token_symbol: string) => {
export const getUserTVLByBlock = async (
blocks: BlockData
): Promise<OutputDataSchemaRow[]> => {
const {blockNumber} = blocks
const {blockNumber, blockTimestamp} = blocks
const allCsvRows: OutputDataSchemaRow[] = [];
for (let i = 0; i < vaultsAddresses.length; i++) {
const {address, token_symbol} = vaultsAddresses[i];
Expand All @@ -76,10 +80,64 @@ export const getUserTVLByBlock = async (
fromBlock: BigInt(currentStartingBlock),
toBlock: BigInt(endBlock),
});
const transferCsvRows = await collectTransferEvents(transferEvents, token_symbol);
const transferCsvRows = await collectTransferEvents(transferEvents, token_symbol, blockTimestamp);
allCsvRows.push(...transferCsvRows)
currentStartingBlock = endBlock
}
}
return allCsvRows
}


/**
 * Reads block data from a comma-separated CSV file with `number` and
 * `timestamp` columns (e.g. hourly_blocks.csv).
 *
 * Rows whose block number or timestamp do not parse as integers are
 * silently skipped.
 *
 * @param filePath - path to the CSV file to read
 * @returns all successfully parsed { blockNumber, blockTimestamp } rows
 * @throws propagates any stream read error
 */
const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => {
    const blocks: BlockData[] = [];

    await new Promise<void>((resolve, reject) => {
        fs.createReadStream(filePath)
            .pipe(csv()) // default comma separator — input is a standard CSV
            .on('data', (row) => {
                const blockNumber = parseInt(row.number, 10);
                const blockTimestamp = parseInt(row.timestamp, 10);
                // FIX: check isNaN on the timestamp instead of truthiness,
                // so a legitimate 0 timestamp is not dropped.
                if (!isNaN(blockNumber) && !isNaN(blockTimestamp)) {
                    blocks.push({ blockNumber, blockTimestamp });
                }
            })
            .on('end', () => {
                resolve();
            })
            .on('error', (err) => {
                reject(err);
            });
    });

    return blocks;
};


// Entry point: read the hourly block list, compute the TVL rows for each
// block, and write the combined result to outputData.csv.
readBlocksFromCSV('hourly_blocks.csv').then(async (blocks: any[]) => {
    console.log(blocks);
    const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks

    for (const block of blocks) {
        try {
            const result = await getUserTVLByBlock(block);
            allCsvRows.push(...result);
        } catch (error) {
            // FIX: interpolate the block number; `${block}` printed "[object Object]".
            console.error(`An error occurred for block ${block.blockNumber}:`, error);
        }
    }

    await new Promise<void>((resolve, reject) => {
        const ws = fs.createWriteStream(`outputData.csv`, { flags: 'w' });
        write(allCsvRows, { headers: true })
            .pipe(ws)
            .on("finish", () => {
                console.log(`CSV file has been written.`);
                // FIX: original had a bare `resolve;` (never invoked), so the
                // awaited Promise never settled.
                resolve();
            })
            // FIX: propagate stream errors instead of leaving `reject` unused.
            .on("error", reject);
    });
}).catch((err) => {
    console.error('Error reading CSV file:', err);
});

3 changes: 3 additions & 0 deletions adapters/lynex/src/sdk/lensDetails.ts
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,9 @@ export const fetchUserPools = async (
blockNumber
)) as any;
return res.map((r: any) => {
if (r.status !== 'success') {
throw new Error("RPC call error. Status: " + r.status);
}
return { result: { ...r.result, userAddress } };
}) as LensResponseWithBlock[];
};
Expand Down
4 changes: 3 additions & 1 deletion adapters/lyve/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,13 @@
"author": "",
"license": "UNLICENSED",
"dependencies": {
"fast-csv": "^5.0.1",
"csv-parser": "^3.0.0",
"fast-csv": "^5.0.1",
"lodash": "^4.17.21",
"node-fetch": "^3.3.2"
},
"devDependencies": {
"@types/lodash": "^4.17.1",
"@types/node": "^20.11.30",
"typescript": "^5.4.3"
}
Expand Down
Loading

0 comments on commit 426c957

Please sign in to comment.