
Merge branch 'delta-hq:main' into main
deadshotryker authored Aug 19, 2024
2 parents 3a57e37 + c602991 commit 9347642
Showing 11 changed files with 793 additions and 1 deletion.
25 changes: 25 additions & 0 deletions adapters/gammastrategies/package.json
@@ -0,0 +1,25 @@
{
"name": "gammastrategies",
"version": "1.0.0",
"description": "",
"main": "index.js",
"type": "commonjs",
"scripts": {
"start": "node dist/index.js",
"compile": "tsc",
"watch": "tsc -w",
"clear": "rm -rf dist",
"test": "node "
},
"keywords": [],
"author": "",
"license": "UNLICENSED",
"dependencies": {
"csv-parser": "^3.0.0",
"fast-csv": "^5.0.1"
},
"devDependencies": {
"@types/node": "^20.11.17",
"typescript": "^5.3.3"
}
}
20 changes: 20 additions & 0 deletions adapters/gammastrategies/src/config.ts
@@ -0,0 +1,20 @@
export const PAGE_SIZE = 1000;

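// Protocols with Gamma hypervisors on Linea; each maps to its subgraph URL below.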
export const enum PROTOCOLS {
UNISWAP = 0,
LYNEX = 1,
LINEHUB = 2,
NILE = 3,
}

export const SUBGRAPH_URLS = {
[PROTOCOLS.UNISWAP]:
"https://api.goldsky.com/api/public/project_clols2c0p7fby2nww199i4pdx/subgraphs/gamma-uniswap-linea/latest/gn",
[PROTOCOLS.LYNEX]:
"https://api.goldsky.com/api/public/project_clols2c0p7fby2nww199i4pdx/subgraphs/gamma-lynex-linea/latest/gn",
[PROTOCOLS.LINEHUB]:
"https://api.goldsky.com/api/public/project_clols2c0p7fby2nww199i4pdx/subgraphs/gamma-linehub-linea/latest/gn",
[PROTOCOLS.NILE]:
"https://api.goldsky.com/api/public/project_clols2c0p7fby2nww199i4pdx/subgraphs/gamma-nile-linea/latest/gn",
};

269 changes: 269 additions & 0 deletions adapters/gammastrategies/src/index.ts
@@ -0,0 +1,269 @@
import csv from "csv-parser";
import fs from "fs";
import { write } from "fast-csv";
import { PAGE_SIZE, PROTOCOLS, SUBGRAPH_URLS } from "./config";
import { AccountBalances, BlockData, OutputDataSchemaRow } from "./types";

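// Minimal helper that POSTs a JSON body (here, a GraphQL query) and returns the parsed response.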
const post = async (url: string, data: any): Promise<any> => {
const response = await fetch(url, {
method: "POST",
headers: {
"Content-Type": "application/json",
Accept: "application/json",
},
body: JSON.stringify(data),
});
return await response.json();
};

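// Fetches accounts and their hypervisor shares from the protocol's subgraph,
// paging by account id and recursing until a partial page is returned.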
const getAccountData = async (
protocol: PROTOCOLS,
lastId = "0"
): Promise<AccountBalances> => {
const ACCOUNTS_QUERY = `query {
accounts(
first: ${PAGE_SIZE},
where: { id_gt: "${lastId}" },
orderBy: id,
orderDirection: asc,
){
id
hypervisorShares(
first: 1000,
where: { shares_gt:0 },
) {
hypervisor {
id
pool {
token0 {
id
symbol
},
token1 {
id
symbol
},
},
totalSupply,
tvl0,
tvl1,
tick,
baseLower,
baseUpper,
limitLower,
limitUpper,
},
shares,
}
}
}`;

const responseJson = await post(SUBGRAPH_URLS[protocol], {
query: ACCOUNTS_QUERY,
});

let accountHoldings: AccountBalances = {};
for (const account of responseJson.data.accounts) {
for (const hypeShare of account.hypervisorShares) {
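// A position only counts toward TVL if the hypervisor's current tick lies
// within its base range or its limit range.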
const isBaseInRange =
hypeShare.hypervisor.tick >= hypeShare.hypervisor.baseLower &&
hypeShare.hypervisor.tick <= hypeShare.hypervisor.baseUpper;
const isLimitInRange =
hypeShare.hypervisor.tick >= hypeShare.hypervisor.limitLower &&
hypeShare.hypervisor.tick <= hypeShare.hypervisor.limitUpper;

// Exclude position if not in range
if (!isBaseInRange && !isLimitInRange) {
continue;
}

accountHoldings[account.id] ??= {};

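// Split the hypervisor's token TVL pro rata by the account's share of totalSupply.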
const shareOfPool = hypeShare.shares / hypeShare.hypervisor.totalSupply;
const tvl0Share = Math.round(shareOfPool * hypeShare.hypervisor.tvl0);
const tvl1Share = Math.round(shareOfPool * hypeShare.hypervisor.tvl1);

const token0Address: string = hypeShare.hypervisor.pool.token0.id;
const token1Address: string = hypeShare.hypervisor.pool.token1.id;

if (token0Address in accountHoldings[account.id]) {
accountHoldings[account.id][token0Address].balance += tvl0Share;
} else {
accountHoldings[account.id][token0Address] = {
symbol: hypeShare.hypervisor.pool.token0.symbol,
balance: tvl0Share,
};
}

if (token1Address in accountHoldings[account.id]) {
accountHoldings[account.id][token1Address].balance += tvl1Share;
} else {
accountHoldings[account.id][token1Address] = {
symbol: hypeShare.hypervisor.pool.token1.symbol,
balance: tvl1Share,
};
}
}
}

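// A full page means more accounts may remain; recurse with the last id as the cursor.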
if (responseJson.data.accounts.length === PAGE_SIZE) {
const lastRecord = responseJson.data.accounts[
responseJson.data.accounts.length - 1
] as any;
accountHoldings = {
...accountHoldings,
...(await getAccountData(protocol, lastRecord.id)),
};
}

return accountHoldings;
};

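// Processes blocks sequentially and flushes accumulated rows to outputData.csv in batches.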
export const main = async (blocks: BlockData[]) => {
const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks
const batchSize = 10; // Size of batch to trigger writing to the file
let i = 0;

for (const block of blocks) {
try {
// Retrieve data using block number and timestamp
const csvRows = await getUserTVLByBlock(block);

// Accumulate CSV rows for all blocks
allCsvRows.push(...csvRows);

i++;
console.log(`Processed block ${i}`);

// Write to file when batch size is reached or at the end of loop
if (i % batchSize === 0 || i === blocks.length) {
const ws = fs.createWriteStream(`outputData.csv`, {
flags: i === batchSize ? "w" : "a",
});
write(allCsvRows, { headers: i === batchSize })
.pipe(ws)
.on("finish", () => {
console.log(`CSV file has been written.`);
});

// Clear the accumulated CSV rows
allCsvRows.length = 0;
}
} catch (error) {
console.error(`An error occurred for block ${block.blockNumber}:`, error);
}
}
};

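// Aggregates in-range Gamma holdings across all four protocols into CSV rows for a single block.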
export const getUserTVLByBlock = async (block: BlockData) => {
const { blockNumber, blockTimestamp } = block;
// Retrieve data using block number and timestamp

const protocolData: AccountBalances[] = await Promise.all([
getAccountData(PROTOCOLS.UNISWAP),
getAccountData(PROTOCOLS.LYNEX),
getAccountData(PROTOCOLS.LINEHUB),
getAccountData(PROTOCOLS.NILE),
]);

const allProtocolHoldings: AccountBalances = {};

// Aggregate data from all protocols
protocolData.forEach((protocol) => {
Object.entries(protocol).forEach(([userAddress, tokens]) => {
allProtocolHoldings[userAddress] ??= {};
Object.entries(tokens).forEach(([tokenAddress, token]) => {
if (tokenAddress in allProtocolHoldings[userAddress]) {
allProtocolHoldings[userAddress][tokenAddress].balance +=
token.balance;
} else {
allProtocolHoldings[userAddress][tokenAddress] = {
symbol: token.symbol,
balance: token.balance,
};
}
});
});
});

// Transform to required output
const csvRows: OutputDataSchemaRow[] = [];

Object.entries(allProtocolHoldings).forEach(
([userAddress, tokenBalances]) => {
Object.entries(tokenBalances).forEach(([tokenAddress, token]) => {
csvRows.push({
block_number: blockNumber,
timestamp: blockTimestamp,
user_address: userAddress,
token_address: tokenAddress,
token_balance: token.balance,
token_symbol: token.symbol,
usd_price: 0, // Not available
});
});
}
);

return csvRows;
};

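// Parses hourly_blocks.csv (columns: number, timestamp) into BlockData entries.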
const readBlocksFromCSV = async (filePath: string): Promise<BlockData[]> => {
const blocks: BlockData[] = [];

await new Promise<void>((resolve, reject) => {
fs.createReadStream(filePath)
.pipe(csv()) // Parse rows using csv-parser's default comma separator
.on("data", (row) => {
const blockNumber = parseInt(row.number, 10);
const blockTimestamp = parseInt(row.timestamp, 10);
if (!isNaN(blockNumber) && blockTimestamp) {
blocks.push({ blockNumber: blockNumber, blockTimestamp });
}
})
.on("end", () => {
resolve();
})
.on("error", (err) => {
reject(err);
});
});

return blocks;
};

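// Entry point: read the block list, compute TVL rows for each block, and write outputData.csv.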
readBlocksFromCSV("hourly_blocks.csv")
.then(async (blocks: any[]) => {
const allCsvRows: any[] = []; // Array to accumulate CSV rows for all blocks
const batchSize = 1000; // Size of batch to trigger writing to the file
let i = 0;

for (const block of blocks) {
try {
const result = await getUserTVLByBlock(block);
allCsvRows.push(...result);
} catch (error) {
console.error(
`An error occurred for block ${block.blockNumber}:`,
error
);
}
}
await new Promise((resolve, reject) => {
// const randomTime = Math.random() * 1000;
// setTimeout(resolve, randomTime);
const ws = fs.createWriteStream(`outputData.csv`, { flags: "w" });
write(allCsvRows, { headers: true })
.pipe(ws)
.on("finish", () => {
console.log(`CSV file has been written.`);
resolve();
});
});

// Clear the accumulated CSV rows
// allCsvRows.length = 0;
})
.catch((err) => {
console.error("Error reading CSV file:", err);
});
23 changes: 23 additions & 0 deletions adapters/gammastrategies/src/types.ts
@@ -0,0 +1,23 @@
export type OutputDataSchemaRow = {
block_number: number;
timestamp: number;
user_address: string;
token_address: string;
token_balance: number;
token_symbol: string;
usd_price: number;
};

export interface BlockData {
blockNumber: number;
blockTimestamp: number;
}

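// Nested map: user address -> token address -> { symbol, balance }.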
export interface AccountBalances {
[userAddress: string]: {
[tokenAddress: string]: {
symbol: string;
balance: number;
};
};
}