Merge pull request #61 from miler012/main
Satori implement new functions
nitish-91 authored May 2, 2024
2 parents 7fc3cab + 2cd8c2f commit ddbef0d
Showing 6 changed files with 1,318 additions and 55 deletions.
14 changes: 14 additions & 0 deletions adapters/satori/README.md
@@ -0,0 +1,14 @@
# Satori - TVL by User

In this repo you will find the code responsible for fetching position data from the subgraph and calculating TVL per user.
The main script generates the output as a CSV file, and there is another one that can be used to generate a JSON file.

## How to execute this project?

```
npm install   // install all packages
npm run watch // in another terminal tab
npm run start // in another terminal tab
```

Now you can see the outputData.csv file. That's it.
1,214 changes: 1,214 additions & 0 deletions adapters/satori/outputData.csv

Large diffs are not rendered by default.
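For reviewers' reference, each row of outputData.csv follows the `OutputDataSchemaRow` shape defined in `src/sdk/subgraphDetailsV2.ts`, so the header line should look roughly like this (column order assumed from the interface declaration):

```
block_number,timestamp,user_address,token_address,token_symbol,token_balance
```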

37 changes: 23 additions & 14 deletions adapters/satori/src/index.ts
@@ -1,23 +1,32 @@
import fs from 'fs';
import { write } from 'fast-csv';
import { UserLpSnapshot,getSnapshotsForAddressAtBlock } from './sdk/subgraphDetails';
import { OutputDataSchemaRow,getUserTVLByBlock } from './sdk/subgraphDetailsV2';


interface BlockData {
blockNumber: number;
blockTimestamp: number;
}

const getData = async () => {
const snapshotBlocks: number[] = [
222980,446664,522635,1123644
];
let snapshots: UserLpSnapshot[] = [];
for (const block of snapshotBlocks) {
snapshots = snapshots.concat(await getSnapshotsForAddressAtBlock(block,''))
}

let csvRows: UserLpSnapshot[] = Array.from(new Map(snapshots.map(obj => [obj.user_address + '|' + obj.block_number, obj])).values());;
export const main = async (blocks: BlockData[]) => {
let snapshots: OutputDataSchemaRow[] = [];
for (const { blockNumber, blockTimestamp } of blocks) {
try {
snapshots = snapshots.concat(await getUserTVLByBlock(blockNumber,blockTimestamp))
} catch (error) {
console.error(`An error occurred for block ${blockNumber}:`, error);
}
}
let csvRows: OutputDataSchemaRow[] = Array.from(new Map(snapshots.map(obj => [obj.user_address + '|' + obj.block_number, obj])).values());
console.log(`length:---${csvRows.length}`);
const ws = fs.createWriteStream('outputData.csv');
write(csvRows, { headers: true }).pipe(ws).on('finish', () => {
console.log("CSV file has been written.");
});
}
getData().then(() => {
console.log("Done");
});
};


// main([{blockNumber:1123644,blockTimestamp:1702156132},{blockNumber:517247,blockTimestamp:1702156133}]).then(() => {
// console.log("Done");
// });
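As a quick sanity check of the new entry point, here is a minimal sketch of driving the exported `main` with the block/timestamp pairs from the commented-out call above (the relative import path is an assumption; adjust it to wherever the caller lives):

```
import { main } from './index';

// Block/timestamp pairs reused from the commented-out example above;
// replace them with the snapshot blocks you actually need.
const blocks = [
  { blockNumber: 1123644, blockTimestamp: 1702156132 },
  { blockNumber: 517247, blockTimestamp: 1702156133 },
];

// main() fetches rows per block via getUserTVLByBlock, dedupes them by
// user_address + block_number, and writes them to outputData.csv.
main(blocks).then(() => {
  console.log('Done');
});
```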
33 changes: 0 additions & 33 deletions adapters/satori/src/index_json_file.ts

This file was deleted.

18 changes: 10 additions & 8 deletions adapters/satori/src/sdk/subgraphDetails.ts
@@ -1,6 +1,6 @@
import { ASSET, SYMBOL, SUBGRAPH_URL } from "./config";

export interface UserLpSnapshot {
export interface OutputDataSchemaRow {
block_number:number
timestamp:number
user_address:string
@@ -9,18 +9,19 @@ export interface UserLpSnapshot {
token_balance:number
}

export const getSnapshotsForAddressAtBlock = async (

export const getUserTVLByBlock = async (
blockNumber: number,
address: string,
):Promise<UserLpSnapshot[]> => {
):Promise<OutputDataSchemaRow[]> => {
let subgraphUrl = SUBGRAPH_URL;
let blockQuery = blockNumber !== 0 ? ` block: {number: ${blockNumber}}` : ``;
let ownerQuery = address !== "" ? `owner: "${address.toLowerCase()}"` : ``;

let whereQuery = ownerQuery !== "" ?`where: {${ownerQuery}}`: ``;
let skip = 0;
let fetchNext = true;
let result: UserLpSnapshot[] = [];
let result: OutputDataSchemaRow[] = [];
while(fetchNext){
let query = `{
userLpSnapshots(${whereQuery} ${blockQuery} orderBy: timestamp, first:1000,skip:${skip}){
@@ -29,8 +30,7 @@ export const getSnapshotsForAddressAtBlock = async (
block
timestamp
lpAmount
}
}
}
`;

@@ -42,10 +42,11 @@ export const getSnapshotsForAddressAtBlock = async (
let data = await response.json();
let snapshots = data.data.userLpSnapshots
for (const snapshot of snapshots) {
let userLpSnapshot:UserLpSnapshot = {
user_address:snapshot.user,
let userLpSnapshot:OutputDataSchemaRow = {

block_number:snapshot.block,
timestamp:snapshot.timestamp,
user_address:snapshot.user,
token_address:ASSET,
token_symbol:SYMBOL,
token_balance:snapshot.lpAmount
@@ -60,3 +61,4 @@ export const getSnapshotsForAddressAtBlock = async (
}
return result
}
// getSnapshotsForAddressAtBlock(0,'')
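Note that the trailing comment still references the old name; with the rename in this diff, an equivalent sketch of calling the function would be (import path assumed, relative to `adapters/satori/src`):

```
import { getUserTVLByBlock } from './sdk/subgraphDetails';

// Passing blockNumber 0 skips the block filter and an empty address drops the
// owner filter, mirroring the old getSnapshotsForAddressAtBlock(0, '') example.
getUserTVLByBlock(0, '').then((rows) => {
  console.log(`fetched ${rows.length} userLpSnapshots`);
});
```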
57 changes: 57 additions & 0 deletions adapters/satori/src/sdk/subgraphDetailsV2.ts
@@ -0,0 +1,57 @@
import { ASSET, SYMBOL, SUBGRAPH_URL } from "./config";

export interface OutputDataSchemaRow {
block_number:number
timestamp:number
user_address:string
token_address:string
token_symbol:string
token_balance:number
}


export const getUserTVLByBlock = async (
blockNumber: number,
timestamp: number,
):Promise<OutputDataSchemaRow[]> => {
let subgraphUrl = SUBGRAPH_URL;
let blockQuery = blockNumber !== 0 ? `block: {number: ${blockNumber}}` : ``;
let skip = 0;
let fetchNext = true;
let result: OutputDataSchemaRow[] = [];
while (fetchNext) {
let query = `{
accounts(${blockQuery} first:1000,skip:${skip}){
id
amount
txCount
}
}
`;
let response = await fetch(subgraphUrl, {
method: "POST",
body: JSON.stringify({ query }),
headers: { "Content-Type": "application/json" },
});
let data = await response.json();
let accounts = data.data.accounts
for (const account of accounts) {
let userLpSnapshot:OutputDataSchemaRow = {
block_number:blockNumber,
timestamp:timestamp,
user_address:account.id,
token_address:ASSET,
token_symbol:SYMBOL,
token_balance:account.amount
}
result.push(userLpSnapshot)
}
if(accounts.length < 1000){
fetchNext = false;
}else{
skip += 1000;
}
}

return result
}
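For completeness, a minimal sketch of calling the V2 helper directly; the block number and timestamp are placeholders taken from the commented-out example in `index.ts`:

```
import { getUserTVLByBlock } from './sdk/subgraphDetailsV2';

// Pages through `accounts` 1000 at a time and stamps every row with the
// supplied block number and timestamp.
getUserTVLByBlock(1123644, 1702156132).then((rows) => {
  console.log(`fetched ${rows.length} rows`);
});
```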
