import csv
import json
import os
from collections import defaultdict
from datetime import datetime, timezone
from typing import Dict

from bal_addresses import BalPoolsGauges, Aura, Subgraph
from bal_addresses.errors import ChecksumError, NoResultError
from bal_addresses.utils import to_checksum_address

# Load config from environment
# Set BLOCK to run on a specific block; if unset, the block is derived from TIMESTAMP
BLOCK = os.environ.get("BLOCK")
# Set TIMESTAMP to find the first block after a UTC timestamp when BLOCK is not set
TIMESTAMP = os.environ.get("TIMESTAMP")
# Set POOL_ID to run on only one of the pools listed in run_pools.json instead of all of them
POOL_ID = os.environ.get("POOL_ID")
# Multichain SOON:tm:
CHAIN = "mainnet"

# New pools can be added to run_pools.json to be included in runs
with open("run_pools.json", "r") as f:
    POOLS_TO_RUN_ON_BY_CHAIN = json.load(f)
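
# Example invocations (hypothetical values, shown for illustration only):
#   BLOCK=19000000 python generate_ecosystem_deposits.py        # run at a specific block
#   TIMESTAMP=1700000000 python generate_ecosystem_deposits.py  # run at the first block after this UTC unixtime
#   POOL_ID=0x... python generate_ecosystem_deposits.py         # restrict the run to a single pool
#
# Assumed shape of run_pools.json, based on the fields main() reads below (the name and
# addresses are placeholders, not real pool data):
# {
#   "mainnet": [
#     {"name": "80BAL-20WETH", "pool_id": "0x...", "gauge": "0x..."}
#   ]
# }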


# Derive block and timestamp from the env vars when they are not specified
def set_block_and_timestamp(chain: str, block=None, timestamp=None) -> tuple:
    q = Subgraph(chain)
    if not block:
        if not timestamp:
            # Use 10 minutes ago to make sure subgraphs are up to date
            timestamp = datetime.now(timezone.utc).timestamp() - 600
        block = q.get_first_block_after_utc_timestamp(int(timestamp))
        block = int(block)
        timestamp = int(timestamp)
    else:
        # A block was provided directly, so there is no meaningful timestamp to report
        timestamp = "Block Number Provided"
    return (block, timestamp)


def get_tokens_per_bpt(chain: str, block: int, poolid: str):
    queries = Subgraph(chain)
    result = queries.fetch_graphql_data("core", "get_pool_details", {"id": poolid, "block": block})
    result = result["pool"]
    bpt_total_shares = result["totalShares"]
    bpt_address = result["address"]
    tokens_per_bpt = {}
    for tokeninfo in result["tokens"]:
        # Skip the pool's own BPT if it shows up in its token list
        if tokeninfo["address"].lower() == bpt_address.lower():
            continue
        tokens_per_bpt[tokeninfo["address"]] = float(tokeninfo["balance"]) / float(bpt_total_shares)
    return tokens_per_bpt
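
# Illustrative return value for get_tokens_per_bpt (hypothetical addresses and amounts):
#   {"0xTokenA...": 0.123, "0xTokenB...": 4.56}
# i.e. the amount of each underlying token backing a single BPT at the given block.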


def get_ecosystem_balances_w_csv(
    pool_id: str, gauge_address: str, block: int, name: str, chain="mainnet"
) -> Dict[str, float]:
    gauges = BalPoolsGauges(chain)
    aura = Aura(chain)
    gauge_address = to_checksum_address(gauge_address)
    bpt_balances = defaultdict(float)
    gauge_balances = defaultdict(float)
    aura_balances = defaultdict(float)
    bpts_in_bal_gauge = 0
    bpts_in_aura = 0
    total_circulating_bpts = 0
    total_bpts_counted = 0
    ## Start with raw BPTs
    ecosystem_balances = defaultdict(int, gauges.get_bpt_balances(pool_id, block))
    for address, amount in ecosystem_balances.items():
        ecosystem_balances[address] = float(amount)
        bpt_balances[address] = float(amount)
        total_circulating_bpts += float(amount)
    ## Factor in gauge deposits
    # Subtract the gauge itself
    if gauge_address in ecosystem_balances.keys():
        bpts_in_bal_gauge = ecosystem_balances[gauge_address]
        ecosystem_balances[gauge_address] = 0
    else:
        print(
            f"WARNING: there are no BPTs from {pool_id} staked in the gauge at {gauge_address}. Did you cross wires, or is no one staked?")
    # Add in gauge balances
    checksum = 0
    for address, amount in gauges.get_gauge_deposit_shares(gauge_address, block).items():
        gauge_balances[address] = float(amount)
        ecosystem_balances[address] += float(amount)
        checksum += amount
    if checksum != bpts_in_bal_gauge:
        print(
            f"Warning: {bpts_in_bal_gauge} BPTs were found deposited in a bal gauge and zeroed out, but {checksum} of them were counted as gauge deposits.")
    ## Factor in Aura deposits
    # Subtract the Aura staker proxy itself
    aura_staker = aura.AURA_GAUGE_STAKER_BY_CHAIN[chain]
    if aura_staker in ecosystem_balances.keys():
        bpts_in_aura = ecosystem_balances[aura_staker]
        ecosystem_balances[aura_staker] = 0
    else:
        print(
            f"WARNING: there are no BPTs from {pool_id} staked in Aura. Did you cross wires, or is no one staked?")
    # Add in Aura balances
    checksum = 0
    try:
        aura_shares_by_address = aura.get_aura_pool_shares(gauge_address, block).items()
    except NoResultError as e:
        print(e)
        aura_shares_by_address = defaultdict(int).items()
    for address, amount in aura_shares_by_address:
        aura_balances[address] = amount
        ecosystem_balances[address] += amount
        checksum += amount
    if checksum != bpts_in_aura:
        print(
            f"Warning: {bpts_in_aura} BPTs were found in the Aura proxy and zeroed out, but {checksum} of them were counted as Aura deposits.")
    ## Check everything
    for address, amount in ecosystem_balances.items():
        total_bpts_counted += float(amount)
    print(
        f"Found {total_circulating_bpts} BPTs, of which {bpts_in_bal_gauge} were staked by an address in a bal gauge and {bpts_in_aura} were deposited on Aura at block {block}")
    ## Slight tolerance for rounding
    delta = abs(total_circulating_bpts - total_bpts_counted)
    if delta > 1e-8:
        raise ChecksumError(
            f"initial BPTs found: {total_circulating_bpts}, final BPTs counted: {total_bpts_counted}, the delta is {total_circulating_bpts - total_bpts_counted}")
    ## Build CSV
    name = name.replace("/", "-")  # /'s are path structure
    output_file = f"out/{chain}/{name}/{block}_{pool_id}.csv"
    os.makedirs(os.path.dirname(output_file), exist_ok=True)
    with open(output_file, "w", newline="") as f:
        writer = csv.writer(f)
        writer.writerow(["depositor_address", "bpt_in_wallet", "bpt_in_bal_gauge", "bpt_in_aura", "total_pool_tokens"])
        for depositor, amount in ecosystem_balances.items():
            writer.writerow(
                [depositor, bpt_balances[depositor], gauge_balances[depositor], aura_balances[depositor], amount])
    print("CSV file generated successfully: ", output_file)
    return ecosystem_balances
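
# The CSV above and the tokens_per_bpt JSON written by main() are meant to be combined: multiplying
# a depositor's total_pool_tokens (their BPT across wallet, gauge and Aura) by each tokens_per_bpt
# entry yields their share of the underlying tokens. A minimal sketch (hypothetical helper, not
# part of this script):
#   underlying = {token: per_bpt * depositor_bpts for token, per_bpt in tokens_per_bpt.items()}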


def main():
    for chain, poolinfos in POOLS_TO_RUN_ON_BY_CHAIN.items():
        # Figure out the block and timestamp for this chain using env vars as inputs
        block, timestamp = set_block_and_timestamp(chain, BLOCK, TIMESTAMP)
        print(f"Using block {block} on chain {chain} derived from unixtime(UTC): {timestamp}\n\n")
        for poolinfo in poolinfos:
            if POOL_ID and poolinfo["pool_id"] != POOL_ID:
                continue
            print(
                f"\nRunning on {poolinfo['name']}, pool_id: {poolinfo['pool_id']}, gauge: {poolinfo['gauge']}, block: {block}\n")
            get_ecosystem_balances_w_csv(
                pool_id=poolinfo["pool_id"],
                gauge_address=poolinfo["gauge"],
                name=poolinfo["name"],
                chain=chain,
                block=block,
            )
            # Dump tokens per BPT JSON
            tokens_per_bpt = get_tokens_per_bpt(chain, block, poolinfo["pool_id"])
            dir_name = poolinfo["name"].replace("/", "-")  # /'s are path structure
            output_file = f"out/{chain}/{dir_name}/{block}_{poolinfo['pool_id']}.tokens_per_bpt.json"
            os.makedirs(os.path.dirname(output_file), exist_ok=True)
            with open(output_file, "w") as f:
                json.dump(tokens_per_bpt, f)


if __name__ == "__main__":
    main()