Skip to content

Commit

Permalink
Merge pull request #225 from BalancerMaxis/issue/180
Browse files Browse the repository at this point in the history
feat: add root gauges and refactor workflows
  • Loading branch information
Tritium-VLK authored Mar 20, 2024
2 parents bea2990 + 45f8621 commit 8220e64
Show file tree
Hide file tree
Showing 13 changed files with 384 additions and 37 deletions.
11 changes: 5 additions & 6 deletions .github/workflows/generate_addressbooks.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: Generate Addressbooks
name: Generate Addressbooks Deployments

on:
schedule:
Expand All @@ -11,10 +11,10 @@ jobs:

steps:
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Install Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: "3.9"

Expand All @@ -24,15 +24,14 @@ jobs:
git clone https://github.com/balancer/balancer-deployments.git
export DEPLOYMENTS_REPO_ROOT_URL=`pwd`/balancer-deployments
pip3 install -r bal_addresses/requirements.txt
python3 gen_core_pools.py
python3 gen_pools_and_gauges.py
python3 transform-deployments.py
python3 gen_mono_addressbook.py
python3 gen_addresses.py
rm -rf balancer-deployments
git add -A
- name: pull-request
uses: peter-evans/create-pull-request@v5
uses: peter-evans/create-pull-request@v6
with:
commit-message: "Scheduled update from changes to monorepo"
title: "Scheduled update from changes to monorepo"
Expand Down
36 changes: 36 additions & 0 deletions .github/workflows/generate_core_pools.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# Scheduled workflow: regenerate outputs/core_pools.json and open a PR
# with any changes (runs weekly and on manual dispatch).
name: Generate Core Pools JSON

on:
  schedule:
    - cron: "0 0 * * 3"  # every Wednesday at 00:00 UTC
  workflow_dispatch:

jobs:
  refresh_books:  # fixed typo: was "refersh_books" (internal job id, no external references)
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Install Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.9"

      - name: Update Files
        id: update
        run: |
          pip3 install -r bal_addresses/requirements.txt
          python3 gen_core_pools.py
          git add -A
      - name: pull-request
        uses: peter-evans/create-pull-request@v6
        with:
          commit-message: "Scheduled update from changes to monorepo"
          title: "Scheduled update from changes to monorepo"
          branch: "gha-addressbook"
          # NOTE(review): commit author elsewhere is "Tritium-VLK"; confirm
          # "Tritum-VLK" is the intended GitHub handle before changing it.
          reviewers: "Tritum-VLK"
          assignees: "Tritum-VLK"
          labels: "Automatic"
8 changes: 3 additions & 5 deletions .github/workflows/generate_permissions.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,10 @@ jobs:
ALCHEMY_KEY: ${{ secrets.ALCHEMY_KEY }}
steps:
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Install Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: "3.9"

Expand All @@ -25,12 +25,10 @@ jobs:
run: |
pip3 install -r bal_addresses/requirements.txt
python3 generate_current_permissions.py
python3 gen_addresses.py
rm -rf balancer-deployments
git add -A
- name: pull-request
uses: peter-evans/create-pull-request@v5
uses: peter-evans/create-pull-request@v6
with:
commit-message: "Scheduled update from changes to monorepo"
title: "Scheduled update from changes to monorepo"
Expand Down
68 changes: 58 additions & 10 deletions bal_addresses/addresses.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,16 @@
import json
import os.path
from .errors import MultipleMatchesError, NoResultError
from typing import Dict
from typing import Optional

import requests
from munch import Munch
from web3 import Web3

from .utils import to_checksum_address


GITHUB_MONOREPO_RAW = (
"https://raw.githubusercontent.com/balancer-labs/balancer-v2-monorepo/master"
)
Expand All @@ -25,12 +30,16 @@

class AddrBook:
chains = Munch.fromDict(
requests.get(
json.load(open("extras/chains.json"))
if os.path.exists("extras/chains.json")
else requests.get(
"https://raw.githubusercontent.com/BalancerMaxis/bal_addresses/main/extras/chains.json"
).json()
)
fx_description_by_name = Munch.fromDict(
requests.get(
json.load(open("extras/func_desc_by_name.json"))
if os.path.exists("extras/func_desc_by_name.json")
else requests.get(
"https://raw.githubusercontent.com/BalancerMaxis/bal_addresses/main/extras/func_desc_by_name.json"
).json()
)
Expand Down Expand Up @@ -62,6 +71,7 @@ def __init__(self, chain, jsonfile=False):
self._eoas = None
self._pools = None
self._gauges = None
self._root_gauges = None

@property
def deployments(self) -> Optional[Munch]:
Expand Down Expand Up @@ -129,6 +139,17 @@ def gauges(self) -> Optional[Munch]:
self.populate_gauges()
return self._gauges

@property
def root_gauges(self) -> Optional[Munch]:
    """Root gauges as a Munch object, lazily loaded on first access.

    The loaded result is cached on the instance; subsequent accesses
    return the cached Munch without re-fetching.
    """
    if self._root_gauges is None:
        # First access: fill the cache via populate_root_gauges().
        self.populate_root_gauges()
    return self._root_gauges

def populate_deployments(self) -> None:
chain_deployments = requests.get(
f"{GITHUB_DEPLOYMENTS_RAW}/addresses/{self.chain}.json"
Expand Down Expand Up @@ -197,23 +218,48 @@ def populate_multisigs(self) -> None:
self._multisigs = Munch.fromDict({})

def populate_pools(self) -> None:
with open("extras/pools.json", "r") as f:
msigs = json.load(f)
if msigs.get(self.chain):
self._pools = Munch.fromDict(self.checksum_address_dict(msigs[self.chain]))
pools = (
json.load(open("outputs/pools.json"))
if os.path.exists("outputs/pools.json")
else requests.get(f"{GITHUB_RAW_OUTPUTS}/pools.json").json()
)
if pools.get(self.chain):
self._pools = Munch.fromDict(self.checksum_address_dict(pools[self.chain]))
else:
print(f"Warning: No pools for chain {self.chain}")
self._pools = Munch.fromDict({})

def populate_gauges(self) -> None:
with open("extras/gauges.json", "r") as f:
msigs = json.load(f)
if msigs.get(self.chain):
self._gauges = Munch.fromDict(self.checksum_address_dict(msigs[self.chain]))
gauges = (
json.load(open("outputs/gauges.json"))
if os.path.exists("outputs/gauges.json")
else requests.get(f"{GITHUB_RAW_OUTPUTS}/gauges.json").json()
)
if gauges.get(self.chain):
self._gauges = Munch.fromDict(
self.checksum_address_dict(gauges[self.chain])
)
else:
print(f"Warning: No gauges for chain {self.chain}")
self._gauges = Munch.fromDict({})

def populate_root_gauges(self) -> None:
    """Populate ``self._root_gauges`` from the generated root-gauge data.

    Only mainnet has root gauges in this dataset; every other chain gets
    an empty Munch. Prefers the locally generated ``outputs/root_gauges.json``
    and falls back to the published copy under ``GITHUB_RAW_OUTPUTS``.
    """
    if self.chain != "mainnet":
        self._root_gauges = Munch.fromDict({})
        return
    if os.path.exists("outputs/root_gauges.json"):
        # fix: use a context manager so the file handle is closed
        # deterministically (original used json.load(open(...)))
        with open("outputs/root_gauges.json") as f:
            root_gauges = json.load(f)
    else:
        root_gauges = requests.get(f"{GITHUB_RAW_OUTPUTS}/root_gauges.json").json()
    if root_gauges.get(self.chain):
        self._root_gauges = Munch.fromDict(
            self.checksum_address_dict(root_gauges[self.chain])
        )
    else:
        # Falsy/missing chain entry: warn and store an empty Munch,
        # mirroring populate_pools()/populate_gauges() behavior.
        print(f"Warning: No root gauges for chain {self.chain}")
        self._root_gauges = Munch.fromDict({})

def search_unique(self, substr):
results = [s for s in self.flatbook.keys() if substr in s]
if len(results) > 1:
Expand Down Expand Up @@ -300,11 +346,13 @@ def generate_flatbook(self):
self.populate_multisigs()
self.populate_pools()
self.populate_gauges()
self.populate_root_gauges()
self.populate_extras()
# write pools and gauges first, so they get overwritten by deployments later
# deployment label should take precedence over pool/gauge label
flatbook["pools"] = self.flatten_dict(self.pools)
flatbook["gauges"] = self.flatten_dict(self.gauges)
flatbook["root_gauges"] = self.flatten_dict(self.root_gauges)
for deployment, ddata in self.deployments.items():
for contract, infodict in ddata["contracts"].items():
flatbook[infodict.path] = infodict.address
Expand Down
7 changes: 7 additions & 0 deletions bal_addresses/graphql/gauges/root_gauges.gql
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Paginated query for live (non-killed) root gauges; callers page through
# results by advancing $skip in increments of $step_size.
query RootGauges($skip: Int!, $step_size: Int!) {
rootGauges(skip: $skip, first: $step_size, where: { isKilled: false }) {
id
# chain the root gauge points at; recipient receives emissions there
chain
recipient
}
}
13 changes: 13 additions & 0 deletions bal_addresses/pools_gauges.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,18 @@ def query_preferential_gauges(self, skip=0, step_size=100) -> list:
result += self.query_preferential_gauges(skip + step_size, step_size)
return result

def query_root_gauges(self, skip=0, step_size=100) -> list:
    """Return all root gauges from the gauges subgraph, paging by step_size."""
    collected = []
    offset = skip
    while True:
        params = {"skip": offset, "step_size": step_size}
        payload = self.subgraph.fetch_graphql_data("gauges", "root_gauges", params)
        try:
            page = payload["rootGauges"]
        except KeyError:
            page = []
        if not page:
            # empty or missing page: end of results
            return collected
        collected += page
        offset += step_size

def get_last_join_exit(self, pool_id: int) -> int:
"""
Returns a timestamp of the last join/exit for a given pool id
Expand All @@ -85,6 +97,7 @@ def get_last_join_exit(self, pool_id: int) -> int:
return data["joinExits"][0]["timestamp"]
except:
raise NoResultError(f"empty or malformed results looking for last join/exit on pool {self.chain}:{pool_id}")

def get_liquid_pools_with_protocol_yield_fee(self) -> dict:
"""
query the official balancer subgraph and retrieve pools that
Expand Down
22 changes: 15 additions & 7 deletions gen_core_pools.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,25 @@
import json

from bal_addresses import AddrBook
from bal_addresses.pools_gauges import BalPoolsGauges


if __name__ == "__main__":
chains = AddrBook.chains
def main():
core_pools = {}

# build core pools for every chain and dump result to json
all_core_pools = {}
with open("extras/chains.json", "r") as f:
chains = json.load(f)
for chain in chains["CHAIN_IDS_BY_NAME"]:
gauge_info = BalPoolsGauges(chain)

# core pools
if chain in ["sepolia", "goerli"]:
continue
all_core_pools[chain] = BalPoolsGauges(chain).core_pools
core_pools[chain] = gauge_info.core_pools

# dump the collected dict to json file
with open("outputs/core_pools.json", "w") as f:
json.dump(core_pools, f, indent=2)

json.dump(all_core_pools, open("outputs/core_pools.json", "w"), indent=2)

if __name__ == "__main__":
main()
4 changes: 2 additions & 2 deletions transform-deployments.py → gen_mono_addressbook.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,15 +55,15 @@ def main():
### add pools
if "pools" not in active[chain]:
active[chain]["pools"] = {}
with open("extras/pools.json", "r") as f:
with open("outputs/pools.json", "r") as f:
data = json.load(f)
data = data.get(chain, {})
data = AddrBook.checksum_address_dict(data)
active[chain]["pools"] = data
### add gauges
if "gauges" not in active[chain]:
active[chain]["gauges"] = {}
with open("extras/gauges.json", "r") as f:
with open("outputs/gauges.json", "r") as f:
data = json.load(f)
data = data.get(chain, {})
data = AddrBook.checksum_address_dict(data)
Expand Down
Loading

0 comments on commit 8220e64

Please sign in to comment.