
Commit

Merge branch 'main' into issue/214
gosuto-inzasheru committed Apr 30, 2024
2 parents 8e31b05 + 73e455b commit 4eba775
Showing 55 changed files with 1,795 additions and 261 deletions.
9 changes: 9 additions & 0 deletions .github/dependabot.yaml
@@ -0,0 +1,9 @@
# ref: https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
- package-ecosystem: "github-actions"
# Workflow files stored in the default location of `.github/workflows`. (You don't need to specify `/.github/workflows` for `directory`. You can use `directory: "/"`.)
directory: "/"
schedule:
interval: "daily"
21 changes: 21 additions & 0 deletions .github/workflows/black.yaml
@@ -0,0 +1,21 @@
name: Lint

on: [pull_request]

jobs:
lint:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4

- name: Black
uses: psf/black@stable
with:
options: "--verbose"
version: "24.4.0"

- name: Auto commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "style: ci lint with `black`"
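
For orientation, a rough local equivalent of what this workflow automates (a sketch only; Black 24.4.0 and a git checkout are assumed, and the commit message mirrors the workflow's):

# rough local equivalent of the CI lint + auto-commit steps above
import subprocess

subprocess.run(["black", "--verbose", "."], check=True)  # format the repo in place
subprocess.run(
    ["git", "commit", "-am", "style: ci lint with `black`"], check=False
)  # exits non-zero (ignored) if black changed nothing
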
4 changes: 2 additions & 2 deletions .github/workflows/test.yml
@@ -17,9 +17,9 @@ jobs:
os: [ubuntu-latest]

steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
7 changes: 5 additions & 2 deletions bal_addresses/errors.py
@@ -1,15 +1,18 @@

class MultipleMatchesError(Exception):
pass


class NoResultError(Exception):
pass


class ChecksumError(Exception):
pass


class GraphQLRequestError(Exception):
pass


class UnexpectedListLengthError(Exception):
pass
pass
38 changes: 23 additions & 15 deletions bal_addresses/permissions.py
@@ -9,8 +9,12 @@
class BalPermissions:
def __init__(self, chain):
self.chain = chain
self.active_permissions_by_action_id = requests.get(f"{GITHUB_RAW_OUTPUTS}/permissions/active/{chain}.json").json()
self.action_ids_by_contract_by_deployment = requests.get(f"{GITHUB_DEPLOYMENTS_RAW}/action-ids/{chain}/action-ids.json").json()
self.active_permissions_by_action_id = requests.get(
f"{GITHUB_RAW_OUTPUTS}/permissions/active/{chain}.json"
).json()
self.action_ids_by_contract_by_deployment = requests.get(
f"{GITHUB_DEPLOYMENTS_RAW}/action-ids/{chain}/action-ids.json"
).json()

# Define
self.paths_by_action_id = defaultdict(set)
@@ -23,7 +27,9 @@ def __init__(self, chain):
for contract, contract_data in contracts.items():
for fx, action_id in contract_data["actionIds"].items():
path = f"{deployment}/{contract}/{fx}"
assert path not in self.action_id_by_path.values(), f"{path} shows up twice?"
assert (
path not in self.action_id_by_path.values()
), f"{path} shows up twice?"
self.action_id_by_path[path] = action_id
self.deployments_by_fx[fx].add(deployment)
self.contracts_by_fx[fx].add(contract)
@@ -35,30 +41,35 @@ def search_path(self, substr) -> list[str]:
results = [path for path in search if path in self.action_id_by_path]
return results

def search_many_paths_by_unique_deployment(self, deployment_substr, fx_substr) -> list[dict[str, str]]:
def search_many_paths_by_unique_deployment(
self, deployment_substr, fx_substr
) -> list[dict[str, str]]:
a = AddrBook(self.chain)
results = []
deployment = a.search_unique_deployment(deployment_substr)
deployment_fxs = self.search_path(deployment.deployment)
search = [s for s in deployment_fxs if fx_substr in s]
for r in search:
result = Munch.fromDict({
"path": r,
"action_id": self.action_id_by_path[r]
})
result = Munch.fromDict({"path": r, "action_id": self.action_id_by_path[r]})
results.append(result)
return Munch.fromDict(results)

def search_unique_path_by_unique_deployment(self, deployment_substr, fx_substr) -> dict[str, str]:
results = self.search_many_paths_by_unique_deployment(deployment_substr, fx_substr)
def search_unique_path_by_unique_deployment(
self, deployment_substr, fx_substr
) -> dict[str, str]:
results = self.search_many_paths_by_unique_deployment(
deployment_substr, fx_substr
)
if len(results) > 1:
raise MultipleMatchesError(f"{fx_substr} Multiple matches found: {results}")
if len(results) < 1:
raise NoResultError(f"{fx_substr}")
return results[0]

def needs_authorizer(self, contract, deployment) -> bool:
return self.action_ids_by_contract_by_deployment[deployment][contract]["useAdaptor"]
return self.action_ids_by_contract_by_deployment[deployment][contract][
"useAdaptor"
]

def allowed_addresses(self, action_id) -> list[str]:
try:
@@ -72,8 +83,5 @@ def allowed_caller_names(self, action_id) -> list[str]:
addresslist = self.active_permissions_by_action_id[action_id]
except KeyError:
raise NoResultError(f"{action_id} has no authorized callers")
names = [a.flatbook.get(item, 'undef') for item in addresslist]
names = [a.flatbook.get(item, "undef") for item in addresslist]
return names



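A hypothetical usage sketch of the search helpers above; the chain name and the deployment/function substrings are illustrative only:

# illustrative only: real substrings depend on the published action-ids data
from bal_addresses.permissions import BalPermissions

perms = BalPermissions("mainnet")

# all known "<deployment>/<contract>/<fx>" paths containing a substring
paths = perms.search_path("swapFeePercentage")

# resolve a unique deployment substring plus a function substring to one path;
# raises MultipleMatchesError / NoResultError otherwise
result = perms.search_unique_path_by_unique_deployment("vault", "setPaused")
print(result.path, result.action_id)

# addresses currently authorized to call that action id
callers = perms.allowed_addresses(result.action_id)
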
10 changes: 8 additions & 2 deletions bal_addresses/subgraph.py
@@ -3,12 +3,18 @@
from gql import Client, gql
from gql.transport.requests import RequestsHTTPTransport

from bal_addresses import AddrBook


graphql_base_path = f"{os.path.dirname(os.path.abspath(__file__))}/graphql"


class Subgraph:
def __init__(self, chain):
def __init__(self, chain: str):
if chain not in AddrBook.chain_ids_by_name.keys():
raise ValueError(f"Invalid chain: {chain}")
self.chain = chain


def get_subgraph_url(self, subgraph="core") -> str:
"""
perform some soup magic to determine the latest subgraph url used in the official frontend
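
A minimal sketch of the new constructor guard: chain names are now validated against AddrBook.chain_ids_by_name, so a typo fails fast instead of producing URLs for an unknown network (the misspelled chain below is deliberate):

from bal_addresses.subgraph import Subgraph

subgraph = Subgraph("mainnet")
url = subgraph.get_subgraph_url("core")  # latest core subgraph url used by the frontend

try:
    Subgraph("fantomm")  # deliberately invalid
except ValueError as err:
    print(err)  # Invalid chain: fantomm
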
6 changes: 4 additions & 2 deletions bal_addresses/utils.py
@@ -3,12 +3,14 @@

### These functions are to deal with differing web3 versions and the need to use 5.x for legacy brownie code
def to_checksum_address(address: str):
if hasattr(Web3, 'toChecksumAddress'):
if hasattr(Web3, "toChecksumAddress"):
return Web3.toChecksumAddress(address)
if hasattr(Web3, "to_checksum_address"):
return Web3.to_checksum_address(address)


def is_address(address: str):
if hasattr(Web3, "isAddress"):
return Web3.isAddress(address)
if hasattr(Web3, "is_address"):
return Web3.isAddress(address)
return Web3.isAddress(address)
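
Background for the hasattr checks, with a small usage sketch (the import path simply mirrors this module): web3.py 5.x exposes camelCase helpers (Web3.toChecksumAddress, Web3.isAddress) while 6.x renamed them to snake_case (Web3.to_checksum_address, Web3.is_address), so the shims branch on whichever attribute the installed version provides.

from bal_addresses.utils import is_address, to_checksum_address

addr = "0x9ff471f9f98f42e5151c7855fd1b5aa906b1af7e"
checksummed = to_checksum_address(addr)  # works under either web3 major version
valid = is_address(addr)  # address validation via the same hasattr pattern
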
3 changes: 3 additions & 0 deletions extras/arbitrum.json
@@ -59,5 +59,8 @@
"Dubstard": "0x01B894622C7aa890d758C8a0E8156480F0Fa5f6C",
"gosuto": "0xd411b886e96291b089273a5835D2BE4406700352",
"lipman": "0xB209a59A9F3CC7FA5A25dF152a01f9dF8B969A3a"
},
"TreasuryExtentions": {
"AURA_ARB_BAL_LP_BIP_322": "0x8D803f7f7e26E586ee90E5A872cf7830e21f7727"
}
}
2 changes: 2 additions & 0 deletions extras/chains.json
@@ -9,6 +9,7 @@
"goerli": 42,
"sepolia": 11155111,
"avalanche": 43114,
"fantom": 250,
"base": 8453
},
"SCANNERS_BY_CHAIN": {
@@ -21,6 +22,7 @@
"goerli": "https://goerli.etherscan.io/",
"sepolia": "https://sepolia.etherscan.io/",
"avalanche": "https://snowtrace.io/",
"fantom": "https://ftmscan.com/",
"base": "https://basescan.org/"
}
}
15 changes: 15 additions & 0 deletions extras/fantom.json
@@ -0,0 +1,15 @@
{
"zero": {
"zero": "0x0000000000000000000000000000000000000000"
},
"tokens": {
"WFTM": "0x21be370D5312f44cB42ce377BC9b8a0cEF1A4C83",
"BEETS": "0xF24Bcf4d1e507740041C9cFd2DddB29585aDCe1e"
},
"beethoven_x": {
"treasury": "0xa1E849B1d6c2Fd31c63EEf7822e9E0632411ada7",
"deployer": "0x4fbe899d37fb7514adf2f41B0630E018Ec275a0C",
"vault": "0x20dd72Ed959b6147912C2e529F0a0C651c33c9ce",
"friendly_fork_vesting": "0x8D381EC09356C3F1805c54B2BB0867e8b417DC3a"
}
}
4 changes: 4 additions & 0 deletions extras/mainnet.json
@@ -144,6 +144,10 @@
"bptSwapper": "0x6030331C9225Ee5ae3F3D08FBD19e8bF053dF498",
"oneinchSwapper": "0xd712A863766dE7e7cA13289A97997E01832A6571",
"paraswapSwapper": "0x95676AaEcD59B19C5B79008F86d3A291628b0947"
},
"myso": {
"QuoteHandler": "0x71E1cc2F7574C798ED893Ef04D55D1E573bE95B1",
"LenderVaultFactory": "0x1874a08f7975b25944feb989bbaaa464f61ab3bc"

}
}
22 changes: 16 additions & 6 deletions extras/multisigs.json
@@ -14,7 +14,8 @@
"beets_treasury": "0xea06e1b4259730724885a39ce3ca670efb020e26",
"grants_treasury": "0xE2c91f3409Ad6d8cE3a2E2eb330790398CB23597",
"vote_incentive_recycling": "0x9ff471F9f98F42E5151C7855fD1b5aa906b1AF7e",
"bizdev": "0xC7E84373FC63A17B5B22EBaF86219141B630cD7a"
"bizdev": "0xC7E84373FC63A17B5B22EBaF86219141B630cD7a",
"wonderland": "0x74fea3fb0ed030e9228026e7f413d66186d3d107"
},
"polygon": {
"lm": "0xc38c5f97B34E175FFd35407fc91a937300E33860",
@@ -71,16 +72,25 @@
"blabs_ops": "0x955556b002d05c7B31a9394c10897c1DA19eAEab",
"vote_incentive_recycling": "0x9ff471F9f98F42E5151C7855fD1b5aa906b1AF7e"
},
"goerli": {
"blabs_ops": "0x040E995520F92F96142d1a76c16D4af21A2eFDE7",
"lm": "0x040E995520F92F96142d1a76c16D4af21A2eFDE7",
"maxi_ops": "0x040E995520F92F96142d1a76c16D4af21A2eFDE7"
},
"base": {
"blabs_ops": "0x183C55A0dc7A7Da0f3581997e764D85Fd9E9f63a",
"dao": "0xC40DCFB13651e64C8551007aa57F9260827B6462",
"lm": "0x65226673F3D202E0f897C862590d7e1A992B2048",
"emergency": "0x183C55A0dc7A7Da0f3581997e764D85Fd9E9f63a",
"vote_incentive_recycling": "0x9ff471F9f98F42E5151C7855fD1b5aa906b1AF7e"
},
"goerli": {
"blabs_ops": "0x040E995520F92F96142d1a76c16D4af21A2eFDE7",
"lm": "0x040E995520F92F96142d1a76c16D4af21A2eFDE7",
"maxi_ops": "0x040E995520F92F96142d1a76c16D4af21A2eFDE7"
},
"fantom": {
"dao": "0xFe2022da08d17ac8388F5bEFE4F71456255105A4"
},
"bnb": {
"dao": "0x58099b94e660bBe19848547F6c5d76DcA7282E45"
},
"sepolia": {
"MaxiTesting": "0xdb0d41598cE0497C3aF4961599dc9245d3c0B3ce"
}
}
14 changes: 7 additions & 7 deletions extras/signers.json
@@ -44,14 +44,12 @@
"nanexcool": "0x823DF0278e4998cD0D06FB857fBD51e85b18A250",
"davgarai": "0xAc1aA53108712d7f38093A67d380aD54B562a650"
},
"ozDefender": {
"checkpointer": "0xe9735f7D85a57BFb860c1E2c1c7B4F587BA0F6e7"
},
"maxiPay": {
"factory": "0x7bbac709a9535464690a435ca7361256496f13ce"
},
"keepers": {
"oz_defender_checkpointer": "0xe9735f7D85a57BFb860c1E2c1c7B4F587BA0F6e7",
"old_oz_relayer_v1": "0xe9735f7D85a57BFb860c1E2c1c7B4F587BA0F6e7",
"oz_relayer_v2" : "0xfb7d0D001BC8D0bC998071C762BfF53EE31b725F",
"github_actions_poker": "0x737760C760FfEc370F84861E4Be4AFF7093Ffa3f"
},
"emeritus": {
@@ -61,13 +59,15 @@
},
"kyc": {
"Arbitrum": {
"kycsigner1": "0x5c43d19ee1b9f93143c7c258501ef3ada1204524",
"kycsigner1_retired": "0x5c43d19ee1b9f93143c7c258501ef3ada1204524",
"kycsigner2": "0x0938BCEfba80bCd958A5d4BEbF6a4AFFafB07eD2",
"kycsigner3": "0x6edbc5bb151d0bfb203a48b378e88dc5912318f1"
"kycsigner3": "0x6edbc5bb151d0bfb203a48b378e88dc5912318f1",
"kycsigner4": "0x122AFb4667C5f80e45721a42C7c81e9140C62FA4"
}
},
"contributors": {
"json": "0xE2A4DE267cdD4fF5ED9Ba13552F5c624b12db9b2",
"lipman": "0x72658e9A5c55371A5e80559B8E07AbC14F212120"
"lipman": "0x72658e9A5c55371A5e80559B8E07AbC14F212120",
"hyferion": "0xE24fA83aF4dDd6d3e448908ed3F931f0A23a6096"
}
}
2 changes: 1 addition & 1 deletion gen_mono_addressbook.py
@@ -103,4 +103,4 @@ def process_deployments(deployments, old=False):


if __name__ == "__main__":
main()
main()
4 changes: 4 additions & 0 deletions gen_pools_and_gauges.py
@@ -112,6 +112,10 @@ def main():
with open("extras/chains.json", "r") as f:
chains = json.load(f)
for chain in chains["CHAIN_IDS_BY_NAME"]:
print(chain)
if chain == "fantom":
# not a balancer native chain
continue
gauge_info = BalPoolsGauges(chain)
# pools
# TODO: consider moving to query object??
36 changes: 36 additions & 0 deletions gen_subgraph_urls.py
@@ -0,0 +1,36 @@
import json

import requests

from bal_addresses.subgraph import Subgraph


def main():
urls = {}

with open("extras/chains.json", "r") as f:
chains = json.load(f)
for chain in chains["CHAIN_IDS_BY_NAME"]:
if chain not in urls:
urls[chain] = {}
for subgraph_type in ["core", "gauges", "blocks", "aura"]:
subgraph = Subgraph(chain)
url = subgraph.get_subgraph_url(subgraph_type)
if url:
code = requests.get(url).status_code
if code == 200:
urls[chain].update({subgraph_type: url})
else:
# if code not in urls[chain]:
# urls[chain][code] = {}
urls[chain].update({subgraph_type: {code: url}})
else:
continue

# dump the collected dict to json file
with open("outputs/subgraph_urls.json", "w") as f:
json.dump(urls, f, indent=2)


if __name__ == "__main__":
main()
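
A hypothetical downstream consumer of the file written above; per the script, urls[chain][subgraph_type] is a plain URL string when the endpoint answered HTTP 200 and a {status_code: url} mapping otherwise:

import json

with open("outputs/subgraph_urls.json", "r") as f:
    urls = json.load(f)

core_url = urls.get("mainnet", {}).get("core")
if isinstance(core_url, str):
    print("healthy core subgraph:", core_url)
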
2 changes: 1 addition & 1 deletion generate_current_permissions.py
@@ -14,6 +14,7 @@
"gnosis": Web3(Web3.HTTPProvider(f"https://rpc.gnosischain.com")),
"zkevm": Web3(Web3.HTTPProvider(f"https://zkevm-rpc.com")),
"avalanche": Web3(Web3.HTTPProvider(f"https://api.avax.network/ext/bc/C/rpc")),
"fantom": Web3(Web3.HTTPProvider("https://rpc.fantom.network")),
### Less reliable RPCs first to fail fast :)
"mainnet": Web3(Web3.HTTPProvider(f"https://mainnet.infura.io/v3/{INFURA_KEY}")),
"arbitrum": Web3(
@@ -25,7 +26,6 @@
"polygon": Web3(
Web3.HTTPProvider(f"https://polygon-mainnet.infura.io/v3/{INFURA_KEY}")
),
"goerli": Web3(Web3.HTTPProvider(f"https://goerli.infura.io/v3/{INFURA_KEY}")),
"sepolia": Web3(Web3.HTTPProvider(f"https://sepolia.infura.io/v3/{INFURA_KEY}")),
}

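
A quick connectivity sketch for the newly added Fantom RPC entry; the expected chain id (250) comes from extras/chains.json earlier in this commit:

from web3 import Web3

w3 = Web3(Web3.HTTPProvider("https://rpc.fantom.network"))
print(w3.is_connected(), w3.eth.chain_id)  # web3 6.x names; 5.x uses isConnected / chainId
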
