From e2dc995161e03bc935184459804ce2446d841a31 Mon Sep 17 00:00:00 2001 From: daviidarr Date: Tue, 9 Apr 2024 14:15:07 +0200 Subject: [PATCH] simplify OO + categories in db --- cli.py | 4 +-- config/categories.yaml | 75 ---------------------------------------- config/params.yaml | 1 + plex/debank_api.py | 6 ++-- plex/plex.py | 12 +++---- pnl_explain.py | 10 +++--- utils/db.py | 65 +++++++++++++--------------------- utils/streamlit_utils.py | 6 ++-- 8 files changed, 43 insertions(+), 136 deletions(-) delete mode 100644 config/categories.yaml diff --git a/cli.py b/cli.py index babf4cc..155b4d0 100644 --- a/cli.py +++ b/cli.py @@ -9,7 +9,7 @@ from plex.debank_api import DebankAPI from utils.async_utils import safe_gather -from utils.db import PlexDB, SQLiteDB, RawDataDB, S3JsonRawDataDB +from utils.db import SQLiteDB, SQLiteDB, RawDataDB, S3JsonRawDataDB if __name__ == '__main__': if sys.argv[1] =='snapshot': @@ -24,7 +24,7 @@ plex_db_params = copy.deepcopy(parameters['input_data']['plex_db']) plex_db_params['remote_file'] = plex_db_params['remote_file'].replace('.db', f"_{parameters['profile']['debank_key']}.db") - plex_db: PlexDB = SQLiteDB(plex_db_params, secrets) + plex_db: SQLiteDB = SQLiteDB(plex_db_params, secrets) raw_data_db: RawDataDB = RawDataDB.build_RawDataDB(parameters['input_data']['raw_data_db'], secrets) api = DebankAPI(raw_data_db, plex_db, parameters) diff --git a/config/categories.yaml b/config/categories.yaml deleted file mode 100644 index bdcb993..0000000 --- a/config/categories.yaml +++ /dev/null @@ -1,75 +0,0 @@ -'#0': '#0' -ARB: ARB -AURA: AURA -AVAX: AVAX -BAL: BAL -BNB: BNB -BTC.b: WBTC -BUSD: USDC -Blockchain Bliss Sonnet: Blockchain Bliss Sonnet -CRV: CRV -CVX: CVX -DAI: USDC -DOLA: USDC -DUCKIES: DUCKIES -DYDX: DYDX -ETH: ETH -EURA: EURA -EtherMail.io - Web3 wallet email + ENS / UD token: EtherMail.io - Web3 wallet email - + ENS / UD token -Exchange Pass: Exchange Pass -GGP: GGP -HOP: HOP -KNC: KNC -LDO: LDO -LUSD: USDC -LYRA: LYRA -Limited edition Pirate Girl NFTs at pirategirls.xyz - mint now: Limited edition Pirate - Girl NFTs at pirategirls.xyz - mint now -MAI: USDC -MATIC: MATIC -MIM: USDC -MaticX: MATIC -Member Node: Member Node -PENDLE: PENDLE -PLS: PLS -PT-GLP-28MAR2024: PT-GLP-28MAR2024 -PT-aUSDC-27JUN2024: USDC -QI: QI -RDNT: RDNT -RDNT-WETH: ETH -SD: SD -SDAO: SDAO -SDT: SDT -SONNE: SONNE -SOPH: SOPH -THC World Pass: THC World Pass -USDC: USDC -USDC.e: USDC -USDT: USDC -USDT.e: USDC -USDt: USDC -VELO: VELO -VISR: VISR -WAVAX: AVAX -WBTC: WBTC -WBTC.e: WBTC -WETH: ETH -WETH.e: ETH -WMATIC: MATIC -aPolWETH: ETH -aPolWMATIC: MATIC -agEUR: EURA -crvUSD: USDC -esEXA: esEXA -esGMX: esGMX -jEUR: EURA -miMATIC: MATIC -sAVAX: AVAX -sUSD: USDC -stETH: ETH -stMATIC: MATIC -swETH: ETH -thehighapesclub.com: thehighapesclub.com -wstETH: ETH -xDai: USDC diff --git a/config/params.yaml b/config/params.yaml index f4d0229..3a5be96 100644 --- a/config/params.yaml +++ b/config/params.yaml @@ -1,4 +1,5 @@ profile: + debank_key: "0b9786c662bff596482c995ef9c654aa3663a120" addresses: - "0x7f8DA5FBD700a134842109c54ABA576D5c3712b8" - "0xFaf2A8b5fa78cA2786cEf5F7e19f6942EC7cB531" diff --git a/plex/debank_api.py b/plex/debank_api.py index d929aeb..73b0408 100644 --- a/plex/debank_api.py +++ b/plex/debank_api.py @@ -9,16 +9,16 @@ import streamlit as st from utils.async_utils import safe_gather -from utils.db import RawDataDB, PlexDB +from utils.db import RawDataDB, SQLiteDB class DebankAPI: endpoints = ["all_complex_protocol_list", "all_token_list", "all_nft_list"] api_url = 
"https://pro-openapi.debank.com/v1" - def __init__(self, json_db: RawDataDB, plex_db: PlexDB, parameters: Dict[str, Any]): + def __init__(self, json_db: RawDataDB, plex_db: SQLiteDB, parameters: Dict[str, Any]): self.parameters = parameters self.json_db: RawDataDB = json_db - self.plex_db: PlexDB = plex_db + self.plex_db: SQLiteDB = plex_db def get_credits(self) -> float: response = requests.get(f'{self.api_url}/account/units', diff --git a/plex/plex.py b/plex/plex.py index cef34b7..1816212 100644 --- a/plex/plex.py +++ b/plex/plex.py @@ -10,18 +10,16 @@ class PnlExplainer: - def __init__(self): - self.categories_path = os.path.join(os.getcwd(), 'config', 'categories.yaml') - with open(self.categories_path, 'r') as f: - self.categories = yaml.safe_load(f) + def __init__(self, categories: dict[str, str]): + self.categories = categories def validate_categories(self, data) -> bool: if missing_category := set(data['asset']) - set(self.categories.keys()): st.warning(f"Categories need to be updated. Please categorize the following assets: {missing_category}") return False - if missing_underlying := set(self.categories.values()) - set(data['asset']): - st.warning(f"I need underlying {missing_underlying} to have a position, maybe get some dust? Sorry...") - return False + # if missing_underlying := set(self.categories.values()) - set(data['asset']): + # st.warning(f"I need underlying {missing_underlying} to have a position, maybe get some dust? Sorry...") + # return False return True def explain(self, start_snapshot: pd.DataFrame, end_snapshot: pd.DataFrame) -> DataFrame: diff --git a/pnl_explain.py b/pnl_explain.py index 4c85772..059c741 100644 --- a/pnl_explain.py +++ b/pnl_explain.py @@ -11,7 +11,7 @@ from plex.plex import PnlExplainer from utils.async_utils import safe_gather -from utils.db import SQLiteDB, RawDataDB, PlexDB +from utils.db import SQLiteDB, RawDataDB from plex.debank_api import DebankAPI assert (sys.version_info >= (3, 10)), "Please use Python 3.10 or higher" @@ -32,12 +32,12 @@ plex_db_params = copy.deepcopy(st.session_state.parameters['input_data']['plex_db']) plex_db_params['remote_file'] = plex_db_params['remote_file'].replace('.db', f"_{st.session_state.parameters['profile']['debank_key']}.db") - st.session_state.plex_db: PlexDB = SQLiteDB(plex_db_params, st.secrets) + st.session_state.plex_db: SQLiteDB = SQLiteDB(plex_db_params, st.secrets) raw_data_db: RawDataDB = RawDataDB.build_RawDataDB(st.session_state.parameters['input_data']['raw_data_db'], st.secrets) st.session_state.api = DebankAPI(json_db=raw_data_db, plex_db=st.session_state.plex_db, parameters=st.session_state.parameters) - st.session_state.pnl_explainer = PnlExplainer() + st.session_state.pnl_explainer = PnlExplainer(st.session_state.plex_db.query_categories()) addresses = st.session_state.parameters['profile']['addresses'] risk_tab, pnl_tab = st.tabs( @@ -90,8 +90,8 @@ edited_categorization = st.data_editor(categorization, use_container_width=True)['underlying'].to_dict() if st.form_submit_button("Override categorization"): st.session_state.pnl_explainer.categories = edited_categorization - with open(st.session_state.pnl_explainer.categories_path, 'w') as f: - yaml.dump(edited_categorization, f) + st.session_state.plex_db.overwrite_categories(edited_categorization) + st.session_state.plex_db.upload_to_s3() st.success("Categories updated (not exposure!)") with pnl_tab: diff --git a/utils/db.py b/utils/db.py index ad6728f..f0c2c1d 100644 --- a/utils/db.py +++ b/utils/db.py @@ -9,6 +9,7 @@ from pathlib import 
Path import boto3 +import yaml from botocore.exceptions import ClientError import pandas as pd import sqlite3 @@ -89,45 +90,7 @@ def all_timestamps(self, address: str, table_name: TableType) -> list[int]: if file['Key'].endswith('.json') and address in file['Key']] -class PlexDB(ABC): - ''' - Abstract class for PlexDB, where we put snapshots, one table per address - ''' - @abstractmethod - def query_table_at(self, addresses: list[str], timestamp: int, table_name: TableType) -> pd.DataFrame: - raise NotImplementedError - - @abstractmethod - def query_table_between(self, addresses: list[str], start_timestamp: int, end_timestamp: int, table_name: TableType) -> pd.DataFrame: - raise NotImplementedError - - @abstractmethod - def insert_table(self, df: pd.DataFrame, table_name: TableType) -> None: - raise NotImplementedError - - @abstractmethod - def all_timestamps(self, address: str, table_name: TableType) -> list[int]: - raise NotImplementedError - - def last_updated(self, address: str, table_name: TableType) -> tuple[datetime, pd.DataFrame]: - if all_timestamps := self.all_timestamps(address, table_name): - timestamp = max(all_timestamps) - latest_table = self.query_table_at([address], timestamp, table_name) - return datetime.fromtimestamp(timestamp, tz=timezone.utc), latest_table - else: - return datetime(1970, 1, 1, tzinfo=timezone.utc), {} - - -class SQLiteDB(PlexDB): - plex_schema = {'chain': 'TEXT', - 'protocol': 'TEXT', - 'hold_mode': 'TEXT', - 'type': 'TEXT', - 'asset': 'TEXT', - 'amount': 'REAL', - 'price': 'REAL', - 'value': 'REAL', - 'timestamp': 'INTEGER'} +class SQLiteDB: def __init__(self, config: dict, secrets: dict): if 'bucket_name' in config and 'remote_file' in config: # if bucket_name is in config, we are using s3 and download the file to ~ @@ -164,6 +127,14 @@ def __init__(self, config: dict, secrets: dict): os.chmod(local_file, 0o777) self.cursor = self.conn.cursor() + def last_updated(self, address: str, table_name: TableType) -> tuple[datetime, pd.DataFrame]: + if all_timestamps := self.all_timestamps(address, table_name): + timestamp = max(all_timestamps) + latest_table = self.query_table_at([address], timestamp, table_name) + return datetime.fromtimestamp(timestamp, tz=timezone.utc), latest_table + else: + return datetime(1970, 1, 1, tzinfo=timezone.utc), pd.DataFrame() + def upload_to_s3(self): s3 = boto3.client('s3', aws_access_key_id=self.secrets['AWS_ACCESS_KEY_ID'], @@ -175,7 +146,6 @@ def insert_table(self, df: pd.DataFrame, table_name: TableType) -> None: for address, data in df.groupby('address'): table = f"{table_name}_{address}" data.drop(columns='address').to_sql(table, self.conn, if_exists='append', index=False) - self.conn.commit() def query_table_at(self, addresses: list[str], timestamp: int, table_name: TableType) -> pd.DataFrame: return pd.concat([pd.read_sql_query(f'SELECT * FROM {table_name}_{address} WHERE timestamp = {timestamp}', self.conn) @@ -192,4 +162,17 @@ def all_timestamps(self, address: str, table_name: TableType) -> list[int]: self.cursor.execute(f'SELECT DISTINCT timestamp FROM {table_name}_{address}') rows = self.cursor.fetchall() return [row[0] for row in rows] - + + def query_categories(self) -> dict: + tables = pd.read_sql_query("SELECT name FROM sqlite_master WHERE type='table'", self.conn) + if 'categories' not in tables.values: + pd.DataFrame(columns=['asset', 'underlying']).to_sql('categories', self.conn, index=False) + return {} + return pd.read_sql_query('SELECT * FROM categories', 
self.conn).set_index('asset')['underlying'].to_dict() + + def overwrite_categories(self, categories: dict) -> None: + # if True: + # with open(os.path.join(os.getcwd(), 'config', 'categories_SAVED.yaml'), 'r') as file: + # categories = yaml.safe_load(file) + pd.DataFrame({'asset':categories.keys(), 'underlying': categories.values()}).to_sql('categories', self.conn, index=False, if_exists='replace') + self.conn.commit() \ No newline at end of file diff --git a/utils/streamlit_utils.py b/utils/streamlit_utils.py index 34eb657..e6b344d 100644 --- a/utils/streamlit_utils.py +++ b/utils/streamlit_utils.py @@ -12,7 +12,7 @@ from plotly import express as px from st_aggrid import AgGrid, GridOptionsBuilder -from utils.db import PlexDB +from utils.db import SQLiteDB def load_parameters() -> dict: @@ -49,7 +49,7 @@ def load_parameters() -> dict: return st.session_state.parameters -def prompt_plex_interval(plex_db: PlexDB, addresses: list[str]) -> tuple[int, int]: +def prompt_plex_interval(plex_db: SQLiteDB, addresses: list[str]) -> tuple[int, int]: date_col, time_col = st.columns(2) now_datetime = datetime.now() with time_col: @@ -132,7 +132,7 @@ def download_button(df: pd.DataFrame, label: str, file_name: str, file_type='tex mime=file_type ) -def download_db_button(db: PlexDB, label: str, file_name: str, file_type='application/x-sqlite3'): +def download_db_button(db: SQLiteDB, label: str, file_name: str, file_type='application/x-sqlite3'): with open(db.data_location['local_file'], "rb") as file: st.sidebar.download_button( label=label,
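

Note on the new categories storage: this patch moves the asset-to-underlying mapping out of config/categories.yaml and into a `categories` table inside the per-profile SQLite file, so the mapping now travels with the snapshots and reaches S3 through `upload_to_s3()`. Below is a minimal sketch of the round-trip implemented by `query_categories`/`overwrite_categories`; it runs against an in-memory SQLite connection standing in for the S3-backed file that the real `SQLiteDB` downloads, and the WETH/USDT rows are just illustrative data.

import sqlite3
import pandas as pd

conn = sqlite3.connect(':memory:')

def query_categories(conn) -> dict:
    # mirrors SQLiteDB.query_categories: bootstrap an empty table on first use
    tables = pd.read_sql_query("SELECT name FROM sqlite_master WHERE type='table'", conn)
    if 'categories' not in tables.values:
        pd.DataFrame(columns=['asset', 'underlying']).to_sql('categories', conn, index=False)
        return {}
    return pd.read_sql_query('SELECT * FROM categories', conn).set_index('asset')['underlying'].to_dict()

def overwrite_categories(conn, categories: dict) -> None:
    # mirrors SQLiteDB.overwrite_categories: full replace of the table, then commit
    pd.DataFrame({'asset': list(categories.keys()),
                  'underlying': list(categories.values())}
                 ).to_sql('categories', conn, index=False, if_exists='replace')
    conn.commit()

assert query_categories(conn) == {}                      # first call creates an empty table
overwrite_categories(conn, {'WETH': 'ETH', 'USDT': 'USDC'})
assert query_categories(conn) == {'WETH': 'ETH', 'USDT': 'USDC'}

On first use `query_categories` creates an empty table and returns `{}`; every "Override categorization" submit in pnl_explain.py then performs a full table replace via `overwrite_categories` followed by `upload_to_s3()`, which is how the edited mapping is persisted.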
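

On the consumer side, `PnlExplainer` no longer reads a YAML file itself: it is constructed with the dict returned by `query_categories`, and after this patch only the missing-category check remains active (the missing-underlying check is commented out). A small self-contained illustration of that remaining check, with `st.warning` swapped for `print` so it runs outside Streamlit and 'PEPE' standing in for any not-yet-categorized asset:

import pandas as pd

categories = {'WETH': 'ETH', 'USDT': 'USDC'}   # e.g. what query_categories() returned

def validate_categories(data: pd.DataFrame) -> bool:
    # same check as PnlExplainer.validate_categories after this patch
    if missing_category := set(data['asset']) - set(categories.keys()):
        print(f"Categories need to be updated. Please categorize the following assets: {missing_category}")
        return False
    return True

snapshot = pd.DataFrame({'asset': ['WETH', 'USDT', 'PEPE'], 'value': [10.0, 5.0, 1.0]})
assert validate_categories(snapshot) is False   # 'PEPE' has no underlying yet

When this returns False, the page prompts the user to extend the mapping through the data_editor form, which feeds back into `overwrite_categories` as sketched above.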