diff --git a/pnl_explain.py b/pnl_explain.py index 503e0bd..1872f22 100644 --- a/pnl_explain.py +++ b/pnl_explain.py @@ -40,19 +40,19 @@ st.session_state.pnl_explainer = PnlExplainer(st.session_state.plex_db.query_categories(),st.secrets['alchemy_key']) addresses = st.session_state.parameters['profile']['addresses'] -risk_tab, pnl_tab = st.tabs( - ["risk", "pnl"]) +risk_tab, risk_history_tab, pnl_tab, pnl_history_tab = st.tabs( + ["risk", "risk_history", "pnl", "pnl_history"]) with risk_tab: with st.form("snapshot_form"): - refresh_tab, historical_tab = st.columns(2) + historical_tab, refresh_tab = st.columns(2) + with historical_tab: + historical = st.form_submit_button("fetch historical date", help="fetch from db") + timestamp = prompt_snapshot_timestamp(st.session_state.plex_db, addresses) with refresh_tab: if refresh := st.form_submit_button("fetch live from debank", help="fetch from debank costs credits !"): timestamp = int(datetime.now().timestamp()) debank_credits = st.session_state.api.get_credits() - with historical_tab: - timestamp = prompt_snapshot_timestamp(st.session_state.plex_db, addresses) - historical = st.form_submit_button("fetch historical date", help="fetch from db") if refresh or historical: all_fetch = asyncio.run( @@ -70,10 +70,22 @@ download_db_button(st.session_state.plex_db, file_name='snapshot.db', label='Download database') - # dynamic categorization if 'snapshot' in st.session_state: + # dynamic categorization if missing_category := set(st.session_state.snapshot['asset']) - set(st.session_state.pnl_explainer.categories.keys()): st.warning(f"New underlyings {missing_category} -> Edit 'underlying' below to group exposures by underlying") + with st.form("categorization_form"): + # categorization + st.write("Edit 'underlying' below to group exposures by underlying") + categorization = pd.DataFrame({'underlying': {coin: coin for coin in missing_category} + | st.session_state.pnl_explainer.categories}) + categorization['exposure'] = 
st.session_state.snapshot.groupby('asset').sum()['value'] + edited_categorization = st.data_editor(categorization, use_container_width=True)['underlying'].to_dict() + if st.form_submit_button("Override categorization"): + st.session_state.pnl_explainer.categories = edited_categorization + st.session_state.plex_db.overwrite_categories(edited_categorization) + st.session_state.plex_db.upload_to_s3() + st.success("Categories updated (not exposure!)") # display risk st.write("Risk pivot table: group exposures by underlying") @@ -89,102 +101,108 @@ download_button(st.session_state.snapshot, file_name='snapshot.csv', label='Download snapshot') - with st.form("categorization_form"): - # categorization - st.write("Edit 'underlying' below to group exposures by underlying") - categorization = pd.DataFrame({'underlying': {coin: coin for coin in missing_category} - | st.session_state.pnl_explainer.categories}) - categorization['exposure'] = st.session_state.snapshot.groupby('asset').sum()['value'] - edited_categorization = st.data_editor(categorization, use_container_width=True)['underlying'].to_dict() - if st.form_submit_button("Override categorization"): - st.session_state.pnl_explainer.categories = edited_categorization - st.session_state.plex_db.overwrite_categories(edited_categorization) - st.session_state.plex_db.upload_to_s3() - st.success("Categories updated (not exposure!)") +with risk_history_tab: + risk_start_timestamp, risk_end_timestamp = prompt_plex_interval(st.session_state.plex_db, addresses, nonce='risk', default_dt=timedelta(days=7)) + # snapshots + risk_snapshots_within = st.session_state.plex_db.query_table_between(st.session_state.parameters['profile']['addresses'], + risk_start_timestamp, risk_end_timestamp, "snapshots") + risk_snapshots_within['timestamp'] = pd.to_datetime(risk_snapshots_within['timestamp'], unit='s', utc=True) + risk_snapshots_within['underlying'] = risk_snapshots_within['asset'].map( + st.session_state.pnl_explainer.categories) + ''' + 
plot timeseries of explain by some stacked_columns + ''' + categoricals = ['underlying', 'asset', 'protocol', 'chain', 'hold_mode', 'type'] + values = ['value'] + rows = ['timestamp'] + granularity_field = st.selectbox("granularity field", categoricals, index=2) + totals = pd.pivot_table(risk_snapshots_within, values=values, columns=[granularity_field], index=rows, aggfunc='sum') + totals = totals.stack().reset_index() + + fig = px.bar(totals, x=rows[0], y=values[0], + color=granularity_field, title='value', + barmode='stack') + min_dt = 4 * 3600 # 4h + fig.update_traces(width=min_dt * 1000) + st.plotly_chart(fig, use_container_width=True) + + download_button(risk_snapshots_within, file_name='risk_history.csv', label='Download risk history') with pnl_tab: - start_timestamp, end_timestamp = prompt_plex_interval(st.session_state.plex_db, addresses) - - details_tab, history_tab = st.tabs(["details", "history"]) - - with details_tab: - ## display_pivot plex - st.subheader("Pnl Explain") - - st.latex(r'PnL_{\text{full delta}} = \sum (P_{\text{asset}}^1-P_{\text{asset}}^0) \times N^{\text{start}}') - st.latex(r'PnL_{\text{delta}} = \sum (\frac{P_{\text{underlying}}^1}{P_{\text{underlying}}^0}-1) \times N^{\text{start}} \frac{P_{\text{underlying}}^0}{P_{\text{asset}}^0}') - st.latex(r'PnL_{\text{basis}} = PnL_{\text{full delta}} - PnL_{\text{delta}}') - st.latex(r'PnL_{\text{amt\_chng}} = \sum \Delta N \times P^{\text{end}}') - - start_snapshot = st.session_state.plex_db.query_table_at(addresses, start_timestamp, "snapshots") - end_snapshot = st.session_state.plex_db.query_table_at(addresses, end_timestamp, "snapshots") - st.session_state.plex = st.session_state.pnl_explainer.explain(start_snapshot=start_snapshot, end_snapshot=end_snapshot) - - display_pivot(st.session_state.plex.loc[st.session_state.plex['pnl'].apply(lambda x: abs(x) > start_snapshot['value'].sum() * 1e-4)], - rows=['underlying', 
'chain', 'hold_mode', 'type']) - - ## display_pivot transactions - st.subheader("Transactions") - - transactions = st.session_state.plex_db.query_table_between(addresses, start_timestamp, end_timestamp, "transactions") - st.session_state.transactions = st.session_state.pnl_explainer.format_transactions(start_timestamp, end_timestamp, transactions) - st.session_state.transactions.rename(columns={'pnl': 'value'}, inplace=True) - display_pivot(st.session_state.transactions, - rows=['underlying', 'asset'], - columns=['type'], - values=['gas', 'value'], - hidden=['id', 'protocol', 'chain']) - - if 'plex' in st.session_state: - plex_download_col, tx_download_col = st.columns(2) - with plex_download_col: - download_button(st.session_state.plex, file_name='plex.csv', label="Download plex data") - with tx_download_col: - download_button(st.session_state.transactions, file_name='tx.csv', label="Download tx data") - st.session_state.transactions.to_csv('tx.csv') - - with history_tab: - # snapshots - snapshots_within = st.session_state.plex_db.query_table_between(st.session_state.parameters['profile']['addresses'], start_timestamp, end_timestamp, "snapshots") - # explains and transactions btw snapshots - explain_list = [] - transactions_list = [] - for start, end in zip( - snapshots_within['timestamp'].unique()[:-1], - snapshots_within['timestamp'].unique()[1:], - ): - start_snapshots = snapshots_within[snapshots_within['timestamp'] == start] - end_snapshots = snapshots_within[snapshots_within['timestamp'] == end] - explain = st.session_state.pnl_explainer.explain(start_snapshots, end_snapshots) - explain_list.append(explain) - - # transactions - transactions = st.session_state.plex_db.query_table_between(addresses, start, end, "transactions") - transactions = st.session_state.pnl_explainer.format_transactions(start, end, transactions) - transactions_list.append(transactions) - explains = pd.concat(explain_list, axis=0, ignore_index=True) - tx_pnl = 
pd.concat(transactions_list, axis=0, ignore_index=True) - - ''' - plot timeseries of explain by some staked_columns - ''' - categoricals = ['underlying', 'asset', 'protocol', 'pnl_bucket', 'chain', 'hold_mode', 'type'] - values = ['pnl'] - rows = ['timestamp_end'] - granularity_field = st.selectbox("granularity field", categoricals, index=0) - totals = pd.pivot_table(explains, values=values, columns=[granularity_field], index=rows, aggfunc='sum').cumsum() - totals = totals.stack().reset_index() - - fig = px.bar(totals, x='timestamp_end', y='pnl', - color=granularity_field, title='cum_pnl', - barmode='stack') - min_dt = 4*3600 # 4h - fig.update_traces(width=min_dt*1000) - st.plotly_chart(fig, use_container_width=True) - - if 'history' in st.session_state: - download_button(st.session_state.history, file_name='history.csv', label="Download history data") + pnl_start_timestamp, pnl_end_timestamp = prompt_plex_interval(st.session_state.plex_db, addresses, nonce='pnl', default_dt=timedelta(days=1)) + + ## display_pivot plex + st.subheader("Pnl Explain") + + st.latex(r'PnL_{\text{full delta}} = \sum (P_{\text{asset}}^1-P_{\text{asset}}^0) \times N^{\text{start}}') + st.latex(r'PnL_{\text{delta}} = \sum (\frac{P_{\text{underlying}}^1}{P_{\text{underlying}}^0}-1) \times N^{\text{start}} \frac{P_{\text{underlying}}^0}{P_{\text{asset}}^0}') + st.latex(r'PnL_{\text{basis}} = PnL_{\text{full delta}} - PnL_{\text{delta}}') + st.latex(r'PnL_{\text{amt\_chng}} = \sum \Delta N \times P^{\text{end}}') + + start_snapshot = st.session_state.plex_db.query_table_at(addresses, pnl_start_timestamp, "snapshots") + end_snapshot = st.session_state.plex_db.query_table_at(addresses, pnl_end_timestamp, "snapshots") + st.session_state.plex = st.session_state.pnl_explainer.explain(start_snapshot=start_snapshot, end_snapshot=end_snapshot) + + display_pivot(st.session_state.plex.loc[st.session_state.plex['pnl'].apply(lambda x: abs(x) > start_snapshot['value'].sum() * 1e-4)], + rows=['underlying', 
'asset'], + columns=['pnl_bucket'], + values=['pnl'], + hidden=['protocol', 'chain', 'hold_mode', 'type']) + + download_button(st.session_state.plex, file_name='plex.csv', label='Download pnl explain') + + ## display_pivot transactions + st.subheader("Transactions") + + transactions = st.session_state.plex_db.query_table_between(addresses, pnl_start_timestamp, pnl_end_timestamp, "transactions") + st.session_state.transactions = st.session_state.pnl_explainer.format_transactions(pnl_start_timestamp, pnl_end_timestamp, transactions) + st.session_state.transactions.rename(columns={'pnl': 'value'}, inplace=True) + display_pivot(st.session_state.transactions, + rows=['underlying', 'asset'], + columns=['type'], + values=['gas', 'value'], + hidden=['id', 'protocol', 'chain']) + + download_button(st.session_state.transactions, file_name='tx.csv', label="Download tx data") + +with pnl_history_tab: + # snapshots + pnl_snapshots_within = st.session_state.plex_db.query_table_between(st.session_state.parameters['profile']['addresses'], pnl_start_timestamp, pnl_end_timestamp, "snapshots") + # explains and transactions btw snapshots + explain_list = [] + transactions_list = [] + for start, end in zip( + pnl_snapshots_within['timestamp'].unique()[:-1], + pnl_snapshots_within['timestamp'].unique()[1:], + ): + start_snapshots = pnl_snapshots_within[pnl_snapshots_within['timestamp'] == start] + end_snapshots = pnl_snapshots_within[pnl_snapshots_within['timestamp'] == end] + explain = st.session_state.pnl_explainer.explain(start_snapshots, end_snapshots) + explain_list.append(explain) + + # transactions + transactions = st.session_state.plex_db.query_table_between(addresses, start, end, "transactions") + transactions = st.session_state.pnl_explainer.format_transactions(start, end, transactions) + transactions_list.append(transactions) + explains = pd.concat(explain_list, axis=0, ignore_index=True) + tx_pnl = pd.concat(transactions_list, axis=0, ignore_index=True) + + ''' + plot 
timeseries of explain by some stacked_columns + ''' + categoricals = ['underlying', 'asset', 'protocol', 'pnl_bucket', 'chain', 'hold_mode', 'type'] + values = ['pnl'] + rows = ['timestamp_end'] + granularity_field = st.selectbox("granularity field", categoricals, index=0) + totals = pd.pivot_table(explains, values=values, columns=[granularity_field], index=rows, aggfunc='sum').cumsum() + totals = totals.stack().reset_index() + + fig = px.bar(totals, x=rows[0], y=values[0], + color=granularity_field, title='cum_pnl', + barmode='stack') + min_dt = 4*3600 # 4h + fig.update_traces(width=min_dt*1000) + st.plotly_chart(fig, use_container_width=True) + + download_button(pnl_snapshots_within, file_name='snapshot.csv', label='Download pnl history') diff --git a/utils/streamlit_utils.py b/utils/streamlit_utils.py index c93a601..8507fac 100644 --- a/utils/streamlit_utils.py +++ b/utils/streamlit_utils.py @@ -69,15 +69,15 @@ def prompt_snapshot_timestamp(plex_db: SQLiteDB, addresses: list[str]) -> int: return int(timestamp) -def prompt_plex_interval(plex_db: SQLiteDB, addresses: list[str]) -> tuple[int, int]: +def prompt_plex_interval(plex_db: SQLiteDB, addresses: list[str], nonce: str='', default_dt=timedelta(days=7)) -> tuple[int, int]: date_col, time_col = st.columns(2) now_datetime = datetime.now() with time_col: - start_time = st.time_input("start time", value=now_datetime.time()) - end_time = st.time_input("end time", value=now_datetime.time()) + start_time = st.time_input("start time", value=now_datetime.time(), key=f'st_{nonce}') + end_time = st.time_input("end time", value=now_datetime.time(), key=f'et_{nonce}') with date_col: - start_date = st.date_input("start date", value=now_datetime - timedelta(days=1)) - end_date = st.date_input("end date", value=now_datetime) + start_date = st.date_input("start date", value=now_datetime - default_dt, key=f'sd_{nonce}') + end_date = st.date_input("end date", value=now_datetime, key=f'ed_{nonce}') start_datetime = 
datetime.combine(start_date, start_time) end_datetime = datetime.combine(end_date, end_time)