more ux
daviidarr committed Apr 5, 2024
1 parent 7768055 commit 4e83a99
Showing 4 changed files with 95 additions and 60 deletions.
15 changes: 12 additions & 3 deletions plex/debank_api.py
@@ -20,6 +20,13 @@ def __init__(self, json_db: RawDataDB, plex_db: PlexDB, parameters: Dict[str, An
self.json_db: RawDataDB = json_db
self.plex_db: PlexDB = plex_db

def get_credits(self) -> float:
response = requests.get(f'{self.api_url}/account/units',
headers={
"accept": "application/json",
"AccessKey": self.parameters['profile']['debank_key'],
})
return response.json()['balance']
async def _fetch_snapshot(self, address: str, write_to_json=True) -> dict:
'''
Fetches the position snapshot for a given address from the Debank API
@@ -215,13 +222,13 @@ def append_leg(leg, side):
'project_id'] else
leg['to_addr' if side == -1 else 'from_addr'],
'gas': tx['tx']['usd_gas_fee'] if 'usd_gas_fee' in tx['tx'] else 0.0,
'action': tx['tx']['name'],
'type': tx['tx']['name'],
'asset': leg['token_id'],
'amount': leg['amount'] * side}
if leg['token_id'] in transactions['token_dict']:
if transactions['token_dict'][leg['token_id']]['price']:
result['price'] = transactions['token_dict'][leg['token_id']]['price']
result['value'] = leg['amount'] * result['price'] * side
result['pnl'] = leg['amount'] * result['price'] * side
return result


@@ -232,5 +239,7 @@ def append_leg(leg, side):
for cur_leg in tx['sends']:
result.append(append_leg(cur_leg, -1))

return pd.DataFrame(result)
df = pd.DataFrame(result)
df['pnl'] = df['pnl'] - df['gas']
return df
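
The new get_credits helper reads the remaining balance from Debank's /account/units endpoint; pnl_explain.py (below) uses it to report how many credits a refresh consumed. A minimal sketch of that pattern, assuming an already-initialised DebankAPI instance named api, a placeholder wallet address, and the same 200 USD per 1e6 units conversion used in the UI code:

import asyncio

# api: an already-initialised DebankAPI (construction omitted); address: placeholder only.
address = "0xYourWallet"
credits_before = api.get_credits()                       # Debank unit balance before the paid call
asyncio.run(api.fetch_snapshot(address, refresh=True))   # the public wrapper called from pnl_explain.py
credits_after = api.get_credits()
print(f"Debank credits used: {(credits_before - credits_after) * 200 / 1e6} $")
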

24 changes: 12 additions & 12 deletions plex/plex.py
@@ -24,9 +24,7 @@ def validate_categories(self, data) -> bool:
return False
return True

def explain(self, start_snapshot: pd.DataFrame, end_snapshot: pd.DataFrame, transactions: pd.DataFrame = pd.DataFrame()) -> DataFrame | \
tuple[
DataFrame, Any]:
def explain(self, start_snapshot: pd.DataFrame, end_snapshot: pd.DataFrame) -> DataFrame:
snapshot_start = start_snapshot.set_index([col for col in start_snapshot.columns if col not in ['price', 'amount', 'value', 'timestamp']])
snapshot_end = end_snapshot.set_index([col for col in end_snapshot.columns if col not in ['price', 'amount', 'value', 'timestamp']])
data = snapshot_start.join(snapshot_end, how='outer', lsuffix='_start', rsuffix='_end')
@@ -57,14 +55,6 @@ def explain(self, start_snapshot: pd.DataFrame, end_snapshot: pd.DataFrame, tran
amt_chng_pnl['pnl_bucket'] = 'amt_chng'
amt_chng_pnl['pnl'] = (data['amount_end'] - data['amount_start']) * data['price_end']

tx_pnl = transactions.groupby(by=['chain', 'protocol', 'action', 'asset']).sum()['value'].reset_index()
tx_pnl['pnl_bucket'] = 'tx_pnl'
tx_pnl['timestamp_start'] = start_snapshot['timestamp'].values[0]
tx_pnl['timestamp_end'] = end_snapshot['timestamp'].values[0]
tx_pnl['type'] = tx_pnl['action']
tx_pnl['underlying'] = tx_pnl['asset']
tx_pnl.rename(columns={'value': 'pnl', 'action': 'hold_mode'}, inplace=True)

assert (data['value_end'] - data['value_start'] - delta_pnl['pnl'] - basis_pnl['pnl'] - amt_chng_pnl['pnl']).apply(abs).max() < 1, \
"something doesn't add up..."

@@ -73,4 +63,14 @@
result['timestamp_start'] = result['timestamp_start'].apply(
lambda x: datetime.fromtimestamp(x, tz=timezone.utc))

return result, tx_pnl
return result

def format_transactions(self, start_snapshot_timestamp: int, end_snapshot_timestamp: int, transactions: pd.DataFrame) -> pd.DataFrame:
tx_pnl = transactions.groupby(by=['chain', 'protocol', 'type', 'asset']).sum()[['pnl', 'gas']].reset_index()
tx_pnl['pnl_bucket'] = 'tx_pnl'
tx_pnl['timestamp_start'] = start_snapshot_timestamp
tx_pnl['timestamp_end'] = end_snapshot_timestamp
tx_pnl['hold_mode'] = tx_pnl['type']
tx_pnl['underlying'] = tx_pnl['asset']

return tx_pnl
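
The three buckets computed by explain (delta, basis and amount change; see the LaTeX formulas in pnl_explain.py below) are constructed so that they sum exactly to the change in position value, which is what the assert above verifies. A toy, single-position sketch with made-up numbers:

# Illustrative numbers only; the names mirror the *_start / *_end columns explain() builds.
p_under_start, p_under_end = 1.00, 1.10    # underlying price
p_asset_start, p_asset_end = 1.02, 1.15    # asset price
n_start, n_end = 100.0, 120.0              # amount held

delta_pnl = (p_under_end - p_under_start) * n_start                                     # 10.0
basis_pnl = ((p_asset_end - p_under_end) - (p_asset_start - p_under_start)) * n_start   # 3.0
amt_chng_pnl = (n_end - n_start) * p_asset_end                                           # 23.0

value_change = n_end * p_asset_end - n_start * p_asset_start                             # 36.0
assert abs(value_change - (delta_pnl + basis_pnl + amt_chng_pnl)) < 1e-9
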
94 changes: 50 additions & 44 deletions pnl_explain.py
@@ -20,7 +20,8 @@
st.set_page_config(layout="wide")
st.session_state.set_config =True

from utils.streamlit_utils import load_parameters, prompt_plex_interval, display_pivot
from utils.streamlit_utils import load_parameters, prompt_plex_interval, display_pivot, download_button, \
download_db_button

pd.options.mode.chained_assignment = None
st.session_state.parameters = load_parameters()
@@ -44,16 +45,22 @@

with st.sidebar.form("snapshot_form"):
refresh = st.form_submit_button("fetch from debank", help="fetch from debank costs credits !")
if refresh:
debank_credits = st.session_state.api.get_credits()
all_fetch = asyncio.run(safe_gather([st.session_state.api.fetch_snapshot(address, refresh=refresh)
for address in addresses] +
[st.session_state.api.fetch_transactions(address)
for address in addresses if refresh],
n=st.session_state.parameters['run_parameters']['async']['gather_limit']))
snapshots = all_fetch[:len(addresses)]
if refresh:
st.write(f"Debank credits used: {(debank_credits-st.session_state.api.get_credits())*200/1e6} $")
st.session_state.plex_db.upload_to_s3()

snapshots = all_fetch[:len(addresses)]
st.session_state.snapshot = pd.concat(snapshots, axis=0, ignore_index=True)

download_db_button(st.session_state.plex_db, file_name='snapshot.db', label='Download database')

with risk_tab:
# dynamic categorization
if 'snapshot' in st.session_state:
@@ -72,14 +79,7 @@
values=['value'],
hidden=['hold_mode', 'type', 'price', 'amount'])

st.session_state.snapshot.to_csv('temp.csv')
with open('temp.csv', "rb") as file:
st.download_button(
label="Download risk data",
data=file,
file_name='temp.csv',
mime='text/csv',
)
download_button(st.session_state.snapshot, file_name='snapshot.csv', label='Download snapshot')

with st.form("categorization_form"):
# categorization
@@ -100,59 +100,72 @@
details_tab, history_tab = st.tabs(["details", "history"])

with details_tab:
# snapshots
start_snapshot = st.session_state.plex_db.query_table_at(addresses, start_timestamp, "snapshots")
end_snapshot = st.session_state.plex_db.query_table_at(addresses, end_timestamp, "snapshots")
# transactions
transactions = st.session_state.plex_db.query_table_between(addresses, start_timestamp, end_timestamp, "transactions")
## display_pivot plex
st.subheader("Pnl Explain")

# perform pnl explain
st.latex(r'PnL_{\text{delta}} = \sum \Delta P_{\text{underlying}} \times N^{\text{start}}')
st.latex(r'PnL_{\text{basis}} = \sum \Delta (P_{\text{asset}}-P_{\text{underlying}}) \times N^{\text{start}}')
st.latex(r'PnL_{\text{amt\_chng}} = \sum \Delta N \times P^{\text{end}}')
st.session_state.plex = st.session_state.pnl_explainer.explain(start_snapshot=start_snapshot, end_snapshot=end_snapshot, transactions=transactions)

start_snapshot = st.session_state.plex_db.query_table_at(addresses, start_timestamp, "snapshots")
end_snapshot = st.session_state.plex_db.query_table_at(addresses, end_timestamp, "snapshots")
st.session_state.plex = st.session_state.pnl_explainer.explain(start_snapshot=start_snapshot, end_snapshot=end_snapshot)

display_pivot(st.session_state.plex,
rows=['underlying', 'asset'],
columns=['pnl_bucket'],
values=['pnl'],
hidden=['protocol', 'chain', 'hold_mode', 'type'])

## display_pivot transactions
st.subheader("Transactions")

transactions = st.session_state.plex_db.query_table_between(addresses, start_timestamp, end_timestamp, "transactions")
st.session_state.transactions = st.session_state.pnl_explainer.format_transactions(start_timestamp, end_timestamp, transactions)

display_pivot(st.session_state.transactions,
rows=['asset'],
columns=['type'],
values=['gas', 'pnl'],
hidden=['protocol', 'chain'])

if 'plex' in st.session_state:
st.session_state.plex.to_csv('temp.csv')
with open('temp.csv', "rb") as file:
st.download_button(
label="Download plex data",
data=file,
file_name='temp.csv',
mime='text/csv',
)
plex_download_col, tx_download_col = st.columns(2)
with plex_download_col:
download_button(st.session_state.plex, file_name='plex.csv', label="Download plex data")
with tx_download_col:
download_button(st.session_state.transactions, file_name='tx.csv', label="Download tx data")
st.session_state.transactions.to_csv('tx.csv')

with history_tab:
# snapshots
snapshots_within = st.session_state.plex_db.query_table_between(st.session_state.parameters['profile']['addresses'], start_timestamp, end_timestamp, "snapshots")
# explains btw snapshots
explains = []
tx_pnl = []
# explains and transactions btw snapshots
explain_list = []
transactions_list = []
for start, end in zip(
snapshots_within['timestamp'].unique()[:-1],
snapshots_within['timestamp'].unique()[1:],
):
start_snapshots = snapshots_within[snapshots_within['timestamp'] == start]
end_snapshots = snapshots_within[snapshots_within['timestamp'] == end]
explain = st.session_state.pnl_explainer.explain(start_snapshots, end_snapshots)
explain_list.append(explain)

# transactions
transactions = st.session_state.plex_db.query_table_between(addresses, start, end, "transactions")

explain = st.session_state.pnl_explainer.explain(start_snapshots, end_snapshots, transactions)
explains.append(explain[0])
tx_pnl.append(explain[1])
explains = pd.concat(explains, axis=0, ignore_index=True)

# plot timeseries of pnl by some staked_columns
transactions = st.session_state.pnl_explainer.format_transactions(start, end, transactions)
transactions_list.append(transactions)
explains = pd.concat(explain_list, axis=0, ignore_index=True)
tx_pnl = pd.concat(transactions_list, axis=0, ignore_index=True)

'''
plot timeseries of explain by some staked_columns
'''
categoricals = ['underlying', 'asset', 'protocol', 'pnl_bucket', 'chain', 'hold_mode', 'type']
values = ['pnl']
rows = ['timestamp_end']
granularity_field = st.selectbox("granularity field", categoricals, index=0)
relevant_columns = categoricals + values + rows
totals = pd.pivot_table(explains, values=values, columns=[granularity_field], index=rows, aggfunc='sum').cumsum()
totals = totals.stack().reset_index()

@@ -164,12 +177,5 @@
st.plotly_chart(fig, use_container_width=True)

if 'history' in st.session_state:
st.session_state.history.to_csv('temp.csv')
with open('temp.csv', "rb") as file:
st.download_button(
label="Download history data",
data=file,
file_name='temp.csv',
mime='text/csv',
)
download_button(st.session_state.history, file_name='history.csv', label="Download history data")
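
The history tab builds its chart by pivoting the concatenated per-interval explains on one categorical field and cumulating over snapshot end times. A self-contained sketch of that pivot with toy data (the px.bar call is an assumption; the hunk only shows st.plotly_chart(fig, use_container_width=True)):

import pandas as pd
import plotly.express as px

explains = pd.DataFrame({
    'timestamp_end': ['2024-04-01', '2024-04-02', '2024-04-02'],
    'underlying': ['ETH', 'ETH', 'USDC'],
    'pnl': [10.0, -4.0, 2.5],
})
granularity_field = 'underlying'
totals = pd.pivot_table(explains, values=['pnl'], columns=[granularity_field],
                        index=['timestamp_end'], aggfunc='sum').cumsum()
totals = totals.stack().reset_index()          # back to long format: timestamp_end, underlying, pnl
fig = px.bar(totals, x='timestamp_end', y='pnl', color=granularity_field)   # stacked bars per interval
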

22 changes: 21 additions & 1 deletion utils/streamlit_utils.py
@@ -72,7 +72,7 @@ def prompt_plex_interval(plex_db: PlexDB, addresses: list[str]) -> tuple[int, in
end_timestamp = next((ts for ts in sorted(timestamps, reverse=False)
if ts >= end_datetime.timestamp()), max(timestamps))

st.write(f"Actual dates of snapshots: {pd.to_datetime(start_timestamp, utc=True)}, {pd.to_datetime(end_timestamp, utc=True)}")
st.write(f"Actual dates of snapshots: {datetime.fromtimestamp(start_timestamp)}, {datetime.fromtimestamp(end_timestamp)}")

return start_timestamp, end_timestamp

@@ -118,3 +118,23 @@ def display_pivot(grid: pd.DataFrame, rows: list[str], columns: list[str], value
grid[values] = grid[values].astype(int)
grid = grid.sort_values(by=values[0], ascending=False)
AgGrid(grid, gridOptions=go)


def download_button(df: pd.DataFrame, label: str, file_name: str, file_type='text/csv'):
df.to_csv(file_name)
with open(file_name, "rb") as file:
st.download_button(
label=label,
data=file,
file_name=file_name,
mime=file_type
)

def download_db_button(db: PlexDB, label: str, file_name: str, file_type='application/x-sqlite3'):
with open(db.data_location['local_file'], "rb") as file:
st.sidebar.download_button(
label=label,
data=file,
file_name=file_name,
mime=file_type
)
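
For reference, a minimal usage sketch of the two new helpers from a Streamlit page; the DataFrame and the st.session_state.plex_db handle are stand-ins, and the calls mirror the ones added in pnl_explain.py:

import pandas as pd
import streamlit as st
from utils.streamlit_utils import download_button, download_db_button

df = pd.DataFrame({'asset': ['ETH'], 'pnl': [10.0]})                # stand-in data
download_button(df, file_name='snapshot.csv', label='Download snapshot')
download_db_button(st.session_state.plex_db, file_name='snapshot.db',
                   label='Download database')                       # renders in the sidebar
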
