diff --git a/gs_quant/api/gs/backtests_xasset/json_encoders/request_encoders.py b/gs_quant/api/gs/backtests_xasset/json_encoders/request_encoders.py index 1990ccae..9fdafa63 100644 --- a/gs_quant/api/gs/backtests_xasset/json_encoders/request_encoders.py +++ b/gs_quant/api/gs/backtests_xasset/json_encoders/request_encoders.py @@ -49,4 +49,4 @@ def legs_decoder(data: Any): def legs_encoder(data: Iterable[Instrument]): - return [i.as_dict() for i in data] + return [i.to_dict() for i in data] diff --git a/gs_quant/backtests/actions.py b/gs_quant/backtests/actions.py index 69d271f1..1b10f710 100644 --- a/gs_quant/backtests/actions.py +++ b/gs_quant/backtests/actions.py @@ -101,6 +101,7 @@ class AddTradeAction(Action): :param trade_duration: an instrument attribute eg. 'expiration_date' or a date or a tenor or timedelta if left as None the trade will be added for all future dates + can also specify 'next schedule' in order to exit at the next periodic trigger date :param name: optional additional name to the priceable name :param transaction_cost: optional a cash amount paid for each transaction, paid on both enter and exit """ @@ -139,11 +140,12 @@ def dated_priceables(self): return self._dated_priceables -AddTradeActionInfo = namedtuple('AddTradeActionInfo', 'scaling') +AddTradeActionInfo = namedtuple('AddTradeActionInfo', ['scaling', 'next_schedule']) EnterPositionQuantityScaledActionInfo = namedtuple('EnterPositionQuantityScaledActionInfo', 'not_applicable') -HedgeActionInfo = namedtuple('HedgeActionInfo', 'not_applicable') +HedgeActionInfo = namedtuple('HedgeActionInfo', 'next_schedule') ExitTradeActionInfo = namedtuple('ExitTradeActionInfo', 'not_applicable') RebalanceActionInfo = namedtuple('RebalanceActionInfo', 'not_applicable') +AddScaledTradeActionInfo = namedtuple('AddScaledTradeActionInfo', 'next_schedule') @dataclass_json @@ -158,6 +160,7 @@ class AddScaledTradeAction(Action): :param trade_duration: an instrument attribute eg. 
'expiration_date' or a date or a tenor or timedelta if left as None the trade will be added for all future dates + can also specify 'next schedule' in order to exit at the next periodic trigger date :param name: optional additional name to the priceable name :param scaling_type: the type of scaling we are doing :param scaling_risk: if the scaling type is a measure then this is the definition of the measure @@ -263,6 +266,23 @@ def __post_init__(self): @dataclass_json @dataclass class HedgeAction(Action): + + """ + create an action which adds a hedge trade when triggered. This trade will be scaled to hedge the risk + specified. The trades are resolved on the trigger date (state) and + last until the trade_duration if specified or for all future dates if not. + :param risk: a risk measure which should be hedged + :param priceables: a priceable or a list of pricables these should have sensitivity to the risk. + :param trade_duration: an instrument attribute eg. 'expiration_date' or a date or a tenor or timedelta + if left as None the + trade will be added for all future dates + can also specify 'next schedule' in order to exit at the next periodic trigger date + :param name: optional additional name to the priceable name + :param transaction_cost: optional a transaction cost model, paid on both enter and exit + :param risk_transformation: optional a Transformer which will be applied to the raw risk numbers before hedging + :param holiday_calendar: optional an iterable list of holiday dates + """ + risk: RiskMeasure = field(default=None, metadata=config(decoder=decode_risk_measure, encoder=encode_risk_measure)) priceables: Optional[Priceable] = field(default=None, metadata=config(decoder=decode_named_instrument, diff --git a/gs_quant/backtests/backtest_utils.py b/gs_quant/backtests/backtest_utils.py index bb9427a4..eed65d4f 100644 --- a/gs_quant/backtests/backtest_utils.py +++ b/gs_quant/backtests/backtest_utils.py @@ -43,7 +43,7 @@ def make_list(thing): 
final_date_cache = {} -def get_final_date(inst, create_date, duration, holiday_calendar=None): +def get_final_date(inst, create_date, duration, holiday_calendar=None, trigger_info=None): global final_date_cache cache_key = (inst, create_date, duration, holiday_calendar) if cache_key in final_date_cache: @@ -58,6 +58,10 @@ def get_final_date(inst, create_date, duration, holiday_calendar=None): if hasattr(inst, str(duration)): final_date_cache[cache_key] = getattr(inst, str(duration)) return getattr(inst, str(duration)) + if str(duration).lower() == 'next schedule': + if hasattr(trigger_info, 'next_schedule'): + return trigger_info.next_schedule or dt.date.max + raise RuntimeError('Next schedule not supported by action') final_date_cache[cache_key] = RelativeDate(duration, create_date).apply_rule(holiday_calendar=holiday_calendar) return final_date_cache[cache_key] diff --git a/gs_quant/backtests/generic_engine.py b/gs_quant/backtests/generic_engine.py index 3d4b1838..33a0b448 100644 --- a/gs_quant/backtests/generic_engine.py +++ b/gs_quant/backtests/generic_engine.py @@ -25,12 +25,15 @@ from gs_quant import risk from gs_quant.backtests.action_handler import ActionHandlerBaseFactory, ActionHandler -from gs_quant.backtests.actions import Action, AddTradeAction, HedgeAction, EnterPositionQuantityScaledAction, \ - AddTradeActionInfo, HedgeActionInfo, ExitTradeAction, ExitTradeActionInfo, EnterPositionQuantityScaledActionInfo, \ - RebalanceAction, RebalanceActionInfo, ExitAllPositionsAction, AddScaledTradeAction, ScalingActionType +from gs_quant.backtests.actions import (Action, AddTradeAction, HedgeAction, EnterPositionQuantityScaledAction, + AddTradeActionInfo, HedgeActionInfo, ExitTradeAction, ExitTradeActionInfo, + EnterPositionQuantityScaledActionInfo, RebalanceAction, RebalanceActionInfo, + ExitAllPositionsAction, AddScaledTradeAction, ScalingActionType, + AddScaledTradeActionInfo) from gs_quant.backtests.backtest_engine import BacktestBaseEngine from 
gs_quant.backtests.backtest_objects import BackTest, ScalingPortfolio, CashPayment, Hedge from gs_quant.backtests.backtest_utils import make_list, CalcType, get_final_date +from gs_quant.common import AssetClass from gs_quant.common import ParameterisedRiskMeasure, RiskMeasure from gs_quant.context_base import nullcontext from gs_quant.datetime.relative_date import RelativeDateSchedule @@ -39,7 +42,6 @@ from gs_quant.risk import Price from gs_quant.risk.results import PortfolioRiskResult from gs_quant.target.backtests import BacktestTradingQuantityType -from gs_quant.common import AssetClass from gs_quant.target.measures import ResolvedInstrumentValues from gs_quant.tracing import Tracer @@ -74,7 +76,7 @@ def _raise_order(self, final_orders = {} for d, p in orders.items(): new_port = Portfolio([t.clone(name=f'{t.name}_{d}') for t in p[0].result()]) - final_orders[d] = new_port.scale(None if p[1] is None else p[1].scaling, in_place=False) + final_orders[d] = (new_port.scale(None if p[1] is None else p[1].scaling, in_place=False), p[1]) return final_orders @@ -86,12 +88,13 @@ def apply_action(self, orders = self._raise_order(state, trigger_info) # record entry and unwind cashflows - for create_date, portfolio in orders.items(): + for create_date, (portfolio, info) in orders.items(): for inst in portfolio.all_instruments: backtest.cash_payments[create_date].append(CashPayment(inst, effective_date=create_date, direction=-1)) backtest.transaction_costs[create_date] -= self.action.transaction_cost.get_cost(create_date, backtest, trigger_info, inst) - final_date = get_final_date(inst, create_date, self.action.trade_duration, self.action.holiday_calendar) + final_date = get_final_date(inst, create_date, self.action.trade_duration, self.action.holiday_calendar, + info) backtest.cash_payments[final_date].append(CashPayment(inst, effective_date=final_date)) backtest.transaction_costs[final_date] -= self.action.transaction_cost.get_cost(final_date, backtest, @@ -169,9 +172,8 
@@ def _scale_order(self, orders, daily_risk, price_measure): raise RuntimeError(f'Scaling Type {self.action.scaling_type} not supported by engine') def _raise_order(self, - state: Union[date, Iterable[date]], + state_list: Iterable[date], price_measure: RiskMeasure): - state_list = make_list(state) orders = {} order_valuations = (ResolvedInstrumentValues,) if self.action.scaling_type == ScalingActionType.risk_measure: @@ -201,18 +203,24 @@ def _raise_order(self, def apply_action(self, state: Union[date, Iterable[date]], backtest: BackTest, - trigger_info: Optional[Union[EnterPositionQuantityScaledActionInfo, - Iterable[EnterPositionQuantityScaledActionInfo]]] = None): + trigger_info: Optional[Union[AddScaledTradeActionInfo, + Iterable[AddScaledTradeActionInfo]]] = None): - orders = self._raise_order(state, backtest.price_measure) + state_list = make_list(state) + if trigger_info is None or isinstance(trigger_info, AddScaledTradeActionInfo): + trigger_info = [trigger_info for _ in range(len(state_list))] + orders = self._raise_order(state_list, backtest.price_measure) + trigger_infos = dict(zip_longest(state_list, trigger_info)) # record entry and unwind cashflows for create_date, portfolio in orders.items(): + info = trigger_infos[create_date] for inst in portfolio.all_instruments: backtest.cash_payments[create_date].append(CashPayment(inst, effective_date=create_date, direction=-1)) backtest.transaction_costs[create_date] -= self.action.transaction_cost.get_cost(create_date, backtest, trigger_info, inst) - final_date = get_final_date(inst, create_date, self.action.trade_duration, self.action.holiday_calendar) + final_date = get_final_date(inst, create_date, self.action.trade_duration, self.action.holiday_calendar, + info) backtest.cash_payments[final_date].append(CashPayment(inst, effective_date=final_date)) backtest.transaction_costs[final_date] -= self.action.transaction_cost.get_cost(final_date, backtest, @@ -372,19 +380,25 @@ def apply_action(self, state: 
Union[date, Iterable[date]], backtest: BackTest, trigger_info: Optional[Union[HedgeActionInfo, Iterable[HedgeActionInfo]]] = None): - with HistoricalPricingContext(dates=make_list(state), csa_term=self.action.csa_term): + state_list = make_list(state) + if trigger_info is None or isinstance(trigger_info, HedgeActionInfo): + trigger_info = [trigger_info for _ in range(len(state_list))] + trigger_infos = dict(zip_longest(state_list, trigger_info)) + + with HistoricalPricingContext(dates=state_list, csa_term=self.action.csa_term): backtest.calc_calls += 1 - backtest.calculations += len(make_list(state)) + backtest.calculations += len(state_list) f = Portfolio(self.action.priceable).resolve(in_place=False) for create_date, portfolio in f.result().items(): + info = trigger_infos[create_date] hedge_trade = portfolio.priceables[0] hedge_trade.name = f'{hedge_trade.name}_{create_date.strftime("%Y-%m-%d")}' if isinstance(hedge_trade, Portfolio): for instrument in hedge_trade.all_instruments: instrument.name = f'{hedge_trade.name}_{instrument.name}' final_date = get_final_date(hedge_trade, create_date, self.action.trade_duration, - self.action.holiday_calendar) + self.action.holiday_calendar, info) active_dates = [s for s in backtest.states if create_date <= s < final_date] if len(active_dates): @@ -711,10 +725,12 @@ def __run(self, strategy, start, end, frequency, states, risks, initial_value, r self._price_semi_det_triggers(backtest, risks) logger.info('Scaling semi-determ triggers and actions and calculating path dependent triggers and actions') - for d in strategy_pricing_dates: - with self._trace('Process date') as scope: + with self._trace('Process dates') as scope: + if scope: + scope.span.set_tag('dates.length', len(strategy_pricing_dates)) + for d in strategy_pricing_dates: if scope: - scope.span.set_tag('date', str(d)) + scope.span.log_kv({'date': str(d)}) self._process_triggers_and_actions_for_date(d, strategy, backtest, risks) with self._trace('Calc New Trades'): 
@@ -795,7 +811,7 @@ def _price_semi_det_triggers(self, backtest, risks): port = p.trade if isinstance(p.trade, Portfolio) else Portfolio([p.trade]) p.results = port.calc(tuple(risks)) - def _process_triggers_and_actions_for_date(self, d, strategy, backtest, risks): + def _process_triggers_and_actions_for_date(self, d, strategy, backtest: BackTest, risks): logger.debug(f'{d}: Processing triggers and actions') # path dependent for trigger in strategy.triggers: diff --git a/gs_quant/backtests/strategy.py b/gs_quant/backtests/strategy.py index 372fd99f..bcd7a4c5 100644 --- a/gs_quant/backtests/strategy.py +++ b/gs_quant/backtests/strategy.py @@ -14,23 +14,27 @@ under the License. """ -from dataclasses import dataclass -from dataclasses_json import dataclass_json, config +from dataclasses import dataclass, field from typing import Tuple, Optional, Union, Iterable -from gs_quant.backtests.triggers import * -from gs_quant.backtests.generic_engine import GenericEngine -from gs_quant.backtests.predefined_asset_engine import PredefinedAssetEngine -from gs_quant.backtests.equity_vol_engine import EquityVolEngine +from dataclasses_json import dataclass_json, config + +from gs_quant.backtests.backtest_utils import make_list +from gs_quant.backtests.triggers import Trigger from gs_quant.base import Priceable from gs_quant.json_convertors import decode_named_instrument, encode_named_instrument, dc_decode -backtest_engines = [GenericEngine(), PredefinedAssetEngine(), EquityVolEngine()] + +def _backtest_engines(): + from gs_quant.backtests.equity_vol_engine import EquityVolEngine + from gs_quant.backtests.generic_engine import GenericEngine + from gs_quant.backtests.predefined_asset_engine import PredefinedAssetEngine + return [GenericEngine(), PredefinedAssetEngine(), EquityVolEngine()] @dataclass_json @dataclass -class Strategy(object): +class Strategy: """ A strategy object on which one may run a backtest """ @@ -54,4 +58,4 @@ def get_risks(self): return risk_list def 
get_available_engines(self): - return [engine for engine in backtest_engines if engine.supports_strategy(self)] + return [engine for engine in _backtest_engines() if engine.supports_strategy(self)] diff --git a/gs_quant/backtests/triggers.py b/gs_quant/backtests/triggers.py index 47c6f0c1..48f44885 100644 --- a/gs_quant/backtests/triggers.py +++ b/gs_quant/backtests/triggers.py @@ -101,7 +101,14 @@ def get_trigger_times(self) -> [dt.date]: def has_triggered(self, state: dt.date, backtest: BackTest = None) -> TriggerInfo: if not self.trigger_dates: self.get_trigger_times() - return TriggerInfo(state in self.trigger_dates) + if state in self.trigger_dates: + next_state = None + if self.trigger_dates.index(state) != len(self.trigger_dates) - 1: + next_state = self.trigger_dates[self.trigger_dates.index(state) + 1] + return TriggerInfo(True, {AddTradeAction: AddTradeActionInfo(scaling=None, next_schedule=next_state), + AddScaledTradeAction: AddScaledTradeActionInfo(next_schedule=next_state), + HedgeAction: HedgeActionInfo(next_schedule=next_state)}) + return TriggerInfo(False) @dataclass_json diff --git a/gs_quant/documentation/04_backtesting/examples/03_GenericEngine/040314_chained_actions.ipynb b/gs_quant/documentation/04_backtesting/examples/03_GenericEngine/040314_chained_actions.ipynb new file mode 100644 index 00000000..347caa55 --- /dev/null +++ b/gs_quant/documentation/04_backtesting/examples/03_GenericEngine/040314_chained_actions.ipynb @@ -0,0 +1,229 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "431469a8-b15b-4a5d-8559-f8561ccb75f1", + "metadata": {}, + "outputs": [], + "source": [ + "import pandas as pd\n", + "import os\n", + "import sys\n", + "sys.path.insert(0, os.path.abspath('H:/code/gs_quant'))\n", + "sys.path.insert(0, os.path.abspath('H:/code/gs_quant_analytics'))\n", + "sys.path.insert(0, os.path.abspath('H:/code/visual-structuring-core'))\n", + "sys.path.insert(0, os.path.abspath('H:/code/gs_quant_internal'))\n", 
+ "sys.path.insert(0, os.path.abspath('H:/code/inventa-rates-utils'))\n", + "sys.path.insert(0, os.path.abspath('H:/code/franchise-rnd'))\n", + "import warnings\n", + "warnings.filterwarnings('ignore')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a53fde7e-f24f-4cb0-9962-9ed075790628", + "metadata": {}, + "outputs": [], + "source": [ + "from gs_quant.session import GsSession\n", + "\n", + "GsSession.use()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6a9bc9fb-3914-49fc-b3cf-4e59d22fd8a2", + "metadata": {}, + "outputs": [], + "source": [ + "from gs_quant.session import Environment\n", + "from gs_quant.instrument import FXOption\n", + "from gs_quant.backtests.strategy import Strategy\n", + "from gs_quant.backtests.triggers import *\n", + "from gs_quant.backtests.actions import *\n", + "from gs_quant.backtests.equity_vol_engine import *\n", + "from gs_quant.backtests.generic_engine import GenericEngine\n", + "from gs_quant.risk import Price\n", + "from datetime import datetime, date\n", + "\n", + "import gs_quant.risk as risk" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2a6175aa-bd47-4cd3-bead-dcebe4f5ea8d", + "metadata": {}, + "outputs": [], + "source": [ + "# Initialize session\n", + "from gs_quant.session import GsSession\n", + "GsSession.use(client_id=None, client_secret=None, scopes=('run_analytics',)) " + ] + }, + { + "cell_type": "markdown", + "id": "3911f5d4-3a4b-4fca-9404-315dfa03aea9", + "metadata": {}, + "source": [ + "#### If we have a periodic trigger which triggers every 1w and has an add trade action which has a trade duration of 1w sometimes you can end up with a situation where you have 2 trades or 0 trades. \n", + "#### This is because the schedule of 1w dates for the trigger will derive a schedule of 1w dates and then adjust each date for holidays. 
Where as if you take a specific date and add a week this will not take note of the holiday and therefore may give a different date." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5e2c6435-b2a0-4824-ae5c-4d4ef0f42ee4", + "metadata": {}, + "outputs": [], + "source": [ + "# Define backtest dates\n", + "start_date = date(2024, 3, 24)\n", + "end_date = date(2024, 5, 1)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2f30447d-103c-4792-adec-8c7f2516b617", + "metadata": {}, + "outputs": [], + "source": [ + "call = FXOption(pair='EURUSD', expiration_date='3m', option_type='Call', name='call')" + ] + }, + { + "cell_type": "markdown", + "id": "f62f44c9-d432-4212-9313-966c9e7b52ec", + "metadata": {}, + "source": [ + "### Entering a position weekly" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "adb4fc9d-1c23-40a6-b109-3ca8425ad623", + "metadata": {}, + "outputs": [], + "source": [ + "# To demonstrate create add trade action with a duration of 1w on a periodic trigger of 1w across a holiday\n", + "hol_cal = (dt.date(2024, 3, 29), dt.date(2024, 4, 1))\n", + "trade_action = AddTradeAction(priceables=call, trade_duration='1w', name='weekly_duration', \n", + " holiday_calendar=hol_cal)\n", + "trade_trigger = PeriodicTrigger(trigger_requirements=PeriodicTriggerRequirements(start_date=start_date, \n", + " end_date=end_date, frequency='1w', \n", + " calendar=hol_cal),\n", + " actions=trade_action)\n", + "\n", + "strategy = Strategy(None, trade_trigger)\n", + "\n", + "# Run backtest daily\n", + "GE = GenericEngine()\n", + "backtest = GE.run_backtest(strategy, start=start_date, end=end_date, frequency='1b', show_progress=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9c126b67-aecf-4493-9ac6-397b984852ed", + "metadata": {}, + "outputs": [], + "source": [ + "# View backtest trade ledger\n", + "backtest.trade_ledger()" + ] + }, + { + "cell_type": "markdown", + "id": 
"2fd8f5f6-7521-4182-a0ec-193f39553860", + "metadata": {}, + "source": [ + "#### note that the second trade closes on the 4th April but the third trade opens on the 5th." + ] + }, + { + "cell_type": "markdown", + "id": "7248340c-13b8-4c98-b837-1a5e4a12b88f", + "metadata": {}, + "source": [ + "In order to fix this we can set the trade duration to \"next schedule\".\n", + "This will have the effect of forcing the close date to line up with the open date of the following trade.\n", + "\n", + "This should only be used if you want the trade duration of the trade and the frequency of the trigger to line up \n", + "and the frequency is not daily. Thereby chaining (or rolling) the positions." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a1d2cf05-0622-4899-af32-be9dc0fad840", + "metadata": {}, + "outputs": [], + "source": [ + "hol_cal = (dt.date(2024, 3, 29), dt.date(2024, 4, 1))\n", + "trade_action = AddTradeAction(priceables=call, trade_duration='next schedule', name='weekly_duration', holiday_calendar=hol_cal)\n", + "trade_trigger = PeriodicTrigger(trigger_requirements=PeriodicTriggerRequirements(start_date=start_date, end_date=end_date, frequency='1w', \n", + " calendar=hol_cal),\n", + " actions=trade_action)\n", + "\n", + "strategy = Strategy(None, trade_trigger)\n", + "\n", + "# Run backtest daily\n", + "GE = GenericEngine()\n", + "backtest = GE.run_backtest(strategy, start=start_date, end=end_date, frequency='1b', show_progress=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "13b2bdc3-c710-4cd3-a020-23e8f1351151", + "metadata": {}, + "outputs": [], + "source": [ + "# View backtest trade ledger\n", + "backtest.trade_ledger()" + ] + }, + { + "cell_type": "markdown", + "id": "ca734a86-f37f-497e-bd96-34044a95d70a", + "metadata": {}, + "source": [ + "#### now note that all the trades start and end dates line up" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": 
"294b7063-3aff-4322-a400-070bc0b1ce35", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.5" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/gs_quant/markets/hedge.py b/gs_quant/markets/hedge.py index f6f371c6..432604c5 100644 --- a/gs_quant/markets/hedge.py +++ b/gs_quant/markets/hedge.py @@ -328,7 +328,8 @@ def __init__( max_market_cap: Optional[float] = None, market_participation_rate: float = 10, lasso_weight: float = 0, - ridge_weight: float = 0): + ridge_weight: float = 0, + benchmarks: List[str] = None): self.__initial_portfolio = initial_portfolio self.__universe = universe self.__exclusions = exclusions @@ -350,6 +351,7 @@ def __init__( self.__market_participation_rate = market_participation_rate self.__lasso_weight = lasso_weight self.__ridge_weight = ridge_weight + self.__benchmarks = benchmarks @property def initial_portfolio(self) -> PositionSet: @@ -555,7 +557,7 @@ def ridge_weight(self) -> float: def ridge_weight(self, value: float): self.__ridge_weight = value - def to_dict(self): + def to_dict(self, resolved_identifiers): positions_to_price = [] for position in self.initial_portfolio.positions: pos_to_price = {'assetId': position.asset_id} @@ -591,24 +593,17 @@ def to_dict(self): # Resolve any assets in the hedge universe, asset constraints, and asset exclusions hedge_date = self.initial_portfolio.date - identifiers = [identifier for identifier in self.universe] - if self.exclusions is not None and self.exclusions.assets is not None: - identifiers = identifiers + [asset for asset in self.exclusions.assets] - if self.constraints is not None: - if 
self.constraints.assets is not None: - identifiers = identifiers + [asset.constraint_name for asset in self.constraints.assets] - resolver = GsAssetApi.resolve_assets(identifier=identifiers, - fields=['id'], - as_of=hedge_date) - self.universe = [resolver.get(asset, [{'id': asset}])[0].get('id') for asset in self.universe] + self.universe = [resolved_identifiers.get(asset, [{'id': asset}])[0].get('id') for asset in self.universe] + self.benchmarks = [resolved_identifiers.get(asset, [{'id': asset}])[0].get('id') for asset in self.benchmarks] if self.exclusions is not None: if self.exclusions.assets is not None: - self.exclusions.assets = [resolver.get(asset, [{'id': asset}])[0].get('id') + self.exclusions.assets = [resolved_identifiers.get(asset, [{'id': asset}])[0].get('id') for asset in self.exclusions.assets] if self.constraints is not None and self.constraints.assets is not None: for con in self.constraints.assets: - if len(resolver.get(con.constraint_name, [])) > 0: - con.constraint_name = resolver.get(con.constraint_name)[0].get('id', con.constraint_name) + if len(resolved_identifiers.get(con.constraint_name, [])) > 0: + con.constraint_name = resolved_identifiers.get(con.constraint_name)[0].get('id', + con.constraint_name) # Parse and return dictionary observation_start_date = self.observation_start_date or hedge_date - relativedelta(years=1) @@ -657,9 +652,29 @@ def to_dict(self): as_dict['minMarketCap'] = self.min_market_cap if self.max_market_cap is not None: as_dict['maxMarketCap'] = self.max_market_cap + if self.benchmarks is not None and len(self.benchmarks): + as_dict['benchmarks'] = self.benchmarks return as_dict + def resolve_identifiers_in_payload(self, hedge_date) -> tuple: + """ + The hedge payload has identifiers which need to be resolved + The resolved values here are used to convert back from asset Id to provided identifier for benchmark curves + """ + identifiers = [identifier for identifier in self.universe] + if self.exclusions is not None 
and self.exclusions.assets is not None: + identifiers = identifiers + [asset for asset in self.exclusions.assets] + if self.benchmarks is not None: + identifiers = identifiers + [asset for asset in self.benchmarks] + if self.constraints is not None: + if self.constraints.assets is not None: + identifiers = identifiers + [asset.constraint_name for asset in self.constraints.assets] + resolver = GsAssetApi.resolve_assets(identifier=identifiers, + fields=['id'], + as_of=hedge_date) + return resolver + class Hedge: """ @@ -695,10 +710,14 @@ def calculate(self) -> Dict: Calculates the hedge :return: a dictionary with calculation results """ - params = self.parameters.to_dict() + resolved_identifiers = self.parameters.resolve_identifiers_in_payload(self.parameters.initial_portfolio.date) + params = self.parameters.to_dict(resolved_identifiers) calculation_results = GsHedgeApi.calculate_hedge({'objective': self.objective.value, 'parameters': params}).get('result') formatted_results = self._format_hedge_calculate_results(calculation_results) + formatted_results = self._enhance_result_with_benchmark_curves(formatted_results, + calculation_results.get('benchmarks', []), + resolved_identifiers) self.__result = formatted_results return formatted_results @@ -767,11 +786,30 @@ def _format_hedge_calculate_results(calculation_results): formatted_results = {} for key in renamed_results: - formatted_results[key] = {} - for inner_key in renamed_results[key]: - formatted_results[key][inner_key[0].capitalize() + - ''.join(map(lambda x: x if x.islower() else f' {x}', - inner_key[1:]))] = renamed_results[key][inner_key] + formatted_results[key] = Hedge.format_dictionary_key_to_readable_format(renamed_results[key]) + + return formatted_results + + @staticmethod + def _enhance_result_with_benchmark_curves(formatted_results, benchmark_results, resolver): + asset_id_to_provided_identifier_map = dict( + (x['id'], provided_identifier) + for provided_identifier, marquee_assets in 
resolver.items() + for x in marquee_assets) + + if len(benchmark_results): + for x in benchmark_results: + benchmark_asset_id = asset_id_to_provided_identifier_map[x['assetId']] + formatted_results[benchmark_asset_id] = Hedge.format_dictionary_key_to_readable_format(x) + + return formatted_results + + @staticmethod + def format_dictionary_key_to_readable_format(renamed_results): + formatted_results = {} + for inner_key in renamed_results: + formatted_results[inner_key[0].capitalize() + ''.join(map(lambda x: x if x.islower() else f' {x}', + inner_key[1:]))] = renamed_results[inner_key] return formatted_results @staticmethod diff --git a/gs_quant/target/instrument.py b/gs_quant/target/instrument.py index 7390851c..120f4589 100644 --- a/gs_quant/target/instrument.py +++ b/gs_quant/target/instrument.py @@ -571,6 +571,19 @@ class FXBinary(Instrument): type_: Optional[AssetType] = field(init=False, default=AssetType.Binary, metadata=config(field_name='type', exclude=exclude_none)) name: Optional[str] = field(default=None, metadata=name_metadata) + def scale_in_place(self, scaling: Optional[float] = None): + if self.unresolved is None: + raise RuntimeError('Can only scale resolved instruments') + if scaling is None or scaling == 1: + return + + if scaling < 0: + flip_dict = {BuySell.Buy: BuySell.Sell, BuySell.Sell: BuySell.Buy} + self.buy_sell = flip_dict[self.buy_sell] + + self.notional_amount *= abs(scaling) + return + @handle_camel_case_args @dataclass_json(letter_case=LetterCase.CAMEL) diff --git a/gs_quant/test/api/backtests_xasset/json_encoders/test_request_encoders.py b/gs_quant/test/api/backtests_xasset/json_encoders/test_request_encoders.py index ef136cdd..696bc741 100644 --- a/gs_quant/test/api/backtests_xasset/json_encoders/test_request_encoders.py +++ b/gs_quant/test/api/backtests_xasset/json_encoders/test_request_encoders.py @@ -20,9 +20,9 @@ def test_legs_decoder(): - fx_leg_1 = {"pair": "EURUSD", "asset_class": "FX", "type": "Option", "name": "leg_0"} - 
fx_leg_2 = {"pair": "GBPUSD", "asset_class": "FX", "type": "Option"} - eq_leg = {"underlier": ".SPX", "asset_class": "Equity", "type": "Option", "name": "test_eq"} + fx_leg_1 = {"pair": "EURUSD", "assetClass": "FX", "type": "Option", "name": "leg_0"} + fx_leg_2 = {"pair": "GBPUSD", "assetClass": "FX", "type": "Option"} + eq_leg = {"underlier": ".SPX", "assetClass": "Equity", "type": "Option", "name": "test_eq"} [inst_1, inst_2, inst_3] = legs_decoder([fx_leg_1, fx_leg_2, eq_leg]) assert isinstance(inst_1, FXOption) assert inst_1.name == "leg_0" @@ -41,16 +41,14 @@ def test_legs_encoder(): eq_leg = EqOption(underlier=".SPX", name="test_eq") [inst_1, inst_2, inst_3] = legs_encoder([fx_leg_1, fx_leg_2, eq_leg]) assert isinstance(inst_1, dict) - assert inst_1["asset_class"] == AssetClass.FX + assert inst_1["assetClass"] == AssetClass.FX assert inst_1["type"] == AssetType.Option - assert inst_1["name"] == "leg_0" assert inst_1["pair"] == "EURUSD" assert isinstance(inst_2, dict) - assert inst_2["asset_class"] == AssetClass.FX + assert inst_2["assetClass"] == AssetClass.FX assert inst_2["type"] == AssetType.Option assert inst_2["pair"] == "GBPUSD" assert isinstance(inst_3, dict) - assert inst_3["asset_class"] == AssetClass.Equity + assert inst_3["assetClass"] == AssetClass.Equity assert inst_3["type"] == AssetType.Option - assert inst_3["name"] == "test_eq" assert inst_3["underlier"] == ".SPX" diff --git a/gs_quant/test/markets/test_hedger.py b/gs_quant/test/markets/test_hedger.py index 80d0c0a0..b5fd2dbd 100644 --- a/gs_quant/test/markets/test_hedger.py +++ b/gs_quant/test/markets/test_hedger.py @@ -1957,6 +1957,7 @@ def test_format_hedge_calculate_results(): def get_mock_hedge(mocker): mocker.patch.object(PerformanceHedgeParameters, 'to_dict', return_value={}) + mocker.patch.object(PerformanceHedgeParameters, 'resolve_identifiers_in_payload', return_value={}) mocker.patch.object(GsHedgeApi, 'calculate_hedge', return_value={'result': calculation_results}) hedge = 
PerformanceHedge(parameters=PerformanceHedgeParameters( initial_portfolio=PositionSet(positions=[Position(asset_id='fakeId', identifier='fakeId', quantity=1)], diff --git a/gs_quant/test/timeseries/test_econometrics.py b/gs_quant/test/timeseries/test_econometrics.py index 72b6d98e..e9c8fc0a 100644 --- a/gs_quant/test/timeseries/test_econometrics.py +++ b/gs_quant/test/timeseries/test_econometrics.py @@ -449,6 +449,7 @@ def test_max_drawdown(): date(2019, 1, 7), date(2019, 1, 8), ] + daily_dates = pd.to_datetime(daily_dates) series = pd.Series([1, 5, 5, 4, 4, 1], index=daily_dates) diff --git a/gs_quant/timeseries/econometrics.py b/gs_quant/timeseries/econometrics.py index 1d4c1eac..165d0cd9 100644 --- a/gs_quant/timeseries/econometrics.py +++ b/gs_quant/timeseries/econometrics.py @@ -758,7 +758,7 @@ def max_drawdown(x: pd.Series, w: Union[Window, int, str] = Window(None, 0)) -> """ Compute the maximum peak to trough drawdown over a rolling window as a ratio. - i.e. if the max drawdown for a period is 20%, this function will return 0.2. + i.e. if the max drawdown for a period is 20%, this function will return -0.2. :param x: time series :param w: Window, int, or str: size of window and ramp up to use. e.g. 
Window(22, 10) where 22 is the window size @@ -780,10 +780,10 @@ def max_drawdown(x: pd.Series, w: Union[Window, int, str] = Window(None, 0)) -> """ w = normalize_window(x, w) if isinstance(w.w, pd.DateOffset): - if np.issubdtype(x.index, dt.date): - scores = pd.Series([x[idx] / x.loc[(x.index > (idx - w.w).date()) & (x.index <= idx)].max() - 1 + if pd.api.types.is_datetime64_dtype(x.index): + scores = pd.Series([x[idx] / x.loc[(x.index > (idx - w.w)) & (x.index <= idx)].max() - 1 for idx in x.index], index=x.index) - result = pd.Series([scores.loc[(scores.index > (idx - w.w).date()) & (scores.index <= idx)].min() + result = pd.Series([scores.loc[(scores.index > (idx - w.w)) & (scores.index <= idx)].min() for idx in scores.index], index=scores.index) else: raise TypeError('Please pass in list of dates as index') diff --git a/gs_quant/timeseries/technicals.py b/gs_quant/timeseries/technicals.py index 784f3e48..576e2850 100644 --- a/gs_quant/timeseries/technicals.py +++ b/gs_quant/timeseries/technicals.py @@ -420,8 +420,9 @@ def _freq_to_period(x: pd.Series, freq: Frequency = Frequency.YEAR): if not isinstance(x.index, pd.DatetimeIndex): raise MqValueError("Series must have a pandas.DateTimeIndex.") pfreq = getattr(getattr(x, 'index', None), 'inferred_freq', None) - pfreq = 'MS' if pfreq in ('ME', 'MS') else pfreq # Convert Month[Start|End] into Monthly - pfreq = 'QS' if pfreq in ('QE-DEC', 'QE') else pfreq # Convert Month[Start|End] into Monthly + # Some older versions of statsmodels don't handle some of the newer pandas frequencies, so we manually adjust them + pfreq = 'MS' if pfreq in ('ME', 'M') else pfreq # Convert Month[End] into MonthlyStart + pfreq = 'QS' if pfreq in ('QE-DEC', 'QE') else pfreq # Convert Quarter[End] into QuarterlyStart period = None if pfreq is None else statsmodels.tsa.seasonal.freq_to_period(pfreq) if period in [7, None]: # daily x = x.asfreq('D', method='ffill')