Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

191 smooth weights #207

Open
wants to merge 10 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
63 changes: 56 additions & 7 deletions cvx/covariance/combination.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,22 +75,43 @@ def _nu(Ls, means):


class _CombinationProblem:
def __init__(self, keys, n, window):
def __init__(self, keys, n, window, smoother, gamma):
self.keys = keys
K = len(keys)
self._weight = cvx.Variable(len(keys), name="weights")
self.A_param = cvx.Parameter((n * window, K))
self.P_chol_param = cvx.Parameter((K, K))

if smoother:
self._weight_prev = cvx.Parameter(len(keys), name="weights_prev")
if smoother == "l2":
self.smooth_penalty = cvx.norm(self._weight - self._weight_prev, 2)
elif smoother == "l1":
self.smooth_penalty = cvx.norm(self._weight - self._weight_prev, 1)
elif smoother == "sum_squares":
self.smooth_penalty = cvx.sum_squares(self._weight - self._weight_prev)
else:
raise ValueError("smoother must be None, 'l2', 'l1' or 'sum_squares'")

self.smoother = smoother
self.gamma = gamma

@property
def _constraints(self):
    """Feasible set for the combination weights: the probability simplex
    (weights sum to one and are non-negative)."""
    budget = cvx.sum(self._weight) == 1
    non_negative = self._weight >= 0
    return [budget, non_negative]

@property
def _objective(self):
return cvx.sum(cvx.log(self.A_param @ self._weight)) - 0.5 * cvx.sum_squares(
self.P_chol_param.T @ self._weight
)
if self.smoother:
return (
cvx.sum(cvx.log(self.A_param @ self._weight))
- 0.5 * cvx.sum_squares(self.P_chol_param.T @ self._weight)
- self.gamma * self.smooth_penalty
)
else:
return cvx.sum(
cvx.log(self.A_param @ self._weight)
) - 0.5 * cvx.sum_squares(self.P_chol_param.T @ self._weight)

def _construct_problem(self):
    """Assemble the cvxpy problem: maximize the objective subject to the
    simplex constraints, and store it on the instance for repeated solves."""
    objective = cvx.Maximize(self._objective)
    self.prob = cvx.Problem(objective, self._constraints)
Expand Down Expand Up @@ -218,7 +239,15 @@ def assets(self):
"""
return self.returns.columns

def solve(self, window=None, times=None, **kwargs):
def solve(
self,
window=None,
times=None,
smoother=None,
gamma=None,
weight_init=None,
**kwargs,
):
"""
The size of the window is crucial to specify the size of the parameters
for the cvxpy problem. Hence those computations are not in the __init__ method
Expand All @@ -227,7 +256,19 @@ def solve(self, window=None, times=None, **kwargs):

param window: number of previous time steps to use in the covariance
combination problem
param times: list of time steps to solve the problem at; if None, solve at all available time steps
param times: list of time steps to solve the problem at; if None, solve
at all available time steps
param smoother: smoothing parameter for the covariance combination
problem; None, 'l2', 'l1', or 'sum_squares'. 'l2' yields piecewise
constant weights; 'l1' yields sparse weight updates; 'sum_squares'
yields smooth weight updates
param gamma: regularization parameter for the covariance combination;
only applicable if smoother is not None; penalty becomes gamma *
||w||_2, gamma * ||w||_1, or gamma * ||w||_2^2 depending on the value
of the 'smoother' parameter
param weight_init: initial weights for the covariance combination
problem; if None, use the default uniform initialization; only
applicable if smoother is not None
"""
# If window is None, use all available data; cap window at length of data
window = window or len(self.__Ls_shifted)
Expand Down Expand Up @@ -282,17 +323,25 @@ def solve(self, window=None, times=None, **kwargs):
}

problem = _CombinationProblem(
keys=self.sigmas.keys(), n=len(self.assets), window=window
keys=self.sigmas.keys(),
n=len(self.assets),
window=window,
smoother=smoother,
gamma=gamma,
)

problem._construct_problem()
if smoother and not weight_init:
problem._weight_prev.value = np.ones(self.K) / self.K

for time, AA in A.items():
problem.A_param.value = AA
problem.P_chol_param.value = P_chol[time]

try:
yield self._solve(time=time, problem=problem, **kwargs)
if smoother:
problem._weight_prev.value = problem._weight.value
except cvx.SolverError:
print(f"Solver did not converge at time {time}")
yield None
Expand Down
13 changes: 10 additions & 3 deletions cvx/covariance/ewma.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,8 +92,8 @@ def iterated_ewma(
returns,
vola_halflife,
cov_halflife,
min_periods_vola=20,
min_periods_cov=20,
min_periods_vola=None,
min_periods_cov=None,
mean=False,
mu_halflife1=None,
mu_halflife2=None,
Expand All @@ -117,9 +117,15 @@ def iterated_ewma(
if None, no winsorization is performed
nan_to_num: if True, replace NaNs in returns with 0.0
"""
n = returns.shape[1]
mu_halflife1 = mu_halflife1 or vola_halflife
mu_halflife2 = mu_halflife2 or cov_halflife

if min_periods_vola is None:
min_periods_vola = n
if min_periods_cov is None:
min_periods_cov = min_periods_cov or 3 * n

def scale_cov(vola, matrix):
index = matrix.index
columns = matrix.columns
Expand Down Expand Up @@ -215,7 +221,8 @@ def _ewma_mean(data, halflife, min_periods=0, clip_at=None):
min_periods=min_periods,
clip_at=clip_at,
):
# converting back into a series costs some performance but adds robustness
# converting back into a series costs some performance but adds
# robustness
if not np.isnan(ewma).all():
yield t, pd.Series(index=data.columns, data=ewma)

Expand Down
1 change: 0 additions & 1 deletion cvx/covariance/regularization.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,6 @@ def _e_step(Sigma, F, d):

def _m_step(Cxx, Cxs, Css):
    """M-step of the EM iteration for the low-rank-plus-diagonal fit.

    param Cxx: covariance of the observed variables
    param Cxs: cross-covariance of observed variables and latent factors
        (presumably a DataFrame — its index is reused below; TODO confirm)
    param Css: covariance of the latent factors

    Returns a LowRankDiag with the updated factor loadings F and
    diagonal d.
    """
    # Factor loadings solve the normal equations F = Cxs @ Css^{-1}.
    F = Cxs @ np.linalg.inv(Css)
    # Diagonal of (Cxx - 2 * Cxs @ F.T + F @ Css @ F.T) computed entrywise,
    # without materializing the full n x n matrices.
    d = np.diag(Cxx) - 2 * np.sum(Cxs * F, axis=1) + np.sum(F * (F @ Css), axis=1)
    return LowRankDiag(F=F, d=pd.Series(d, index=F.index))

Expand Down
Loading
Loading