
Commit

Amend alpha, lam type hints
PabloRoque committed Sep 20, 2024
1 parent b1d63de commit d2eb266
Showing 1 changed file with 4 additions and 4 deletions.
8 changes: 4 additions & 4 deletions pymc_marketing/mmm/transformers.py
@@ -828,8 +828,8 @@ def tanh_saturation_baselined(

def michaelis_menten_function(
    x: float | np.ndarray | npt.NDArray[np.float64],
-   alpha: float | np.ndarray | npt.NDArray[np.float64],
-   lam: float | np.ndarray | npt.NDArray[np.float64],
+   alpha: float,
+   lam: float,
) -> float | Any:
    r"""Evaluate the Michaelis-Menten function for given values of x, alpha, and lambda.
@@ -916,8 +916,8 @@ def michaelis_menten_function(

def michaelis_menten(
    x: float | np.ndarray | npt.NDArray[np.float64],
-   alpha: float | np.ndarray | npt.NDArray[np.float64],
-   lam: float | np.ndarray | npt.NDArray[np.float64],
+   alpha: float,
+   lam: float,
) -> pt.TensorVariable:
    r"""TensorVariable wrap over the Michaelis-Menten transformation.
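For context on the narrowed annotations: the Michaelis-Menten saturation is conventionally alpha * x / (lam + x), where alpha is the saturation ceiling and lam is the half-saturation point, i.e. the x at which the curve reaches alpha / 2. Both are scalar parameters of the curve, which is why float is a reasonable hint, while x may still be an array of spend values. Below is a minimal standalone sketch of that scalar form; michaelis_menten_sketch and its comments are illustrative only, not the code in transformers.py.

import numpy as np
import numpy.typing as npt


def michaelis_menten_sketch(
    x: float | npt.NDArray[np.float64],
    alpha: float,
    lam: float,
) -> float | npt.NDArray[np.float64]:
    # alpha: maximum attainable effect (saturation ceiling), a scalar.
    # lam: half-saturation point; the output equals alpha / 2 at x == lam.
    # x: spend, either a scalar or an array of spend values.
    return alpha * x / (lam + x)


# Example: alpha=10, lam=5 gives 10 * 5 / (5 + 5) = 5.0 at x = 5.
print(michaelis_menten_sketch(5.0, alpha=10.0, lam=5.0))                          # 5.0
print(michaelis_menten_sketch(np.array([1.0, 5.0, 50.0]), alpha=10.0, lam=5.0))   # [1.67, 5.0, 9.09] approx.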
