Standardize name of out_features in UnimodalLogitsHead
nathanpainchaud committed Nov 1, 2023
1 parent efb1330 commit 453561f
Showing 1 changed file with 3 additions and 3 deletions.
didactic/models/layers.py (3 additions & 3 deletions)
@@ -76,7 +76,7 @@ class UnimodalLogitsHead(nn.Module):
     def __init__(
         self,
         in_features: int,
-        num_logits: int,
+        out_features: int,
         backbone_distribution: Literal["poisson", "binomial"] = "poisson",
         tau: float = 1.0,
         tau_mode: Literal["fixed", "learn", "learn_sigm", "learn_fn"] = "learn_sigm",
@@ -86,7 +86,7 @@ def __init__(
         Args:
             in_features: Number of features in the input feature vector.
-            num_logits: Number of (tempered) logits to output.
+            out_features: Number of (tempered) logits to output.
             backbone_distribution: Distribution whose probability mass function (PMF) is used to enforce an unimodal
                 distribution of the logits.
             tau: Temperature parameter to control the sharpness of the distribution.
@@ -102,7 +102,7 @@ def __init__(
             eps: Epsilon value to use in probabilities' log to avoid numerical instability.
         """
         super().__init__()
-        self.num_logits = num_logits
+        self.num_logits = out_features
         self.backbone_distribution = backbone_distribution
         self.tau_mode = tau_mode
         self.eps = eps
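Below is a minimal usage sketch of the renamed keyword argument, assuming only the constructor signature visible in the hunks above; the feature sizes (128 in, 10 out) are illustrative values, and the forward call's return value is not shown in this diff.

import torch

from didactic.models.layers import UnimodalLogitsHead

# Illustrative sizes; only the keyword names come from the diff above.
head = UnimodalLogitsHead(
    in_features=128,       # number of features in the input feature vector
    out_features=10,       # number of (tempered) logits to output (was `num_logits` before this commit)
    backbone_distribution="poisson",
    tau=1.0,
    tau_mode="learn_sigm",
)

x = torch.randn(4, 128)    # batch of 4 feature vectors
output = head(x)           # forward pass; the exact return value is not part of this diff

Note that, per the last hunk, the value is still stored internally as self.num_logits; only the constructor argument name changes.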

0 comments on commit 453561f
