Skip to content

Commit

Permalink
Add an option to configure the optimizer's epsilon (`eps`) hyperparameter via the training config
Browse files Browse the repository at this point in the history
  • Loading branch information
georgeyiasemis committed Dec 12, 2023
1 parent 1f829dd commit 0d49cb8
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 0 deletions.
1 change: 1 addition & 0 deletions direct/config/defaults.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ class TrainingConfig(BaseConfig):
optimizer: str = "Adam"
lr: float = 5e-4
weight_decay: float = 1e-6
eps: float = 1e-8
batch_size: int = 2

# LR Scheduler
Expand Down
1 change: 1 addition & 0 deletions direct/train.py
Original file line number Diff line number Diff line change
Expand Up @@ -237,6 +237,7 @@ def setup_train(
optimizer_params,
lr=env.cfg.training.lr,
weight_decay=env.cfg.training.weight_decay,
eps=env.cfg.training.eps,
) # noqa

# Build the LR scheduler, we use a fixed LR schedule step size, no adaptive training schedule.
Expand Down

0 comments on commit 0d49cb8

Please sign in to comment.