
Commit

update betas
felixdittrich92 committed Dec 20, 2024
1 parent 2799d92 commit a034ed8
Showing 10 changed files with 14 additions and 14 deletions.
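For context, this commit changes the second-moment coefficient (beta_2) of Adam/AdamW from 0.99 to 0.999, the PyTorch and Keras default, in all reference training scripts. This lengthens the effective averaging window of the squared-gradient estimate from roughly 1 / (1 - 0.99) = 100 steps to 1 / (1 - 0.999) = 1000 steps. Below is a minimal PyTorch sketch of the updated optimizer setup; the model and the learning-rate/weight-decay values are placeholder assumptions, not taken from the scripts themselves.

import torch

# Placeholder model and hyperparameters standing in for the scripts' model,
# args.lr and args.weight_decay (hypothetical values for illustration only).
model = torch.nn.Linear(10, 2)
lr, weight_decay = 1e-3, 1e-4

# Optimizer construction mirroring the reference scripts after this commit:
# beta_2 = 0.999 (the PyTorch default) replaces the previous 0.99.
optimizer = torch.optim.AdamW(
    [p for p in model.parameters() if p.requires_grad],
    lr,
    betas=(0.9, 0.999),
    eps=1e-6,
    weight_decay=weight_decay,
)

# Rule of thumb: the squared-gradient EMA averages over ~1 / (1 - beta_2) steps,
# so 0.99 -> ~100 steps and 0.999 -> ~1000 steps.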
2 changes: 1 addition & 1 deletion references/classification/train_pytorch_character.py
@@ -291,7 +291,7 @@ def main(args):
     optimizer = torch.optim.AdamW(
         [p for p in model.parameters() if p.requires_grad],
         args.lr,
-        betas=(0.9, 0.99),
+        betas=(0.9, 0.999),
         eps=1e-6,
         weight_decay=args.weight_decay or 1e-4,
     )
2 changes: 1 addition & 1 deletion references/classification/train_pytorch_orientation.py
@@ -297,7 +297,7 @@ def main(args):
     optimizer = torch.optim.AdamW(
         [p for p in model.parameters() if p.requires_grad],
         args.lr,
-        betas=(0.9, 0.99),
+        betas=(0.9, 0.999),
         eps=1e-6,
         weight_decay=args.weight_decay or 1e-4,
     )
4 changes: 2 additions & 2 deletions references/classification/train_tensorflow_character.py
@@ -258,7 +258,7 @@ def main(args):
         optimizer = optimizers.Adam(
             learning_rate=scheduler,
             beta_1=0.95,
-            beta_2=0.99,
+            beta_2=0.999,
             epsilon=1e-6,
             clipnorm=5,
             weight_decay=None if args.weight_decay == 0 else args.weight_decay,
@@ -267,7 +267,7 @@ def main(args):
         optimizer = optimizers.AdamW(
             learning_rate=scheduler,
             beta_1=0.9,
-            beta_2=0.99,
+            beta_2=0.999,
             epsilon=1e-6,
             clipnorm=5,
             weight_decay=args.weight_decay or 1e-4,
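The TensorFlow scripts receive the same substitution in their Keras optimizers. A minimal sketch under assumed placeholder values follows; the learning-rate schedule and the weight-decay value are illustrative, not the scripts' actual settings.

from tensorflow.keras import optimizers

# Placeholder schedule standing in for the scripts' `scheduler` (assumed values).
scheduler = optimizers.schedules.ExponentialDecay(1e-3, decay_steps=1000, decay_rate=0.95)

# AdamW branch as configured after this commit:
# beta_2 = 0.999 (the Keras default) replaces the previous 0.99.
optimizer = optimizers.AdamW(
    learning_rate=scheduler,
    beta_1=0.9,
    beta_2=0.999,
    epsilon=1e-6,
    clipnorm=5,
    weight_decay=1e-4,  # the scripts pass `args.weight_decay or 1e-4`
)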
4 changes: 2 additions & 2 deletions references/classification/train_tensorflow_orientation.py
@@ -268,7 +268,7 @@ def main(args):
         optimizer = optimizers.Adam(
             learning_rate=scheduler,
             beta_1=0.95,
-            beta_2=0.99,
+            beta_2=0.999,
             epsilon=1e-6,
             clipnorm=5,
             weight_decay=None if args.weight_decay == 0 else args.weight_decay,
@@ -277,7 +277,7 @@ def main(args):
         optimizer = optimizers.AdamW(
             learning_rate=scheduler,
             beta_1=0.9,
-            beta_2=0.99,
+            beta_2=0.999,
             epsilon=1e-6,
             clipnorm=5,
             weight_decay=args.weight_decay or 1e-4,
2 changes: 1 addition & 1 deletion references/detection/train_pytorch.py
@@ -343,7 +343,7 @@ def main(args):
     optimizer = torch.optim.AdamW(
         [p for p in model.parameters() if p.requires_grad],
         args.lr,
-        betas=(0.9, 0.99),
+        betas=(0.9, 0.999),
         eps=1e-6,
         weight_decay=args.weight_decay or 1e-4,
     )
2 changes: 1 addition & 1 deletion references/detection/train_pytorch_ddp.py
@@ -359,7 +359,7 @@ def main(rank: int, world_size: int, args):
     optimizer = torch.optim.AdamW(
         [p for p in model.parameters() if p.requires_grad],
         args.lr,
-        betas=(0.9, 0.99),
+        betas=(0.9, 0.999),
         eps=1e-6,
         weight_decay=args.weight_decay or 1e-4,
     )
4 changes: 2 additions & 2 deletions references/detection/train_tensorflow.py
@@ -303,7 +303,7 @@ def main(args):
         optimizer = optimizers.Adam(
             learning_rate=scheduler,
             beta_1=0.95,
-            beta_2=0.99,
+            beta_2=0.999,
             epsilon=1e-6,
             clipnorm=5,
             weight_decay=None if args.weight_decay == 0 else args.weight_decay,
@@ -312,7 +312,7 @@ def main(args):
         optimizer = optimizers.AdamW(
             learning_rate=scheduler,
             beta_1=0.9,
-            beta_2=0.99,
+            beta_2=0.999,
             epsilon=1e-6,
             clipnorm=5,
             weight_decay=args.weight_decay or 1e-4,
2 changes: 1 addition & 1 deletion references/recognition/train_pytorch.py
@@ -349,7 +349,7 @@ def main(args):
     optimizer = torch.optim.AdamW(
         [p for p in model.parameters() if p.requires_grad],
         args.lr,
-        betas=(0.9, 0.99),
+        betas=(0.9, 0.999),
         eps=1e-6,
         weight_decay=args.weight_decay or 1e-4,
     )
2 changes: 1 addition & 1 deletion references/recognition/train_pytorch_ddp.py
@@ -283,7 +283,7 @@ def main(rank: int, world_size: int, args):
     optimizer = torch.optim.AdamW(
         [p for p in model.parameters() if p.requires_grad],
         args.lr,
-        betas=(0.9, 0.99),
+        betas=(0.9, 0.999),
         eps=1e-6,
         weight_decay=args.weight_decay or 1e-4,
     )
4 changes: 2 additions & 2 deletions references/recognition/train_tensorflow.py
@@ -305,7 +305,7 @@ def main(args):
         optimizer = optimizers.Adam(
             learning_rate=scheduler,
             beta_1=0.95,
-            beta_2=0.99,
+            beta_2=0.999,
             epsilon=1e-6,
             clipnorm=5,
             weight_decay=None if args.weight_decay == 0 else args.weight_decay,
@@ -314,7 +314,7 @@ def main(args):
         optimizer = optimizers.AdamW(
             learning_rate=scheduler,
             beta_1=0.9,
-            beta_2=0.99,
+            beta_2=0.999,
             epsilon=1e-6,
             clipnorm=5,
             weight_decay=args.weight_decay or 1e-4,
