Skip to content

Commit

Permalink
📚 Fix tutorial logs
Browse files · Browse the repository at this point in the history
  • Loading branch information
o-laurent committed Nov 5, 2023
1 parent 5fd0ada commit e4fd953
Show file tree
Hide file tree
Showing 5 changed files with 6 additions and 5 deletions.
2 changes: 1 addition & 1 deletion auto_tutorials_source/tutorial_bayesian.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ def optim_lenet(model: nn.Module) -> dict:
):
args = init_args(datamodule=MNISTDataModule)

net_name = "bayesian-lenet-mnist"
net_name = "logs/bayesian-lenet-mnist"

# datamodule
args.root = str(root / "data")
Expand Down
3 changes: 2 additions & 1 deletion auto_tutorials_source/tutorial_der_cubic.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,8 +89,9 @@ def optim_regression(
"False",
):
args = init_args()
args.use_cv = False

net_name = "der-mlp-cubic"
net_name = "logs/der-mlp-cubic"

# dataset
train_ds = Cubic(num_samples=1000)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ def optim_lenet(model: nn.Module) -> dict:
):
args = init_args(datamodule=MNISTDataModule)

net_name = "dec-lenet-mnist"
net_name = "logs/dec-lenet-mnist"

# datamodule
args.root = str(root / "data")
Expand Down
2 changes: 1 addition & 1 deletion auto_tutorials_source/tutorial_mc_dropout.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@
):
args = init_args(network=ResNet, datamodule=MNISTDataModule)

net_name = "mc-dropout-lenet-mnist"
net_name = "logs/mc-dropout-lenet-mnist"

# datamodule
args.root = str(root / "data")
Expand Down
2 changes: 1 addition & 1 deletion torch_uncertainty/transforms/mixup.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ def __call__(self, x: Tensor, y: Tensor) -> tuple[Tensor, Tensor]:

class RegMixup(AbstractMixup):
"""RegMixup method from Pinto et al.,
"RegMixup: Mixup as a Regularizer Can Surprisingly Improve Accuracy and Out Distribution Robustness" (NeurIPS 2022)
'RegMixup: Mixup as a Regularizer Can Surprisingly Improve Accuracy and Out Distribution Robustness' (NeurIPS 2022)
https://arxiv.org/abs/2206.14502.
"""

Expand Down

0 comments on commit e4fd953

Please sign in to comment.