From 59c76065939138d24f1289303edf6f71f3018392 Mon Sep 17 00:00:00 2001
From: Yoann Schneider
Date: Fri, 16 Dec 2022 11:01:04 +0000
Subject: [PATCH 1/2] bump up to 1.1.7

---
 benchmarks/common.py                  |  4 ++--
 laia/callbacks/learning_rate.py       | 11 ++++++-----
 laia/dummies/dummy_trainer.py         |  2 +-
 requirements.txt                      |  2 +-
 tests/callbacks/learning_rate_test.py |  2 --
 tests/callbacks/progress_bar_test.py  |  1 +
 6 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/benchmarks/common.py b/benchmarks/common.py
index 0ea6144c..f7a1be0f 100644
--- a/benchmarks/common.py
+++ b/benchmarks/common.py
@@ -17,8 +17,8 @@ def setup(train_path, fixed_input_height=0):
 
     syms = str(train_path / "syms")
     syms_table = SymbolsTable()
-    for k, v in data_module.syms.items():
-        syms_table.add(v, k)
+    for k, v in data_module.syms:
+        syms_table.add(k,v)
     syms_table.save(syms)
 
     model(
diff --git a/laia/callbacks/learning_rate.py b/laia/callbacks/learning_rate.py
index c103ab90..6a496590 100644
--- a/laia/callbacks/learning_rate.py
+++ b/laia/callbacks/learning_rate.py
@@ -1,12 +1,13 @@
 import pytorch_lightning as pl
 from pytorch_lightning.utilities import rank_zero_only
+from pytorch_lightning.callbacks.lr_monitor import LearningRateMonitor
 
 import laia.common.logging as log
 
 _logger = log.get_logger(__name__)
 
 
-class LearningRate(pl.callbacks.LearningRateMonitor):
+class LearningRate(LearningRateMonitor):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.last_values = None
@@ -19,12 +20,12 @@ def on_train_start(self, trainer, *args, **kwargs):
             RuntimeWarning,
         )
         names = self._find_names(trainer.lr_schedulers)
-        self.lrs = {name: [] for name in names}
+        self.lrs = {name: [] for name in names} 
         self.last_values = {}
-
+    
     @rank_zero_only
-    def on_epoch_end(self, trainer, *args, **kwargs):
-        super().on_epoch_end(trainer, *args, **kwargs)
+    def on_train_epoch_end(self, trainer, *args, **kwargs):
+        super().on_train_epoch_end(trainer, *args, **kwargs)
         for k, v in self.lrs.items():
             prev_value = self.last_values.get(k, None)
             new_value = v[-1]
diff --git a/laia/dummies/dummy_trainer.py b/laia/dummies/dummy_trainer.py
index c2034d49..0e612152 100644
--- a/laia/dummies/dummy_trainer.py
+++ b/laia/dummies/dummy_trainer.py
@@ -5,7 +5,7 @@ class DummyTrainer(pl.Trainer):
     def __init__(self, **kwargs):
         defaults = {
             "checkpoint_callback": False,
-            "logger": False,
+            "logger": True,
             "weights_summary": None,
             "max_epochs": 1,
             "limit_train_batches": 10,
diff --git a/requirements.txt b/requirements.txt
index c68b2609..c5fb36b9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,7 +5,7 @@ scipy
 matplotlib
 # cpu version: nnutils-pytorch
 nnutils-pytorch-cuda
-pytorch-lightning==1.1.0
+pytorch-lightning==1.1.7
 torch==1.13
 torchvision==0.14
 torchaudio==0.13
diff --git a/tests/callbacks/learning_rate_test.py b/tests/callbacks/learning_rate_test.py
index 123f82aa..ee889b9b 100644
--- a/tests/callbacks/learning_rate_test.py
+++ b/tests/callbacks/learning_rate_test.py
@@ -4,7 +4,6 @@
 from laia.callbacks import LearningRate
 from laia.dummies import DummyEngine, DummyLoggingPlugin, DummyMNIST, DummyTrainer
 
-
 def test_learning_rate_warns(tmpdir):
     trainer = DummyTrainer(
         default_root_dir=tmpdir,
@@ -14,7 +13,6 @@
     with pytest.warns(RuntimeWarning, match=r"You are using LearningRateMonitor.*"):
         trainer.fit(DummyEngine(), datamodule=DummyMNIST())
 
-
 class __TestEngine(DummyEngine):
     def configure_optimizers(self):
         optimizer = super().configure_optimizers()
diff --git a/tests/callbacks/progress_bar_test.py b/tests/callbacks/progress_bar_test.py
index 67b48391..a4397912 100644
--- a/tests/callbacks/progress_bar_test.py
+++ b/tests/callbacks/progress_bar_test.py
@@ -75,6 +75,7 @@ def test_progress_bar(tmpdir):
         r"100%\|[█]+\| 10/10 \[00:0[0-9]<00:00, "
         rf"{float_pattern}it/s, "
         rf"loss={float_pattern}, "
+        r"v_num=0, "
        rf"cer={float_pattern}%, "
         r"gpu_stats={'gpu_stats': 'baz'}]"
     )

From f1c54a03b8830cb4dd71761c69d0045bcb6e5cd1 Mon Sep 17 00:00:00 2001
From: Yoann Schneider
Date: Fri, 16 Dec 2022 12:02:14 +0100
Subject: [PATCH 2/2] fix lint

---
 benchmarks/common.py                  | 2 +-
 laia/callbacks/learning_rate.py       | 6 +++---
 tests/callbacks/learning_rate_test.py | 2 ++
 3 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/benchmarks/common.py b/benchmarks/common.py
index f7a1be0f..95bce329 100644
--- a/benchmarks/common.py
+++ b/benchmarks/common.py
@@ -18,7 +18,7 @@ def setup(train_path, fixed_input_height=0):
     syms = str(train_path / "syms")
     syms_table = SymbolsTable()
     for k, v in data_module.syms:
-        syms_table.add(k,v)
+        syms_table.add(k, v)
     syms_table.save(syms)
 
     model(
diff --git a/laia/callbacks/learning_rate.py b/laia/callbacks/learning_rate.py
index 6a496590..47a33200 100644
--- a/laia/callbacks/learning_rate.py
+++ b/laia/callbacks/learning_rate.py
@@ -1,6 +1,6 @@
 import pytorch_lightning as pl
-from pytorch_lightning.utilities import rank_zero_only
 from pytorch_lightning.callbacks.lr_monitor import LearningRateMonitor
+from pytorch_lightning.utilities import rank_zero_only
 
 import laia.common.logging as log
 
@@ -20,9 +20,9 @@ def on_train_start(self, trainer, *args, **kwargs):
             RuntimeWarning,
         )
         names = self._find_names(trainer.lr_schedulers)
-        self.lrs = {name: [] for name in names} 
+        self.lrs = {name: [] for name in names}
         self.last_values = {}
-    
+
     @rank_zero_only
     def on_train_epoch_end(self, trainer, *args, **kwargs):
         super().on_train_epoch_end(trainer, *args, **kwargs)
diff --git a/tests/callbacks/learning_rate_test.py b/tests/callbacks/learning_rate_test.py
index ee889b9b..123f82aa 100644
--- a/tests/callbacks/learning_rate_test.py
+++ b/tests/callbacks/learning_rate_test.py
@@ -4,6 +4,7 @@
 from laia.callbacks import LearningRate
 from laia.dummies import DummyEngine, DummyLoggingPlugin, DummyMNIST, DummyTrainer
 
+
 def test_learning_rate_warns(tmpdir):
     trainer = DummyTrainer(
         default_root_dir=tmpdir,
@@ -13,6 +14,7 @@
     with pytest.warns(RuntimeWarning, match=r"You are using LearningRateMonitor.*"):
         trainer.fit(DummyEngine(), datamodule=DummyMNIST())
 
+
 class __TestEngine(DummyEngine):
     def configure_optimizers(self):
         optimizer = super().configure_optimizers()
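Note (not part of the patches above): the bump to pytorch-lightning 1.1.7 is what drives the changes in laia/callbacks/learning_rate.py, where LearningRateMonitor is now imported from pytorch_lightning.callbacks.lr_monitor and the per-epoch hook is overridden as on_train_epoch_end rather than on_epoch_end. Below is a minimal usage sketch, assuming the 1.1.x API and the LearningRate callback exactly as patched; the lr_callback/model/datamodule names are illustrative placeholders only.

    import pytorch_lightning as pl
    from laia.callbacks import LearningRate

    # LearningRate subclasses LearningRateMonitor, so constructor arguments such as
    # logging_interval are forwarded unchanged by the *args/**kwargs __init__ shown
    # in the diff.
    lr_callback = LearningRate(logging_interval="epoch")

    # Under pytorch-lightning 1.1.x the trainer calls on_train_epoch_end on each
    # callback once per training epoch, which is the hook the patched callback
    # now overrides.
    trainer = pl.Trainer(max_epochs=1, callbacks=[lr_callback])
    # trainer.fit(model, datamodule=datamodule)  # model/datamodule supplied by the user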