From 0ec511a7ac0fc664ce6c6532fe4534252793daa4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Mochol=C3=AD?= Date: Thu, 10 Dec 2020 12:39:36 +0100 Subject: [PATCH 1/7] Do not warn when the name key is used --- pytorch_lightning/callbacks/lr_monitor.py | 3 +- pytorch_lightning/trainer/optimizers.py | 1 + tests/callbacks/test_lr_monitor.py | 32 +++++++++++++++++---- tests/trainer/test_optimizers.py | 34 +++++++++++++++++++---- 4 files changed, 58 insertions(+), 12 deletions(-) diff --git a/pytorch_lightning/callbacks/lr_monitor.py b/pytorch_lightning/callbacks/lr_monitor.py index 081aec45067cf..4c8e6d0415d8d 100755 --- a/pytorch_lightning/callbacks/lr_monitor.py +++ b/pytorch_lightning/callbacks/lr_monitor.py @@ -156,8 +156,7 @@ def _find_names(self, lr_schedulers) -> List[str]: # rate schduler + multiple parameter groups names = [] for scheduler in lr_schedulers: - sch = scheduler['scheduler'] - if 'name' in scheduler: + if scheduler.get("name") is not None: name = scheduler['name'] else: opt_name = 'lr-' + sch.optimizer.__class__.__name__ diff --git a/pytorch_lightning/trainer/optimizers.py b/pytorch_lightning/trainer/optimizers.py index 6f3ba80bd0734..479d401720261 100644 --- a/pytorch_lightning/trainer/optimizers.py +++ b/pytorch_lightning/trainer/optimizers.py @@ -94,6 +94,7 @@ def configure_schedulers(self, schedulers: list, monitor: Optional[str] = None): lr_schedulers = [] default_config = { 'scheduler': None, + 'name': None, # no custom name 'interval': 'epoch', # after epoch is over 'frequency': 1, # every epoch/batch 'reduce_on_plateau': False, # most often not ReduceLROnPlateau scheduler diff --git a/tests/callbacks/test_lr_monitor.py b/tests/callbacks/test_lr_monitor.py index a6783435ed3e2..d29f254df67d0 100644 --- a/tests/callbacks/test_lr_monitor.py +++ b/tests/callbacks/test_lr_monitor.py @@ -13,11 +13,11 @@ # limitations under the License. 
import pytest +import tests.base.develop_utils as tutils from pytorch_lightning import Trainer from pytorch_lightning.callbacks import LearningRateMonitor from pytorch_lightning.utilities.exceptions import MisconfigurationException -from tests.base import EvalModelTemplate -import tests.base.develop_utils as tutils +from tests.base import BoringModel, EvalModelTemplate def test_lr_monitor_single_lr(tmpdir): @@ -43,7 +43,7 @@ def test_lr_monitor_single_lr(tmpdir): 'Momentum should not be logged by default' assert len(lr_monitor.lrs) == len(trainer.lr_schedulers), \ 'Number of learning rates logged does not match number of lr schedulers' - assert all([k in ['lr-Adam'] for k in lr_monitor.lrs.keys()]), \ + assert lr_monitor.lr_sch_names == list(lr_monitor.lrs.keys()) == ['lr-Adam'], \ 'Names of learning rates not set correctly' @@ -134,7 +134,7 @@ def test_lr_monitor_multi_lrs(tmpdir, logging_interval): assert lr_monitor.lrs, 'No learning rates logged' assert len(lr_monitor.lrs) == len(trainer.lr_schedulers), \ 'Number of learning rates logged does not match number of lr schedulers' - assert all([k in ['lr-Adam', 'lr-Adam-1'] for k in lr_monitor.lrs.keys()]), \ + assert lr_monitor.lr_sch_names == ['lr-Adam', 'lr-Adam-1'], \ 'Names of learning rates not set correctly' if logging_interval == 'step': @@ -167,5 +167,27 @@ def test_lr_monitor_param_groups(tmpdir): assert lr_monitor.lrs, 'No learning rates logged' assert len(lr_monitor.lrs) == 2 * len(trainer.lr_schedulers), \ 'Number of learning rates logged does not match number of param groups' - assert all([k in ['lr-Adam/pg1', 'lr-Adam/pg2'] for k in lr_monitor.lrs.keys()]), \ + assert lr_monitor.lr_sch_names == ['lr-Adam'] + assert list(lr_monitor.lrs.keys()) == ['lr-Adam/pg1', 'lr-Adam/pg2'], \ 'Names of learning rates not set correctly' + + +def test_lr_monitor_custom_name(tmpdir): + class TestModel(BoringModel): + def configure_optimizers(self): + optimizer, [scheduler] = super().configure_optimizers() + lr_scheduler = {'scheduler': scheduler, 'name': 'my_logging_name'} + return optimizer, [lr_scheduler] + + lr_monitor = LearningRateMonitor() + trainer = Trainer( + default_root_dir=tmpdir, + max_epochs=2, + limit_val_batches=0.1, + limit_train_batches=0.5, + callbacks=[lr_monitor], + progress_bar_refresh_rate=0, + weights_summary=None, + ) + trainer.fit(TestModel()) + assert lr_monitor.lr_sch_names == list(lr_monitor.lrs.keys()) == ['my_logging_name'] diff --git a/tests/trainer/test_optimizers.py b/tests/trainer/test_optimizers.py index 2e76192836740..52e085b2b7b8c 100644 --- a/tests/trainer/test_optimizers.py +++ b/tests/trainer/test_optimizers.py @@ -15,7 +15,6 @@ import torch from pytorch_lightning import Callback, Trainer -from pytorch_lightning.core.optimizer import LightningOptimizer from pytorch_lightning.utilities.exceptions import MisconfigurationException from tests.base import EvalModelTemplate from tests.base.boring_model import BoringModel @@ -177,6 +176,7 @@ def test_reducelronplateau_scheduling(tmpdir): frequency=1, reduce_on_plateau=True, strict=True, + name=None, ), 'lr scheduler was not correctly converted to dict' @@ -215,7 +215,13 @@ def test_optimizer_return_options(enable_pl_optimizer): assert len(freq) == 0 assert optim[0] == opt_a assert lr_sched[0] == dict( - scheduler=scheduler_a, interval='epoch', frequency=1, reduce_on_plateau=False, monitor=None, strict=True + scheduler=scheduler_a, + interval='epoch', + frequency=1, + reduce_on_plateau=False, + monitor=None, + strict=True, + name=None, ) # opt tuple of 1 list 
@@ -225,7 +231,13 @@ def test_optimizer_return_options(enable_pl_optimizer): assert len(freq) == 0 assert optim[0] == opt_a assert lr_sched[0] == dict( - scheduler=scheduler_a, interval='epoch', frequency=1, reduce_on_plateau=False, monitor=None, strict=True + scheduler=scheduler_a, + interval='epoch', + frequency=1, + reduce_on_plateau=False, + monitor=None, + strict=True, + name=None, ) # opt single dictionary @@ -235,7 +247,13 @@ def test_optimizer_return_options(enable_pl_optimizer): assert len(freq) == 0 assert optim[0] == opt_a assert lr_sched[0] == dict( - scheduler=scheduler_a, interval='epoch', frequency=1, reduce_on_plateau=False, monitor=None, strict=True + scheduler=scheduler_a, + interval='epoch', + frequency=1, + reduce_on_plateau=False, + monitor=None, + strict=True, + name=None, ) # opt multiple dictionaries with frequencies @@ -247,7 +265,13 @@ def test_optimizer_return_options(enable_pl_optimizer): assert len(optim) == len(lr_sched) == len(freq) == 2 assert optim[0] == opt_a assert lr_sched[0] == dict( - scheduler=scheduler_a, interval='epoch', frequency=1, reduce_on_plateau=False, monitor=None, strict=True + scheduler=scheduler_a, + interval='epoch', + frequency=1, + reduce_on_plateau=False, + monitor=None, + strict=True, + name=None, ) assert freq == [1, 5] From 3b116007007123b35b583cf5090a3dbd491197ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Mochol=C3=AD?= Date: Thu, 10 Dec 2020 12:46:03 +0100 Subject: [PATCH 2/7] Missing line --- pytorch_lightning/callbacks/lr_monitor.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pytorch_lightning/callbacks/lr_monitor.py b/pytorch_lightning/callbacks/lr_monitor.py index 4c8e6d0415d8d..df0fd3ae04847 100755 --- a/pytorch_lightning/callbacks/lr_monitor.py +++ b/pytorch_lightning/callbacks/lr_monitor.py @@ -156,6 +156,7 @@ def _find_names(self, lr_schedulers) -> List[str]: # rate schduler + multiple parameter groups names = [] for scheduler in lr_schedulers: + sch = scheduler['scheduler'] if scheduler.get("name") is not None: name = scheduler['name'] else: From e5192cc83fca466bf014c07363afb980a08c6433 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Mochol=C3=AD?= Date: Thu, 10 Dec 2020 12:47:16 +0100 Subject: [PATCH 3/7] Consistency --- pytorch_lightning/callbacks/lr_monitor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytorch_lightning/callbacks/lr_monitor.py b/pytorch_lightning/callbacks/lr_monitor.py index df0fd3ae04847..c8bd9ea3d2434 100755 --- a/pytorch_lightning/callbacks/lr_monitor.py +++ b/pytorch_lightning/callbacks/lr_monitor.py @@ -157,7 +157,7 @@ def _find_names(self, lr_schedulers) -> List[str]: names = [] for scheduler in lr_schedulers: sch = scheduler['scheduler'] - if scheduler.get("name") is not None: + if scheduler.get('name') is not None: name = scheduler['name'] else: opt_name = 'lr-' + sch.optimizer.__class__.__name__ From d51a1baa15c0772224596aa999660e8aa91ce06d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Mochol=C3=AD?= Date: Thu, 10 Dec 2020 19:34:05 +0100 Subject: [PATCH 4/7] Update pytorch_lightning/callbacks/lr_monitor.py --- pytorch_lightning/callbacks/lr_monitor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytorch_lightning/callbacks/lr_monitor.py b/pytorch_lightning/callbacks/lr_monitor.py index c8bd9ea3d2434..9799e0d3298d3 100755 --- a/pytorch_lightning/callbacks/lr_monitor.py +++ b/pytorch_lightning/callbacks/lr_monitor.py @@ -157,7 +157,7 @@ def _find_names(self, lr_schedulers) -> List[str]: names = [] for scheduler in lr_schedulers: 
sch = scheduler['scheduler'] - if scheduler.get('name') is not None: + if scheduler['name'] is not None: name = scheduler['name'] else: opt_name = 'lr-' + sch.optimizer.__class__.__name__ From 3898d59f03c0c92061399f7e5923d40343f1a236 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Mochol=C3=AD?= Date: Thu, 10 Dec 2020 20:02:24 +0100 Subject: [PATCH 5/7] Update docs --- pytorch_lightning/core/lightning.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py index f29e7f75bfbff..7d2734ba2115c 100644 --- a/pytorch_lightning/core/lightning.py +++ b/pytorch_lightning/core/lightning.py @@ -989,7 +989,7 @@ def configure_optimizers( - List or Tuple - List of optimizers. - Two lists - The first list has multiple optimizers, the second a list of LR schedulers (or lr_dict). - Dictionary, with an 'optimizer' key, and (optionally) a 'lr_scheduler' - key which value is a single LR scheduler or lr_dict. + key whose value is a single LR scheduler or lr_dict. - Tuple of dictionaries as described, with an optional 'frequency' key. - None - Fit will run without any optimizer. @@ -1001,21 +1001,22 @@ def configure_optimizers( In the former case, all optimizers will operate on the given batch in each optimization step. In the latter, only one optimizer will operate on the given batch at every step. - The lr_dict is a dictionary which contains scheduler and its associated configuration. - It has five keys. The default configuration is shown below. + The lr_dict is a dictionary which contains the scheduler and its associated configuration. + The default configuration is shown below. .. code-block:: python { - 'scheduler': lr_scheduler, # The LR schduler + 'scheduler': lr_scheduler, # The LR scheduler instance (required) 'interval': 'epoch', # The unit of the scheduler's step size 'frequency': 1, # The frequency of the scheduler 'reduce_on_plateau': False, # For ReduceLROnPlateau scheduler 'monitor': 'val_loss', # Metric for ReduceLROnPlateau to monitor - 'strict': True # Whether to crash the training if `monitor` is not found + 'strict': True, # Whether to crash the training if `monitor` is not found + 'name': None, # Custom name for LearningRateMonitor to use } - If user only provides LR schedulers, then their configuration will set to default as shown above. + Only the `scheduler` key is required, the rest will be set to the defaults above. Examples: .. code-block:: python From f9175924cd8abe070a3f5919ce12470164a3ec39 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Mochol=C3=AD?= Date: Thu, 10 Dec 2020 20:11:36 +0100 Subject: [PATCH 6/7] Update pytorch_lightning/core/lightning.py Co-authored-by: Rohit Gupta --- pytorch_lightning/core/lightning.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py index 7d2734ba2115c..24d8ac89b6872 100644 --- a/pytorch_lightning/core/lightning.py +++ b/pytorch_lightning/core/lightning.py @@ -1016,7 +1016,7 @@ def configure_optimizers( 'name': None, # Custom name for LearningRateMonitor to use } - Only the `scheduler` key is required, the rest will be set to the defaults above. + Only the ``scheduler`` key is required, the rest will be set to the defaults above. Examples: .. 
code-block:: python From b4af7b69758da4722d2a2c8b1be0142ed29bc10f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Mochol=C3=AD?= Date: Sun, 13 Dec 2020 23:35:46 +0100 Subject: [PATCH 7/7] Update CHANGELOG --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f078349ef3665..87d29ff6df643 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -45,6 +45,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). - Fixed `LightningOptimizer` exposes optimizer attributes ([#5095](https://github.com/PyTorchLightning/pytorch-lightning/pull/5095)) +- Do not warn when the `name` key is used in the `lr_scheduler` dict ([#5057](https://github.com/PyTorchLightning/pytorch-lightning/pull/5057)) + ## [1.1.0] - 2020-12-09
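
Usage note: the behaviour this series enables is exercised by the new `test_lr_monitor_custom_name` test above. A minimal sketch of how a user would pass the now-accepted `name` key from `configure_optimizers` (the model, optimizer and scheduler below are illustrative placeholders; only the `scheduler`/`name` keys of the lr_dict come from this series):

    import torch
    from pytorch_lightning import LightningModule

    class MyModel(LightningModule):  # hypothetical module, for illustration only
        def __init__(self):
            super().__init__()
            self.layer = torch.nn.Linear(32, 2)

        def configure_optimizers(self):
            optimizer = torch.optim.Adam(self.parameters(), lr=0.02)
            scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1)
            # 'name' is now a default key of the lr_dict, so returning it no longer
            # triggers the "extra keys" warning; LearningRateMonitor will log the
            # learning rate under 'my_logging_name' instead of 'lr-Adam'.
            lr_dict = {'scheduler': scheduler, 'name': 'my_logging_name'}
            return [optimizer], [lr_dict]

With a `LearningRateMonitor` callback attached to the `Trainer`, the logged key becomes `my_logging_name`, matching the assertion in `test_lr_monitor_custom_name`.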