
Commit 8266300

Authored by rschireman (Raymond G Schireman) and akihironitta (Akihiro Nitta)

Remove `pytorch_lightning.callbacks.lr_monitor.LearningRateMonitor.lr_sch_names` (#13353)

Co-authored-by: Raymond G Schireman <[email protected]>
Co-authored-by: Akihiro Nitta <[email protected]>
1 parent 2e9cd72 commit 8266300

File tree

- CHANGELOG.md
- src/pytorch_lightning/callbacks/lr_monitor.py
- tests/tests_pytorch/deprecated_api/test_remove_1-7.py

3 files changed: +9 -33 lines changed


CHANGELOG.md

Lines changed: 3 additions & 0 deletions
```diff
@@ -229,6 +229,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Removed the need to explicitly load habana module ([#13338](https://github.com/PyTorchLightning/pytorch-lightning/pull/13338))
 
 
+- Removed deprecated `pytorch_lightning.callbacks.lr_monitor.LearningRateMonitor.lr_sch_names` ([#13353](https://github.com/Lightning-AI/lightning/pull/13353))
+
+
 ### Fixed
 
 
```

src/pytorch_lightning/callbacks/lr_monitor.py

Lines changed: 6 additions & 22 deletions
```diff
@@ -28,7 +28,7 @@
 import pytorch_lightning as pl
 from pytorch_lightning.callbacks.callback import Callback
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
-from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation, rank_zero_warn
+from pytorch_lightning.utilities.rank_zero import rank_zero_warn
 from pytorch_lightning.utilities.types import LRSchedulerConfig
 
 
@@ -94,7 +94,6 @@ def __init__(self, logging_interval: Optional[str] = None, log_momentum: bool =
         self.logging_interval = logging_interval
         self.log_momentum = log_momentum
         self.lrs: Dict[str, List[float]] = {}
-        self._lr_sch_names: List[str] = []
 
     def on_train_start(self, trainer: "pl.Trainer", *args: Any, **kwargs: Any) -> None:
         """Called before training, determines unique names for all lr schedulers in the case of multiple of the
@@ -176,7 +175,7 @@ def _extract_stats(self, trainer: "pl.Trainer", interval: str) -> Dict[str, floa
             scheduler_hparam_keys,
             optimizers_with_scheduler,
             optimizers_with_scheduler_types,
-        ) = self._find_names_from_schedulers(trainer.lr_scheduler_configs, add_lr_sch_names=False)
+        ) = self._find_names_from_schedulers(trainer.lr_scheduler_configs)
         self._remap_keys(scheduler_hparam_keys)
 
         for name, config in zip(scheduler_hparam_keys, trainer.lr_scheduler_configs):
@@ -189,7 +188,6 @@ def _extract_stats(self, trainer: "pl.Trainer", interval: str) -> Dict[str, floa
             trainer.optimizers,
             seen_optimizers=optimizers_with_scheduler,
             seen_optimizer_types=optimizers_with_scheduler_types,
-            add_lr_sch_names=False,
         )
         self._remap_keys(optimizer_hparam_keys)
 
@@ -264,7 +262,8 @@ def _duplicate_param_group_names(self, param_groups: List[Dict]) -> Set[str]:
         return {n for n in names if names.count(n) > 1}
 
     def _find_names_from_schedulers(
-        self, lr_scheduler_configs: List[LRSchedulerConfig], add_lr_sch_names: bool = True
+        self,
+        lr_scheduler_configs: List[LRSchedulerConfig],
     ) -> Tuple[List[List[str]], List[Optimizer], DefaultDict[Type[Optimizer], int]]:
         # Create unique names in the case we have multiple of the same learning
         # rate scheduler + multiple parameter groups
@@ -279,7 +278,7 @@ def _find_names_from_schedulers(
                 name = "lr-" + sch.optimizer.__class__.__name__
 
             updated_names = self._check_duplicates_and_update_name(
-                sch.optimizer, name, seen_optimizers, seen_optimizer_types, config, add_lr_sch_names
+                sch.optimizer, name, seen_optimizers, seen_optimizer_types, config
             )
             names.append(updated_names)
 
@@ -290,7 +289,6 @@ def _find_names_from_optimizers(
         optimizers: List[Any],
         seen_optimizers: List[Optimizer],
         seen_optimizer_types: DefaultDict[Type[Optimizer], int],
-        add_lr_sch_names: bool = True,
     ) -> Tuple[List[List[str]], List[Optimizer]]:
         names = []
         optimizers_without_scheduler = []
@@ -303,7 +301,7 @@ def _find_names_from_optimizers(
 
             name = "lr-" + optimizer.__class__.__name__
             updated_names = self._check_duplicates_and_update_name(
-                optimizer, name, seen_optimizers, seen_optimizer_types, None, add_lr_sch_names
+                optimizer, name, seen_optimizers, seen_optimizer_types, None
             )
             names.append(updated_names)
             optimizers_without_scheduler.append(optimizer)
@@ -317,7 +315,6 @@ def _check_duplicates_and_update_name(
         seen_optimizers: List[Optimizer],
         seen_optimizer_types: DefaultDict[Type[Optimizer], int],
         lr_scheduler_config: Optional[LRSchedulerConfig],
-        add_lr_sch_names: bool = True,
     ) -> List[str]:
         seen_optimizers.append(optimizer)
         optimizer_cls = type(optimizer)
@@ -338,17 +335,4 @@ def _check_duplicates_and_update_name(
         name = self._add_prefix(name, optimizer_cls, seen_optimizer_types)
         name_list = [self._add_suffix(name, param_groups, i) for i in range(len(param_groups))]
 
-        if add_lr_sch_names:
-            self._lr_sch_names.append(name)
-
         return name_list
-
-    @property
-    def lr_sch_names(self) -> List[str]:
-        # TODO remove `lr_sch_names` and `add_lr_sch_names` argument in v1.7.0
-        rank_zero_deprecation(
-            "`LearningRateMonitor.lr_sch_names` has been deprecated in v1.5 and will be removed in 1.7."
-            " Consider accessing them using `LearningRateMonitor.lrs.keys()` which will return"
-            " the names of all the optimizers, even those without a scheduler."
-        )
-        return self._lr_sch_names
```
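The deprecation message removed above points users at `LearningRateMonitor.lrs` as the replacement. A minimal migration sketch, assuming a `LightningModule` such as `BoringModel` that configures a single optimizer (the exact keys depend on the optimizers used):

```python
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import LearningRateMonitor
from pytorch_lightning.demos.boring_classes import BoringModel

lr_monitor = LearningRateMonitor()
trainer = Trainer(fast_dev_run=True, callbacks=[lr_monitor])
trainer.fit(BoringModel())

# Before this commit (deprecated since v1.5, removed here):
#     names = lr_monitor.lr_sch_names
# After: `lrs` maps each name to its logged learning-rate history and,
# per the deprecation message, also covers optimizers without a scheduler.
names = list(lr_monitor.lrs.keys())
```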

tests/tests_pytorch/deprecated_api/test_remove_1-7.py

Lines changed: 0 additions & 11 deletions
```diff
@@ -21,7 +21,6 @@
 import torch
 
 from pytorch_lightning import Callback, Trainer
-from pytorch_lightning.callbacks.lr_monitor import LearningRateMonitor
 from pytorch_lightning.demos.boring_classes import BoringModel
 from pytorch_lightning.overrides.distributed import IndexBatchSamplerWrapper
 from pytorch_lightning.plugins.environments import (
@@ -124,16 +123,6 @@ def test_v1_7_0_deprecated_max_steps_none(tmpdir):
     trainer.fit_loop.max_steps = None
 
 
-def test_v1_7_0_deprecate_lr_sch_names(tmpdir):
-    model = BoringModel()
-    lr_monitor = LearningRateMonitor()
-    trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, callbacks=[lr_monitor])
-    trainer.fit(model)
-
-    with pytest.deprecated_call(match="`LearningRateMonitor.lr_sch_names` has been deprecated in v1.5"):
-        assert lr_monitor.lr_sch_names == ["lr-SGD"]
-
-
 @pytest.mark.parametrize(
     "cls",
     [
```
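The removed test exercised the deprecation warning itself, so it is deleted rather than rewritten. As a hedged sketch (not part of this commit), an equivalent check against the surviving API could look like the following, reusing the removed test's expectation that `BoringModel`'s single SGD optimizer is named `"lr-SGD"`:

```python
# Hypothetical follow-up test, not part of this commit: the same assertion
# expressed via `lrs`, which replaces the removed `lr_sch_names` property.
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import LearningRateMonitor
from pytorch_lightning.demos.boring_classes import BoringModel


def test_lr_monitor_names(tmpdir):
    model = BoringModel()
    lr_monitor = LearningRateMonitor()
    trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, callbacks=[lr_monitor])
    trainer.fit(model)

    assert list(lr_monitor.lrs.keys()) == ["lr-SGD"]
```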
