 import pytorch_lightning as pl
 from pytorch_lightning.callbacks.callback import Callback
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
-from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation, rank_zero_warn
+from pytorch_lightning.utilities.rank_zero import rank_zero_warn
 from pytorch_lightning.utilities.types import LRSchedulerConfig
 
 
@@ -94,7 +94,6 @@ def __init__(self, logging_interval: Optional[str] = None, log_momentum: bool =
         self.logging_interval = logging_interval
         self.log_momentum = log_momentum
         self.lrs: Dict[str, List[float]] = {}
-        self._lr_sch_names: List[str] = []
 
     def on_train_start(self, trainer: "pl.Trainer", *args: Any, **kwargs: Any) -> None:
         """Called before training, determines unique names for all lr schedulers in the case of multiple of the
@@ -176,7 +175,7 @@ def _extract_stats(self, trainer: "pl.Trainer", interval: str) -> Dict[str, floa
             scheduler_hparam_keys,
             optimizers_with_scheduler,
             optimizers_with_scheduler_types,
-        ) = self._find_names_from_schedulers(trainer.lr_scheduler_configs, add_lr_sch_names=False)
+        ) = self._find_names_from_schedulers(trainer.lr_scheduler_configs)
         self._remap_keys(scheduler_hparam_keys)
 
         for name, config in zip(scheduler_hparam_keys, trainer.lr_scheduler_configs):
@@ -189,7 +188,6 @@ def _extract_stats(self, trainer: "pl.Trainer", interval: str) -> Dict[str, floa
             trainer.optimizers,
             seen_optimizers=optimizers_with_scheduler,
             seen_optimizer_types=optimizers_with_scheduler_types,
-            add_lr_sch_names=False,
         )
         self._remap_keys(optimizer_hparam_keys)
 
@@ -264,7 +262,8 @@ def _duplicate_param_group_names(self, param_groups: List[Dict]) -> Set[str]:
         return {n for n in names if names.count(n) > 1}
 
     def _find_names_from_schedulers(
-        self, lr_scheduler_configs: List[LRSchedulerConfig], add_lr_sch_names: bool = True
+        self,
+        lr_scheduler_configs: List[LRSchedulerConfig],
     ) -> Tuple[List[List[str]], List[Optimizer], DefaultDict[Type[Optimizer], int]]:
         # Create unique names in the case we have multiple of the same learning
         # rate scheduler + multiple parameter groups
@@ -279,7 +278,7 @@ def _find_names_from_schedulers(
                 name = "lr-" + sch.optimizer.__class__.__name__
 
             updated_names = self._check_duplicates_and_update_name(
-                sch.optimizer, name, seen_optimizers, seen_optimizer_types, config, add_lr_sch_names
+                sch.optimizer, name, seen_optimizers, seen_optimizer_types, config
             )
             names.append(updated_names)
 
@@ -290,7 +289,6 @@ def _find_names_from_optimizers(
         optimizers: List[Any],
         seen_optimizers: List[Optimizer],
         seen_optimizer_types: DefaultDict[Type[Optimizer], int],
-        add_lr_sch_names: bool = True,
     ) -> Tuple[List[List[str]], List[Optimizer]]:
         names = []
         optimizers_without_scheduler = []
@@ -303,7 +301,7 @@ def _find_names_from_optimizers(
 
             name = "lr-" + optimizer.__class__.__name__
             updated_names = self._check_duplicates_and_update_name(
-                optimizer, name, seen_optimizers, seen_optimizer_types, None, add_lr_sch_names
+                optimizer, name, seen_optimizers, seen_optimizer_types, None
             )
             names.append(updated_names)
             optimizers_without_scheduler.append(optimizer)
@@ -317,7 +315,6 @@ def _check_duplicates_and_update_name(
         seen_optimizers: List[Optimizer],
         seen_optimizer_types: DefaultDict[Type[Optimizer], int],
         lr_scheduler_config: Optional[LRSchedulerConfig],
-        add_lr_sch_names: bool = True,
     ) -> List[str]:
         seen_optimizers.append(optimizer)
         optimizer_cls = type(optimizer)
@@ -338,17 +335,4 @@ def _check_duplicates_and_update_name(
         name = self._add_prefix(name, optimizer_cls, seen_optimizer_types)
         name_list = [self._add_suffix(name, param_groups, i) for i in range(len(param_groups))]
 
-        if add_lr_sch_names:
-            self._lr_sch_names.append(name)
-
         return name_list
-
-    @property
-    def lr_sch_names(self) -> List[str]:
-        # TODO remove `lr_sch_names` and `add_lr_sch_names` argument in v1.7.0
-        rank_zero_deprecation(
-            "`LearningRateMonitor.lr_sch_names` has been deprecated in v1.5 and will be removed in 1.7."
-            " Consider accessing them using `LearningRateMonitor.lrs.keys()` which will return"
-            " the names of all the optimizers, even those without a scheduler."
-        )
-        return self._lr_sch_names
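For context, the deprecation message removed here points users to `LearningRateMonitor.lrs.keys()` as the replacement for the `lr_sch_names` property. A minimal sketch of that migration, using an illustrative `TinyModel` and random data that are not part of this change:

```python
import torch
from torch.utils.data import DataLoader, TensorDataset

import pytorch_lightning as pl
from pytorch_lightning.callbacks import LearningRateMonitor


class TinyModel(pl.LightningModule):
    # Illustrative module only, so the callback has an optimizer and scheduler to track.
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(4, 1)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return torch.nn.functional.mse_loss(self.layer(x), y)

    def configure_optimizers(self):
        optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
        scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1)
        return [optimizer], [scheduler]


data = DataLoader(TensorDataset(torch.randn(32, 4), torch.randn(32, 1)), batch_size=8)
lr_monitor = LearningRateMonitor(logging_interval="epoch")
trainer = pl.Trainer(callbacks=[lr_monitor], max_epochs=1, logger=pl.loggers.CSVLogger("."))
trainer.fit(TinyModel(), data)

# Instead of the removed `lr_monitor.lr_sch_names`, the logged names
# (e.g. "lr-Adam", one entry per optimizer/parameter group) are available as:
print(list(lr_monitor.lrs.keys()))
```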