@@ -1147,3 +1147,41 @@ def dis_closure():
 @RunIf(min_gpus=2, special=True)
 def test_step_with_optimizer_closure_with_different_frequencies_ddp_with_toggle_model(tmpdir):
     train_manual_optimization(tmpdir, "ddp", model_cls=TestManualOptimizationDDPModelToggleModel)
+
+
+def test_lr_schedulers(tmpdir):
+    """
+    Test that `lr_schedulers()` returns the same objects,
+    in the same order, as defined in `configure_optimizers()`.
+    """
+
+    class TestModel(BoringModel):
+
+        def __init__(self):
+            super().__init__()
+            self.automatic_optimization = False
+
+        def training_step(self, batch, batch_idx):
+            scheduler_1, scheduler_2 = self.lr_schedulers()
+            assert scheduler_1 is self.scheduler_1
+            assert scheduler_2 is self.scheduler_2
+
+        def configure_optimizers(self):
+            optimizer_1 = torch.optim.SGD(self.parameters(), lr=0.1)
+            optimizer_2 = torch.optim.SGD(self.parameters(), lr=0.1)
+            self.scheduler_1 = torch.optim.lr_scheduler.StepLR(optimizer_1, step_size=1)
+            self.scheduler_2 = torch.optim.lr_scheduler.StepLR(optimizer_2, step_size=1)
+            return [optimizer_1, optimizer_2], [self.scheduler_1, self.scheduler_2]
+
+    model = TestModel()
+    model.training_epoch_end = None
+
+    trainer = Trainer(
+        default_root_dir=tmpdir,
+        max_epochs=1,
+        limit_train_batches=1,
+        limit_val_batches=1,
+        limit_test_batches=1,
+    )
+
+    trainer.fit(model)
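
For context, a minimal sketch of how `lr_schedulers()` is typically consumed under manual optimization. The `ManualModel` class below and its placeholder loss are illustrative assumptions, not part of this commit:

# Illustrative sketch (assumption, not part of this diff): with
# automatic_optimization disabled, the LightningModule must step both
# the optimizer and the scheduler returned by lr_schedulers() itself.
class ManualModel(BoringModel):
    def __init__(self):
        super().__init__()
        self.automatic_optimization = False

    def training_step(self, batch, batch_idx):
        opt = self.optimizers()      # a single optimizer is returned directly
        sch = self.lr_schedulers()   # a single scheduler is returned directly

        loss = self(batch).sum()     # placeholder loss for the sketch
        opt.zero_grad()
        self.manual_backward(loss)
        opt.step()
        sch.step()                   # no automatic scheduler stepping here
        return loss

    def configure_optimizers(self):
        optimizer = torch.optim.SGD(self.parameters(), lr=0.1)
        scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1)
        return [optimizer], [scheduler]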