
Commit 331c92c

Merge branch 'master' into cifar10-baseline

2 parents: a715751 + 4ebce38

7 files changed: +83 additions, −35 deletions

pytorch_lightning/core/lightning.py

Lines changed: 4 additions & 2 deletions

@@ -1412,8 +1412,10 @@ def get_progress_bar_dict(self):
 
     def _verify_is_manual_optimization(self, fn_name):
         if self.trainer.train_loop.automatic_optimization:
-            m = f'to use {fn_name}, please disable automatic optimization: Trainer(automatic_optimization=False)'
-            raise MisconfigurationException(m)
+            raise MisconfigurationException(
+                f'to use {fn_name}, please disable automatic optimization:'
+                ' set model property `automatic_optimization` as False'
+            )
 
     @classmethod
     def _auto_collect_arguments(cls, frame=None) -> Tuple[Dict, Dict]:
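Note: the new message points users at the model-level property instead of the removed `Trainer(automatic_optimization=False)` flag. A minimal user-side sketch of that migration (the model, data, and hyperparameters are illustrative, not part of this commit; it assumes the `self.optimizers()` / `manual_backward` API of this release):

import torch
from torch.utils.data import DataLoader, TensorDataset
from pytorch_lightning import LightningModule, Trainer


class ManualOptModel(LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(32, 2)

    @property
    def automatic_optimization(self) -> bool:
        # Replaces the removed Trainer(automatic_optimization=False) flag.
        return False

    def training_step(self, batch, batch_idx):
        # Manual optimization: the model drives backward/step/zero_grad itself.
        opt = self.optimizers()
        loss = self.layer(batch[0]).sum()
        self.manual_backward(loss, opt)
        opt.step()
        opt.zero_grad()

    def configure_optimizers(self):
        return torch.optim.SGD(self.layer.parameters(), lr=0.1)


train_data = DataLoader(TensorDataset(torch.randn(64, 32)), batch_size=8)
trainer = Trainer(max_epochs=1, enable_pl_optimizer=True)  # no automatic_optimization kwarg
trainer.fit(ManualOptModel(), train_data)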

pytorch_lightning/trainer/configuration_validator.py

Lines changed: 6 additions & 5 deletions

@@ -79,7 +79,7 @@ def __verify_train_loop_configuration(self, model):
         if trainer.overriden_optimizer_step and not enable_pl_optimizer and automatic_optimization:
             rank_zero_warn(
                 "When overriding `LightningModule` optimizer_step with"
-                " `Trainer(..., enable_pl_optimizer=False, automatic_optimization=True, ...)`,"
+                " `Trainer(..., enable_pl_optimizer=False, ...)`,"
                 " we won't be calling `.zero_grad` we can't assume when you call your `optimizer.step()`."
                 " For Lightning to take care of it, please use `Trainer(enable_pl_optimizer=True)`."
             )
@@ -89,15 +89,16 @@ def __verify_train_loop_configuration(self, model):
         has_overriden_optimization_functions = trainer.overriden_optimizer_step or trainer.overriden_optimizer_zero_grad
         if (has_overriden_optimization_functions) and going_to_accumulate_grad_batches and automatic_optimization:
             raise MisconfigurationException(
-                'When overriding `LightningModule` optimizer_step or optimizer_zero_grad with '
-                '`Trainer(automatic_optimization=True, ...)`, `accumulate_grad_batches` should to be 1.'
+                'When overriding `LightningModule` optimizer_step or optimizer_zero_grad'
+                ' , `accumulate_grad_batches` in `Trainer` should to be 1.'
                 ' It ensures optimizer_step or optimizer_zero_grad are called on every batch.'
             )
 
         if (enable_pl_optimizer) and trainer.overriden_optimizer_zero_grad and not automatic_optimization:
             raise MisconfigurationException(
-                'When overriding `LightningModule` optimizer_zero_grad with '
-                '`Trainer(automatic_optimization=False, enable_pl_optimizer=True, ...) is not supported'
+                'When overriding `LightningModule` optimizer_zero_grad'
+                ' and preserving model property `automatic_optimization` as True with'
+                ' `Trainer(enable_pl_optimizer=True, ...) is not supported'
             )
 
     def __verify_eval_loop_configuration(self, model, eval_loop_name):
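To make the accumulation check concrete: under automatic optimization, a model that overrides `optimizer_step` can no longer be combined with `accumulate_grad_batches > 1`, since the override would not run on every batch. An illustrative sketch of a setup this validator now rejects (model and values are hypothetical, not from this commit):

import torch
from pytorch_lightning import LightningModule, Trainer


class StepOverrideModel(LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(32, 2)

    def training_step(self, batch, batch_idx):
        return self.layer(batch[0]).sum()

    def configure_optimizers(self):
        return torch.optim.SGD(self.layer.parameters(), lr=0.1)

    def optimizer_step(self, *args, **kwargs):
        # Any override of optimizer_step marks the model for this check.
        super().optimizer_step(*args, **kwargs)


trainer = Trainer(accumulate_grad_batches=2)
# trainer.fit(StepOverrideModel(), ...)  # raises MisconfigurationException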

pytorch_lightning/trainer/trainer.py

Lines changed: 1 addition & 1 deletion

@@ -358,7 +358,7 @@ def __init__(
         )
 
         # init train loop related flags
-        # TODO: deprecate in 1.2.0
+        # TODO: remove in 1.3.0
         if automatic_optimization is None:
             automatic_optimization = True
         else:

tests/core/test_lightning_module.py

Lines changed: 0 additions & 2 deletions

@@ -38,7 +38,6 @@ def optimizer_step(self, *_, **__):
         default_root_dir=tmpdir,
         limit_train_batches=2,
         accumulate_grad_batches=2,
-        automatic_optimization=True
     )
 
     trainer.fit(model)
@@ -90,7 +89,6 @@ def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx,
         default_root_dir=tmpdir,
         limit_train_batches=8,
         accumulate_grad_batches=1,
-        automatic_optimization=True,
         enable_pl_optimizer=enable_pl_optimizer
     )
 

tests/core/test_lightning_optimizer.py

Lines changed: 10 additions & 7 deletions

@@ -112,6 +112,10 @@ def configure_optimizers(self):
             lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer_1, step_size=1)
             return [optimizer_1, optimizer_2], [lr_scheduler]
 
+        @property
+        def automatic_optimization(self) -> bool:
+            return False
+
     model = TestModel()
     model.training_step_end = None
     model.training_epoch_end = None
@@ -121,8 +125,8 @@ def configure_optimizers(self):
         limit_val_batches=1,
         max_epochs=1,
         weights_summary=None,
-        automatic_optimization=False,
-        enable_pl_optimizer=True)
+        enable_pl_optimizer=True,
+    )
     trainer.fit(model)
 
     assert len(mock_sgd_step.mock_calls) == 2
@@ -161,6 +165,10 @@ def configure_optimizers(self):
             lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer_1, step_size=1)
             return [optimizer_1, optimizer_2], [lr_scheduler]
 
+        @property
+        def automatic_optimization(self) -> bool:
+            return False
+
     model = TestModel()
     model.training_step_end = None
     model.training_epoch_end = None
@@ -170,7 +178,6 @@ def configure_optimizers(self):
         limit_val_batches=1,
         max_epochs=1,
         weights_summary=None,
-        automatic_optimization=False,
         accumulate_grad_batches=2,
         enable_pl_optimizer=True,
     )
@@ -237,7 +244,6 @@ def configure_optimizers(self):
         max_epochs=1,
         weights_summary=None,
         enable_pl_optimizer=True,
-        automatic_optimization=True
     )
     trainer.fit(model)
 
@@ -291,7 +297,6 @@ def configure_optimizers(self):
         max_epochs=1,
         weights_summary=None,
         enable_pl_optimizer=True,
-        automatic_optimization=True
     )
     trainer.fit(model)
 
@@ -352,7 +357,6 @@ def configure_optimizers(self):
         max_epochs=1,
         weights_summary=None,
         enable_pl_optimizer=True,
-        automatic_optimization=True
     )
     trainer.fit(model)
 
@@ -406,7 +410,6 @@ def configure_optimizers(self):
         max_epochs=1,
         weights_summary=None,
         enable_pl_optimizer=True,
-        automatic_optimization=True,
     )
     trainer.fit(model)

tests/trainer/dynamic_args/test_multiple_optimizers.py

Lines changed: 4 additions & 1 deletion

@@ -97,11 +97,14 @@ def configure_optimizers(self):
             optimizer_2 = torch.optim.SGD(self.layer.parameters(), lr=0.1)
             return optimizer, optimizer_2
 
+        @property
+        def automatic_optimization(self) -> bool:
+            return False
+
     model = TestModel()
     model.val_dataloader = None
 
     trainer = Trainer(
-        automatic_optimization=False,
         default_root_dir=tmpdir,
         limit_train_batches=2,
         limit_val_batches=2,
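As in the optimizer tests above, the multiple-optimizer test now opts into manual optimization through the model rather than the Trainer. A hedged sketch of that pattern with two optimizers (the model name, stepping scheme, and the `training_step` signature with `optimizer_idx` are assumptions based on this release's multi-optimizer behavior, not shown in this diff):

import torch
from pytorch_lightning import LightningModule


class TwoOptimizerModel(LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(32, 2)

    @property
    def automatic_optimization(self) -> bool:
        # The model, not the Trainer, turns automatic optimization off.
        return False

    def training_step(self, batch, batch_idx, optimizer_idx):
        # Step only the optimizer selected by optimizer_idx this call.
        opt_a, opt_b = self.optimizers()
        opt = opt_a if optimizer_idx == 0 else opt_b
        loss = self.layer(batch[0]).sum()
        self.manual_backward(loss, opt)
        opt.step()
        opt.zero_grad()

    def configure_optimizers(self):
        optimizer = torch.optim.SGD(self.layer.parameters(), lr=0.1)
        optimizer_2 = torch.optim.SGD(self.layer.parameters(), lr=0.1)
        return optimizer, optimizer_2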
