Commit 34c585b

SeanNaren authored and Borda committed
Disable pl optimizer temporarily to fix AMP issues (#5163)
* Disable pl optimizer temporarily to fix AMP issues
* Add todo and enable pl optimizer in the test
1 parent d757f8b commit 34c585b

File tree

2 files changed: +3 -1 lines changed

pytorch_lightning/trainer/trainer.py

Lines changed: 1 addition & 1 deletion
@@ -133,7 +133,7 @@ def __init__(
         distributed_backend: Optional[str] = None,
         automatic_optimization: Optional[bool] = None,
         move_metrics_to_cpu: bool = False,
-        enable_pl_optimizer: bool = True,
+        enable_pl_optimizer: bool = False,
         multiple_trainloader_mode: str = 'max_size_cycle',
     ):
         r"""

tests/callbacks/test_callbacks.py

Lines changed: 2 additions & 0 deletions
@@ -33,6 +33,8 @@ def test_trainer_callback_system(torch_save):
         limit_train_batches=3,
         limit_test_batches=2,
         progress_bar_refresh_rate=0,
+        # todo: enabled since internally we wrap the model for optimizer step, this should be fixed
+        enable_pl_optimizer=True
     )

     # no call yet
