
Commit 7625e49

SeanNaren authored and committed
Revert "Skip tuner algorithms on fast dev (#3903)"
This reverts commit 189ed25
1 parent 4c61f70 · commit 7625e49
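
For context, #3903 made both tuner algorithms (batch-size scaling and the learning-rate finder) no-ops whenever `fast_dev_run=True`; this revert restores the earlier behaviour, where they run regardless of the flag. Below is a minimal sketch of the affected usage, assuming the `Trainer.tune()` entry point of this release and a hypothetical `ToyModel`:

import torch
from torch.utils.data import DataLoader, TensorDataset
import pytorch_lightning as pl


class ToyModel(pl.LightningModule):
    """Minimal module exposing `lr` and `batch_size`, the attributes the two
    tuner algorithms look up (directly or via hparams)."""

    def __init__(self, lr=1e-3, batch_size=32):
        super().__init__()
        self.lr = lr
        self.batch_size = batch_size
        self.layer = torch.nn.Linear(32, 2)

    def forward(self, x):
        return self.layer(x)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return torch.nn.functional.cross_entropy(self(x), y)

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=self.lr)

    def train_dataloader(self):
        ds = TensorDataset(torch.randn(256, 32), torch.randint(0, 2, (256,)))
        return DataLoader(ds, batch_size=self.batch_size)


# With #3903 both tuner algorithms were skipped (with a UserWarning) whenever
# fast_dev_run=True; after this revert they run regardless of the flag.
trainer = pl.Trainer(
    auto_lr_find=True,           # learning-rate finder runs inside trainer.tune()
    auto_scale_batch_size=True,  # batch-size scaler runs inside trainer.tune()
    fast_dev_run=True,
)
trainer.tune(ToyModel())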

File tree

4 files changed: +5 -43 lines

CHANGELOG.md

Lines changed: 2 additions & 2 deletions
@@ -30,7 +30,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Added `fsspec` to tuner ([#4458](https://github.com/PyTorchLightning/pytorch-lightning/pull/4458))


-- Added metrics aggregation in Horovod and fixed early stopping ([#3775](https://github.com/PyTorchLightning/pytorch-lightning/pull/3775))
+- Added metrics aggregation in Horovod and fixed early stopping ([#3775](https://github.com/PyTorchLightning/pytorch-lightning/pull/3775))


 - Added `manual_optimizer_step` which work with `AMP Native` and `accumulated_grad_batches` ([#4485](https://github.com/PyTorchLightning/pytorch-lightning/pull/4485))
@@ -41,7 +41,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

 ### Changed

-- Tuner algorithms will be skipped if `fast_dev_run=True` ([#3903](https://github.com/PyTorchLightning/pytorch-lightning/pull/3903))
+

 ### Deprecated


pytorch_lightning/tuner/batch_size_scaling.py

Lines changed: 0 additions & 4 deletions
@@ -68,10 +68,6 @@ def scale_batch_size(trainer,
         **fit_kwargs: remaining arguments to be passed to .fit(), e.g., dataloader
             or datamodule.
     """
-    if trainer.fast_dev_run:
-        rank_zero_warn('Skipping batch size scaler since `fast_dev_run=True`', UserWarning)
-        return
-
     if not lightning_hasattr(model, batch_arg_name):
         raise MisconfigurationException(
             f'Field {batch_arg_name} not found in both `model` and `model.hparams`')
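
With the early return gone, `scale_batch_size` again only requires that `batch_arg_name` exist on the model or its hparams (otherwise the `MisconfigurationException` above is raised). A minimal sketch of calling it directly, reusing the hypothetical `ToyModel` from the sketch above and assuming the `trainer.tuner.scale_batch_size` entry point:

import pytorch_lightning as pl

model = ToyModel(batch_size=2)        # hypothetical module defined above
trainer = pl.Trainer(max_epochs=1)

# After the revert this runs even when the trainer was built with
# fast_dev_run=True (previously it warned and returned None).
new_size = trainer.tuner.scale_batch_size(
    model,
    mode='power',                 # keep doubling until OOM or max_trials
    batch_arg_name='batch_size',  # field looked up via lightning_hasattr
)
model.batch_size = new_size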

pytorch_lightning/tuner/lr_finder.py

Lines changed: 3 additions & 16 deletions
@@ -29,8 +29,6 @@
 from pytorch_lightning.loggers.base import DummyLogger
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.parsing import lightning_hasattr, lightning_setattr
-from pytorch_lightning.utilities import rank_zero_warn
-from pytorch_lightning.utilities.cloud_io import get_filesystem

 # check if ipywidgets is installed before importing tqdm.auto
 # to ensure it won't fail and a progress bar is displayed
@@ -43,10 +41,6 @@
 def _run_lr_finder_internally(trainer, model: LightningModule):
     """ Call lr finder internally during Trainer.fit() """
     lr_finder = lr_find(trainer, model)
-
-    if lr_finder is None:
-        return
-
     lr = lr_finder.suggestion()

     # TODO: log lr.results to self.logger
@@ -136,11 +130,7 @@ def lr_find(
            trainer.fit(model)

    """
-    if trainer.fast_dev_run:
-        rank_zero_warn('Skipping learning rate finder since `fast_dev_run=True`', UserWarning)
-        return
-
-    save_path = os.path.join(trainer.default_root_dir, 'lr_find_temp_model.ckpt')
+    save_path = os.path.join(trainer.default_root_dir, 'lr_find_temp.ckpt')

     __lr_finder_dump_params(trainer, model)

@@ -191,11 +181,8 @@ def lr_find(
     lr_finder._total_batch_idx = trainer.total_batch_idx  # for debug purpose

     # Reset model state
-    if trainer.is_global_zero:
-        trainer.checkpoint_connector.restore(str(save_path), on_gpu=trainer.on_gpu)
-        fs = get_filesystem(str(save_path))
-        if fs.exists(save_path):
-            fs.rm(save_path)
+    trainer.checkpoint_connector.restore(str(save_path), on_gpu=trainer.on_gpu)
+    os.remove(save_path)

     # Finish by resetting variables so trainer is ready to fit model
     __lr_finder_restore_params(trainer, model)
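
Apart from these removals the lr finder flow is unchanged: it checkpoints the model to `save_path`, sweeps the learning rate, restores the checkpoint, and deletes the temporary file. A minimal sketch of calling it and applying the suggestion, again reusing the hypothetical `ToyModel` and assuming the `trainer.tuner.lr_find` entry point:

import pytorch_lightning as pl

model = ToyModel()                  # hypothetical module defined above
trainer = pl.Trainer(max_epochs=1)

# With the revert, the range test is no longer skipped under fast_dev_run=True.
lr_finder = trainer.tuner.lr_find(model)

# suggestion() picks the lr at the steepest drop of the loss curve; write it
# back onto the model before the real fit.
model.lr = lr_finder.suggestion()
trainer.fit(model)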

tests/trainer/flags/test_fast_dev_run.py

Lines changed: 0 additions & 21 deletions
This file was deleted.
