Commit ac76dfc

rohitgr7 and Borda authored
Remove NaNs from loss in LRFinder (#1862)
* Remove NaNs from loss in LRFinder
* np.isfinite
* chlog
* add test
* chlog

Co-authored-by: Jirka <[email protected]>
1 parent: a153fe4 · commit: ac76dfc

File tree

3 files changed (+28 / -4 lines)

CHANGELOG.md

Lines changed: 2 additions & 0 deletions
@@ -14,6 +14,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Allow user to select individual TPU core to train on ([#1729](https://github.com/PyTorchLightning/pytorch-lightning/pull/1729))
 
+- Removed non-finite values from loss in `LRFinder` ([#1862](https://github.com/PyTorchLightning/pytorch-lightning/pull/1862))
+
 ### Deprecated
 
 ### Removed

pytorch_lightning/trainer/lr_finder.py

Lines changed: 3 additions & 2 deletions
@@ -321,8 +321,9 @@ def suggestion(self, skip_begin: int = 10, skip_end: int = 1):
 
         """
         try:
-            loss = self.results["loss"][skip_begin:-skip_end]
-            min_grad = (np.gradient(np.array(loss))).argmin()
+            loss = np.array(self.results["loss"][skip_begin:-skip_end])
+            loss = loss[np.isfinite(loss)]
+            min_grad = np.gradient(loss).argmin()
             self._optimal_idx = min_grad + skip_begin
             return self.results["lr"][self._optimal_idx]
         except Exception:
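
For context, a minimal standalone sketch (synthetic losses array, illustrative only, not the Lightning objects) of why the np.isfinite mask matters: once a diverging learning rate pushes a NaN into the recorded losses, np.gradient propagates it and argmin stops tracking the steepest descent, whereas masking out non-finite values first keeps the suggestion stable.

import numpy as np

# Sketch only: synthetic, smoothly decreasing loss curve whose steepest
# point sits near the middle; the last recorded step "diverged" to NaN.
losses = 2.0 - np.tanh(np.linspace(-3, 3, 30))
losses[-1] = float('nan')

# Previous behaviour: the NaN propagates through np.gradient, so argmin
# lands on a NaN entry instead of the steepest part of the curve.
naive_idx = np.gradient(losses).argmin()

# Patched behaviour: keep only finite values before taking the gradient,
# mirroring the np.isfinite mask added to suggestion().
finite_losses = losses[np.isfinite(losses)]
robust_idx = np.gradient(finite_losses).argmin()

print(naive_idx, robust_idx)  # NaN-dominated index vs. one near the curve's middle

Note that np.gradient still needs at least two finite points, so a run where every recorded loss diverged would still fall through to the except branch of suggestion().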

tests/trainer/test_lr_finder.py

Lines changed: 23 additions & 2 deletions
@@ -124,7 +124,7 @@ def test_call_to_trainer_method(tmpdir):
     # logger file to get meta
     trainer = Trainer(
         default_save_path=tmpdir,
-        max_epochs=5,
+        max_epochs=5
     )
 
     lrfinder = trainer.lr_find(model, mode='linear')
@@ -170,7 +170,7 @@ def test_suggestion_parameters_work(tmpdir):
     # logger file to get meta
     trainer = Trainer(
         default_save_path=tmpdir,
-        max_epochs=10,
+        max_epochs=10
     )
 
     lrfinder = trainer.lr_find(model)
@@ -179,3 +179,24 @@
 
     assert lr1 != lr2, \
         'Skipping parameter did not influence learning rate'
+
+
+def test_suggestion_with_non_finite_values(tmpdir):
+    """ Test that non-finite values does not alter results """
+
+    hparams = EvalModelTemplate.get_default_hparams()
+    model = EvalModelTemplate(hparams)
+
+    # logger file to get meta
+    trainer = Trainer(
+        default_save_path=tmpdir,
+        max_epochs=10
+    )
+
+    lrfinder = trainer.lr_find(model)
+    before_lr = lrfinder.suggestion()
+    lrfinder.results['loss'][-1] = float('nan')
+    after_lr = lrfinder.suggestion()
+
+    assert before_lr == after_lr, \
+        'Learning rate was altered because of non-finite loss values'
