Skip to content

Commit 8bf41f1

Browse files
awaelchli and Borda
authored and committed
Improve DummyLogger (#6398)
* fix dummy logger * docs * update docs * add changelog * add none return annotation * return empty string for name, version
1 parent cc40fa3 commit 8bf41f1

File tree

4 files changed

+37
-18
lines changed

4 files changed

+37
-18
lines changed

CHANGELOG.md

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,15 @@ All notable changes to this project will be documented in this file.
44

55
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
66

7+
## [1.2.6] - 2021-03-30
8+
9+
### Changed
10+
11+
-
12+
13+
### Fixed
14+
15+
- Fixed `DummyLogger.log_hyperparams` raising a `TypeError` when running with `fast_dev_run=True` ([#6398](https://github.com/PyTorchLightning/pytorch-lightning/pull/6398))
716

817

918
## [1.2.5] - 2021-03-23
@@ -13,7 +22,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
1322
- Update Gradient Clipping for the TPU Accelerator ([#6576](https://github.com/PyTorchLightning/pytorch-lightning/pull/6576))
1423
- Refactored setup for typing friendly ([#6590](https://github.com/PyTorchLightning/pytorch-lightning/pull/6590))
1524

16-
1725
### Fixed
1826

1927
- Fixed a bug where `all_gather` would not work correctly with `tpu_cores=8` ([#6587](https://github.com/PyTorchLightning/pytorch-lightning/pull/6587))
@@ -36,7 +44,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
3644
- Fixed broadcast to use PyTorch `broadcast_object_list` and add `reduce_decision` ([#6410](https://github.com/PyTorchLightning/pytorch-lightning/pull/6410))
3745
- Fixed logger creating directory structure too early in DDP ([#6380](https://github.com/PyTorchLightning/pytorch-lightning/pull/6380))
3846
- Fixed DeepSpeed additional memory use on rank 0 when default device not set early enough ([#6460](https://github.com/PyTorchLightning/pytorch-lightning/pull/6460))
39-
- Fixed `DummyLogger.log_hyperparams` raising a `TypeError` when running with `fast_dev_run=True` ([#6398](https://github.com/PyTorchLightning/pytorch-lightning/pull/6398))
4047
- Fixed an issue with `Tuner.scale_batch_size` not finding the batch size attribute in the datamodule ([#5968](https://github.com/PyTorchLightning/pytorch-lightning/pull/5968))
4148
- Fixed an exception in the layer summary when the model contains torch.jit scripted submodules ([#6511](https://github.com/PyTorchLightning/pytorch-lightning/pull/6511))
4249
- Fixed when Train loop config was run during `Trainer.predict` ([#6541](https://github.com/PyTorchLightning/pytorch-lightning/pull/6541))

pytorch_lightning/loggers/base.py

Lines changed: 18 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -279,12 +279,14 @@ def _sanitize_params(params: Dict[str, Any]) -> Dict[str, Any]:
279279
return params
280280

281281
@abstractmethod
282-
def log_hyperparams(self, params: argparse.Namespace):
282+
def log_hyperparams(self, params: argparse.Namespace, *args, **kwargs):
283283
"""
284284
Record hyperparameters.
285285
286286
Args:
287287
params: :class:`~argparse.Namespace` containing the hyperparameters
288+
args: Optional positional arguments, depends on the specific logger being used
289+
kwargs: Optional keywoard arguments, depends on the specific logger being used
288290
"""
289291

290292
def log_graph(self, model: LightningModule, input_array=None) -> None:
@@ -418,41 +420,41 @@ def nop(*args, **kw):
418420
def __getattr__(self, _):
419421
return self.nop
420422

421-
def __getitem__(self, idx):
422-
# enables self.logger[0].experiment.add_image
423-
# and self.logger.experiment[0].add_image(...)
423+
def __getitem__(self, idx) -> "DummyExperiment":
424+
# enables self.logger.experiment[0].add_image(...)
424425
return self
425426

426427

427428
class DummyLogger(LightningLoggerBase):
428-
""" Dummy logger for internal use. Is usefull if we want to disable users
429-
logger for a feature, but still secure that users code can run """
429+
"""
430+
Dummy logger for internal use. It is useful if we want to disable user's
431+
logger for a feature, but still ensure that user code can run
432+
"""
430433

431434
def __init__(self):
432435
super().__init__()
433436
self._experiment = DummyExperiment()
434437

435438
@property
436-
def experiment(self):
439+
def experiment(self) -> DummyExperiment:
437440
return self._experiment
438441

439-
@rank_zero_only
440-
def log_metrics(self, metrics, step):
442+
def log_metrics(self, *args, **kwargs) -> None:
441443
pass
442444

443-
@rank_zero_only
444-
def log_hyperparams(self, params):
445+
def log_hyperparams(self, *args, **kwargs) -> None:
445446
pass
446447

447448
@property
448-
def name(self):
449-
pass
449+
def name(self) -> str:
450+
return ""
450451

451452
@property
452-
def version(self):
453-
pass
453+
def version(self) -> str:
454+
return ""
454455

455-
def __getitem__(self, idx):
456+
def __getitem__(self, idx) -> "DummyLogger":
457+
# enables self.logger[0].experiment.add_image(...)
456458
return self
457459

458460

tests/loggers/test_base.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -229,15 +229,24 @@ def log_metrics(self, metrics, step):
229229

230230

231231
def test_dummyexperiment_support_indexing():
232+
""" Test that the DummyExperiment can imitate indexing the experiment in a LoggerCollection. """
232233
experiment = DummyExperiment()
233234
assert experiment[0] == experiment
234235

235236

236237
def test_dummylogger_support_indexing():
238+
""" Test that the DummyLogger can imitate indexing of a LoggerCollection. """
237239
logger = DummyLogger()
238240
assert logger[0] == logger
239241

240242

243+
def test_dummylogger_noop_method_calls():
244+
""" Test that the DummyLogger methods can be called with arbitrary arguments. """
245+
logger = DummyLogger()
246+
logger.log_hyperparams("1", 2, three="three")
247+
logger.log_metrics("1", 2, three="three")
248+
249+
241250
def test_np_sanitization():
242251

243252
class CustomParamsLogger(CustomLogger):

tests/trainer/flags/test_fast_dev_run.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -71,6 +71,7 @@ def test_step(self, batch, batch_idx):
7171
checkpoint_callback = ModelCheckpoint()
7272
early_stopping_callback = EarlyStopping()
7373
trainer_config = dict(
74+
default_root_dir=tmpdir,
7475
fast_dev_run=fast_dev_run,
7576
val_check_interval=2,
7677
logger=True,

0 commit comments

Comments (0)