
Commit 44b2fb4

fix bug in log_hyperparams
1 parent 6cda5b5 commit 44b2fb4

File tree

pytorch_lightning/callbacks/model_checkpoint.py
pytorch_lightning/trainer/trainer.py

2 files changed: +4 -3 lines

pytorch_lightning/callbacks/model_checkpoint.py

Lines changed: 1 addition & 0 deletions
@@ -592,6 +592,7 @@ def __resolve_ckpt_dir(self, trainer: "pl.Trainer") -> None:
                 if isinstance(trainer.logger.version, str)
                 else f"version_{trainer.logger.version}"
             )
+            # TODO: Find out what ckpt_path should be with multiple loggers
             ckpt_path = os.path.join(save_dir, str(trainer.logger.name), version, "checkpoints")
         else:
             ckpt_path = os.path.join(trainer.weights_save_path, "checkpoints")
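For context, a rough sketch of the path that __resolve_ckpt_dir assembles in the single-logger case shown above; the save_dir, logger name, and version values below are made up for illustration and are not real Trainer state:

import os

# Hypothetical stand-ins for trainer.logger attributes.
save_dir = "lightning_logs"
logger_name = "default"
logger_version = 3  # non-string versions are rendered as f"version_{...}"

version = logger_version if isinstance(logger_version, str) else f"version_{logger_version}"
ckpt_path = os.path.join(save_dir, str(logger_name), version, "checkpoints")
print(ckpt_path)  # lightning_logs/default/version_3/checkpoints

The TODO added in the diff notes the open question: when several loggers are attached, it is not yet decided whose name and version this path should use.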

pytorch_lightning/trainer/trainer.py

Lines changed: 3 additions & 3 deletions
@@ -1242,9 +1242,9 @@ def _log_hyperparams(self) -> None:
 
         for logger in self.loggers:
             if hparams_initial is not None:
-                self.logger.log_hyperparams(hparams_initial)
-                self.logger.log_graph(self.lightning_module)
-                self.logger.save()
+                logger.log_hyperparams(hparams_initial)
+                logger.log_graph(self.lightning_module)
+                logger.save()
 
     def _teardown(self):
         """This is the Trainer's internal teardown, unrelated to the `teardown` hooks in LightningModule and
