Skip to content

Commit ef762a0

Browse files
rohitgr7 and chaton authored
update logging docs and decorators (#4431)
* update logging docs * experiment * add decorators to base and csv logger methods * fix * doc fix * update docs * update docs * Update pytorch_lightning/loggers/base.py Co-authored-by: chaton <[email protected]>
1 parent c2e6e68 commit ef762a0

File tree

4 files changed

+25
-16
lines changed

4 files changed

+25
-16
lines changed

docs/source/logging.rst

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -124,24 +124,28 @@ Once your training starts, you can view the logs by using your favorite logger o
124124
Make a custom logger
125125
********************
126126

127-
You can implement your own logger by writing a class that inherits from
128-
:class:`LightningLoggerBase`. Use the :func:`~pytorch_lightning.loggers.base.rank_zero_only`
129-
decorator to make sure that only the first process in DDP training logs data.
127+
You can implement your own logger by writing a class that inherits from :class:`~pytorch_lightning.loggers.base.LightningLoggerBase`.
128+
Use the :func:`~pytorch_lightning.loggers.base.rank_zero_experiment` and :func:`~pytorch_lightning.utilities.distributed.rank_zero_only` decorators to make sure that only the first process in DDP training creates the experiment and logs the data respectively.
130129

131130
.. testcode::
132131

133132
from pytorch_lightning.utilities import rank_zero_only
134133
from pytorch_lightning.loggers import LightningLoggerBase
134+
from pytorch_lightning.loggers.base import rank_zero_experiment
135135

136136
class MyLogger(LightningLoggerBase):
137137

138+
@property
138139
def name(self):
139140
return 'MyLogger'
140141

142+
@property
143+
@rank_zero_experiment
141144
def experiment(self):
142145
# Return the experiment object associated with this logger.
143146
pass
144-
147+
148+
@property
145149
def version(self):
146150
# Return the experiment version, int or str.
147151
return '0.1'
@@ -158,6 +162,7 @@ decorator to make sure that only the first process in DDP training logs data.
158162
# your code to record metrics goes here
159163
pass
160164

165+
@rank_zero_only
161166
def save(self):
162167
# Optional. Any code necessary to save logger data goes here
163168
# If you implement this, remember to call `super().save()`

pytorch_lightning/loggers/base.py

Lines changed: 13 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,17 @@
2929
from pytorch_lightning.utilities import rank_zero_only
3030

3131

32+
def rank_zero_experiment(fn: Callable) -> Callable:
33+
""" Returns the real experiment on rank 0 and otherwise the DummyExperiment. """
34+
@wraps(fn)
35+
def experiment(self):
36+
@rank_zero_only
37+
def get_experiment():
38+
return fn(self)
39+
return get_experiment() or DummyExperiment()
40+
return experiment
41+
42+
3243
class LightningLoggerBase(ABC):
3344
"""
3445
Base class for experiment loggers.
@@ -410,9 +421,11 @@ def __init__(self):
410421
def experiment(self):
411422
return self._experiment
412423

424+
@rank_zero_only
413425
def log_metrics(self, metrics, step):
414426
pass
415427

428+
@rank_zero_only
416429
def log_hyperparams(self, params):
417430
pass
418431

@@ -477,14 +490,3 @@ def merge_dicts(
477490
d_out[k] = (fn or default_func)(values_to_agg)
478491

479492
return d_out
480-
481-
482-
def rank_zero_experiment(fn: Callable) -> Callable:
483-
""" Returns the real experiment on rank 0 and otherwise the DummyExperiment. """
484-
@wraps(fn)
485-
def experiment(self):
486-
@rank_zero_only
487-
def get_experiment():
488-
return fn(self)
489-
return get_experiment() or DummyExperiment()
490-
return experiment

pytorch_lightning/loggers/csv_logs.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@
2929

3030
from pytorch_lightning import _logger as log
3131
from pytorch_lightning.core.saving import save_hparams_to_yaml
32-
from pytorch_lightning.loggers.base import LightningLoggerBase
32+
from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
3333
from pytorch_lightning.utilities.distributed import rank_zero_only, rank_zero_warn
3434

3535

@@ -162,6 +162,7 @@ def save_dir(self) -> Optional[str]:
162162
return self._save_dir
163163

164164
@property
165+
@rank_zero_experiment
165166
def experiment(self) -> ExperimentWriter:
166167
r"""
167168

pytorch_lightning/loggers/wandb.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -170,6 +170,7 @@ def version(self) -> Optional[str]:
170170
# don't create an experiment if we don't have one
171171
return self._experiment.id if self._experiment else self._id
172172

173+
@rank_zero_only
173174
def finalize(self, status: str) -> None:
174175
# offset future training logged on same W&B run
175176
if self._experiment is not None:

0 commit comments

Comments (0)