Closed

40 commits
ca5afa5
standardize log_dir for loggers
awaelchli May 14, 2021
f536abd
change to experiment dir
awaelchli May 14, 2021
acf8e4d
wandb experiment dir
awaelchli May 14, 2021
5573a58
tensorboard experimen dir
awaelchli May 14, 2021
674b4d3
test tube experiment dir
awaelchli May 14, 2021
42d4974
csv logger experiment dir
awaelchli May 14, 2021
90049f2
comet experiment dir
awaelchli May 14, 2021
4eb470a
fix types
awaelchli May 14, 2021
d11c31f
rename
awaelchli May 14, 2021
3330f34
neptune experiment dir
awaelchli May 14, 2021
939eea4
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] May 14, 2021
573f447
changelog
awaelchli May 15, 2021
fc95546
mlflow experiment dir
awaelchli May 15, 2021
8542ffd
test deprecation message
awaelchli May 15, 2021
068a3f8
add deprecation
awaelchli May 15, 2021
618cb7d
force broadcast once
awaelchli May 15, 2021
a279d5e
past tense
awaelchli May 15, 2021
41b3c22
update log_dir references
awaelchli May 15, 2021
8746bd2
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] May 15, 2021
3bf9ad3
update wandb tests
awaelchli May 15, 2021
8fb5085
add typehint
awaelchli May 15, 2021
8ccda24
update tensorboard test
awaelchli May 15, 2021
4f06552
test tube experiment dir
awaelchli May 15, 2021
0a9841d
fix mlflow experiment dir
awaelchli May 28, 2021
1f03001
Merge branch 'master' into bugfix/logdir
awaelchli May 28, 2021
fa1cc5f
wandb save dir None check
awaelchli May 28, 2021
bdd0127
update experiment dir for profiler
awaelchli May 28, 2021
fdf1bf1
set default for save_dir
awaelchli May 28, 2021
839cae1
unused import
awaelchli May 28, 2021
27dd085
update none check
awaelchli May 29, 2021
2809ba6
update none check
awaelchli May 29, 2021
510a456
mock save dir
awaelchli May 29, 2021
bc214b8
Merge branch 'master' into bugfix/logdir
awaelchli Jun 6, 2021
7a5fa4e
fix test tube version int
awaelchli Jun 6, 2021
344e240
fix save dir test_all
awaelchli Jun 6, 2021
7cb26a2
move properties
awaelchli Jun 6, 2021
5fdb90f
fix merge error
awaelchli Jun 6, 2021
5b019e8
fix mocks
awaelchli Jun 6, 2021
bbda02b
fix unused import
awaelchli Jun 6, 2021
d571d04
Merge branch 'master' into bugfix/logdir
awaelchli Jul 20, 2021
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -183,6 +183,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Added support for `devices` flag to Trainer ([#8440](https://github.com/PyTorchLightning/pytorch-lightning/pull/8440))


- Added `Trainer.experiment_dir` property for accessing the logging directory ([#7543](https://github.com/PyTorchLightning/pytorch-lightning/pull/7543))


### Changed


@@ -377,6 +380,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Deprecated `reload_dataloaders_every_epoch` argument of `Trainer` in favor of `reload_dataloaders_every_n_epochs` ([#5043](https://github.com/PyTorchLightning/pytorch-lightning/pull/5043))


- Deprecated `Trainer.log_dir` in favor of `Trainer.experiment_dir` with standardized access to the logging folder for all built-in loggers ([#7543](https://github.com/PyTorchLightning/pytorch-lightning/pull/7543))


### Removed


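Illustrative usage sketch (not part of the diff): assuming the two changelog entries above land as described, the new accessor and the deprecated alias behave as follows. The CSVLogger arguments and the printed path are examples only.

from pytorch_lightning import Trainer
from pytorch_lightning.loggers import CSVLogger

trainer = Trainer(logger=CSVLogger(save_dir="logs", name="exp"))

# standardized accessor: resolves to save_dir/name/version for the built-in loggers
print(trainer.experiment_dir)  # e.g. logs/exp/version_0

# deprecated alias: still returns a path, but warns until its removal in v1.6
print(trainer.log_dir)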
20 changes: 20 additions & 0 deletions pytorch_lightning/loggers/base.py
@@ -335,6 +335,26 @@ def save_dir(self) -> Optional[str]:
"""
return None

@property
def experiment_dir(self) -> Optional[str]:
"""
Return the experiment directory for the current run where logs get saved, or `None` if the logger does not
save data locally. This is usually a versioned directory under
:meth:`pytorch_lightning.loggers.base.LightningLoggerBase.save_dir`.
"""
return self.save_dir

@property
def log_dir(self) -> str:
"""
Return the experiment directory for the current run where logs get saved.

.. deprecated::
Use :meth:`pytorch_lightning.loggers.base.LightningLoggerBase.experiment_dir` instead.
Will be removed in v1.6.0.
"""
return self.experiment_dir

@property
@abstractmethod
def name(self) -> str:
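For third-party loggers, the new contract amounts to overriding experiment_dir alongside save_dir; the deprecated log_dir alias then resolves through the base class automatically. A minimal sketch of a purely local file logger (all names below are hypothetical):

import os

from pytorch_lightning.loggers import LightningLoggerBase
from pytorch_lightning.utilities import rank_zero_only


class MyFileLogger(LightningLoggerBase):
    """Hypothetical logger that writes to save_dir/name/version_<version>."""

    def __init__(self, save_dir: str = "my_logs", name: str = "default", version: int = 0):
        super().__init__()
        self._save_dir, self._name, self._version = save_dir, name, version

    @property
    def save_dir(self):
        return self._save_dir

    @property
    def experiment_dir(self):
        # the versioned run directory, mirroring the built-in loggers
        return os.path.join(self._save_dir, self._name, f"version_{self._version}")

    @property
    def name(self):
        return self._name

    @property
    def version(self):
        return self._version

    @property
    def experiment(self):
        return None  # no third-party experiment object in this sketch

    @rank_zero_only
    def log_hyperparams(self, params):
        pass

    @rank_zero_only
    def log_metrics(self, metrics, step=None):
        pass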
4 changes: 4 additions & 0 deletions pytorch_lightning/loggers/comet.py
@@ -273,6 +273,10 @@ def finalize(self, status: str) -> None:
def save_dir(self) -> Optional[str]:
return self._save_dir

@property
def experiment_dir(self) -> str:
return os.path.join(self.save_dir, self.name, self.version)

@property
def name(self) -> str:
# Don't create an experiment if we don't have one
14 changes: 7 additions & 7 deletions pytorch_lightning/loggers/csv_logs.py
@@ -149,21 +149,21 @@ def root_dir(self) -> str:
return os.path.join(self.save_dir, self.name)

@property
def log_dir(self) -> str:
def save_dir(self) -> Optional[str]:
return self._save_dir

@property
def experiment_dir(self) -> str:
"""
The log directory for this run. By default, it is named
``'version_${self.version}'`` but it can be overridden by passing a string value
for the constructor's version parameter instead of ``None`` or an int.
``save_dir/name/'version_${self.version}'`` but the root directory, experiment name and version can also be
manually set via the constructor.
"""
# create a pseudo standard path ala test-tube
version = self.version if isinstance(self.version, str) else f"version_{self.version}"
log_dir = os.path.join(self.root_dir, version)
return log_dir

@property
def save_dir(self) -> Optional[str]:
return self._save_dir

@property
@rank_zero_experiment
def experiment(self) -> ExperimentWriter:
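A quick sketch of the resulting CSVLogger layout (values are illustrative): save_dir stays the root passed to the constructor, root_dir adds the experiment name, and experiment_dir is the versioned run directory.

from pytorch_lightning.loggers import CSVLogger

logger = CSVLogger(save_dir="logs", name="csv_exp", version=3)
print(logger.save_dir)        # logs
print(logger.root_dir)        # logs/csv_exp
print(logger.experiment_dir)  # logs/csv_exp/version_3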
5 changes: 5 additions & 0 deletions pytorch_lightning/loggers/mlflow.py
@@ -239,6 +239,11 @@ def save_dir(self) -> Optional[str]:
if self._tracking_uri.startswith(LOCAL_FILE_URI_PREFIX):
return self._tracking_uri.lstrip(LOCAL_FILE_URI_PREFIX)

@property
def experiment_dir(self) -> Optional[str]:
if self._experiment_id is not None:
return os.path.join(self.save_dir, self.name, self.version)

@property
def name(self) -> str:
return self.experiment_id
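Sketch of the local-file case the property above covers (requires the mlflow package; paths are illustrative). Before the experiment id exists, experiment_dir is None, matching the guard in the diff.

from pytorch_lightning.loggers import MLFlowLogger

logger = MLFlowLogger(experiment_name="mlflow_exp", tracking_uri="file:./ml-runs")
print(logger.save_dir)        # ./ml-runs (tracking URI with the file: prefix stripped)
print(logger.experiment_dir)  # None until the experiment id has been created
_ = logger.experiment         # touching the experiment creates the run
print(logger.experiment_dir)  # ./ml-runs/<experiment_id>/<run_id>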
5 changes: 5 additions & 0 deletions pytorch_lightning/loggers/neptune.py
@@ -274,6 +274,11 @@ def save_dir(self) -> Optional[str]:
# Neptune does not save any local files
return None

@property
def experiment_dir(self) -> Optional[str]:
# Neptune does not save any local files
return None

@property
def name(self) -> str:
if self.offline_mode:
32 changes: 16 additions & 16 deletions pytorch_lightning/loggers/tensorboard.py
@@ -115,28 +115,28 @@ def root_dir(self) -> str:
return os.path.join(self.save_dir, self.name)

@property
def log_dir(self) -> str:
def save_dir(self) -> Optional[str]:
return self._save_dir

@property
def sub_dir(self) -> Optional[str]:
return self._sub_dir

@property
def experiment_dir(self) -> str:
"""
The directory for this run's tensorboard checkpoint. By default, it is named
``'version_${self.version}'`` but it can be overridden by passing a string value
for the constructor's version parameter instead of ``None`` or an int.
"""
# create a pseudo standard path ala test-tube
version = self.version if isinstance(self.version, str) else f"version_{self.version}"
log_dir = os.path.join(self.root_dir, version)
experiment_dir = os.path.join(self.root_dir, version)
if isinstance(self.sub_dir, str):
log_dir = os.path.join(log_dir, self.sub_dir)
log_dir = os.path.expandvars(log_dir)
log_dir = os.path.expanduser(log_dir)
return log_dir

@property
def save_dir(self) -> Optional[str]:
return self._save_dir

@property
def sub_dir(self) -> Optional[str]:
return self._sub_dir
experiment_dir = os.path.join(experiment_dir, self.sub_dir)
experiment_dir = os.path.expandvars(experiment_dir)
experiment_dir = os.path.expanduser(experiment_dir)
return experiment_dir

@property
@rank_zero_experiment
@@ -156,7 +156,7 @@ def experiment(self) -> SummaryWriter:
assert rank_zero_only.rank == 0, 'tried to init log dirs in non global_rank=0'
if self.root_dir:
self._fs.makedirs(self.root_dir, exist_ok=True)
self._experiment = SummaryWriter(log_dir=self.log_dir, **self._kwargs)
self._experiment = SummaryWriter(log_dir=self.experiment_dir, **self._kwargs)
return self._experiment

@rank_zero_only
@@ -240,7 +240,7 @@ def log_graph(self, model: 'pl.LightningModule', input_array=None):
@rank_zero_only
def save(self) -> None:
super().save()
dir_path = self.log_dir
dir_path = self.experiment_dir

# prepare the file path
hparams_file = os.path.join(dir_path, self.NAME_HPARAMS_FILE)
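Sketch of the TensorBoard directory resolution after the rename, including the optional sub_dir (values are illustrative); reading these properties does not create the SummaryWriter.

from pytorch_lightning.loggers import TensorBoardLogger

logger = TensorBoardLogger(save_dir="logs", name="tb_exp", version=1, sub_dir="images")
print(logger.root_dir)        # logs/tb_exp
print(logger.experiment_dir)  # logs/tb_exp/version_1/images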
10 changes: 8 additions & 2 deletions pytorch_lightning/loggers/test_tube.py
@@ -15,6 +15,7 @@
Test Tube Logger
----------------
"""
import os
from argparse import Namespace
from typing import Any, Dict, Optional, Union

@@ -196,11 +197,16 @@ def save_dir(self) -> Optional[str]:
return self._save_dir

@property
def name(self) -> str:
def experiment_dir(self) -> str:
version = self.version if isinstance(self.version, str) else f"version_{self.version}"
return os.path.join(self.save_dir, self.name, version)

@property
def name(self) -> Union[int, str]:
if self._experiment is None:
return self._name

return self.experiment.name
return str(self.experiment.name)

@property
def version(self) -> int:
11 changes: 8 additions & 3 deletions pytorch_lightning/loggers/wandb.py
@@ -154,8 +154,8 @@ def __init__(
self._wandb_init = dict(
name=name,
project=project,
id=version or id,
dir=save_dir,
id=(version or id),
dir=(save_dir or './wandb'),
resume='allow',
anonymous=anonymous_lut.get(anonymous, anonymous)
)
@@ -221,7 +221,7 @@ def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) ->

@property
def save_dir(self) -> Optional[str]:
return self._save_dir
return self._experiment.dir if self._experiment else self._save_dir

@property
def experiment_dir(self) -> str:
if self._experiment:
return os.path.join(self.save_dir, self.name, self.version)

@property
def name(self) -> Optional[str]:
2 changes: 1 addition & 1 deletion pytorch_lightning/profiler/pytorch.py
@@ -241,7 +241,7 @@

Args:
dirpath: Directory path for the ``filename``. If ``dirpath`` is ``None`` but ``filename`` is present, the
``trainer.log_dir`` (from :class:`~pytorch_lightning.loggers.tensorboard.TensorBoardLogger`)
``trainer.experiment_dir`` (from :class:`~pytorch_lightning.loggers.tensorboard.TensorBoardLogger`)
will be used.

filename: If present, filename where the profiler results will be saved instead of printing to stdout.
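Sketch of the docstring change above: with dirpath left as None but a filename given, the profiler report is written under trainer.experiment_dir (the filename and trainer arguments here are illustrative).

from pytorch_lightning import Trainer
from pytorch_lightning.profiler import PyTorchProfiler

profiler = PyTorchProfiler(dirpath=None, filename="perf_logs")
trainer = Trainer(profiler=profiler, max_epochs=1)
# after trainer.fit(model), the text report lands under trainer.experiment_dir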
35 changes: 21 additions & 14 deletions pytorch_lightning/trainer/properties.py
@@ -227,20 +227,6 @@ def model(self, model: torch.nn.Module) -> None:
General properties
"""

@property
def log_dir(self) -> Optional[str]:
if self.logger is None:
dirpath = self.default_root_dir
elif isinstance(self.logger, TensorBoardLogger):
dirpath = self.logger.log_dir
elif isinstance(self.logger, LoggerCollection):
dirpath = self.default_root_dir
else:
dirpath = self.logger.save_dir

dirpath = self.accelerator.broadcast(dirpath)
return dirpath

@property
def use_amp(self) -> bool:
return self.precision == 16
@@ -618,6 +604,27 @@ def _active_loop(self) -> Optional[Union[FitLoop, EvaluationLoop, PredictionLoop
def callback_metrics(self) -> dict:
return self.logger_connector.callback_metrics

@property
def experiment_dir(self) -> Optional[str]:
dirpath = self.logger.experiment_dir if self.logger is not None else self.default_root_dir
if not getattr(self, "__experiment_dir_broadcasted", False):
dirpath = self.accelerator.broadcast(dirpath)
setattr(self, "__experiment_dir_broadcasted", True)
return dirpath

@property
def log_dir(self) -> Optional[str]:
rank_zero_deprecation(
"Trainer.log_dir is deprecated since v1.4 and will be removed in v1.6."
" Use Trainer.experiment_dir instead, which consistently points to `save_dir/name/version`"
" for all built-in loggers."
)
if self.logger is None:
dirpath = self.default_root_dir
else:
dirpath = getattr(self.logger, 'log_dir' if isinstance(self.logger, TensorBoardLogger) else 'save_dir')
return dirpath

@property
def logged_metrics(self) -> dict:
return self.logger_connector.logged_metrics
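Sketch of the two properties above (illustrative): without a logger the Trainer falls back to default_root_dir, and in distributed runs the first access broadcasts the rank-0 path so every process agrees; the deprecated alias still returns a path but warns.

import warnings

from pytorch_lightning import Trainer

trainer = Trainer(logger=False)
print(trainer.experiment_dir)  # falls back to trainer.default_root_dir

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    _ = trainer.log_dir  # deprecated alias
assert any("deprecated" in str(w.message) for w in caught)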
2 changes: 1 addition & 1 deletion pytorch_lightning/trainer/trainer.py
@@ -1263,7 +1263,7 @@ def __init_profiler(self, profiler: Optional[Union[BaseProfiler, str]]) -> None:
def __setup_profiler(self) -> None:
local_rank = self.local_rank if self.world_size > 1 else None
self.profiler._lightning_module = proxy(self.lightning_module)
self.profiler.setup(stage=self.state.fn._setup_fn, local_rank=local_rank, log_dir=self.log_dir)
self.profiler.setup(stage=self.state.fn._setup_fn, local_rank=local_rank, log_dir=self.experiment_dir)

def _log_device_info(self) -> None:
rank_zero_info(f'GPU available: {torch.cuda.is_available()}, used: {self._device_type == DeviceType.GPU}')
10 changes: 5 additions & 5 deletions pytorch_lightning/utilities/cli.py
@@ -166,9 +166,9 @@ def __init__(
def setup(self, trainer: Trainer, pl_module: LightningModule, stage: Optional[str] = None) -> None:
# save the config in `setup` because (1) we want it to save regardless of the trainer function run
# and we want to save before processes are spawned
log_dir = trainer.log_dir
assert log_dir is not None
config_path = os.path.join(log_dir, self.config_filename)
experiment_dir = trainer.experiment_dir
assert experiment_dir is not None
config_path = os.path.join(experiment_dir, self.config_filename)
if not self.overwrite and os.path.isfile(config_path):
raise RuntimeError(
f'{self.__class__.__name__} expected {config_path} to NOT exist. Aborting to avoid overwriting'
@@ -178,9 +178,9 @@ def setup(self, trainer: Trainer, pl_module: LightningModule, stage: Optional[st
)
if trainer.is_global_zero:
# save only on rank zero to avoid race conditions on DDP.
# the `log_dir` needs to be created as we rely on the logger to do it usually
# the `experiment_dir` needs to be created as we rely on the logger to do it usually
# but it hasn't logged anything at this point
get_filesystem(log_dir).makedirs(log_dir, exist_ok=True)
get_filesystem(experiment_dir).makedirs(experiment_dir, exist_ok=True)
self.parser.save(self.config, config_path, skip_none=False, overwrite=self.overwrite)

def __reduce__(self) -> Tuple[Type['SaveConfigCallback'], Tuple, Dict]:
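For reference, a sketch of where the LightningCLI config lands after this change (the directory value is illustrative; config.yaml is the SaveConfigCallback default).

import os

experiment_dir = "logs/exp/version_0"  # example value of trainer.experiment_dir at setup time
config_filename = "config.yaml"        # SaveConfigCallback default
print(os.path.join(experiment_dir, config_filename))  # logs/exp/version_0/config.yaml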
2 changes: 1 addition & 1 deletion tests/callbacks/test_gpu_stats_monitor.py
@@ -50,7 +50,7 @@ def test_gpu_stats_monitor(tmpdir):
trainer.fit(model)
assert trainer.state.finished, f"Training failed with {trainer.state}"

path_csv = os.path.join(logger.log_dir, ExperimentWriter.NAME_METRICS_FILE)
path_csv = os.path.join(logger.experiment_dir, ExperimentWriter.NAME_METRICS_FILE)
met_data = np.genfromtxt(path_csv, delimiter=',', names=True, deletechars='', replace_space=' ')

batch_time_data = met_data['batch_time/intra_step (ms)']
7 changes: 7 additions & 0 deletions tests/deprecated_api/test_remove_1-6.py
@@ -322,3 +322,10 @@ def test_v1_6_0_deprecated_hpc_load(tmpdir):
checkpoint_path = trainer.checkpoint_connector.get_max_ckpt_path_from_folder(str(tmpdir))
with pytest.deprecated_call(match=r"`CheckpointConnector.hpc_load\(\)` was deprecated in v1.4"):
trainer.checkpoint_connector.hpc_load(checkpoint_path)


def test_v1_6_0_log_dir():
trainer = Trainer()
with pytest.deprecated_call(match="Trainer.log_dir is deprecated since v1.4 and will be removed in v1.6."):
log_dir = trainer.log_dir
assert log_dir == trainer.experiment_dir
7 changes: 5 additions & 2 deletions tests/loggers/test_all.py
@@ -79,6 +79,7 @@ def test_loggers_fit_test_all(tmpdir, monkeypatch):
with mock.patch('pytorch_lightning.loggers.wandb.wandb') as wandb:
wandb.run = None
wandb.init().step = 0
wandb.init().dir = "wandb"
_test_loggers_fit_test(tmpdir, WandbLogger)


@@ -177,7 +178,9 @@ def test_loggers_save_dir_and_weights_save_path_all(tmpdir, monkeypatch):
with mock.patch('pytorch_lightning.loggers.test_tube.Experiment'):
_test_loggers_save_dir_and_weights_save_path(tmpdir, TestTubeLogger)

with mock.patch('pytorch_lightning.loggers.wandb.wandb'):
with mock.patch('pytorch_lightning.loggers.wandb.wandb') as wandb:
wandb.run = None
wandb.init().dir = tmpdir / "logs"
_test_loggers_save_dir_and_weights_save_path(tmpdir, WandbLogger)


@@ -217,7 +220,7 @@ def name(self):
trainer = Trainer(**trainer_args, logger=logger, weights_save_path=weights_save_path)
trainer.fit(model)
assert trainer.weights_save_path == weights_save_path
assert trainer.logger.save_dir == save_dir
assert trainer.experiment_dir == save_dir / logger.name / logger.version
assert trainer.checkpoint_callback.dirpath == weights_save_path / 'name' / 'version' / 'checkpoints'
assert trainer.default_root_dir == tmpdir

4 changes: 2 additions & 2 deletions tests/loggers/test_comet.py
@@ -153,12 +153,12 @@ def test_comet_logger_dirs_creation(comet, comet_experiment, tmpdir, monkeypatch

model = BoringModel()
trainer = Trainer(default_root_dir=tmpdir, logger=logger, max_epochs=1, limit_train_batches=3, limit_val_batches=3)
assert trainer.log_dir == logger.save_dir
assert trainer.experiment_dir == logger.experiment_dir
trainer.fit(model)

assert trainer.checkpoint_callback.dirpath == (tmpdir / 'test' / "1" / 'checkpoints')
assert set(os.listdir(trainer.checkpoint_callback.dirpath)) == {'epoch=0-step=2.ckpt'}
assert trainer.log_dir == logger.save_dir
assert trainer.experiment_dir == logger.experiment_dir


@patch('pytorch_lightning.loggers.comet.comet_ml')
4 changes: 2 additions & 2 deletions tests/loggers/test_csv.py
@@ -83,7 +83,7 @@ def test_file_logger_log_metrics(tmpdir, step_idx):
logger.log_metrics(metrics, step_idx)
logger.save()

path_csv = os.path.join(logger.log_dir, ExperimentWriter.NAME_METRICS_FILE)
path_csv = os.path.join(logger.experiment_dir, ExperimentWriter.NAME_METRICS_FILE)
with open(path_csv, 'r') as fp:
lines = fp.readlines()
assert len(lines) == 2
@@ -109,6 +109,6 @@ def test_file_logger_log_hyperparams(tmpdir):
logger.log_hyperparams(hparams)
logger.save()

path_yaml = os.path.join(logger.log_dir, ExperimentWriter.NAME_HPARAMS_FILE)
path_yaml = os.path.join(logger.experiment_dir, ExperimentWriter.NAME_HPARAMS_FILE)
params = load_hparams_from_yaml(path_yaml)
assert all(n in params for n in hparams)