
Commit ec0fb7a

Borda and Adrian Wälchli (awaelchli) authored
refactor imports of logger dependencies (#4860)
* refactor imports of logger dependencies
* fix
* fix
* fix
* name
* fix
* mocks
* fix tests
* fix mlflow
* fix test tube
* fix wandb import check
* whitespace
* name
* name
* hack
* hack
* rev
* fix
* update mlflow import check
* try without installing conda dep
* .
* .
* .
* .
* .
* .
* .
* .
* .

Co-authored-by: Adrian Wälchli <[email protected]>
Co-authored-by: Adrian Wälchli <[email protected]>
1 parent 410d67f commit ec0fb7a

File tree

9 files changed: +78 -80 lines changed


.github/workflows/ci_test-base.yml

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-name: CI base testing
+name: CI basic testing
 
 # see: https://help.github.com/en/actions/reference/events-that-trigger-workflows
 on:  # Trigger the workflow on push or pull request, but only for the master branch

pytorch_lightning/loggers/__init__.py

Lines changed: 12 additions & 27 deletions
@@ -24,40 +24,25 @@
     'CSVLogger',
 ]
 
-try:
-    # needed to prevent ImportError and duplicated logs.
-    environ["COMET_DISABLE_AUTO_LOGGING"] = "1"
+from pytorch_lightning.loggers.comet import _COMET_AVAILABLE, CometLogger
+from pytorch_lightning.loggers.mlflow import _MLFLOW_AVAILABLE, MLFlowLogger
+from pytorch_lightning.loggers.neptune import _NEPTUNE_AVAILABLE, NeptuneLogger
+from pytorch_lightning.loggers.test_tube import _TESTTUBE_AVAILABLE, TestTubeLogger
+from pytorch_lightning.loggers.wandb import _WANDB_AVAILABLE, WandbLogger
 
-    from pytorch_lightning.loggers.comet import CometLogger
-except ImportError:  # pragma: no-cover
-    del environ["COMET_DISABLE_AUTO_LOGGING"]  # pragma: no-cover
-else:
+if _COMET_AVAILABLE:
     __all__.append('CometLogger')
+    # needed to prevent ImportError and duplicated logs.
+    environ["COMET_DISABLE_AUTO_LOGGING"] = "1"
 
-try:
-    from pytorch_lightning.loggers.mlflow import MLFlowLogger
-except ImportError:  # pragma: no-cover
-    pass  # pragma: no-cover
-else:
+if _MLFLOW_AVAILABLE:
     __all__.append('MLFlowLogger')
 
-try:
-    from pytorch_lightning.loggers.neptune import NeptuneLogger
-except ImportError:  # pragma: no-cover
-    pass  # pragma: no-cover
-else:
+if _NEPTUNE_AVAILABLE:
     __all__.append('NeptuneLogger')
 
-try:
-    from pytorch_lightning.loggers.test_tube import TestTubeLogger
-except ImportError:  # pragma: no-cover
-    pass  # pragma: no-cover
-else:
+if _TESTTUBE_AVAILABLE:
     __all__.append('TestTubeLogger')
 
-try:
-    from pytorch_lightning.loggers.wandb import WandbLogger
-except ImportError:  # pragma: no-cover
-    pass  # pragma: no-cover
-else:
+if _WANDB_AVAILABLE:
     __all__.append('WandbLogger')
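
Note: with this change, downstream code can check logger availability through __all__ or the new _*_AVAILABLE flags instead of wrapping the import itself in try/except. A minimal sketch of such a consumer follows; it is not part of this commit, and the project/save_dir argument values are purely illustrative.

# Sketch: picking a logger based on what is installed (illustrative values only).
from pytorch_lightning import Trainer
from pytorch_lightning import loggers as pl_loggers

if "WandbLogger" in pl_loggers.__all__:
    # only exported when wandb is installed
    logger = pl_loggers.WandbLogger(project="demo")
else:
    # CSVLogger has no optional dependency and is always exported
    logger = pl_loggers.CSVLogger(save_dir="logs/")

trainer = Trainer(logger=logger, max_epochs=1)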

pytorch_lightning/loggers/comet.py

Lines changed: 16 additions & 18 deletions
@@ -21,17 +21,18 @@
 from argparse import Namespace
 from typing import Any, Dict, Optional, Union
 
-try:
-    import comet_ml
+import torch
+from torch import is_tensor
 
-except ModuleNotFoundError:  # pragma: no-cover
-    comet_ml = None
-    CometExperiment = None
-    CometExistingExperiment = None
-    CometOfflineExperiment = None
-    API = None
-    generate_guid = None
-else:
+from pytorch_lightning import _logger as log
+from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
+from pytorch_lightning.utilities import rank_zero_only, _module_available
+from pytorch_lightning.utilities.exceptions import MisconfigurationException
+
+_COMET_AVAILABLE = _module_available("comet_ml")
+
+if _COMET_AVAILABLE:
+    import comet_ml
     from comet_ml import ExistingExperiment as CometExistingExperiment
     from comet_ml import Experiment as CometExperiment
     from comet_ml import OfflineExperiment as CometOfflineExperiment
@@ -41,14 +42,11 @@
     except ImportError:  # pragma: no-cover
         # For more information, see: https://www.comet.ml/docs/python-sdk/releases/#release-300
         from comet_ml.papi import API  # pragma: no-cover
-
-import torch
-from torch import is_tensor
-
-from pytorch_lightning import _logger as log
-from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
-from pytorch_lightning.utilities import rank_zero_only
-from pytorch_lightning.utilities.exceptions import MisconfigurationException
+else:
+    # needed for test mocks, these tests shall be updated
+    comet_ml = None
+    CometExperiment, CometExistingExperiment, CometOfflineExperiment = None, None, None
+    API = None
 
 
 class CometLogger(LightningLoggerBase):
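
For reference, the _COMET_AVAILABLE = _module_available("comet_ml") pattern used here relies on the _module_available helper from pytorch_lightning.utilities. The following is only a rough sketch of what such a helper could look like, assuming it just probes the import machinery; the real helper may handle additional cases such as dotted sub-module paths.

# Sketch only: an approximation of a helper like _module_available, not the actual implementation.
import importlib.util


def module_available(module_path: str) -> bool:
    """Return True if the given module can be found without importing it."""
    try:
        return importlib.util.find_spec(module_path) is not None
    except ModuleNotFoundError:
        # find_spec raises ModuleNotFoundError when a parent package is missing
        return False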

pytorch_lightning/loggers/mlflow.py

Lines changed: 12 additions & 8 deletions
@@ -21,21 +21,25 @@
 from time import time
 from typing import Any, Dict, Optional, Union
 
-try:
-    import mlflow
-    from mlflow.tracking import MlflowClient
-except ModuleNotFoundError:  # pragma: no-cover
-    mlflow = None
-    MlflowClient = None
-
 
 from pytorch_lightning import _logger as log
 from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
-from pytorch_lightning.utilities import rank_zero_only, rank_zero_warn
+from pytorch_lightning.utilities import rank_zero_only, rank_zero_warn, _module_available
+
 
 LOCAL_FILE_URI_PREFIX = "file:"
 
 
+_MLFLOW_AVAILABLE = _module_available("mlflow")
+try:
+    import mlflow
+    from mlflow.tracking import MlflowClient
+# todo: there seems to be still some remaining import error with Conda env
+except ImportError:
+    _MLFLOW_AVAILABLE = False
+    mlflow, MlflowClient = None, None
+
+
 class MLFlowLogger(LightningLoggerBase):
     """
     Log using `MLflow <https://mlflow.org>`_.
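
Aside from the import mechanics above, a minimal usage sketch of the logger this module provides is shown below; the argument values (experiment name and file-based tracking URI) are illustrative only and not taken from this commit.

# Sketch: constructing the MLflow logger and handing it to the Trainer (values illustrative).
from pytorch_lightning import Trainer
from pytorch_lightning.loggers import MLFlowLogger

mlf_logger = MLFlowLogger(experiment_name="default", tracking_uri="file:./ml-runs")
trainer = Trainer(logger=mlf_logger, max_epochs=1)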

pytorch_lightning/loggers/neptune.py

Lines changed: 11 additions & 9 deletions
@@ -17,21 +17,23 @@
 --------------
 """
 from argparse import Namespace
-from typing import Any, Dict, Iterable, List, Optional, Union
-
-try:
-    import neptune
-    from neptune.experiments import Experiment
-except ImportError:  # pragma: no-cover
-    neptune = None
-    Experiment = None
+from typing import Any, Dict, Iterable, Optional, Union
 
 import torch
 from torch import is_tensor
 
 from pytorch_lightning import _logger as log
 from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
-from pytorch_lightning.utilities import rank_zero_only
+from pytorch_lightning.utilities import rank_zero_only, _module_available
+
+_NEPTUNE_AVAILABLE = _module_available("neptune")
+
+if _NEPTUNE_AVAILABLE:
+    import neptune
+    from neptune.experiments import Experiment
+else:
+    # needed for test mocks, these tests shall be updated
+    neptune, Experiment = None, None
 
 
 class NeptuneLogger(LightningLoggerBase):

pytorch_lightning/loggers/test_tube.py

Lines changed: 8 additions & 5 deletions
@@ -19,15 +19,18 @@
 from argparse import Namespace
 from typing import Any, Dict, Optional, Union
 
-try:
-    from test_tube import Experiment
-except ImportError:  # pragma: no-cover
-    Experiment = None
-
 from pytorch_lightning.core.lightning import LightningModule
 from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
+from pytorch_lightning.utilities import _module_available
 from pytorch_lightning.utilities.distributed import rank_zero_only, rank_zero_warn
 
+_TESTTUBE_AVAILABLE = _module_available("test_tube")
+
+if _TESTTUBE_AVAILABLE:
+    from test_tube import Experiment
+else:
+    Experiment = None
+
 
 class TestTubeLogger(LightningLoggerBase):
     r"""

pytorch_lightning/loggers/wandb.py

Lines changed: 9 additions & 7 deletions
@@ -22,16 +22,18 @@
 
 import torch.nn as nn
 
+from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
+from pytorch_lightning.utilities import rank_zero_only, _module_available
+from pytorch_lightning.utilities.warning_utils import WarningCache
+
+_WANDB_AVAILABLE = _module_available("wandb")
+
 try:
     import wandb
     from wandb.wandb_run import Run
-except ImportError:  # pragma: no-cover
-    wandb = None
-    Run = None
-
-from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
-from pytorch_lightning.utilities import rank_zero_only
-from pytorch_lightning.utilities.warning_utils import WarningCache
+except ImportError:
+    # needed for test mocks, these tests shall be updated
+    wandb, Run = None, None
 
 
 class WandbLogger(LightningLoggerBase):

tests/checkpointing/test_model_checkpoint.py

Lines changed: 0 additions & 1 deletion
@@ -34,7 +34,6 @@
 from pytorch_lightning.utilities.cloud_io import load as pl_load
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from tests.base import BoringModel
-import tests.base.develop_utils as tutils
 
 
 class LogInTwoMethods(BoringModel):

tests/loggers/test_mlflow.py

Lines changed: 9 additions & 4 deletions
@@ -20,7 +20,7 @@
 
 
 from pytorch_lightning import Trainer
-from pytorch_lightning.loggers import MLFlowLogger
+from pytorch_lightning.loggers import _MLFLOW_AVAILABLE, MLFlowLogger
 from tests.base import EvalModelTemplate
 
 
@@ -120,7 +120,7 @@ def test_mlflow_log_dir(client, mlflow, tmpdir):
 
 def test_mlflow_logger_dirs_creation(tmpdir):
     """ Test that the logger creates the folders and files in the right place. """
-    if not importlib.util.find_spec('mlflow'):
+    if not _MLFLOW_AVAILABLE:
         pytest.xfail("test for explicit file creation requires mlflow dependency to be installed.")
 
     assert not os.listdir(tmpdir)
@@ -137,8 +137,13 @@ def test_mlflow_logger_dirs_creation(tmpdir):
     assert set(os.listdir(tmpdir / exp_id)) == {run_id, 'meta.yaml'}
 
     model = EvalModelTemplate()
-    trainer = Trainer(default_root_dir=tmpdir, logger=logger, max_epochs=1, limit_val_batches=3,
-                      log_gpu_memory=True)
+    trainer = Trainer(
+        default_root_dir=tmpdir,
+        logger=logger,
+        max_epochs=1,
+        limit_val_batches=3,
+        log_gpu_memory=True,
+    )
     trainer.fit(model)
     assert set(os.listdir(tmpdir / exp_id)) == {run_id, 'meta.yaml'}
     assert 'epoch' in os.listdir(tmpdir / exp_id / run_id / 'metrics')
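
An alternative that the exported flag makes possible, sketched below but not what this commit does, is to skip the whole test at collection time instead of xfailing inside the body; the test name here is hypothetical.

# Sketch: skipping at collection time via the availability flag (hypothetical test name).
import pytest

from pytorch_lightning.loggers import _MLFLOW_AVAILABLE


@pytest.mark.skipif(not _MLFLOW_AVAILABLE, reason="test requires mlflow to be installed")
def test_mlflow_logger_dirs_creation_alt(tmpdir):
    ...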
