From 8dc27344560505589a05e1afd089951fd6c20b1a Mon Sep 17 00:00:00 2001
From: Boris Dayma
Date: Thu, 27 Jan 2022 15:12:37 -0600
Subject: [PATCH 01/16] feat(wandb): support distributed mode

---
 pytorch_lightning/loggers/wandb.py | 29 +++++++++++++++++++++--------
 1 file changed, 21 insertions(+), 8 deletions(-)

diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py
index 4c09d6591f428..a0ee55671fdce 100644
--- a/pytorch_lightning/loggers/wandb.py
+++ b/pytorch_lightning/loggers/wandb.py
@@ -307,11 +307,16 @@ def __init__(
         self._save_dir = self._wandb_init.get("dir")
         self._name = self._wandb_init.get("name")
         self._id = self._wandb_init.get("id")
+        # start wandb run (to create an attach_id for distributed modes)
+        self.experiment

     def __getstate__(self):
         state = self.__dict__.copy()
         # args needed to reload correct experiment
-        state["_id"] = self._experiment.id if self._experiment is not None else None
+        if self._experiment is not None:
+            state["_id"] = getattr(self._experiment, "id", None)
+            state["_attach_id"] = getattr(self._experiment, "id", None)
+            state["_name"] = self._experiment.project_name()

         # cannot be pickled
         state["_experiment"] = None
@@ -335,19 +340,26 @@ def experiment(self) -> Run:
         if self._experiment is None:
             if self._offline:
                 os.environ["WANDB_MODE"] = "dryrun"
-            if wandb.run is None:
-                self._experiment = wandb.init(**self._wandb_init)
-            else:
+
+            attach_id = getattr(self, "_attach_id", None)
+            if attach_id is not None and hasattr(wandb, "_attach"):
+                # attach to wandb process referenced
+                self._experiment = wandb._attach(attach_id)
+            elif wandb.run is not None:
+                # wandb process already created in this instance
                 rank_zero_warn(
                     "There is a wandb run already in progress and newly created instances of `WandbLogger` will reuse"
                     " this run. If this is not desired, call `wandb.finish()` before instantiating `WandbLogger`."
                 )
                 self._experiment = wandb.run
+            else:
+                # create new wandb process
+                self._experiment = wandb.init(**self._wandb_init)

-        # define default x-axis (for latest wandb versions)
-        if getattr(self._experiment, "define_metric", None):
-            self._experiment.define_metric("trainer/global_step")
-            self._experiment.define_metric("*", step_metric="trainer/global_step", step_sync=True)
+            # define default x-axis
+            if getattr(self._experiment, "define_metric", None):
+                self._experiment.define_metric("trainer/global_step")
+                self._experiment.define_metric("*", step_metric="trainer/global_step", step_sync=True)

         return self._experiment

@@ -490,6 +502,7 @@ def _scan_and_log_checkpoints(self, checkpoint_callback: "ReferenceType[ModelChe
                 "save_top_k",
                 "save_weights_only",
                 "_every_n_train_steps",
+                "_every_n_val_epochs",
             ]
             # ensure it does not break if `ModelCheckpoint` args change
             if hasattr(checkpoint_callback, k)
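The mechanism patch 01 banks on is easiest to see outside the logger. A standalone sketch (a hypothetical script, not part of the patch; it only reuses the `wandb._attach` and attach-id calls the diff itself introduces): the pickled state carries an attach id, and the copy unpickled in a worker reattaches to the existing run instead of spawning a new one.

    import pickle

    import wandb

    # rank 0: create the run; under wandb's service mode the run carries an
    # attach handle that other processes can use to join it
    run = wandb.init(project="demo")
    payload = pickle.dumps({"_attach_id": getattr(run, "_attach_id", None)})

    # worker: reattach rather than calling wandb.init() a second time
    state = pickle.loads(payload)
    if state["_attach_id"] is not None and hasattr(wandb, "_attach"):
        run = wandb._attach(state["_attach_id"])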
From 97d0765c4db634a6d611c0923d23e6f0b4db046b Mon Sep 17 00:00:00 2001
From: Boris Dayma
Date: Thu, 27 Jan 2022 19:32:10 -0600
Subject: [PATCH 02/16] fix(wandb): attach_id typo

---
 pytorch_lightning/loggers/wandb.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py
index a0ee55671fdce..ce3980ae28c3c 100644
--- a/pytorch_lightning/loggers/wandb.py
+++ b/pytorch_lightning/loggers/wandb.py
@@ -315,7 +315,7 @@ def __getstate__(self):
         # args needed to reload correct experiment
         if self._experiment is not None:
             state["_id"] = getattr(self._experiment, "id", None)
-            state["_attach_id"] = getattr(self._experiment, "id", None)
+            state["_attach_id"] = getattr(self._experiment, "_attach_id", None)
             state["_name"] = self._experiment.project_name()

         # cannot be pickled

From e17ec76b070e27c59c00f84fdda2230e2ee866be Mon Sep 17 00:00:00 2001
From: Boris Dayma
Date: Mon, 7 Feb 2022 15:09:12 -0600
Subject: [PATCH 03/16] fix: apply linting suggestion
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Adrian Wälchli
---
 pytorch_lightning/loggers/wandb.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py
index ce3980ae28c3c..96a75739008c0 100644
--- a/pytorch_lightning/loggers/wandb.py
+++ b/pytorch_lightning/loggers/wandb.py
@@ -308,7 +308,7 @@ def __init__(
         self._name = self._wandb_init.get("name")
         self._id = self._wandb_init.get("id")
         # start wandb run (to create an attach_id for distributed modes)
-        self.experiment
+        _ = self.experiment

     def __getstate__(self):
         state = self.__dict__.copy()
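Patch 02 is a one-token fix, but it is the difference between workers attaching to the run and workers failing to: `run.id` is the public run identifier, while `run._attach_id` is the service attach handle. A toy illustration of the `getattr` fallback involved (hypothetical `FakeRun`, not from the patch):

    class FakeRun:
        id = "run-123"             # public run id
        _attach_id = "attach-456"  # service attach handle

    run = FakeRun()
    assert getattr(run, "id", None) == "run-123"
    assert getattr(run, "_attach_id", None) == "attach-456"
    # on older wandb versions the attribute does not exist, so getattr
    # degrades gracefully to None instead of raising AttributeError
    assert getattr(object(), "_attach_id", None) is None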
From d01ebe0d19e0b04ec538d1231606d04207edcd8e Mon Sep 17 00:00:00 2001
From: Boris Dayma
Date: Mon, 7 Feb 2022 16:29:07 -0600
Subject: [PATCH 04/16] feat(wandb): enable service by default

---
 pytorch_lightning/loggers/wandb.py | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py
index 96a75739008c0..478b9b8e3a67d 100644
--- a/pytorch_lightning/loggers/wandb.py
+++ b/pytorch_lightning/loggers/wandb.py
@@ -33,6 +33,7 @@

 _WANDB_AVAILABLE = _module_available("wandb")
 _WANDB_GREATER_EQUAL_0_10_22 = _compare_version("wandb", operator.ge, "0.10.22")
+_WANDB_GREATER_EQUAL_0_12_10 = _compare_version("wandb", operator.ge, "0.12.10")

 try:
     import wandb
@@ -308,7 +309,9 @@ def __init__(
         self._name = self._wandb_init.get("name")
         self._id = self._wandb_init.get("id")
         # start wandb run (to create an attach_id for distributed modes)
-        _ = self.experiment
+        if _WANDB_GREATER_EQUAL_0_12_10:
+            wandb.require("service")
+        _ = self.experiment

     def __getstate__(self):
         state = self.__dict__.copy()
@@ -342,16 +345,16 @@ def experiment(self) -> Run:
                 os.environ["WANDB_MODE"] = "dryrun"

             attach_id = getattr(self, "_attach_id", None)
-            if attach_id is not None and hasattr(wandb, "_attach"):
-                # attach to wandb process referenced
-                self._experiment = wandb._attach(attach_id)
-            elif wandb.run is not None:
+            if wandb.run is not None:
                 # wandb process already created in this instance
                 rank_zero_warn(
                     "There is a wandb run already in progress and newly created instances of `WandbLogger` will reuse"
                     " this run. If this is not desired, call `wandb.finish()` before instantiating `WandbLogger`."
                 )
                 self._experiment = wandb.run
+            elif attach_id is not None and hasattr(wandb, "_attach"):
+                # attach to wandb process referenced
+                self._experiment = wandb._attach(attach_id)
             else:
                 # create new wandb process
                 self._experiment = wandb.init(**self._wandb_init)

From bdbdfd6a69408aefcfc08ddd5e74882d9cf053d7 Mon Sep 17 00:00:00 2001
From: Boris Dayma
Date: Mon, 7 Feb 2022 16:29:21 -0600
Subject: [PATCH 05/16] tests(wandb): update tests

---
 tests/loggers/test_wandb.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/tests/loggers/test_wandb.py b/tests/loggers/test_wandb.py
index 85b20c562464e..939b39056c145 100644
--- a/tests/loggers/test_wandb.py
+++ b/tests/loggers/test_wandb.py
@@ -22,6 +22,7 @@
 from pytorch_lightning.loggers import WandbLogger
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from tests.helpers import BoringModel
+from tests.helpers.utils import no_warning_call


 @mock.patch("pytorch_lightning.loggers.wandb.wandb")
@@ -52,14 +53,12 @@ def test_wandb_logger_init(wandb):
     wandb.init().log.reset_mock()
     wandb.init.reset_mock()
     wandb.run = wandb.init()
-    logger = WandbLogger()
+    with pytest.warns(UserWarning, match="There is a wandb run already in progress"):
+        logger = WandbLogger()

     # verify default resume value
     assert logger._wandb_init["resume"] == "allow"

-    with pytest.warns(UserWarning, match="There is a wandb run already in progress"):
-        _ = logger.experiment
-
     logger.log_metrics({"acc": 1.0}, step=3)
     wandb.init.assert_called_once()
     wandb.init().log.assert_called_once_with({"acc": 1.0, "trainer/global_step": 3})
@@ -124,8 +123,9 @@ def project_name(self):
 def test_wandb_logger_dirs_creation(wandb, tmpdir):
     """Test that the logger creates the folders and files in the right place."""
     logger = WandbLogger(save_dir=str(tmpdir), offline=True)
-    assert logger.version is None
-    assert logger.name is None
+    # the logger get initialized
+    assert logger.version is not None
+    assert logger.name is not None

     # mock return values of experiment
     wandb.run = None

From b24739647558a64e030d0aa4e53032055fdc4781 Mon Sep 17 00:00:00 2001
From: Boris Dayma
Date: Mon, 7 Feb 2022 16:37:57 -0600
Subject: [PATCH 06/16] test(wandb): remove unused import

---
 tests/loggers/test_wandb.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/tests/loggers/test_wandb.py b/tests/loggers/test_wandb.py
index 939b39056c145..ba2cbba54e36c 100644
--- a/tests/loggers/test_wandb.py
+++ b/tests/loggers/test_wandb.py
@@ -22,7 +22,6 @@
 from pytorch_lightning.loggers import WandbLogger
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from tests.helpers import BoringModel
-from tests.helpers.utils import no_warning_call


 @mock.patch("pytorch_lightning.loggers.wandb.wandb")
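Patch 04 opts in to wandb's service protocol whenever the installed version supports it, and reorders the branches so an already-running `wandb.run` wins over reattachment. A minimal sketch of the same version-gated opt-in, using only the calls the patch itself relies on:

    import operator

    from pytorch_lightning.utilities.imports import _compare_version

    if _compare_version("wandb", operator.ge, "0.12.10"):
        import wandb

        # the service protocol runs a dedicated process that makes wandb
        # safe to use from forked or spawned workers
        wandb.require("service")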
From bcce95a8c6004b6e1b759df37ba97159ce2accca Mon Sep 17 00:00:00 2001
From: Boris Dayma
Date: Mon, 7 Feb 2022 20:03:16 -0600
Subject: [PATCH 07/16] test(wandb): consider latest version

---
 tests/loggers/test_wandb.py | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/tests/loggers/test_wandb.py b/tests/loggers/test_wandb.py
index ba2cbba54e36c..b8d4c8ef474ed 100644
--- a/tests/loggers/test_wandb.py
+++ b/tests/loggers/test_wandb.py
@@ -18,11 +18,16 @@
 import pytest

+# ensure we consider recent wandb versions
+import pytorch_lightning.loggers.wandb as pl_wandb
 from pytorch_lightning import Trainer
 from pytorch_lightning.loggers import WandbLogger
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from tests.helpers import BoringModel

+pl_wandb._WANDB_GREATER_EQUAL_0_10_22 = True
+pl_wandb._WANDB_GREATER_EQUAL_0_12_10 = True
+

 @mock.patch("pytorch_lightning.loggers.wandb.wandb")
 def test_wandb_logger_init(wandb):
     """Verify that basic functionality of wandb logger works.
@@ -121,10 +126,11 @@ def project_name(self):
 @mock.patch("pytorch_lightning.loggers.wandb.wandb")
 def test_wandb_logger_dirs_creation(wandb, tmpdir):
     """Test that the logger creates the folders and files in the right place."""
+    wandb.run = None
     logger = WandbLogger(save_dir=str(tmpdir), offline=True)
     # the logger get initialized
-    assert logger.version is not None
-    assert logger.name is not None
+    assert logger.version == wandb.init().id
+    assert logger.name == wandb.init().project_name()

     # mock return values of experiment
     wandb.run = None
@@ -186,13 +192,10 @@ def test_wandb_log_model(wandb, tmpdir):
     assert not wandb.init().log_artifact.called

     # test correct metadata
-    import pytorch_lightning.loggers.wandb as pl_wandb
-
-    pl_wandb._WANDB_GREATER_EQUAL_0_10_22 = True
     wandb.init().log_artifact.reset_mock()
     wandb.init.reset_mock()
     wandb.Artifact.reset_mock()
-    logger = pl_wandb.WandbLogger(log_model=True)
+    logger = WandbLogger(log_model=True)
     logger.experiment.id = "1"
     logger.experiment.project_name.return_value = "project"
     trainer = Trainer(default_root_dir=tmpdir, logger=logger, max_epochs=2, limit_train_batches=3, limit_val_batches=3)

From be74721c9dc59d881c802eee7577da162e781aed Mon Sep 17 00:00:00 2001
From: Boris Dayma
Date: Mon, 7 Feb 2022 21:04:15 -0600
Subject: [PATCH 08/16] test(wandb): check no new experiment created

---
 tests/loggers/test_wandb.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/tests/loggers/test_wandb.py b/tests/loggers/test_wandb.py
index b8d4c8ef474ed..ea85c1d930b76 100644
--- a/tests/loggers/test_wandb.py
+++ b/tests/loggers/test_wandb.py
@@ -24,6 +24,7 @@
 from pytorch_lightning.loggers import WandbLogger
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from tests.helpers import BoringModel
+from tests.helpers.utils import no_warning_call

 pl_wandb._WANDB_GREATER_EQUAL_0_10_22 = True
 pl_wandb._WANDB_GREATER_EQUAL_0_12_10 = True
@@ -60,6 +61,9 @@ def test_wandb_logger_init(wandb):
     wandb.run = wandb.init()
     with pytest.warns(UserWarning, match="There is a wandb run already in progress"):
         logger = WandbLogger()
+    # check that no new run is created
+    with no_warning_call(UserWarning, match="There is a wandb run already in progress"):
+        _ = logger.experiment

     # verify default resume value
     assert logger._wandb_init["resume"] == "allow"
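Patch 08 relies on a `no_warning_call` helper; the real one lives in tests/helpers/utils.py. A simplified stand-in with the same shape (an assumption about its interface, shown only to make the test readable, not the actual implementation):

    import warnings
    from contextlib import contextmanager

    @contextmanager
    def no_warning_call(expected_warning=Warning, match=None):
        # record every warning raised inside the block ...
        with warnings.catch_warnings(record=True) as record:
            warnings.simplefilter("always")
            yield
        # ... and fail if any of them matches the forbidden category/message
        for w in record:
            if issubclass(w.category, expected_warning) and (match is None or match in str(w.message)):
                raise AssertionError(f"Warning was raised: {w.message}")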
From 3da09c68bb15576bddcd15433a0e9bca31cf3588 Mon Sep 17 00:00:00 2001
From: Boris Dayma
Date: Tue, 8 Feb 2022 12:49:09 -0600
Subject: [PATCH 09/16] refactor(wandb): use utilities/imports.py

---
 pytorch_lightning/loggers/wandb.py     | 18 +++++++++---------
 pytorch_lightning/utilities/imports.py |  3 +++
 2 files changed, 12 insertions(+), 9 deletions(-)

diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py
index 478b9b8e3a67d..9e206028a85bb 100644
--- a/pytorch_lightning/loggers/wandb.py
+++ b/pytorch_lightning/loggers/wandb.py
@@ -15,7 +15,6 @@
 Weights and Biases Logger
 -------------------------
 """
-import operator
 import os
 from argparse import Namespace
 from pathlib import Path
@@ -26,18 +25,19 @@
 from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint
 from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
-from pytorch_lightning.utilities import _module_available, rank_zero_only
+from pytorch_lightning.utilities import rank_zero_only
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
-from pytorch_lightning.utilities.imports import _compare_version
+from pytorch_lightning.utilities.imports import (
+    _WANDB_AVAILABLE,
+    _WANDB_GREATER_EQUAL_0_10_22,
+    _WANDB_GREATER_EQUAL_0_12_10,
+)
 from pytorch_lightning.utilities.warnings import rank_zero_warn

-_WANDB_AVAILABLE = _module_available("wandb")
-_WANDB_GREATER_EQUAL_0_10_22 = _compare_version("wandb", operator.ge, "0.10.22")
-_WANDB_GREATER_EQUAL_0_12_10 = _compare_version("wandb", operator.ge, "0.12.10")
-
 try:
-    import wandb
     from wandb.wandb_run import Run
+
+    import wandb
 except ModuleNotFoundError:
     # needed for test mocks, these tests shall be updated
     wandb, Run = None, None
@@ -266,7 +266,7 @@ def __init__(
         prefix: Optional[str] = "",
         **kwargs,
     ):
-        if wandb is None:
+        if not _WANDB_AVAILABLE or wandb is None:
             raise ModuleNotFoundError(
                 "You want to use `wandb` logger which is not installed yet,"
                 " install it with `pip install wandb`."  # pragma: no-cover
diff --git a/pytorch_lightning/utilities/imports.py b/pytorch_lightning/utilities/imports.py
index d4f615b2e7cc0..23a8645ca1371 100644
--- a/pytorch_lightning/utilities/imports.py
+++ b/pytorch_lightning/utilities/imports.py
@@ -91,6 +91,9 @@ def _compare_version(package: str, op: Callable, version: str, use_base_version:
 _TORCHTEXT_AVAILABLE = _module_available("torchtext")
 _TORCHTEXT_LEGACY: bool = _TORCHTEXT_AVAILABLE and _compare_version("torchtext", operator.lt, "0.11.0")
 _TORCHVISION_AVAILABLE = _module_available("torchvision")
+_WANDB_AVAILABLE = _module_available("wandb")
+_WANDB_GREATER_EQUAL_0_10_22 = _WANDB_AVAILABLE and _compare_version("wandb", operator.ge, "0.10.22")
+_WANDB_GREATER_EQUAL_0_12_10 = _WANDB_AVAILABLE and _compare_version("wandb", operator.ge, "0.12.10")
 _XLA_AVAILABLE: bool = _module_available("torch_xla")

 from pytorch_lightning.utilities.xla_device import XLADeviceUtils  # noqa: E402

From 36b2c431a67ef99833bfd192f9f76416737214c7 Mon Sep 17 00:00:00 2001
From: Boris Dayma
Date: Tue, 8 Feb 2022 12:53:00 -0600
Subject: [PATCH 10/16] fix(wandb): remove unused variable

---
 pytorch_lightning/loggers/wandb.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py
index 9e206028a85bb..81da64f332d94 100644
--- a/pytorch_lightning/loggers/wandb.py
+++ b/pytorch_lightning/loggers/wandb.py
@@ -505,7 +505,6 @@ def _scan_and_log_checkpoints(self, checkpoint_callback: "ReferenceType[ModelChe
                 "save_top_k",
                 "save_weights_only",
                 "_every_n_train_steps",
-                "_every_n_val_epochs",
             ]
             # ensure it does not break if `ModelCheckpoint` args change
             if hasattr(checkpoint_callback, k)
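Patch 09 moves the wandb flags next to the other availability constants so that importing them never requires wandb itself. A self-contained approximation of what `_module_available` and `_compare_version` do (simplified; the real helpers in utilities/imports.py cover more edge cases):

    import operator
    from importlib import import_module
    from importlib.util import find_spec

    from packaging.version import Version

    def _module_available(name: str) -> bool:
        # True when the package can be found, without importing it
        return find_spec(name) is not None

    def _compare_version(package: str, op, version: str) -> bool:
        try:
            pkg = import_module(package)
            return op(Version(pkg.__version__), Version(version))
        except (ModuleNotFoundError, AttributeError):
            return False

    _WANDB_AVAILABLE = _module_available("wandb")
    _WANDB_GREATER_EQUAL_0_12_10 = _WANDB_AVAILABLE and _compare_version("wandb", operator.ge, "0.12.10")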
""" + monkeypatch.setattr(pl_wandb, "_WANDB_GREATER_EQUAL_0_12_10", True) + class Experiment: id = "the_id" step = 0 @@ -127,8 +127,11 @@ def project_name(self): @mock.patch("pytorch_lightning.loggers.wandb.wandb") -def test_wandb_logger_dirs_creation(wandb, tmpdir): +def test_wandb_logger_dirs_creation(wandb, monkeypatch, tmpdir): """Test that the logger creates the folders and files in the right place.""" + + monkeypatch.setattr(pl_wandb, "_WANDB_GREATER_EQUAL_0_12_10", True) + wandb.run = None logger = WandbLogger(save_dir=str(tmpdir), offline=True) # the logger get initialized @@ -160,8 +163,11 @@ def test_wandb_logger_dirs_creation(wandb, tmpdir): @mock.patch("pytorch_lightning.loggers.wandb.wandb") -def test_wandb_log_model(wandb, tmpdir): - """Test that the logger creates the folders and files in the right place.""" +def test_wandb_log_model(wandb, monkeypatch, tmpdir): + """Test that logger model logging functionality.""" + + monkeypatch.setattr(pl_wandb, "_WANDB_GREATER_EQUAL_0_10_22", True) + monkeypatch.setattr(pl_wandb, "_WANDB_GREATER_EQUAL_0_12_10", True) wandb.run = None model = BoringModel() @@ -222,8 +228,10 @@ def test_wandb_log_model(wandb, tmpdir): @mock.patch("pytorch_lightning.loggers.wandb.wandb") -def test_wandb_log_media(wandb, tmpdir): - """Test that the logger creates the folders and files in the right place.""" +def test_wandb_log_media(wandb, monkeypatch): + """Test that logger media logging functionality.""" + + monkeypatch.setattr(pl_wandb, "_WANDB_GREATER_EQUAL_0_12_10", True) wandb.run = None @@ -291,7 +299,10 @@ def test_wandb_log_media(wandb, tmpdir): @mock.patch("pytorch_lightning.loggers.wandb.wandb") -def test_wandb_logger_offline_log_model(wandb, tmpdir): +def test_wandb_logger_offline_log_model(wandb, monkeypatch, tmpdir): """Test that log_model=True raises an error in offline mode.""" + + monkeypatch.setattr(pl_wandb, "_WANDB_GREATER_EQUAL_0_12_10", True) + with pytest.raises(MisconfigurationException, match="checkpoints cannot be uploaded in offline mode"): _ = WandbLogger(save_dir=str(tmpdir), offline=True, log_model=True) From b269af7183e678a4cb84edad979e1c82935e625c Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Tue, 8 Feb 2022 15:07:37 -0600 Subject: [PATCH 12/16] Revert "test(wandb): use monkeypatching" This reverts commit 0bacf7465f2e18b5c8fcb595c130a411edca8cb5. --- tests/loggers/test_wandb.py | 35 ++++++++++++----------------------- 1 file changed, 12 insertions(+), 23 deletions(-) diff --git a/tests/loggers/test_wandb.py b/tests/loggers/test_wandb.py index 5e2bac1f8ca86..6347f892c27c8 100644 --- a/tests/loggers/test_wandb.py +++ b/tests/loggers/test_wandb.py @@ -17,6 +17,7 @@ import pytest +# ensure we consider recent wandb versions import pytorch_lightning.loggers.wandb as pl_wandb from pytorch_lightning import Trainer from pytorch_lightning.loggers import WandbLogger @@ -24,16 +25,17 @@ from tests.helpers import BoringModel from tests.helpers.utils import no_warning_call +pl_wandb._WANDB_GREATER_EQUAL_0_10_22 = True +pl_wandb._WANDB_GREATER_EQUAL_0_12_10 = True + @mock.patch("pytorch_lightning.loggers.wandb.wandb") -def test_wandb_logger_init(wandb, monkeypatch): +def test_wandb_logger_init(wandb): """Verify that basic functionality of wandb logger works. Wandb doesn't work well with pytest so we have to mock it out here. 
""" - monkeypatch.setattr(pl_wandb, "_WANDB_GREATER_EQUAL_0_12_10", True) - # test wandb.init called when there is no W&B run wandb.run = None logger = WandbLogger( @@ -88,14 +90,12 @@ def test_wandb_logger_init(wandb, monkeypatch): @mock.patch("pytorch_lightning.loggers.wandb.wandb") -def test_wandb_pickle(wandb, monkeypatch, tmpdir): +def test_wandb_pickle(wandb, tmpdir): """Verify that pickling trainer with wandb logger works. Wandb doesn't work well with pytest so we have to mock it out here. """ - monkeypatch.setattr(pl_wandb, "_WANDB_GREATER_EQUAL_0_12_10", True) - class Experiment: id = "the_id" step = 0 @@ -127,11 +127,8 @@ def project_name(self): @mock.patch("pytorch_lightning.loggers.wandb.wandb") -def test_wandb_logger_dirs_creation(wandb, monkeypatch, tmpdir): +def test_wandb_logger_dirs_creation(wandb, tmpdir): """Test that the logger creates the folders and files in the right place.""" - - monkeypatch.setattr(pl_wandb, "_WANDB_GREATER_EQUAL_0_12_10", True) - wandb.run = None logger = WandbLogger(save_dir=str(tmpdir), offline=True) # the logger get initialized @@ -163,11 +160,8 @@ def test_wandb_logger_dirs_creation(wandb, monkeypatch, tmpdir): @mock.patch("pytorch_lightning.loggers.wandb.wandb") -def test_wandb_log_model(wandb, monkeypatch, tmpdir): - """Test that logger model logging functionality.""" - - monkeypatch.setattr(pl_wandb, "_WANDB_GREATER_EQUAL_0_10_22", True) - monkeypatch.setattr(pl_wandb, "_WANDB_GREATER_EQUAL_0_12_10", True) +def test_wandb_log_model(wandb, tmpdir): + """Test that the logger creates the folders and files in the right place.""" wandb.run = None model = BoringModel() @@ -228,10 +222,8 @@ def test_wandb_log_model(wandb, monkeypatch, tmpdir): @mock.patch("pytorch_lightning.loggers.wandb.wandb") -def test_wandb_log_media(wandb, monkeypatch): - """Test that logger media logging functionality.""" - - monkeypatch.setattr(pl_wandb, "_WANDB_GREATER_EQUAL_0_12_10", True) +def test_wandb_log_media(wandb, tmpdir): + """Test that the logger creates the folders and files in the right place.""" wandb.run = None @@ -299,10 +291,7 @@ def test_wandb_log_media(wandb, monkeypatch): @mock.patch("pytorch_lightning.loggers.wandb.wandb") -def test_wandb_logger_offline_log_model(wandb, monkeypatch, tmpdir): +def test_wandb_logger_offline_log_model(wandb, tmpdir): """Test that log_model=True raises an error in offline mode.""" - - monkeypatch.setattr(pl_wandb, "_WANDB_GREATER_EQUAL_0_12_10", True) - with pytest.raises(MisconfigurationException, match="checkpoints cannot be uploaded in offline mode"): _ = WandbLogger(save_dir=str(tmpdir), offline=True, log_model=True) From ee427ed67ac6cf8a81476db749dcbc874a4923f3 Mon Sep 17 00:00:00 2001 From: Boris Dayma Date: Tue, 8 Feb 2022 15:48:50 -0600 Subject: [PATCH 13/16] fix(wandb): unused variable --- pytorch_lightning/loggers/wandb.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py index f8c9833c3e765..c528f2db52027 100644 --- a/pytorch_lightning/loggers/wandb.py +++ b/pytorch_lightning/loggers/wandb.py @@ -26,11 +26,7 @@ from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment from pytorch_lightning.utilities.exceptions import MisconfigurationException -from pytorch_lightning.utilities.imports import ( - _WANDB_AVAILABLE, - _WANDB_GREATER_EQUAL_0_10_22, - _WANDB_GREATER_EQUAL_0_12_10, -) +from pytorch_lightning.utilities.imports 
From 6637a39d5ea5a240b6eecf9abd136a0955fc206e Mon Sep 17 00:00:00 2001
From: Boris Dayma
Date: Tue, 8 Feb 2022 16:03:09 -0600
Subject: [PATCH 14/16] feat(wandb): _WANDB_AVAILABLE import from utilities.import

---
 pytorch_lightning/loggers/__init__.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/pytorch_lightning/loggers/__init__.py b/pytorch_lightning/loggers/__init__.py
index d0e3cf759aeb8..d8049346f69bf 100644
--- a/pytorch_lightning/loggers/__init__.py
+++ b/pytorch_lightning/loggers/__init__.py
@@ -23,7 +23,8 @@
 from pytorch_lightning.loggers.mlflow import _MLFLOW_AVAILABLE, MLFlowLogger  # noqa: F401
 from pytorch_lightning.loggers.neptune import _NEPTUNE_AVAILABLE, NeptuneLogger  # noqa: F401
 from pytorch_lightning.loggers.test_tube import _TESTTUBE_AVAILABLE, TestTubeLogger  # noqa: F401
-from pytorch_lightning.loggers.wandb import _WANDB_AVAILABLE, WandbLogger  # noqa: F401
+from pytorch_lightning.loggers.wandb import WandbLogger  # noqa: F401
+from pytorch_lightning.utilities.imports import _WANDB_AVAILABLE

 if _COMET_AVAILABLE:
     __all__.append("CometLogger")

From 6c2cc216570ce18411c849d5013db619aacafc77 Mon Sep 17 00:00:00 2001
From: Boris Dayma
Date: Tue, 8 Feb 2022 18:43:46 -0600
Subject: [PATCH 15/16] fix: import order

Co-authored-by: Rohit Gupta
---
 pytorch_lightning/loggers/wandb.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/pytorch_lightning/loggers/wandb.py b/pytorch_lightning/loggers/wandb.py
index c528f2db52027..a256ab05d1d89 100644
--- a/pytorch_lightning/loggers/wandb.py
+++ b/pytorch_lightning/loggers/wandb.py
@@ -31,9 +31,8 @@
 from pytorch_lightning.utilities.rank_zero import rank_zero_only, rank_zero_warn

 try:
-    from wandb.wandb_run import Run
-
     import wandb
+    from wandb.wandb_run import Run
 except ModuleNotFoundError:
     # needed for test mocks, these tests shall be updated
     wandb, Run = None, None
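Patch 15 merely reorders the optional import, but the pattern it touches is what keeps the logger importable without wandb installed (a sketch mirroring the patched code):

    try:
        import wandb
        from wandb.wandb_run import Run
    except ModuleNotFoundError:
        # keeps the module importable (and mockable in tests) when the
        # wandb package is absent
        wandb, Run = None, None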
From efb6941b14796e924cd45bd52f7a3efbae38a81a Mon Sep 17 00:00:00 2001
From: Boris Dayma
Date: Wed, 9 Feb 2022 11:36:47 -0600
Subject: [PATCH 16/16] tests(wandb): use monkeypatch - by @rohitgr7

---
 tests/loggers/test_wandb.py | 20 ++++++++++++--------
 1 file changed, 12 insertions(+), 8 deletions(-)

diff --git a/tests/loggers/test_wandb.py b/tests/loggers/test_wandb.py
index 6347f892c27c8..280303a3f7318 100644
--- a/tests/loggers/test_wandb.py
+++ b/tests/loggers/test_wandb.py
@@ -17,24 +17,20 @@
 import pytest

-# ensure we consider recent wandb versions
-import pytorch_lightning.loggers.wandb as pl_wandb
 from pytorch_lightning import Trainer
 from pytorch_lightning.loggers import WandbLogger
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from tests.helpers import BoringModel
 from tests.helpers.utils import no_warning_call

-pl_wandb._WANDB_GREATER_EQUAL_0_10_22 = True
-pl_wandb._WANDB_GREATER_EQUAL_0_12_10 = True
-

 @mock.patch("pytorch_lightning.loggers.wandb.wandb")
-def test_wandb_logger_init(wandb):
+def test_wandb_logger_init(wandb, monkeypatch):
     """Verify that basic functionality of wandb logger works.

     Wandb doesn't work well with pytest so we have to mock it out here.
     """
+    import pytorch_lightning.loggers.wandb as imports

     # test wandb.init called when there is no W&B run
     wandb.run = None
@@ -57,6 +53,8 @@ def test_wandb_logger_init(wandb, monkeypatch):
     wandb.init().log.reset_mock()
     wandb.init.reset_mock()
     wandb.run = wandb.init()
+
+    monkeypatch.setattr(imports, "_WANDB_GREATER_EQUAL_0_12_10", True)
     with pytest.warns(UserWarning, match="There is a wandb run already in progress"):
         logger = WandbLogger()
     # check that no new run is created
@@ -127,8 +125,11 @@ def project_name(self):


 @mock.patch("pytorch_lightning.loggers.wandb.wandb")
-def test_wandb_logger_dirs_creation(wandb, tmpdir):
+def test_wandb_logger_dirs_creation(wandb, monkeypatch, tmpdir):
     """Test that the logger creates the folders and files in the right place."""
+    import pytorch_lightning.loggers.wandb as imports
+
+    monkeypatch.setattr(imports, "_WANDB_GREATER_EQUAL_0_12_10", True)
     wandb.run = None
     logger = WandbLogger(save_dir=str(tmpdir), offline=True)
     # the logger get initialized
@@ -160,8 +161,11 @@ def test_wandb_logger_dirs_creation(wandb, monkeypatch, tmpdir):


 @mock.patch("pytorch_lightning.loggers.wandb.wandb")
-def test_wandb_log_model(wandb, tmpdir):
+def test_wandb_log_model(wandb, monkeypatch, tmpdir):
     """Test that the logger creates the folders and files in the right place."""
+    import pytorch_lightning.loggers.wandb as imports
+
+    monkeypatch.setattr(imports, "_WANDB_GREATER_EQUAL_0_10_22", True)
     wandb.run = None
     model = BoringModel()
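Taken together, the series supports a usage pattern like the following (a hypothetical example; the project name and trainer arguments are illustrative, not taken from the patches): the run starts eagerly in the main process, and distributed workers reattach to it through the pickled attach id instead of each opening their own run.

    import pytorch_lightning as pl
    from pytorch_lightning.loggers import WandbLogger

    logger = WandbLogger(project="my-project")  # run is created here, attach id recorded
    trainer = pl.Trainer(logger=logger, strategy="ddp_spawn", devices=2, max_epochs=1)
    # spawned workers unpickle the logger; WandbLogger.experiment then goes
    # through wandb._attach(...) rather than wandb.init(), so all ranks share one run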