From deedcb86fd22f21a89bf6a83290761c743bc057e Mon Sep 17 00:00:00 2001 From: Danielle Pintz Date: Thu, 26 Aug 2021 06:41:59 +0000 Subject: [PATCH 1/7] Deprecate add_to_q/get_from_q --- CHANGELOG.md | 2 ++ pytorch_lightning/accelerators/accelerator.py | 2 +- pytorch_lightning/core/lightning.py | 6 ++++++ .../plugins/training_type/ddp.py | 3 ++- .../plugins/training_type/ddp_spawn.py | 7 ++++++- .../training_type/training_type_plugin.py | 2 +- .../trainer/configuration_validator.py | 20 ++++++++++++++++++- tests/plugins/test_ddp_spawn_plugin.py | 7 +++++-- 8 files changed, 42 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5232cc793163f..0a25c86a94dea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -150,6 +150,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). - Deprecated `prepare_data_per_node` flag on Trainer and set it as a property of `DataHooks`, accessible in the `LightningModule` and `LightningDataModule` [#8958](https://github.com/PyTorchLightning/pytorch-lightning/pull/8958) +- Deprecated `add_to_queue`, `get_from_queue` from Lightning Module + ### Removed - Removed deprecated `metrics` ([#8586](https://github.com/PyTorchLightning/pytorch-lightning/pull/8586/)) diff --git a/pytorch_lightning/accelerators/accelerator.py b/pytorch_lightning/accelerators/accelerator.py index 6038a8abc8f5c..546f79a31adc9 100644 --- a/pytorch_lightning/accelerators/accelerator.py +++ b/pytorch_lightning/accelerators/accelerator.py @@ -120,7 +120,7 @@ def dispatch(self, trainer: "pl.Trainer") -> None: def post_dispatch(self, trainer: "pl.Trainer") -> None: """Hook to do something after the training/evaluation/prediction starts.""" - self.training_type_plugin.post_dispatch() + self.training_type_plugin.post_dispatch(trainer) self.precision_plugin.post_dispatch() @property diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py index 096333388c3b1..1c0d27c4c8ce1 100644 --- 
a/pytorch_lightning/core/lightning.py +++ b/pytorch_lightning/core/lightning.py @@ -1960,6 +1960,9 @@ def add_to_queue(self, queue: torch.multiprocessing.SimpleQueue) -> None: Args: queue: the instance of the queue to append the data. + + .. deprecated:: v1.5 + This method was deprecated in v1.5 and will be removed in v1.7. """ callback_metrics: dict = apply_to_collection( self.trainer.callback_metrics, torch.Tensor, lambda x: x.cpu().numpy() @@ -1973,6 +1976,9 @@ def get_from_queue(self, queue: torch.multiprocessing.SimpleQueue) -> None: Args: queue: the instance of the queue from where to get the data. + + .. deprecated:: v1.5 + This method was deprecated in v1.5 and will be removed in v1.7. """ # NOTE: `add_to_queue` needs to be called before callback_metrics: dict = queue.get() diff --git a/pytorch_lightning/plugins/training_type/ddp.py b/pytorch_lightning/plugins/training_type/ddp.py index 787353be307e6..bc9895bea1de3 100644 --- a/pytorch_lightning/plugins/training_type/ddp.py +++ b/pytorch_lightning/plugins/training_type/ddp.py @@ -29,6 +29,7 @@ import torch.distributed from torch.nn.parallel.distributed import DistributedDataParallel +import pytorch_lightning as pl from pytorch_lightning.distributed import LightningDistributed from pytorch_lightning.overrides import LightningDistributedModule from pytorch_lightning.overrides.distributed import prepare_for_backward @@ -326,7 +327,7 @@ def pre_dispatch(self): # share ddp pids to all processes self._share_information_to_prevent_deadlock() - def post_dispatch(self) -> None: + def post_dispatch(self, trainer: "pl.Trainer") -> None: self.cluster_environment.teardown() def barrier(self, *args, **kwargs) -> None: diff --git a/pytorch_lightning/plugins/training_type/ddp_spawn.py b/pytorch_lightning/plugins/training_type/ddp_spawn.py index 08c049997bdfd..2c37b7ab79659 100644 --- a/pytorch_lightning/plugins/training_type/ddp_spawn.py +++ b/pytorch_lightning/plugins/training_type/ddp_spawn.py @@ -213,13 +213,16 @@ def 
new_process(self, process_idx: int, trainer: "pl.Trainer", mp_queue: SimpleQ # ensure that spawned processes go through teardown before joining trainer._call_teardown_hook() - def post_dispatch(self): + def post_dispatch(self, trainer: "pl.Trainer"): # restore main state with best weights best_path = self.mp_queue.get() last_path = self.mp_queue.get() self._results = self.mp_queue.get() + trainer.callback_metrics = self.mp_queue.get() # get the `callback_metrics` and set it to the trainer # only in case the user does not override it. + + # TODO(@daniellepintz): remove in v1.7 self.lightning_module.get_from_queue(self.mp_queue) # recover the weights of the processes trained in the children @@ -286,6 +289,8 @@ def __transfer_distrib_spawn_state_on_fit_end(self, trainer: "pl.Trainer", resul self.mp_queue.put(best_model_path) self.mp_queue.put(last_path) self.mp_queue.put(results) + self.mp_queue.put(trainer.callback_metrics) + # TODO(@daniellepintz): remove in v1.7 self.lightning_module.add_to_queue(self.mp_queue) # adds the `callback_metrics` to the queue def __recover_child_process_weights(self, best_path, last_path): diff --git a/pytorch_lightning/plugins/training_type/training_type_plugin.py b/pytorch_lightning/plugins/training_type/training_type_plugin.py index 6ee1ce77c8c24..f388c0148206c 100644 --- a/pytorch_lightning/plugins/training_type/training_type_plugin.py +++ b/pytorch_lightning/plugins/training_type/training_type_plugin.py @@ -358,5 +358,5 @@ def pre_dispatch(self) -> None: def dispatch(self, trainer: "pl.Trainer") -> None: """Hook to do something at trainer run_stage starts.""" - def post_dispatch(self) -> None: + def post_dispatch(self, trainer: "pl.Trainer") -> None: """Hook to do something after the training/evaluation/prediction finishes.""" diff --git a/pytorch_lightning/trainer/configuration_validator.py b/pytorch_lightning/trainer/configuration_validator.py index d9c341c5dfaeb..b4c99b0815748 100644 --- 
a/pytorch_lightning/trainer/configuration_validator.py +++ b/pytorch_lightning/trainer/configuration_validator.py @@ -13,7 +13,7 @@ # limitations under the License. import pytorch_lightning as pl from pytorch_lightning.trainer.states import TrainerFn -from pytorch_lightning.utilities import rank_zero_warn +from pytorch_lightning.utilities import rank_zero_warn, rank_zero_deprecation from pytorch_lightning.utilities.exceptions import MisconfigurationException from pytorch_lightning.utilities.model_helpers import is_overridden from pytorch_lightning.utilities.signature_utils import is_param_in_hook_signature @@ -43,6 +43,7 @@ def verify_loop_configurations(self, model: "pl.LightningModule") -> None: elif self.trainer.state.fn == TrainerFn.PREDICTING: self.__verify_predict_loop_configuration(model) self.__verify_dp_batch_transfer_support(model) + self._check_add_get_queue(model) def __verify_train_loop_configuration(self, model: "pl.LightningModule") -> None: # ----------------------------------- @@ -153,3 +154,20 @@ def __check_training_step_requires_dataloader_iter(self, model: "pl.LightningMod "The model taking a `dataloader_iter` argument in your `training_step` " "is incompatible with `truncated_bptt_steps > 0`." ) + + def _check_add_get_queue(self, model: "pl.LightningModule"): + r""" + Checks if add_to_queue or get_from_queue is overridden and sends a deprecation warning. + + Args: + model: The lightning module + + """ + if is_overridden("add_to_queue", model): + rank_zero_deprecation( + "The `LightningModule.add_to_queue` method was deprecated in v1.5 and will be removed in v1.7." + ) + if is_overridden("get_from_queue", model): + rank_zero_deprecation( + "The `LightningModule.get_from_queue` method was deprecated in v1.5 and will be removed in v1.7." 
+ ) diff --git a/tests/plugins/test_ddp_spawn_plugin.py b/tests/plugins/test_ddp_spawn_plugin.py index 1ab94446c8176..cddea20322ce7 100644 --- a/tests/plugins/test_ddp_spawn_plugin.py +++ b/tests/plugins/test_ddp_spawn_plugin.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import torch +import pytest from pytorch_lightning import Trainer from pytorch_lightning.plugins import DDPSpawnPlugin @@ -62,8 +63,9 @@ def test_ddp_cpu(): @RunIf(min_gpus=2) def test_ddp_spawn_extra_parameters(tmpdir): - """Tests if device is set correctely when training for DDPSpawnPlugin.""" - trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, gpus=2, accelerator="ddp_spawn") + """Tests if device is set correctly when training for DDPSpawnPlugin.""" + with pytest.deprecated_call(match=r"`LightningModule.add_to_queue` method was deprecated in v1.5"): + trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, gpus=2, accelerator="ddp_spawn") assert isinstance(trainer.training_type_plugin, DDPSpawnPlugin) assert trainer.training_type_plugin.on_gpu @@ -76,4 +78,5 @@ def test_ddp_spawn_extra_parameters(tmpdir): trainer.fit(model, datamodule=dm) assert trainer.callback_metrics[val_name] == torch.tensor(val) + # TODO(@daniellepintz) remove assert in v1.7 assert model.test_val == "test_val" From a3c44dc012289da3aa7ebe3d69545159e899f0aa Mon Sep 17 00:00:00 2001 From: Danielle Pintz Date: Thu, 26 Aug 2021 06:57:24 +0000 Subject: [PATCH 2/7] wip --- pytorch_lightning/accelerators/accelerator.py | 2 +- pytorch_lightning/plugins/training_type/ddp.py | 2 +- pytorch_lightning/plugins/training_type/ddp_spawn.py | 9 ++++++--- .../plugins/training_type/training_type_plugin.py | 2 +- 4 files changed, 9 insertions(+), 6 deletions(-) diff --git a/pytorch_lightning/accelerators/accelerator.py b/pytorch_lightning/accelerators/accelerator.py index 546f79a31adc9..6038a8abc8f5c 100644 --- 
a/pytorch_lightning/accelerators/accelerator.py +++ b/pytorch_lightning/accelerators/accelerator.py @@ -120,7 +120,7 @@ def dispatch(self, trainer: "pl.Trainer") -> None: def post_dispatch(self, trainer: "pl.Trainer") -> None: """Hook to do something after the training/evaluation/prediction starts.""" - self.training_type_plugin.post_dispatch(trainer) + self.training_type_plugin.post_dispatch() self.precision_plugin.post_dispatch() @property diff --git a/pytorch_lightning/plugins/training_type/ddp.py b/pytorch_lightning/plugins/training_type/ddp.py index bc9895bea1de3..47bb2980b1109 100644 --- a/pytorch_lightning/plugins/training_type/ddp.py +++ b/pytorch_lightning/plugins/training_type/ddp.py @@ -327,7 +327,7 @@ def pre_dispatch(self): # share ddp pids to all processes self._share_information_to_prevent_deadlock() - def post_dispatch(self, trainer: "pl.Trainer") -> None: + def post_dispatch(self) -> None: self.cluster_environment.teardown() def barrier(self, *args, **kwargs) -> None: diff --git a/pytorch_lightning/plugins/training_type/ddp_spawn.py b/pytorch_lightning/plugins/training_type/ddp_spawn.py index 2c37b7ab79659..a051ef558dc4c 100644 --- a/pytorch_lightning/plugins/training_type/ddp_spawn.py +++ b/pytorch_lightning/plugins/training_type/ddp_spawn.py @@ -213,12 +213,14 @@ def new_process(self, process_idx: int, trainer: "pl.Trainer", mp_queue: SimpleQ # ensure that spawned processes go through teardown before joining trainer._call_teardown_hook() - def post_dispatch(self, trainer: "pl.Trainer"): + # TODO(@daniellepintz): add trainer argument in v1.7 + def post_dispatch(self): # restore main state with best weights best_path = self.mp_queue.get() last_path = self.mp_queue.get() self._results = self.mp_queue.get() - trainer.callback_metrics = self.mp_queue.get() + # TODO(@daniellepintz): add `trainer.callback_metrics = self.mp_queue.get()` in v1.7 + # get the `callback_metrics` and set it to the trainer # only in case the user does not override it. 
@@ -289,7 +291,8 @@ def __transfer_distrib_spawn_state_on_fit_end(self, trainer: "pl.Trainer", resul self.mp_queue.put(best_model_path) self.mp_queue.put(last_path) self.mp_queue.put(results) - self.mp_queue.put(trainer.callback_metrics) + # TODO(@daniellepintz): add `self.mp_queue.put(trainer.callback_metrics)` in v1.7 + # TODO(@daniellepintz): remove in v1.7 self.lightning_module.add_to_queue(self.mp_queue) # adds the `callback_metrics` to the queue diff --git a/pytorch_lightning/plugins/training_type/training_type_plugin.py b/pytorch_lightning/plugins/training_type/training_type_plugin.py index f388c0148206c..6ee1ce77c8c24 100644 --- a/pytorch_lightning/plugins/training_type/training_type_plugin.py +++ b/pytorch_lightning/plugins/training_type/training_type_plugin.py @@ -358,5 +358,5 @@ def pre_dispatch(self) -> None: def dispatch(self, trainer: "pl.Trainer") -> None: """Hook to do something at trainer run_stage starts.""" - def post_dispatch(self, trainer: "pl.Trainer") -> None: + def post_dispatch(self) -> None: """Hook to do something after the training/evaluation/prediction finishes.""" From e2be7b30e717eeb6eced1cf2e4e646619f5f6f71 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 26 Aug 2021 06:58:31 +0000 Subject: [PATCH 3/7] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- pytorch_lightning/trainer/configuration_validator.py | 2 +- tests/plugins/test_ddp_spawn_plugin.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/trainer/configuration_validator.py b/pytorch_lightning/trainer/configuration_validator.py index b4c99b0815748..217e489109bd7 100644 --- a/pytorch_lightning/trainer/configuration_validator.py +++ b/pytorch_lightning/trainer/configuration_validator.py @@ -13,7 +13,7 @@ # limitations under the License. 
import pytorch_lightning as pl from pytorch_lightning.trainer.states import TrainerFn -from pytorch_lightning.utilities import rank_zero_warn, rank_zero_deprecation +from pytorch_lightning.utilities import rank_zero_deprecation, rank_zero_warn from pytorch_lightning.utilities.exceptions import MisconfigurationException from pytorch_lightning.utilities.model_helpers import is_overridden from pytorch_lightning.utilities.signature_utils import is_param_in_hook_signature diff --git a/tests/plugins/test_ddp_spawn_plugin.py b/tests/plugins/test_ddp_spawn_plugin.py index cddea20322ce7..31aeca8c30dda 100644 --- a/tests/plugins/test_ddp_spawn_plugin.py +++ b/tests/plugins/test_ddp_spawn_plugin.py @@ -11,8 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import torch import pytest +import torch from pytorch_lightning import Trainer from pytorch_lightning.plugins import DDPSpawnPlugin From 5ff599e4f6c097bce3e862ea8dd3840eedd17e28 Mon Sep 17 00:00:00 2001 From: Danielle Pintz Date: Thu, 26 Aug 2021 07:00:37 +0000 Subject: [PATCH 4/7] update changelog --- CHANGELOG.md | 4 ++-- pytorch_lightning/plugins/training_type/ddp.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0a25c86a94dea..5434fa4910337 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -147,10 +147,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). 
- Deprecated `DataModule` properties: `train_transforms`, `val_transforms`, `test_transforms`, `size`, `dims` ([#8851](https://github.com/PyTorchLightning/pytorch-lightning/pull/8851)) -- Deprecated `prepare_data_per_node` flag on Trainer and set it as a property of `DataHooks`, accessible in the `LightningModule` and `LightningDataModule` [#8958](https://github.com/PyTorchLightning/pytorch-lightning/pull/8958) +- Deprecated `prepare_data_per_node` flag on Trainer and set it as a property of `DataHooks`, accessible in the `LightningModule` and `LightningDataModule` ([#8958](https://github.com/PyTorchLightning/pytorch-lightning/pull/8958)) -- Deprecated `add_to_queue`, `get_from_queue` from Lightning Module +- Deprecated `add_to_queue`, `get_from_queue` from Lightning Module ([#9126](https://github.com/PyTorchLightning/pytorch-lightning/pull/9126)) ### Removed diff --git a/pytorch_lightning/plugins/training_type/ddp.py b/pytorch_lightning/plugins/training_type/ddp.py index 47bb2980b1109..787353be307e6 100644 --- a/pytorch_lightning/plugins/training_type/ddp.py +++ b/pytorch_lightning/plugins/training_type/ddp.py @@ -29,7 +29,6 @@ import torch.distributed from torch.nn.parallel.distributed import DistributedDataParallel -import pytorch_lightning as pl from pytorch_lightning.distributed import LightningDistributed from pytorch_lightning.overrides import LightningDistributedModule from pytorch_lightning.overrides.distributed import prepare_for_backward From 6bd53e89d0d3818c0549b6e2dc4819526970a347 Mon Sep 17 00:00:00 2001 From: Danielle Pintz Date: Fri, 27 Aug 2021 19:49:31 +0000 Subject: [PATCH 5/7] update test --- tests/deprecated_api/test_remove_1-7.py | 11 +++++++++++ tests/plugins/test_ddp_spawn_plugin.py | 3 +-- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/tests/deprecated_api/test_remove_1-7.py b/tests/deprecated_api/test_remove_1-7.py index 7581bf2b0c142..6766d4b92fa99 100644 --- a/tests/deprecated_api/test_remove_1-7.py +++ 
b/tests/deprecated_api/test_remove_1-7.py @@ -19,6 +19,7 @@ from tests.deprecated_api import _soft_unimport_module from tests.helpers import BoringModel from tests.helpers.datamodules import MNISTDataModule +from tests.helpers.runif import RunIf def test_v1_7_0_deprecated_lightning_module_summarize(tmpdir): @@ -87,3 +88,13 @@ def test_v1_7_0_trainer_prepare_data_per_node(tmpdir): match="Setting `prepare_data_per_node` with the trainer flag is deprecated and will be removed in v1.7.0!" ): _ = Trainer(prepare_data_per_node=False) + + +@RunIf(min_gpus=2) +def test_v1_7_0_deprecate_add_get_queue(tmpdir): + """Tests if device is set correctly when training for DDPSpawnPlugin.""" + with pytest.deprecated_call(match=r"`LightningModule.add_to_queue` method was deprecated in v1.5"): + _ = Trainer(default_root_dir=tmpdir, fast_dev_run=True, gpus=2, accelerator="ddp_spawn") + + with pytest.deprecated_call(match=r"`LightningModule.get_from_queue` method was deprecated in v1.5"): + _ = Trainer(default_root_dir=tmpdir, fast_dev_run=True, gpus=2, accelerator="ddp_spawn") diff --git a/tests/plugins/test_ddp_spawn_plugin.py b/tests/plugins/test_ddp_spawn_plugin.py index 31aeca8c30dda..3346d1a424ece 100644 --- a/tests/plugins/test_ddp_spawn_plugin.py +++ b/tests/plugins/test_ddp_spawn_plugin.py @@ -64,8 +64,7 @@ def test_ddp_cpu(): @RunIf(min_gpus=2) def test_ddp_spawn_extra_parameters(tmpdir): """Tests if device is set correctly when training for DDPSpawnPlugin.""" - with pytest.deprecated_call(match=r"`LightningModule.add_to_queue` method was deprecated in v1.5"): - trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, gpus=2, accelerator="ddp_spawn") + trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, gpus=2, accelerator="ddp_spawn") assert isinstance(trainer.training_type_plugin, DDPSpawnPlugin) assert trainer.training_type_plugin.on_gpu From d873653261a10ef8b717360e877813171a93a93f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 27 Aug 2021 20:04:48 +0000 Subject: [PATCH 6/7] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- tests/deprecated_api/test_remove_1-7.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/deprecated_api/test_remove_1-7.py b/tests/deprecated_api/test_remove_1-7.py index b0d2a47742bd7..78956f4c60682 100644 --- a/tests/deprecated_api/test_remove_1-7.py +++ b/tests/deprecated_api/test_remove_1-7.py @@ -106,4 +106,4 @@ def test_v1_7_0_deprecate_add_get_queue(tmpdir): def test_v1_7_0_test_tube_logger(_, tmpdir): with pytest.deprecated_call(match="The TestTubeLogger is deprecated since v1.5 and will be removed in v1.7"): _ = TestTubeLogger(tmpdir) ->>>>>>> 045c879e08455683584167713c5f7e3d389afa66 +>>>>>>> 0o45c879e08455683584167713c5f7e3d389afa66 From cd41928daa6b6b354a4550cfcfd1631e5c3ea147 Mon Sep 17 00:00:00 2001 From: Danielle Pintz Date: Fri, 27 Aug 2021 20:06:17 +0000 Subject: [PATCH 7/7] update --- pytorch_lightning/__about__.py | 2 +- tests/deprecated_api/test_remove_1-7.py | 5 ++--- tests/plugins/test_ddp_spawn_plugin.py | 1 - 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/pytorch_lightning/__about__.py b/pytorch_lightning/__about__.py index 2df30ab330870..5bfc1ae6d3473 100644 --- a/pytorch_lightning/__about__.py +++ b/pytorch_lightning/__about__.py @@ -1,7 +1,7 @@ import time _this_year = time.strftime("%Y") -__version__ = "20210827" +__version__ = "1.5.0dev" __author__ = "William Falcon et al." 
__author_email__ = "waf2107@columbia.edu" __license__ = "Apache-2.0" diff --git a/tests/deprecated_api/test_remove_1-7.py b/tests/deprecated_api/test_remove_1-7.py index b0d2a47742bd7..3e61b2baa2661 100644 --- a/tests/deprecated_api/test_remove_1-7.py +++ b/tests/deprecated_api/test_remove_1-7.py @@ -92,7 +92,6 @@ def test_v1_7_0_trainer_prepare_data_per_node(tmpdir): _ = Trainer(prepare_data_per_node=False) -<<<<<<< HEAD @RunIf(min_gpus=2) def test_v1_7_0_deprecate_add_get_queue(tmpdir): """Tests if device is set correctly when training for DDPSpawnPlugin.""" @@ -101,9 +100,9 @@ def test_v1_7_0_deprecate_add_get_queue(tmpdir): with pytest.deprecated_call(match=r"`LightningModule.get_from_queue` method was deprecated in v1.5"): _ = Trainer(default_root_dir=tmpdir, fast_dev_run=True, gpus=2, accelerator="ddp_spawn") -======= + + @mock.patch("pytorch_lightning.loggers.test_tube.Experiment") def test_v1_7_0_test_tube_logger(_, tmpdir): with pytest.deprecated_call(match="The TestTubeLogger is deprecated since v1.5 and will be removed in v1.7"): _ = TestTubeLogger(tmpdir) ->>>>>>> 0o45c879e08455683584167713c5f7e3d389afa66 diff --git a/tests/plugins/test_ddp_spawn_plugin.py b/tests/plugins/test_ddp_spawn_plugin.py index 3346d1a424ece..e9a8f1aaf839e 100644 --- a/tests/plugins/test_ddp_spawn_plugin.py +++ b/tests/plugins/test_ddp_spawn_plugin.py @@ -11,7 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import pytest import torch from pytorch_lightning import Trainer