From 3ffe5bf0fd3191394b780c8b7b78a07a7b3011d2 Mon Sep 17 00:00:00 2001 From: Jennifer Dai Date: Thu, 24 Feb 2022 14:14:45 -0800 Subject: [PATCH 01/21] first commit --- .../connectors/accelerator_connector.py | 5 ++++- .../test_accelerator_connector.py | 19 +++++++++---------- tests/accelerators/test_ipu.py | 4 +++- tests/accelerators/test_tpu.py | 7 +++++-- 4 files changed, 21 insertions(+), 14 deletions(-) diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py index f2d27a249f6f2..8d81afac2abb6 100644 --- a/pytorch_lightning/trainer/connectors/accelerator_connector.py +++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py @@ -492,7 +492,10 @@ def _set_parallel_devices_and_init_accelerator(self) -> None: "The flag `devices` must be an int with `accelerator='cpu'`," f" got `devices={self._devices_flag}` instead." ) - + if not self.accelerator.is_available(): + raise MisconfigurationException( + f"{self.accelerator.__class__.__qualname__} can not run on this hardware." + ) self._gpus = self._devices_flag if not self._gpus else self._gpus self._tpu_cores = self._devices_flag if not self._tpu_cores else self._tpu_cores diff --git a/tests/accelerators/test_accelerator_connector.py b/tests/accelerators/test_accelerator_connector.py index 0e13b4af0f8d2..294ad9ff14e1f 100644 --- a/tests/accelerators/test_accelerator_connector.py +++ b/tests/accelerators/test_accelerator_connector.py @@ -414,7 +414,9 @@ def test_ipython_incompatible_backend_error(_, monkeypatch): @pytest.mark.parametrize("trainer_kwargs", [{}, dict(strategy="dp", accelerator="gpu"), dict(accelerator="tpu")]) -def test_ipython_compatible_backend(trainer_kwargs, monkeypatch): +@mock.patch("pytorch_lightning.accelerators.tpu.TPUAccelerator.is_available", return_value=True) +@mock.patch("pytorch_lightning.accelerators.gpu.GPUAccelerator.is_available", return_value=True) +def test_ipython_compatible_backend(mock_gpuacc_avail, mock_tpuacc_avail, trainer_kwargs, monkeypatch): monkeypatch.setattr(pytorch_lightning.utilities, "_IS_INTERACTIVE", True) trainer = Trainer(**trainer_kwargs) assert trainer.strategy.launcher is None or trainer.strategy.launcher.is_interactive_compatible @@ -464,9 +466,8 @@ def test_accelerator_cpu(_): with pytest.raises(MisconfigurationException, match="You requested gpu:"): trainer = Trainer(gpus=1) - # TODO enable this test when add device availability check - # with pytest.raises(MisconfigurationException, match="You requested gpu, but gpu is not available"): - # trainer = Trainer(accelerator="gpu") + with pytest.raises(MisconfigurationException, match="GPUAccelerator can not run on this hardware."): + trainer = Trainer(accelerator="gpu") with pytest.raises(MisconfigurationException, match="You requested gpu:"): trainer = Trainer(accelerator="cpu", gpus=1) @@ -883,12 +884,11 @@ def test_strategy_choice_ddp_cpu_slurm(device_count_mock, setup_distributed_mock assert trainer.strategy.local_rank == 0 -def test_unsupported_tpu_choice(monkeypatch): +@mock.patch("pytorch_lightning.accelerators.tpu.TPUAccelerator.is_available", return_value=True) +def test_unsupported_tpu_choice(mock_tpuacc_avail, monkeypatch): import pytorch_lightning.utilities.imports as imports - from pytorch_lightning.trainer.connectors.accelerator_connector import AcceleratorConnector monkeypatch.setattr(imports, "_XLA_AVAILABLE", True) - monkeypatch.setattr(AcceleratorConnector, "has_tpu", True) with pytest.raises(MisconfigurationException, 
match=r"accelerator='tpu', precision=64\)` is not implemented"): Trainer(accelerator="tpu", precision=64) @@ -902,14 +902,13 @@ def test_unsupported_tpu_choice(monkeypatch): Trainer(accelerator="tpu", precision=16, amp_backend="apex", strategy="single_device") -def test_unsupported_ipu_choice(monkeypatch): +@mock.patch("pytorch_lightning.accelerators.ipu.IPUAccelerator.is_available", return_value=True) +def test_unsupported_ipu_choice(mock_ipuacc_avail, monkeypatch): import pytorch_lightning.strategies.ipu as ipu import pytorch_lightning.utilities.imports as imports - from pytorch_lightning.trainer.connectors.accelerator_connector import AcceleratorConnector monkeypatch.setattr(imports, "_IPU_AVAILABLE", True) monkeypatch.setattr(ipu, "_IPU_AVAILABLE", True) - monkeypatch.setattr(AcceleratorConnector, "has_ipu", True) with pytest.raises(MisconfigurationException, match=r"accelerator='ipu', precision='bf16'\)` is not supported"): Trainer(accelerator="ipu", precision="bf16") with pytest.raises(MisconfigurationException, match=r"accelerator='ipu', precision=64\)` is not supported"): diff --git a/tests/accelerators/test_ipu.py b/tests/accelerators/test_ipu.py index 23c2c5e93a34f..624d678d1b2c9 100644 --- a/tests/accelerators/test_ipu.py +++ b/tests/accelerators/test_ipu.py @@ -13,6 +13,7 @@ # limitations under the License. import os from typing import Optional +from unittest import mock import pytest import torch @@ -96,7 +97,8 @@ def test_epoch_end(self, outputs) -> None: @pytest.mark.skipif(_IPU_AVAILABLE, reason="test requires non-IPU machine") -def test_fail_if_no_ipus(tmpdir): +@mock.patch("pytorch_lightning.accelerators.ipu.IPUAccelerator.is_available", return_value=True) +def test_fail_if_no_ipus(mock_ipuacc_avail, tmpdir): with pytest.raises(MisconfigurationException, match="IPU Accelerator requires IPU devices to run"): Trainer(default_root_dir=tmpdir, ipus=1) diff --git a/tests/accelerators/test_tpu.py b/tests/accelerators/test_tpu.py index 7e1d8c15cf323..022279622e100 100644 --- a/tests/accelerators/test_tpu.py +++ b/tests/accelerators/test_tpu.py @@ -13,6 +13,7 @@ # limitations under the License import collections from copy import deepcopy +from unittest import mock from unittest.mock import patch import pytest @@ -266,7 +267,8 @@ def forward(self, x): assert torch.all(torch.eq(model.net_a.layer.weight, model.net_b.layer.weight)) -def test_tpu_invalid_raises(): +@mock.patch("pytorch_lightning.accelerators.tpu.TPUAccelerator.is_available", return_value=True) +def test_tpu_invalid_raises(mock_tpuacc_avail): strategy = TPUSpawnStrategy(accelerator=TPUAccelerator(), precision_plugin=PrecisionPlugin()) with pytest.raises(ValueError, match="TPUAccelerator` can only be used with a `TPUPrecisionPlugin"): Trainer(strategy=strategy) @@ -276,7 +278,8 @@ def test_tpu_invalid_raises(): Trainer(strategy=strategy) -def test_tpu_invalid_raises_set_precision_with_strategy(): +@mock.patch("pytorch_lightning.accelerators.tpu.TPUAccelerator.is_available", return_value=True) +def test_tpu_invalid_raises_set_precision_with_strategy(mock_tpuacc_avail): accelerator = TPUAccelerator() strategy = TPUSpawnStrategy(accelerator=accelerator, precision_plugin=PrecisionPlugin()) with pytest.raises(ValueError, match="`TPUAccelerator` can only be used with a `TPUPrecisionPlugin`"): From 4c0f4a761371907b960c5e9f8eb3fa9548ed5f97 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 24 Feb 2022 22:16:46 +0000 Subject: [PATCH 02/21] [pre-commit.ci] auto 
fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- pytorch_lightning/trainer/connectors/accelerator_connector.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py index 8d81afac2abb6..608bbbb275547 100644 --- a/pytorch_lightning/trainer/connectors/accelerator_connector.py +++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py @@ -493,9 +493,7 @@ def _set_parallel_devices_and_init_accelerator(self) -> None: f" got `devices={self._devices_flag}` instead." ) if not self.accelerator.is_available(): - raise MisconfigurationException( - f"{self.accelerator.__class__.__qualname__} can not run on this hardware." - ) + raise MisconfigurationException(f"{self.accelerator.__class__.__qualname__} can not run on this hardware.") self._gpus = self._devices_flag if not self._gpus else self._gpus self._tpu_cores = self._devices_flag if not self._tpu_cores else self._tpu_cores From 9c642f971b5e7af52ade8404be2f5f31b12e62b2 Mon Sep 17 00:00:00 2001 From: Jennifer Dai Date: Thu, 24 Feb 2022 16:23:58 -0800 Subject: [PATCH 03/21] update --- tests/plugins/test_cluster_integration.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/plugins/test_cluster_integration.py b/tests/plugins/test_cluster_integration.py index a8957d46fb394..be01767f0f684 100644 --- a/tests/plugins/test_cluster_integration.py +++ b/tests/plugins/test_cluster_integration.py @@ -58,7 +58,8 @@ def environment_combinations(): "strategy_cls", [DDPStrategy, DDPShardedStrategy, DDP2Strategy, pytest.param(DeepSpeedStrategy, marks=RunIf(deepspeed=True))], ) -def test_ranks_available_manual_strategy_selection(strategy_cls): +@mock.patch("pytorch_lightning.accelerators.gpu.GPUAccelerator.is_available", return_value=True) +def test_ranks_available_manual_strategy_selection(mock_gpuacc_available, strategy_cls): """Test that the rank information is readily available after Trainer initialization.""" num_nodes = 2 for cluster, variables, expected in environment_combinations(): From 55b49956556297a018c9fe16ace4e3f244416cee Mon Sep 17 00:00:00 2001 From: Jennifer Dai Date: Thu, 24 Feb 2022 16:40:18 -0800 Subject: [PATCH 04/21] doctest? 
--- docs/source/extensions/accelerator.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/source/extensions/accelerator.rst b/docs/source/extensions/accelerator.rst index 03c547f49a0f8..53fa8f2302890 100644 --- a/docs/source/extensions/accelerator.rst +++ b/docs/source/extensions/accelerator.rst @@ -25,10 +25,13 @@ One to handle differences from the training routine and one to handle different from pytorch_lightning.accelerators import GPUAccelerator from pytorch_lightning.plugins import NativeMixedPrecisionPlugin from pytorch_lightning.strategies import DDPStrategy + from unittest.mock import MagicMock accelerator = GPUAccelerator() precision_plugin = NativeMixedPrecisionPlugin(precision=16, device="cuda") training_type_plugin = DDPStrategy(accelerator=accelerator, precision_plugin=precision_plugin) + # device checks happen during trainer init, mock availability for doctest + accelerator.is_available = MagicMock(return_value=True) trainer = Trainer(strategy=training_type_plugin) From 729a8aa8187e72c27c5652c2c1543397aa60e2a5 Mon Sep 17 00:00:00 2001 From: Jennifer Dai Date: Thu, 24 Feb 2022 16:57:08 -0800 Subject: [PATCH 05/21] codeblock instead --- docs/source/extensions/accelerator.rst | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/docs/source/extensions/accelerator.rst b/docs/source/extensions/accelerator.rst index 53fa8f2302890..d683b968b0c19 100644 --- a/docs/source/extensions/accelerator.rst +++ b/docs/source/extensions/accelerator.rst @@ -19,19 +19,16 @@ Currently there are accelerators for: Each Accelerator gets two plugins upon initialization: One to handle differences from the training routine and one to handle different precisions. -.. testcode:: +.. code-block:: python from pytorch_lightning import Trainer from pytorch_lightning.accelerators import GPUAccelerator from pytorch_lightning.plugins import NativeMixedPrecisionPlugin from pytorch_lightning.strategies import DDPStrategy - from unittest.mock import MagicMock accelerator = GPUAccelerator() precision_plugin = NativeMixedPrecisionPlugin(precision=16, device="cuda") training_type_plugin = DDPStrategy(accelerator=accelerator, precision_plugin=precision_plugin) - # device checks happen during trainer init, mock availability for doctest - accelerator.is_available = MagicMock(return_value=True) trainer = Trainer(strategy=training_type_plugin) From 15d386cb300b75ffae159c310e68496c312dd6cc Mon Sep 17 00:00:00 2001 From: Jennifer Dai Date: Fri, 25 Feb 2022 15:06:09 -0800 Subject: [PATCH 06/21] update msg --- .../trainer/connectors/accelerator_connector.py | 8 +++++++- tests/accelerators/test_accelerator_connector.py | 2 +- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py index 608bbbb275547..da0405dd9aa66 100644 --- a/pytorch_lightning/trainer/connectors/accelerator_connector.py +++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py @@ -493,7 +493,13 @@ def _set_parallel_devices_and_init_accelerator(self) -> None: f" got `devices={self._devices_flag}` instead." 
) if not self.accelerator.is_available(): - raise MisconfigurationException(f"{self.accelerator.__class__.__qualname__} can not run on this hardware.") + if isinstance(self.accelerator, GPUAccelerator): + hardware_str = "GPU" + elif isinstance(self.accelerator, IPUAccelerator): + hardware_str = "IPU" + else: + hardware_str = "TPU" + raise MisconfigurationException(f"{self.accelerator.__class__.__qualname__} can not run on this hardware, " + hardware_str + "s are not available.") self._gpus = self._devices_flag if not self._gpus else self._gpus self._tpu_cores = self._devices_flag if not self._tpu_cores else self._tpu_cores diff --git a/tests/accelerators/test_accelerator_connector.py b/tests/accelerators/test_accelerator_connector.py index 294ad9ff14e1f..baf26d4035704 100644 --- a/tests/accelerators/test_accelerator_connector.py +++ b/tests/accelerators/test_accelerator_connector.py @@ -466,7 +466,7 @@ def test_accelerator_cpu(_): with pytest.raises(MisconfigurationException, match="You requested gpu:"): trainer = Trainer(gpus=1) - with pytest.raises(MisconfigurationException, match="GPUAccelerator can not run on this hardware."): + with pytest.raises(MisconfigurationException, match="GPUAccelerator can not run on this hardware, GPUs are not available."): trainer = Trainer(accelerator="gpu") with pytest.raises(MisconfigurationException, match="You requested gpu:"): trainer = Trainer(accelerator="cpu", gpus=1) From d36d852fad4a1483debb40166d46b0aa47c3652e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 25 Feb 2022 23:07:37 +0000 Subject: [PATCH 07/21] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../trainer/connectors/accelerator_connector.py | 6 +++++- tests/accelerators/test_accelerator_connector.py | 4 +++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py index da0405dd9aa66..e62031a8c3932 100644 --- a/pytorch_lightning/trainer/connectors/accelerator_connector.py +++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py @@ -499,7 +499,11 @@ def _set_parallel_devices_and_init_accelerator(self) -> None: hardware_str = "IPU" else: hardware_str = "TPU" - raise MisconfigurationException(f"{self.accelerator.__class__.__qualname__} can not run on this hardware, " + hardware_str + "s are not available.") + raise MisconfigurationException( + f"{self.accelerator.__class__.__qualname__} can not run on this hardware, " + + hardware_str + + "s are not available." + ) self._gpus = self._devices_flag if not self._gpus else self._gpus self._tpu_cores = self._devices_flag if not self._tpu_cores else self._tpu_cores diff --git a/tests/accelerators/test_accelerator_connector.py b/tests/accelerators/test_accelerator_connector.py index baf26d4035704..d2da7fb517974 100644 --- a/tests/accelerators/test_accelerator_connector.py +++ b/tests/accelerators/test_accelerator_connector.py @@ -466,7 +466,9 @@ def test_accelerator_cpu(_): with pytest.raises(MisconfigurationException, match="You requested gpu:"): trainer = Trainer(gpus=1) - with pytest.raises(MisconfigurationException, match="GPUAccelerator can not run on this hardware, GPUs are not available."): + with pytest.raises( + MisconfigurationException, match="GPUAccelerator can not run on this hardware, GPUs are not available." 
+ ): trainer = Trainer(accelerator="gpu") with pytest.raises(MisconfigurationException, match="You requested gpu:"): trainer = Trainer(accelerator="cpu", gpus=1) From c69bcf705baf627388d654f9c488253be4f6ef3c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 28 Feb 2022 22:16:21 +0000 Subject: [PATCH 08/21] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- pytorch_lightning/trainer/connectors/accelerator_connector.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py index 6b826d46887c7..4d2c96c44dc96 100644 --- a/pytorch_lightning/trainer/connectors/accelerator_connector.py +++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py @@ -473,7 +473,7 @@ def _set_parallel_devices_and_init_accelerator(self) -> None: ) accelerator_class = ACCELERATORS[self._accelerator_flag] self.accelerator = accelerator_class() # type: ignore[abstract] - + if not self.accelerator.is_available(): if isinstance(self.accelerator, GPUAccelerator): hardware_str = "GPU" @@ -486,7 +486,7 @@ def _set_parallel_devices_and_init_accelerator(self) -> None: + hardware_str + "s are not available." ) - + self._set_devices_flag_if_auto_passed() self._gpus = self._devices_flag if not self._gpus else self._gpus From 238585557c381e0cfc6b66c554b4240a6160b954 Mon Sep 17 00:00:00 2001 From: Jennifer Dai Date: Mon, 28 Feb 2022 15:26:47 -0800 Subject: [PATCH 09/21] update msg, hardware_name --- pytorch_lightning/accelerators/cpu.py | 5 ++++ pytorch_lightning/accelerators/gpu.py | 5 ++++ pytorch_lightning/accelerators/ipu.py | 5 ++++ pytorch_lightning/accelerators/tpu.py | 5 ++++ .../connectors/accelerator_connector.py | 24 +++++++------------ .../test_accelerator_connector.py | 2 +- tests/accelerators/test_tpu.py | 1 - .../test_estimated_stepping_batches.py | 4 +++- 8 files changed, 33 insertions(+), 18 deletions(-) diff --git a/pytorch_lightning/accelerators/cpu.py b/pytorch_lightning/accelerators/cpu.py index d586478619c05..3492bf95014c0 100644 --- a/pytorch_lightning/accelerators/cpu.py +++ b/pytorch_lightning/accelerators/cpu.py @@ -62,3 +62,8 @@ def auto_device_count() -> int: def is_available() -> bool: """CPU is always available for execution.""" return True + + @staticmethod + def hardware_name() -> str: + """Name of the hardware.""" + return "CPU" diff --git a/pytorch_lightning/accelerators/gpu.py b/pytorch_lightning/accelerators/gpu.py index f9181e8802e21..794f0feab1711 100644 --- a/pytorch_lightning/accelerators/gpu.py +++ b/pytorch_lightning/accelerators/gpu.py @@ -93,6 +93,11 @@ def auto_device_count() -> int: def is_available() -> bool: return torch.cuda.device_count() > 0 + @staticmethod + def hardware_name() -> str: + """Name of the hardware.""" + return "GPU" + def get_nvidia_gpu_stats(device: _DEVICE) -> Dict[str, float]: """Get GPU stats including memory, fan speed, and temperature from nvidia-smi. 
diff --git a/pytorch_lightning/accelerators/ipu.py b/pytorch_lightning/accelerators/ipu.py index 2ac1c794610d8..eb9700a397b22 100644 --- a/pytorch_lightning/accelerators/ipu.py +++ b/pytorch_lightning/accelerators/ipu.py @@ -46,3 +46,8 @@ def auto_device_count() -> int: @staticmethod def is_available() -> bool: return _IPU_AVAILABLE + + @staticmethod + def hardware_name() -> str: + """Name of the hardware.""" + return "IPU" diff --git a/pytorch_lightning/accelerators/tpu.py b/pytorch_lightning/accelerators/tpu.py index cd84cccd8b493..b8aaa62de9d9e 100644 --- a/pytorch_lightning/accelerators/tpu.py +++ b/pytorch_lightning/accelerators/tpu.py @@ -64,3 +64,8 @@ def auto_device_count() -> int: @staticmethod def is_available() -> bool: return _TPU_AVAILABLE + + @staticmethod + def hardware_name() -> str: + """Name of the hardware.""" + return "TPU" diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py index 4d2c96c44dc96..30d693d28b487 100644 --- a/pytorch_lightning/trainer/connectors/accelerator_connector.py +++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py @@ -455,15 +455,15 @@ def _choose_accelerator(self) -> str: return "cpu" def _set_parallel_devices_and_init_accelerator(self) -> None: + ACCELERATORS = { + "cpu": CPUAccelerator, + "gpu": GPUAccelerator, + "tpu": TPUAccelerator, + "ipu": IPUAccelerator, + } if isinstance(self._accelerator_flag, Accelerator): self.accelerator: Accelerator = self._accelerator_flag else: - ACCELERATORS = { - "cpu": CPUAccelerator, - "gpu": GPUAccelerator, - "tpu": TPUAccelerator, - "ipu": IPUAccelerator, - } assert self._accelerator_flag is not None self._accelerator_flag = self._accelerator_flag.lower() if self._accelerator_flag not in ACCELERATORS: @@ -475,16 +475,10 @@ def _set_parallel_devices_and_init_accelerator(self) -> None: self.accelerator = accelerator_class() # type: ignore[abstract] if not self.accelerator.is_available(): - if isinstance(self.accelerator, GPUAccelerator): - hardware_str = "GPU" - elif isinstance(self.accelerator, IPUAccelerator): - hardware_str = "IPU" - else: - hardware_str = "TPU" + available_hardware = [acc_str for acc_str in list(ACCELERATORS) if ACCELERATORS[acc_str].is_available()] raise MisconfigurationException( - f"{self.accelerator.__class__.__qualname__} can not run on this hardware, " - + hardware_str - + "s are not available." + f"{self.accelerator.__class__.__qualname__} can not run on this hardware since {self.accelerator.hardware_name()}s are not available." + f" The following hardware is available and can be passed into `accelerator` argument of `Trainer`: {available_hardware}." ) self._set_devices_flag_if_auto_passed() diff --git a/tests/accelerators/test_accelerator_connector.py b/tests/accelerators/test_accelerator_connector.py index 8f23c07596564..078bd7101693c 100644 --- a/tests/accelerators/test_accelerator_connector.py +++ b/tests/accelerators/test_accelerator_connector.py @@ -481,7 +481,7 @@ def test_accelerator_cpu(_): with pytest.raises(MisconfigurationException, match="You requested gpu:"): trainer = Trainer(gpus=1) with pytest.raises( - MisconfigurationException, match="GPUAccelerator can not run on this hardware, GPUs are not available." + MisconfigurationException, match="GPUAccelerator can not run on this hardware since GPUs are not available." 
): trainer = Trainer(accelerator="gpu") with pytest.raises(MisconfigurationException, match="You requested gpu:"): diff --git a/tests/accelerators/test_tpu.py b/tests/accelerators/test_tpu.py index 327fb09af2188..1e74cde1f70c6 100644 --- a/tests/accelerators/test_tpu.py +++ b/tests/accelerators/test_tpu.py @@ -13,7 +13,6 @@ # limitations under the License import collections from copy import deepcopy -from unittest import mock from unittest.mock import patch import pytest diff --git a/tests/trainer/properties/test_estimated_stepping_batches.py b/tests/trainer/properties/test_estimated_stepping_batches.py index 320dd55692064..1416e4ff6764a 100644 --- a/tests/trainer/properties/test_estimated_stepping_batches.py +++ b/tests/trainer/properties/test_estimated_stepping_batches.py @@ -17,6 +17,7 @@ import pytest import torch from torch.utils.data import DataLoader +from unittest import mock from pytorch_lightning import Trainer from pytorch_lightning.callbacks.gradient_accumulation_scheduler import GradientAccumulationScheduler @@ -148,7 +149,8 @@ def test_num_stepping_batches_with_tpu(devices, estimated_steps): assert trainer.estimated_stepping_batches == estimated_steps -def test_num_stepping_batches_with_ipu(monkeypatch): +@mock.patch("pytorch_lightning.accelerators.ipu.IPUAccelerator.is_available", return_value=True) +def test_num_stepping_batches_with_ipu(mock_ipuacc_avail, monkeypatch): """Test stepping batches with IPU training which acts like DP.""" import pytorch_lightning.strategies.ipu as ipu From 250dc2c87b4725c648939a68b06322e62f6ec087 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 28 Feb 2022 23:31:06 +0000 Subject: [PATCH 10/21] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- tests/trainer/properties/test_estimated_stepping_batches.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/trainer/properties/test_estimated_stepping_batches.py b/tests/trainer/properties/test_estimated_stepping_batches.py index 1416e4ff6764a..7e1a13cddb203 100644 --- a/tests/trainer/properties/test_estimated_stepping_batches.py +++ b/tests/trainer/properties/test_estimated_stepping_batches.py @@ -13,11 +13,11 @@ # limitations under the License. 
import logging +from unittest import mock import pytest import torch from torch.utils.data import DataLoader -from unittest import mock from pytorch_lightning import Trainer from pytorch_lightning.callbacks.gradient_accumulation_scheduler import GradientAccumulationScheduler From d76b7e0ac1629b4ec1c29cdc5de3acbe01563f87 Mon Sep 17 00:00:00 2001 From: Jennifer Dai Date: Mon, 28 Feb 2022 15:36:50 -0800 Subject: [PATCH 11/21] format --- .../trainer/connectors/accelerator_connector.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py index 30d693d28b487..46e6f5e1d0ea1 100644 --- a/pytorch_lightning/trainer/connectors/accelerator_connector.py +++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py @@ -477,8 +477,10 @@ def _set_parallel_devices_and_init_accelerator(self) -> None: if not self.accelerator.is_available(): available_hardware = [acc_str for acc_str in list(ACCELERATORS) if ACCELERATORS[acc_str].is_available()] raise MisconfigurationException( - f"{self.accelerator.__class__.__qualname__} can not run on this hardware since {self.accelerator.hardware_name()}s are not available." - f" The following hardware is available and can be passed into `accelerator` argument of `Trainer`: {available_hardware}." + f"{self.accelerator.__class__.__qualname__} can not run on this hardware" + f" since {self.accelerator.hardware_name()}s are not available." + " The following hardware is available and can be passed into" + f" `accelerator` argument of `Trainer`: {available_hardware}." ) self._set_devices_flag_if_auto_passed() From cd920ac636a0766d92c4f4c6515d2c9e38846d89 Mon Sep 17 00:00:00 2001 From: jjenniferdai <89552168+jjenniferdai@users.noreply.github.com> Date: Tue, 1 Mar 2022 17:46:02 -0800 Subject: [PATCH 12/21] format Update tests/trainer/properties/test_estimated_stepping_batches.py Co-authored-by: ananthsub --- tests/trainer/properties/test_estimated_stepping_batches.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/trainer/properties/test_estimated_stepping_batches.py b/tests/trainer/properties/test_estimated_stepping_batches.py index 7e1a13cddb203..203aaff65ef53 100644 --- a/tests/trainer/properties/test_estimated_stepping_batches.py +++ b/tests/trainer/properties/test_estimated_stepping_batches.py @@ -150,7 +150,7 @@ def test_num_stepping_batches_with_tpu(devices, estimated_steps): @mock.patch("pytorch_lightning.accelerators.ipu.IPUAccelerator.is_available", return_value=True) -def test_num_stepping_batches_with_ipu(mock_ipuacc_avail, monkeypatch): +def test_num_stepping_batches_with_ipu(mock_ipu_acc_avail, monkeypatch): """Test stepping batches with IPU training which acts like DP.""" import pytorch_lightning.strategies.ipu as ipu From 1e031f4b580e51e734bee8fe90ab6427efaf0f93 Mon Sep 17 00:00:00 2001 From: jjenniferdai <89552168+jjenniferdai@users.noreply.github.com> Date: Tue, 1 Mar 2022 17:46:22 -0800 Subject: [PATCH 13/21] format Update tests/plugins/test_cluster_integration.py Co-authored-by: ananthsub --- tests/plugins/test_cluster_integration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/plugins/test_cluster_integration.py b/tests/plugins/test_cluster_integration.py index be01767f0f684..f482c1ff97438 100644 --- a/tests/plugins/test_cluster_integration.py +++ b/tests/plugins/test_cluster_integration.py @@ -59,7 +59,7 @@ def environment_combinations(): [DDPStrategy, 
DDPShardedStrategy, DDP2Strategy, pytest.param(DeepSpeedStrategy, marks=RunIf(deepspeed=True))], ) @mock.patch("pytorch_lightning.accelerators.gpu.GPUAccelerator.is_available", return_value=True) -def test_ranks_available_manual_strategy_selection(mock_gpuacc_available, strategy_cls): +def test_ranks_available_manual_strategy_selection(mock_gpu_acc_available, strategy_cls): """Test that the rank information is readily available after Trainer initialization.""" num_nodes = 2 for cluster, variables, expected in environment_combinations(): From 4c57f8a23e58b653cb61e7f155dc5ba994f96d87 Mon Sep 17 00:00:00 2001 From: jjenniferdai <89552168+jjenniferdai@users.noreply.github.com> Date: Tue, 1 Mar 2022 17:46:33 -0800 Subject: [PATCH 14/21] format Update tests/accelerators/test_ipu.py Co-authored-by: ananthsub --- tests/accelerators/test_ipu.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/accelerators/test_ipu.py b/tests/accelerators/test_ipu.py index 84cc9926cd5de..11d876a3ed176 100644 --- a/tests/accelerators/test_ipu.py +++ b/tests/accelerators/test_ipu.py @@ -99,7 +99,7 @@ def test_epoch_end(self, outputs) -> None: @pytest.mark.skipif(_IPU_AVAILABLE, reason="test requires non-IPU machine") @mock.patch("pytorch_lightning.accelerators.ipu.IPUAccelerator.is_available", return_value=True) -def test_fail_if_no_ipus(mock_ipuacc_avail, tmpdir): +def test_fail_if_no_ipus(mock_ipu_acc_avail, tmpdir): with pytest.raises(MisconfigurationException, match="IPU Accelerator requires IPU devices to run"): Trainer(default_root_dir=tmpdir, ipus=1) From 8b632b42bf4ef71eb352cf3e2b861ec3049f8ccc Mon Sep 17 00:00:00 2001 From: jjenniferdai <89552168+jjenniferdai@users.noreply.github.com> Date: Tue, 1 Mar 2022 17:46:40 -0800 Subject: [PATCH 15/21] format Update tests/accelerators/test_accelerator_connector.py Co-authored-by: ananthsub --- tests/accelerators/test_accelerator_connector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/accelerators/test_accelerator_connector.py b/tests/accelerators/test_accelerator_connector.py index 078bd7101693c..d25398c807bdc 100644 --- a/tests/accelerators/test_accelerator_connector.py +++ b/tests/accelerators/test_accelerator_connector.py @@ -918,7 +918,7 @@ def test_unsupported_tpu_choice(mock_devices, mock_tpuacc_avail): @mock.patch("pytorch_lightning.accelerators.ipu.IPUAccelerator.is_available", return_value=True) -def test_unsupported_ipu_choice(mock_ipuacc_avail, monkeypatch): +def test_unsupported_ipu_choice(mock_ipu_acc_avail, monkeypatch): import pytorch_lightning.strategies.ipu as ipu import pytorch_lightning.utilities.imports as imports From 21934843c018952c6f212a996f5d8f00591484a6 Mon Sep 17 00:00:00 2001 From: jjenniferdai <89552168+jjenniferdai@users.noreply.github.com> Date: Tue, 1 Mar 2022 17:46:48 -0800 Subject: [PATCH 16/21] format Update tests/accelerators/test_accelerator_connector.py Co-authored-by: ananthsub --- tests/accelerators/test_accelerator_connector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/accelerators/test_accelerator_connector.py b/tests/accelerators/test_accelerator_connector.py index d25398c807bdc..9b1dd31ba603b 100644 --- a/tests/accelerators/test_accelerator_connector.py +++ b/tests/accelerators/test_accelerator_connector.py @@ -902,7 +902,7 @@ def test_strategy_choice_ddp_cpu_slurm(device_count_mock, setup_distributed_mock @mock.patch("pytorch_lightning.accelerators.tpu.TPUAccelerator.is_available", return_value=True) 
@mock.patch("pytorch_lightning.accelerators.tpu.TPUAccelerator.parse_devices", return_value=8) -def test_unsupported_tpu_choice(mock_devices, mock_tpuacc_avail): +def test_unsupported_tpu_choice(mock_devices, mock_tpu_acc_avail): with pytest.raises(MisconfigurationException, match=r"accelerator='tpu', precision=64\)` is not implemented"): Trainer(accelerator="tpu", precision=64) From fadb69b3e9cb83c426ef383c65a8f76f0118a93b Mon Sep 17 00:00:00 2001 From: jjenniferdai <89552168+jjenniferdai@users.noreply.github.com> Date: Tue, 1 Mar 2022 17:47:13 -0800 Subject: [PATCH 17/21] format Update tests/accelerators/test_accelerator_connector.py Co-authored-by: ananthsub --- tests/accelerators/test_accelerator_connector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/accelerators/test_accelerator_connector.py b/tests/accelerators/test_accelerator_connector.py index 9b1dd31ba603b..47b2b35614a89 100644 --- a/tests/accelerators/test_accelerator_connector.py +++ b/tests/accelerators/test_accelerator_connector.py @@ -430,7 +430,7 @@ def test_ipython_compatible_dp_strategy_gpu(_, monkeypatch): @mock.patch("pytorch_lightning.accelerators.tpu.TPUAccelerator.is_available", return_value=True) @mock.patch("pytorch_lightning.accelerators.tpu.TPUAccelerator.parse_devices", return_value=8) -def test_ipython_compatible_strategy_tpu(mock_devices, mock_tpuacc_avail, monkeypatch): +def test_ipython_compatible_strategy_tpu(mock_devices, mock_tpu_acc_avail, monkeypatch): monkeypatch.setattr(pytorch_lightning.utilities, "_IS_INTERACTIVE", True) trainer = Trainer(accelerator="tpu") assert trainer.strategy.launcher is None or trainer.strategy.launcher.is_interactive_compatible From 31a54eb890a5b9ff434bd7168e3e0c137e77631b Mon Sep 17 00:00:00 2001 From: Jennifer Dai Date: Tue, 1 Mar 2022 18:08:09 -0800 Subject: [PATCH 18/21] req base, update name, lowercase --- pytorch_lightning/accelerators/accelerator.py | 5 +++++ pytorch_lightning/accelerators/cpu.py | 4 ++-- pytorch_lightning/accelerators/gpu.py | 4 ++-- pytorch_lightning/accelerators/ipu.py | 4 ++-- pytorch_lightning/accelerators/tpu.py | 4 ++-- .../trainer/connectors/accelerator_connector.py | 2 +- tests/accelerators/test_accelerator_connector.py | 4 ++++ 7 files changed, 18 insertions(+), 9 deletions(-) diff --git a/pytorch_lightning/accelerators/accelerator.py b/pytorch_lightning/accelerators/accelerator.py index cbd0e2309e311..42739f3fa3503 100644 --- a/pytorch_lightning/accelerators/accelerator.py +++ b/pytorch_lightning/accelerators/accelerator.py @@ -74,3 +74,8 @@ def auto_device_count() -> int: @abstractmethod def is_available() -> bool: """Detect if the hardware is available.""" + + @staticmethod + @abstractmethod + def accelerator_name() -> str: + """Name of the hardware.""" diff --git a/pytorch_lightning/accelerators/cpu.py b/pytorch_lightning/accelerators/cpu.py index 3492bf95014c0..26542dce8eb0c 100644 --- a/pytorch_lightning/accelerators/cpu.py +++ b/pytorch_lightning/accelerators/cpu.py @@ -64,6 +64,6 @@ def is_available() -> bool: return True @staticmethod - def hardware_name() -> str: + def accelerator_name() -> str: """Name of the hardware.""" - return "CPU" + return "cpu" diff --git a/pytorch_lightning/accelerators/gpu.py b/pytorch_lightning/accelerators/gpu.py index 794f0feab1711..14d2e877c53cf 100644 --- a/pytorch_lightning/accelerators/gpu.py +++ b/pytorch_lightning/accelerators/gpu.py @@ -94,9 +94,9 @@ def is_available() -> bool: return torch.cuda.device_count() > 0 @staticmethod - def hardware_name() -> str: + def 
accelerator_name() -> str: """Name of the hardware.""" - return "GPU" + return "gpu" def get_nvidia_gpu_stats(device: _DEVICE) -> Dict[str, float]: diff --git a/pytorch_lightning/accelerators/ipu.py b/pytorch_lightning/accelerators/ipu.py index eb9700a397b22..88e9c90253014 100644 --- a/pytorch_lightning/accelerators/ipu.py +++ b/pytorch_lightning/accelerators/ipu.py @@ -48,6 +48,6 @@ def is_available() -> bool: return _IPU_AVAILABLE @staticmethod - def hardware_name() -> str: + def accelerator_name() -> str: """Name of the hardware.""" - return "IPU" + return "ipu" diff --git a/pytorch_lightning/accelerators/tpu.py b/pytorch_lightning/accelerators/tpu.py index b8aaa62de9d9e..5c04cadd34d3d 100644 --- a/pytorch_lightning/accelerators/tpu.py +++ b/pytorch_lightning/accelerators/tpu.py @@ -66,6 +66,6 @@ def is_available() -> bool: return _TPU_AVAILABLE @staticmethod - def hardware_name() -> str: + def accelerator_name() -> str: """Name of the hardware.""" - return "TPU" + return "tpu" diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py index 46e6f5e1d0ea1..46f9c2ad1f841 100644 --- a/pytorch_lightning/trainer/connectors/accelerator_connector.py +++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py @@ -478,7 +478,7 @@ def _set_parallel_devices_and_init_accelerator(self) -> None: available_hardware = [acc_str for acc_str in list(ACCELERATORS) if ACCELERATORS[acc_str].is_available()] raise MisconfigurationException( f"{self.accelerator.__class__.__qualname__} can not run on this hardware" - f" since {self.accelerator.hardware_name()}s are not available." + f" since {self.accelerator.accelerator_name().upper()}s are not available." " The following hardware is available and can be passed into" f" `accelerator` argument of `Trainer`: {available_hardware}." 
) diff --git a/tests/accelerators/test_accelerator_connector.py b/tests/accelerators/test_accelerator_connector.py index 47b2b35614a89..11d93e078dc0a 100644 --- a/tests/accelerators/test_accelerator_connector.py +++ b/tests/accelerators/test_accelerator_connector.py @@ -357,6 +357,10 @@ def auto_device_count() -> int: def is_available() -> bool: return True + @staticmethod + def accelerator_name() -> str: + return "custom_acc_name" + class Prec(PrecisionPlugin): pass From 67301d6bcaa67f60944c0966ed0f184f3836a47c Mon Sep 17 00:00:00 2001 From: Jennifer Dai Date: Tue, 1 Mar 2022 18:22:11 -0800 Subject: [PATCH 19/21] update --- tests/accelerators/test_common.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/accelerators/test_common.py b/tests/accelerators/test_common.py index 473546696e1e3..39df5f601cb41 100644 --- a/tests/accelerators/test_common.py +++ b/tests/accelerators/test_common.py @@ -44,6 +44,10 @@ def auto_device_count(): def is_available(): return True + @staticmethod + def accelerator_name(): + return "custom_acc_name" + trainer = Trainer(accelerator=TestAccelerator(), devices=2, strategy="ddp") assert isinstance(trainer.accelerator, TestAccelerator) assert isinstance(trainer.strategy, DDPStrategy) From 70e9f712d0423d03b20275bf668a5c2b03098de1 Mon Sep 17 00:00:00 2001 From: Kaushik B Date: Wed, 2 Mar 2022 14:43:50 +0530 Subject: [PATCH 20/21] Address reviews --- pytorch_lightning/accelerators/accelerator.py | 4 ++-- pytorch_lightning/accelerators/cpu.py | 4 ++-- pytorch_lightning/accelerators/gpu.py | 4 ++-- pytorch_lightning/accelerators/ipu.py | 4 ++-- pytorch_lightning/accelerators/tpu.py | 4 ++-- .../trainer/connectors/accelerator_connector.py | 10 +++++----- tests/accelerators/test_accelerator_connector.py | 2 +- tests/accelerators/test_common.py | 2 +- 8 files changed, 17 insertions(+), 17 deletions(-) diff --git a/pytorch_lightning/accelerators/accelerator.py b/pytorch_lightning/accelerators/accelerator.py index 42739f3fa3503..ad0779d88b96c 100644 --- a/pytorch_lightning/accelerators/accelerator.py +++ b/pytorch_lightning/accelerators/accelerator.py @@ -77,5 +77,5 @@ def is_available() -> bool: @staticmethod @abstractmethod - def accelerator_name() -> str: - """Name of the hardware.""" + def name() -> str: + """Name of the Accelerator.""" diff --git a/pytorch_lightning/accelerators/cpu.py b/pytorch_lightning/accelerators/cpu.py index 26542dce8eb0c..a027e7db6e209 100644 --- a/pytorch_lightning/accelerators/cpu.py +++ b/pytorch_lightning/accelerators/cpu.py @@ -64,6 +64,6 @@ def is_available() -> bool: return True @staticmethod - def accelerator_name() -> str: - """Name of the hardware.""" + def name() -> str: + """Name of the Accelerator.""" return "cpu" diff --git a/pytorch_lightning/accelerators/gpu.py b/pytorch_lightning/accelerators/gpu.py index 14d2e877c53cf..529d067025f97 100644 --- a/pytorch_lightning/accelerators/gpu.py +++ b/pytorch_lightning/accelerators/gpu.py @@ -94,8 +94,8 @@ def is_available() -> bool: return torch.cuda.device_count() > 0 @staticmethod - def accelerator_name() -> str: - """Name of the hardware.""" + def name() -> str: + """Name of the Accelerator.""" return "gpu" diff --git a/pytorch_lightning/accelerators/ipu.py b/pytorch_lightning/accelerators/ipu.py index 88e9c90253014..1e8b2bc27fe57 100644 --- a/pytorch_lightning/accelerators/ipu.py +++ b/pytorch_lightning/accelerators/ipu.py @@ -48,6 +48,6 @@ def is_available() -> bool: return _IPU_AVAILABLE @staticmethod - def accelerator_name() -> str: - """Name of the hardware.""" + def 
name() -> str: + """Name of the Accelerator.""" return "ipu" diff --git a/pytorch_lightning/accelerators/tpu.py b/pytorch_lightning/accelerators/tpu.py index 5c04cadd34d3d..dfdc950e70124 100644 --- a/pytorch_lightning/accelerators/tpu.py +++ b/pytorch_lightning/accelerators/tpu.py @@ -66,6 +66,6 @@ def is_available() -> bool: return _TPU_AVAILABLE @staticmethod - def accelerator_name() -> str: - """Name of the hardware.""" + def name() -> str: + """Name of the Accelerator.""" return "tpu" diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py index 46f9c2ad1f841..6f84fd2601a69 100644 --- a/pytorch_lightning/trainer/connectors/accelerator_connector.py +++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py @@ -475,12 +475,12 @@ def _set_parallel_devices_and_init_accelerator(self) -> None: self.accelerator = accelerator_class() # type: ignore[abstract] if not self.accelerator.is_available(): - available_hardware = [acc_str for acc_str in list(ACCELERATORS) if ACCELERATORS[acc_str].is_available()] + available_accelerator = [acc_str for acc_str in list(ACCELERATORS) if ACCELERATORS[acc_str].is_available()] raise MisconfigurationException( - f"{self.accelerator.__class__.__qualname__} can not run on this hardware" - f" since {self.accelerator.accelerator_name().upper()}s are not available." - " The following hardware is available and can be passed into" - f" `accelerator` argument of `Trainer`: {available_hardware}." + f"{self.accelerator.__class__.__qualname__} can not run on your system" + f" since {self.accelerator.name().upper()}s are not available." + " The following accelerator(s) is available and can be passed into" + f" `accelerator` argument of `Trainer`: {available_accelerator}." ) self._set_devices_flag_if_auto_passed() diff --git a/tests/accelerators/test_accelerator_connector.py b/tests/accelerators/test_accelerator_connector.py index 11d93e078dc0a..ef6ac331c84be 100644 --- a/tests/accelerators/test_accelerator_connector.py +++ b/tests/accelerators/test_accelerator_connector.py @@ -358,7 +358,7 @@ def is_available() -> bool: return True @staticmethod - def accelerator_name() -> str: + def name() -> str: return "custom_acc_name" class Prec(PrecisionPlugin): diff --git a/tests/accelerators/test_common.py b/tests/accelerators/test_common.py index 39df5f601cb41..ef8780f6986da 100644 --- a/tests/accelerators/test_common.py +++ b/tests/accelerators/test_common.py @@ -45,7 +45,7 @@ def is_available(): return True @staticmethod - def accelerator_name(): + def name(): return "custom_acc_name" trainer = Trainer(accelerator=TestAccelerator(), devices=2, strategy="ddp") From 65b23b635395e324b62cad153a3ca2b247c93167 Mon Sep 17 00:00:00 2001 From: Kaushik B Date: Wed, 2 Mar 2022 14:58:39 +0530 Subject: [PATCH 21/21] Fix test --- tests/accelerators/test_accelerator_connector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/accelerators/test_accelerator_connector.py b/tests/accelerators/test_accelerator_connector.py index ef6ac331c84be..43c9bd5f69018 100644 --- a/tests/accelerators/test_accelerator_connector.py +++ b/tests/accelerators/test_accelerator_connector.py @@ -485,7 +485,7 @@ def test_accelerator_cpu(_): with pytest.raises(MisconfigurationException, match="You requested gpu:"): trainer = Trainer(gpus=1) with pytest.raises( - MisconfigurationException, match="GPUAccelerator can not run on this hardware since GPUs are not available." 
+ MisconfigurationException, match="GPUAccelerator can not run on your system since GPUs are not available." ): trainer = Trainer(accelerator="gpu") with pytest.raises(MisconfigurationException, match="You requested gpu:"):
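
A minimal usage sketch of the behaviour this patch series introduces (not part of the patches themselves; it assumes the final message wording from PATCH 20/21 and a CPU-only host): with the availability check added to `_set_parallel_devices_and_init_accelerator`, requesting an accelerator whose hardware is absent now fails at `Trainer` construction instead of later during `fit`.

    from pytorch_lightning import Trainer
    from pytorch_lightning.utilities.exceptions import MisconfigurationException

    try:
        Trainer(accelerator="gpu")  # hypothetical call on a machine with no CUDA devices
    except MisconfigurationException as err:
        print(err)
        # GPUAccelerator can not run on your system since GPUs are not available.
        # The following accelerator(s) is available and can be passed into
        # `accelerator` argument of `Trainer`: ['cpu']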