From 4b796131d90574210a17d5f010b95e71e44c2390 Mon Sep 17 00:00:00 2001
From: Carlos Mocholi
Date: Sat, 19 Feb 2022 17:13:02 +0100
Subject: [PATCH 1/3] The CLI argument parser is now pickleable

---
 pytorch_lightning/utilities/cli.py     |  8 +-------
 pytorch_lightning/utilities/imports.py |  2 +-
 requirements/extra.txt                 |  2 +-
 tests/utilities/test_cli.py            | 28 +++++++++++---------------
 4 files changed, 15 insertions(+), 25 deletions(-)

diff --git a/pytorch_lightning/utilities/cli.py b/pytorch_lightning/utilities/cli.py
index a9c06febbb9b4..5e39c0d5c95ed 100644
--- a/pytorch_lightning/utilities/cli.py
+++ b/pytorch_lightning/utilities/cli.py
@@ -415,8 +415,6 @@ def __init__(
         self.multifile = multifile
 
     def setup(self, trainer: Trainer, pl_module: LightningModule, stage: Optional[str] = None) -> None:
-        # save the config in `setup` because (1) we want it to save regardless of the trainer function run
-        # and we want to save before processes are spawned
         log_dir = trainer.log_dir  # this broadcasts the directory
         assert log_dir is not None
         config_path = os.path.join(log_dir, self.config_filename)
@@ -437,7 +435,7 @@ def setup(self, trainer: Trainer, pl_module: LightningModule, stage: Optional[st
 
         # save the file on rank 0
         if trainer.is_global_zero:
-            # save only on rank zero to avoid race conditions on DDP.
+            # save only on rank zero to avoid race conditions.
             # the `log_dir` needs to be created as we rely on the logger to do it usually
             # but it hasn't logged anything at this point
             fs.makedirs(log_dir, exist_ok=True)
@@ -445,10 +443,6 @@ def setup(self, trainer: Trainer, pl_module: LightningModule, stage: Optional[st
                 self.config, config_path, skip_none=False, overwrite=self.overwrite, multifile=self.multifile
             )
 
-    def __reduce__(self) -> Tuple[Type["SaveConfigCallback"], Tuple, Dict]:
-        # `ArgumentParser` is un-pickleable. Drop it
-        return self.__class__, (None, self.config, self.config_filename), {}
-
 
 class LightningCLI:
     """Implementation of a configurable command line tool for pytorch-lightning."""
diff --git a/pytorch_lightning/utilities/imports.py b/pytorch_lightning/utilities/imports.py
index 6c20d90e01646..4ab6cc9459cf5 100644
--- a/pytorch_lightning/utilities/imports.py
+++ b/pytorch_lightning/utilities/imports.py
@@ -105,7 +105,7 @@ def _compare_version(package: str, op: Callable, version: str, use_base_version:
 _HOROVOD_AVAILABLE = _module_available("horovod.torch")
 _HYDRA_AVAILABLE = _package_available("hydra")
 _HYDRA_EXPERIMENTAL_AVAILABLE = _module_available("hydra.experimental")
-_JSONARGPARSE_AVAILABLE = _package_available("jsonargparse") and _compare_version("jsonargparse", operator.ge, "4.0.0")
+_JSONARGPARSE_AVAILABLE = _package_available("jsonargparse") and _compare_version("jsonargparse", operator.ge, "4.2.1")
 _KINETO_AVAILABLE = _TORCH_GREATER_EQUAL_1_8_1 and torch.profiler.kineto_available()
 _NEPTUNE_AVAILABLE = _package_available("neptune")
 _NEPTUNE_GREATER_EQUAL_0_9 = _NEPTUNE_AVAILABLE and _compare_version("neptune", operator.ge, "0.9.0")
diff --git a/requirements/extra.txt b/requirements/extra.txt
index babaffca6280d..a164434ac33b2 100644
--- a/requirements/extra.txt
+++ b/requirements/extra.txt
@@ -5,6 +5,6 @@ horovod>=0.21.2 # no need to install with [pytorch] as pytorch is already insta
 torchtext>=0.8.*
 omegaconf>=2.0.5
 hydra-core>=1.0.5
-jsonargparse[signatures]>=4.0.4
+jsonargparse[signatures]>=4.2.1
 gcsfs>=2021.5.0
 rich>=10.2.2
diff --git a/tests/utilities/test_cli.py b/tests/utilities/test_cli.py
index 2803c0c4601c1..5b37adfa1398b 100644
--- a/tests/utilities/test_cli.py
+++ b/tests/utilities/test_cli.py
@@ -50,9 +50,8 @@
     SaveConfigCallback,
 )
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
-from pytorch_lightning.utilities.imports import _TORCHVISION_AVAILABLE
+from pytorch_lightning.utilities.imports import _TORCH_GREATER_EQUAL_1_8, _TORCHVISION_AVAILABLE
 from tests.helpers import BoringDataModule, BoringModel
-from tests.helpers.runif import RunIf
 from tests.helpers.utils import no_warning_call
 
 torchvision_version = version.parse("0")
@@ -577,20 +576,15 @@ def on_fit_start(self):
 
 
 @pytest.mark.parametrize("logger", (False, True))
-@pytest.mark.parametrize(
-    "trainer_kwargs",
-    (
-        # dict(strategy="ddp_spawn")
-        # dict(strategy="ddp")
-        # the previous accl_conn will choose singleDeviceStrategy for both strategy=ddp/ddp_spawn
-        # TODO revisit this test as it never worked with DDP or DDPSpawn
-        dict(strategy="single_device"),
-        pytest.param({"tpu_cores": 1}, marks=RunIf(tpu=True)),
-    ),
-)
-def test_cli_distributed_save_config_callback(tmpdir, logger, trainer_kwargs):
+@pytest.mark.parametrize("strategy", ("ddp_spawn", "ddp"))
+def test_cli_distributed_save_config_callback(tmpdir, logger, strategy):
+    if _TORCH_GREATER_EQUAL_1_8:
+        from torch.multiprocessing import ProcessRaisedException
+    else:
+        ProcessRaisedException = Exception
+
     with mock.patch("sys.argv", ["any.py", "fit"]), pytest.raises(
-        MisconfigurationException, match=r"Error on fit start"
+        (MisconfigurationException, ProcessRaisedException), match=r"Error on fit start"
     ):
         LightningCLI(
             EarlyExitTestModel,
@@ -599,7 +593,9 @@ def test_cli_distributed_save_config_callback(tmpdir, logger, trainer_kwargs):
                 "logger": logger,
                 "max_steps": 1,
                 "max_epochs": 1,
-                **trainer_kwargs,
+                "strategy": strategy,
+                "accelerator": "auto",
+                "devices": 1,
             },
         )
     if logger:

From 7cc04d462d04c63a92f972f99581d0907396a1b6 Mon Sep 17 00:00:00 2001
From: Carlos Mocholi
Date: Sat, 19 Feb 2022 17:22:51 +0100
Subject: [PATCH 2/3] 4.3.0

---
 pytorch_lightning/utilities/imports.py | 2 +-
 requirements/extra.txt                 | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/pytorch_lightning/utilities/imports.py b/pytorch_lightning/utilities/imports.py
index 4ab6cc9459cf5..d22fc797fcf16 100644
--- a/pytorch_lightning/utilities/imports.py
+++ b/pytorch_lightning/utilities/imports.py
@@ -105,7 +105,7 @@ def _compare_version(package: str, op: Callable, version: str, use_base_version:
 _HOROVOD_AVAILABLE = _module_available("horovod.torch")
 _HYDRA_AVAILABLE = _package_available("hydra")
 _HYDRA_EXPERIMENTAL_AVAILABLE = _module_available("hydra.experimental")
-_JSONARGPARSE_AVAILABLE = _package_available("jsonargparse") and _compare_version("jsonargparse", operator.ge, "4.2.1")
+_JSONARGPARSE_AVAILABLE = _package_available("jsonargparse") and _compare_version("jsonargparse", operator.ge, "4.3.0")
 _KINETO_AVAILABLE = _TORCH_GREATER_EQUAL_1_8_1 and torch.profiler.kineto_available()
 _NEPTUNE_AVAILABLE = _package_available("neptune")
 _NEPTUNE_GREATER_EQUAL_0_9 = _NEPTUNE_AVAILABLE and _compare_version("neptune", operator.ge, "0.9.0")
diff --git a/requirements/extra.txt b/requirements/extra.txt
index a164434ac33b2..f1162864de4de 100644
--- a/requirements/extra.txt
+++ b/requirements/extra.txt
@@ -5,6 +5,6 @@ horovod>=0.21.2 # no need to install with [pytorch] as pytorch is already insta
 torchtext>=0.8.*
 omegaconf>=2.0.5
 hydra-core>=1.0.5
-jsonargparse[signatures]>=4.2.1
+jsonargparse[signatures]>=4.3.0
 gcsfs>=2021.5.0
 rich>=10.2.2

From 6cbb39175a0e90e77bca8cdb69f6f82237ad5bd2 Mon Sep 17 00:00:00 2001
From: Carlos Mocholi
Date: Mon, 28 Feb 2022 13:52:11 +0100
Subject: [PATCH 3/3] Skip windows

---
 tests/utilities/test_cli.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/tests/utilities/test_cli.py b/tests/utilities/test_cli.py
index 8fa8b949f8b11..4db8cc8f83fc4 100644
--- a/tests/utilities/test_cli.py
+++ b/tests/utilities/test_cli.py
@@ -52,6 +52,7 @@
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _TORCH_GREATER_EQUAL_1_8, _TORCHVISION_AVAILABLE
 from tests.helpers import BoringDataModule, BoringModel
+from tests.helpers.runif import RunIf
 from tests.helpers.utils import no_warning_call
 
 torchvision_version = version.parse("0")
@@ -575,6 +576,7 @@ def on_fit_start(self):
         raise MisconfigurationException("Error on fit start")
 
 
+@RunIf(skip_windows=True)
 @pytest.mark.parametrize("logger", (False, True))
 @pytest.mark.parametrize("strategy", ("ddp_spawn", "ddp"))
 def test_cli_distributed_save_config_callback(tmpdir, logger, strategy):
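
A minimal, illustrative sketch (not part of the patch series) of the behaviour the first commit relies on: with the jsonargparse version required above, the parser itself can be pickled, so `SaveConfigCallback` no longer needs the removed `__reduce__` and should survive the pickling done by spawn-based strategies. Import paths follow this branch; the `--foo` argument is purely hypothetical, and jsonargparse>=4.3.0 must be installed for `LightningArgumentParser` to instantiate.

    import pickle

    from pytorch_lightning.utilities.cli import LightningArgumentParser, SaveConfigCallback

    # Build a parser and a parsed config roughly the way LightningCLI hands them to the
    # callback. "--foo" is only a placeholder for the real trainer/model arguments.
    parser = LightningArgumentParser()
    parser.add_argument("--foo", type=int, default=1)
    config = parser.parse_args([])

    # Previously the parser had to be dropped in __reduce__ because it could not be
    # pickled; now the whole callback should round-trip, which is what ddp_spawn needs
    # when callbacks are sent to the worker processes.
    callback = SaveConfigCallback(parser, config, "config.yaml")
    restored = pickle.loads(pickle.dumps(callback))
    assert restored.config_filename == callback.config_filename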