From 3aa2f0eddc5c211a6bbaa01ab5a3fcaa504f6aaf Mon Sep 17 00:00:00 2001 From: Ning Li Date: Mon, 16 Aug 2021 21:09:05 -0700 Subject: [PATCH 01/36] add prepare_data_per_node property to datahooks --- pytorch_lightning/core/hooks.py | 15 +++++++++++++++ tests/models/test_hooks.py | 5 +++++ 2 files changed, 20 insertions(+) diff --git a/pytorch_lightning/core/hooks.py b/pytorch_lightning/core/hooks.py index 3b73ae418ffe2..7710d724b7ffd 100644 --- a/pytorch_lightning/core/hooks.py +++ b/pytorch_lightning/core/hooks.py @@ -372,6 +372,9 @@ def configure_sharded_model(self) -> None: class DataHooks: """Hooks to be used for data related stuff.""" + def __init__(self) -> None: + self._prepare_data_per_node: bool = True + def prepare_data(self) -> None: """ Use this to download and prepare data. @@ -805,6 +808,18 @@ def on_after_batch_transfer(self, batch, dataloader_idx): """ return batch + @property + def prepare_data_per_node(self) -> bool: + """ + If True, each LOCAL_RANK=0 will call prepare data. + Otherwise only NODE_RANK=0, LOCAL_RANK=0 will prepare data. + """ + return self._prepare_data_per_node + + @prepare_data_per_node.setter + def prepare_data_per_node(self, prepare_data_per_node: bool) -> None: + self._prepare_data_per_node = prepare_data_per_node + class CheckpointHooks: """Hooks to be used with Checkpointing.""" diff --git a/tests/models/test_hooks.py b/tests/models/test_hooks.py index 990178b09d07f..4a948f3a7d738 100644 --- a/tests/models/test_hooks.py +++ b/tests/models/test_hooks.py @@ -821,6 +821,7 @@ def test_trainer_datamodule_hook_system(tmpdir): class HookedDataModule(BoringDataModule): def __init__(self, called): super().__init__() + self._prepare_data_per_node = True def call(hook, fn, *args, **kwargs): out = fn(*args, **kwargs) @@ -836,6 +837,10 @@ def call(hook, fn, *args, **kwargs): attr = getattr(self, h) setattr(self, h, partial(call, h, attr)) + @property + def prepare_data_per_node(self) -> bool: + return True + model = BoringModel() batches = 2 trainer = Trainer( From cfad7ad102ef315ecb62de315781007b548cdcfc Mon Sep 17 00:00:00 2001 From: Ning Li Date: Mon, 16 Aug 2021 21:26:28 -0700 Subject: [PATCH 02/36] add __init__() --- pytorch_lightning/core/hooks.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pytorch_lightning/core/hooks.py b/pytorch_lightning/core/hooks.py index 7710d724b7ffd..5cf8c6b270873 100644 --- a/pytorch_lightning/core/hooks.py +++ b/pytorch_lightning/core/hooks.py @@ -373,6 +373,7 @@ class DataHooks: """Hooks to be used for data related stuff.""" def __init__(self) -> None: + super().__init__() self._prepare_data_per_node: bool = True def prepare_data(self) -> None: From 9b246b555c088417db0deaa3f39eceb919aafbd7 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Mon, 16 Aug 2021 21:37:25 -0700 Subject: [PATCH 03/36] update prepare_data_per_node in data_connector --- pytorch_lightning/trainer/connectors/data_connector.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/trainer/connectors/data_connector.py b/pytorch_lightning/trainer/connectors/data_connector.py index 5c98eb68783a9..2bead2837cbef 100644 --- a/pytorch_lightning/trainer/connectors/data_connector.py +++ b/pytorch_lightning/trainer/connectors/data_connector.py @@ -36,7 +36,7 @@ def on_trainer_init( prepare_data_per_node: bool, ) -> None: self.trainer.datamodule = None - self.trainer.prepare_data_per_node = prepare_data_per_node + self.prepare_data_per_node = prepare_data_per_node if not isinstance(check_val_every_n_epoch, int): raise 
MisconfigurationException( @@ -81,7 +81,7 @@ def can_prepare_data(self): if self.trainer.datamodule is not None and is_overridden("prepare_data", self.trainer.datamodule): should_call_dm_prepare_data = not self.trainer.datamodule._has_prepared_data - if self.trainer.prepare_data_per_node: + if self.prepare_data_per_node: return self.trainer.local_rank == 0 and should_call_dm_prepare_data return self.trainer.node_rank == 0 and self.trainer.local_rank == 0 and should_call_dm_prepare_data From e8da75ee823f29eb27fd76dba7f5abdda904f382 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Mon, 16 Aug 2021 23:09:51 -0700 Subject: [PATCH 04/36] mark 'prepare_data_per_node' optional in trainer --- pytorch_lightning/trainer/trainer.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index becf4d4cf2c4d..43c79c12cc6b9 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -151,7 +151,7 @@ def __init__( replace_sampler_ddp: bool = True, terminate_on_nan: bool = False, auto_scale_batch_size: Union[str, bool] = False, - prepare_data_per_node: bool = True, + prepare_data_per_node: Optional[bool] = None, plugins: Optional[Union[List[Union[Plugin, ClusterEnvironment, str]], Plugin, ClusterEnvironment, str]] = None, amp_backend: str = "native", amp_level: str = "O2", @@ -422,6 +422,14 @@ def __init__( self.optimizer_connector.on_trainer_init() # init data flags + if prepare_data_per_node is None: + prepare_data_per_node = True + else: + rank_zero_warn( + "Setting prepare_data_per_node with the trainer flag is deprecated and will be removed in v1.7.0!" + "Please use the property in the DataHooks for setting prepare_data_per_node" + ) + self.data_connector.on_trainer_init( check_val_every_n_epoch, reload_dataloaders_every_n_epochs, From f3a30f85bb1c0043b8719ebc55799fef0dbde593 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Tue, 17 Aug 2021 00:48:56 -0700 Subject: [PATCH 05/36] update failed test_datamodules --- tests/core/test_datamodules.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/core/test_datamodules.py b/tests/core/test_datamodules.py index 5c33a2f68acf0..d7a0ae78e5aee 100644 --- a/tests/core/test_datamodules.py +++ b/tests/core/test_datamodules.py @@ -41,7 +41,7 @@ def test_can_prepare_data(local_rank, node_rank): # 1 no DM # prepare_data_per_node = True # local rank = 0 (True) - trainer.prepare_data_per_node = True + trainer.data_connector.prepare_data_per_node = True dm.random_full = None dm._has_prepared_data = False @@ -66,7 +66,7 @@ def test_can_prepare_data(local_rank, node_rank): # global rank = 0 (True) dm.random_full = None dm._has_prepared_data = False - trainer.prepare_data_per_node = False + trainer.data_connector.prepare_data_per_node = False node_rank.return_value = 0 local_rank.return_value = 0 assert trainer.data_connector.can_prepare_data() @@ -94,7 +94,7 @@ def test_can_prepare_data(local_rank, node_rank): # 2 dm # prepar per node = True # local rank = 0 (True) - trainer.prepare_data_per_node = True + trainer.data_connector.prepare_data_per_node = True local_rank.return_value = 0 # is_overridden prepare data = True From 3244cd2906fa9666dc17058dfb7ca50e9c303373 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Tue, 17 Aug 2021 17:15:31 -0700 Subject: [PATCH 06/36] move deprecation warning in data_connector --- .../trainer/connectors/data_connector.py | 14 +++++++++++--- pytorch_lightning/trainer/trainer.py | 10 ++-------- 
tests/core/test_datamodules.py | 6 +++--- 3 files changed, 16 insertions(+), 14 deletions(-) diff --git a/pytorch_lightning/trainer/connectors/data_connector.py b/pytorch_lightning/trainer/connectors/data_connector.py index 2bead2837cbef..030809bda8407 100644 --- a/pytorch_lightning/trainer/connectors/data_connector.py +++ b/pytorch_lightning/trainer/connectors/data_connector.py @@ -33,10 +33,18 @@ def on_trainer_init( check_val_every_n_epoch: int, reload_dataloaders_every_n_epochs: int, reload_dataloaders_every_epoch: bool, - prepare_data_per_node: bool, + prepare_data_per_node: Optional[bool] = None, ) -> None: self.trainer.datamodule = None - self.prepare_data_per_node = prepare_data_per_node + + if prepare_data_per_node is None: + prepare_data_per_node = True + else: + rank_zero_deprecation( + "Setting prepare_data_per_node with the trainer flag is deprecated and will be removed in v1.7.0!" + "Please use `~pytorch_lightning.core.datamodule.prepare_data_per_node` instead. " + ) + self.trainer.prepare_data_per_node = prepare_data_per_node if not isinstance(check_val_every_n_epoch, int): raise MisconfigurationException( @@ -81,7 +89,7 @@ def can_prepare_data(self): if self.trainer.datamodule is not None and is_overridden("prepare_data", self.trainer.datamodule): should_call_dm_prepare_data = not self.trainer.datamodule._has_prepared_data - if self.prepare_data_per_node: + if self.trainer.datamodule is not None and self.trainer.datamodule.prepare_data_per_node: return self.trainer.local_rank == 0 and should_call_dm_prepare_data return self.trainer.node_rank == 0 and self.trainer.local_rank == 0 and should_call_dm_prepare_data diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index 43c79c12cc6b9..288ae0190d9d1 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -242,6 +242,8 @@ def __init__( prepare_data_per_node: If True, each LOCAL_RANK=0 will call prepare data. Otherwise only NODE_RANK=0, LOCAL_RANK=0 will prepare data + Deprecated in v1.5.0 and will be removed in v1.7.0 + Please use `~pytorch_lightning.core.datamodule.prepare_data_per_node` instead process_position: orders the progress bar when running multiple models on same machine. @@ -422,14 +424,6 @@ def __init__( self.optimizer_connector.on_trainer_init() # init data flags - if prepare_data_per_node is None: - prepare_data_per_node = True - else: - rank_zero_warn( - "Setting prepare_data_per_node with the trainer flag is deprecated and will be removed in v1.7.0!" 
- "Please use the property in the DataHooks for setting prepare_data_per_node" - ) - self.data_connector.on_trainer_init( check_val_every_n_epoch, reload_dataloaders_every_n_epochs, diff --git a/tests/core/test_datamodules.py b/tests/core/test_datamodules.py index d7a0ae78e5aee..8ab8bcbe87352 100644 --- a/tests/core/test_datamodules.py +++ b/tests/core/test_datamodules.py @@ -41,7 +41,7 @@ def test_can_prepare_data(local_rank, node_rank): # 1 no DM # prepare_data_per_node = True # local rank = 0 (True) - trainer.data_connector.prepare_data_per_node = True + dm.prepare_data_per_node = True dm.random_full = None dm._has_prepared_data = False @@ -66,7 +66,7 @@ def test_can_prepare_data(local_rank, node_rank): # global rank = 0 (True) dm.random_full = None dm._has_prepared_data = False - trainer.data_connector.prepare_data_per_node = False + dm.prepare_data_per_node = False node_rank.return_value = 0 local_rank.return_value = 0 assert trainer.data_connector.can_prepare_data() @@ -94,7 +94,7 @@ def test_can_prepare_data(local_rank, node_rank): # 2 dm # prepar per node = True # local rank = 0 (True) - trainer.data_connector.prepare_data_per_node = True + dm.prepare_data_per_node = True local_rank.return_value = 0 # is_overridden prepare data = True From fac2d0b2f3f9fe4e3fe2e94afcafd48f9c1f7e52 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Tue, 17 Aug 2021 17:30:18 -0700 Subject: [PATCH 07/36] update test_remove_1-7.py --- pytorch_lightning/trainer/connectors/data_connector.py | 2 +- tests/deprecated_api/test_remove_1-7.py | 9 ++++++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/trainer/connectors/data_connector.py b/pytorch_lightning/trainer/connectors/data_connector.py index 030809bda8407..f7b9ac732943c 100644 --- a/pytorch_lightning/trainer/connectors/data_connector.py +++ b/pytorch_lightning/trainer/connectors/data_connector.py @@ -41,7 +41,7 @@ def on_trainer_init( prepare_data_per_node = True else: rank_zero_deprecation( - "Setting prepare_data_per_node with the trainer flag is deprecated and will be removed in v1.7.0!" + "Setting `prepare_data_per_node` with the trainer flag is deprecated and will be removed in v1.7.0!" "Please use `~pytorch_lightning.core.datamodule.prepare_data_per_node` instead. " ) self.trainer.prepare_data_per_node = prepare_data_per_node diff --git a/tests/deprecated_api/test_remove_1-7.py b/tests/deprecated_api/test_remove_1-7.py index d836f1427a110..fdc52f18b983a 100644 --- a/tests/deprecated_api/test_remove_1-7.py +++ b/tests/deprecated_api/test_remove_1-7.py @@ -15,7 +15,7 @@ import pytest -from pytorch_lightning import LightningDataModule +from pytorch_lightning import LightningDataModule, Trainer from tests.deprecated_api import _soft_unimport_module from tests.helpers import BoringModel from tests.helpers.datamodules import MNISTDataModule @@ -80,3 +80,10 @@ def test_v1_7_0_datamodule_dims_property(tmpdir): _ = dm.dims with pytest.deprecated_call(match=r"DataModule property `dims` was deprecated in v1.5"): _ = LightningDataModule(dims=(1, 1, 1)) + + +def test_v1_7_0_trainer_prepare_data_per_node(tmpdir): + with pytest.deprecated_call( + match="Setting `prepare_data_per_node` with the trainer flag is deprecated and will be removed in v1.7.0!" 
+ ): + _ = Trainer(prepare_data_per_node=True) From c53fa4acc53aaa7f7fc802d87b68ae37949046b5 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Tue, 17 Aug 2021 22:46:40 -0700 Subject: [PATCH 08/36] update when datamodule is not defined --- pytorch_lightning/core/hooks.py | 3 +++ .../trainer/connectors/data_connector.py | 12 +++++++++--- tests/core/test_datamodules.py | 1 - tests/deprecated_api/test_remove_1-7.py | 2 +- 4 files changed, 13 insertions(+), 5 deletions(-) diff --git a/pytorch_lightning/core/hooks.py b/pytorch_lightning/core/hooks.py index 5cf8c6b270873..16771bb1d59c6 100644 --- a/pytorch_lightning/core/hooks.py +++ b/pytorch_lightning/core/hooks.py @@ -409,6 +409,9 @@ def prepare_data(self): # call on GLOBAL_RANK=0 (great for shared file systems) Trainer(prepare_data_per_node=False) + Note: Setting `prepare_data_per_node` with the trainer flag is deprecated and will be removed in v1.7.0. + Please set `prepare_data_per_node` in LightningDataModule directly instead. + This is called before requesting the dataloaders: .. code-block:: python diff --git a/pytorch_lightning/trainer/connectors/data_connector.py b/pytorch_lightning/trainer/connectors/data_connector.py index f7b9ac732943c..d86b5273f05e2 100644 --- a/pytorch_lightning/trainer/connectors/data_connector.py +++ b/pytorch_lightning/trainer/connectors/data_connector.py @@ -41,8 +41,8 @@ def on_trainer_init( prepare_data_per_node = True else: rank_zero_deprecation( - "Setting `prepare_data_per_node` with the trainer flag is deprecated and will be removed in v1.7.0!" - "Please use `~pytorch_lightning.core.datamodule.prepare_data_per_node` instead. " + "Setting `prepare_data_per_node` with the trainer flag is deprecated and will be removed in v1.7.0! " + "Please set `prepare_data_per_node` in LightningDataModule directly instead. 
" ) self.trainer.prepare_data_per_node = prepare_data_per_node @@ -89,10 +89,16 @@ def can_prepare_data(self): if self.trainer.datamodule is not None and is_overridden("prepare_data", self.trainer.datamodule): should_call_dm_prepare_data = not self.trainer.datamodule._has_prepared_data - if self.trainer.datamodule is not None and self.trainer.datamodule.prepare_data_per_node: + if self._prepare_data_per_node(): return self.trainer.local_rank == 0 and should_call_dm_prepare_data return self.trainer.node_rank == 0 and self.trainer.local_rank == 0 and should_call_dm_prepare_data + def _prepare_data_per_node(self) -> bool: + # temporary private util function until `prepare_data_per_node` is fully migrated to LightningDataModule + if self.trainer.datamodule is None: + return self.trainer.prepare_data_per_node + return self.trainer.datamodule.prepare_data_per_node + def attach_data( self, model: "pl.LightningModule", diff --git a/tests/core/test_datamodules.py b/tests/core/test_datamodules.py index 8ab8bcbe87352..32df1b565061a 100644 --- a/tests/core/test_datamodules.py +++ b/tests/core/test_datamodules.py @@ -41,7 +41,6 @@ def test_can_prepare_data(local_rank, node_rank): # 1 no DM # prepare_data_per_node = True # local rank = 0 (True) - dm.prepare_data_per_node = True dm.random_full = None dm._has_prepared_data = False diff --git a/tests/deprecated_api/test_remove_1-7.py b/tests/deprecated_api/test_remove_1-7.py index fdc52f18b983a..7581bf2b0c142 100644 --- a/tests/deprecated_api/test_remove_1-7.py +++ b/tests/deprecated_api/test_remove_1-7.py @@ -86,4 +86,4 @@ def test_v1_7_0_trainer_prepare_data_per_node(tmpdir): with pytest.deprecated_call( match="Setting `prepare_data_per_node` with the trainer flag is deprecated and will be removed in v1.7.0!" ): - _ = Trainer(prepare_data_per_node=True) + _ = Trainer(prepare_data_per_node=False) From e5107a4700b4316b44c18124b7b4229a966a2fd0 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Tue, 17 Aug 2021 23:31:12 -0700 Subject: [PATCH 09/36] update hook docs --- pytorch_lightning/core/hooks.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/core/hooks.py b/pytorch_lightning/core/hooks.py index 16771bb1d59c6..6d9b9caf17bf4 100644 --- a/pytorch_lightning/core/hooks.py +++ b/pytorch_lightning/core/hooks.py @@ -409,8 +409,9 @@ def prepare_data(self): # call on GLOBAL_RANK=0 (great for shared file systems) Trainer(prepare_data_per_node=False) - Note: Setting `prepare_data_per_node` with the trainer flag is deprecated and will be removed in v1.7.0. - Please set `prepare_data_per_node` in LightningDataModule directly instead. + Note: + Setting ``prepare_data_per_node`` with the trainer flag is deprecated and will be removed in v1.7.0. + Please set ``prepare_data_per_node`` in LightningDataModule directly instead. This is called before requesting the dataloaders: From f9bd34213b988198bc122c9d64ab5b7400a576c7 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Tue, 17 Aug 2021 23:37:37 -0700 Subject: [PATCH 10/36] update CHANGELOG.md --- CHANGELOG.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c90af8b9c97cd..8541728c01621 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -114,9 +114,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). 
- -- - - - ### Removed From f9706b2bfc1e4acb49e73babc06c1c3bd519f6d4 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Wed, 18 Aug 2021 12:52:09 -0700 Subject: [PATCH 11/36] refactor `prepare_data()` in data_connector --- .../trainer/connectors/data_connector.py | 50 +++++++++++-------- tests/core/test_datamodules.py | 10 +--- tests/models/test_hooks.py | 4 -- 3 files changed, 32 insertions(+), 32 deletions(-) diff --git a/pytorch_lightning/trainer/connectors/data_connector.py b/pytorch_lightning/trainer/connectors/data_connector.py index d86b5273f05e2..c965f79845b06 100644 --- a/pytorch_lightning/trainer/connectors/data_connector.py +++ b/pytorch_lightning/trainer/connectors/data_connector.py @@ -42,7 +42,7 @@ def on_trainer_init( else: rank_zero_deprecation( "Setting `prepare_data_per_node` with the trainer flag is deprecated and will be removed in v1.7.0! " - "Please set `prepare_data_per_node` in LightningDataModule directly instead. " + "Please set `prepare_data_per_node` in LightningDataModule or LightningModule directly instead. " ) self.trainer.prepare_data_per_node = prepare_data_per_node @@ -78,27 +78,37 @@ def get_profiled_train_dataloader(self, train_dataloader) -> Iterable: def prepare_data(self) -> None: # on multi-gpu jobs we only want to manipulate (download, etc) on node_rank=0, local_rank=0 # or in the case where each node needs to do its own manipulation in which case just local_rank=0 - if self.can_prepare_data(): - if self.trainer.datamodule is not None: - self.trainer.datamodule.prepare_data() - self.trainer.call_hook("prepare_data") + should_call_once_per_node = self.trainer.local_rank == 0 + should_call_once_in_total = self.trainer.local_rank == 0 and self.trainer.node_rank == 0 + + # handle datamodule prepare data: + # check for prepare_data_per_node & datamodule lifecycle properties before calling datamodule.prepare_data + if self.trainer.datamodule is not None: + if is_overridden("prepare_data", self.trainer.datamodule) and ( + not self.trainer.datamodule._has_prepared_data + ): + dm_prepare_data_per_node = ( + self.trainer.prepare_data_per_node + if self.trainer.datamodule is None + else self.trainer.datamodule.prepare_data_per_node + ) + if (dm_prepare_data_per_node and should_call_once_per_node) or ( + not dm_prepare_data_per_node and should_call_once_in_total + ): + self.trainer.datamodule.prepare_data() + # handle lightning module prepare data: + # check for prepare_data_per_node before calling lightning_module.prepare_data + lm_prepare_data_per_node = ( + self.trainer.prepare_data_per_node + if self.trainer.lightning_module is None + else self.trainer.lightning_module.prepare_data_per_node + ) + if (lm_prepare_data_per_node and should_call_once_per_node) or ( + not lm_prepare_data_per_node and should_call_once_in_total + ): + self.trainer.lightning_module.prepare_data() self.trainer._is_data_prepared = True - def can_prepare_data(self): - should_call_dm_prepare_data = True - if self.trainer.datamodule is not None and is_overridden("prepare_data", self.trainer.datamodule): - should_call_dm_prepare_data = not self.trainer.datamodule._has_prepared_data - - if self._prepare_data_per_node(): - return self.trainer.local_rank == 0 and should_call_dm_prepare_data - return self.trainer.node_rank == 0 and self.trainer.local_rank == 0 and should_call_dm_prepare_data - - def _prepare_data_per_node(self) -> bool: - # temporary private util function until `prepare_data_per_node` is fully migrated to LightningDataModule - if self.trainer.datamodule is None: - return 
self.trainer.prepare_data_per_node - return self.trainer.datamodule.prepare_data_per_node - def attach_data( self, model: "pl.LightningModule", diff --git a/tests/core/test_datamodules.py b/tests/core/test_datamodules.py index 32df1b565061a..409557adf4ddd 100644 --- a/tests/core/test_datamodules.py +++ b/tests/core/test_datamodules.py @@ -46,7 +46,6 @@ def test_can_prepare_data(local_rank, node_rank): dm._has_prepared_data = False local_rank.return_value = 0 assert trainer.local_rank == 0 - assert trainer.data_connector.can_prepare_data() trainer.data_connector.prepare_data() assert dm.random_full is not None @@ -56,7 +55,6 @@ def test_can_prepare_data(local_rank, node_rank): dm._has_prepared_data = False local_rank.return_value = 1 assert trainer.local_rank == 1 - assert not trainer.data_connector.can_prepare_data() trainer.data_connector.prepare_data() assert dm.random_full is None @@ -66,9 +64,9 @@ def test_can_prepare_data(local_rank, node_rank): dm.random_full = None dm._has_prepared_data = False dm.prepare_data_per_node = False + trainer.lightning_module.prepare_data_per_node = False node_rank.return_value = 0 local_rank.return_value = 0 - assert trainer.data_connector.can_prepare_data() trainer.data_connector.prepare_data() assert dm.random_full is not None @@ -78,14 +76,12 @@ def test_can_prepare_data(local_rank, node_rank): dm._has_prepared_data = False node_rank.return_value = 1 local_rank.return_value = 0 - assert not trainer.data_connector.can_prepare_data() trainer.data_connector.prepare_data() assert dm.random_full is None node_rank.return_value = 0 local_rank.return_value = 1 - assert not trainer.data_connector.can_prepare_data() trainer.data_connector.prepare_data() assert dm.random_full is None @@ -94,23 +90,21 @@ def test_can_prepare_data(local_rank, node_rank): # prepar per node = True # local rank = 0 (True) dm.prepare_data_per_node = True + trainer.lightning_module.prepare_data_per_node = True local_rank.return_value = 0 # is_overridden prepare data = True # has been called # False dm._has_prepared_data = True - assert not trainer.data_connector.can_prepare_data() # has not been called # True dm._has_prepared_data = False - assert trainer.data_connector.can_prepare_data() # is_overridden prepare data = False # True dm.prepare_data = None - assert trainer.data_connector.can_prepare_data() def test_hooks_no_recursion_error(): diff --git a/tests/models/test_hooks.py b/tests/models/test_hooks.py index 4a948f3a7d738..ddcde04fec991 100644 --- a/tests/models/test_hooks.py +++ b/tests/models/test_hooks.py @@ -837,10 +837,6 @@ def call(hook, fn, *args, **kwargs): attr = getattr(self, h) setattr(self, h, partial(call, h, attr)) - @property - def prepare_data_per_node(self) -> bool: - return True - model = BoringModel() batches = 2 trainer = Trainer( From 78a81c8111748604ff53ed5487f207ef24b09683 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Wed, 18 Aug 2021 13:09:02 -0700 Subject: [PATCH 12/36] use `has_prepared_data` property in datamodule --- pytorch_lightning/trainer/connectors/data_connector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytorch_lightning/trainer/connectors/data_connector.py b/pytorch_lightning/trainer/connectors/data_connector.py index c965f79845b06..df7546d901dc4 100644 --- a/pytorch_lightning/trainer/connectors/data_connector.py +++ b/pytorch_lightning/trainer/connectors/data_connector.py @@ -85,7 +85,7 @@ def prepare_data(self) -> None: # check for prepare_data_per_node & datamodule lifecycle properties before calling 
datamodule.prepare_data if self.trainer.datamodule is not None: if is_overridden("prepare_data", self.trainer.datamodule) and ( - not self.trainer.datamodule._has_prepared_data + not self.trainer.datamodule.has_prepared_data ): dm_prepare_data_per_node = ( self.trainer.prepare_data_per_node From 7db98ce045e3bb2c2c0b1d5e5f87fe56d0dc54a9 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Wed, 18 Aug 2021 13:49:09 -0700 Subject: [PATCH 13/36] minor - comment update --- pytorch_lightning/core/hooks.py | 2 +- pytorch_lightning/trainer/trainer.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/core/hooks.py b/pytorch_lightning/core/hooks.py index 6d9b9caf17bf4..63e764bc607be 100644 --- a/pytorch_lightning/core/hooks.py +++ b/pytorch_lightning/core/hooks.py @@ -411,7 +411,7 @@ def prepare_data(self): Note: Setting ``prepare_data_per_node`` with the trainer flag is deprecated and will be removed in v1.7.0. - Please set ``prepare_data_per_node`` in LightningDataModule directly instead. + Please set `prepare_data_per_node` in LightningDataModule or LightningModule directly instead. This is called before requesting the dataloaders: diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index 288ae0190d9d1..b2254fbe7aec3 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -243,7 +243,7 @@ def __init__( prepare_data_per_node: If True, each LOCAL_RANK=0 will call prepare data. Otherwise only NODE_RANK=0, LOCAL_RANK=0 will prepare data Deprecated in v1.5.0 and will be removed in v1.7.0 - Please use `~pytorch_lightning.core.datamodule.prepare_data_per_node` instead + Please set `prepare_data_per_node` in LightningDataModule or LightningModule directly instead. process_position: orders the progress bar when running multiple models on same machine. From 8e77b04ed31001efc099ffb5f4b36c504b0e3d5a Mon Sep 17 00:00:00 2001 From: Ning Li Date: Wed, 18 Aug 2021 14:45:44 -0700 Subject: [PATCH 14/36] add MisconfigurationException --- CHANGELOG.md | 1 + .../trainer/connectors/data_connector.py | 55 ++++++++++--------- 2 files changed, 30 insertions(+), 26 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8541728c01621..3f7ae4e0b1f14 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -99,6 +99,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). - `Trainer.request_dataloader` now takes a `RunningStage` enum instance ([#8858](https://github.com/PyTorchLightning/pytorch-lightning/pull/8858)) +- Deprecated `prepare_data_per_node` flag on Trainer and set it as a property of DataHooks, accessible in the LightningModule and LightningDataModule [#8958](https://github.com/PyTorchLightning/pytorch-lightning/pull/8958) ### Deprecated diff --git a/pytorch_lightning/trainer/connectors/data_connector.py b/pytorch_lightning/trainer/connectors/data_connector.py index df7546d901dc4..de9ff2402ae50 100644 --- a/pytorch_lightning/trainer/connectors/data_connector.py +++ b/pytorch_lightning/trainer/connectors/data_connector.py @@ -37,9 +37,7 @@ def on_trainer_init( ) -> None: self.trainer.datamodule = None - if prepare_data_per_node is None: - prepare_data_per_node = True - else: + if prepare_data_per_node is not None: rank_zero_deprecation( "Setting `prepare_data_per_node` with the trainer flag is deprecated and will be removed in v1.7.0! " "Please set `prepare_data_per_node` in LightningDataModule or LightningModule directly instead. 
" @@ -78,36 +76,41 @@ def get_profiled_train_dataloader(self, train_dataloader) -> Iterable: def prepare_data(self) -> None: # on multi-gpu jobs we only want to manipulate (download, etc) on node_rank=0, local_rank=0 # or in the case where each node needs to do its own manipulation in which case just local_rank=0 - should_call_once_per_node = self.trainer.local_rank == 0 - should_call_once_in_total = self.trainer.local_rank == 0 and self.trainer.node_rank == 0 + local_rank_zero = self.trainer.local_rank == 0 + global_rank_zero = self.trainer.local_rank == 0 and self.trainer.node_rank == 0 # handle datamodule prepare data: # check for prepare_data_per_node & datamodule lifecycle properties before calling datamodule.prepare_data - if self.trainer.datamodule is not None: - if is_overridden("prepare_data", self.trainer.datamodule) and ( - not self.trainer.datamodule.has_prepared_data + if self.trainer.datamodule is not None and (not self.trainer.datamodule.has_prepared_data): + + dm_prepare_data_per_node = self.trainer.datamodule.prepare_data_per_node + if (self.trainer.prepare_data_per_node is not None) and ( + dm_prepare_data_per_node != self.trainer.prepare_data_per_node ): - dm_prepare_data_per_node = ( - self.trainer.prepare_data_per_node - if self.trainer.datamodule is None - else self.trainer.datamodule.prepare_data_per_node + raise MisconfigurationException( + f"`prepare_data_per_node` should not be set in Trainer, got {self.trainer.prepare_data_per_node}. " + f"`self.trainer.prepare_data_per_node`(={self.trainer.prepare_data_per_node}) is inconsistent with " + f"`self.trainer.datamodule.prepare_data_per_node`" + f"(={self.trainer.datamodule.prepare_data_per_node})." ) - if (dm_prepare_data_per_node and should_call_once_per_node) or ( - not dm_prepare_data_per_node and should_call_once_in_total - ): - self.trainer.datamodule.prepare_data() + if (dm_prepare_data_per_node and local_rank_zero) or (not dm_prepare_data_per_node and global_rank_zero): + self.trainer.datamodule.prepare_data() # handle lightning module prepare data: # check for prepare_data_per_node before calling lightning_module.prepare_data - lm_prepare_data_per_node = ( - self.trainer.prepare_data_per_node - if self.trainer.lightning_module is None - else self.trainer.lightning_module.prepare_data_per_node - ) - if (lm_prepare_data_per_node and should_call_once_per_node) or ( - not lm_prepare_data_per_node and should_call_once_in_total - ): - self.trainer.lightning_module.prepare_data() - self.trainer._is_data_prepared = True + if self.trainer.lightning_module is not None: + lm_prepare_data_per_node = self.trainer.lightning_module.prepare_data_per_node + if (self.trainer.prepare_data_per_node is not None) and ( + lm_prepare_data_per_node != self.trainer.prepare_data_per_node + ): + raise MisconfigurationException( + f"`prepare_data_per_node` should not be set in Trainer, got {self.trainer.prepare_data_per_node}. " + f"`self.trainer.prepare_data_per_node`(={self.trainer.prepare_data_per_node}) is inconsistent with " + f" `self.trainer.lightinig_module.prepare_data_per_node`" + f"(={self.trainer.lightning_module.prepare_data_per_node})." 
+ ) + if (lm_prepare_data_per_node and local_rank_zero) or (not lm_prepare_data_per_node and global_rank_zero): + self.trainer.lightning_module.prepare_data() + self.trainer._is_data_prepared = True def attach_data( self, From 153d16cfc6136464e226a59d2c1d23299cb64dbd Mon Sep 17 00:00:00 2001 From: Ning Li Date: Wed, 18 Aug 2021 16:01:35 -0700 Subject: [PATCH 15/36] update MisconfigurationException --- .../trainer/connectors/data_connector.py | 29 ++++++++++--------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/pytorch_lightning/trainer/connectors/data_connector.py b/pytorch_lightning/trainer/connectors/data_connector.py index de9ff2402ae50..1e8f109617bda 100644 --- a/pytorch_lightning/trainer/connectors/data_connector.py +++ b/pytorch_lightning/trainer/connectors/data_connector.py @@ -79,34 +79,35 @@ def prepare_data(self) -> None: local_rank_zero = self.trainer.local_rank == 0 global_rank_zero = self.trainer.local_rank == 0 and self.trainer.node_rank == 0 + datamodule = self.trainer.datamodule + lightning_module = self.trainer.lightning_module # handle datamodule prepare data: # check for prepare_data_per_node & datamodule lifecycle properties before calling datamodule.prepare_data - if self.trainer.datamodule is not None and (not self.trainer.datamodule.has_prepared_data): - - dm_prepare_data_per_node = self.trainer.datamodule.prepare_data_per_node + if self.trainer.datamodule is not None and (not datamodule.has_prepared_data): + dm_prepare_data_per_node = datamodule.prepare_data_per_node if (self.trainer.prepare_data_per_node is not None) and ( - dm_prepare_data_per_node != self.trainer.prepare_data_per_node + datamodule.prepare_data_per_node != self.trainer.prepare_data_per_node ): raise MisconfigurationException( - f"`prepare_data_per_node` should not be set in Trainer, got {self.trainer.prepare_data_per_node}. " - f"`self.trainer.prepare_data_per_node`(={self.trainer.prepare_data_per_node}) is inconsistent with " - f"`self.trainer.datamodule.prepare_data_per_node`" - f"(={self.trainer.datamodule.prepare_data_per_node})." + f"Inconsistent settings found for `prepare_data_per_node`. " + f"Value was set with both `Trainer(prepare_data_per_node={self.trainer.prepare_data_per_node}.)` " + f"and `DataModule.prepare_data_per_node={datamodule.prepare_data_per_node}`. " + f"Move `prepare_data_per_node` setting to DataModule property" ) if (dm_prepare_data_per_node and local_rank_zero) or (not dm_prepare_data_per_node and global_rank_zero): self.trainer.datamodule.prepare_data() # handle lightning module prepare data: # check for prepare_data_per_node before calling lightning_module.prepare_data if self.trainer.lightning_module is not None: - lm_prepare_data_per_node = self.trainer.lightning_module.prepare_data_per_node + lm_prepare_data_per_node = lightning_module._prepare_data_per_node if (self.trainer.prepare_data_per_node is not None) and ( - lm_prepare_data_per_node != self.trainer.prepare_data_per_node + lightning_module.prepare_data_per_node != self.trainer.prepare_data_per_node ): raise MisconfigurationException( - f"`prepare_data_per_node` should not be set in Trainer, got {self.trainer.prepare_data_per_node}. " - f"`self.trainer.prepare_data_per_node`(={self.trainer.prepare_data_per_node}) is inconsistent with " - f" `self.trainer.lightinig_module.prepare_data_per_node`" - f"(={self.trainer.lightning_module.prepare_data_per_node})." + f"Inconsistent settings found for `prepare_data_per_node`. 
" + f"Value was set with both `Trainer(prepare_data_per_node={self.trainer.prepare_data_per_node}.)` " + f"and `LightningModule.prepare_data_per_node={lightning_module.prepare_data_per_node}`. " + f"Move `prepare_data_per_node` setting to LightningModule property" ) if (lm_prepare_data_per_node and local_rank_zero) or (not lm_prepare_data_per_node and global_rank_zero): self.trainer.lightning_module.prepare_data() From fe6d2fbc886b2d4b41739a7db59eeb62d12afe63 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Wed, 18 Aug 2021 16:15:23 -0700 Subject: [PATCH 16/36] add unit test for MisconfigurationException --- tests/core/test_datamodules.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/core/test_datamodules.py b/tests/core/test_datamodules.py index 409557adf4ddd..89b1eaca1ffe8 100644 --- a/tests/core/test_datamodules.py +++ b/tests/core/test_datamodules.py @@ -23,6 +23,7 @@ from pytorch_lightning import LightningDataModule, Trainer from pytorch_lightning.callbacks import ModelCheckpoint from pytorch_lightning.utilities import AttributeDict +from pytorch_lightning.utilities.exceptions import MisconfigurationException from pytorch_lightning.utilities.model_helpers import is_overridden from tests.helpers import BoringDataModule, BoringModel from tests.helpers.datamodules import ClassifDataModule @@ -532,3 +533,13 @@ def __init__(self, arg0, arg1, kwarg0=None): def test_simple_hyperparameters_saving(): data = DataModuleWithHparams(10, "foo", kwarg0="bar") assert data.hparams == AttributeDict({"arg0": 10, "arg1": "foo", "kwarg0": "bar"}) + + +def test_inconsistent_prepare_data_per_node(tmpdir): + with pytest.raises(MisconfigurationException, match="Inconsistent settings found for `prepare_data_per_node`."): + model = BoringModel() + dm = BoringDataModule() + trainer = Trainer(prepare_data_per_node=False) + trainer.model = model + trainer.datamodule = dm + trainer.data_connector.prepare_data() From 3f140b67b3e360677353cc6104d941c1ad95e632 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Wed, 18 Aug 2021 17:03:20 -0700 Subject: [PATCH 17/36] move item to CHANGELOG.md deprecation session --- CHANGELOG.md | 3 +-- pytorch_lightning/trainer/connectors/data_connector.py | 8 ++++---- tests/models/test_hooks.py | 2 +- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3f7ae4e0b1f14..d543ebc78b819 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -99,7 +99,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). - `Trainer.request_dataloader` now takes a `RunningStage` enum instance ([#8858](https://github.com/PyTorchLightning/pytorch-lightning/pull/8858)) -- Deprecated `prepare_data_per_node` flag on Trainer and set it as a property of DataHooks, accessible in the LightningModule and LightningDataModule [#8958](https://github.com/PyTorchLightning/pytorch-lightning/pull/8958) ### Deprecated @@ -112,7 +111,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). 
- Deprecated `DataModule` properties: `train_transforms`, `val_transforms`, `test_transforms`, `size`, `dims` ([#8851](https://github.com/PyTorchLightning/pytorch-lightning/pull/8851)) -- +- Deprecated `prepare_data_per_node` flag on Trainer and set it as a property of DataHooks, accessible in the LightningModule and LightningDataModule [#8958](https://github.com/PyTorchLightning/pytorch-lightning/pull/8958) - diff --git a/pytorch_lightning/trainer/connectors/data_connector.py b/pytorch_lightning/trainer/connectors/data_connector.py index 1e8f109617bda..4f69e686fd70b 100644 --- a/pytorch_lightning/trainer/connectors/data_connector.py +++ b/pytorch_lightning/trainer/connectors/data_connector.py @@ -83,7 +83,7 @@ def prepare_data(self) -> None: lightning_module = self.trainer.lightning_module # handle datamodule prepare data: # check for prepare_data_per_node & datamodule lifecycle properties before calling datamodule.prepare_data - if self.trainer.datamodule is not None and (not datamodule.has_prepared_data): + if datamodule is not None and not datamodule.has_prepared_data: dm_prepare_data_per_node = datamodule.prepare_data_per_node if (self.trainer.prepare_data_per_node is not None) and ( datamodule.prepare_data_per_node != self.trainer.prepare_data_per_node @@ -98,8 +98,8 @@ def prepare_data(self) -> None: self.trainer.datamodule.prepare_data() # handle lightning module prepare data: # check for prepare_data_per_node before calling lightning_module.prepare_data - if self.trainer.lightning_module is not None: - lm_prepare_data_per_node = lightning_module._prepare_data_per_node + if lightning_module is not None: + lm_prepare_data_per_node = lightning_module.prepare_data_per_node if (self.trainer.prepare_data_per_node is not None) and ( lightning_module.prepare_data_per_node != self.trainer.prepare_data_per_node ): @@ -110,7 +110,7 @@ def prepare_data(self) -> None: f"Move `prepare_data_per_node` setting to LightningModule property" ) if (lm_prepare_data_per_node and local_rank_zero) or (not lm_prepare_data_per_node and global_rank_zero): - self.trainer.lightning_module.prepare_data() + lightning_module.prepare_data() self.trainer._is_data_prepared = True def attach_data( diff --git a/tests/models/test_hooks.py b/tests/models/test_hooks.py index ddcde04fec991..cb34c2539d4fe 100644 --- a/tests/models/test_hooks.py +++ b/tests/models/test_hooks.py @@ -821,7 +821,7 @@ def test_trainer_datamodule_hook_system(tmpdir): class HookedDataModule(BoringDataModule): def __init__(self, called): super().__init__() - self._prepare_data_per_node = True + self.prepare_data_per_node = True def call(hook, fn, *args, **kwargs): out = fn(*args, **kwargs) From 42f500fe22621e3df432ecc338fdf4378f459bf5 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Wed, 18 Aug 2021 21:42:45 -0700 Subject: [PATCH 18/36] update BoringModel and BoringDataModule --- tests/helpers/boring_model.py | 2 ++ tests/models/test_hooks.py | 1 - 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/helpers/boring_model.py b/tests/helpers/boring_model.py index aeaf85ecf254c..70991896cec4d 100644 --- a/tests/helpers/boring_model.py +++ b/tests/helpers/boring_model.py @@ -89,6 +89,7 @@ def training_step(...): """ super().__init__() self.layer = torch.nn.Linear(32, 2) + self.prepare_data_per_node = True def forward(self, x): return self.layer(x) @@ -150,6 +151,7 @@ def predict_dataloader(self): class BoringDataModule(LightningDataModule): def __init__(self, data_dir: str = "./"): super().__init__() + self.prepare_data_per_node = True 
self.data_dir = data_dir self.non_picklable = None self.checkpoint_state: Optional[str] = None diff --git a/tests/models/test_hooks.py b/tests/models/test_hooks.py index cb34c2539d4fe..990178b09d07f 100644 --- a/tests/models/test_hooks.py +++ b/tests/models/test_hooks.py @@ -821,7 +821,6 @@ def test_trainer_datamodule_hook_system(tmpdir): class HookedDataModule(BoringDataModule): def __init__(self, called): super().__init__() - self.prepare_data_per_node = True def call(hook, fn, *args, **kwargs): out = fn(*args, **kwargs) From fcbff81c972ad7283d6863d22ad1dfb34c4bf93d Mon Sep 17 00:00:00 2001 From: Ning Li Date: Wed, 18 Aug 2021 23:19:03 -0700 Subject: [PATCH 19/36] set prepare_data_per_node as unused property --- pytorch_lightning/core/hooks.py | 2 ++ pytorch_lightning/core/lightning.py | 1 + 2 files changed, 3 insertions(+) diff --git a/pytorch_lightning/core/hooks.py b/pytorch_lightning/core/hooks.py index 63e764bc607be..c56a66b65dc05 100644 --- a/pytorch_lightning/core/hooks.py +++ b/pytorch_lightning/core/hooks.py @@ -372,6 +372,8 @@ def configure_sharded_model(self) -> None: class DataHooks: """Hooks to be used for data related stuff.""" + __jit_unused_properties__ = ["prepare_data_per_node"] + def __init__(self) -> None: super().__init__() self._prepare_data_per_node: bool = True diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py index c847eea57ccd0..2b96972d1eae0 100644 --- a/pytorch_lightning/core/lightning.py +++ b/pytorch_lightning/core/lightning.py @@ -78,6 +78,7 @@ class LightningModule( "automatic_optimization", "truncated_bptt_steps", "loaded_optimizer_states_dict", + "prepare_data_per_node", ] + DeviceDtypeModuleMixin.__jit_unused_properties__ + HyperparametersMixin.__jit_unused_properties__ From 61a3b71b28be790f63018a29af7d0addc087d7db Mon Sep 17 00:00:00 2001 From: Ning Li Date: Wed, 18 Aug 2021 23:49:56 -0700 Subject: [PATCH 20/36] update __jit_unused_properties__ in lightning --- pytorch_lightning/core/lightning.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py index 2b96972d1eae0..31f6893198eab 100644 --- a/pytorch_lightning/core/lightning.py +++ b/pytorch_lightning/core/lightning.py @@ -66,7 +66,6 @@ class LightningModule( # since none of these are important when using JIT, we are going to ignore them. __jit_unused_properties__ = ( [ - "datamodule", "example_input_array", "on_gpu", "current_epoch", @@ -80,6 +79,7 @@ class LightningModule( "loaded_optimizer_states_dict", "prepare_data_per_node", ] + + DataHooks.__jit_unused__properties__ + DeviceDtypeModuleMixin.__jit_unused_properties__ + HyperparametersMixin.__jit_unused_properties__ ) From 056ca6a25a45cead5988a7188dd5c24cb55558f0 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Wed, 18 Aug 2021 23:54:49 -0700 Subject: [PATCH 21/36] fix error --- pytorch_lightning/core/lightning.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py index 31f6893198eab..92f4f30857e92 100644 --- a/pytorch_lightning/core/lightning.py +++ b/pytorch_lightning/core/lightning.py @@ -66,6 +66,7 @@ class LightningModule( # since none of these are important when using JIT, we are going to ignore them. 
__jit_unused_properties__ = ( [ + "datamodule", "example_input_array", "on_gpu", "current_epoch", @@ -77,9 +78,8 @@ class LightningModule( "automatic_optimization", "truncated_bptt_steps", "loaded_optimizer_states_dict", - "prepare_data_per_node", ] - + DataHooks.__jit_unused__properties__ + + DataHooks.__jit_unused_properties__ + DeviceDtypeModuleMixin.__jit_unused_properties__ + HyperparametersMixin.__jit_unused_properties__ ) From 25f1c4546d336ebf88d076da77e6adeff88c8648 Mon Sep 17 00:00:00 2001 From: Ning Date: Thu, 19 Aug 2021 09:20:28 -0700 Subject: [PATCH 22/36] Update comments in pytorch_lightning/trainer/trainer.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Adrian Wälchli --- pytorch_lightning/trainer/trainer.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index b2254fbe7aec3..cf86869f952db 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -242,8 +242,10 @@ def __init__( prepare_data_per_node: If True, each LOCAL_RANK=0 will call prepare data. Otherwise only NODE_RANK=0, LOCAL_RANK=0 will prepare data - Deprecated in v1.5.0 and will be removed in v1.7.0 - Please set `prepare_data_per_node` in LightningDataModule or LightningModule directly instead. + + .. deprecated:: v1.5 + Deprecated in v1.5.0 and will be removed in v1.7.0 + Please set `prepare_data_per_node` in LightningDataModule or LightningModule directly instead. process_position: orders the progress bar when running multiple models on same machine. From 5da20d89ddf38e5dd7aacbcf63c29482df5b6a4b Mon Sep 17 00:00:00 2001 From: Ning Date: Thu, 19 Aug 2021 09:20:42 -0700 Subject: [PATCH 23/36] Update CHANGELOG.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Adrian Wälchli --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d543ebc78b819..57cb9f3f04a40 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -111,7 +111,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). 
- Deprecated `DataModule` properties: `train_transforms`, `val_transforms`, `test_transforms`, `size`, `dims` ([#8851](https://github.com/PyTorchLightning/pytorch-lightning/pull/8851)) -- Deprecated `prepare_data_per_node` flag on Trainer and set it as a property of DataHooks, accessible in the LightningModule and LightningDataModule [#8958](https://github.com/PyTorchLightning/pytorch-lightning/pull/8958) +- Deprecated `prepare_data_per_node` flag on Trainer and set it as a property of `DataHooks`, accessible in the `LightningModule` and `LightningDataModule` [#8958](https://github.com/PyTorchLightning/pytorch-lightning/pull/8958) - From d89f947e4250deb99d1ffcfdfd3127e336dcc412 Mon Sep 17 00:00:00 2001 From: Ning Date: Thu, 19 Aug 2021 09:21:00 -0700 Subject: [PATCH 24/36] Update pytorch_lightning/trainer/connectors/data_connector.py Co-authored-by: Jirka Borovec --- pytorch_lightning/trainer/connectors/data_connector.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pytorch_lightning/trainer/connectors/data_connector.py b/pytorch_lightning/trainer/connectors/data_connector.py index 4f69e686fd70b..673850cad8da1 100644 --- a/pytorch_lightning/trainer/connectors/data_connector.py +++ b/pytorch_lightning/trainer/connectors/data_connector.py @@ -104,10 +104,10 @@ def prepare_data(self) -> None: lightning_module.prepare_data_per_node != self.trainer.prepare_data_per_node ): raise MisconfigurationException( - f"Inconsistent settings found for `prepare_data_per_node`. " - f"Value was set with both `Trainer(prepare_data_per_node={self.trainer.prepare_data_per_node}.)` " - f"and `LightningModule.prepare_data_per_node={lightning_module.prepare_data_per_node}`. " - f"Move `prepare_data_per_node` setting to LightningModule property" + "Inconsistent settings found for `prepare_data_per_node`." + f" Value was set with both `Trainer(prepare_data_per_node={self.trainer.prepare_data_per_node}.)`" + f" and `LightningModule.prepare_data_per_node={lightning_module.prepare_data_per_node}`." + " Move `prepare_data_per_node` setting to LightningModule property." ) if (lm_prepare_data_per_node and local_rank_zero) or (not lm_prepare_data_per_node and global_rank_zero): lightning_module.prepare_data() From 630796f2cf4fb3e7d0c56d2b70b359e2ac57bef6 Mon Sep 17 00:00:00 2001 From: Ning Date: Thu, 19 Aug 2021 09:21:08 -0700 Subject: [PATCH 25/36] Update pytorch_lightning/trainer/connectors/data_connector.py Co-authored-by: Jirka Borovec --- pytorch_lightning/trainer/connectors/data_connector.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/pytorch_lightning/trainer/connectors/data_connector.py b/pytorch_lightning/trainer/connectors/data_connector.py index 673850cad8da1..6310f9505a271 100644 --- a/pytorch_lightning/trainer/connectors/data_connector.py +++ b/pytorch_lightning/trainer/connectors/data_connector.py @@ -100,9 +100,8 @@ def prepare_data(self) -> None: # check for prepare_data_per_node before calling lightning_module.prepare_data if lightning_module is not None: lm_prepare_data_per_node = lightning_module.prepare_data_per_node - if (self.trainer.prepare_data_per_node is not None) and ( - lightning_module.prepare_data_per_node != self.trainer.prepare_data_per_node - ): + eq_prepare_data = lightning_module.prepare_data_per_node == self.trainer.prepare_data_per_node + if (self.trainer.prepare_data_per_node is not None) and not eq_prepare_data: raise MisconfigurationException( "Inconsistent settings found for `prepare_data_per_node`." 
f" Value was set with both `Trainer(prepare_data_per_node={self.trainer.prepare_data_per_node}.)`" From ca37a83b8d1262f6293759a47ecbdeb663f5de9f Mon Sep 17 00:00:00 2001 From: Ning Date: Thu, 19 Aug 2021 09:21:14 -0700 Subject: [PATCH 26/36] Update pytorch_lightning/trainer/connectors/data_connector.py Co-authored-by: Jirka Borovec --- pytorch_lightning/trainer/connectors/data_connector.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pytorch_lightning/trainer/connectors/data_connector.py b/pytorch_lightning/trainer/connectors/data_connector.py index 6310f9505a271..b395bd4e8d5c1 100644 --- a/pytorch_lightning/trainer/connectors/data_connector.py +++ b/pytorch_lightning/trainer/connectors/data_connector.py @@ -89,10 +89,10 @@ def prepare_data(self) -> None: datamodule.prepare_data_per_node != self.trainer.prepare_data_per_node ): raise MisconfigurationException( - f"Inconsistent settings found for `prepare_data_per_node`. " - f"Value was set with both `Trainer(prepare_data_per_node={self.trainer.prepare_data_per_node}.)` " - f"and `DataModule.prepare_data_per_node={datamodule.prepare_data_per_node}`. " - f"Move `prepare_data_per_node` setting to DataModule property" + "Inconsistent settings found for `prepare_data_per_node`." + f" Value was set with both `Trainer(prepare_data_per_node={self.trainer.prepare_data_per_node}.)`" + f" and `DataModule.prepare_data_per_node={datamodule.prepare_data_per_node}`." + " Move `prepare_data_per_node` setting to DataModule property." ) if (dm_prepare_data_per_node and local_rank_zero) or (not dm_prepare_data_per_node and global_rank_zero): self.trainer.datamodule.prepare_data() From 7b52795cceee7468be9064ea1907c7692a3d8c9e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 19 Aug 2021 16:21:26 +0000 Subject: [PATCH 27/36] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- pytorch_lightning/trainer/trainer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index cf86869f952db..6267c6d2139a7 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -242,7 +242,7 @@ def __init__( prepare_data_per_node: If True, each LOCAL_RANK=0 will call prepare data. Otherwise only NODE_RANK=0, LOCAL_RANK=0 will prepare data - + .. deprecated:: v1.5 Deprecated in v1.5.0 and will be removed in v1.7.0 Please set `prepare_data_per_node` in LightningDataModule or LightningModule directly instead. 
From 8ec10f6ffd54fb2aa6bcbffc203d339be0811b1c Mon Sep 17 00:00:00 2001 From: Ning Li Date: Thu, 19 Aug 2021 09:33:14 -0700 Subject: [PATCH 28/36] remove `prepare_data_per_node` from boring_model --- tests/helpers/boring_model.py | 2 -- tests/plugins/test_deepspeed_plugin.py | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/helpers/boring_model.py b/tests/helpers/boring_model.py index 70991896cec4d..aeaf85ecf254c 100644 --- a/tests/helpers/boring_model.py +++ b/tests/helpers/boring_model.py @@ -89,7 +89,6 @@ def training_step(...): """ super().__init__() self.layer = torch.nn.Linear(32, 2) - self.prepare_data_per_node = True def forward(self, x): return self.layer(x) @@ -151,7 +150,6 @@ def predict_dataloader(self): class BoringDataModule(LightningDataModule): def __init__(self, data_dir: str = "./"): super().__init__() - self.prepare_data_per_node = True self.data_dir = data_dir self.non_picklable = None self.checkpoint_state: Optional[str] = None diff --git a/tests/plugins/test_deepspeed_plugin.py b/tests/plugins/test_deepspeed_plugin.py index f0c1d7d49b586..a5e4e1d189aaa 100644 --- a/tests/plugins/test_deepspeed_plugin.py +++ b/tests/plugins/test_deepspeed_plugin.py @@ -470,6 +470,7 @@ def __init__(self, lr: float = 0.01, num_blocks: int = 5): super().__init__() self.lr = lr self.num_blocks = num_blocks + self.prepare_data_per_node = True self.train_acc = Accuracy() self.valid_acc = Accuracy() From 5a81b85d7d1645a7f20399ec05c1c4a1422fd1e6 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Thu, 19 Aug 2021 10:54:48 -0700 Subject: [PATCH 29/36] update test_datamodules to avoid noop --- .../trainer/connectors/data_connector.py | 9 ++++--- tests/core/test_datamodules.py | 24 +++++++++---------- 2 files changed, 16 insertions(+), 17 deletions(-) diff --git a/pytorch_lightning/trainer/connectors/data_connector.py b/pytorch_lightning/trainer/connectors/data_connector.py index b395bd4e8d5c1..001a04f008d05 100644 --- a/pytorch_lightning/trainer/connectors/data_connector.py +++ b/pytorch_lightning/trainer/connectors/data_connector.py @@ -85,9 +85,8 @@ def prepare_data(self) -> None: # check for prepare_data_per_node & datamodule lifecycle properties before calling datamodule.prepare_data if datamodule is not None and not datamodule.has_prepared_data: dm_prepare_data_per_node = datamodule.prepare_data_per_node - if (self.trainer.prepare_data_per_node is not None) and ( - datamodule.prepare_data_per_node != self.trainer.prepare_data_per_node - ): + dm_eq_prepare_data = datamodule.prepare_data_per_node == self.trainer.prepare_data_per_node + if (self.trainer.prepare_data_per_node is not None) and not dm_eq_prepare_data: raise MisconfigurationException( "Inconsistent settings found for `prepare_data_per_node`." f" Value was set with both `Trainer(prepare_data_per_node={self.trainer.prepare_data_per_node}.)`" @@ -100,8 +99,8 @@ def prepare_data(self) -> None: # check for prepare_data_per_node before calling lightning_module.prepare_data if lightning_module is not None: lm_prepare_data_per_node = lightning_module.prepare_data_per_node - eq_prepare_data = lightning_module.prepare_data_per_node == self.trainer.prepare_data_per_node - if (self.trainer.prepare_data_per_node is not None) and not eq_prepare_data: + lm_eq_prepare_data = lightning_module.prepare_data_per_node == self.trainer.prepare_data_per_node + if (self.trainer.prepare_data_per_node is not None) and not lm_eq_prepare_data: raise MisconfigurationException( "Inconsistent settings found for `prepare_data_per_node`." 
f" Value was set with both `Trainer(prepare_data_per_node={self.trainer.prepare_data_per_node}.)`" diff --git a/tests/core/test_datamodules.py b/tests/core/test_datamodules.py index 89b1eaca1ffe8..ef163cac30aad 100644 --- a/tests/core/test_datamodules.py +++ b/tests/core/test_datamodules.py @@ -42,7 +42,6 @@ def test_can_prepare_data(local_rank, node_rank): # 1 no DM # prepare_data_per_node = True # local rank = 0 (True) - dm.random_full = None dm._has_prepared_data = False local_rank.return_value = 0 @@ -94,18 +93,19 @@ def test_can_prepare_data(local_rank, node_rank): trainer.lightning_module.prepare_data_per_node = True local_rank.return_value = 0 - # is_overridden prepare data = True - # has been called - # False - dm._has_prepared_data = True - - # has not been called - # True - dm._has_prepared_data = False + with mock.patch.object(trainer.datamodule, "prepare_data") as dm_mock: + # is_overridden prepare data = True + # has been called + # False + dm._has_prepared_data = True + trainer.data_connector.prepare_data() + dm_mock.assert_not_called() - # is_overridden prepare data = False - # True - dm.prepare_data = None + # has not been called + # True + dm._has_prepared_data = False + trainer.data_connector.prepare_data() + dm_mock.assert_called_once() def test_hooks_no_recursion_error(): From b66f371fd9d211bd1df5311fcbfa21d8e2e31147 Mon Sep 17 00:00:00 2001 From: Ning Date: Fri, 20 Aug 2021 10:28:55 -0700 Subject: [PATCH 30/36] Update comment pytorch_lightning/core/hooks.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Carlos Mocholí --- pytorch_lightning/core/hooks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytorch_lightning/core/hooks.py b/pytorch_lightning/core/hooks.py index c56a66b65dc05..1c5744b375862 100644 --- a/pytorch_lightning/core/hooks.py +++ b/pytorch_lightning/core/hooks.py @@ -413,7 +413,7 @@ def prepare_data(self): Note: Setting ``prepare_data_per_node`` with the trainer flag is deprecated and will be removed in v1.7.0. - Please set `prepare_data_per_node` in LightningDataModule or LightningModule directly instead. + Please set ``prepare_data_per_node`` in LightningDataModule or LightningModule directly instead. This is called before requesting the dataloaders: From 1693e63fc3b7f6aef54f283532ac539fec31ed5d Mon Sep 17 00:00:00 2001 From: Ning Date: Fri, 20 Aug 2021 10:29:39 -0700 Subject: [PATCH 31/36] Update comment pytorch_lightning/trainer/trainer.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Carlos Mocholí --- pytorch_lightning/trainer/trainer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index 6267c6d2139a7..d3446bc1d4e7b 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -245,7 +245,7 @@ def __init__( .. deprecated:: v1.5 Deprecated in v1.5.0 and will be removed in v1.7.0 - Please set `prepare_data_per_node` in LightningDataModule or LightningModule directly instead. + Please set ``prepare_data_per_node`` in LightningDataModule or LightningModule directly instead. process_position: orders the progress bar when running multiple models on same machine. 
From 13e64e976b303c3301e027e5433eec9362c53349 Mon Sep 17 00:00:00 2001 From: Ning Date: Fri, 20 Aug 2021 11:28:50 -0700 Subject: [PATCH 32/36] Update pytorch_lightning/trainer/connectors/data_connector.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Carlos Mocholí --- pytorch_lightning/trainer/connectors/data_connector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytorch_lightning/trainer/connectors/data_connector.py b/pytorch_lightning/trainer/connectors/data_connector.py index 001a04f008d05..f10129dd6e0a9 100644 --- a/pytorch_lightning/trainer/connectors/data_connector.py +++ b/pytorch_lightning/trainer/connectors/data_connector.py @@ -86,7 +86,7 @@ def prepare_data(self) -> None: if datamodule is not None and not datamodule.has_prepared_data: dm_prepare_data_per_node = datamodule.prepare_data_per_node dm_eq_prepare_data = datamodule.prepare_data_per_node == self.trainer.prepare_data_per_node - if (self.trainer.prepare_data_per_node is not None) and not dm_eq_prepare_data: + if self.trainer.prepare_data_per_node is not None and not dm_eq_prepare_data: raise MisconfigurationException( "Inconsistent settings found for `prepare_data_per_node`." f" Value was set with both `Trainer(prepare_data_per_node={self.trainer.prepare_data_per_node}.)`" From ca6c82957cf5b129d864df2392c1037d4fe63fa4 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Fri, 20 Aug 2021 21:08:46 -0700 Subject: [PATCH 33/36] update property in DataHooks --- pytorch_lightning/core/hooks.py | 20 +++++++------------- 1 file changed, 7 insertions(+), 13 deletions(-) diff --git a/pytorch_lightning/core/hooks.py b/pytorch_lightning/core/hooks.py index 1c5744b375862..99e38ce799bc6 100644 --- a/pytorch_lightning/core/hooks.py +++ b/pytorch_lightning/core/hooks.py @@ -375,8 +375,14 @@ class DataHooks: __jit_unused_properties__ = ["prepare_data_per_node"] def __init__(self) -> None: + """ + property: + prepare_data_per_node: + If True, each LOCAL_RANK=0 will call prepare data. + Otherwise only NODE_RANK=0, LOCAL_RANK=0 will prepare data. + """ super().__init__() - self._prepare_data_per_node: bool = True + self.prepare_data_per_node: bool = True def prepare_data(self) -> None: """ @@ -815,18 +821,6 @@ def on_after_batch_transfer(self, batch, dataloader_idx): """ return batch - @property - def prepare_data_per_node(self) -> bool: - """ - If True, each LOCAL_RANK=0 will call prepare data. - Otherwise only NODE_RANK=0, LOCAL_RANK=0 will prepare data. - """ - return self._prepare_data_per_node - - @prepare_data_per_node.setter - def prepare_data_per_node(self, prepare_data_per_node: bool) -> None: - self._prepare_data_per_node = prepare_data_per_node - class CheckpointHooks: """Hooks to be used with Checkpointing.""" From e5442b72e181aee3e4ce7c22dbc416e97f929f71 Mon Sep 17 00:00:00 2001 From: Ning Li Date: Fri, 20 Aug 2021 21:19:08 -0700 Subject: [PATCH 34/36] add call_hook --- pytorch_lightning/trainer/connectors/data_connector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytorch_lightning/trainer/connectors/data_connector.py b/pytorch_lightning/trainer/connectors/data_connector.py index f10129dd6e0a9..d5b2face7563c 100644 --- a/pytorch_lightning/trainer/connectors/data_connector.py +++ b/pytorch_lightning/trainer/connectors/data_connector.py @@ -108,7 +108,7 @@ def prepare_data(self) -> None: " Move `prepare_data_per_node` setting to LightningModule property." 
) if (lm_prepare_data_per_node and local_rank_zero) or (not lm_prepare_data_per_node and global_rank_zero): - lightning_module.prepare_data() + self.trainer.call_hook("prepare_data") self.trainer._is_data_prepared = True def attach_data( From a72c929fae670cbb90c75d228c6b6f548da39dfc Mon Sep 17 00:00:00 2001 From: Ning Li Date: Fri, 20 Aug 2021 21:49:39 -0700 Subject: [PATCH 35/36] fix test `test_datamodules.py` --- tests/core/test_datamodules.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/core/test_datamodules.py b/tests/core/test_datamodules.py index ef163cac30aad..3bfe3aaa6cf80 100644 --- a/tests/core/test_datamodules.py +++ b/tests/core/test_datamodules.py @@ -64,7 +64,6 @@ def test_can_prepare_data(local_rank, node_rank): dm.random_full = None dm._has_prepared_data = False dm.prepare_data_per_node = False - trainer.lightning_module.prepare_data_per_node = False node_rank.return_value = 0 local_rank.return_value = 0 @@ -90,7 +89,6 @@ def test_can_prepare_data(local_rank, node_rank): # prepar per node = True # local rank = 0 (True) dm.prepare_data_per_node = True - trainer.lightning_module.prepare_data_per_node = True local_rank.return_value = 0 with mock.patch.object(trainer.datamodule, "prepare_data") as dm_mock: From 702cdc81e39a79e1add2383fc54b8de22433bc5a Mon Sep 17 00:00:00 2001 From: Ning Li Date: Sun, 22 Aug 2021 20:51:48 -0700 Subject: [PATCH 36/36] update prepare_data_per_node as attributes --- pytorch_lightning/core/hooks.py | 4 +--- pytorch_lightning/core/lightning.py | 1 - 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/pytorch_lightning/core/hooks.py b/pytorch_lightning/core/hooks.py index 99e38ce799bc6..7ff21885343a9 100644 --- a/pytorch_lightning/core/hooks.py +++ b/pytorch_lightning/core/hooks.py @@ -372,11 +372,9 @@ def configure_sharded_model(self) -> None: class DataHooks: """Hooks to be used for data related stuff.""" - __jit_unused_properties__ = ["prepare_data_per_node"] - def __init__(self) -> None: """ - property: + Attributes: prepare_data_per_node: If True, each LOCAL_RANK=0 will call prepare data. Otherwise only NODE_RANK=0, LOCAL_RANK=0 will prepare data. diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py index 92f4f30857e92..c847eea57ccd0 100644 --- a/pytorch_lightning/core/lightning.py +++ b/pytorch_lightning/core/lightning.py @@ -79,7 +79,6 @@ class LightningModule( "truncated_bptt_steps", "loaded_optimizer_states_dict", ] - + DataHooks.__jit_unused_properties__ + DeviceDtypeModuleMixin.__jit_unused_properties__ + HyperparametersMixin.__jit_unused_properties__ )
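
Taken together, the connector's final behaviour reduces to a small rank-gating rule. The following standalone sketch restates that rule outside the Trainer, with the rank arguments standing in for the values the Trainer resolves from the cluster environment:

def should_prepare_data(prepare_data_per_node: bool, node_rank: int, local_rank: int) -> bool:
    local_rank_zero = local_rank == 0
    global_rank_zero = node_rank == 0 and local_rank == 0
    if prepare_data_per_node:
        # every node's LOCAL_RANK=0 process prepares (e.g. downloads) the data
        return local_rank_zero
    # only the single NODE_RANK=0, LOCAL_RANK=0 process prepares the data
    return global_rank_zero


assert should_prepare_data(True, node_rank=1, local_rank=0) is True
assert should_prepare_data(False, node_rank=1, local_rank=0) is False
assert should_prepare_data(False, node_rank=0, local_rank=0) is True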