From 8eba0598c379699aa0fa5d7142d18b6b431f1f6c Mon Sep 17 00:00:00 2001
From: donlapark
Date: Wed, 13 Jul 2022 15:11:48 +0700
Subject: [PATCH 01/10] change type of trainer.py to TypedDict

---
 pyproject.toml                           |  1 -
 src/pytorch_lightning/trainer/trainer.py |  4 ++--
 src/pytorch_lightning/tuner/tuning.py    | 16 ++++++++++++----
 3 files changed, 14 insertions(+), 7 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index c6e3452784945..055677a04e977 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -88,7 +88,6 @@ module = [
     "pytorch_lightning.trainer.supporters",
     "pytorch_lightning.trainer.trainer",
     "pytorch_lightning.tuner.batch_size_scaling",
-    "pytorch_lightning.tuner.tuning",
     "pytorch_lightning.utilities.auto_restart",
     "pytorch_lightning.utilities.data",
     "pytorch_lightning.utilities.distributed",
diff --git a/src/pytorch_lightning/trainer/trainer.py b/src/pytorch_lightning/trainer/trainer.py
index 37ba9a6ab2161..36c067a438c59 100644
--- a/src/pytorch_lightning/trainer/trainer.py
+++ b/src/pytorch_lightning/trainer/trainer.py
@@ -86,7 +86,7 @@
 from pytorch_lightning.trainer.states import RunningStage, TrainerFn, TrainerState, TrainerStatus
 from pytorch_lightning.trainer.supporters import CombinedLoader
 from pytorch_lightning.tuner.lr_finder import _LRFinder
-from pytorch_lightning.tuner.tuning import Tuner
+from pytorch_lightning.tuner.tuning import _TunerResult, Tuner
 from pytorch_lightning.utilities import (
     _HPU_AVAILABLE,
     _IPU_AVAILABLE,
@@ -1015,7 +1015,7 @@ def tune(
         datamodule: Optional[LightningDataModule] = None,
         scale_batch_size_kwargs: Optional[Dict[str, Any]] = None,
         lr_find_kwargs: Optional[Dict[str, Any]] = None,
-    ) -> Dict[str, Optional[Union[int, _LRFinder]]]:
+    ) -> _TunerResult:
         r"""
         Runs routines to tune hyperparameters before training.
diff --git a/src/pytorch_lightning/tuner/tuning.py b/src/pytorch_lightning/tuner/tuning.py
index b1a38bd27688c..f7620a55d95ca 100644
--- a/src/pytorch_lightning/tuner/tuning.py
+++ b/src/pytorch_lightning/tuner/tuning.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from typing import Any, Dict, Optional, Union
+from typing_extensions import TypedDict, NotRequired
 
 import pytorch_lightning as pl
 from pytorch_lightning.trainer.states import TrainerStatus
@@ -20,6 +21,9 @@
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
 
+class _TunerResult(TypedDict):
+    lr_find: NotRequired[Optional[_LRFinder]]
+    scale_batch_size: NotRequired[Optional[int]]
 
 class Tuner:
     """Tuner class to tune your model."""
@@ -36,11 +40,15 @@ def _tune(
         model: "pl.LightningModule",
         scale_batch_size_kwargs: Optional[Dict[str, Any]] = None,
         lr_find_kwargs: Optional[Dict[str, Any]] = None,
-    ) -> Dict[str, Optional[Union[int, _LRFinder]]]:
+    ) -> _TunerResult:
         scale_batch_size_kwargs = scale_batch_size_kwargs or {}
         lr_find_kwargs = lr_find_kwargs or {}
         # return a dict instead of a tuple so BC is not broken if a new tuning procedure is added
-        result = {}
+<<<<<<< HEAD
+        result: _TunerResult = {}
+=======
+        result: Dict[str, Any] = {}
+>>>>>>> 7c4b7df27bfa14ef0611f67cbeba3d9f356c4f44
 
         self.trainer.strategy.connect(model)
@@ -84,7 +92,7 @@ def scale_batch_size(
         init_val: int = 2,
         max_trials: int = 25,
         batch_arg_name: str = "batch_size",
-    ) -> Optional[int]:
+    ) -> Optional[Union[int, _LRFinder]]:
         """Iteratively try to find the largest batch size for a given model that does not give an out of memory (OOM)
         error.

From 128fc7e1074372a948192fc81b56bcc797df3ff6 Mon Sep 17 00:00:00 2001
From: donlapark
Date: Wed, 13 Jul 2022 15:17:45 +0700
Subject: [PATCH 02/10] change type of trainer.py to TypedDict

---
 src/pytorch_lightning/tuner/tuning.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/src/pytorch_lightning/tuner/tuning.py b/src/pytorch_lightning/tuner/tuning.py
index f7620a55d95ca..721b361df035b 100644
--- a/src/pytorch_lightning/tuner/tuning.py
+++ b/src/pytorch_lightning/tuner/tuning.py
@@ -44,11 +44,7 @@ def _tune(
         scale_batch_size_kwargs = scale_batch_size_kwargs or {}
         lr_find_kwargs = lr_find_kwargs or {}
         # return a dict instead of a tuple so BC is not broken if a new tuning procedure is added
-<<<<<<< HEAD
         result: _TunerResult = {}
-=======
-        result: Dict[str, Any] = {}
->>>>>>> 7c4b7df27bfa14ef0611f67cbeba3d9f356c4f44
 
         self.trainer.strategy.connect(model)

From f89aab6291352803a91a075304c1e12df5f03907 Mon Sep 17 00:00:00 2001
From: donlapark
Date: Wed, 13 Jul 2022 15:20:39 +0700
Subject: [PATCH 03/10] change type of trainer.py to TypedDict

---
 src/pytorch_lightning/tuner/tuning.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/pytorch_lightning/tuner/tuning.py b/src/pytorch_lightning/tuner/tuning.py
index 721b361df035b..25da7a719d29d 100644
--- a/src/pytorch_lightning/tuner/tuning.py
+++ b/src/pytorch_lightning/tuner/tuning.py
@@ -88,7 +88,7 @@ def scale_batch_size(
         init_val: int = 2,
         max_trials: int = 25,
         batch_arg_name: str = "batch_size",
-    ) -> Optional[Union[int, _LRFinder]]:
+    ) -> Optional[int]:
         """Iteratively try to find the largest batch size for a given model that does not give an out of memory (OOM)
         error.
@@ -155,7 +155,7 @@ def lr_find(
         mode: str = "exponential",
         early_stop_threshold: float = 4.0,
         update_attr: bool = False,
-    ) -> Optional[Union[int, _LRFinder]]:
+    ) -> Optional[_LRFinder]:
         """Enables the user to do a range test of good initial learning rates, to reduce the amount of guesswork in
         picking a good starting learning rate.

From 8de2dc1e8fcad6ce6a16a7715d8f520885cc67a4 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 13 Jul 2022 08:36:11 +0000
Subject: [PATCH 04/10] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 src/pytorch_lightning/tuner/tuning.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/src/pytorch_lightning/tuner/tuning.py b/src/pytorch_lightning/tuner/tuning.py
index 25da7a719d29d..6f8a80a24482f 100644
--- a/src/pytorch_lightning/tuner/tuning.py
+++ b/src/pytorch_lightning/tuner/tuning.py
@@ -12,7 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from typing import Any, Dict, Optional, Union
-from typing_extensions import TypedDict, NotRequired
+
+from typing_extensions import NotRequired, TypedDict
 
 import pytorch_lightning as pl
 from pytorch_lightning.trainer.states import TrainerStatus
@@ -21,10 +22,12 @@
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
 
+
 class _TunerResult(TypedDict):
     lr_find: NotRequired[Optional[_LRFinder]]
     scale_batch_size: NotRequired[Optional[int]]
 
+
 class Tuner:
     """Tuner class to tune your model."""

From ef70a7ca89314308897e70143e7d377fbb8c5955 Mon Sep 17 00:00:00 2001
From: donlapark <10988155+donlapark@users.noreply.github.com>
Date: Wed, 13 Jul 2022 15:44:18 +0700
Subject: [PATCH 05/10] Update trainer.py

---
 src/pytorch_lightning/trainer/trainer.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/pytorch_lightning/trainer/trainer.py b/src/pytorch_lightning/trainer/trainer.py
index 36c067a438c59..d55a26c5fd93b 100644
--- a/src/pytorch_lightning/trainer/trainer.py
+++ b/src/pytorch_lightning/trainer/trainer.py
@@ -85,7 +85,6 @@
 from pytorch_lightning.trainer.optimizers import TrainerOptimizersMixin
 from pytorch_lightning.trainer.states import RunningStage, TrainerFn, TrainerState, TrainerStatus
 from pytorch_lightning.trainer.supporters import CombinedLoader
-from pytorch_lightning.tuner.lr_finder import _LRFinder
 from pytorch_lightning.tuner.tuning import _TunerResult, Tuner
 from pytorch_lightning.utilities import (
     _HPU_AVAILABLE,

From 049bedcf338db4103977c9773fa59bbb328873fe Mon Sep 17 00:00:00 2001
From: donlapark <10988155+donlapark@users.noreply.github.com>
Date: Wed, 13 Jul 2022 13:35:29 +0000
Subject: [PATCH 06/10] Update src/pytorch_lightning/tuner/tuning.py
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Carlos Mocholí
---
 src/pytorch_lightning/tuner/tuning.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/pytorch_lightning/tuner/tuning.py b/src/pytorch_lightning/tuner/tuning.py
index 6f8a80a24482f..79ebf3bd0d2c9 100644
--- a/src/pytorch_lightning/tuner/tuning.py
+++ b/src/pytorch_lightning/tuner/tuning.py
@@ -47,7 +47,7 @@ def _tune(
         scale_batch_size_kwargs = scale_batch_size_kwargs or {}
         lr_find_kwargs = lr_find_kwargs or {}
         # return a dict instead of a tuple so BC is not broken if a new tuning procedure is added
-        result: _TunerResult = {}
+        result = _TunerResult()
 
         self.trainer.strategy.connect(model)

From 80e8ab050bf341174bff790321442e54d41af602 Mon Sep 17 00:00:00 2001
From: donlapark <10988155+donlapark@users.noreply.github.com>
Date: Thu, 14 Jul 2022 01:24:34 +0700
Subject: [PATCH 07/10] Clean up code

---
 src/pytorch_lightning/tuner/tuning.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/pytorch_lightning/tuner/tuning.py b/src/pytorch_lightning/tuner/tuning.py
index 79ebf3bd0d2c9..a17d502c7f9aa 100644
--- a/src/pytorch_lightning/tuner/tuning.py
+++ b/src/pytorch_lightning/tuner/tuning.py
@@ -12,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from typing import Any, Dict, Optional, Union
-
 from typing_extensions import NotRequired, TypedDict
 
 import pytorch_lightning as pl

From b6cd11f383653bdd9be710ebbe3dbd5808e72b94 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 13 Jul 2022 18:26:10 +0000
Subject: [PATCH 08/10] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 src/pytorch_lightning/tuner/tuning.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/pytorch_lightning/tuner/tuning.py b/src/pytorch_lightning/tuner/tuning.py
index a17d502c7f9aa..79ebf3bd0d2c9 100644
--- a/src/pytorch_lightning/tuner/tuning.py
+++ b/src/pytorch_lightning/tuner/tuning.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from typing import Any, Dict, Optional, Union
+
 from typing_extensions import NotRequired, TypedDict
 
 import pytorch_lightning as pl

From 702788a272c8bcbf0477dc4704872c3a37d83f65 Mon Sep 17 00:00:00 2001
From: donlapark <10988155+donlapark@users.noreply.github.com>
Date: Thu, 14 Jul 2022 01:27:25 +0700
Subject: [PATCH 09/10] Clean up code

---
 src/pytorch_lightning/tuner/tuning.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/src/pytorch_lightning/tuner/tuning.py b/src/pytorch_lightning/tuner/tuning.py
index 79ebf3bd0d2c9..d63c875bd429f 100644
--- a/src/pytorch_lightning/tuner/tuning.py
+++ b/src/pytorch_lightning/tuner/tuning.py
@@ -12,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from typing import Any, Dict, Optional, Union
-
 from typing_extensions import NotRequired, TypedDict
 
 import pytorch_lightning as pl
@@ -27,7 +26,6 @@ class _TunerResult(TypedDict):
     lr_find: NotRequired[Optional[_LRFinder]]
     scale_batch_size: NotRequired[Optional[int]]
 
-
 class Tuner:
     """Tuner class to tune your model."""

From 585288e71476cd1667ceb267af3fd5d63ad4c012 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 13 Jul 2022 18:29:12 +0000
Subject: [PATCH 10/10] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 src/pytorch_lightning/tuner/tuning.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/pytorch_lightning/tuner/tuning.py b/src/pytorch_lightning/tuner/tuning.py
index d63c875bd429f..79ebf3bd0d2c9 100644
--- a/src/pytorch_lightning/tuner/tuning.py
+++ b/src/pytorch_lightning/tuner/tuning.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from typing import Any, Dict, Optional, Union
+
 from typing_extensions import NotRequired, TypedDict
 
 import pytorch_lightning as pl
@@ -26,6 +27,7 @@ class _TunerResult(TypedDict):
     lr_find: NotRequired[Optional[_LRFinder]]
     scale_batch_size: NotRequired[Optional[int]]
 
+
 class Tuner:
     """Tuner class to tune your model."""
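
Taken together, patches 01-10 converge on one idea: type the result of Trainer.tune as a
TypedDict whose keys are all NotRequired, so a type checker knows exactly which keys may
appear (lr_find, scale_batch_size) and what type each value has, while the dict can still
be built up one tuning routine at a time. The following is a minimal, self-contained
sketch of that pattern; the _LRFinder stub and the tune driver below are illustrative
stand-ins for this example, not the Lightning API.

    from typing import Optional

    from typing_extensions import NotRequired, TypedDict


    class _LRFinder:
        """Illustrative stub standing in for pytorch_lightning.tuner.lr_finder._LRFinder."""

        suggestion: Optional[float] = None


    class _TunerResult(TypedDict):
        # Every key is NotRequired: a key appears only if its tuning routine ran.
        lr_find: NotRequired[Optional[_LRFinder]]
        scale_batch_size: NotRequired[Optional[int]]


    def tune(scale_batch_size: bool, lr_find: bool) -> _TunerResult:
        # Calling the TypedDict like a constructor (the patch 06 style) creates an
        # empty dict that type checkers accept because all keys are optional;
        # keys are then added incrementally, mirroring Tuner._tune.
        result = _TunerResult()
        if scale_batch_size:
            result["scale_batch_size"] = 64  # hypothetical tuned value
        if lr_find:
            result["lr_find"] = _LRFinder()
        return result


    result = tune(scale_batch_size=True, lr_find=False)
    # Callers must guard for absent keys, e.g. with dict.get:
    print(result.get("scale_batch_size"))  # 64
    print(result.get("lr_find"))           # None -- the routine did not run

Compared with the original Dict[str, Optional[Union[int, _LRFinder]]] annotation, the
TypedDict removes the union: a checker now knows result["scale_batch_size"] can never be
an _LRFinder, which is why patch 03 could also narrow the return types of
scale_batch_size and lr_find back to Optional[int] and Optional[_LRFinder].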