Commit f645df5

Add typings for evaluation_loop.py and remove some dead code (#7015)
1 parent 5bd3cd5 commit f645df5

File tree: 22 files changed, +114 -161 lines changed

.github/workflows/ci_pkg-install.yml

Lines changed: 1 addition & 1 deletion
@@ -60,4 +60,4 @@ jobs:
         pip install dist/*.whl
         cd ..
         python -c "import pytorch_lightning as pl ; print(pl.__version__)"
-        pip uninstall -y pytorch-lightning
+        pip uninstall -y pytorch-lightning

.github/workflows/events-recurrent.yml

Lines changed: 1 addition & 1 deletion
@@ -39,4 +39,4 @@ jobs:
         echo $jobs_to_delete
         if [ ${#jobs_to_delete} -gt 1 ];
         then kubectl delete job $(kubectl get job | awk 'match($4,/[0-9]+[dh]/) {print $1}');
-        fi
+        fi

pytorch_lightning/accelerators/tpu.py

Lines changed: 4 additions & 1 deletion
@@ -56,7 +56,10 @@ def run_optimizer_step(
         xm.optimizer_step(optimizer, barrier=False, optimizer_args={'closure': lambda_closure, **kwargs})

     def clip_gradients(
-        self, optimizer: Optimizer, clip_val: Union[float, int], norm_type: float = 2.0,
+        self,
+        optimizer: Optimizer,
+        clip_val: Union[float, int],
+        norm_type: float = 2.0,
         gradient_clip_algorithm: GradClipAlgorithmType = GradClipAlgorithmType.NORM
     ) -> None:
         assert gradient_clip_algorithm is GradClipAlgorithmType.NORM, \

pytorch_lightning/callbacks/gpu_stats_monitor.py

Lines changed: 1 addition & 3 deletions
@@ -118,9 +118,7 @@ def on_train_epoch_start(self, trainer, pl_module) -> None:
         self._snap_inter_step_time = None

     @rank_zero_only
-    def on_train_batch_start(
-        self, trainer, pl_module, batch: Any, batch_idx: int, dataloader_idx: int
-    ) -> None:
+    def on_train_batch_start(self, trainer, pl_module, batch: Any, batch_idx: int, dataloader_idx: int) -> None:
         if self._log_stats.intra_step_time:
             self._snap_intra_step_time = time.time()

pytorch_lightning/callbacks/pruning.py

Lines changed: 2 additions & 3 deletions
@@ -422,9 +422,8 @@ def sanitize_parameters_to_prune(
         current_modules = [m for m in pl_module.modules() if not isinstance(m, _MODULE_CONTAINERS)]

         if parameters_to_prune is None:
-            parameters_to_prune = [
-                (m, p) for p in parameters for m in current_modules if getattr(m, p, None) is not None
-            ]
+            parameters_to_prune = [(m, p) for p in parameters for m in current_modules
+                                   if getattr(m, p, None) is not None]
         elif (
             isinstance(parameters_to_prune, (list, tuple)) and len(parameters_to_prune) > 0
             and all(len(p) == 2 for p in parameters_to_prune)

pytorch_lightning/loggers/mlflow.py

Lines changed: 3 additions & 3 deletions
@@ -29,17 +29,17 @@
 _MLFLOW_AVAILABLE = _module_available("mlflow")
 try:
     import mlflow
-    from mlflow.tracking import MlflowClient
-    from mlflow.tracking import context
+    from mlflow.tracking import context, MlflowClient
     # todo: there seems to be still some remaining import error with Conda env
 except ImportError:
     _MLFLOW_AVAILABLE = False
     mlflow, MlflowClient, context = None, None, None

-
 # before v1.1.0
 if hasattr(context, 'resolve_tags'):
     from mlflow.tracking.context import resolve_tags
+
+
 # since v1.1.0
 elif hasattr(context, 'registry'):
     from mlflow.tracking.context.registry import resolve_tags

pytorch_lightning/plugins/precision/sharded_native_amp.py

Lines changed: 1 addition & 1 deletion
@@ -31,6 +31,6 @@ def __init__(self) -> None:
         super().__init__()
         self.scaler = ShardedGradScaler()

-    def clip_grad_by_norm(self, optimizer: Optimizer, clip_val: Union[int, float], norm_type: float = 2.0) -> None:
+    def clip_grad_by_norm(self, optimizer: 'Optimizer', clip_val: Union[int, float], norm_type: float = 2.0) -> None:
         optimizer = cast(OSS, optimizer)
         optimizer.clip_grad_norm(clip_val, norm_type=norm_type)
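The only change in this hunk is quoting the Optimizer annotation. A quoted annotation is a forward reference: it is kept as a string rather than evaluated at import time, so the name only has to be visible to static type checkers. Below is a minimal sketch of that pattern; the TYPE_CHECKING-guarded import is an assumption for illustration, since the file's import section is not part of this hunk.

# Sketch of the quoted-annotation (forward reference) pattern; the TYPE_CHECKING
# import is assumed for illustration and is not taken from this diff.
from typing import TYPE_CHECKING, Union

if TYPE_CHECKING:
    # Imported only for static analysis; no runtime import of torch.optim happens here.
    from torch.optim import Optimizer


def clip_grad_by_norm(optimizer: 'Optimizer', clip_val: Union[int, float], norm_type: float = 2.0) -> None:
    # 'Optimizer' stays a plain string in __annotations__ and is only resolved by
    # tools that explicitly ask for it (e.g. typing.get_type_hints), not at import time.
    ...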

pytorch_lightning/trainer/connectors/training_trick_connector.py

Lines changed: 1 addition & 3 deletions
@@ -35,9 +35,7 @@ def on_trainer_init(

         # gradient clipping
         if gradient_clip_algorithm not in list(GradClipAlgorithmType):
-            raise MisconfigurationException(
-                f"gradient_clip_algorithm should be in {list(GradClipAlgorithmType)}"
-            )
+            raise MisconfigurationException(f"gradient_clip_algorithm should be in {list(GradClipAlgorithmType)}")
         self.trainer.gradient_clip_val = gradient_clip_val
         self.trainer.gradient_clip_algorithm = GradClipAlgorithmType(gradient_clip_algorithm)
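For context, the validation above checks a user-supplied string against the members of GradClipAlgorithmType and then normalizes it through the enum constructor. A minimal, self-contained sketch of that pattern follows; the str-backed enum with 'value'/'norm' members is an assumption that mirrors the diff, not the library's actual definition, and ValueError stands in for Lightning's MisconfigurationException.

# Minimal sketch of the enum-membership validation shown above; member values are assumed.
from enum import Enum


class GradClipAlgorithmType(str, Enum):
    VALUE = 'value'
    NORM = 'norm'


def validate_clip_algorithm(gradient_clip_algorithm: str) -> GradClipAlgorithmType:
    # Because the enum also subclasses str, a raw string compares equal to a member,
    # so a plain membership test against list(GradClipAlgorithmType) works.
    if gradient_clip_algorithm not in list(GradClipAlgorithmType):
        # ValueError used here in place of MisconfigurationException.
        raise ValueError(f"gradient_clip_algorithm should be in {list(GradClipAlgorithmType)}")
    return GradClipAlgorithmType(gradient_clip_algorithm)


assert validate_clip_algorithm('norm') is GradClipAlgorithmType.NORM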

pytorch_lightning/trainer/data_loading.py

Lines changed: 3 additions & 3 deletions
@@ -15,7 +15,7 @@
 import multiprocessing
 from abc import ABC
 from copy import deepcopy
-from typing import Iterable, List, Tuple, Union
+from typing import Iterable, List, Optional, Tuple, Union

 from torch.utils.data import BatchSampler, DataLoader, RandomSampler, SequentialSampler
 from torch.utils.data.distributed import DistributedSampler
@@ -41,9 +41,9 @@ class TrainerDataLoadingMixin(ABC):
     train_dataloader: DataLoader
     num_training_batches: Union[int, float]
     val_check_batch: float
-    val_dataloaders: List[DataLoader]
+    val_dataloaders: Optional[List[DataLoader]]
     num_val_batches: List[Union[int, float]]
-    test_dataloaders: List[DataLoader]
+    test_dataloaders: Optional[List[DataLoader]]
     num_test_batches: List[Union[int, float]]
     limit_train_batches: Union[int, float]
     overfit_batches: Union[int, float]
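The typing change above marks val_dataloaders and test_dataloaders as Optional, i.e. they may still be None before the corresponding dataloaders have been attached. A small illustrative sketch (not the library's code) of why the Optional annotation matters to a type checker:

# Illustrative sketch only: with Optional[...], a checker such as mypy requires the
# None case to be handled before the attribute is used.
from typing import List, Optional

from torch.utils.data import DataLoader


class DataLoadingSketch:
    # May be None until the dataloaders have been requested and attached.
    val_dataloaders: Optional[List[DataLoader]] = None
    test_dataloaders: Optional[List[DataLoader]] = None

    def num_val_dataloaders(self) -> int:
        if self.val_dataloaders is None:  # narrowing required under Optional typing
            return 0
        return len(self.val_dataloaders)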

0 commit comments
