Skip to content

Commit 81a0a44

Browse files
Improve typing for Lite (#10743)
* improve typing in pytorch_lightning/lite
* [pre-commit.ci] auto fixes from pre-commit.com hooks — for more information, see https://pre-commit.ci
* include lite again

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent e94aff1 commit 81a0a44

File tree

4 files changed

+18
-7
lines changed

4 files changed

+18
-7
lines changed

pyproject.toml

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ disable_error_code = "attr-defined"
3636
# style choices
3737
warn_no_return = "False"
3838

39-
# Changes mypy default to ignore all errors
39+
# Ignore mypy errors for these files
4040
# TODO: the goal is for this to be empty
4141
[[tool.mypy.overrides]]
4242
# the list can be generated with:
@@ -63,8 +63,6 @@ module = [
6363
"pytorch_lightning.core.mixins.hparams_mixin",
6464
"pytorch_lightning.core.saving",
6565
"pytorch_lightning.distributed.dist",
66-
"pytorch_lightning.lite.lite",
67-
"pytorch_lightning.lite.wrappers",
6866
"pytorch_lightning.loggers.base",
6967
"pytorch_lightning.loggers.comet",
7068
"pytorch_lightning.loggers.csv_logs",

pytorch_lightning/lite/lite.py

Lines changed: 14 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
from contextlib import contextmanager
1717
from functools import partial
1818
from pathlib import Path
19-
from typing import Any, Callable, cast, Dict, Generator, List, Optional, Sequence, Tuple, Union
19+
from typing import Any, Callable, cast, Dict, Generator, List, Optional, overload, Sequence, Tuple, Union
2020

2121
import torch
2222
import torch.nn as nn
@@ -201,7 +201,7 @@ def setup_dataloaders(
201201
for dataloader in dataloaders
202202
]
203203
dataloaders = dataloaders[0] if len(dataloaders) == 1 else dataloaders
204-
return dataloaders
204+
return dataloaders # type: ignore[return-value]
205205

206206
def _setup_dataloader(
207207
self, dataloader: DataLoader, replace_sampler: bool = True, move_to_device: bool = True
@@ -284,6 +284,18 @@ def autocast(self) -> Generator[None, None, None]:
284284
with self._precision_plugin.forward_context():
285285
yield
286286

287+
@overload
288+
def to_device(self, obj: nn.Module) -> nn.Module:
289+
...
290+
291+
@overload
292+
def to_device(self, obj: Tensor) -> Tensor:
293+
...
294+
295+
@overload
296+
def to_device(self, obj: Any) -> Any:
297+
...
298+
287299
def to_device(self, obj: Union[nn.Module, Tensor, Any]) -> Union[nn.Module, Tensor, Any]:
288300
"""Move a :class:`torch.nn.Module` or a collection of tensors to the current device, if it is not already
289301
on that device.

pytorch_lightning/lite/wrappers.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -131,6 +131,7 @@ def __iter__(self) -> Union[Iterator[Any], Generator[Any, None, None]]:
131131
iterator = iter(self._dataloader)
132132
if self._device is None:
133133
yield from iterator
134+
return
134135

135136
for item in iterator:
136137
yield move_data_to_device(item, self._device)

pytorch_lightning/plugins/training_type/training_type_plugin.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
# limitations under the License.
1414
import contextlib
1515
from abc import ABC, abstractmethod
16-
from typing import Any, Dict, Generator, Iterable, List, Mapping, Optional, Tuple, Union
16+
from typing import Any, Dict, Generator, List, Mapping, Optional, Tuple, Union
1717

1818
import torch
1919
from torch import Tensor
@@ -241,7 +241,7 @@ def validation_step_end(self, output):
241241
def test_step_end(self, output):
242242
return output
243243

244-
def process_dataloader(self, dataloader: Union[Iterable, DataLoader]) -> Union[Iterable, DataLoader]:
244+
def process_dataloader(self, dataloader: DataLoader) -> DataLoader:
245245
"""Wraps the dataloader if necessary.
246246
247247
Args:

0 commit comments

Comments (0)