4 changes: 0 additions & 4 deletions pyproject.toml
@@ -65,10 +65,6 @@ skip_glob = [

# todo
"pytorch_lightning/tuner/*",


# todo
"pytorch_lightning/utilities/*",
]
profile = "black"
line_length = 120
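Dropping "pytorch_lightning/utilities/*" from isort's skip_glob means the files under pytorch_lightning/utilities/ are now covered by the repository's isort settings (profile = "black", line_length = 120). A minimal sketch, assuming isort >= 5 and its public Python API, of checking or rewriting an import block with those settings; the profile/line_length values simply mirror pyproject.toml:

    # Minimal sketch (assumes isort >= 5); settings mirror pyproject.toml.
    import isort

    messy = "from typing import Dict, Union, List, Tuple, Any\n"
    # isort.code() returns the string with its imports sorted per the given settings.
    print(isort.code(messy, profile="black", line_length=120))
    # -> from typing import Any, Dict, List, Tuple, Union

    # isort.check_file() returns True when a file already matches the configuration.
    print(isort.check_file("pytorch_lightning/utilities/data.py", profile="black", line_length=120))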
26 changes: 10 additions & 16 deletions pytorch_lightning/utilities/__init__.py
@@ -22,31 +22,25 @@
rank_zero_only,
rank_zero_warn,
)
from pytorch_lightning.utilities.enums import ( # noqa: F401
LightningEnum,
AMPType,
DistributedType,
DeviceType,
)
from pytorch_lightning.utilities.enums import AMPType, DeviceType, DistributedType, LightningEnum # noqa: F401
from pytorch_lightning.utilities.imports import ( # noqa: F401
_APEX_AVAILABLE,
_NATIVE_AMP_AVAILABLE,
_XLA_AVAILABLE,
_OMEGACONF_AVAILABLE,
_HYDRA_AVAILABLE,
_HOROVOD_AVAILABLE,
_TORCHTEXT_AVAILABLE,
_BOLTS_AVAILABLE,
_FAIRSCALE_AVAILABLE,
_RPC_AVAILABLE,
_GROUP_AVAILABLE,
_FAIRSCALE_PIPE_AVAILABLE,
_BOLTS_AVAILABLE,
_GROUP_AVAILABLE,
_HOROVOD_AVAILABLE,
_HYDRA_AVAILABLE,
_module_available,
_NATIVE_AMP_AVAILABLE,
_OMEGACONF_AVAILABLE,
_RPC_AVAILABLE,
_TORCHTEXT_AVAILABLE,
_XLA_AVAILABLE,
)
from pytorch_lightning.utilities.parsing import AttributeDict, flatten_dict, is_picklable # noqa: F401
from pytorch_lightning.utilities.xla_device import XLADeviceUtils # noqa: F401


_TPU_AVAILABLE = XLADeviceUtils.tpu_device_exists()

FLOAT16_EPSILON = numpy.finfo(numpy.float16).eps
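The reordered names above follow a case-insensitive alphabetical sort, which is why _module_available now sits between _HYDRA_AVAILABLE and _NATIVE_AMP_AVAILABLE. A small plain-Python sketch reproducing the new ordering, just to illustrate the rule isort appears to apply here:

    # Reproduces the post-isort ordering of the names in the import block above.
    names = [
        "_APEX_AVAILABLE", "_NATIVE_AMP_AVAILABLE", "_XLA_AVAILABLE", "_OMEGACONF_AVAILABLE",
        "_HYDRA_AVAILABLE", "_HOROVOD_AVAILABLE", "_TORCHTEXT_AVAILABLE", "_BOLTS_AVAILABLE",
        "_FAIRSCALE_AVAILABLE", "_RPC_AVAILABLE", "_GROUP_AVAILABLE", "_FAIRSCALE_PIPE_AVAILABLE",
        "_module_available",
    ]
    for name in sorted(names, key=str.lower):  # case-insensitive alphabetical sort
        print(name)
    # _APEX_AVAILABLE, _BOLTS_AVAILABLE, ..., _HYDRA_AVAILABLE, _module_available, _NATIVE_AMP_AVAILABLE, ...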
3 changes: 2 additions & 1 deletion pytorch_lightning/utilities/argparse.py
@@ -15,7 +15,8 @@
import os
from argparse import ArgumentParser, Namespace
from contextlib import suppress
from typing import Dict, Union, List, Tuple, Any
from typing import Any, Dict, List, Tuple, Union

from pytorch_lightning.utilities import parsing


3 changes: 2 additions & 1 deletion pytorch_lightning/utilities/data.py
@@ -13,11 +13,12 @@
# limitations under the License.

from distutils.version import LooseVersion
from typing import Union

import torch
from torch.utils.data import DataLoader, IterableDataset

from pytorch_lightning.utilities import rank_zero_warn
from typing import Union


def has_iterable_dataset(dataloader: DataLoader):
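For data.py, the change is that from typing import Union moves out of the first-party block and up into the standard-library group. A short sketch of the resulting import header, annotated with the three sections isort keeps separated by blank lines:

    from distutils.version import LooseVersion  # standard library
    from typing import Union

    import torch  # third-party
    from torch.utils.data import DataLoader, IterableDataset

    from pytorch_lightning.utilities import rank_zero_warn  # first-party (this repo)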
2 changes: 1 addition & 1 deletion pytorch_lightning/utilities/debugging.py
@@ -16,7 +16,7 @@
import time
from collections import Counter
from functools import wraps
from typing import Callable, Any, Optional
from typing import Any, Callable, Optional


def enabled_only(fn: Callable):
2 changes: 1 addition & 1 deletion pytorch_lightning/utilities/device_dtype_mixin.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Union, Optional
from typing import Optional, Union

import torch
from torch.nn import Module
3 changes: 2 additions & 1 deletion pytorch_lightning/utilities/device_parser.py
@@ -11,8 +11,9 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, List, MutableSequence, Optional, Union

import torch
from typing import Union, Any, List, Optional, MutableSequence

from pytorch_lightning.utilities import _TPU_AVAILABLE
from pytorch_lightning.utilities.exceptions import MisconfigurationException
2 changes: 1 addition & 1 deletion pytorch_lightning/utilities/model_helpers.py
@@ -14,8 +14,8 @@

from typing import Union

from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.core.datamodule import LightningDataModule
from pytorch_lightning.core.lightning import LightningModule


def is_overridden(method_name: str, model: Union[LightningModule, LightningDataModule]) -> bool:
2 changes: 1 addition & 1 deletion pytorch_lightning/utilities/parsing.py
@@ -15,7 +15,7 @@
import inspect
import pickle
from argparse import Namespace
from typing import Dict, Union, Tuple
from typing import Dict, Tuple, Union

from pytorch_lightning.utilities import rank_zero_warn
