8 changes: 7 additions & 1 deletion src/pytorch_lightning/CHANGELOG.md
@@ -27,7 +27,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Deprecated `amp_level` from `Trainer` in favor of passing it explicitly via precision plugin ([#13898](https://github.com/Lightning-AI/lightning/pull/13898))


-
- Deprecated the calls to `pytorch_lightning.utilities.meta` functions in favor of built-in https://github.com/pytorch/torchdistx support ([#13868](https://github.com/Lightning-AI/lightning/pull/13868))


### Removed
@@ -41,6 +41,12 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Removed the deprecated `DDP2Strategy` ([#14026](https://github.com/Lightning-AI/lightning/pull/14026))


- Removed the deprecated `DistributedType` and `DeviceType` enum classes ([#14045](https://github.com/Lightning-AI/lightning/pull/14045))


- Removed the experimental `pytorch_lightning.utilities.meta` functions in favor of built-in https://github.com/pytorch/torchdistx support ([#13868](https://github.com/Lightning-AI/lightning/pull/13868))


### Fixed

- Cast only floating point tensors to fp16 with IPUs ([#13983](https://github.com/Lightning-AI/lightning/pull/13983))
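The two torchdistx entries above track the same change: Lightning's experimental `pytorch_lightning.utilities.meta` helpers are dropped in favor of PyTorch's deferred-initialization support. Below is a minimal sketch of the replacement workflow, assuming `torchdistx` is installed; `deferred_init` and `materialize_module` are the names torchdistx exposes for this, and the layer sizes are arbitrary.

```python
import torch
from torchdistx.deferred_init import deferred_init, materialize_module

# Build the module on "fake" storage: no real memory is allocated for the
# parameters yet, so even very large models are cheap to construct.
model = deferred_init(torch.nn.Linear, 50_000, 50_000)

# Allocate and initialize the actual parameters only when they are needed,
# e.g. after a sharding strategy has decided where each weight should live.
materialize_module(model)
print(model.weight.shape)  # torch.Size([50000, 50000])
```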
1 change: 0 additions & 1 deletion src/pytorch_lightning/utilities/__init__.py
@@ -21,7 +21,6 @@
_AcceleratorType,
_StrategyType,
AMPType,
DistributedType,
GradClipAlgorithmType,
LightningEnum,
)
95 changes: 1 addition & 94 deletions src/pytorch_lightning/utilities/enums.py
@@ -15,11 +15,9 @@
from __future__ import annotations

import os
from enum import Enum, EnumMeta
from typing import Any
from enum import Enum

from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.warnings import rank_zero_deprecation


class LightningEnum(str, Enum):
@@ -43,37 +41,6 @@ def __hash__(self) -> int:
return hash(self.value.lower())


class _DeprecatedEnumMeta(EnumMeta):
"""Enum that calls `deprecate()` whenever a member is accessed.

Adapted from: https://stackoverflow.com/a/62309159/208880
"""

def __getattribute__(cls, name: str) -> Any:
obj = super().__getattribute__(name)
# ignore __dunder__ names -- prevents potential recursion errors
if not (name.startswith("__") and name.endswith("__")) and isinstance(obj, Enum):
obj.deprecate()
return obj

def __getitem__(cls, name: str) -> Any:
member: _DeprecatedEnumMeta = super().__getitem__(name)
member.deprecate()
return member

def __call__(cls, *args: Any, **kwargs: Any) -> Any:
obj = super().__call__(*args, **kwargs)
if isinstance(obj, Enum):
obj.deprecate()
return obj


class _DeprecatedEnum(LightningEnum, metaclass=_DeprecatedEnumMeta):
"""_DeprecatedEnum calls an enum's `deprecate()` method on member access."""

pass
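
The `_DeprecatedEnumMeta`/`_DeprecatedEnum` pair removed here is a reusable pattern: an `EnumMeta` subclass can intercept every route to a member. Here is a self-contained sketch of the same idea, using the standard `warnings` module instead of Lightning's `rank_zero_deprecation`; the `Color` enum and `_WarnOnAccessMeta` name are illustrative only.

```python
import warnings
from enum import Enum, EnumMeta
from typing import Any


class _WarnOnAccessMeta(EnumMeta):
    """Emit a DeprecationWarning however a member is reached."""

    def __getattribute__(cls, name: str) -> Any:
        obj = super().__getattribute__(name)
        # skip __dunder__ lookups -- prevents recursion during class creation
        if not (name.startswith("__") and name.endswith("__")) and isinstance(obj, Enum):
            warnings.warn(f"{obj!r} is deprecated", DeprecationWarning, stacklevel=2)
        return obj

    def __getitem__(cls, name: str) -> Any:
        member = super().__getitem__(name)
        warnings.warn(f"{member!r} is deprecated", DeprecationWarning, stacklevel=2)
        return member

    def __call__(cls, *args: Any, **kwargs: Any) -> Any:
        obj = super().__call__(*args, **kwargs)
        if isinstance(obj, Enum):
            warnings.warn(f"{obj!r} is deprecated", DeprecationWarning, stacklevel=2)
        return obj


class Color(Enum, metaclass=_WarnOnAccessMeta):
    RED = "red"


Color.RED     # attribute access -> warns via __getattribute__
Color["RED"]  # item access -> warns via __getitem__
Color("red")  # value lookup -> warns via __call__
```
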


class AMPType(LightningEnum):
"""Type of Automatic Mixed Precission used for training.

@@ -110,66 +77,6 @@ def supported_types() -> list[str]:
return [x.value for x in PrecisionType]


class DistributedType(_DeprecatedEnum):
"""Define type of training strategy.

Deprecated since v1.6.0 and will be removed in v1.8.0.

Use `_StrategyType` instead.
"""

DP = "dp"
DDP = "ddp"
DDP_SPAWN = "ddp_spawn"
TPU_SPAWN = "tpu_spawn"
DEEPSPEED = "deepspeed"
HOROVOD = "horovod"
DDP_SHARDED = "ddp_sharded"
DDP_SHARDED_SPAWN = "ddp_sharded_spawn"
DDP_FULLY_SHARDED = "ddp_fully_sharded"
HPU_PARALLEL = "hpu_parallel"

@staticmethod
def interactive_compatible_types() -> list[DistributedType]:
"""Returns a list containing interactive compatible DistributeTypes."""
return [
DistributedType.DP,
DistributedType.DDP_SPAWN,
DistributedType.DDP_SHARDED_SPAWN,
DistributedType.TPU_SPAWN,
]

def is_interactive_compatible(self) -> bool:
"""Returns whether self is interactive compatible."""
return self in DistributedType.interactive_compatible_types()

def deprecate(self) -> None:
rank_zero_deprecation(
"`DistributedType` Enum has been deprecated in v1.6 and will be removed in v1.8."
f" Use the string value `{self.value!r}` instead."
)


class DeviceType(_DeprecatedEnum):
"""Define Device type by its nature - accelerators.

Deprecated since v1.6.0 and will be removed in v1.8.0.

Use `_AcceleratorType` instead.
"""

CPU = "CPU"
GPU = "GPU"
IPU = "IPU"
TPU = "TPU"

def deprecate(self) -> None:
rank_zero_deprecation(
"`DeviceType` Enum has been deprecated in v1.6 and will be removed in v1.8."
f" Use the string value `{self.value!r}` instead."
)


class GradClipAlgorithmType(LightningEnum):
"""Define gradient_clip_algorithm types - training-tricks.
NORM type means "clipping gradients by norm". This computed over all model parameters together.
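With `DistributedType` and `DeviceType` removed, callers pass the plain string values the enums wrapped, exactly as the deprecation messages advised. A sketch of the post-removal usage follows; the argument combination is illustrative, and strings such as `"gpu"` or `"tpu"` replace the former `DeviceType` members in the same way.

```python
from pytorch_lightning import Trainer

# Formerly DistributedType.DDP and DeviceType.CPU; now plain strings.
trainer = Trainer(strategy="ddp", accelerator="cpu", devices=2)
```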
13 changes: 0 additions & 13 deletions tests/tests_pytorch/deprecated_api/test_remove_1-8.py
@@ -36,26 +36,13 @@
from pytorch_lightning.trainer.states import RunningStage
from pytorch_lightning.utilities import device_parser
from pytorch_lightning.utilities.apply_func import move_data_to_device
from pytorch_lightning.utilities.enums import DeviceType, DistributedType
from pytorch_lightning.utilities.imports import _TORCHTEXT_LEGACY
from pytorch_lightning.utilities.rank_zero import rank_zero_only, rank_zero_warn
from tests_pytorch.deprecated_api import no_deprecated_call
from tests_pytorch.helpers.runif import RunIf
from tests_pytorch.helpers.torchtext_utils import get_dummy_torchtext_data_iterator


def test_v1_8_0_deprecated_distributed_type_enum():

with pytest.deprecated_call(match="has been deprecated in v1.6 and will be removed in v1.8."):
_ = DistributedType.DDP


def test_v1_8_0_deprecated_device_type_enum():

with pytest.deprecated_call(match="has been deprecated in v1.6 and will be removed in v1.8."):
_ = DeviceType.CPU


@pytest.mark.skipif(not _TORCHTEXT_LEGACY, reason="torchtext.legacy is deprecated.")
def test_v1_8_0_deprecated_torchtext_batch():

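The deleted tests above relied on `pytest.deprecated_call`, which fails the test unless the enclosed code emits a `DeprecationWarning` (or `PendingDeprecationWarning`). A minimal sketch of that pattern, with a hypothetical `old_api` stand-in rather than a Lightning function:

```python
import warnings

import pytest


def old_api() -> int:
    warnings.warn("old_api is deprecated", DeprecationWarning)
    return 1


def test_old_api_warns() -> None:
    # deprecated_call fails the test if no deprecation warning is emitted;
    # `match` additionally checks the warning message against a regex.
    with pytest.deprecated_call(match="is deprecated"):
        assert old_api() == 1
```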