4 changes: 0 additions & 4 deletions pyproject.toml
@@ -23,10 +23,6 @@ known_first_party = [
"tests",
]
skip_glob = [
# todo
"pytorch_lightning/accelerators/*",


# todo
"pytorch_lightning/core/*",

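Note: dropping these skip_glob entries puts pytorch_lightning/accelerators/* under isort's import-order check (e.g. isort --check-only pytorch_lightning/accelerators/), which is what forces the reorderings in the files below. A minimal sketch of the same check through isort's Python API, assuming isort >= 5; the exact CI invocation may differ:

    # Sketch only, assuming isort >= 5; the project's CI may call the CLI instead.
    import isort

    # Returns True when the file's imports are already correctly sorted.
    ok = isort.check_file(
        "pytorch_lightning/accelerators/__init__.py",
        show_diff=True,  # print the would-be changes when the check fails
    )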
10 changes: 5 additions & 5 deletions pytorch_lightning/accelerators/__init__.py
@@ -11,15 +11,15 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from pytorch_lightning.accelerators.accelerator import Accelerator  # noqa: F401
 from pytorch_lightning.accelerators.cpu_accelerator import CPUAccelerator  # noqa: F401
 from pytorch_lightning.accelerators.ddp2_accelerator import DDP2Accelerator  # noqa: F401
 from pytorch_lightning.accelerators.ddp_accelerator import DDPAccelerator  # noqa: F401
-from pytorch_lightning.accelerators.ddp_spawn_accelerator import DDPSpawnAccelerator  # noqa: F401
+from pytorch_lightning.accelerators.ddp_cpu_hpc_accelerator import DDPCPUHPCAccelerator  # noqa: F401
 from pytorch_lightning.accelerators.ddp_cpu_spawn_accelerator import DDPCPUSpawnAccelerator  # noqa: F401
+from pytorch_lightning.accelerators.ddp_hpc_accelerator import DDPHPCAccelerator  # noqa: F401
+from pytorch_lightning.accelerators.ddp_spawn_accelerator import DDPSpawnAccelerator  # noqa: F401
 from pytorch_lightning.accelerators.dp_accelerator import DataParallelAccelerator  # noqa: F401
 from pytorch_lightning.accelerators.gpu_accelerator import GPUAccelerator  # noqa: F401
-from pytorch_lightning.accelerators.tpu_accelerator import TPUAccelerator  # noqa: F401
 from pytorch_lightning.accelerators.horovod_accelerator import HorovodAccelerator  # noqa: F401
-from pytorch_lightning.accelerators.ddp_hpc_accelerator import DDPHPCAccelerator  # noqa: F401
-from pytorch_lightning.accelerators.ddp_cpu_hpc_accelerator import DDPCPUHPCAccelerator  # noqa: F401
-from pytorch_lightning.accelerators.accelerator import Accelerator  # noqa: F401
+from pytorch_lightning.accelerators.tpu_accelerator import TPUAccelerator  # noqa: F401
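The doubled-looking entries above are just the five imports that moved: isort orders the submodule paths lexicographically, so accelerator comes first and ddp_cpu_* precedes ddp_hpc_* and ddp_spawn_*. A plain-Python illustration, not part of the PR:

    # Illustration only: the ordering isort enforces on these module names.
    modules = [
        "ddp_spawn_accelerator",
        "ddp_cpu_hpc_accelerator",
        "ddp_cpu_spawn_accelerator",
        "ddp_hpc_accelerator",
    ]
    print(sorted(modules))
    # ['ddp_cpu_hpc_accelerator', 'ddp_cpu_spawn_accelerator',
    #  'ddp_hpc_accelerator', 'ddp_spawn_accelerator']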
10 changes: 8 additions & 2 deletions pytorch_lightning/accelerators/accelerator_connector.py
@@ -15,13 +15,19 @@

 import torch
 
-from pytorch_lightning.utilities import _HOROVOD_AVAILABLE, DeviceType, DistributedType
 from pytorch_lightning import _logger as log
 from pytorch_lightning import accelerators
 from pytorch_lightning.accelerators.accelerator import Accelerator
 from pytorch_lightning.cluster_environments.slurm_environment import SLURMEnvironment
 from pytorch_lightning.cluster_environments.torchelastic_environment import TorchElasticEnvironment
-from pytorch_lightning.utilities import device_parser, rank_zero_only, _TPU_AVAILABLE
+from pytorch_lightning.utilities import (
+    _HOROVOD_AVAILABLE,
+    _TPU_AVAILABLE,
+    device_parser,
+    DeviceType,
+    DistributedType,
+    rank_zero_only,
+)
 from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_warn
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 
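Here the two imports from pytorch_lightning.utilities are merged into a single parenthesized block, with names compared case-insensitively (hence device_parser before DeviceType). A hedged sketch of reproducing the merge with isort's Python API; the kwargs are guesses at the relevant project settings, which actually live in pyproject.toml:

    # Sketch only, assuming isort >= 5; kwargs approximate the project's config.
    import isort

    snippet = (
        "from pytorch_lightning.utilities import _HOROVOD_AVAILABLE, DeviceType, DistributedType\n"
        "from pytorch_lightning.utilities import device_parser, rank_zero_only, _TPU_AVAILABLE\n"
    )
    print(isort.code(
        snippet,
        order_by_type=False,  # flat, case-insensitive order instead of CONSTANT/Class/function groups
        line_length=120,      # guess; the merged import exceeds it, so isort wraps
        multi_line_output=3,  # vertical hanging indent, as in the diff above
        include_trailing_comma=True,
    ))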
2 changes: 1 addition & 1 deletion pytorch_lightning/accelerators/cpu_accelerator.py
@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from typing import Any, Optional, Union, Callable
+from typing import Any, Callable, Optional, Union
 
 import torch
 
2 changes: 1 addition & 1 deletion pytorch_lightning/accelerators/horovod_accelerator.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from contextlib import ExitStack
-from typing import Any, Optional, Union, Callable
+from typing import Any, Callable, Optional, Union
 
 import torch
 from torch.optim.lr_scheduler import _LRScheduler
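Both typing fixes are the same one-line change: the imported names are simply put in alphabetical order. Illustration only, not part of the PR:

    # Why Callable moves between Any and Optional.
    names = ["Any", "Optional", "Union", "Callable"]
    print(sorted(names))
    # ['Any', 'Callable', 'Optional', 'Union']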