Skip to content

Commit 1ff6b18

Browse files
authored
Fix pre-commit isort failure on pytorch_lightning/accelerators (#5503)
Remove the module from isort's skip list in pyproject.toml and fix import-sorting failures in: - pytorch_lightning/accelerators/*.py
1 parent 2fe36c7 commit 1ff6b18

File tree

5 files changed

+15
-13
lines changed

5 files changed

+15
-13
lines changed

pyproject.toml

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -23,10 +23,6 @@ known_first_party = [
2323
"tests",
2424
]
2525
skip_glob = [
26-
# todo
27-
"pytorch_lightning/accelerators/*",
28-
29-
3026
# todo
3127
"pytorch_lightning/core/*",
3228

pytorch_lightning/accelerators/__init__.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -11,15 +11,15 @@
1111
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
14+
from pytorch_lightning.accelerators.accelerator import Accelerator # noqa: F401
1415
from pytorch_lightning.accelerators.cpu_accelerator import CPUAccelerator # noqa: F401
1516
from pytorch_lightning.accelerators.ddp2_accelerator import DDP2Accelerator # noqa: F401
1617
from pytorch_lightning.accelerators.ddp_accelerator import DDPAccelerator # noqa: F401
17-
from pytorch_lightning.accelerators.ddp_spawn_accelerator import DDPSpawnAccelerator # noqa: F401
18+
from pytorch_lightning.accelerators.ddp_cpu_hpc_accelerator import DDPCPUHPCAccelerator # noqa: F401
1819
from pytorch_lightning.accelerators.ddp_cpu_spawn_accelerator import DDPCPUSpawnAccelerator # noqa: F401
20+
from pytorch_lightning.accelerators.ddp_hpc_accelerator import DDPHPCAccelerator # noqa: F401
21+
from pytorch_lightning.accelerators.ddp_spawn_accelerator import DDPSpawnAccelerator # noqa: F401
1922
from pytorch_lightning.accelerators.dp_accelerator import DataParallelAccelerator # noqa: F401
2023
from pytorch_lightning.accelerators.gpu_accelerator import GPUAccelerator # noqa: F401
21-
from pytorch_lightning.accelerators.tpu_accelerator import TPUAccelerator # noqa: F401
2224
from pytorch_lightning.accelerators.horovod_accelerator import HorovodAccelerator # noqa: F401
23-
from pytorch_lightning.accelerators.ddp_hpc_accelerator import DDPHPCAccelerator # noqa: F401
24-
from pytorch_lightning.accelerators.ddp_cpu_hpc_accelerator import DDPCPUHPCAccelerator # noqa: F401
25-
from pytorch_lightning.accelerators.accelerator import Accelerator # noqa: F401
25+
from pytorch_lightning.accelerators.tpu_accelerator import TPUAccelerator # noqa: F401

pytorch_lightning/accelerators/accelerator_connector.py

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,13 +15,19 @@
1515

1616
import torch
1717

18-
from pytorch_lightning.utilities import _HOROVOD_AVAILABLE, DeviceType, DistributedType
1918
from pytorch_lightning import _logger as log
2019
from pytorch_lightning import accelerators
2120
from pytorch_lightning.accelerators.accelerator import Accelerator
2221
from pytorch_lightning.cluster_environments.slurm_environment import SLURMEnvironment
2322
from pytorch_lightning.cluster_environments.torchelastic_environment import TorchElasticEnvironment
24-
from pytorch_lightning.utilities import device_parser, rank_zero_only, _TPU_AVAILABLE
23+
from pytorch_lightning.utilities import (
24+
_HOROVOD_AVAILABLE,
25+
_TPU_AVAILABLE,
26+
device_parser,
27+
DeviceType,
28+
DistributedType,
29+
rank_zero_only,
30+
)
2531
from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_warn
2632
from pytorch_lightning.utilities.exceptions import MisconfigurationException
2733

pytorch_lightning/accelerators/cpu_accelerator.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
14-
from typing import Any, Optional, Union, Callable
14+
from typing import Any, Callable, Optional, Union
1515

1616
import torch
1717

pytorch_lightning/accelerators/horovod_accelerator.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414
from contextlib import ExitStack
15-
from typing import Any, Optional, Union, Callable
15+
from typing import Any, Callable, Optional, Union
1616

1717
import torch
1818
from torch.optim.lr_scheduler import _LRScheduler

0 commit comments

Comments (0)