Skip to content

Commit 05f5825

Browse files
committed
isort
1 parent a1e1b5a commit 05f5825

File tree

5 files changed

+15
-10
lines changed

5 files changed

+15
-10
lines changed
Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin # noqa: F401
2-
from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin # noqa: F401
31
from pytorch_lightning.plugins.training_type.ddp import DDPPlugin # noqa: F401
42
from pytorch_lightning.plugins.training_type.ddp2 import DDP2Plugin # noqa: F401
53
from pytorch_lightning.plugins.training_type.ddp_spawn import DDPSpawnPlugin # noqa: F401
4+
from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin # noqa: F401
5+
from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin # noqa: F401

pytorch_lightning/plugins/training_type/ddp.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,18 +9,18 @@
99
import torch.distributed as torch_distrib
1010

1111
from pytorch_lightning import _logger as log
12-
from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin
1312
from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment
1413
from pytorch_lightning.distributed import LightningDistributed
1514
from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
15+
from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin
1616
from pytorch_lightning.utilities import _HYDRA_AVAILABLE
1717
from pytorch_lightning.utilities.distributed import find_free_network_port, rank_zero_only, sync_ddp_if_available
1818
from pytorch_lightning.utilities.exceptions import MisconfigurationException
1919
from pytorch_lightning.utilities.seed import seed_everything
2020

2121
if _HYDRA_AVAILABLE:
22-
from hydra.utils import to_absolute_path, get_original_cwd
2322
from hydra.core.hydra_config import HydraConfig
23+
from hydra.utils import get_original_cwd, to_absolute_path
2424

2525
if torch.distributed.is_available():
2626
from torch.distributed import ReduceOp

pytorch_lightning/plugins/training_type/ddp2.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import torch
22

3-
from pytorch_lightning.plugins.training_type.ddp import DDPPlugin
43
from pytorch_lightning.core.step_result import Result
4+
from pytorch_lightning.plugins.training_type.ddp import DDPPlugin
55

66

77
class DDP2Plugin(DDPPlugin):

pytorch_lightning/plugins/training_type/ddp_spawn.py

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -7,13 +7,18 @@
77
import torch.multiprocessing as mp
88

99
from pytorch_lightning import _logger as log
10-
from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin
1110
from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment
1211
from pytorch_lightning.distributed.dist import LightningDistributed
1312
from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
14-
from pytorch_lightning.utilities.cloud_io import atomic_save, load as pl_load
15-
from pytorch_lightning.utilities.distributed import find_free_network_port, rank_zero_only
16-
from pytorch_lightning.utilities.distributed import sync_ddp_if_available, rank_zero_warn
13+
from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin
14+
from pytorch_lightning.utilities.cloud_io import atomic_save
15+
from pytorch_lightning.utilities.cloud_io import load as pl_load
16+
from pytorch_lightning.utilities.distributed import (
17+
find_free_network_port,
18+
rank_zero_only,
19+
rank_zero_warn,
20+
sync_ddp_if_available,
21+
)
1722
from pytorch_lightning.utilities.seed import seed_everything
1823

1924
if torch.distributed.is_available():

pytorch_lightning/plugins/training_type/parallel.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,10 @@
44

55
import torch
66

7-
from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin
87
from pytorch_lightning.cluster_environments import ClusterEnvironment
98
from pytorch_lightning.core import LightningModule
109
from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
10+
from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin
1111

1212
if torch.distributed.is_available():
1313
from torch.distributed import ReduceOp

0 commit comments

Comments (0)