5 files changed: 15 additions, 10 deletions. All changed files live under pytorch_lightning/plugins/training_type.

pytorch_lightning/plugins/training_type/__init__.py

- from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin  # noqa: F401
- from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin  # noqa: F401
  from pytorch_lightning.plugins.training_type.ddp import DDPPlugin  # noqa: F401
  from pytorch_lightning.plugins.training_type.ddp2 import DDP2Plugin  # noqa: F401
  from pytorch_lightning.plugins.training_type.ddp_spawn import DDPSpawnPlugin  # noqa: F401
+ from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin  # noqa: F401
+ from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin  # noqa: F401
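The __init__.py hunk above only reorders the re-exports; nothing is added to or removed from the subpackage's public surface, and the # noqa: F401 comments keep flake8 from flagging the re-exported names as unused imports. A minimal sketch of how downstream code can import these plugins, assuming a pytorch-lightning build that ships this module layout:

    # Illustrative only; assumes the pytorch_lightning.plugins.training_type
    # layout shown in the diff above.
    from pytorch_lightning.plugins.training_type import (
        DDP2Plugin,
        DDPPlugin,
        DDPSpawnPlugin,
        ParallelPlugin,
        TrainingTypePlugin,
    )

    # The ddp2.py hunk below shows DDP2Plugin subclassing DDPPlugin, so the
    # re-export order in __init__.py has no effect on that relationship.
    assert issubclass(DDP2Plugin, DDPPlugin)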
pytorch_lightning/plugins/training_type/ddp.py

  import torch.distributed as torch_distrib

  from pytorch_lightning import _logger as log
- from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin
  from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment
  from pytorch_lightning.distributed import LightningDistributed
  from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
+ from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin
  from pytorch_lightning.utilities import _HYDRA_AVAILABLE
  from pytorch_lightning.utilities.distributed import find_free_network_port, rank_zero_only, sync_ddp_if_available
  from pytorch_lightning.utilities.exceptions import MisconfigurationException
  from pytorch_lightning.utilities.seed import seed_everything

  if _HYDRA_AVAILABLE:
-     from hydra.utils import to_absolute_path, get_original_cwd
      from hydra.core.hydra_config import HydraConfig
+     from hydra.utils import get_original_cwd, to_absolute_path

  if torch.distributed.is_available():
      from torch.distributed import ReduceOp
pytorch_lightning/plugins/training_type/ddp2.py

  import torch

- from pytorch_lightning.plugins.training_type.ddp import DDPPlugin
  from pytorch_lightning.core.step_result import Result
+ from pytorch_lightning.plugins.training_type.ddp import DDPPlugin


  class DDP2Plugin(DDPPlugin):
pytorch_lightning/plugins/training_type/ddp_spawn.py

  import torch.multiprocessing as mp

  from pytorch_lightning import _logger as log
- from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin
  from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment
  from pytorch_lightning.distributed.dist import LightningDistributed
  from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
- from pytorch_lightning.utilities.cloud_io import atomic_save, load as pl_load
- from pytorch_lightning.utilities.distributed import find_free_network_port, rank_zero_only
- from pytorch_lightning.utilities.distributed import sync_ddp_if_available, rank_zero_warn
+ from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin
+ from pytorch_lightning.utilities.cloud_io import atomic_save
+ from pytorch_lightning.utilities.cloud_io import load as pl_load
+ from pytorch_lightning.utilities.distributed import (
+     find_free_network_port,
+     rank_zero_only,
+     rank_zero_warn,
+     sync_ddp_if_available,
+ )
  from pytorch_lightning.utilities.seed import seed_everything

  if torch.distributed.is_available():
pytorch_lightning/plugins/training_type/parallel.py

  import torch

- from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin
  from pytorch_lightning.cluster_environments import ClusterEnvironment
  from pytorch_lightning.core import LightningModule
  from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
+ from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin

  if torch.distributed.is_available():
      from torch.distributed import ReduceOp