pytorch_lightning/plugins/training_type: 4 files changed, +14 -6 lines

@@ -1 +1,5 @@
-from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin
+from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin  # noqa: F401
+from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin  # noqa: F401
+from pytorch_lightning.plugins.training_type.ddp import DDPPlugin  # noqa: F401
+from pytorch_lightning.plugins.training_type.ddp2 import DDP2Plugin  # noqa: F401
+from pytorch_lightning.plugins.training_type.ddp_spawn import DDPSpawnPlugin  # noqa: F401
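
The hunk above appears to be the package-level __init__ for pytorch_lightning/plugins/training_type (an assumption based on the bare re-export imports). Each import carries # noqa: F401 so that flake8 does not flag it as unused: the names are imported only to publish them under the package namespace. A minimal usage sketch, assuming a pytorch_lightning version in which these modules exist:

    # With the re-exports above in place (assumed to live in the package's
    # __init__.py), callers can import the plugins from the package rather
    # than from the individual submodules:
    from pytorch_lightning.plugins.training_type import DDPPlugin, DDPSpawnPlugin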
@@ -12,7 +12,7 @@
 from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin
 from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment
 from pytorch_lightning.distributed import LightningDistributed
-from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel, unwrap_lightning_module
+from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
 from pytorch_lightning.utilities import _HYDRA_AVAILABLE
 from pytorch_lightning.utilities.distributed import find_free_network_port, rank_zero_only, sync_ddp_if_available
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
@@ -60,7 +60,9 @@ def root_device(self):
     @property
     def lightning_module(self):
         # the model may not be wrapped with DistributedDataParallel if calling this too early
-        return unwrap_lightning_module(self._model)
+        # fixme: uncomment when this class is actually used
+        # return unwrap_lightning_module(self._model)
+        pass

     @property
     def distributed_sampler_kwargs(self):
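
The import change and the property change above remove the last reference to unwrap_lightning_module from this file; with the call commented out, the lightning_module property simply falls through. The same change is repeated in the next file's diff. For orientation, a rough sketch of what an unwrap helper of this kind typically does, purely illustrative and not the actual pytorch_lightning implementation:

    # Illustrative sketch only; the real unwrap_lightning_module lives in
    # pytorch_lightning.overrides.data_parallel and may differ in detail.
    def unwrap_lightning_module(wrapped_model):
        model = wrapped_model
        # DistributedDataParallel-style wrappers keep the user's module under
        # the .module attribute; unwrap until the bare LightningModule remains.
        while hasattr(model, "module"):
            model = model.module
        return model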
@@ -10,7 +10,7 @@
 from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin
 from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment
 from pytorch_lightning.distributed.dist import LightningDistributed
-from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel, unwrap_lightning_module
+from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
 from pytorch_lightning.utilities.cloud_io import atomic_save, load as pl_load
 from pytorch_lightning.utilities.distributed import find_free_network_port, rank_zero_only
 from pytorch_lightning.utilities.distributed import sync_ddp_if_available, rank_zero_warn
@@ -52,7 +52,9 @@ def root_device(self):
     @property
     def lightning_module(self):
         # the model may not be wrapped with DistributedDataParallel if calling this too early
-        return unwrap_lightning_module(self._model)
+        # fixme: uncomment when this class is actually used
+        # return unwrap_lightning_module(self._model)
+        pass

     @property
     def distributed_sampler_kwargs(self):
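
The property bodies introduced above end in a bare pass, and a function or property with no return statement implicitly returns None, so anything reading lightning_module from these plugins will get None until the fixme is resolved. A minimal, self-contained illustration in plain Python (not Lightning code):

    class _Demo:
        @property
        def lightning_module(self):
            pass  # no return statement, so accessing the property yields None

    assert _Demo().lightning_module is None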
@@ -4,7 +4,7 @@

 import torch

-from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin
+from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin
 from pytorch_lightning.cluster_environments import ClusterEnvironment
 from pytorch_lightning.core import LightningModule
 from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel