
Commit a1e1b5a: imports
Parent: bd61368
File tree: 4 files changed, +14 -6 lines

Lines changed: 5 additions & 1 deletion

@@ -1 +1,5 @@
-from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin
+from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin  # noqa: F401
+from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin  # noqa: F401
+from pytorch_lightning.plugins.training_type.ddp import DDPPlugin  # noqa: F401
+from pytorch_lightning.plugins.training_type.ddp2 import DDP2Plugin  # noqa: F401
+from pytorch_lightning.plugins.training_type.ddp_spawn import DDPSpawnPlugin  # noqa: F401
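Note: the file name for this first diff is not shown in this view; judging from the added re-exports it is presumably the training_type package's __init__.py, but that is an assumption. Each import carries # noqa: F401 so flake8 does not flag it as unused: the names exist only to be re-exported from the package. A minimal usage sketch under that assumption:

# Sketch only (assumption: the diff above is pytorch_lightning/plugins/training_type/__init__.py).
# With the re-exports in place, callers can import the plugin classes from the
# package itself instead of reaching into the individual submodules.
from pytorch_lightning.plugins.training_type import (
    DDPPlugin,
    DDP2Plugin,
    DDPSpawnPlugin,
    ParallelPlugin,
    TrainingTypePlugin,
)

plugin_cls = DDPPlugin  # no need to know the submodule layout (ddp, ddp2, ddp_spawn, ...)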

pytorch_lightning/plugins/training_type/ddp.py

Lines changed: 4 additions & 2 deletions
@@ -12,7 +12,7 @@
 from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin
 from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment
 from pytorch_lightning.distributed import LightningDistributed
-from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel, unwrap_lightning_module
+from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
 from pytorch_lightning.utilities import _HYDRA_AVAILABLE
 from pytorch_lightning.utilities.distributed import find_free_network_port, rank_zero_only, sync_ddp_if_available
 from pytorch_lightning.utilities.exceptions import MisconfigurationException

@@ -60,7 +60,9 @@ def root_device(self):
     @property
     def lightning_module(self):
         # the model may not be wrapped with DistributedDataParallel if calling this too early
-        return unwrap_lightning_module(self._model)
+        # fixme: uncomment when this class will actually be used
+        # return unwrap_lightning_module(self._model)
+        pass

     @property
     def distributed_sampler_kwargs(self):
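In this plugin (and in DDPSpawnPlugin below) the lightning_module property used to strip the LightningDistributedDataParallel wrapper via unwrap_lightning_module; the commit removes that import and comments the call out, so the property currently returns None via the bare pass until the class is actually wired up. A rough sketch of the unwrapping behaviour the removed call relied on, not the library's exact implementation:

# Rough sketch, not pytorch_lightning's actual unwrap_lightning_module:
# peel DataParallel-style wrappers (which expose the wrapped model as `.module`)
# until the underlying LightningModule is reached.
def unwrap_sketch(model):
    unwrapped = model
    while hasattr(unwrapped, "module"):
        unwrapped = unwrapped.module
    return unwrapped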

pytorch_lightning/plugins/training_type/ddp_spawn.py

Lines changed: 4 additions & 2 deletions
@@ -10,7 +10,7 @@
 from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin
 from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment
 from pytorch_lightning.distributed.dist import LightningDistributed
-from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel, unwrap_lightning_module
+from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
 from pytorch_lightning.utilities.cloud_io import atomic_save, load as pl_load
 from pytorch_lightning.utilities.distributed import find_free_network_port, rank_zero_only
 from pytorch_lightning.utilities.distributed import sync_ddp_if_available, rank_zero_warn

@@ -52,7 +52,9 @@ def root_device(self):
     @property
     def lightning_module(self):
         # the model may not be wrapped with DistributedDataParallel if calling this too early
-        return unwrap_lightning_module(self._model)
+        # fixme: uncomment when this class will actually be used
+        # return unwrap_lightning_module(self._model)
+        pass

     @property
     def distributed_sampler_kwargs(self):

pytorch_lightning/plugins/training_type/parallel.py

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@

 import torch

-from pytorch_lightning.plugins .training_type.training_type_plugin import TrainingTypePlugin
+from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin
 from pytorch_lightning.cluster_environments import ClusterEnvironment
 from pytorch_lightning.core import LightningModule
 from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel

0 commit comments