We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent eb65fb6 commit e21ebb1 (Copy full SHA for e21ebb1)
pytorch_lightning/plugins/training_type/ddp.py
@@ -9,7 +9,7 @@
9
import torch.distributed as torch_distrib
10
11
from pytorch_lightning import _logger as log
12
-from pytorch_lightning.plugins .training_type.parallel import ParallelPlugin
+from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin
13
from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment
14
from pytorch_lightning.distributed import LightningDistributed
15
from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel, unwrap_lightning_module
0 commit comments