
Commit ff74854

fix
1 parent 7af6832 commit ff74854

File tree

1 file changed: +2 −2 lines

pytorch_lightning/accelerators/accelerator_connector.py

Lines changed: 2 additions & 2 deletions
```diff
@@ -317,11 +317,11 @@ def set_distributed_mode(self):

         # DP and DDP2 cannot run without GPU
         if (self.trainer.num_gpus == 0
-                and self.trainer._distrib_type in (DistributedType.DP, DistributedType.DDP, DistributedType.DDP2)):
+                and self.trainer._distrib_type in (DistributedType.DP, DistributedType.DDP2)):
             rank_zero_warn(
                 'You requested distributed training on GPUs, but none is available, so we set backend to `ddp_cpu`.'
             )
-        # in some cases it yield in comarison None and int
+        # todo: in some cases it yield in comarison None and int
         if ((self.trainer.num_nodes and self.trainer.num_nodes > 1)
                 or (self.trainer.num_processes and self.trainer.num_processes > 1)):
             self.trainer._distrib_type = DistributedType.DDP
```
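Context for the first hunk (an editorial sketch, not part of the commit): the in-code comment says only DP and DDP2 cannot run without a GPU, so including `DistributedType.DDP` in the tuple wrongly routed CPU-only DDP requests through the `ddp_cpu` warning path. Below is a minimal, self-contained sketch of the corrected logic; the `DistributedType` stand-in enum, the `pick_backend` helper, and the hard-coded trainer values are all hypothetical substitutes for the real accelerator connector.

```python
from enum import Enum


class DistributedType(str, Enum):
    # Stand-in for pytorch_lightning's DistributedType; only the
    # members referenced by the diff are reproduced here.
    DP = 'dp'
    DDP = 'ddp'
    DDP2 = 'ddp2'


def pick_backend(num_gpus, distrib_type, num_nodes, num_processes):
    """Hypothetical condensed version of the two checks in the diff."""
    # After the fix, only DP and DDP2 trigger the CPU fallback warning;
    # plain DDP is allowed to proceed on a CPU-only machine.
    if num_gpus == 0 and distrib_type in (DistributedType.DP, DistributedType.DDP2):
        print('You requested distributed training on GPUs, but none is '
              'available, so we set backend to `ddp_cpu`.')
    # Mirrors the second hunk: multi-node or multi-process runs are
    # normalized to DDP. The truthiness guards matter because num_nodes
    # or num_processes may be None, the case the todo comment flags.
    if (num_nodes and num_nodes > 1) or (num_processes and num_processes > 1):
        distrib_type = DistributedType.DDP
    return distrib_type


# A CPU-only DDP request no longer warns and stays DDP:
print(pick_backend(num_gpus=0, distrib_type=DistributedType.DDP,
                   num_nodes=1, num_processes=2))
```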
