1 file changed (+2 -2 lines): pytorch_lightning/accelerators

```diff
@@ -317,11 +317,11 @@ def set_distributed_mode(self):
 
         # DP and DDP2 cannot run without GPU
         if (self.trainer.num_gpus == 0
-                and self.trainer._distrib_type in (DistributedType.DP, DistributedType.DDP, DistributedType.DDP2)):
+                and self.trainer._distrib_type in (DistributedType.DP, DistributedType.DDP2)):
             rank_zero_warn(
                 'You requested distributed training on GPUs, but none is available, so we set backend to `ddp_cpu`.'
             )
-            # in some cases the comparison yields None and int
+            # todo: in some cases the comparison yields None and int
             if ((self.trainer.num_nodes and self.trainer.num_nodes > 1)
                     or (self.trainer.num_processes and self.trainer.num_processes > 1)):
                 self.trainer._distrib_type = DistributedType.DDP
```
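
The first hunk narrows the no-GPU fallback: `DistributedType.DDP` is dropped from the membership test, so only DP and DDP2 trigger the `ddp_cpu` warning when no GPUs are available. A minimal standalone sketch of the corrected guard, using a stand-in enum and a hypothetical helper `needs_gpu_fallback` (neither is Lightning API):

```python
from enum import Enum

# Stand-in for pytorch_lightning's DistributedType enum (illustration only).
class DistributedType(str, Enum):
    DP = "dp"
    DDP = "ddp"
    DDP2 = "ddp2"

def needs_gpu_fallback(num_gpus: int, distrib_type: DistributedType) -> bool:
    # After the fix, DDP no longer matches: only DP and DDP2 are checked.
    return num_gpus == 0 and distrib_type in (DistributedType.DP, DistributedType.DDP2)

print(needs_gpu_fallback(0, DistributedType.DDP))   # False: DDP no longer triggers the warning
print(needs_gpu_fallback(0, DistributedType.DDP2))  # True: warn and fall back to ddp_cpu
```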
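
The second hunk only tags the existing comment as a `todo`, but the underlying point is worth spelling out: `num_nodes` and `num_processes` may be `None`, and in Python 3 an ordering comparison between `None` and an `int` raises `TypeError`, so each value is truth-tested before the `> 1` comparison. A hedged sketch with standalone variables (not the real trainer attributes):

```python
num_nodes = None  # hypothetical: the trainer may leave this unset

try:
    num_nodes > 1  # the unguarded comparison the todo warns about
except TypeError as err:
    print(f"unguarded comparison fails: {err}")

# The guarded form from the diff: `and` short-circuits on a falsy value,
# so the `> 1` comparison never sees None.
if num_nodes and num_nodes > 1:
    print("multi-node run: force DistributedType.DDP")
else:
    print("single node or unset: keep the current _distrib_type")
```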