@@ -61,20 +61,25 @@ def _worker_check(self, dataloader: DataLoader, name: str) -> None:
         using_spawn = self.accelerator_connector.distributed_backend == "ddp_spawn"
         if is_dataloader and not on_windows:
             if dataloader.num_workers > 0 and using_spawn:
+                # checks for the attr persistent_workers available in pytorch >= 1.7
                 if hasattr(dataloader, "persistent_workers"):
                     if not dataloader.persistent_workers:
                         rank_zero_warn(
-                            'num_workers>0, persistent_workers=False, and accelerator=ddp_spawn may result in data loading bottlenecks.'
-                            ' Consider setting persistent_workers=True (this is a limitation of Python .spawn() and PyTorch)'
+                            'num_workers>0, persistent_workers=False, and accelerator=ddp_spawn'
+                            ' may result in data loading bottlenecks.'
+                            ' Consider setting persistent_workers=True'
+                            ' (this is a limitation of Python .spawn() and PyTorch)'
                         )
                 else:
                     rank_zero_warn(
-                        'num_workers>0 and accelerator=ddp_spawn do not mix well and may result in data loading bottlenecks.'
-                        ' Consider setting accelerator=ddp to use num_workers>0 (this is a limitation of Python .spawn() and PyTorch)'
+                        'num_workers>0 and accelerator=ddp_spawn do not mix well'
+                        ' and may result in data loading bottlenecks.'
+                        ' Consider setting accelerator=ddp to use num_workers>0'
+                        ' (this is a limitation of Python .spawn() and PyTorch)'
                     )

             elif dataloader.num_workers == 0 and using_spawn:
-                # checks for the attr persistent_workers not available on pytorch < 1.7
+                # checks for the attr persistent_workers available in pytorch >= 1.7
                 if hasattr(dataloader, "persistent_workers"):
                     if not dataloader.persistent_workers:
                         rank_zero_warn(
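For context, the warning reworded above asks users running `accelerator=ddp_spawn` with `num_workers>0` to opt into persistent workers, which is only possible on PyTorch >= 1.7 (hence the `hasattr` check). A minimal sketch of the DataLoader configuration the warning suggests, where `train_dataset`, `batch_size`, and `num_workers` are placeholder values standing in for your own:

```python
from torch.utils.data import DataLoader

# persistent_workers=True keeps worker processes alive between epochs,
# so each spawned DDP process avoids re-paying worker start-up costs
# every epoch (available in PyTorch >= 1.7).
train_loader = DataLoader(
    train_dataset,          # hypothetical dataset for illustration
    batch_size=32,
    num_workers=4,
    persistent_workers=True,
)
```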