pytorch_lightning/strategies: 1 file changed, +1 -8 lines changed.

The diff removes the `parallel_devices is None` guard from `_call_children_scripts`, along with the `MisconfigurationException` import that the guard was the last remaining user of:

```diff
@@ -57,7 +57,7 @@
     sync_ddp_if_available,
 )
 from pytorch_lightning.utilities.enums import _StrategyType
-from pytorch_lightning.utilities.exceptions import DeadlockDetectedException, MisconfigurationException
+from pytorch_lightning.utilities.exceptions import DeadlockDetectedException
 from pytorch_lightning.utilities.seed import reset_seed
 from pytorch_lightning.utilities.types import STEP_OUTPUT
 
@@ -211,13 +211,6 @@ def _call_children_scripts(self):
         else:  # Script called as `python -m a.b.c`
             command = [sys.executable, "-m", __main__.__spec__.name] + sys.argv[1:]
 
-        # the visible devices tell us how many GPUs we want to use.
-        # when the trainer script was called the device has already been scoped by the time
-        # code reaches this point. so, to call the scripts, we need to leave cuda visible devices alone
-        # but forward the GPUs selected via environment variables
-        if self.parallel_devices is None:
-            raise MisconfigurationException("you selected (distribute_backend = ddp) but did not set Trainer(gpus=?)")
-
         os.environ["WORLD_SIZE"] = f"{self.num_processes * self.num_nodes}"
 
         self.interactive_ddp_procs = []
```
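For context, the surviving lines around the removed guard implement the DDP "children scripts" relaunch: the current command line is reconstructed and re-executed once per additional local rank, with the total world size forwarded through the environment. Below is a minimal, self-contained sketch of that pattern, not the library's actual implementation: `launch_children` and the `LOCAL_RANK` bookkeeping are illustrative assumptions, while the `command` construction and the `WORLD_SIZE` line mirror the context lines kept by this diff.

```python
import os
import subprocess
import sys
import __main__


# Hedged sketch of the relaunch pattern visible in the hunk above.
# `launch_children` and the per-child LOCAL_RANK variable are assumptions;
# the command construction and WORLD_SIZE line follow the diff's context.
def launch_children(num_processes: int, num_nodes: int) -> list:
    if __main__.__spec__ is None:  # Script called as `python a/b/c.py`
        command = [sys.executable, os.path.abspath(sys.argv[0])] + sys.argv[1:]
    else:  # Script called as `python -m a.b.c`
        command = [sys.executable, "-m", __main__.__spec__.name] + sys.argv[1:]

    # Total rank count across all nodes, exactly as in the retained line.
    os.environ["WORLD_SIZE"] = f"{num_processes * num_nodes}"

    procs = []
    for local_rank in range(1, num_processes):  # rank 0 stays in this process
        env = os.environ.copy()
        env["LOCAL_RANK"] = str(local_rank)  # assumption: rank passed via env
        procs.append(subprocess.Popen(command, env=env))
    return procs
```

Note that rank 0 keeps running in the parent process, which is why the loop starts at 1; each child inherits the parent's environment plus its own rank variable, so the parent's device selection is forwarded rather than recomputed.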