1 parent 824c11d · commit 81636fe
pytorch_lightning/plugins/training_type/tpu_spawn.py
@@ -277,7 +277,11 @@ def _wrapped_function(
         result = function(*args, **kwargs)
         if self.local_rank == 0:
             return_queue.put(move_data_to_device(result, "cpu"))
+
         self.barrier("end-process")
+        # https://github.com/pytorch/xla/issues/2190#issuecomment-641665358
+        if self.local_rank == 0:
+            time.sleep(2)

     def _worker_setup(self, process_idx: int):
         reset_seed()
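The added lines make rank 0 wait on the "end-process" barrier and then sleep for two seconds before its process exits, the workaround suggested in pytorch/xla#2190 for results placed on the return queue being lost when the spawned process shuts down too quickly. Below is a minimal sketch of the same put/barrier/sleep pattern, using torch.multiprocessing.spawn in place of xmp.spawn; the function name `_wrapped` and the demo payload are illustrative assumptions, not code from this commit.

import time
import torch.multiprocessing as mp


def _wrapped(rank: int, return_queue) -> None:
    # Stand-in for `result = function(*args, **kwargs)` in the real code.
    result = {"rank": rank, "value": rank * 2}
    if rank == 0:
        return_queue.put(result)
    # The real code calls self.barrier("end-process") here so every process
    # reaches this point before any of them exits. Per pytorch/xla#2190,
    # rank 0 then lingers briefly so the queued result is fully flushed
    # before its process is torn down.
    if rank == 0:
        time.sleep(2)


if __name__ == "__main__":
    ctx = mp.get_context("spawn")
    return_queue = ctx.SimpleQueue()
    mp.spawn(_wrapped, args=(return_queue,), nprocs=2)
    print(return_queue.get())  # {'rank': 0, 'value': 0}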