1 parent f37bd46 commit c335a78
pytorch_lightning/plugins/training_type/tpu_spawn.py
@@ -125,9 +125,6 @@ def pre_dispatch(self, trainer: "pl.Trainer") -> None:
         if self.debug:
             os.environ["PT_XLA_DEBUG"] = str(1)
 
-        if self.tpu_global_core_rank != 0 and trainer.progress_bar_callback is not None:
-            trainer.progress_bar_callback.disable()
-
         shared_params = find_shared_parameters(self.model)
         self.model_to_device()
         if is_overridden("on_post_move_to_device", self.lightning_module):
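The removed lines guarded the progress bar so that only the process on TPU global core rank 0 kept it enabled; every other spawned process called `disable()` on its copy of the callback before dispatch. Below is a minimal, self-contained sketch of that rank-guard pattern; `FakeProgressBar` and `disable_progress_bar_on_nonzero_ranks` are hypothetical stand-ins for illustration, not the real `Trainer` or `TPUSpawnPlugin` APIs.

# Illustrative sketch only, with hypothetical stand-in names.
class FakeProgressBar:
    """Stand-in for a progress bar callback with a disable() method."""

    def __init__(self) -> None:
        self.enabled = True

    def disable(self) -> None:
        self.enabled = False


def disable_progress_bar_on_nonzero_ranks(progress_bar, global_rank: int) -> None:
    # Only rank 0 keeps its progress bar; every other rank silences its copy
    # so output is not duplicated once per TPU core.
    if global_rank != 0 and progress_bar is not None:
        progress_bar.disable()


if __name__ == "__main__":
    bars = [FakeProgressBar() for _ in range(8)]
    for rank, bar in enumerate(bars):
        disable_progress_bar_on_nonzero_ranks(bar, rank)
    print([bar.enabled for bar in bars])  # only rank 0 stays enabled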