Skip to content

Commit 4c8ba27

Browse files
committed
debug
1 parent 8c067cd commit 4c8ba27

File tree

1 file changed

+3
-1
lines changed

1 file changed

+3
-1
lines changed

pytorch_lightning/plugins/training_type/tpu_spawn.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@ def __init__(
6060
checkpoint_io: Optional[CheckpointIO] = None,
6161
precision_plugin: Optional[PrecisionPlugin] = None,
6262
debug: bool = False,
63-
**_: Any
63+
**_: Any,
6464
) -> None:
6565
checkpoint_io = checkpoint_io or XLACheckpointIO()
6666
super().__init__(
@@ -132,6 +132,8 @@ def _move_optimizer_state(self, device: Optional[torch.device] = None) -> None:
132132
"""Moves the state of the optimizers to the TPU if needed."""
133133
# TODO: `self.root_device` would raise error if called outside the spawn process
134134
# while training on 8 and more cores.
135+
if device:
136+
raise ValueError(f"device should be None, found: {device}.")
135137
device = device or self.root_device
136138
for opt in self.optimizers:
137139
for p, v in opt.state.items():

0 commit comments

Comments
 (0)