Skip to content

Commit 05ac035

Browse files
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 80d24fe commit 05ac035

File tree

1 file changed

+3
-1
lines changed
  • src/lightning_lite/plugins/precision

1 file changed

+3
-1
lines changed

src/lightning_lite/plugins/precision/fsdp.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,9 @@
2727
class FSDPPrecision(NativeMixedPrecision):
2828
"""AMP for Fully Sharded Data Parallel training."""
2929

30-
def __init__(self, precision: Literal[16, "bf16"], device: str, scaler: Optional["ShardedGradScaler"] = None) -> None:
30+
def __init__(
31+
self, precision: Literal[16, "bf16"], device: str, scaler: Optional["ShardedGradScaler"] = None
32+
) -> None:
3133
if not _TORCH_GREATER_EQUAL_1_12:
3234
raise RuntimeError("`FSDPPrecision` is supported from PyTorch v1.12.0 onwards.")
3335

0 commit comments

Comments (0)