Commit cd997d6

karthikprasadkaushikb11 authored and committed
Sanitize None params during pruning (Lightning-AI#6836)
* sanitize none params during pruning
* amend
1 parent 17969f3 · commit cd997d6

2 files changed (+14, -9 lines changed)


pytorch_lightning/callbacks/pruning.py

Lines changed: 3 additions & 1 deletion
@@ -422,7 +422,9 @@ def sanitize_parameters_to_prune(
         current_modules = [m for m in pl_module.modules() if not isinstance(m, _MODULE_CONTAINERS)]
 
         if parameters_to_prune is None:
-            parameters_to_prune = [(m, p) for p in parameters for m in current_modules if hasattr(m, p)]
+            parameters_to_prune = [
+                (m, p) for p in parameters for m in current_modules if getattr(m, p, None) is not None
+            ]
         elif (
             isinstance(parameters_to_prune, (list, tuple)) and len(parameters_to_prune) > 0
             and all(len(p) == 2 for p in parameters_to_prune)
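
Note on the change above: a module such as nn.Linear(32, 32, bias=False) still exposes a bias attribute (it is registered as None), so the old hasattr check kept (module, "bias") pairs whose parameter is None, and torch.nn.utils.prune would later fail on them. A minimal standalone sketch of the difference; the toy modules below are illustrative and not part of the repository:

import torch.nn as nn

modules = [nn.Linear(32, 32), nn.Linear(32, 32, bias=False)]
parameters = ["weight", "bias"]

# A bias-free Linear still has a `bias` attribute, registered as None,
# so hasattr() alone cannot filter it out.
print(hasattr(modules[1], "bias"))  # True, even though modules[1].bias is None

# Old filter: keeps the (module, "bias") pair whose value is None.
old = [(m, p) for p in parameters for m in modules if hasattr(m, p)]
# New filter: only keeps parameters that actually exist (are not None).
new = [(m, p) for p in parameters for m in modules if getattr(m, p, None) is not None]

print(len(old), len(new))  # 4 vs. 3: the None bias is dropped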

tests/callbacks/test_pruning.py

Lines changed: 11 additions & 8 deletions
@@ -36,7 +36,7 @@ def __init__(self):
         self.layer = Sequential(
             OrderedDict([
                 ("mlp_1", nn.Linear(32, 32)),
-                ("mlp_2", nn.Linear(32, 32)),
+                ("mlp_2", nn.Linear(32, 32, bias=False)),
                 ("mlp_3", nn.Linear(32, 2)),
             ])
         )
@@ -85,7 +85,10 @@ def train_with_pruning_callback(
     if parameters_to_prune:
         pruning_kwargs["parameters_to_prune"] = [(model.layer.mlp_1, "weight"), (model.layer.mlp_2, "weight")]
     else:
-        pruning_kwargs["parameter_names"] = ["weight"]
+        if isinstance(pruning_fn, str) and pruning_fn.endswith("_structured"):
+            pruning_kwargs["parameter_names"] = ["weight"]
+        else:
+            pruning_kwargs["parameter_names"] = ["weight", "bias"]
     if isinstance(pruning_fn, str) and pruning_fn.endswith("_structured"):
         pruning_kwargs["pruning_dim"] = 0
         if pruning_fn == "ln_structured":
@@ -250,14 +253,14 @@ def test_multiple_pruning_callbacks(tmpdir, caplog, make_pruning_permanent):
     actual = [m for m in actual if m.startswith("Applied")]
     assert actual == [
         "Applied `L1Unstructured`. Pruned: 0/1122 (0.00%) -> 544/1122 (48.48%)",
-        "Applied `L1Unstructured` to `Linear(in_features=32, out_features=32, bias=True).weight` with amount=0.5. Pruned: 0 (0.00%) -> 506 (49.41%)",  # noqa: E501
-        "Applied `L1Unstructured` to `Linear(in_features=32, out_features=2, bias=True).weight` with amount=0.5. Pruned: 0 (0.00%) -> 38 (59.38%)",  # noqa: E501
+        "Applied `L1Unstructured` to `Linear(in_features=32, out_features=32, bias=True).weight` with amount=0.5. Pruned: 0 (0.00%) -> 500 (48.83%)",  # noqa: E501
+        "Applied `L1Unstructured` to `Linear(in_features=32, out_features=2, bias=True).weight` with amount=0.5. Pruned: 0 (0.00%) -> 44 (68.75%)",  # noqa: E501
         "Applied `RandomUnstructured`. Pruned: 544/1122 (48.48%) -> 680/1122 (60.61%)",
-        "Applied `RandomUnstructured` to `Linear(in_features=32, out_features=32, bias=True).weight` with amount=0.25. Pruned: 506 (49.41%) -> 633 (61.82%)",  # noqa: E501
-        "Applied `RandomUnstructured` to `Linear(in_features=32, out_features=2, bias=True).weight` with amount=0.25. Pruned: 38 (59.38%) -> 47 (73.44%)",  # noqa: E501
+        "Applied `RandomUnstructured` to `Linear(in_features=32, out_features=32, bias=True).weight` with amount=0.25. Pruned: 500 (48.83%) -> 635 (62.01%)",  # noqa: E501
+        "Applied `RandomUnstructured` to `Linear(in_features=32, out_features=2, bias=True).weight` with amount=0.25. Pruned: 44 (68.75%) -> 45 (70.31%)",  # noqa: E501
         "Applied `L1Unstructured`. Pruned: 680/1122 (60.61%) -> 884/1122 (78.79%)",
-        "Applied `L1Unstructured` to `Linear(in_features=32, out_features=32, bias=True).weight` with amount=0.5. Pruned: 633 (61.82%) -> 828 (80.86%)",  # noqa: E501
-        "Applied `L1Unstructured` to `Linear(in_features=32, out_features=2, bias=True).weight` with amount=0.5. Pruned: 47 (73.44%) -> 56 (87.50%)",  # noqa: E501
+        "Applied `L1Unstructured` to `Linear(in_features=32, out_features=32, bias=True).weight` with amount=0.5. Pruned: 635 (62.01%) -> 830 (81.05%)",  # noqa: E501
+        "Applied `L1Unstructured` to `Linear(in_features=32, out_features=2, bias=True).weight` with amount=0.5. Pruned: 45 (70.31%) -> 54 (84.38%)",  # noqa: E501
     ]
 
     filepath = str(tmpdir / "foo.ckpt")
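
Note on the test change: the model now contains one layer without a bias, which is exactly the case the fix targets, and the non-structured runs now ask for both "weight" and "bias". A small sketch of how the patched helper behaves, calling the static method directly on a plain nn.Module; the toy network is illustrative, and the parameter_names keyword is assumed to match the callback's argument of the same name:

import torch.nn as nn
from pytorch_lightning.callbacks import ModelPruning

# Toy network shaped like the test model above: the middle layer has no bias.
# A plain nn.Module stands in for a LightningModule here; only .modules() is used.
net = nn.Sequential(
    nn.Linear(32, 32),
    nn.Linear(32, 32, bias=False),
    nn.Linear(32, 2),
)

# With this commit, the middle layer's None bias is skipped instead of being
# handed to torch.nn.utils.prune as a parameter to prune.
pairs = ModelPruning.sanitize_parameters_to_prune(net, parameter_names=["weight", "bias"])
print(len(pairs))  # 5: three weights plus two real biases, no None entries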
