From bac5ffe8f15e739424d638811d94cc43696deec2 Mon Sep 17 00:00:00 2001
From: Sadiq Jaffer
Date: Sun, 4 Apr 2021 17:54:50 +0000
Subject: [PATCH] Correct parameter name in doc string

`unfreeze_and_add_param_group` expects `modules` rather than `module`

---
 pytorch_lightning/callbacks/finetuning.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_lightning/callbacks/finetuning.py b/pytorch_lightning/callbacks/finetuning.py
index b25e5e06e8b86..f37e3bb31cc5e 100644
--- a/pytorch_lightning/callbacks/finetuning.py
+++ b/pytorch_lightning/callbacks/finetuning.py
@@ -77,7 +77,7 @@ def finetune_function(self, pl_module, current_epoch, optimizer, optimizer_idx):
                 # When `current_epoch` is 10, feature_extractor will start training.
                 if current_epoch == self._unfreeze_at_epoch:
                     self.unfreeze_and_add_param_group(
-                        module=pl_module.feature_extractor,
+                        modules=pl_module.feature_extractor,
                         optimizer=optimizer,
                         train_bn=True,
                     )
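
For context: a minimal sketch of the docstring example this patch corrects, written as a standalone callback. The class name and the `freeze_before_training` hook follow the surrounding docstring in finetuning.py; a `pl_module` with a `feature_extractor` attribute is assumed and is not part of this patch.

    from pytorch_lightning.callbacks.finetuning import BaseFinetuning


    class FeatureExtractorFreezeUnfreeze(BaseFinetuning):
        """Illustrative callback: freeze `feature_extractor`, unfreeze it at a chosen epoch."""

        def __init__(self, unfreeze_at_epoch=10):
            self._unfreeze_at_epoch = unfreeze_at_epoch

        def freeze_before_training(self, pl_module):
            # Freeze the feature extractor before training starts.
            self.freeze(pl_module.feature_extractor)

        def finetune_function(self, pl_module, current_epoch, optimizer, optimizer_idx):
            # Once `current_epoch` reaches the threshold, start training the feature extractor.
            if current_epoch == self._unfreeze_at_epoch:
                self.unfreeze_and_add_param_group(
                    modules=pl_module.feature_extractor,  # keyword is `modules`, not `module`
                    optimizer=optimizer,
                    train_bn=True,
                )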