Skip to content

Commit 6a13cf7

Browse files
committed
add comment mentioning use by Lite
1 parent ce1bfde commit 6a13cf7

File tree

1 file changed

+4
-2
lines changed

1 file changed

+4
-2
lines changed

pytorch_lightning/plugins/training_type/training_type_plugin.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -66,6 +66,8 @@ def _setup_models_and_optimizers(
     ) -> Tuple[List[Module], List[Optimizer]]:
         """Setup multiple models and multiple optimizers together.

+        Primarily used by Lightning Lite.
+
         The returned objects are expected to be in the same order they were passed in.
         The default implementation will call :meth:`_setup_model` and :meth:`_setup_optimizer` on the input lists.
         """
@@ -75,12 +77,12 @@ def _setup_models_and_optimizers(
         return models, optimizers

     def _setup_model(self, model: Module) -> Module:
-        """Performs setup for the model, e.g., by wrapping it by another class."""
+        """Performs setup for the model, e.g., by wrapping it by another class. Primarily used by Lightning Lite."""
         # TODO (@awaelchli): standardize this across all plugins in Lightning and Lite. Related refactor: #7324
         return model

     def _setup_optimizer(self, optimizer: Optimizer) -> Optimizer:
-        """Performs setup for the optimizer, e.g., by wrapping it by another class."""
+        """Performs setup for the optimizer, e.g., by wrapping it by another class. Primarily used by Lightning Lite."""
         # TODO (@awaelchli): standardize this across all plugins in Lightning and Lite. Related refactor: #7324
         return optimizer

0 commit comments

Comments
 (0)