2 changes: 1 addition & 1 deletion docs/source-pytorch/visualize/logging_advanced.rst

@@ -355,7 +355,7 @@ In LightningModule
    * - Method
      - on_step
      - on_epoch
-   * - on_after_backward, on_before_backward, on_before_optimizer_step, on_before_zero_grad, training_step, training_step_end
+   * - on_after_backward, on_before_backward, on_before_optimizer_step, optimizer_step, configure_gradient_clipping, on_before_zero_grad, training_step, training_step_end
      - True
      - False
    * - training_epoch_end, test_epoch_end, test_step, test_step_end, validation_epoch_end, validation_step, validation_step_end
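As context for the doc change above, a minimal sketch of logging from one of the newly listed hooks (PL 1.7-style signature; the model and metric names are illustrative, not part of this PR). Values logged here default to `on_step=True, on_epoch=False`, matching `optimizer_step`:

```python
import torch
from pytorch_lightning import LightningModule


class LitModel(LightningModule):
    def configure_gradient_clipping(
        self, optimizer, optimizer_idx, gradient_clip_val=None, gradient_clip_algorithm=None
    ):
        # total gradient norm before clipping (illustrative metric)
        norms = [p.grad.norm() for p in self.parameters() if p.grad is not None]
        self.log("grad_norm", torch.stack(norms).norm())  # defaults: on_step=True, on_epoch=False
        # delegate the actual clipping to Lightning's built-in helper
        self.clip_gradients(
            optimizer, gradient_clip_val=gradient_clip_val, gradient_clip_algorithm=gradient_clip_algorithm
        )
```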
3 changes: 3 additions & 0 deletions src/pytorch_lightning/CHANGELOG.md

@@ -76,6 +76,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Fixed an `AttributeError` when accessing `LightningModule.logger` and the Trainer has multiple loggers ([#14234](https://github.com/Lightning-AI/lightning/pull/14234))


+- Added back support for `log`ging in the `configure_gradient_clipping` hook after unintended removal in v1.7.2 ([#14298](https://github.com/Lightning-AI/lightning/issues/14298))
+
+
 - Fixed wrong num padding for `RichProgressBar` ([#14296](https://github.com/Lightning-AI/lightning/pull/14296))


@@ -44,8 +44,13 @@ class _LogOptions(TypedDict):
            allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
        ),
        "lr_scheduler_step": None,
-       "configure_gradient_clipping": None,
-       "clip_gradients": None,
+       # should match `optimizer_step`
+       "configure_gradient_clipping": _LogOptions(
+           allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
+       ),
+       "clip_gradients": _LogOptions(
+           allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
+       ),
        "on_before_zero_grad": _LogOptions(
            allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
        ),
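For orientation, here is a minimal self-contained sketch of how such a table can drive log-call validation. The real `_FxValidator` in Lightning differs in detail; `check_logging`, the `functions` dict shown, and the error wording are illustrative only:

```python
from typing import Optional, Tuple, TypedDict


class _LogOptions(TypedDict):
    allowed_on_step: Tuple[bool, ...]
    allowed_on_epoch: Tuple[bool, ...]
    default_on_step: bool
    default_on_epoch: bool


# hooks mapped to None may not call `self.log` at all
functions = {
    "configure_gradient_clipping": _LogOptions(
        allowed_on_step=(False, True),
        allowed_on_epoch=(False, True),
        default_on_step=True,
        default_on_epoch=False,
    ),
    "lr_scheduler_step": None,
}


def check_logging(fx_name: str, on_step: Optional[bool], on_epoch: Optional[bool]):
    options = functions.get(fx_name)
    if options is None:
        raise RuntimeError(f"You can't `self.log()` inside `{fx_name}`")
    # fill in the hook's defaults when the caller did not pass the flags
    on_step = options["default_on_step"] if on_step is None else on_step
    on_epoch = options["default_on_epoch"] if on_epoch is None else on_epoch
    if on_step not in options["allowed_on_step"]:
        raise RuntimeError(f"`on_step={on_step}` is not allowed in `{fx_name}`")
    if on_epoch not in options["allowed_on_epoch"]:
        raise RuntimeError(f"`on_epoch={on_epoch}` is not allowed in `{fx_name}`")
    return on_step, on_epoch


assert check_logging("configure_gradient_clipping", None, None) == (True, False)
```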
@@ -183,12 +183,7 @@ class HookedModel(BoringModel):
     def __init__(self, not_supported):
         super().__init__()
         pl_module_hooks = get_members(LightningModule)
-        pl_module_hooks.difference_update(
-            {
-                "log",
-                "log_dict",
-            }
-        )
+        pl_module_hooks.difference_update({"log", "log_dict"})
         # remove `nn.Module` hooks
         module_hooks = get_members(torch.nn.Module)
         pl_module_hooks.difference_update(module_hooks)
@@ -236,8 +231,6 @@ def test_fx_validator_integration(tmpdir):
         "on_validation_model_eval": "You can't",
         "on_validation_model_train": "You can't",
         "lr_scheduler_step": "You can't",
-        "configure_gradient_clipping": "You can't",
-        "clip_gradients": "You can't",
         "on_save_checkpoint": "You can't",
         "on_load_checkpoint": "You can't",
         "on_exception": "You can't",
2 changes: 2 additions & 0 deletions tests/tests_pytorch/trainer/logging_/test_loop_logging.py

@@ -54,6 +54,8 @@ def _make_assertion(model, hooks, result_mock, on_step, on_epoch, extra_kwargs):
         "on_after_backward",
         "on_before_optimizer_step",
         "optimizer_step",
+        "configure_gradient_clipping",
+        "clip_gradients",
         "on_before_zero_grad",
         "optimizer_zero_grad",
         "training_step",
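With the two hooks back in the loop-logging test list, an end-to-end regression check along these lines should pass on this branch (a hedged sketch; the model and metric names are illustrative, and `BoringModel` is assumed to be importable from `pytorch_lightning.demos.boring_classes` as in 1.7):

```python
from pytorch_lightning import Trainer
from pytorch_lightning.demos.boring_classes import BoringModel


class GradClipModel(BoringModel):
    def configure_gradient_clipping(
        self, optimizer, optimizer_idx, gradient_clip_val=None, gradient_clip_algorithm=None
    ):
        # this call raised a MisconfigurationException in v1.7.2
        self.log("clip_hook_ran", 1.0)
        self.clip_gradients(
            optimizer, gradient_clip_val=gradient_clip_val, gradient_clip_algorithm=gradient_clip_algorithm
        )


# gradient_clip_val ensures the default clipping path is exercised
trainer = Trainer(fast_dev_run=1, gradient_clip_val=0.5)
trainer.fit(GradClipModel())
assert "clip_hook_ran" in trainer.logged_metrics
```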