We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 14e61d8 · commit 1a7773e (copy full SHA for 1a7773e)
pytorch_lightning/trainer/trainer.py
@@ -1412,6 +1412,7 @@ def call_hook(
1412
if hook_name in ("on_train_start",) and hasattr(self.accelerator, hook_name):
1413
accelerator_hook = getattr(self.accelerator, hook_name)
1414
accelerator_output = accelerator_hook(*args, **kwargs)
1415
+ # Rely on the accelerator output if the LightningModule hook returns nothing
1416
# Required for cases such as DataParallel where we reduce the output for the user
1417
# todo: move this data parallel logic into the data parallel plugin
1418
output = accelerator_output if output is None else output
0 commit comments