1 file changed (+3, -5 lines changed)
@@ -14,8 +14,7 @@
 import logging
 import os
 from functools import partial
-from typing import Optional
-from typing import Type
+from typing import Optional, Type

 import pytorch_lightning as pl
 from pytorch_lightning.accelerators import GPUAccelerator
@@ -34,8 +33,7 @@
     InterBatchParallelDataFetcher,
 )
 from pytorch_lightning.utilities.model_helpers import is_overridden
-from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation
-from pytorch_lightning.utilities.rank_zero import rank_zero_warn
+from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation, rank_zero_warn
 from pytorch_lightning.utilities.signature_utils import is_param_in_hook_signature

 log = logging.getLogger(__name__)
@@ -195,7 +193,7 @@ def skip(self) -> bool:
         """Whether we should skip the training and immediately return from the call to :meth:`run`."""
         # since `trainer.num_training_batches` depends on the `train_dataloader` but that won't be called
         # until `on_run_start`, we use `limit_train_batches` instead
-        return self.trainer.limit_train_batches == 0
+        return self.done or self.trainer.limit_train_batches == 0

     def connect(self, epoch_loop: TrainingEpochLoop) -> None:  # type: ignore[override]
         """Connects a training epoch loop to this fit loop."""