diff --git a/CHANGELOG.md b/CHANGELOG.md
index d7100ff9953ea..b3b0f9814c8dd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -451,6 +451,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Remove `epoch` from `trainer.logged_metrics` ([#9904](https://github.com/PyTorchLightning/pytorch-lightning/pull/9904))
 
+- Removed `should_rank_save_checkpoint` property from Trainer ([#9433](https://github.com/PyTorchLightning/pytorch-lightning/pull/9433))
+
+
 ### Fixed
 
diff --git a/pytorch_lightning/callbacks/model_checkpoint.py b/pytorch_lightning/callbacks/model_checkpoint.py
index 82c6273732664..ec57147764914 100644
--- a/pytorch_lightning/callbacks/model_checkpoint.py
+++ b/pytorch_lightning/callbacks/model_checkpoint.py
@@ -609,7 +609,7 @@ def __resolve_ckpt_dir(self, trainer: "pl.Trainer") -> None:
 
         self.dirpath = ckpt_path
 
-        if not trainer.fast_dev_run and trainer.should_rank_save_checkpoint:
+        if not trainer.fast_dev_run and trainer.training_type_plugin.should_rank_save_checkpoint:
            self._fs.makedirs(self.dirpath, exist_ok=True)
 
     def __warn_if_dir_not_empty(self, dirpath: _PATH) -> None:
diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index e37df73a4b98f..41f0100c0814c 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -1507,10 +1507,6 @@ def world_size(self) -> int:
         # some training types define a world size
         return getattr(self.accelerator.training_type_plugin, "world_size", 1)
 
-    @property
-    def should_rank_save_checkpoint(self) -> bool:
-        return self.accelerator.training_type_plugin.should_rank_save_checkpoint
-
     @property
     def _distrib_type(self) -> DistributedType:
         return self.accelerator_connector._distrib_type
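
As the removed lines show, `Trainer.should_rank_save_checkpoint` was a thin passthrough to `self.accelerator.training_type_plugin.should_rank_save_checkpoint`, so downstream code loses nothing by querying the plugin directly. A minimal migration sketch for custom callbacks that relied on the removed property (the callback name, hook choice, and `dirpath` argument here are illustrative assumptions, not part of this PR):

```python
import os

import pytorch_lightning as pl
from pytorch_lightning.callbacks import Callback


class EnsureCheckpointDir(Callback):
    """Illustrative callback: create a checkpoint directory only on ranks
    that are responsible for saving, mirroring ModelCheckpoint above."""

    def __init__(self, dirpath: str) -> None:
        self.dirpath = dirpath

    def on_fit_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
        # Before this PR: trainer.should_rank_save_checkpoint
        # After this PR: reach through the training type plugin directly.
        if not trainer.fast_dev_run and trainer.training_type_plugin.should_rank_save_checkpoint:
            os.makedirs(self.dirpath, exist_ok=True)
```

Keeping the decision on the plugin keeps rank-aware save logic in one place, where individual training type plugins can override it.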