From fceef7918516cf83dfdbf8df00be1b665075d5cf Mon Sep 17 00:00:00 2001 From: Your Name Date: Tue, 30 Mar 2021 09:20:48 +0000 Subject: [PATCH 1/2] fix_hydra --- pytorch_lightning/plugins/training_type/tpu_spawn.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/pytorch_lightning/plugins/training_type/tpu_spawn.py b/pytorch_lightning/plugins/training_type/tpu_spawn.py index ba074e7cfb206..a29310f65f724 100644 --- a/pytorch_lightning/plugins/training_type/tpu_spawn.py +++ b/pytorch_lightning/plugins/training_type/tpu_spawn.py @@ -23,10 +23,11 @@ from pytorch_lightning.plugins.training_type.ddp_spawn import DDPSpawnPlugin from pytorch_lightning.plugins.training_type.utils import on_colab_kaggle from pytorch_lightning.trainer.states import TrainerState -from pytorch_lightning.utilities import _TPU_AVAILABLE, rank_zero_warn +from pytorch_lightning.utilities import _TPU_AVAILABLE, rank_zero_warn, _OMEGACONF_AVAILABLE from pytorch_lightning.utilities.distributed import rank_zero_only, ReduceOp from pytorch_lightning.utilities.exceptions import MisconfigurationException from pytorch_lightning.utilities.seed import seed_everything +from pytorch_lightning.utilities.apply_func import apply_to_collection if _TPU_AVAILABLE: import torch_xla.core.xla_model as xm @@ -37,6 +38,10 @@ else: xm, xla_pl, xmp, ParallelLoader, rendezvous = [None] * 5 +if _OMEGACONF_AVAILABLE: + from omegaconf import OmegaConf + from omegaconf import DictConfig, ListConfig + class TPUSpawnPlugin(DDPSpawnPlugin): @@ -304,4 +309,6 @@ def save_checkpoint(self, checkpoint: Dict[str, Any], filepath: str) -> None: filepath: write-target file's path """ # Todo: TypeError: 'mappingproxy' object does not support item assignment + if _OMEGACONF_AVAILABLE: + checkpoint = apply_to_collection(checkpoint, (DictConfig, ListConfig), OmegaConf.to_container) self.save({k: v for k, v in checkpoint.items() if k != "callbacks"}, filepath) From 09ae9c1355b6caba7c95a4a287a20e11d499b28f Mon Sep 17 
00:00:00 2001 From: tchaton Date: Tue, 30 Mar 2021 11:08:42 +0100 Subject: [PATCH 2/2] update changelog --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0de3835f6cf7e..6c26177096889 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -221,6 +221,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). - Fixed a bug where gradients were disabled after calling `Trainer.predict` ([#6657](https://github.com/PyTorchLightning/pytorch-lightning/pull/6657)) +- Fixed a bug with omegaconf and `xm.save` ([#6741](https://github.com/PyTorchLightning/pytorch-lightning/pull/6741)) + ## [1.2.4] - 2021-03-16 ### Changed