diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py
index 99d716f6b5a8c..67bb6653edc31 100644
--- a/pytorch_lightning/trainer/connectors/accelerator_connector.py
+++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -426,6 +426,11 @@ def resolve_training_type_plugin(self, training_type: TrainingTypePlugin) -> Tra
         if hasattr(training_type, 'num_nodes') and getattr(training_type, 'num_nodes') is None:
             training_type.num_nodes = self.num_nodes
 
+        # Automatically set sync_batchnorm if None.
+        # Useful for custom plugins.
+        if hasattr(training_type, 'sync_batchnorm') and getattr(training_type, 'sync_batchnorm') is None:
+            training_type.sync_batchnorm = self.sync_batchnorm
+
         return training_type
 
     def select_accelerator(self) -> Accelerator:
diff --git a/tests/plugins/test_custom_plugin.py b/tests/plugins/test_custom_plugin.py
new file mode 100644
index 0000000000000..872b49ef48635
--- /dev/null
+++ b/tests/plugins/test_custom_plugin.py
@@ -0,0 +1,41 @@
+# Copyright The PyTorch Lightning team.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from pytorch_lightning import Trainer
+from pytorch_lightning.plugins import DDPPlugin
+from tests.helpers import BoringModel
+from tests.helpers.runif import RunIf
+
+
+class CustomParallelPlugin(DDPPlugin):
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        # Set to None so it will be overwritten by the accelerator connector.
+        self.sync_batchnorm = None
+
+
+@RunIf(skip_windows=True)
+def test_sync_batchnorm_set(tmpdir):
+    """Tests if sync_batchnorm is automatically set for custom plugin."""
+    model = BoringModel()
+    plugin = CustomParallelPlugin()
+    assert plugin.sync_batchnorm is None
+    trainer = Trainer(
+        max_epochs=1,
+        plugins=[plugin],
+        default_root_dir=tmpdir,
+        sync_batchnorm=True,
+    )
+    trainer.fit(model)
+    assert plugin.sync_batchnorm is True
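
Beyond the diff itself, here is a minimal sketch of the user-facing behavior the change enables, assuming a PyTorch Lightning build with the patch above applied. `MyDDPPlugin` is a hypothetical user-defined plugin for illustration, not part of this PR:

```python
from pytorch_lightning import Trainer
from pytorch_lightning.plugins import DDPPlugin


class MyDDPPlugin(DDPPlugin):
    """Hypothetical custom plugin that defers sync_batchnorm to the Trainer."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # None marks the flag as "unset"; the patched resolve_training_type_plugin()
        # copies the Trainer-level sync_batchnorm setting onto the plugin.
        self.sync_batchnorm = None


plugin = MyDDPPlugin()
trainer = Trainer(sync_batchnorm=True, plugins=[plugin], max_epochs=1)
# Once the accelerator connector resolves the plugin (by trainer.fit() at the
# latest), the Trainer flag has been propagated:
#   plugin.sync_batchnorm is True  # as the new test asserts after fit()
```

The None-as-unset convention mirrors the existing `num_nodes` handling directly above the new block, so custom plugins can opt into Trainer-level defaults attribute by attribute.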