1 parent e2fafe5 commit e033f51
tests/plugins/test_custom_plugin.py
@@ -1,17 +1,21 @@
import pytest
import torch
+
from pytorch_lightning import Trainer
from pytorch_lightning.plugins import DDPPlugin
from tests.helpers import BoringModel
+from tests.helpers.runif import RunIf


class CustomParallelPlugin(DDPPlugin):

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Set to None so it will be overwritten by the accelerator connector.
        self.sync_batchnorm = None


+@RunIf(skip_windows=True)
@pytest.mark.skipif(torch.cuda.is_available(), reason="test doesn't requires GPU machine")
def test_sync_batchnorm_set(tmpdir):
    """Tests if sync_batchnorm is automatically set for custom plugin."""