
Commit fde4871

Merge branch 'master' into bugfix/duplicate-logs2

2 parents: aa9acca + 96cdca1

File tree: 3 files changed (+10, -9 lines)

CHANGELOG.md (2 additions, 0 deletions)

@@ -12,6 +12,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 ### Changed
 
+- Increased TPU check timeout from 20s to 100s ([#5598](https://github.com/PyTorchLightning/pytorch-lightning/pull/5598))
+
 
 ### Deprecated
 

pytorch_lightning/utilities/xla_device_utils.py (3 additions, 1 deletion)

@@ -20,6 +20,8 @@
 import torch
 
 XLA_AVAILABLE = importlib.util.find_spec("torch_xla") is not None
+#: waiting time for the TPU availability check, in seconds
+TPU_CHECK_TIMEOUT = 100
 
 if XLA_AVAILABLE:
     import torch_xla.core.xla_model as xm
@@ -39,7 +41,7 @@ def wrapper(*args, **kwargs):
         queue = Queue()
         proc = Process(target=inner_f, args=(queue, func, *args), kwargs=kwargs)
         proc.start()
-        proc.join(20)
+        proc.join(TPU_CHECK_TIMEOUT)
         try:
             return queue.get_nowait()
         except q.Empty:
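For context, `pl_multi_process` wraps the TPU probe in the standard subprocess-with-timeout pattern: run the check in a child process, `join` it for at most TPU_CHECK_TIMEOUT seconds, and treat an empty result queue as failure. Below is a minimal standalone sketch of that pattern; the names `run_with_timeout` and `_call_and_store` and the explicit `terminate()` cleanup are illustrative assumptions, not the library's exact code.

import queue as q
import time
from multiprocessing import Process, Queue

TPU_CHECK_TIMEOUT = 100  # seconds; mirrors the constant introduced above


def _call_and_store(result_queue, func, *args, **kwargs):
    # Child-process entry point: run the probe and ship its result back.
    result_queue.put(func(*args, **kwargs))


def run_with_timeout(func, *args, timeout=TPU_CHECK_TIMEOUT, **kwargs):
    """Run func in a subprocess; return its result, or False on timeout."""
    result_queue = Queue()
    proc = Process(target=_call_and_store, args=(result_queue, func, *args), kwargs=kwargs)
    proc.start()
    proc.join(timeout)  # block the parent for at most `timeout` seconds
    try:
        return result_queue.get_nowait()  # raises queue.Empty if nothing arrived
    except q.Empty:
        proc.terminate()  # the probe is presumed hung; kill it and report failure
        return False


if __name__ == "__main__":
    # A callable that outlives the timeout is reported as a failed check.
    print(run_with_timeout(time.sleep, 5, timeout=1))  # -> False

The commit's only behavioral change here is the `join` argument: a hung TPU probe now gets 100 seconds instead of 20 before the caller gives up.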

tests/utilities/test_xla_device_utils.py (5 additions, 8 deletions)

@@ -12,18 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import time
+from unittest.mock import patch
 
 import pytest
 
 import pytorch_lightning.utilities.xla_device_utils as xla_utils
 from pytorch_lightning.utilities import XLA_AVAILABLE, TPU_AVAILABLE
 from tests.base.develop_utils import pl_multi_process_test
 
-if XLA_AVAILABLE:
-    import torch_xla.core.xla_model as xm
 
-
-# lets hope that in or env we have installed XLA only for TPU devices, otherwise, it is testing in the cycle "if I am true test that I am true :D"
 @pytest.mark.skipif(XLA_AVAILABLE, reason="test requires torch_xla to be absent")
 def test_tpu_device_absence():
     """Check tpu_device_exists returns None when torch_xla is not available"""
@@ -37,12 +34,12 @@ def test_tpu_device_presence():
     assert xla_utils.XLADeviceUtils.tpu_device_exists() is True
 
 
-def test_result_returns_within_20_seconds():
+@patch('pytorch_lightning.utilities.xla_device_utils.TPU_CHECK_TIMEOUT', 10)
+def test_result_returns_within_timeout_seconds():
     """Check that pl_multi_process returns within 10 seconds"""
-
     start = time.time()
-    result = xla_utils.pl_multi_process(time.sleep)(25)
+    result = xla_utils.pl_multi_process(time.sleep)(xla_utils.TPU_CHECK_TIMEOUT * 1.25)
     end = time.time()
     elapsed_time = int(end - start)
-    assert elapsed_time <= 20
+    assert elapsed_time <= xla_utils.TPU_CHECK_TIMEOUT
     assert result is False
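The rewritten test no longer hard-codes 20 seconds; it patches the module-level constant down to 10 so the timeout path can be exercised quickly. This works because `unittest.mock.patch` swaps the attribute on the module object itself, which is exactly where `proc.join(TPU_CHECK_TIMEOUT)` looks it up at call time, and it restores the original value when the test returns. A minimal self-contained illustration of that mechanism (run as a script; the `__main__` target and the `TIMEOUT` name are stand-ins, not the library's code):

from unittest.mock import patch

TIMEOUT = 100  # stand-in for a module-level constant such as TPU_CHECK_TIMEOUT


@patch("__main__.TIMEOUT", 10)
def check_patched():
    # While the patch is active, module-level lookups see the replacement.
    assert TIMEOUT == 10


if __name__ == "__main__":
    check_patched()
    assert TIMEOUT == 100  # patch restores the original value on exit
    print("ok: constant patched and restored")

Sleeping for `TPU_CHECK_TIMEOUT * 1.25` (12.5s under the patched value) guarantees the wrapped call outlives the 10-second join, so the test still proves the timeout fires without ever waiting the full 100 seconds.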
