diff --git a/.circleci/config.yml b/.circleci/config.yml index 7ac10195c75a9..5c314d4e6e5c1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -113,7 +113,7 @@ jobs: docker: - image: circleci/python:3.7 environment: - - XLA_VER: 1.9 + - XLA_VER: 1.12 - PYTHON_VER: 3.7 - MAX_CHECKS: 1000 - CHECK_SPEEP: 5 diff --git a/.github/workflows/README.md b/.github/workflows/README.md index d67bf92d6c048..8b9e7d173b03c 100644 --- a/.github/workflows/README.md +++ b/.github/workflows/README.md @@ -13,7 +13,7 @@ | pytorch-lightning (HPUs) | .azure-pipelines/hpu-tests.yml | Run only HPU-specific tests. | HPU | (3.8, 1.10) | linux | | pytorch-lightning (GPUs) | .azure-pipelines/gpu-tests.yml | Run all CPU and GPU-specific tests, standalone, and examples. Each standalone test needs to be run in separate processes to avoid unwanted interactions between test cases. | GPU | (3.9, 1.12) | linux | | PyTorchLightning.Benchmark | .azure-pipelines/gpu-benchmark.yml | Run speed/memory benchmarks for parity with pure PyTorch. | GPU | (3.9, 1.12) | linux | -| test-on-tpus | .circleci/config.yml | Run only TPU-specific tests. | TPU | (3.7, 1.9) | linux | +| test-on-tpus | .circleci/config.yml | Run only TPU-specific tests. | TPU | (3.7, 1.12) | linux | - \*Accelerators used in CI - GPU: 2 x NVIDIA Tesla V100 diff --git a/.github/workflows/cicd-pytorch_dockers.yml b/.github/workflows/cicd-pytorch_dockers.yml index b037c798bc8ee..c693dade2b2db 100644 --- a/.github/workflows/cicd-pytorch_dockers.yml +++ b/.github/workflows/cicd-pytorch_dockers.yml @@ -51,7 +51,7 @@ jobs: matrix: # the config used in '.circleci/config.yml`' python_version: ["3.7"] - xla_version: ["1.11"] + xla_version: ["1.12"] steps: - uses: actions/checkout@v2 - uses: docker/setup-buildx-action@v2 diff --git a/dockers/base-xla/Dockerfile b/dockers/base-xla/Dockerfile index 977aee878ffcd..3cc43e6e1a4f3 100644 --- a/dockers/base-xla/Dockerfile +++ b/dockers/base-xla/Dockerfile @@ -19,7 +19,7 @@ LABEL maintainer="Lightning-AI " # CALL: docker image build -t pytorch-lightning:XLA-image -f dockers/base-xla/Dockerfile . --build-arg PYTHON_VERSION=3.8 ARG PYTHON_VERSION=3.9 ARG CONDA_VERSION=4.9.2 -ARG XLA_VERSION=1.11 +ARG XLA_VERSION=1.12 SHELL ["/bin/bash", "-c"] # for skipping configurations @@ -92,6 +92,10 @@ RUN \ python --version && \ cd pytorch-lightning && \ pip install -q fire && \ + # Pin mkl version to avoid OSError on torch import + # OSError: libmkl_intel_lp64.so.1: cannot open shared object file: No such file or directory + # https://github.com/pytorch/xla/issues/1666 + pip install mkl==2021.4.0 && \ # drop packages installed with XLA python .actions/assistant.py requirements_prune_pkgs torch,torchvision && \ # drop unnecessary packages