diff --git a/.github/workflows/python_wheel_build.yml b/.github/workflows/python_wheel_build.yml
index b846b85d9c9f9..f5d62b4c9b484 100644
--- a/.github/workflows/python_wheel_build.yml
+++ b/.github/workflows/python_wheel_build.yml
@@ -11,7 +11,7 @@ on:
   schedule:
     - cron: '01 1 * * *'
   pull_request:
-    types: [labeled]
+    types: [opened, synchronize, reopened, labeled]
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
@@ -26,6 +26,7 @@ jobs:
       contains(github.event.pull_request.labels.*.name, 'build-python-wheels')
     runs-on: ubuntu-latest
     strategy:
+      fail-fast: false
       matrix:
         target: [cp39-manylinux_x86_64, cp310-manylinux_x86_64, cp311-manylinux_x86_64, cp312-manylinux_x86_64, cp313-manylinux_x86_64]
     name: ${{ matrix.target }}
@@ -35,6 +36,44 @@
     with:
       build-tag: ${{ matrix.target }}
 
+  test-wheels:
+    needs: build-wheels
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
+    name: test-wheel-cp${{ matrix.python-version }}
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Download produced wheels
+        uses: actions/download-artifact@v4
+        with:
+          path: wheels
+          merge-multiple: true
+
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install produced wheel
+        run: |
+          ls -R wheels
+          PY_VER=$(python -c "import sys; print(f'cp{sys.version_info.major}{sys.version_info.minor}')")
+          WHEEL=$(ls wheels/*${PY_VER}*.whl | head -n 1)
+          echo "Python version: ${PY_VER}, installing wheel: ${WHEEL}"
+          pip install "$WHEEL"
+
+      - name: Install tutorials dependencies
+        run: |
+          python -m pip install --no-cache-dir -r requirements.txt
+
+      - name: Run tutorials
+        run: |
+          pytest -vv -s -rF --show-capture=all test/wheels
+
   create-and-upload-wheel-registry:
     if: github.event_name != 'pull_request' # The secrets are not available in PR
     needs: build-wheels
diff --git a/setup.py b/setup.py
index d3522156c08d8..bbd31ff4413eb 100644
--- a/setup.py
+++ b/setup.py
@@ -52,7 +52,7 @@ def run(self):
         "-Dbuiltin_nlohmannjson=ON -Dbuiltin_tbb=ON -Dbuiltin_xrootd=ON "  # builtins
         "-Dbuiltin_lz4=ON -Dbuiltin_lzma=ON -Dbuiltin_zstd=ON -Dbuiltin_xxhash=ON "  # builtins
         "-Dpyroot=ON -Ddataframe=ON -Dxrootd=ON -Dssl=ON -Dimt=ON "
-        "-Droofit=ON "
+        "-Droofit=ON -Dmathmore=ON -Dbuiltin_fftw3=ON -Dbuiltin_gsl=ON "
         # Next 4 paths represent the structure of the target binaries/headers/libs
         # as the target installation directory of the Python environment would expect
         f"-DCMAKE_INSTALL_BINDIR={ROOT_BUILD_INTERNAL_DIRNAME}/ROOT/bin "
diff --git a/test/wheels/test_tutorials.py b/test/wheels/test_tutorials.py
new file mode 100644
index 0000000000000..8548ab9dae2ed
--- /dev/null
+++ b/test/wheels/test_tutorials.py
@@ -0,0 +1,114 @@
+import os
+import pathlib
+import shutil
+import signal
+import subprocess
+import sys
+
+import pytest
+import ROOT
+
+ROOT.gROOT.SetBatch(True)
+
+tutorial_dir = pathlib.Path(str(ROOT.gROOT.GetTutorialDir()))
+
+subdirs = ["analysis/dataframe", "analysis/tree", "hist", "io/ntuple", "roofit/roofit"]
+
+SKIP_TUTORIALS = {
+    "ntpl004_dimuon.C",  # requires reading remote data via HTTP
+    "ntpl008_import.C",  # requires reading remote data via HTTP
+    "ntpl011_global_temperatures.C",  # requires reading remote data via HTTP
+    "distrdf004_dask_lxbatch.py",  # only works on lxplus
+    "_SQlite",  # requires SQLite, not supported yet in ROOT wheels
+    "h1analysisProxy.C",  # helper macro, not meant to run standalone
+    "hist001_RHist_basics.C",  # requires RHist, not supported in ROOT wheels
+    "hist002_RHist_weighted.C",  # requires RHist, not supported in ROOT wheels
+}
+
+# ----------------------
+# Python tutorials tests
+# ----------------------
+py_tutorials = []
+for sub in subdirs:
+    sub_path = tutorial_dir / sub
+    for f in sub_path.rglob("*.py"):
+        if any(skip in f.name for skip in SKIP_TUTORIALS):
+            print("Skipping Python tutorial:", f)
+            continue
+        py_tutorials.append(f)
+
+py_tutorials = sorted(py_tutorials, key=lambda p: p.name)
+
+
+def test_tutorials_are_detected():
+    assert len(py_tutorials) > 0
+
+
+@pytest.mark.parametrize("tutorial", py_tutorials, ids=lambda p: p.name)
+def test_tutorial(tutorial):
+    env = dict(**os.environ)
+    # force matplotlib to use a non-GUI backend
+    env["MPLBACKEND"] = "Agg"
+    print("Test env:", env)
+    try:
+        result = subprocess.run(
+            [sys.executable, str(tutorial)],
+            check=True,
+            env=env,
+            timeout=60,
+            capture_output=True,
+            text=True,
+        )
+        print("Test stderr:", result.stderr)
+
+    except subprocess.TimeoutExpired:
+        pytest.skip(f"Tutorial {tutorial} timed out")
+
+    except subprocess.CalledProcessError as e:
+        # read stderr to see if EOFError occurred
+        if "EOFError" in e.stderr:
+            pytest.skip(f"Skipping {tutorial.name} (requires user input)")
+        raise
+
+
+# ----------------------
+# C++ tutorials tests
+# ----------------------
+cpp_tutorials = []
+for sub in subdirs:
+    sub_path = tutorial_dir / sub
+    for f in sub_path.rglob("*.C"):
+        if any(skip in f.name for skip in SKIP_TUTORIALS):
+            print("Skipping C++ tutorial:", f)
+            continue
+        cpp_tutorials.append(f)
+
+cpp_tutorials = sorted(cpp_tutorials, key=lambda p: p.name)
+
+
+def test_cpp_tutorials_are_detected():
+    assert len(cpp_tutorials) > 0
+
+
+@pytest.mark.parametrize("tutorial", cpp_tutorials, ids=lambda p: p.name)
+def test_cpp_tutorial(tutorial):
+    try:
+        root_exe = shutil.which("root")
+        result = subprocess.run(
+            [root_exe, "-b", "-q", str(tutorial)],
+            check=True,
+            timeout=60,
+            capture_output=True,
+            text=True,
+        )
+        print("Test stderr:", result.stderr)
+
+    except subprocess.TimeoutExpired:
+        pytest.skip(f"Tutorial {tutorial} timed out")
+
+    except subprocess.CalledProcessError as e:
+        if e.returncode == -signal.SIGILL or e.returncode == 132:
+            pytest.fail(f"Failing {tutorial.name} (illegal instruction on this platform)")
+        elif "EOFError" in e.stderr:
+            pytest.skip(f"Skipping {tutorial.name} (requires user input)")
+        raise