From dc7b262ac52a18d3e9acad37676d31f10601a2f0 Mon Sep 17 00:00:00 2001
From: Adarsh Yoga
Date: Wed, 3 May 2023 16:39:09 +0000
Subject: [PATCH] adding conda recipe

---
 .github/workflows/conda-package.yml           | 234 ++++++++++++++++++
 CMakeLists.txt                                |   4 +
 README.md                                     |  37 ++-
 conda-recipe/bld.bat                          |  60 +++++
 conda-recipe/build.sh                         |  32 +++
 conda-recipe/meta.yaml                        |  83 +++++++
 dpbench/configs/__init__.py                   |   3 +
 dpbench/configs/bench_info/__init__.py        |   3 +
 dpbench/configs/framework_info/__init__.py    |   3 +
 dpbench/infrastructure/datamodel.py           |   2 +-
 alembic.ini => dpbench/migrations/alembic.ini |   0
 dpbench/migrations/versions/__init__.py       |   3 +
 setup.py                                      |   5 +
 13 files changed, 463 insertions(+), 6 deletions(-)
 create mode 100644 .github/workflows/conda-package.yml
 create mode 100644 conda-recipe/bld.bat
 create mode 100644 conda-recipe/build.sh
 create mode 100644 conda-recipe/meta.yaml
 create mode 100644 dpbench/configs/__init__.py
 create mode 100644 dpbench/configs/bench_info/__init__.py
 create mode 100644 dpbench/configs/framework_info/__init__.py
 rename alembic.ini => dpbench/migrations/alembic.ini (100%)
 create mode 100644 dpbench/migrations/versions/__init__.py

diff --git a/.github/workflows/conda-package.yml b/.github/workflows/conda-package.yml
new file mode 100644
index 00000000..46c7dfcc
--- /dev/null
+++ b/.github/workflows/conda-package.yml
@@ -0,0 +1,234 @@
+# SPDX-FileCopyrightText: 2022 - 2023 Intel Corporation
+#
+# SPDX-License-Identifier: Apache-2.0
+
+name: Conda package
+
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+
+env:
+  PACKAGE_NAME: dpbench
+  MODULE_NAME: dpbench
+  CHANNELS: '-c dppy/label/dev -c conda-forge -c intel -c nodefaults --override-channels'
+  VER_JSON_NAME: 'version.json'
+  VER_SCRIPT1: "import json; f = open('version.json', 'r'); j = json.load(f); f.close(); "
+  VER_SCRIPT2: "d = j['dpbench'][0]; print('='.join((d[s] for s in ('version', 'build'))))"
+
+jobs:
+  build:
+    name: Build ['${{ matrix.os }}', python='${{ matrix.python }}']
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python: ['3.9', '3.10']
+        os: [ubuntu-latest, windows-latest]
+
+    runs-on: ${{ matrix.os }}
+
+    defaults:
+      run:
+        shell: bash -l {0}
+
+    continue-on-error: false
+
+    steps:
+      - name: Cancel Previous Runs
+        uses: styfle/cancel-workflow-action@0.11.0
+        with:
+          access_token: ${{ github.token }}
+
+      - name: Checkout ${{ env.PACKAGE_NAME }} repo
+        uses: actions/checkout@v3.3.0
+        with:
+          fetch-depth: 0
+
+      - name: Setup miniconda
+        uses: conda-incubator/setup-miniconda@v2.2.0
+        with:
+          auto-update-conda: true
+          python-version: ${{ matrix.python }}
+          miniconda-version: 'latest'
+          activate-environment: 'build'
+
+      - name: Store conda paths as envs
+        run: echo "CONDA_BLD=$CONDA_PREFIX/conda-bld/${{ runner.os == 'Linux' && 'linux' || 'win' }}-64/" | tr "\\" '/' >> $GITHUB_ENV
+
+      - name: Install conda-build
+        run: conda install conda-build
+
+      - name: Cache conda packages
+        uses: actions/cache@v3.2.6
+        env:
+          CACHE_NUMBER: 1  # Increase to reset cache
+        with:
+          path: ${{ env.CONDA_PKGS_DIR }}
+          key:
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('**/meta.yaml') }}
+          restore-keys: |
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
+
+      - name: Build conda package
+        run: conda build --no-test --python ${{ matrix.python }} ${{ env.CHANNELS }} conda-recipe
+
+      - name: Upload artifact
+        uses: actions/upload-artifact@v3.1.2
+        with:
+          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
+          path: ${{ env.CONDA_BLD }}${{ env.PACKAGE_NAME }}-*.tar.bz2
+
+  test:
+    name: Test ['${{ matrix.os }}', python='${{ matrix.python }}']
+
+    needs: build
+
+    runs-on: ${{ matrix.os }}
+
+    defaults:
+      run:
+        shell: ${{ matrix.os == 'windows-latest' && 'cmd /C CALL {0}' || 'bash -l {0}' }}
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python: ['3.9', '3.10']
+        os: [ubuntu-20.04, ubuntu-latest, windows-latest]
+        experimental: [false]
+
+    continue-on-error: ${{ matrix.experimental }}
+
+    steps:
+      - name: Setup miniconda
+        uses: conda-incubator/setup-miniconda@v2.2.0
+        with:
+          auto-update-conda: true
+          python-version: ${{ matrix.python }}
+          miniconda-version: 'latest'
+          activate-environment: 'test'
+
+      - name: Store conda paths as envs
+        shell: bash -l {0}
+        run: |
+          echo "CHANNEL_PATH=${{ github.workspace }}/channel/" | tr "\\" "/" >> $GITHUB_ENV
+          echo "EXTRACTED_PKG_PATH=${{ github.workspace }}/pkg/" | tr "\\" "/" >> $GITHUB_ENV
+          echo "VER_JSON_PATH=${{ github.workspace }}/version.json" | tr "\\" "/" >> $GITHUB_ENV
+          echo "PKG_PATH_IN_CHANNEL=${{ github.workspace }}/channel/${{ runner.os == 'Linux' && 'linux' || 'win' }}-64/" | tr "\\" "/" >> $GITHUB_ENV
+
+      - name: Download artifact
+        uses: actions/download-artifact@v3.0.2
+        with:
+          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
+          path: ${{ env.PKG_PATH_IN_CHANNEL }}
+
+      # We need --force-local because on Windows the path looks like C:/foo/bar
+      # and tar interprets the colon as a remote device separator.
+      - name: Extract package archive
+        shell: bash -l {0}
+        run: |
+          mkdir -p ${EXTRACTED_PKG_PATH}
+          tar -xvf ${PKG_PATH_IN_CHANNEL}${PACKAGE_NAME}-*.tar.bz2 -C ${EXTRACTED_PKG_PATH} --force-local
+
+      # Needed to be able to run conda index
+      - name: Install conda-build
+        run: conda install conda-build
+
+      - name: Create conda channel
+        run: conda index ${{ env.CHANNEL_PATH }}
+
+      - name: Test conda channel
+        run: |
+          conda search ${{ env.PACKAGE_NAME }} -c ${{ env.CHANNEL_PATH }} --override-channels --info --json > ${{ env.VER_JSON_PATH }}
+          cat ${{ env.VER_JSON_PATH }}
+
+      - name: Collect dependencies
+        shell: bash -l {0}
+        run: |
+          export PACKAGE_VERSION=$(python -c "${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}")
+
+          echo PACKAGE_VERSION=${PACKAGE_VERSION}
+          echo "PACKAGE_VERSION=$PACKAGE_VERSION" >> $GITHUB_ENV
+
+          conda install ${{ env.PACKAGE_NAME }}=${PACKAGE_VERSION} python=${{ matrix.python }} ${{ env.TEST_CHANNELS }} --only-deps --dry-run > lockfile
+          cat lockfile
+        env:
+          TEST_CHANNELS: '-c ${{ env.CHANNEL_PATH }} ${{ env.CHANNELS }}'
+
+      - name: Cache conda packages
+        uses: actions/cache@v3.2.6
+        env:
+          CACHE_NUMBER: 1  # Increase to reset cache
+        with:
+          path: ${{ env.CONDA_PKGS_DIR }}
+          key:
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }}
+          restore-keys: |
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
+
+      - name: Install opencl_rt
+        run: conda install opencl_rt -c intel --override-channels
+
+      - name: Install dpbench
+        run: conda install ${{ env.PACKAGE_NAME }}=${{ env.PACKAGE_VERSION }} pytest python=${{ matrix.python }} ${{ env.TEST_CHANNELS }}
+        env:
+          TEST_CHANNELS: '-c ${{ env.CHANNEL_PATH }} ${{ env.CHANNELS }}'
+
+      - name: List installed packages
+        run: conda list
+
+      - name: Smoke test
+        run: python -c "import dpnp, dpctl, dpbench; dpctl.lsplatform()"
+
+      - name: Run benchmarks
+        run: |
+          dpbench -i numpy,numba_dpex_p,dpnp,numba_n,sycl run
+          dpbench report
+
+  upload_anaconda:
+    name: Upload dppy/label/dev ['${{ matrix.os }}', python='${{ matrix.python }}']
+
+    needs: [test]
+
+    strategy:
+      matrix:
+        python: ['3.9', '3.10']
+        os: [ubuntu-latest, windows-latest]
+
+    runs-on: ${{ matrix.os }}
+
+    defaults:
+      run:
+        shell: bash -l {0}
+
+    continue-on-error: false
+
+    if: |
+      (github.repository == 'IntelPython/dpbench') &&
+      (github.ref == 'refs/heads/main' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/'))
+
+    steps:
+      - name: Download artifact
+        uses: actions/download-artifact@v3.0.2
+        with:
+          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
+
+      - name: Setup miniconda
+        uses: conda-incubator/setup-miniconda@v2.2.0
+        with:
+          auto-update-conda: true
+          python-version: ${{ matrix.python }}
+          miniconda-version: 'latest'
+          activate-environment: 'upload'
+
+      - name: Install anaconda-client
+        run: conda install anaconda-client
+
+      - name: Upload
+        run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2
+        env:
+          ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }}
diff --git a/CMakeLists.txt b/CMakeLists.txt
index e6a9796e..2f5d28c3 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -10,6 +10,10 @@ project(dpbench
   "Benchmark suite to evaluate Intel Data Parallel Extensions for Python"
 )
 
+# Help conda build find path from both host and build env.
+set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE BOTH)
+set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY BOTH)
+
 find_package(pybind11 CONFIG REQUIRED)
 find_package(IntelDPCPP REQUIRED)
 find_package(PythonExtensions REQUIRED)
diff --git a/README.md b/README.md
index 8f277f2d..e6a883ef 100644
--- a/README.md
+++ b/README.md
@@ -18,6 +18,24 @@ SPDX-License-Identifier: Apache-2.0
 * **\\_numba_mlir\_\.py** : This file contains Numba-MLIR implementations of the benchmarks. There are three modes: kernel-mode, numpy-mode and prange-mode. Experimental.
 
 ## Examples of setting up and running the benchmarks
+
+### Using prebuilt version
+
+1. Create conda environment
+
+   ```bash
+   conda create -n dpbench dpbench -c dppy/label/dev -c conda-forge -c intel -c nodefaults --override-channels
+   conda activate dpbench
+   ```
+
+2. Run specific benchmark, e.g. black_scholes
+
+   ```bash
+   dpbench -b black_scholes run
+   ```
+
+### Build from source (for development)
+
 1. Clone the repository
 
    ```bash
@@ -69,13 +87,22 @@ SPDX-License-Identifier: Apache-2.0
    ```bash
    dpbench -b black_scholes run
    ```
-5. Run all benchmarks
+
+### Usage
+
+1. Run all benchmarks
 
    ```bash
    dpbench -a run
    ```
 
-6. Device Customization
+2. Generate report
+
+   ```bash
+   dpbench report
+   ```
+
+3. Device Customization
 
    If a framework is SYCL based, an extra configuration option `sycl_device`
    may be set in the framework config file or by passing `--sycl-device`
    argument to `dpbench run` to control what device the framework uses for
    execution. The `sycl_device` value should be a valid SYCL device filter string.
 
    Here is an example:
 
-   ```json
-   dpbench -b black_scholes -i dpnp run --sycl-device=level_zero:gpu:0
+   ```shell
+   dpbench -b black_scholes -i dpnp run --sycl-device=level_zero:gpu:0
    ```
 
-7. All available options are available using `dpbench --help` and `dpbench --help`:
+4. All available options can be listed using `dpbench --help`:
 
    ```
    usage: dpbench [-h] [-b [BENCHMARKS]] [-i [IMPLEMENTATIONS]]
                   [-a | --all-implementations | --no-all-implementations] [--version]
                   [-r [RUN_ID]] [--last-run | --no-last-run]
diff --git a/conda-recipe/bld.bat b/conda-recipe/bld.bat
new file mode 100644
index 00000000..7fe45606
--- /dev/null
+++ b/conda-recipe/bld.bat
@@ -0,0 +1,60 @@
+REM SPDX-FileCopyrightText: 2022 - 2023 Intel Corporation
+REM
+REM SPDX-License-Identifier: Apache-2.0
+
+REM A workaround for activate-dpcpp.bat issue to be addressed in 2021.4
+set "LIB=%BUILD_PREFIX%\Library\lib;%BUILD_PREFIX%\compiler\lib;%LIB%"
+SET "INCLUDE=%BUILD_PREFIX%\include;%INCLUDE%"
+
+REM Since the 60.0.0 release, setuptools includes a local, vendored copy
+REM of distutils (from late copies of CPython) that is enabled by default.
+REM It breaks build for Windows, so use distutils from "stdlib" as before.
+REM @TODO: remove the setting, once transition to build backend on Windows
+REM to cmake is complete.
+SET "SETUPTOOLS_USE_DISTUTILS=stdlib"
+
+SET "DPBENCH_SYCL=1"
+
+"%PYTHON%" setup.py clean --all
+
+set "SKBUILD_ARGS=-G Ninja -- -DCMAKE_C_COMPILER:PATH=icx -DCMAKE_CXX_COMPILER:PATH=icx"
+set "SKBUILD_ARGS=%SKBUILD_ARGS% -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON"
+
+FOR %%V IN (14.0.0 14 15.0.0 15 16.0.0 16 17.0.0 17) DO @(
+    REM set DIR_HINT if directory exists
+    IF EXIST "%BUILD_PREFIX%\Library\lib\clang\%%V\" (
+        SET "SYCL_INCLUDE_DIR_HINT=%BUILD_PREFIX%\Library\lib\clang\%%V"
+    )
+)
+
+set "PATCHED_CMAKE_VERSION=3.26"
+set "PLATFORM_DIR=%PREFIX%\Library\share\cmake-%PATCHED_CMAKE_VERSION%\Modules\Platform"
+set "FN=Windows-IntelLLVM.cmake"
+
+rem Save the original file, and copy patched file to
+rem fix the issue with IntelLLVM integration with cmake on Windows
+if EXIST "%PLATFORM_DIR%" (
+    dir "%PLATFORM_DIR%\%FN%"
+    copy /Y "%PLATFORM_DIR%\%FN%" .
+    if errorlevel 1 exit 1
+    copy /Y ".github\workflows\Windows-IntelLLVM_%PATCHED_CMAKE_VERSION%.cmake" "%PLATFORM_DIR%\%FN%"
+    if errorlevel 1 exit 1
+)
+
+if NOT "%WHEELS_OUTPUT_FOLDER%"=="" (
+    rem Install and assemble wheel package from the build bits
+    "%PYTHON%" setup.py install bdist_wheel %SKBUILD_ARGS%
+    if errorlevel 1 exit 1
+    copy dist\dpbench*.whl %WHEELS_OUTPUT_FOLDER%
+    if errorlevel 1 exit 1
+) ELSE (
+    rem Only install
+    "%PYTHON%" setup.py install %SKBUILD_ARGS%
+    if errorlevel 1 exit 1
+)
+
+rem copy back
+if EXIST "%PLATFORM_DIR%" (
+    copy /Y "%FN%" "%PLATFORM_DIR%\%FN%"
+    if errorlevel 1 exit 1
+)
diff --git a/conda-recipe/build.sh b/conda-recipe/build.sh
new file mode 100644
index 00000000..1eb7e1f1
--- /dev/null
+++ b/conda-recipe/build.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+# SPDX-FileCopyrightText: 2022 - 2023 Intel Corporation
+#
+# SPDX-License-Identifier: Apache-2.0
+
+# Intel LLVM must cooperate with compiler and sysroot from conda
+echo "--gcc-toolchain=${BUILD_PREFIX} --sysroot=${BUILD_PREFIX}/${HOST}/sysroot -target ${HOST}" > icpx_for_conda.cfg
+export ICPXCFG="$(pwd)/icpx_for_conda.cfg"
+export ICXCFG="$(pwd)/icpx_for_conda.cfg"
+
+export CMAKE_GENERATOR="Ninja"
+export DPBENCH_SYCL=1
+
+if [ -e "_skbuild" ]; then
+    ${PYTHON} setup.py clean --all
+fi
+
+SKBUILD_ARGS="-- -DCMAKE_C_COMPILER:PATH=icx -DCMAKE_CXX_COMPILER:PATH=icpx -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON"
+
+# Build wheel package
+if [ "$CONDA_PY" == "36" ]; then
+    WHEELS_BUILD_ARGS="-p manylinux1_x86_64"
+else
+    WHEELS_BUILD_ARGS="-p manylinux2014_x86_64"
+fi
+if [ -n "${WHEELS_OUTPUT_FOLDER}" ]; then
+    $PYTHON setup.py install bdist_wheel ${WHEELS_BUILD_ARGS} ${SKBUILD_ARGS}
+    cp dist/dpbench*.whl ${WHEELS_OUTPUT_FOLDER}
+else
+    $PYTHON setup.py install ${SKBUILD_ARGS}
+fi
diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml
new file mode 100644
index 00000000..1c4394ea
--- /dev/null
+++ b/conda-recipe/meta.yaml
@@ -0,0 +1,83 @@
+# SPDX-FileCopyrightText: 2022 - 2023 Intel Corporation
+#
+# SPDX-License-Identifier: Apache-2.0
+
+package:
+  name: dpbench
+  version: 0.1
+source:
+  path: ..
+
+requirements:
+  build:
+    - {{ compiler('cxx') }}
+    - {{ compiler('dpcpp') }} >=2023.1  # [not osx]
+    - sysroot_linux-64 >=2.28  # [linux]
+  host:
+    - python
+    - setuptools
+    - cmake
+    - ninja
+    - wheel
+    - scikit-build
+    - cython
+    - pybind11
+    # runtime requirements, because conda tries to install them after build
+    # - tomli
+    # - alembic
+    # - sqlalchemy
+    # - py-cpuinfo
+    # - scipy
+    # - scikit-learn
+    # - pandas
+    - intel::numpy
+    - numba
+    - dpctl
+    - dpnp
+    - numba-dpex
+  run:
+    - python
+    - tomli
+    - alembic
+    - sqlalchemy
+    # - py-cpuinfo
+    - scipy
+    - scikit-learn
+    - pandas
+    - numpy
+    - numba
+    - dpctl
+    - dpnp
+    - numba-dpex
+
+build:
+  number: 0
+
+test:
+  requires:
+    - dpctl
+    - dpnp
+    - numba-dpex
+    - numba
+    - numpy
+  imports:
+    - dpbench
+  commands:
+    - python -c "import dpbench"
+
+about:
+  home: https://github.com/IntelPython/dpbench
+  license: Apache 2.0
+  license_file: LICENSE
+  description: |
+      LEGAL NOTICE: Use of this software package is subject to the
+      software license agreement (as set forth above, in the license section of
+      the installed Conda package and/or the README file) and all notices,
+      disclaimers or license terms for third party or open source software
+      included in or with the software.
+
+      EULA: Apache-2.0
+
+extra:
+  recipe-maintainers:
+    - Intel Python
diff --git a/dpbench/configs/__init__.py b/dpbench/configs/__init__.py
new file mode 100644
index 00000000..5985a8ff
--- /dev/null
+++ b/dpbench/configs/__init__.py
@@ -0,0 +1,3 @@
+# SPDX-FileCopyrightText: 2022 - 2023 Intel Corporation
+#
+# SPDX-License-Identifier: Apache-2.0
diff --git a/dpbench/configs/bench_info/__init__.py b/dpbench/configs/bench_info/__init__.py
new file mode 100644
index 00000000..5985a8ff
--- /dev/null
+++ b/dpbench/configs/bench_info/__init__.py
@@ -0,0 +1,3 @@
+# SPDX-FileCopyrightText: 2022 - 2023 Intel Corporation
+#
+# SPDX-License-Identifier: Apache-2.0
diff --git a/dpbench/configs/framework_info/__init__.py b/dpbench/configs/framework_info/__init__.py
new file mode 100644
index 00000000..5985a8ff
--- /dev/null
+++ b/dpbench/configs/framework_info/__init__.py
@@ -0,0 +1,3 @@
+# SPDX-FileCopyrightText: 2022 - 2023 Intel Corporation
+#
+# SPDX-License-Identifier: Apache-2.0
diff --git a/dpbench/infrastructure/datamodel.py b/dpbench/infrastructure/datamodel.py
index 6a66524e..6eb68e45 100644
--- a/dpbench/infrastructure/datamodel.py
+++ b/dpbench/infrastructure/datamodel.py
@@ -163,7 +163,7 @@ def create_results_table(db_file: str):
     """
 
     absolute_path = os.path.dirname(__file__)
-    relative_path = "../../alembic.ini"
+    relative_path = "../migrations/alembic.ini"
     full_path = os.path.join(absolute_path, relative_path)
 
     alembic_cfg = Config(full_path)
diff --git a/alembic.ini b/dpbench/migrations/alembic.ini
similarity index 100%
rename from alembic.ini
rename to dpbench/migrations/alembic.ini
diff --git a/dpbench/migrations/versions/__init__.py b/dpbench/migrations/versions/__init__.py
new file mode 100644
index 00000000..5985a8ff
--- /dev/null
+++ b/dpbench/migrations/versions/__init__.py
@@ -0,0 +1,3 @@
+# SPDX-FileCopyrightText: 2022 - 2023 Intel Corporation
+#
+# SPDX-License-Identifier: Apache-2.0
diff --git a/setup.py b/setup.py
index 7add57c5..02877d12 100644
--- a/setup.py
+++ b/setup.py
@@ -36,5 +36,10 @@
         find_packages(include=["*"])
         + find_packages(where="./dpbench/benchmarks/*/*")
     ),
+    include_package_data=True,
+    package_data={
+        "dpbench.migrations": ["alembic.ini"],
+        "dpbench.configs": ["*/*.toml", "*.toml"],
+    },
     cmake_args=cmake_args,
 )