Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
234 changes: 234 additions & 0 deletions .github/workflows/conda-package.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,234 @@
# SPDX-FileCopyrightText: 2022 - 2023 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0

name: Conda package

# Build on every push to main and on every pull request.
on:
  push:
    branches:
      - main
  pull_request:

env:
  PACKAGE_NAME: dpbench
  MODULE_NAME: dpbench
  # Channel set used for both building and testing; --override-channels keeps
  # the run hermetic (no user/system .condarc channels leak in).
  CHANNELS: '-c dppy/label/dev -c conda-forge -c intel -c nodefaults --override-channels'
  VER_JSON_NAME: 'version.json'
  # Two-part python snippet: VER_SCRIPT1 loads the `conda search --json` output,
  # VER_SCRIPT2 prints "<version>=<build>" for the first dpbench entry.
  VER_SCRIPT1: "import json; f = open('version.json', 'r'); j = json.load(f); f.close(); "
  VER_SCRIPT2: "d = j['dpbench'][0]; print('='.join((d[s] for s in ('version', 'build'))))"

jobs:
  # Build the conda package for each OS/Python combination and publish the
  # resulting .tar.bz2 as a CI artifact for the downstream test/upload jobs.
  build:
    name: Build ['${{ matrix.os }}', python='${{ matrix.python }}']

    strategy:
      fail-fast: false
      matrix:
        python: ['3.9', '3.10']
        os: [ubuntu-latest, windows-latest]

    runs-on: ${{ matrix.os }}

    defaults:
      run:
        shell: bash -l {0}

    continue-on-error: false

    steps:
      - name: Cancel Previous Runs
        uses: styfle/[email protected]
        with:
          access_token: ${{ github.token }}

      # fetch-depth: 0 pulls full history so the build can derive the
      # version from git tags.
      - name: Checkout ${{ env.PACKAGE_NAME }} repo
        uses: actions/[email protected]
        with:
          fetch-depth: 0

      - name: Setup miniconda
        uses: conda-incubator/[email protected]
        with:
          auto-update-conda: true
          python-version: ${{ matrix.python }}
          miniconda-version: 'latest'
          activate-environment: 'build'

      # conda-bld output lands under linux-64/ or win-64/ depending on runner;
      # tr normalizes Windows backslashes to forward slashes.
      - name: Store conda paths as envs
        run: echo "CONDA_BLD=$CONDA_PREFIX/conda-bld/${{ runner.os == 'Linux' && 'linux' || 'win' }}-64/" | tr "\\" '/' >> $GITHUB_ENV

      - name: Install conda-build
        run: conda install conda-build

      - name: Cache conda packages
        uses: actions/[email protected]
        env:
          CACHE_NUMBER: 1  # Increase to reset cache
        with:
          path: ${{ env.CONDA_PKGS_DIR }}
          key:
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{ hashFiles('**/meta.yaml') }}
          restore-keys: |
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-

      # --no-test: package tests run in the dedicated `test` job instead.
      - name: Build conda package
        run: conda build --no-test --python ${{ matrix.python }} ${{ env.CHANNELS }} conda-recipe

      - name: Upload artifact
        uses: actions/[email protected]
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
          path: ${{ env.CONDA_BLD }}${{ env.PACKAGE_NAME }}-*.tar.bz2

test:
name: Test ['${{ matrix.os }}', python='${{ matrix.python }}']

needs: build

runs-on: ${{ matrix.os }}

defaults:
run:
shell: ${{ matrix.os == 'windows-latest' && 'cmd /C CALL {0}' || 'bash -l {0}' }}

strategy:
fail-fast: false
matrix:
python: ['3.9', '3.10']
os: [ubuntu-20.04, ubuntu-latest, windows-latest]
experimental: [false]

continue-on-error: ${{ matrix.experimental }}

steps:
- name: Setup miniconda
uses: conda-incubator/[email protected]
with:
auto-update-conda: true
python-version: ${{ matrix.python }}
miniconda-version: 'latest'
activate-environment: 'test'

- name: Store conda paths as envs
shell: bash -l {0}
run: |
echo "CHANNEL_PATH=${{ github.workspace }}/channel/" | tr "\\" "/" >> $GITHUB_ENV
echo "EXTRACTED_PKG_PATH=${{ github.workspace }}/pkg/" | tr "\\" "/" >> $GITHUB_ENV
echo "VER_JSON_PATH=${{ github.workspace }}/version.json" | tr "\\" "/" >> $GITHUB_ENV
echo "PKG_PATH_IN_CHANNEL=${{ github.workspace }}/channel/${{ runner.os == 'Linux' && 'linux' || 'win' }}-64/" | tr "\\" "/" >> $GITHUB_ENV

- name: Download artifact
uses: actions/[email protected]
with:
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
path: ${{ env.PKG_PATH_IN_CHANNEL }}

# We need --force-local because on windows path looks like C:/foo/bar
# and tar interprits semicolons as remote device.
- name: Extract package archive
shell: bash -l {0}
run: |
mkdir -p ${EXTRACTED_PKG_PATH}
tar -xvf ${PKG_PATH_IN_CHANNEL}${PACKAGE_NAME}-*.tar.bz2 -C ${EXTRACTED_PKG_PATH} --force-local

# Needed to be able to run conda index
- name: Install conda-build
run: conda install conda-build

- name: Create conda channel
run: conda index ${{ env.CHANNEL_PATH }}

- name: Test conda channel
run: |
conda search ${{ env.PACKAGE_NAME }} -c ${{ env.CHANNEL_PATH }} --override-channels --info --json > ${{ env.VER_JSON_PATH }}
cat ${{ env.VER_JSON_PATH }}

- name: Collect dependencies
shell: bash -l {0}
run: |
export PACKAGE_VERSION=$(python -c "${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}")

echo PACKAGE_VERSION=${PACKAGE_VERSION}
echo "PACKAGE_VERSION=$PACKAGE_VERSION" >> $GITHUB_ENV

conda install ${{ env.PACKAGE_NAME }}=${PACKAGE_VERSION} python=${{ matrix.python }} ${{ env.TEST_CHANNELS }} --only-deps --dry-run > lockfile
cat lockfile
env:
TEST_CHANNELS: '-c ${{ env.CHANNEL_PATH }} ${{ env.CHANNELS }}'

- name: Cache conda packages
uses: actions/[email protected]
env:
CACHE_NUMBER: 1 # Increase to reset cache
with:
path: ${{ env.CONDA_PKGS_DIR }}
key:
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }}
restore-keys: |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-

- name: Install opencl_rt
run: conda install opencl_rt -c intel --override-channels

- name: Install dpbench
run: conda install ${{ env.PACKAGE_NAME }}=${{ env.PACKAGE_VERSION }} pytest python=${{ matrix.python }} ${{ env.TEST_CHANNELS }}
env:
TEST_CHANNELS: '-c ${{ env.CHANNEL_PATH }} ${{ env.CHANNELS }}'

- name: List installed packages
run: conda list

- name: Smoke test
run: python -c "import dpnp, dpctl, dpbench; dpctl.lsplatform()"

- name: Run benchmakrs
run: |
dpbench -i numpy,numba_dpex_p,dpnp,numba_n,sycl run
dpbench report

upload_anaconda:
name: Upload dppy/label/dev ['${{ matrix.os }}', python='${{ matrix.python }}']

needs: [test]

strategy:
matrix:
python: ['3.9', '3.10']
os: [ubuntu-latest, windows-latest]

runs-on: ${{ matrix.os }}

defaults:
run:
shell: bash -l {0}

continue-on-error: false

if: |
(github.repository == 'IntelPython/dpbench') &&
(github.ref == 'refs/heads/main' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/'))

steps:
- name: Download artifact
uses: actions/[email protected]
with:
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}

- name: Setup miniconda
uses: conda-incubator/[email protected]
with:
auto-update-conda: true
python-version: ${{ matrix.python }}
miniconda-version: 'latest'
activate-environment: 'upload'

- name: Install anaconda-client
run: conda install anaconda-client

- name: Upload
run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2
env:
ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }}
4 changes: 4 additions & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,10 @@ project(dpbench
"Benchmark suite to evaluate Intel Data Parallel Extensions for Python"
)

# Help conda build find path from both host and build env.
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE BOTH)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY BOTH)

find_package(pybind11 CONFIG REQUIRED)
find_package(IntelDPCPP REQUIRED)
find_package(PythonExtensions REQUIRED)
Expand Down
37 changes: 32 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,24 @@ SPDX-License-Identifier: Apache-2.0
* **\<benchmark\>\_numba_mlir\_\<mode\>.py** : This file contains Numba-MLIR implementations of the benchmarks. There are three modes: kernel-mode, numpy-mode and prange-mode. Experimental.

## Examples of setting up and running the benchmarks

### Using prebuilt version

1. Create conda environment

```bash
conda create -n dpbench dpbench -c dppy/label/dev -c conda-forge -c intel -c nodefaults --override-channels
conda activate dpbench
```

2. Run specific benchmark, e.g. black_scholes

```bash
dpbench -b black_scholes run
```

### Build from source (for development)

1. Clone the repository

```bash
Expand Down Expand Up @@ -69,13 +87,22 @@ SPDX-License-Identifier: Apache-2.0
```bash
dpbench -b black_scholes run
```
5. Run all benchmarks

### Usage

1. Run all benchmarks

```bash
dpbench -a run
```

6. Device Customization
2. Generate report

```bash
dpbench report
```

3. Device Customization

If a framework is SYCL based, an extra configuration option `sycl_device` may be set in the
framework config file or by passing `--sycl-device` argument to `dpbench run` to control what device the framework uses for execution. The `sycl_device`
Expand All @@ -85,11 +112,11 @@ SPDX-License-Identifier: Apache-2.0

Here is an example:

```json
dpbench -b black_scholes -i dpnp run --sycl-device=level_zero:gpu:0
```shell
dpbench -b black_scholes -i dpnp run --sycl-device=level_zero:gpu:0
```

7. All available options are available using `dpbench --help` and `dpbench <command> --help`:
4. All available options can be listed using `dpbench --help` and `dpbench <command> --help`:

```
usage: dpbench [-h] [-b [BENCHMARKS]] [-i [IMPLEMENTATIONS]] [-a | --all-implementations | --no-all-implementations] [--version] [-r [RUN_ID]] [--last-run | --no-last-run]
Expand Down
60 changes: 60 additions & 0 deletions conda-recipe/bld.bat
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
REM SPDX-FileCopyrightText: 2022 - 2023 Intel Corporation
REM
REM SPDX-License-Identifier: Apache-2.0

REM conda-build script for the dpbench package on Windows: sets up the
REM oneAPI/icx toolchain environment, works around cmake/setuptools issues,
REM then builds and installs via scikit-build (setup.py).

REM A workaround for activate-dpcpp.bat issue to be addressed in 2021.4
set "LIB=%BUILD_PREFIX%\Library\lib;%BUILD_PREFIX%\compiler\lib;%LIB%"
SET "INCLUDE=%BUILD_PREFIX%\include;%INCLUDE%"

REM Since the 60.0.0 release, setuptools includes a local, vendored copy
REM of distutils (from late copies of CPython) that is enabled by default.
REM It breaks build for Windows, so use distutils from "stdlib" as before.
REM @TODO: remove the setting, once transition to build backend on Windows
REM to cmake is complete.
SET "SETUPTOOLS_USE_DISTUTILS=stdlib"

REM Enable SYCL benchmark implementations in the dpbench build.
SET "DPBENCH_SYCL=1"

REM Remove any stale build artifacts before building.
"%PYTHON%" setup.py clean --all

REM scikit-build args: Ninja generator, Intel icx as C/C++ compiler.
set "SKBUILD_ARGS=-G Ninja -- -DCMAKE_C_COMPILER:PATH=icx -DCMAKE_CXX_COMPILER:PATH=icx"
set "SKBUILD_ARGS=%SKBUILD_ARGS% -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON"

REM Probe known clang versions shipped with the compiler package; the last
REM existing directory wins as the SYCL include-dir hint.
FOR %%V IN (14.0.0 14 15.0.0 15 16.0.0 16 17.0.0 17) DO @(
REM set DIR_HINT if directory exists
IF EXIST "%BUILD_PREFIX%\Library\lib\clang\%%V\" (
SET "SYCL_INCLUDE_DIR_HINT=%BUILD_PREFIX%\Library\lib\clang\%%V"
)
)

REM NOTE(review): the patch below only matches cmake %PATCHED_CMAKE_VERSION%;
REM other cmake versions are left untouched (the if EXIST check skips them).
set "PATCHED_CMAKE_VERSION=3.26"
set "PLATFORM_DIR=%PREFIX%\Library\share\cmake-%PATCHED_CMAKE_VERSION%\Modules\Platform"
set "FN=Windows-IntelLLVM.cmake"

rem Save the original file, and copy patched file to
rem fix the issue with IntelLLVM integration with cmake on Windows
if EXIST "%PLATFORM_DIR%" (
dir "%PLATFORM_DIR%\%FN%"
copy /Y "%PLATFORM_DIR%\%FN%" .
if errorlevel 1 exit 1
copy /Y ".github\workflows\Windows-IntelLLVM_%PATCHED_CMAKE_VERSION%.cmake" "%PLATFORM_DIR%\%FN%"
if errorlevel 1 exit 1
)

if NOT "%WHEELS_OUTPUT_FOLDER%"=="" (
rem Install and assemble wheel package from the build bits
"%PYTHON%" setup.py install bdist_wheel %SKBUILD_ARGS%
if errorlevel 1 exit 1
copy dist\dpbench*.whl %WHEELS_OUTPUT_FOLDER%
if errorlevel 1 exit 1
) ELSE (
rem Only install
"%PYTHON%" setup.py install %SKBUILD_ARGS%
if errorlevel 1 exit 1
)

rem copy back
rem Restore the original cmake platform file saved above.
if EXIST "%PLATFORM_DIR%" (
copy /Y "%FN%" "%PLATFORM_DIR%\%FN%"
if errorlevel 1 exit 1
)
Loading