
Commit 7d8f730

Merge pull request #163 from IntelPython/package
Enable conda build
2 parents e88dad9 + dc7b262 commit 7d8f730

13 files changed: +463 −6 lines changed
Lines changed: 234 additions & 0 deletions
@@ -0,0 +1,234 @@

```yaml
# SPDX-FileCopyrightText: 2022 - 2023 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0

name: Conda package

on:
  push:
    branches:
      - main
  pull_request:

env:
  PACKAGE_NAME: dpbench
  MODULE_NAME: dpbench
  CHANNELS: '-c dppy/label/dev -c conda-forge -c intel -c nodefaults --override-channels'
  VER_JSON_NAME: 'version.json'
  VER_SCRIPT1: "import json; f = open('version.json', 'r'); j = json.load(f); f.close(); "
  VER_SCRIPT2: "d = j['dpbench'][0]; print('='.join((d[s] for s in ('version', 'build'))))"

jobs:
  build:
    name: Build ['${{ matrix.os }}', python='${{ matrix.python }}']

    strategy:
      fail-fast: false
      matrix:
        python: ['3.9', '3.10']
        os: [ubuntu-latest, windows-latest]

    runs-on: ${{ matrix.os }}

    defaults:
      run:
        shell: bash -l {0}

    continue-on-error: false

    steps:
      - name: Cancel Previous Runs
        uses: styfle/[email protected]
        with:
          access_token: ${{ github.token }}

      - name: Checkout ${{ env.PACKAGE_NAME }} repo
        uses: actions/[email protected]
        with:
          fetch-depth: 0

      - name: Setup miniconda
        uses: conda-incubator/[email protected]
        with:
          auto-update-conda: true
          python-version: ${{ matrix.python }}
          miniconda-version: 'latest'
          activate-environment: 'build'

      - name: Store conda paths as envs
        run: echo "CONDA_BLD=$CONDA_PREFIX/conda-bld/${{ runner.os == 'Linux' && 'linux' || 'win' }}-64/" | tr "\\" '/' >> $GITHUB_ENV

      - name: Install conda-build
        run: conda install conda-build

      - name: Cache conda packages
        uses: actions/[email protected]
        env:
          CACHE_NUMBER: 1 # Increase to reset cache
        with:
          path: ${{ env.CONDA_PKGS_DIR }}
          key:
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{ hashFiles('**/meta.yaml') }}
          restore-keys: |
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-

      - name: Build conda package
        run: conda build --no-test --python ${{ matrix.python }} ${{ env.CHANNELS }} conda-recipe

      - name: Upload artifact
        uses: actions/[email protected]
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
          path: ${{ env.CONDA_BLD }}${{ env.PACKAGE_NAME }}-*.tar.bz2

  test:
    name: Test ['${{ matrix.os }}', python='${{ matrix.python }}']

    needs: build

    runs-on: ${{ matrix.os }}

    defaults:
      run:
        shell: ${{ matrix.os == 'windows-latest' && 'cmd /C CALL {0}' || 'bash -l {0}' }}

    strategy:
      fail-fast: false
      matrix:
        python: ['3.9', '3.10']
        os: [ubuntu-20.04, ubuntu-latest, windows-latest]
        experimental: [false]

    continue-on-error: ${{ matrix.experimental }}

    steps:
      - name: Setup miniconda
        uses: conda-incubator/[email protected]
        with:
          auto-update-conda: true
          python-version: ${{ matrix.python }}
          miniconda-version: 'latest'
          activate-environment: 'test'

      - name: Store conda paths as envs
        shell: bash -l {0}
        run: |
          echo "CHANNEL_PATH=${{ github.workspace }}/channel/" | tr "\\" "/" >> $GITHUB_ENV
          echo "EXTRACTED_PKG_PATH=${{ github.workspace }}/pkg/" | tr "\\" "/" >> $GITHUB_ENV
          echo "VER_JSON_PATH=${{ github.workspace }}/version.json" | tr "\\" "/" >> $GITHUB_ENV
          echo "PKG_PATH_IN_CHANNEL=${{ github.workspace }}/channel/${{ runner.os == 'Linux' && 'linux' || 'win' }}-64/" | tr "\\" "/" >> $GITHUB_ENV

      - name: Download artifact
        uses: actions/[email protected]
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
          path: ${{ env.PKG_PATH_IN_CHANNEL }}

      # We need --force-local because on Windows the path looks like C:/foo/bar
      # and tar interprets the colon as a remote device specifier.
      - name: Extract package archive
        shell: bash -l {0}
        run: |
          mkdir -p ${EXTRACTED_PKG_PATH}
          tar -xvf ${PKG_PATH_IN_CHANNEL}${PACKAGE_NAME}-*.tar.bz2 -C ${EXTRACTED_PKG_PATH} --force-local

      # Needed to be able to run conda index
      - name: Install conda-build
        run: conda install conda-build

      - name: Create conda channel
        run: conda index ${{ env.CHANNEL_PATH }}

      - name: Test conda channel
        run: |
          conda search ${{ env.PACKAGE_NAME }} -c ${{ env.CHANNEL_PATH }} --override-channels --info --json > ${{ env.VER_JSON_PATH }}
          cat ${{ env.VER_JSON_PATH }}

      - name: Collect dependencies
        shell: bash -l {0}
        run: |
          export PACKAGE_VERSION=$(python -c "${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}")

          echo PACKAGE_VERSION=${PACKAGE_VERSION}
          echo "PACKAGE_VERSION=$PACKAGE_VERSION" >> $GITHUB_ENV

          conda install ${{ env.PACKAGE_NAME }}=${PACKAGE_VERSION} python=${{ matrix.python }} ${{ env.TEST_CHANNELS }} --only-deps --dry-run > lockfile
          cat lockfile
        env:
          TEST_CHANNELS: '-c ${{ env.CHANNEL_PATH }} ${{ env.CHANNELS }}'

      - name: Cache conda packages
        uses: actions/[email protected]
        env:
          CACHE_NUMBER: 1 # Increase to reset cache
        with:
          path: ${{ env.CONDA_PKGS_DIR }}
          key:
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{ hashFiles('lockfile') }}
          restore-keys: |
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-

      - name: Install opencl_rt
        run: conda install opencl_rt -c intel --override-channels

      - name: Install dpbench
        run: conda install ${{ env.PACKAGE_NAME }}=${{ env.PACKAGE_VERSION }} pytest python=${{ matrix.python }} ${{ env.TEST_CHANNELS }}
        env:
          TEST_CHANNELS: '-c ${{ env.CHANNEL_PATH }} ${{ env.CHANNELS }}'

      - name: List installed packages
        run: conda list

      - name: Smoke test
        run: python -c "import dpnp, dpctl, dpbench; dpctl.lsplatform()"

      - name: Run benchmarks
        run: |
          dpbench -i numpy,numba_dpex_p,dpnp,numba_n,sycl run
          dpbench report

  upload_anaconda:
    name: Upload dppy/label/dev ['${{ matrix.os }}', python='${{ matrix.python }}']

    needs: [test]

    strategy:
      matrix:
        python: ['3.9', '3.10']
        os: [ubuntu-latest, windows-latest]

    runs-on: ${{ matrix.os }}

    defaults:
      run:
        shell: bash -l {0}

    continue-on-error: false

    if: |
      (github.repository == 'IntelPython/dpbench') &&
      (github.ref == 'refs/heads/main' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/'))

    steps:
      - name: Download artifact
        uses: actions/[email protected]
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}

      - name: Setup miniconda
        uses: conda-incubator/[email protected]
        with:
          auto-update-conda: true
          python-version: ${{ matrix.python }}
          miniconda-version: 'latest'
          activate-environment: 'upload'

      - name: Install anaconda-client
        run: conda install anaconda-client

      - name: Upload
        run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2
        env:
          ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }}
```
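
The `VER_SCRIPT1`/`VER_SCRIPT2` one-liners defined in the workflow's `env` block turn the JSON written by the "Test conda channel" step (`conda search ... --info --json > version.json`) into a `<version>=<build>` spec, which "Collect dependencies" exports as `PACKAGE_VERSION` and "Install dpbench" reuses. Below is a minimal, readable sketch of the same logic; the abbreviated `version.json` contents are hypothetical (the real `conda search` output carries many more fields):

```python
import json

# Hypothetical, trimmed-down stand-in for the version.json that
# `conda search dpbench --info --json` would produce in the workflow.
example = {"dpbench": [{"version": "0.0.3", "build": "py310_0"}]}
with open("version.json", "w") as f:
    json.dump(example, f)

# VER_SCRIPT1 equivalent: load the JSON document.
with open("version.json", "r") as f:
    j = json.load(f)

# VER_SCRIPT2 equivalent: take the first dpbench entry and join version
# and build into a conda match spec such as "0.0.3=py310_0".
d = j["dpbench"][0]
print("=".join(d[s] for s in ("version", "build")))
```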

CMakeLists.txt

Lines changed: 4 additions & 0 deletions
```diff
@@ -10,6 +10,10 @@ project(dpbench
 "Benchmark suite to evaluate Intel Data Parallel Extensions for Python"
 )
 
+# Help conda build find path from both host and build env.
+set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE BOTH)
+set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY BOTH)
+
 find_package(pybind11 CONFIG REQUIRED)
 find_package(IntelDPCPP REQUIRED)
 find_package(PythonExtensions REQUIRED)
```

README.md

Lines changed: 32 additions & 5 deletions
````diff
@@ -18,6 +18,24 @@ SPDX-License-Identifier: Apache-2.0
 * **\<benchmark\>\_numba_mlir\_\<mode\>.py** : This file contains Numba-MLIR implementations of the benchmarks. There are three modes: kernel-mode, numpy-mode and prange-mode. Experimental.
 
 ## Examples of setting up and running the benchmarks
+
+### Using prebuilt version
+
+1. Create a conda environment
+
+```bash
+conda create -n dpbench dpbench -c dppy/label/dev -c conda-forge -c intel -c nodefaults --override-channels
+conda activate dpbench
+```
+
+2. Run a specific benchmark, e.g. black_scholes
+
+```bash
+dpbench -b black_scholes run
+```
+
+### Build from source (for development)
+
 1. Clone the repository
 
 ```bash
@@ -69,13 +87,22 @@ SPDX-License-Identifier: Apache-2.0
 ```bash
 dpbench -b black_scholes run
 ```
-5. Run all benchmarks
+
+### Usage
+
+1. Run all benchmarks
 
 ```bash
 dpbench -a run
 ```
 
-6. Device Customization
+2. Generate report
+
+```bash
+dpbench report
+```
+
+3. Device Customization
 
 If a framework is SYCL based, an extra configuration option `sycl_device` may be set in the
 framework config file or by passing `--sycl-device` argument to `dpbench run` to control what device the framework uses for execution. The `sycl_device`
@@ -85,11 +112,11 @@ SPDX-License-Identifier: Apache-2.0
 
 Here is an example:
 
-```json
-dpbench -b black_scholes -i dpnp run --sycl-device=level_zero:gpu:0
+```shell
+dpbench -b black_scholes -i dpnp run --sycl-device=level_zero:gpu:0
 ```
 
-7. All available options are available using `dpbench --help` and `dpbench <command> --help`:
+4. All available options are available using `dpbench --help` and `dpbench <command> --help`:
 
 ```
 usage: dpbench [-h] [-b [BENCHMARKS]] [-i [IMPLEMENTATIONS]] [-a | --all-implementations | --no-all-implementations] [--version] [-r [RUN_ID]] [--last-run | --no-last-run]
````
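
The `--sycl-device=level_zero:gpu:0` example in the README diff uses the SYCL filter-selector syntax `backend:device_type:relative_id`. As a rough sketch (assuming `dpctl` is installed, as in the workflow's smoke test), such a filter string can be resolved ahead of time to confirm the device exists before running `dpbench`:

```python
import dpctl

# Filter selector in backend:device_type:relative_id form,
# as passed to `dpbench run --sycl-device=...`.
filter_string = "level_zero:gpu:0"

try:
    device = dpctl.SyclDevice(filter_string)
    print(f"{filter_string} -> {device.name}")
except Exception as exc:  # no matching device on this machine
    print(f"No device matches {filter_string!r}: {exc}")
    # Fall back to listing what is actually available.
    for d in dpctl.get_devices():
        print(" ", d.backend, d.device_type, d.name)
```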

conda-recipe/bld.bat

Lines changed: 60 additions & 0 deletions
@@ -0,0 +1,60 @@

```bat
REM SPDX-FileCopyrightText: 2022 - 2023 Intel Corporation
REM
REM SPDX-License-Identifier: Apache-2.0

REM A workaround for the activate-dpcpp.bat issue to be addressed in 2021.4
set "LIB=%BUILD_PREFIX%\Library\lib;%BUILD_PREFIX%\compiler\lib;%LIB%"
SET "INCLUDE=%BUILD_PREFIX%\include;%INCLUDE%"

REM Since the 60.0.0 release, setuptools includes a local, vendored copy
REM of distutils (from recent versions of CPython) that is enabled by default.
REM It breaks the build on Windows, so use distutils from "stdlib" as before.
REM @TODO: remove the setting once the transition of the Windows build backend
REM to cmake is complete.
SET "SETUPTOOLS_USE_DISTUTILS=stdlib"

SET "DPBENCH_SYCL=1"

"%PYTHON%" setup.py clean --all

set "SKBUILD_ARGS=-G Ninja -- -DCMAKE_C_COMPILER:PATH=icx -DCMAKE_CXX_COMPILER:PATH=icx"
set "SKBUILD_ARGS=%SKBUILD_ARGS% -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON"

FOR %%V IN (14.0.0 14 15.0.0 15 16.0.0 16 17.0.0 17) DO @(
    REM Set the include dir hint if the directory for this clang version exists
    IF EXIST "%BUILD_PREFIX%\Library\lib\clang\%%V\" (
        SET "SYCL_INCLUDE_DIR_HINT=%BUILD_PREFIX%\Library\lib\clang\%%V"
    )
)

set "PATCHED_CMAKE_VERSION=3.26"
set "PLATFORM_DIR=%PREFIX%\Library\share\cmake-%PATCHED_CMAKE_VERSION%\Modules\Platform"
set "FN=Windows-IntelLLVM.cmake"

rem Save the original file and copy the patched file in its place to
rem fix the issue with IntelLLVM integration with cmake on Windows
if EXIST "%PLATFORM_DIR%" (
    dir "%PLATFORM_DIR%\%FN%"
    copy /Y "%PLATFORM_DIR%\%FN%" .
    if errorlevel 1 exit 1
    copy /Y ".github\workflows\Windows-IntelLLVM_%PATCHED_CMAKE_VERSION%.cmake" "%PLATFORM_DIR%\%FN%"
    if errorlevel 1 exit 1
)

if NOT "%WHEELS_OUTPUT_FOLDER%"=="" (
    rem Install and assemble the wheel package from the build bits
    "%PYTHON%" setup.py install bdist_wheel %SKBUILD_ARGS%
    if errorlevel 1 exit 1
    copy dist\dpbench*.whl %WHEELS_OUTPUT_FOLDER%
    if errorlevel 1 exit 1
) ELSE (
    rem Only install
    "%PYTHON%" setup.py install %SKBUILD_ARGS%
    if errorlevel 1 exit 1
)

rem Copy the saved original platform file back
if EXIST "%PLATFORM_DIR%" (
    copy /Y "%FN%" "%PLATFORM_DIR%\%FN%"
    if errorlevel 1 exit 1
)
```
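
The `FOR %%V IN (...)` loop in `bld.bat` probes a fixed list of clang version directories under `%BUILD_PREFIX%` and keeps the last one that exists as `SYCL_INCLUDE_DIR_HINT`. A rough Python re-expression of that probing logic, using a purely hypothetical build prefix for illustration:

```python
from pathlib import Path

# Hypothetical build prefix; in the recipe this is the %BUILD_PREFIX% conda env.
build_prefix = Path(r"C:\conda\envs\build")

sycl_include_dir_hint = None
# Same candidate list as the bld.bat FOR loop; the last existing directory wins.
for version in ("14.0.0", "14", "15.0.0", "15", "16.0.0", "16", "17.0.0", "17"):
    candidate = build_prefix / "Library" / "lib" / "clang" / version
    if candidate.is_dir():
        sycl_include_dir_hint = candidate

print("SYCL_INCLUDE_DIR_HINT =", sycl_include_dir_hint)
```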
