110 changes: 76 additions & 34 deletions .github/workflows/build_and_run.yml
@@ -11,19 +11,26 @@ on:

env:
# sycl is not included. Add it manually if you need it
WORKLOADS: python,numpy,dpnp,numba_n,numba_np,numba_npr,numba_dpex_k,numba_dpex_n,numba_dpex_p,numba_mlir_k,numba_mlir_n,numba_mlir_p
WORKLOADS: python,numpy,dpnp,numba_n,numba_np,numba_npr,numba_dpex_k,numba_dpex_n,numba_dpex_p

jobs:
build_linux:
build:
name: Build and run

strategy:
fail-fast: false
matrix:
os: ["ubuntu-latest", "windows-latest"]
python: ["3.9", "3.10"]
python: ["3.9", "3.10", "3.11"]
sycl: ["sycl","no-sycl"]
install: ["pip", "setup.py"]
exclude:
# setuptools<64 + scikit-build produce an 'UNKNOWN' package name for
# Python 3.11. Could not find an exact reference for that issue.
# The latest setuptools is unable to create an editable environment for pip
- python: "3.11"
install: "pip"
sycl: "sycl"
include:
- sycl: sycl
os: ubuntu-latest
@@ -40,6 +47,10 @@ jobs:

runs-on: ${{matrix.os}}

defaults:
run:
shell: ${{ matrix.os == 'windows-latest' && 'cmd /C CALL {0}' || 'bash -el {0}' }}

steps:
- name: Cancel Previous Runs
uses: styfle/[email protected]
@@ -51,6 +62,22 @@
with:
fetch-depth: 0

# intel::numpy for Python 3.11 is not upstreamed yet
- name: Patch numpy dependency for Python 3.11
if: matrix.python == '3.11'
shell: bash -l {0}
run: |
find ./environments -type f | xargs sed -i 's/intel::numpy/numpy/'
find ./environments -type f | xargs sed -i '/numba-mlir/d'
find ./environments -type f | xargs sed -i 's/setuptools>=42,<64/setuptools/'

# TODO: remove once numba_mlir supports Python 3.11
- name: Add numba-mlir workloads for Python < 3.11
if: matrix.python != '3.11'
shell: bash -l {0}
run: |
echo "WORKLOADS=$WORKLOADS,numba_mlir_k,numba_mlir_n,numba_mlir_p" >> "$GITHUB_ENV"

- name: Setup miniconda
uses: conda-incubator/setup-miniconda@v2
with:
@@ -63,25 +90,10 @@ jobs:
run-post: false

- name: Conda info
shell: bash -el {0}
run: |
conda info
conda list


- name: Setup OpenCL CPU device
if: runner.os == 'Windows'
shell: pwsh
run: |
$script_path="$env:CONDA_PREFIX\Scripts\set-intel-ocl-icd-registry.ps1"
&$script_path
echo "OCL_ICD_FILENAMES=$env:CONDA_PREFIX\Library\lib\intelocl64.dll" >> $env:GITHUB_ENV
echo "LIB=$env:CONDA_PREFIX\Library\lib;$env:CONDA_PREFIX\compiler\lib;$env:LIB" >> $env:GITHUB_ENV
echo "INCLUDE=$env:CONDA_PREFIX\include;$env:INCLUDE" >> $env:GITHUB_ENV
# Check the variable assisting OpenCL CPU driver to find TBB DLLs which are not located where it expects them by default
$cl_cfg="$env:CONDA_PREFIX\Library\lib\cl.cfg"
Get-Content -Tail 5 -Path $cl_cfg

- name: Configure Python
if: runner.os == 'Windows'
shell: pwsh
@@ -98,6 +110,20 @@ jobs:
$env:FN="Windows-IntelLLVM.cmake"
Copy-Item ".github\workflows\Windows-IntelLLVM_${env:PATCHED_CMAKE_VERSION}.cmake" "${env:PLATFORM_DIR}\${env:FN}"

# TODO: remove once it is no longer a hard dependency. Not present in the
# conda-forge version, but there are some blockers before we can use it.
- name: Remove vs env
if: runner.os == 'Windows' && matrix.sycl == 'sycl'
run: |
conda remove --force vs2017_win-64

- name: Configure MSBuild
if: runner.os == 'Windows' && matrix.sycl == 'sycl'
uses: ilammy/msvc-dev-cmd@v1
with:
toolset: 14.35

- name: Configure Sycl
if: matrix.sycl == 'sycl'
shell: bash -el {0}
@@ -108,31 +134,47 @@ jobs:
echo "DPBENCH_SYCL=1" >> "$GITHUB_ENV"
echo "WORKLOADS=$WORKLOADS,sycl" >> "$GITHUB_ENV"

- name: Configure compiler
run: |
echo "CMAKE_GENERATOR=Ninja" >> "$GITHUB_ENV"

- name: Configure OneAPI
run: |
echo "ONEAPI_DEVICE_SELECTOR=opencl:cpu" >> "$GITHUB_ENV"

- name: Populate conda environment paths
shell: pwsh
if: runner.os == 'Windows'
run: |
echo "LIB=$env:CONDA_PREFIX\Library\lib;$env:CONDA_PREFIX\compiler\lib;$env:LIB" >> $env:GITHUB_ENV
echo "INCLUDE=$env:CONDA_PREFIX\include;$env:INCLUDE" >> $env:GITHUB_ENV

- name: Build dpbench
if: matrix.install == 'pip'
shell: bash -el {0}
run: |
pip install \
--no-index --no-deps --no-build-isolation -e . -v
run: pip install --no-index --no-deps --no-build-isolation -e . -v

- name: Build dpbench
if: matrix.install == 'setup.py'
shell: bash -el {0}
run: |
python setup.py develop
run: python setup.py develop

- name: Run benchmarks
- name: Configure dpbench
shell: bash -el {0}
run: |
export NUMBA_MLIR_GPU_RUNTIME=sycl
# TODO: do we need a GPU in GitHub Actions?
echo "NUMBA_MLIR_GPU_RUNTIME=sycl" >> "$GITHUB_ENV"
# Turn off the numba-dpex automatic CPU fallback
export NUMBA_DPEX_FALLBACK_ON_CPU=0
# Make sure numba-dpex uses native atomics in GitHub CI
export NUMBA_DPEX_ACTIVATE_ATOMICS_FP_NATIVE=1
echo "NUMBA_DPEX_FALLBACK_ON_CPU=0" >> "$GITHUB_ENV"

- name: Setup OpenCL CPU device
if: runner.os == 'Windows'
shell: pwsh
run: |
$script_path="$env:CONDA_PREFIX\Scripts\set-intel-ocl-icd-registry.ps1"
&$script_path
echo "OCL_ICD_FILENAMES=$env:CONDA_PREFIX\Library\lib\intelocl64.dll" >> $env:GITHUB_ENV

dpbench -i ${WORKLOADS} run -r2 --no-print-results || exit 1
- name: Run benchmarks
run: dpbench -i ${{env.WORKLOADS}} run -r2 --no-print-results || exit 1

- name: Generate report
shell: bash -el {0}
run: |
dpbench -i ${WORKLOADS} report || exit 1
run: dpbench -i ${{env.WORKLOADS}} report || exit 1
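Note on the pattern used throughout this workflow: settings such as WORKLOADS, DPBENCH_SYCL, and the NUMBA_* flags are handed from one step to the next by appending KEY=VALUE lines to the file referenced by $GITHUB_ENV; a plain export only lives for the duration of the current step's shell. A minimal sketch of that mechanism, reusing commands from the steps above with illustrative step boundaries:

# In an earlier step (e.g. "Configure Sycl"): exported values vanish when the
# step's shell exits, but lines appended to the $GITHUB_ENV file are injected
# into the environment of every subsequent step in the same job.
echo "DPBENCH_SYCL=1" >> "$GITHUB_ENV"
echo "WORKLOADS=$WORKLOADS,sycl" >> "$GITHUB_ENV"

# In a later step (e.g. "Run benchmarks"): the updated value is visible.
dpbench -i "$WORKLOADS" run -r2 --no-print-results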
17 changes: 12 additions & 5 deletions .github/workflows/conda-package.yml
@@ -14,7 +14,7 @@ env:
PACKAGE_NAME: dpbench
MODULE_NAME: dpbench
# There is a separate action that removes defaults.
CHANNELS: 'dppy/label/dev,conda-forge,intel'
CHANNELS: 'dppy/label/dev,conda-forge,intel,nodefaults'
VER_JSON_NAME: 'version.json'
VER_SCRIPT1: "import json; f = open('version.json', 'r'); j = json.load(f); f.close(); "
VER_SCRIPT2: "d = j['dpbench'][0]; print('='.join((d[s] for s in ('version', 'build'))))"
@@ -26,14 +26,14 @@ jobs:
strategy:
fail-fast: false
matrix:
python: ['3.9', '3.10']
python: ['3.9', '3.10', '3.11']
os: [ubuntu-latest, windows-latest]

runs-on: ${{ matrix.os }}

defaults:
run:
shell: bash -l {0}
shell: ${{ matrix.os == 'windows-latest' && 'cmd /C CALL {0}' || 'bash -l {0}' }}

continue-on-error: false

@@ -63,12 +63,19 @@
run: conda config --remove channels defaults

- name: Store conda paths as envs
shell: bash -l {0}
run: echo "CONDA_BLD=$CONDA_PREFIX/conda-bld/${{ runner.os == 'Linux' && 'linux' || 'win' }}-64/" | tr "\\" '/' >> $GITHUB_ENV

# boa is an extension to conda so we can use the mamba resolver in conda build
- name: Install conda-build
run: mamba install boa

- name: Configure MSBuild
if: runner.os == 'Windows'
uses: microsoft/[email protected]
with:
vs-version: '14.35'

- name: Build conda package
run: conda mambabuild --no-test --python ${{ matrix.python }} conda-recipe

@@ -92,7 +99,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python: ['3.9', '3.10']
python: ['3.9', '3.10', '3.11']
os: [ubuntu-20.04, ubuntu-latest, windows-latest]
experimental: [false]

@@ -168,7 +175,7 @@

strategy:
matrix:
python: ['3.9', '3.10']
python: ['3.9', '3.10', '3.11']
os: [ubuntu-latest, windows-latest]

runs-on: ${{ matrix.os }}
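For reference, the VER_SCRIPT1 and VER_SCRIPT2 fragments defined in this workflow's env block are Python snippets that, when concatenated into a single python -c invocation (an assumption based on how the two fragments fit together), read a version.json produced by a conda query (layout assumed from the script itself) and print a "<version>=<build>" string:

# Hedged sketch: run the concatenated VER_SCRIPT1 + VER_SCRIPT2 fragments
# against a local version.json; prints e.g. "<version>=<build>".
python -c "import json; f = open('version.json', 'r'); j = json.load(f); f.close(); d = j['dpbench'][0]; print('='.join((d[s] for s in ('version', 'build'))))"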
26 changes: 22 additions & 4 deletions CMakeLists.txt
@@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: Apache-2.0

cmake_minimum_required(VERSION 3.22 FATAL_ERROR)
cmake_minimum_required(VERSION 3.22..3.27 FATAL_ERROR)

project(dpbench
LANGUAGES CXX
@@ -14,10 +14,28 @@ project(dpbench
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE BOTH)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY BOTH)

find_package(Python 3.9 REQUIRED
COMPONENTS Interpreter Development.Module)

if(NOT DEFINED DPCTL_LIBRARY_PATH)
execute_process(
COMMAND ${Python_EXECUTABLE} -c "import dpctl,os; print(os.path.dirname(dpctl.__file__));"
OUTPUT_VARIABLE DPCTL_LIBRARY_PATH
RESULT_VARIABLE RET
OUTPUT_STRIP_TRAILING_WHITESPACE
)

if(RET EQUAL "1")
message(FATAL_ERROR "Module \'dpctl\' not found.")
endif()
endif()

cmake_path(APPEND DPCTL_MODULE_PATH ${DPCTL_LIBRARY_PATH} resources cmake)
list(APPEND CMAKE_MODULE_PATH ${DPCTL_MODULE_PATH})

find_package(pybind11 CONFIG REQUIRED)
find_package(IntelDPCPP REQUIRED)
find_package(PythonExtensions REQUIRED)
find_package(Python3 COMPONENTS NumPy Development)
find_package(IntelSYCL REQUIRED)
find_package(Dpctl REQUIRED)

set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED True)
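The new CMake logic locates dpctl's bundled CMake modules by asking the Python interpreter for the package directory and appending resources/cmake to it. A quick way to see what gets picked up, plus a hypothetical override (the path in the override is purely illustrative) since DPCTL_LIBRARY_PATH is only computed when it is not already defined:

# Print the directory the execute_process() call discovers:
python -c "import dpctl,os; print(os.path.dirname(dpctl.__file__))"

# Hypothetical manual override if dpctl lives outside the active environment:
cmake -DDPCTL_LIBRARY_PATH=/opt/envs/dpbench/lib/python3.10/site-packages/dpctl .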
14 changes: 8 additions & 6 deletions conda-recipe/bld.bat
@@ -13,13 +13,13 @@ REM @TODO: remove the setting, once transition to build backend on Windows
REM to cmake is complete.
SET "SETUPTOOLS_USE_DISTUTILS=stdlib"

SET "DPBENCH_SYCL=1"
set "DPBENCH_SYCL=1"
set "CMAKE_GENERATOR=Ninja"
set "CC=icx"
set "CXX=icx"

"%PYTHON%" setup.py clean --all

set "SKBUILD_ARGS=-G Ninja -- -DCMAKE_C_COMPILER:PATH=icx -DCMAKE_CXX_COMPILER:PATH=icx"
set "SKBUILD_ARGS=%SKBUILD_ARGS% -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON"

FOR %%V IN (14.0.0 14 15.0.0 15 16.0.0 16 17.0.0 17) DO @(
REM set DIR_HINT if directory exists
IF EXIST "%BUILD_PREFIX%\Library\lib\clang\%%V\" (
@@ -41,15 +41,17 @@ if EXIST "%PLATFORM_DIR%" (
if errorlevel 1 exit 1
)

@REM TODO: switch to pip build. Currently results in broken binary
@REM %PYTHON% -m pip install --no-index --no-deps --no-build-isolation . -v
if NOT "%WHEELS_OUTPUT_FOLDER%"=="" (
rem Install and assemble wheel package from the build bits
"%PYTHON%" setup.py install bdist_wheel %SKBUILD_ARGS%
"%PYTHON%" setup.py install bdist_wheel --single-version-externally-managed --record=record.txt
if errorlevel 1 exit 1
copy dist\dpbench*.whl %WHEELS_OUTPUT_FOLDER%
if errorlevel 1 exit 1
) ELSE (
rem Only install
"%PYTHON%" setup.py install %SKBUILD_ARGS%
"%PYTHON%" setup.py install --single-version-externally-managed --record=record.txt
if errorlevel 1 exit 1
)

15 changes: 10 additions & 5 deletions conda-recipe/build.sh
@@ -1,22 +1,27 @@
#!/bin/bash
#!/bin/bash -x

# SPDX-FileCopyrightText: 2022 - 2023 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0

# Intel LLVM must use the compiler and sysroot from conda
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${BUILD_PREFIX}/lib"

echo "--gcc-toolchain=${BUILD_PREFIX} --sysroot=${BUILD_PREFIX}/${HOST}/sysroot -target ${HOST}" > icpx_for_conda.cfg
export ICPXCFG="$(pwd)/icpx_for_conda.cfg"
export ICXCFG="$(pwd)/icpx_for_conda.cfg"

export CMAKE_GENERATOR="Ninja"
export DPBENCH_SYCL=1
export CMAKE_GENERATOR="Ninja"
export CC=icx
export CXX=icpx

if [ -e "_skbuild" ]; then
${PYTHON} setup.py clean --all
fi

SKBUILD_ARGS="-- -DCMAKE_C_COMPILER:PATH=icx -DCMAKE_CXX_COMPILER:PATH=icpx -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON"
# TODO: switch to pip build. Currently results in broken binary on Windows
# $PYTHON -m pip install --no-index --no-deps --no-build-isolation . -v

# Build wheel package
if [ "$CONDA_PY" == "36" ]; then
@@ -25,8 +30,8 @@ else
WHEELS_BUILD_ARGS="-p manylinux2014_x86_64"
fi
if [ -n "${WHEELS_OUTPUT_FOLDER}" ]; then
$PYTHON setup.py install bdist_wheel ${WHEELS_BUILD_ARGS} ${SKBUILD_ARGS}
$PYTHON setup.py install bdist_wheel ${WHEELS_BUILD_ARGS} --single-version-externally-managed --record=record.txt
cp dist/dpnp*.whl ${WHEELS_OUTPUT_FOLDER}
else
$PYTHON setup.py install ${SKBUILD_ARGS}
$PYTHON setup.py install --single-version-externally-managed --record=record.txt
fi
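Both recipes install with setuptools' externally-managed mode, which skips the egg-based layout and writes the path of every installed file to record.txt so the surrounding package manager owns the bookkeeping. A minimal local sketch of the same call, assuming a checkout with the build prerequisites already installed (the inspection command is only illustrative):

# Install in externally-managed mode and record the installed files:
python setup.py install --single-version-externally-managed --record=record.txt
# record.txt now lists every file the install step copied:
head record.txt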