Merge branch 'IntelPython:main' into main
adarshyoga authored May 7, 2024
2 parents 1c2a44d + ab6ecb5 commit a59b83b
Showing 27 changed files with 231 additions and 131 deletions.
1 change: 0 additions & 1 deletion .github/workflows/build_and_run.yml
@@ -77,7 +77,6 @@ jobs:
uses: conda-incubator/setup-miniconda@v2
with:
python-version: ${{ matrix.python }}
miniforge-variant: Mambaforge
miniforge-version: latest
activate-environment: "build"
channel-priority: "disabled"
33 changes: 16 additions & 17 deletions .github/workflows/conda-package.yml
@@ -55,15 +55,13 @@ jobs:
fetch-depth: 0

- name: Setup miniconda
uses: conda-incubator/setup-miniconda@v2
uses: conda-incubator/setup-miniconda@v3
with:
python-version: ${{ matrix.python }}
miniforge-variant: Mambaforge
miniforge-version: latest
activate-environment: "build"
auto-activate-base: true
activate-environment: ""
channels: ${{ env.CHANNELS }}
channel-priority: "disabled"
run-post: false
miniforge-version: latest

- name: Disable defautls
run: conda config --remove channels defaults
@@ -74,18 +72,21 @@ jobs:
echo "CONDA_BLD=$CONDA_PREFIX/conda-bld/${{ runner.os == 'Linux' && 'linux' || 'win' }}-64/" | tr "\\" '/' >> $GITHUB_ENV
echo "WHEELS_OUTPUT_FOLDER=$GITHUB_WORKSPACE${{ runner.os == 'Linux' && '/' || '\\' }}" >> $GITHUB_ENV
# boa is an extention to conda so we can use mamba resolver in conda build
- name: Install conda-build
run: mamba install boa
run: conda install conda-build

- name: Configure MSBuild
if: runner.os == 'Windows'
uses: microsoft/setup-msbuild@…
with:
vs-version: '14.35'

- name: Show conda-rc
shell: bash -el {0}
run: cat ~/.condarc

- name: Build conda package
run: conda mambabuild --no-test --python ${{ matrix.python }} conda-recipe
run: conda build --python ${{ matrix.python }} conda-recipe

- name: Upload artifact
uses: actions/upload-artifact@…
@@ -122,15 +123,13 @@ jobs:

steps:
- name: Setup miniconda
uses: conda-incubator/setup-miniconda@v2
uses: conda-incubator/setup-miniconda@v3
with:
python-version: ${{ matrix.python }}
miniforge-variant: Mambaforge
miniforge-version: latest
activate-environment: "build"
channels: ${{ env.CHANNELS }}
channel-priority: "disabled"
run-post: false
miniforge-version: latest

- name: Disable defautls
run: conda config --remove channels defaults
@@ -159,7 +158,7 @@ jobs:
# Needed to be able to run conda index
- name: Install conda-build
run: mamba install conda-build conda-index
run: conda install conda-build conda-index

- name: Create conda channel
run: python -m conda_index ${{ env.CHANNEL_PATH }}
@@ -170,7 +169,7 @@ jobs:
cat ${{ env.VER_JSON_PATH }}
- name: Install dpbench
run: mamba install ${{ env.PACKAGE_NAME }}=${{ env.PACKAGE_VERSION }} pytest intel::intel-opencl-rt python=${{ matrix.python }} -c ${{ env.CHANNEL_PATH }}
run: conda install ${{ env.PACKAGE_NAME }}=${{ env.PACKAGE_VERSION }} pytest intel::intel-opencl-rt python=${{ matrix.python }} -c ${{ env.CHANNEL_PATH }}

- name: Setup OpenCL CPU device
if: runner.os == 'Windows'
@@ -197,11 +196,11 @@ jobs:
# we want to make sure that configuration files are geting populated
- name: Run npbench benchmark
run: |
dpbench -i numpy -b azimint_hist run --npbench --precision=${{matrix.precision}}
dpbench -i numpy -b azimint_hist run --experimental-npbench --precision=${{matrix.precision}}
- name: Run rodinia benchmark
run: |
dpbench run --rodinia --no-dpbench --no-validate -r 1 --precision=${{matrix.precision}}
dpbench run --experimental-rodinia --no-dpbench --no-validate -r 1 --precision=${{matrix.precision}}
upload_anaconda:
name: Upload dppy/label/dev ['${{ matrix.os }}', python='${{ matrix.python }}']
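For reference, the net effect of these workflow edits is that the package is now built with stock conda tooling instead of boa/mambabuild. A minimal local approximation of the updated steps (illustrative only; it assumes the channels configured by the workflow and uses 3.10 as a stand-in for ${{ matrix.python }}):

```
# Assumes an activated miniforge/conda environment with the project checkout
# as the working directory and the required channels already configured.
conda install conda-build
conda build --python 3.10 conda-recipe
```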
38 changes: 20 additions & 18 deletions README.md
@@ -117,9 +117,9 @@ SPDX-License-Identifier: Apache-2.0
4. All available options are available using `dpbench --help` and `dpbench <command> --help`:

```
usage: dpbench [-h] [-b [BENCHMARKS]] [-i [IMPLEMENTATIONS]] [-a | --all-implementations | --no-all-implementations] [--version] [-r [RUN_ID]] [--last-run | --no-last-run]
[-d [RESULTS_DB]] [--log-level [{critical,fatal,error,warning,info,debug}]]
{run,report,config} ...
usage: dpbench [-h] [-b [BENCHMARKS]] [-i [IMPLEMENTATIONS]] [-a | --all-implementations | --no-all-implementations] [--version] [-r [RUN_ID]] [--last-run | --no-last-run] [-d [RESULTS_DB]]
[--log-level [{critical,fatal,error,warning,info,debug}]]
{run,report,config} ...

positional arguments:
{run,report,config}
@@ -131,50 +131,52 @@ SPDX-License-Identifier: Apache-2.0
-i [IMPLEMENTATIONS], --implementations [IMPLEMENTATIONS]
Comma separated list of implementations. Use --all-implementations to load all available implementations.
-a, --all-implementations, --no-all-implementations
If set, all available implementations will be loaded. (default: False)
If set, all available implementations will be loaded.
--version show program's version number and exit
-r [RUN_ID], --run-id [RUN_ID]
run_id to perform actions on. Use --last-run to use latest available run, or leave empty to create new one.
--last-run, --no-last-run
Sets run_id to the latest run_id from the database. (default: False)
Sets run_id to the latest run_id from the database.
-d [RESULTS_DB], --results-db [RESULTS_DB]
Path to a database to store results.
--log-level [{critical,fatal,error,warning,info,debug}]
Log level.
```
```
usage: dpbench run [-h] [-p [{S,M,L}]] [-s | --validate | --no-validate] [--dpbench | --no-dpbench] [--npbench | --no-npbench] [--polybench | --no-polybench] [-r [REPEAT]] [-t [TIMEOUT]]
[--precision [{single,double}]] [--print-results | --no-print-results] [--save | --no-save] [--sycl-device [SYCL_DEVICE]]
[--skip-expected-failures | --no-skip-expected-failures]
usage: dpbench run [-h] [-p [{S,M16Gb,M,L}]] [-s | --validate | --no-validate] [--dpbench | --no-dpbench] [--experimental-npbench | --no-experimental-npbench] [--experimental-polybench | --no-experimental-polybench]
[--experimental-rodinia | --no-experimental-rodinia] [-r [REPEAT]] [-t [TIMEOUT]] [--precision [{single,double}]] [--print-results | --no-print-results] [--save | --no-save] [--sycl-device [SYCL_DEVICE]]
[--skip-expected-failures | --no-skip-expected-failures]
Subcommand to run benchmark executions.
options:
-h, --help show this help message and exit
-p [{S,M,L}], --preset [{S,M,L}]
-p [{S,M16Gb,M,L}], --preset [{S,M16Gb,M,L}]
Preset to use for benchmark execution.
-s, --validate, --no-validate
Set if the validation will be run for each benchmark. (default: True)
Set if the validation will be run for each benchmark.
--dpbench, --no-dpbench
Set if run dpbench benchmarks. (default: True)
--npbench, --no-npbench
Set if run npbench benchmarks. (default: False)
--polybench, --no-polybench
Set if run polybench benchmarks. (default: False)
Set if run dpbench benchmarks.
--experimental-npbench, --no-experimental-npbench
Set if run npbench benchmarks.
--experimental-polybench, --no-experimental-polybench
Set if run polybench benchmarks.
--experimental-rodinia, --no-experimental-rodinia
Set if run rodinia benchmarks.
-r [REPEAT], --repeat [REPEAT]
Number of repeats for each benchmark.
-t [TIMEOUT], --timeout [TIMEOUT]
Timeout time in seconds for each benchmark execution.
--precision [{single,double}]
Data precision to use for array initialization.
--print-results, --no-print-results
Show the result summary or not (default: True)
--save, --no-save Either to save execution into database. (default: True)
Show the result summary or not
--save, --no-save Either to save execution into database.
--sycl-device [SYCL_DEVICE]
Sycl device to overwrite for framework configurations.
--skip-expected-failures, --no-skip-expected-failures
Either to save execution into database. (default: True)
Either to save execution into database.
```
```
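As a quick illustration of the renamed suite flags documented above (the same invocations appear in the updated CI workflow; the benchmark name and precision values are examples):

```
# Run one NPBench benchmark with the NumPy implementation only
dpbench -i numpy -b azimint_hist run --experimental-npbench --precision=single

# Run only the Rodinia suite, skip validation, single repetition
dpbench run --experimental-rodinia --no-dpbench --no-validate -r 1 --precision=double
```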
45 changes: 34 additions & 11 deletions conda-recipe/bld.bat
@@ -17,8 +17,8 @@ set "DPBENCH_SYCL=1"
set "CMAKE_GENERATOR=Ninja"
set "CC=icx"
set "CXX=icx"

"%PYTHON%" setup.py clean --all
:: Make CMake verbose
set "VERBOSE=1"

FOR %%V IN (14.0.0 14 15.0.0 15 16.0.0 16 17.0.0 17) DO @(
REM set DIR_HINT if directory exists
@@ -41,18 +41,41 @@ if EXIST "%PLATFORM_DIR%" (
if errorlevel 1 exit 1
)

@REM TODO: switch to pip build. Currently results in broken binary
@REM %PYTHON% -m pip install --no-index --no-deps --no-build-isolation . -v
:: -wnx flags mean: --wheel --no-isolation --skip-dependency-check
%PYTHON% -m build -w -n -x
if %ERRORLEVEL% neq 0 exit 1

:: `pip install dist\dpbench*.whl` does not work on windows,
:: so use a loop; there's only one wheel in dist/ anyway
for /f %%f in ('dir /b /S .\dist') do (
%PYTHON% -m wheel tags --remove --build %GIT_DESCRIBE_NUMBER% %%f
if %ERRORLEVEL% neq 0 exit 1
)

:: wheel file was renamed
for /f %%f in ('dir /b /S .\dist') do (
%PYTHON% -m pip install %%f ^
--no-build-isolation ^
--no-deps ^
--only-binary :all: ^
--no-index ^
--prefix %PREFIX% ^
-vv
if %ERRORLEVEL% neq 0 exit 1
)

:: Must be consistent with pyproject.toml project.scritps. Currently pip does
:: not allow to ignore scripts installation, so we have to remove them manually.
:: https://github.com/pypa/pip/issues/3980
:: We have to let conda-build manage it for use in order to set proper python
:: path.
:: https://docs.conda.io/projects/conda-build/en/stable/resources/define-metadata.html#python-entry-points
rm %PREFIX%\Scripts\dpbench.exe

:: Copy wheel package
if NOT "%WHEELS_OUTPUT_FOLDER%"=="" (
rem Install and assemble wheel package from the build bits
"%PYTHON%" setup.py install --single-version-externally-managed --record=record.txt bdist_wheel --build-number %GIT_DESCRIBE_NUMBER%
if errorlevel 1 exit 1
copy dist\dpbench*.whl %WHEELS_OUTPUT_FOLDER%
if errorlevel 1 exit 1
) ELSE (
rem Only install
"%PYTHON%" setup.py install --single-version-externally-managed --record=record.txt
if errorlevel 1 exit 1
)

rem copy back
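Both recipe scripts (bld.bat above and build.sh below) now build a wheel and then rewrite its build tag, which renames the file on disk; a rough sketch of that sequence, with a made-up wheel name:

```
# Build the wheel, then embed the conda build number as the wheel build tag.
# "wheel tags --remove" writes the re-tagged wheel and deletes the original,
# which is why the scripts glob dist/ again before installing.
python -m build -w -n -x
python -m wheel tags --remove --build 5 dist/dpbench-*.whl
# e.g. dpbench-0.1.0-cp310-cp310-win_amd64.whl -> dpbench-0.1.0-5-cp310-cp310-win_amd64.whl
```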
34 changes: 23 additions & 11 deletions conda-recipe/build.sh
@@ -16,18 +16,30 @@ export CMAKE_GENERATOR="Ninja"
export CC=icx
export CXX=icpx

if [ -e "_skbuild" ]; then
${PYTHON} setup.py clean --all
fi

# TODO: switch to pip build. Currently results in broken binary on Windows
# $PYTHON -m pip install --no-index --no-deps --no-build-isolation . -v

# Build wheel package
if [ -n "${WHEELS_OUTPUT_FOLDER}" ]; then
$PYTHON setup.py install --single-version-externally-managed --record=record.txt bdist_wheel -p manylinux2014_x86_64 --build-number $GIT_DESCRIBE_NUMBER
mkdir -p ${WHEELS_OUTPUT_FOLDER}
cp dist/dpbench*.whl ${WHEELS_OUTPUT_FOLDER}
else
$PYTHON setup.py install --single-version-externally-managed --record=record.txt
# -wnx flags mean: --wheel --no-isolation --skip-dependency-check
${PYTHON} -m build -w -n -x
${PYTHON} -m wheel tags --remove --build "$GIT_DESCRIBE_NUMBER" \
--platform-tag manylinux2014_x86_64 dist/dpbench*.whl
${PYTHON} -m pip install dist/dpbench*.whl \
--no-build-isolation \
--no-deps \
--only-binary :all: \
--no-index \
--prefix ${PREFIX} \
-vv

# Must be consistent with pyproject.toml project.scritps. Currently pip does
# not allow to ignore scripts installation, so we have to remove them manually.
# https://github.com/pypa/pip/issues/3980
# We have to let conda-build manage it for use in order to set proper python
# path.
# https://docs.conda.io/projects/conda-build/en/stable/resources/define-metadata.html#python-entry-points
rm ${PREFIX}/bin/dpbench

# Copy wheel package
if [[ -v WHEELS_OUTPUT_FOLDER ]]; then
cp dist/dpbench*.whl "${WHEELS_OUTPUT_FOLDER[@]}"
fi
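For readability, the abbreviated flags used in the build step above expand to the following equivalent invocation:

```
# Same as: ${PYTHON} -m build -w -n -x
${PYTHON} -m build --wheel --no-isolation --skip-dependency-check
```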
61 changes: 29 additions & 32 deletions conda-recipe/meta.yaml
@@ -2,6 +2,11 @@
#
# SPDX-License-Identifier: Apache-2.0

{% set pyproject = load_file_data('pyproject.toml') %}
{% set py_deps = pyproject.get('project', {}).get('dependencies', []) %}
{% set py_build_deps = pyproject.get('build-system', {}).get('requires', []) %}
{% set project_scripts = pyproject.get('project', {}).get('scripts', {}) %}

package:
name: dpbench
version: {{ GIT_DESCRIBE_TAG }}
@@ -13,49 +18,41 @@ build:
number: {{ GIT_DESCRIBE_NUMBER }}
script_env:
- WHEELS_OUTPUT_FOLDER
entry_points:
{% for script, module in project_scripts | dictsort %}
- {{ script ~ " = " ~ module }}
{% endfor %}

requirements:
build:
- {{ compiler('cxx') }}
- {{ compiler('dpcpp') }} ==2024.0.0 # [not osx]
- {{ compiler('dpcpp') }}
# This is required to get compatible headers with the system installed glibc
- sysroot_linux-64 >=2.28 # [linux]
host:
- python
- setuptools
- cmake==3.26* # [win]
- cmake # [not win]
- ninja # [not win]
- scikit-build
- cython
- pybind11
- versioneer
- intel::numpy
- numba
- dpctl
- dpnp
- numba-dpex
- pip
{% for dep in py_build_deps %}
{% if dep.startswith('ninja') %}
- {{ dep.split(';')[0] }} # [not win]
{% elif dep.startswith('cmake') %}
- cmake=3.26 # [win]
- {{ dep }} # [not win]
{% elif dep.startswith('build>=') %}
- {{ 'python-' ~ dep }}
{% else %}
- {{ dep|replace('_','-') }}
{% endif %}
{% endfor %}
run:
- python
- tomli
- alembic
- sqlalchemy
- py-cpuinfo
- scipy
- scikit-learn
- pandas
- intel::numpy
- numba
- dpctl
- dpnp
- numba-dpex
{% for dep in py_deps %}
- {{ dep|replace('_','-') }}
{% endfor %}

test:
requires:
- dpctl
- dpnp
- numba-dpex
- numba
- numpy
commands:
- dpbench --help

about:
home: https://github.com/IntelPython/dpbench
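Because the recipe now derives its requirement lists from pyproject.toml through Jinja, one way to check what the templated sections expand to is to render the recipe locally (a suggestion, not part of this commit; requires conda-build in the environment):

```
# Prints meta.yaml with load_file_data() and the dependency loops resolved,
# so the generated host/run requirements can be reviewed before building.
conda render conda-recipe
```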
8 changes: 4 additions & 4 deletions dpbench/benchmarks/default/gpairs/gpairs_numba_dpex_k.py
@@ -38,9 +38,9 @@ def count_weighted_pairs_3d_intel_no_slm_ker(

n_wi = 32

dsq_mat = dpex.private.array(shape=(32 * 32), dtype=dtype)
w0_vec = dpex.private.array(shape=(32), dtype=dtype)
w1_vec = dpex.private.array(shape=(32), dtype=dtype)
dsq_mat = kapi.PrivateArray(shape=(32 * 32), dtype=dtype)
w0_vec = kapi.PrivateArray(shape=(32), dtype=dtype)
w1_vec = kapi.PrivateArray(shape=(32), dtype=dtype)

offset0 = gr0 * n_wi * lws0 + lid0
offset1 = gr1 * n_wi * lws1 + lid1
@@ -80,7 +80,7 @@ def count_weighted_pairs_3d_intel_no_slm_ker(

# update slm_hist. Use work-item private buffer of 16 tfloat elements
for k in range(0, slm_hist_size, private_hist_size):
private_hist = dpex.private.array(shape=(32), dtype=dtype)
private_hist = kapi.PrivateArray(shape=(32), dtype=dtype)
for p in range(private_hist_size):
private_hist[p] = 0.0

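The gpairs kernel above replaces the deprecated dpex.private.array helper with numba-dpex's kernel_api (kapi) PrivateArray. Below is a rough, self-contained sketch of that API in a trivial kernel; it assumes the kapi-based launch interface (call_kernel, Range, Item) of recent numba-dpex releases and may need adjustment for the exact installed version:

```
# Minimal PrivateArray illustration (assumed numba-dpex kernel API; not part of the commit).
import dpnp
import numba_dpex as dpex
import numba_dpex.kernel_api as kapi


@dpex.kernel
def double_with_scratch(item: kapi.Item, a, b):
    # Each work-item keeps a tiny private scratch buffer, mirroring the
    # kapi.PrivateArray usage in gpairs_numba_dpex_k.py above.
    scratch = kapi.PrivateArray(shape=(1), dtype=a.dtype)
    i = item.get_id(0)
    scratch[0] = a[i] + a[i]
    b[i] = scratch[0]


a = dpnp.arange(16, dtype=dpnp.float32)
b = dpnp.zeros_like(a)
dpex.call_kernel(double_with_scratch, kapi.Range(a.size), a, b)
```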
(Diffs for the remaining changed files are not shown in this view.)
