Commit 92bb6b7

Merge branch 'main' into gold/2021
2 parents d02059e + addaf4b

14 files changed (+162 -36 lines)

.github/workflows/conda-package.yml

Lines changed: 17 additions & 14 deletions
@@ -1,6 +1,9 @@
 name: Conda package

-on: push
+on:
+  push:
+    branches:
+    tags:

 env:
   PACKAGE_NAME: numba-dppy
@@ -12,7 +15,7 @@ jobs:

     strategy:
       matrix:
-        python: ["3.8"]
+        python: [3.8, 3.9]
         integration_channels: [""]
         experimental: [true] # packages are not available on -c intel yet
         artifact_name: [""]
@@ -68,7 +71,7 @@ jobs:

     strategy:
       matrix:
-        python: ["3.8"]
+        python: [3.8, 3.9]
         integration_channels: [""]
         experimental: [true] # packages are not available on -c intel yet
         artifact_name: [""]
@@ -118,13 +121,15 @@ jobs:

     strategy:
       matrix:
-        python: ["3.8"]
+        python: [3.8, 3.9]
+        numba: [0.54, 0.55]
         integration_channels: [""]
         experimental: [true] # packages are not available on -c intel yet
         artifact_name: [""]
         dependencies: [""]
         include:
           - python: "3.8"
+            numba: 0.54
             integration_channels: -c dppy/label/dev
             artifact_name: -c dppy_label_dev
             experimental: false # current stable
@@ -171,7 +176,7 @@ jobs:
       - name: Install numba-dppy
         run: |
           CHANNELS="-c $GITHUB_WORKSPACE/channel ${{ env.CHANNELS }}"
-          conda install $PACKAGE_NAME pytest python=${{ matrix.python }} ${{ matrix.dependencies }} $CHANNELS
+          conda install $PACKAGE_NAME pytest python=${{ matrix.python }} numba=${{ matrix.numba }} ${{ matrix.dependencies }} $CHANNELS
           # Test installed packages
           conda list
       - name: Run tests
@@ -186,7 +191,7 @@ jobs:

     strategy:
       matrix:
-        python: ["3.8"]
+        python: [3.8, 3.9]
         integration_channels: [""]
         experimental: [true] # packages are not available on -c intel yet
         artifact_name: [""]
@@ -248,17 +253,16 @@ jobs:

   upload_linux:
     needs: test_linux
-    if: ${{ github.ref == 'refs/heads/main' }}
+    if: ${{ github.ref }} == 'refs/heads/main' || ${{ github.ref }} == 'refs/heads/release*'
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python: ["3.8"]
+        python: [3.8, 3.9]
     steps:
       - name: Download artifact
         uses: actions/download-artifact@v2
         with:
-          # `-c dppy_label_dev`: packages not published on `-c intel` yet
-          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} -c dppy_label_dev
+          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}

       - name: Install anaconda-client
         run: conda install anaconda-client
@@ -274,17 +278,16 @@ jobs:

   upload_windows:
     needs: test_windows
-    if: ${{ github.ref == 'refs/heads/main' }}
+    if: ${{ github.ref }} == 'refs/heads/main' || ${{ github.ref }} == 'refs/heads/release*'
     runs-on: windows-latest
     strategy:
       matrix:
-        python: [3.8]
+        python: [3.8, 3.9]
     steps:
       - name: Download artifact
         uses: actions/download-artifact@v2
         with:
-          # `-c dppy_label_dev`: packages not published on `-c intel` yet
-          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} -c dppy_label_dev
+          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}

       - uses: conda-incubator/setup-miniconda@v2
         with:

.github/workflows/numba.yml

Lines changed: 76 additions & 0 deletions
@@ -0,0 +1,76 @@
+name: Test w/ Numba PRs
+
+on:
+  workflow_dispatch:
+    inputs:
+      numba_pr:
+        description: Numba PR
+        required: true
+        default: 7177
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+
+    env:
+      ID: ${{ github.event.inputs.numba_pr }}
+
+    steps:
+      - name: Checkout numba-dppy
+        uses: actions/checkout@v2
+        with: {path: numba-dppy}
+
+      - name: Checkout numba
+        uses: actions/checkout@v2
+        with:
+          repository: numba/numba
+          path: numba
+
+      # See https://docs.github.com/en/github/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/checking-out-pull-requests-locally
+      - name: Checkout numba PR
+        run: |
+          cd numba
+          git fetch origin pull/${{env.ID}}/head:pr${{env.ID}}
+          git checkout pr${{env.ID}}
+
+      - name: Add conda to system path
+        shell: bash
+        run: echo $CONDA/bin >> $GITHUB_PATH
+
+      - name: Configure environment
+        run: |
+          cd numba-dppy
+          conda env update -n base -f environment.yml --prune
+          conda remove -n base numba --force
+
+          conda list
+          which python
+
+      - name: Install numba
+        run: |
+          cd numba
+          git log -1
+          python setup.py develop
+
+      - name: Install numba-dppy
+        run: |
+          cd numba-dppy
+          git log -1
+          python setup.py develop
+
+      - name: Test installation
+        run: |
+          conda list
+
+          # echo "libintelocl.so" | tee /etc/OpenCL/vendors/intel-cpu.icd
+          export OCL_ICD_FILENAMES=libintelocl.so
+
+          python -c "import numba; print(numba.__file__)"
+          python -c "import numba_dppy; print(numba_dppy.__file__)"
+
+      - name: Test
+        run: |
+          # echo "libintelocl.so" | tee /etc/OpenCL/vendors/intel-cpu.icd
+          export OCL_ICD_FILENAMES=libintelocl.so
+
+          pytest -q -ra --disable-warnings --pyargs numba_dppy -vv
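
Note: this workflow runs only on a manual workflow_dispatch, keyed by the numba_pr input above. A minimal sketch of one way to trigger it from a script follows; the repository slug, target branch, and GITHUB_TOKEN handling are assumptions for illustration and are not part of this commit.

# Hypothetical dispatch of the "Test w/ Numba PRs" workflow via the GitHub REST API.
# Assumed: repo slug IntelPython/numba-dppy, branch "main", token exported as GITHUB_TOKEN.
import os

import requests

resp = requests.post(
    "https://api.github.com/repos/IntelPython/numba-dppy/actions/workflows/numba.yml/dispatches",
    headers={
        "Accept": "application/vnd.github+json",
        "Authorization": f"token {os.environ['GITHUB_TOKEN']}",
    },
    # "inputs" maps to the workflow_dispatch inputs defined above.
    json={"ref": "main", "inputs": {"numba_pr": "7177"}},
)
resp.raise_for_status()  # GitHub returns 204 No Content on success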

README.md

Lines changed: 3 additions & 3 deletions
@@ -18,9 +18,9 @@ https://intelpython.github.io/dpnp/

 ## Dependencies

-* numba 0.54.*
-* dpctl 0.10.*
-* dpnp 0.8.* (optional)
+* numba 0.54.* or 0.55.*
+* dpctl 0.11.*
+* dpnp 0.9.* (optional)
 * llvm-spirv 11.* (SPIRV generation from LLVM IR)
 * spirv-tools
 * packaging

conda-recipe/meta.yaml

Lines changed: 6 additions & 6 deletions
@@ -18,17 +18,17 @@ requirements:
     - python
     - setuptools
    - cython
-    - numba 0.54*
-    - dpctl >=0.10*
-    - dpnp >=0.8* # [linux]
+    - numba 0.54*|0.55*
+    - dpctl 0.10*|0.11*
+    - dpnp 0.8*|0.9* # [linux]
     - wheel
   run:
     - python
-    - numba 0.54*
-    - dpctl >=0.10*
+    - numba 0.54*|0.55*
+    - dpctl 0.10*|0.11*
     - spirv-tools
     - llvm-spirv 11.*
-    - dpnp >=0.8* # [linux]
+    - dpnp 0.8*|0.9* # [linux]
     - packaging

 test:

docs/user_guides/debugging/debugging_environment.rst

Lines changed: 0 additions & 4 deletions
@@ -18,10 +18,6 @@ Configure debugging environment
     conda create numba-dppy-dev numba-dppy
     conda activate numba-dppy-dev

-.. note::
-
-    Debugging features were tested with the following packages: ``numba-dppy=0.14``, ``dpctl=0.8``, ``numba=0.53``.
-
 3) Activate NEO drivers (optional).

 If you want to use the local NEO driver, activate the variables for it. See the :ref:`NEO-driver`.

docs/user_guides/getting_started.rst

Lines changed: 1 addition & 1 deletion
@@ -6,7 +6,7 @@ Installation

 Numba-dppy depends on following components:

-* numba 0.54.* (`Numba`_)
+* numba 0.54.* or 0.55.* (`Numba`_)
 * dpctl 0.9.* (`Intel Python dpctl`_)
 * dpnp >=0.6.* (optional, `Intel Python DPNP`_)
 * `llvm-spirv`_ (SPIRV generation from LLVM IR)

environment.yml

Lines changed: 3 additions & 3 deletions
@@ -11,9 +11,9 @@ dependencies:
   - gxx_linux-64
   - dpcpp_linux-64
   - cython
-  - numba 0.54*
-  - dpctl 0.10*
-  - dpnp 0.8*
+  - numba 0.55*
+  - dpctl 0.11*
+  - dpnp 0.9*
   - spirv-tools
   # - llvm-spirv 11.*
   - packaging

numba_dppy/__init__.py

Lines changed: 1 addition & 0 deletions
@@ -517,6 +517,7 @@ def main():

 import numba.testing

+from numba_dppy.interop import asarray
 from numba_dppy.retarget import offload_to_sycl_device

 from . import config

numba_dppy/interop.py

Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
+# Copyright 2021 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Support for interoperability."""
+
+import dpctl.tensor as dpt
+
+
+def asarray(container):
+    """Convert container supported by interoperability to numba-dppy container.
+    Currently used dpctl.tensor.asarray().
+    """
+    try:
+        return dpt.asarray(container)
+    except:
+        pass
+
+    # Workaround for dpnp_array if dpctl asarray() does not support it.
+    try:
+        from dpnp.dpnp_array import dpnp_array
+
+        if isinstance(container, dpnp_array) and hasattr(container, "_array_obj"):
+            import warnings
+
+            warnings.warn("asarray() uses internals from dpnp.")
+            return container._array_obj
+    except:
+        pass
+
+    raise NotImplementedError("dpctl asarray() does not support " + type(container))
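
Note: the numba_dppy/__init__.py change above re-exports this helper at the package root, which is how the updated dpnp test below calls it as dppy.asarray. A hypothetical usage sketch, assuming a working dpctl/dpnp installation and an available SYCL device (not part of the commit):

# Sketch only: convert a dpnp container into the dpctl.tensor container numba-dppy consumes.
import dpnp

import numba_dppy as dppy

a = dpnp.arange(1024, dtype=dpnp.float32)  # dpnp array
usm_a = dppy.asarray(a)  # goes through dpctl.tensor.asarray(), or the dpnp-internals fallback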

numba_dppy/tests/integration/test_dpnp_interop.py

Lines changed: 3 additions & 3 deletions
@@ -89,8 +89,8 @@ def data_parallel_sum(a, b, c):
     global_size = 1021

     with dppy.offload_to_sycl_device(offload_device):
-        a = dpnp.arange(global_size, dtype=dtype)
-        b = dpnp.arange(global_size, dtype=dtype)
-        c = dpnp.ones_like(a)
+        a = dppy.asarray(dpnp.arange(global_size, dtype=dtype))
+        b = dppy.asarray(dpnp.arange(global_size, dtype=dtype))
+        c = dppy.asarray(dpnp.ones_like(a))

         data_parallel_sum[global_size, dppy.DEFAULT_LOCAL_SIZE](a, b, c)
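
Note: the hunk shows only the array setup; the kernel itself is defined earlier in the test file. A sketch of what a data_parallel_sum kernel typically looks like in numba-dppy (illustrative, not the test file's exact code):

import numba_dppy as dppy


@dppy.kernel
def data_parallel_sum(a, b, c):
    i = dppy.get_global_id(0)  # one work-item per element
    c[i] = a[i] + b[i]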
