Fix issues for numpy 2.1.0 #1804

Merged: 11 commits, Aug 21, 2024
4 changes: 2 additions & 2 deletions .github/workflows/conda-package.yml
@@ -490,8 +490,8 @@ jobs:
run: |
CHANNELS="${{ env.CHANNELS }}"
. $CONDA/etc/profile.d/conda.sh
- conda create -n ${{ env.EXAMPLES_ENV_NAME }} -y pytest python=${{ matrix.python }} $CHANNELS
- conda install -n ${{ env.EXAMPLES_ENV_NAME }} -y cmake">=3.22" $CHANNELS || exit 1
+ conda create -n ${{ env.EXAMPLES_ENV_NAME }} -y pytest python=${{ matrix.python }} setuptools"<72.2.0" $CHANNELS
+ conda install -n ${{ env.EXAMPLES_ENV_NAME }} -y cmake $CHANNELS || exit 1
conda install -n ${{ env.EXAMPLES_ENV_NAME }} -y ninja $CHANNELS || exit 1
conda install -n ${{ env.EXAMPLES_ENV_NAME }} -y pybind11 cython scikit-build $CHANNELS || exit 1
conda install -n ${{ env.EXAMPLES_ENV_NAME }} -y mkl-dpcpp mkl-devel-dpcpp dpcpp_cpp_rt $CHANNELS || exit 1
18 changes: 9 additions & 9 deletions .github/workflows/generate-coverage.yaml
@@ -15,7 +15,7 @@ jobs:

env:
ONEAPI_ROOT: /opt/intel/oneapi
- GTEST_ROOT: /home/runner/work/googletest-1.13.0/install
+ GTEST_ROOT: /home/runner/work/googletest-1.15.2/install
# Use oneAPI compiler 2023 to work around an issue
USE_2023: 0

@@ -52,16 +52,16 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v5
with:
- python-version: '3.11'
+ python-version: '3.12'
architecture: x64

- name: Cache Gtest
id: cache-gtest
uses: actions/cache@v4
with:
path: |
- /home/runner/work/googletest-1.13.0/install
- key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('/home/runner/work/googletest-1.13.0/install/include/gtest/*') }}
+ /home/runner/work/googletest-1.15.2/install
+ key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('/home/runner/work/googletest-1.15.2/install/include/gtest/*') }}
restore-keys: |
${{ runner.os }}-build-${{ env.cache-name }}-
${{ runner.os }}-build-
@@ -72,12 +72,12 @@
shell: bash -l {0}
run: |
cd /home/runner/work
- wget https://github.com/google/googletest/archive/refs/tags/v1.13.0.tar.gz
- tar xf v1.13.0.tar.gz
- cd googletest-1.13.0
+ wget https://github.com/google/googletest/archive/refs/tags/v1.15.2.tar.gz
+ tar xf v1.15.2.tar.gz
+ cd googletest-1.15.2
mkdir build
cd build
- cmake .. -DCMAKE_INSTALL_PREFIX=/home/runner/work/googletest-1.13.0/install
+ cmake .. -DCMAKE_INSTALL_PREFIX=/home/runner/work/googletest-1.15.2/install
make && make install

- name: Checkout repo
@@ -92,7 +92,7 @@
- name: Install dpctl dependencies
shell: bash -l {0}
run: |
- pip install numpy"<1.26.0" cython setuptools pytest pytest-cov scikit-build cmake coverage[toml] versioneer[toml]==0.29
+ pip install numpy cython setuptools pytest pytest-cov scikit-build cmake coverage[toml] versioneer[toml]==0.29

- name: Build dpctl with coverage
shell: bash -l {0}
2 changes: 1 addition & 1 deletion .github/workflows/os-llvm-sycl-build.yml
@@ -100,7 +100,7 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v5
with:
- python-version: '3.11'
+ python-version: '3.12'
architecture: x64

- name: Install dpctl dependencies
7 changes: 5 additions & 2 deletions dpctl/tensor/_ctors.py
@@ -765,8 +765,11 @@ def _get_arange_length(start, stop, step):


def _to_scalar(obj, sc_ty):
- "A way to convert object to NumPy scalar type"
- zd_arr = np.asarray(obj).astype(sc_ty, casting="unsafe")
+ """A way to convert object to NumPy scalar type.
+ Raises OverflowError if obj can not be represented
+ using the requested scalar type.
+ """
+ zd_arr = np.asarray(obj, dtype=sc_ty)
return zd_arr[tuple()]


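Note on the _to_scalar change above: the replacement relies on NumPy rejecting out-of-range Python integers at conversion time. The following standalone sketch (an assumption about NumPy 2.x semantics, not code from this PR) contrasts the old unsafe-cast path, which silently wraps, with the new direct conversion, which raises OverflowError:

# Illustrative sketch only; assumes NumPy 2.x integer-conversion rules.
import numpy as np

val = 300  # not representable as int8 (max is 127)

# Old path: unsafe cast silently wraps modulo 256, yielding 44
wrapped = np.asarray(val).astype(np.int8, casting="unsafe")

# New path: direct conversion rejects the out-of-range Python int on NumPy 2.x
try:
    np.asarray(val, dtype=np.int8)
except OverflowError:
    print("OverflowError, as the new docstring promises")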
7 changes: 5 additions & 2 deletions dpctl/tensor/_dlpack.pyx
@@ -1083,9 +1083,10 @@ def from_dlpack(x, /, *, device=None, copy=None):
except TypeError:
# exporter does not support max_version keyword
got_type_error = True
- except (BufferError, NotImplementedError):
- # Either dl_device, or copy can be satisfied
+ except (BufferError, NotImplementedError, ValueError) as e:
+ # Either dl_device, or copy cannot be satisfied
got_buffer_error = True
+ saved_exception = e
except Exception as e:
got_other_error = True
saved_exception = e
@@ -1144,6 +1145,8 @@
raise BufferError(
"Importing data via DLPack requires copying, but copy=False was provided"
)
+ if dl_device is None:
+ raise saved_exception
# must copy via host
if dl_device[0] != device_OneAPI:
raise BufferError(f"Can not import to requested device {dl_device}")
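Side note on the try/except structure above: the TypeError fallback exists because exporters that predate DLPack 1.0 do not accept the max_version keyword of __dlpack__. A minimal sketch of that importer-side pattern, using a hypothetical helper name (this is not dpctl's internal API):

# Hypothetical helper illustrating the DLPack max_version fallback pattern.
def get_dlpack_capsule(x, max_version=(1, 0)):
    try:
        # DLPack 1.0-style call; newer exporters understand max_version
        return x.__dlpack__(max_version=max_version)
    except TypeError:
        # exporter predates DLPack 1.0 and rejects the keyword;
        # fall back to the legacy call signature
        return x.__dlpack__()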
3 changes: 3 additions & 0 deletions dpctl/tests/test_sycl_kernel_submit.py
@@ -66,6 +66,9 @@ def test_create_program_from_source(ctype_str, dtype, ctypes_ctor):

n_elems = 1024 * 512
lws = 128
+ if dtype.kind in "ui":
+ n_elems = min(n_elems, dpt.iinfo(dtype).max)
+ n_elems = (n_elems // lws) * lws
a = dpt.arange(n_elems, dtype=dtype, sycl_queue=q)
b = dpt.arange(n_elems, stop=0, step=-1, dtype=dtype, sycl_queue=q)
c = dpt.zeros(n_elems, dtype=dtype, sycl_queue=q)
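A worked example of the clamping added above (illustrative only; it substitutes np.iinfo for dpt.iinfo and assumes the test's values lws = 128 and n_elems = 1024 * 512):

# Illustrative arithmetic for the clamping logic; not part of the test itself.
import numpy as np

lws = 128
for dt in (np.dtype("u2"), np.dtype("i8")):
    n_elems = 1024 * 512
    if dt.kind in "ui":
        # cap the element count so array values stay representable in the dtype
        n_elems = min(n_elems, int(np.iinfo(dt).max))
    # round down to a whole number of work-groups
    n_elems = (n_elems // lws) * lws
    print(dt, n_elems)  # uint16 -> 65408, int64 -> 524288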
23 changes: 14 additions & 9 deletions dpctl/tests/test_usm_ndarray_ctor.py
@@ -1791,17 +1791,22 @@ def test_full_strides():
assert np.array_equal(dpt.asnumpy(X), Xnp)


- def test_full_gh_1230():
- q = get_queue_or_skip()
- dtype = "i4"
+ @pytest.mark.parametrize("dt", ["i1", "u1", "i2", "u2", "i4", "u4", "i8", "u8"])
+ def test_full_gh_1230(dt):
+ get_queue_or_skip()
+ dtype = dpt.dtype(dt)
dt_maxint = dpt.iinfo(dtype).max
- X = dpt.full(1, dt_maxint + 1, dtype=dtype, sycl_queue=q)
- X_np = dpt.asnumpy(X)
- assert X.dtype == dpt.dtype(dtype)
- assert np.array_equal(X_np, np.full_like(X_np, dt_maxint + 1))

- with pytest.raises(OverflowError):
- dpt.full(1, dpt.iinfo(dpt.uint64).max + 1, sycl_queue=q)
+ if (dtype.itemsize < 8) and (np.lib.NumpyVersion(np.__version__) < "2.0.0"):
+ try:
+ X = dpt.full(1, fill_value=(dt_maxint + 1), dtype=dt)
+ except OverflowError:
+ pytest.skip("Expected OverflowError raised")
+ Y = dpt.full_like(X, fill_value=dpt.iinfo(dt).min)
+ assert dpt.all(X == Y)
+ else:
+ with pytest.raises(OverflowError):
+ dpt.full(1, dt_maxint + 1, dtype=dt)


@pytest.mark.parametrize(
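The np.lib.NumpyVersion branch above reflects the NumPy behavior change this PR accommodates: NumPy 1.x wraps an out-of-range Python integer under unsafe casting (so dpt.full lands on the dtype's minimum), while NumPy 2.x raises OverflowError. A standalone NumPy sketch of that difference, offered as an assumption about NumPy behavior rather than part of the test:

# Illustration of the version-dependent behavior the test branches on.
import numpy as np

too_big = np.iinfo(np.int32).max + 1  # 2**31, out of range for int32

if np.lib.NumpyVersion(np.__version__) < "2.0.0":
    # unsafe casting wraps around to the dtype's minimum value,
    # matching the full_like comparison in the NumPy 1.x branch of the test
    x = np.asarray(too_big).astype(np.int32, casting="unsafe")
    assert x == np.iinfo(np.int32).min
else:
    # NumPy 2.x refuses the out-of-range Python integer outright,
    # matching the pytest.raises(OverflowError) branch
    try:
        np.asarray(too_big, dtype=np.int32)
    except OverflowError:
        pass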
14 changes: 11 additions & 3 deletions dpctl/tests/test_usm_ndarray_linalg.py
@@ -89,12 +89,20 @@ def test_matmul_simple(dtype):
skip_if_dtype_not_supported(dtype, q)

n, m = 235, 17
- m1 = dpt.ones((m, n), dtype=dtype)
- m2 = dpt.ones((n, m), dtype=dtype)
+ m1 = dpt.zeros((m, n), dtype=dtype)
+ m2 = dpt.zeros((n, m), dtype=dtype)
+
+ dt = m1.dtype
+ if dt.kind in "ui":
+ n1 = min(n, dpt.iinfo(dt).max)
+ else:
+ n1 = n
+ m1[:, :n1] = dpt.ones((m, n1), dtype=dt)
+ m2[:n1, :] = dpt.ones((n1, m), dtype=dt)

for k in [1, 2, 3, 4, 7, 8, 9, 15, 16, 17]:
r = dpt.matmul(m1[:k, :], m2[:, :k])
- assert dpt.all(r == dpt.full((k, k), n, dtype=dtype))
+ assert dpt.all(r == dpt.full((k, k), fill_value=n1, dtype=dt))


@pytest.mark.parametrize("dtype", _numeric_types)
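A small NumPy illustration of why the ones are now confined to the first n1 columns and rows (illustrative only; it mirrors the shapes used above and picks int8 as a dtype whose maximum, 127, is smaller than n = 235):

# Not part of the test: with int8, summing n = 235 ones would overflow,
# so only n1 = min(n, iinfo(dt).max) = 127 positions are filled with ones.
import numpy as np

n, m = 235, 17
dt = np.dtype("i1")
n1 = min(n, int(np.iinfo(dt).max))  # 127

m1 = np.zeros((m, n), dtype=dt)
m2 = np.zeros((n, m), dtype=dt)
m1[:, :n1] = 1
m2[:n1, :] = 1

k = 4
r = m1[:k, :] @ m2[:, :k]
assert (r == n1).all()  # every entry sums exactly 127 ones, which fits in int8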