
Commit 6470324

[CI] upgrade dpctl/dpnp versions (#2414)
* Update build-and-test-lnx.yml
* Update ci.yml
* Update build-and-test-lnx.yml
* Update renovate.json
* Update renovate.json
* Update test_common.py
* Update test_common.py
* Update test_common.py
* Update build-and-test-lnx.yml
* Update build-and-test-lnx.yml
* Update ci.yml
* Update renovate.json
* Update build-and-test-lnx.yml
* Update test_common.py
* Update test_common.py
* Update common.py
* Update coordinate_descent.py
* Update logistic_regression.py
* Update logistic_regression.py
* Update _forest.py
* Update _forest.py
* Update ci.yml
* Update ci.yml
* Update _data_conversion.py
* Update _data_conversion.py
* Update _device_offload.py
* try again
* fixes
* fix KMeans transform issue
* formatting
* further fixes
* Update install_dpcpp.sh
* stopping point
* Update ci.yml
* face facts
* clarification
* add renovatebot checks
* switch source for dpctl/dpnp tracking
* fix potential issue with nightly.yml
* fix bug
* forgotten save
* try and fix CI
* fix method vs staticmethod issue
* try again
* try again
* move validate_data
* Update test_data.py
* reformat
* try again
* try to lazy import changes for dpnp
* try with the spelling correction
* try again
* Update ci.yml
* Update _data_conversion.py
* Update coordinate_descent.py
* Update _data_conversion.py
* Update coordinate_descent.py
* Update _data_conversion.py
* Update ci.yml
* Update ci.yml
* fix issues related to dpctl/dpnp
* further fixes
* final hopefully
* sometime I can be really dumb
* sometime I can be really dumb
* Update _device_offload.py
* fixes and requests
* try to fix spmd use_raw_input issues
* fixes and requests
1 parent b226b2c commit 6470324

File tree

19 files changed, +152 -63 lines changed


.ci/pipeline/build-and-test-lnx.yml

Lines changed: 5 additions & 4 deletions
@@ -60,13 +60,14 @@ steps:
       pip install --upgrade -r requirements-test.txt
       pip install $(python .ci/scripts/get_compatible_scipy_version.py)
       # dpep installation is set to pypi to avoid conflict of numpy versions from pip and conda
-      # py312 is disabled due to segfault on exit of program with usage of dpctl
-      if [ $(echo $(PYTHON_VERSION) | grep '3.9\|3.10\|3.11') ] && [ $(SKLEARN_VERSION) != "1.0" ] && [ -z ${NO_DPC} ]; then pip install dpctl==0.18.* dpnp==0.16.*; fi
-      # issues exist with conda-forge dpcpp-cpp-rt=2025.1.1 it is needed to use the dpc build
-      if [ -z "${NO_DPC}" ]; then pip install dpcpp-cpp-rt==2025.1.*; fi
+      if [ $(echo $(PYTHON_VERSION) | grep '3.9\|3.10\|3.11\|3.12') ] && [ $(SKLEARN_VERSION) != "1.0" ] && [ -z ${NO_DPC} ]; then pip install dpctl==$DPCTL_VERSION dpnp==$DPNP_VERSION; fi
+      if [ -z "${NO_DPC}" ]; then pip install dpcpp-cpp-rt==$DPCPP_RT_VERSION; fi
       pip list
     env:
       NO_DPC: ${{ variables.NO_DPC }}
+      DPCTL_VERSION: 0.20.1
+      DPNP_VERSION: 0.18.1
+      DPCPP_RT_VERSION: 2025.2.0
     displayName: "Install testing requirements"
   - script: |
       . /usr/share/miniconda/etc/profile.d/conda.sh

.ci/scripts/install_dpcpp.sh

Lines changed: 1 addition & 1 deletion
@@ -21,5 +21,5 @@ rm GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB
 echo "deb https://apt.repos.intel.com/oneapi all main" | sudo tee /etc/apt/sources.list.d/oneAPI.list
 sudo add-apt-repository -y "deb https://apt.repos.intel.com/oneapi all main"
 sudo apt-get update
-sudo apt-get install -y intel-dpcpp-cpp-compiler-2025.1
+sudo apt-get install -y intel-dpcpp-cpp-compiler-2025.2
 sudo bash -c 'echo libintelocl.so > /etc/OpenCL/vendors/intel-cpu.icd'

.github/renovate.json

Lines changed: 36 additions & 1 deletion
@@ -20,5 +20,40 @@
   ],
   "pre-commit": {
     "enabled": true
-  }
+  },
+  "customManagers": [
+    {
+      "customType": "regex",
+      "fileMatch": [
+        "^\\.ci/pipeline/build-and-test-lnx\\.yml$",
+        "^\\.github/workflows/ci\\.yml$"
+      ],
+      "matchStrings": [
+        "\\s*DPCTL_VERSION: (?<currentValue>\\d+\\.\\d+\\.\\d+)"
+      ],
+      "depNameTemplate": "dpctl",
+      "datasourceTemplate": "pypi"
+    },
+    {
+      "customType": "regex",
+      "fileMatch": [
+        "^\\.ci/pipeline/build-and-test-lnx\\.yml$",
+        "^\\.github/workflows/ci\\.yml$"
+      ],
+      "matchStrings": [
+        "\\s*DPNP_VERSION: (?<currentValue>\\d+\\.\\d+\\.\\d+)"
+      ],
+      "depNameTemplate": "dpnp",
+      "datasourceTemplate": "pypi"
+    },
+    {
+      "customType": "regex",
+      "fileMatch": ["^\\.ci/pipeline/build-and-test-lnx\\.yml$"],
+      "matchStrings": [
+        "\\s*DPCPP_RT_VERSION: (?<currentValue>\\d+\\.\\d+\\.\\d+)"
+      ],
+      "depNameTemplate": "dpcpp-cpp-rt",
+      "datasourceTemplate": "pypi"
+    }
+  ]
 }
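
As a sanity check of the new Renovate custom managers, the matchStrings above are meant to lift the pinned version out of the env blocks so Renovate can look up newer releases on PyPI. A minimal sketch of the same match in Python (the sample line and the re-based check are illustrative only; Renovate itself uses JavaScript-flavored regex, so its `(?<currentValue>...)` group becomes `(?P<currentValue>...)` here):

```python
import re

# Python rendering of the Renovate matchString for DPCTL_VERSION.
PATTERN = re.compile(r"\s*DPCTL_VERSION: (?P<currentValue>\d+\.\d+\.\d+)")

# Shape of the pinned line in build-and-test-lnx.yml / ci.yml.
sample = "  DPCTL_VERSION: 0.20.1"
match = PATTERN.search(sample)
assert match is not None
print(match.group("currentValue"))  # -> 0.20.1
```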

.github/workflows/ci.yml

Lines changed: 3 additions & 3 deletions
@@ -30,8 +30,8 @@ concurrency:
   cancel-in-progress: true
 
 env:
-  DPCTL_VERSION: 0.18.1
-  DPNP_VERSION: 0.16.0
+  DPCTL_VERSION: 0.20.1
+  DPNP_VERSION: 0.18.1
   DPCTL_PY_VERSIONS: '3.9\|3.11'
   UXL_PYTHONVERSION: "3.12"
   UXL_SKLEARNVERSION: "1.4"
@@ -463,7 +463,7 @@ jobs:
       - name: Install PyTorch
         if: contains(matrix.FRAMEWORKS, 'pytorch')
         run: |
-          pip install torch --index-url https://download.pytorch.org/whl/xpu
+          pip install torch --index-url https://download.pytorch.org/whl/cpu
           python -c "import torch; _=[print(torch.xpu.get_device_name(i)) for i in range(torch.xpu.device_count())]"
       - name: Install daal4py/sklearnex
         run: pip install *.whl

onedal/_device_offload.py

Lines changed: 38 additions & 13 deletions
@@ -17,14 +17,15 @@
 import inspect
 import logging
 from functools import wraps
+from operator import xor
 
 import numpy as np
 from sklearn import get_config
 
 from ._config import _get_config
-from .datatypes import copy_to_dpnp, copy_to_usm, dlpack_to_numpy, usm_to_numpy
+from .datatypes import copy_to_dpnp, copy_to_usm, dlpack_to_numpy
 from .utils import _sycl_queue_manager as QM
-from .utils._array_api import _asarray, _is_numpy_namespace
+from .utils._array_api import _asarray, _get_sycl_namespace, _is_numpy_namespace
 from .utils._third_party import is_dpnp_ndarray
 
 logger = logging.getLogger("sklearnex")
@@ -62,23 +63,23 @@ def wrapper(self, *args, **kwargs):
 
 
 def _transfer_to_host(*data):
-    has_usm_data, has_host_data = False, False
+    has_usm_data = None
 
     host_data = []
     for item in data:
-        if usm_iface := getattr(item, "__sycl_usm_array_interface__", None):
-            item = usm_to_numpy(item, usm_iface)
-            has_usm_data = True
+        if item is None:
+            host_data.append(item)
+            continue
+
+        if usm_iface := hasattr(item, "__sycl_usm_array_interface__"):
+            xp = item.__array_namespace__()
+            item = xp.asnumpy(item)
+            has_usm_data = has_usm_data or has_usm_data is None
         elif not isinstance(item, np.ndarray) and (hasattr(item, "__dlpack_device__")):
             item = dlpack_to_numpy(item)
-            has_host_data = True
-        else:
-            has_host_data = True
-
-        mismatch_host_item = usm_iface is None and item is not None and has_usm_data
-        mismatch_usm_item = usm_iface is not None and has_host_data
 
-        if mismatch_host_item or mismatch_usm_item:
+        # set has_usm_data to boolean and use xor to see if they don't match
+        if xor((has_usm_data := bool(has_usm_data)), usm_iface):
             raise RuntimeError("Input data shall be located on single target device")
 
         host_data.append(item)
@@ -171,3 +172,27 @@ def wrapper_impl(*args, **kwargs):
         return result
 
     return wrapper_impl
+
+
+def support_sycl_format(func):
+    # This wrapper enables scikit-learn functions and methods to work with
+    # all sycl data frameworks as they no longer support numpy implicit
+    # conversion and must be manually converted. This is only necessary
+    # when array API is supported but not active.
+
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        if (
+            not get_config().get("array_api_dispatch", False)
+            and _get_sycl_namespace(*args)[2]
+        ):
+            with QM.manage_global_queue(kwargs.get("queue"), *args):
+                if inspect.isfunction(func) and "." in func.__qualname__:
+                    self, (args, kwargs) = args[0], _get_host_inputs(*args[1:], **kwargs)
+                    return func(self, *args, **kwargs)
+                else:
+                    args, kwargs = _get_host_inputs(*args, **kwargs)
+                    return func(*args, **kwargs)
+        return func(*args, **kwargs)
+
+    return wrapper
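
The rewritten `_transfer_to_host` replaces the separate `has_usm_data`/`has_host_data` flags with a single tri-state flag (`None` until the first non-None item is seen, then a boolean) and uses `xor` to reject inputs that mix USM and host memory. A self-contained sketch of just that check, with a caller-supplied `has_usm` predicate standing in for the real `__sycl_usm_array_interface__` lookup:

```python
from operator import xor


def check_single_device(items, has_usm):
    # has_usm: callable returning True when an item lives in SYCL USM memory
    # (stand-in for checking __sycl_usm_array_interface__ on the real arrays).
    usm_seen = None  # None -> nothing inspected yet, then True/False
    for item in items:
        if item is None:
            continue  # None placeholders are ignored, as in _transfer_to_host
        item_is_usm = has_usm(item)
        if item_is_usm:
            # the first USM item flips the tri-state flag to True
            usm_seen = usm_seen or usm_seen is None
        # once usm_seen is coerced to bool, any disagreement with the current
        # item means the inputs span both host and device memory
        if xor((usm_seen := bool(usm_seen)), item_is_usm):
            raise RuntimeError("Input data shall be located on single target device")


check_single_device([1, 2, None], has_usm=lambda x: False)  # ok: all host
check_single_device(["a", "b"], has_usm=lambda x: True)     # ok: all USM
try:
    check_single_device([1, "a"], has_usm=lambda x: isinstance(x, str))
except RuntimeError as exc:
    print(exc)  # mixed host/USM inputs raise
```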

onedal/datatypes/__init__.py

Lines changed: 1 addition & 2 deletions
@@ -16,7 +16,7 @@
 
 from ._data_conversion import from_table, return_type_constructor, to_table
 from ._dlpack import dlpack_to_numpy, get_torch_queue
-from ._sycl_usm import copy_to_dpnp, copy_to_usm, usm_to_numpy
+from ._sycl_usm import copy_to_dpnp, copy_to_usm
 
 __all__ = [
     "copy_to_dpnp",
@@ -26,5 +26,4 @@
     "get_torch_queue",
     "to_table",
     "return_type_constructor",
-    "usm_to_numpy",
 ]

onedal/datatypes/_data_conversion.py

Lines changed: 6 additions & 5 deletions
@@ -95,21 +95,22 @@ def return_type_constructor(array):
     # prioritized: it provides finer-grained control of SYCL queues and the
     # related SYCL devices which are generally unavailable via DLPack
     # representations (such as SYCL contexts, SYCL sub-devices, etc.).
+    xp = array.__array_namespace__()
+    # array api support added in dpnp starting in 0.19, will fail for
+    # older versions
     if is_dpctl_tensor(array):
-        xp = array.__array_namespace__()
         func = lambda x: (
             xp.asarray(x)
             if hasattr(x, "__sycl_usm_array_interface__")
             else xp.asarray(backend.from_table(x), device=device)
         )
     elif is_dpnp_ndarray(array):
-        xp = array._array_obj.__array_namespace__()
-        from_usm = array._create_from_usm_ndarray
-        func = lambda x: from_usm(
-            xp.asarray(x)
+        func = lambda x: (
+            xp.asarray(xp.as_usm_ndarray(x))
             if hasattr(x, "__sycl_usm_array_interface__")
             else xp.asarray(backend.from_table(x), device=device)
         )
+
     elif hasattr(array, "__array_namespace__"):
         func = array.__array_namespace__().from_dlpack
     else:
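
For context on the `return_type_constructor` change above: the namespace lookup now happens once via `array.__array_namespace__()` (the reason dpnp 0.19+ is needed), and the returned closure reuses it for every converted result. A minimal sketch of that closure pattern, with numpy standing in for the dpctl/dpnp namespace and a plain `asarray` standing in for the oneDAL table conversion:

```python
import numpy as np


def make_converter(array):
    # Sketch of the return_type_constructor pattern: resolve the array
    # namespace once up front, then return a closure that maps every backend
    # result into that namespace. numpy stands in here for the namespace the
    # real code obtains from array.__array_namespace__().
    xp = np  # stand-in for array.__array_namespace__()
    return lambda result: xp.asarray(result)


to_caller_type = make_converter(np.empty(0))
print(to_caller_type([[1.0, 2.0], [3.0, 4.0]]))
```

Building the converter once per output type means the namespace (and, in the real code, the device) is resolved a single time rather than for every table that comes back from the backend.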

onedal/datatypes/_sycl_usm.py

Lines changed: 9 additions & 24 deletions
@@ -17,12 +17,13 @@
 from collections.abc import Iterable
 
 import numpy as np
+import scipy.sparse as sp
 
 from ..utils._third_party import lazy_import
 
 
 @lazy_import("dpctl.memory", "dpctl.tensor")
-def array_to_usm(memory, tensor, queue, array):
+def _array_to_usm(memory, tensor, queue, array):
     try:
         mem = memory.MemoryUSMDevice(array.nbytes, queue=queue)
         mem.copy_from_host(array.tobytes())
@@ -37,42 +38,26 @@ def array_to_usm(memory, tensor, queue, array):
 
 
 @lazy_import("dpnp", "dpctl.tensor")
-def to_dpnp(dpnp, tensor, array):
+def _to_dpnp(dpnp, tensor, array):
     if isinstance(array, tensor.usm_ndarray):
         return dpnp.array(array, copy=False)
     else:
         return array
 
 
 def copy_to_usm(queue, array):
-    if hasattr(array, "__array__"):
-        return array_to_usm(queue, array)
+    if hasattr(array, "tobytes"):
+        return _array_to_usm(queue, array)
     else:
-        if isinstance(array, Iterable):
+        if isinstance(array, Iterable) and not sp.issparse(array):
             array = [copy_to_usm(queue, i) for i in array]
         return array
 
 
 def copy_to_dpnp(queue, array):
-    if hasattr(array, "__array__"):
-        return to_dpnp(array_to_usm(queue, array))
+    if hasattr(array, "tobytes"):
+        return _to_dpnp(_array_to_usm(queue, array))
     else:
-        if isinstance(array, Iterable):
+        if isinstance(array, Iterable) and not sp.issparse(array):
             array = [copy_to_dpnp(queue, i) for i in array]
         return array
-
-
-@lazy_import("dpctl.memory")
-def usm_to_numpy(memorymod, item, usm_iface):
-    buffer = memorymod.as_usm_memory(item).copy_to_host()
-    order = "C"
-    if usm_iface["strides"] is not None and len(usm_iface["strides"]) > 1:
-        if usm_iface["strides"][0] < usm_iface["strides"][1]:
-            order = "F"
-    item = np.ndarray(
-        shape=usm_iface["shape"],
-        dtype=usm_iface["typestr"],
-        buffer=buffer,
-        order=order,
-    )
-    return item
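
The `copy_to_usm`/`copy_to_dpnp` change switches the dense-array check from `__array__` to `tobytes` and stops recursing into scipy sparse inputs, which are iterable but are intentionally left as-is. A standalone sketch of that dispatch, with a stand-in `to_device` callable in place of the real `_array_to_usm(queue, array)`:

```python
from collections.abc import Iterable

import numpy as np
import scipy.sparse as sp


def copy_structure_to_device(array, to_device):
    # Sketch of the dispatch used by copy_to_usm/copy_to_dpnp after this
    # change: dense ndarray-like inputs (anything with .tobytes()) go to the
    # device copy routine, containers are handled recursively, and sparse
    # matrices are passed through untouched instead of being iterated.
    if hasattr(array, "tobytes"):
        return to_device(array)
    if isinstance(array, Iterable) and not sp.issparse(array):
        return [copy_structure_to_device(item, to_device) for item in array]
    return array


# to_device is hypothetical here; it only tags what would be copied.
fake_to_device = lambda a: ("on_device", a.shape)
print(copy_structure_to_device([np.ones(2), sp.eye(3)], fake_to_device))
```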

onedal/tests/test_common.py

Lines changed: 14 additions & 0 deletions
@@ -18,6 +18,8 @@
 import os
 from glob import glob
 
+from onedal.tests.utils._dataframes_support import test_frameworks
+
 
 def _check_primitive_usage_ban(primitive_name, package, allowed_locations=None):
     """This test blocks the usage of the primitive in
@@ -55,3 +57,15 @@ def test_sklearn_check_version_ban():
     # remove this file from the list
     output = "\n".join([i for i in output if "test_common.py" not in i])
     assert output == "", f"sklearn versioning is occurring in: \n{output}"
+
+
+def test_frameworks_intentionality():
+    """Only silent skip frameworks which are not installed"""
+    fmwks = test_frameworks.replace("array_api", "array_api_strict").split(",")
+    for module in fmwks:
+        try:
+            importlib.import_module(module)
+        # If a module isn't installed, working as intended.
+        # If an ImportError occurs, then something is wrong.
+        except ModuleNotFoundError:
+            pass

onedal/utils/_array_api.py

Lines changed: 3 additions & 0 deletions
@@ -20,6 +20,7 @@
 from functools import lru_cache
 
 import numpy as np
+import scipy.sparse as sp
 
 from ..utils._third_party import _is_subclass_fast
 
@@ -44,6 +45,8 @@ def _asarray(data, xp, *args, **kwargs):
         for i in range(len(data)):
             result_data.append(_asarray(data[i], xp, *args, **kwargs))
         data = tuple(result_data)
+    elif sp.issparse(data):
+        pass
     else:
         for i in range(len(data)):
             data[i] = _asarray(data[i], xp, *args, **kwargs)
