Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
5395e73
Convert examples from workload style to standalone style
nayyirahsan Jan 23, 2026
cd6811c
Addressed comments (Reversed READ.ME, inclusive_scan_team.py, and all…
nayyirahsan Jan 26, 2026
4221719
Format examples with Black
nayyirahsan Jan 29, 2026
c6a0215
Deleted ufunc implementations and tests. Replaced ufunc usage in exam…
nayyirahsan Feb 12, 2026
19e7fd7
Merge branch 'main' into nayyirahsan/ufunc-removal
nayyirahsan Feb 12, 2026
275c89d
fix array api tests and further removed pk usage
nayyirahsan Feb 22, 2026
e1bd7ef
fix array api test
nayyirahsan Feb 22, 2026
f056ad9
pass tests
nayyirahsan Feb 25, 2026
d2962f0
format
nayyirahsan Feb 25, 2026
5ea2991
remove duplicate
nayyirahsan Feb 25, 2026
97be73f
revert data_type.py
nayyirahsan Feb 25, 2026
4e42b48
format
nayyirahsan Feb 25, 2026
3254455
add pk_array helper
nayyirahsan Feb 25, 2026
06fe715
add pk_array lib
nayyirahsan Feb 25, 2026
8f6dea9
format
nayyirahsan Feb 25, 2026
58715ec
revert abstraction
nayyirahsan Mar 2, 2026
9ec995c
remove unnecessary test
nayyirahsan Mar 3, 2026
bd1c1e9
remove all PKArray usage
nayyirahsan Mar 3, 2026
8fb75d7
Merge branch 'main' into nayyirahsan/ufunc-removal
IvanGrigorik Mar 6, 2026
4159771
fix for other execution spaces
nayyirahsan Mar 9, 2026
effcd59
Merge branch 'nayyirahsan/ufunc-removal' of https://github.com/nayyir…
nayyirahsan Mar 9, 2026
5d61e30
fix isnan and isfinite errors
nayyirahsan Mar 9, 2026
299b46f
add isinf function
nayyirahsan Mar 9, 2026
6b1d185
restore workunits
nayyirahsan Mar 9, 2026
0cd79dc
fix equal
nayyirahsan Mar 9, 2026
c97a5bc
format
nayyirahsan Mar 9, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .github/workflows/array_api.yml
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,6 @@ jobs:
# to circumvent the currently slow performance of
# JIT compile/link, which can otherwise cause issues
# for hypothesis-driven test case generation
pytest $GITHUB_WORKSPACE/tools/pre_compile_ufuncs.py -s
# pytest $GITHUB_WORKSPACE/tools/pre_compile_ufuncs.py -s
# only run a subset of the conformance tests to get started
pytest array_api_tests/meta/test_broadcasting.py array_api_tests/meta/test_equality_mapping.py array_api_tests/meta/test_signatures.py array_api_tests/meta/test_special_cases.py array_api_tests/test_constants.py array_api_tests/meta/test_utils.py array_api_tests/test_creation_functions.py::test_ones array_api_tests/test_creation_functions.py::test_ones_like array_api_tests/test_data_type_functions.py::test_result_type array_api_tests/test_operators_and_elementwise_functions.py::test_log10 array_api_tests/test_operators_and_elementwise_functions.py::test_sqrt array_api_tests/test_operators_and_elementwise_functions.py::test_isfinite array_api_tests/test_operators_and_elementwise_functions.py::test_log2 array_api_tests/test_operators_and_elementwise_functions.py::test_log1p array_api_tests/test_operators_and_elementwise_functions.py::test_isinf array_api_tests/test_operators_and_elementwise_functions.py::test_log array_api_tests/test_array_object.py::test_scalar_casting array_api_tests/test_operators_and_elementwise_functions.py::test_sign array_api_tests/test_operators_and_elementwise_functions.py::test_square array_api_tests/test_operators_and_elementwise_functions.py::test_cos array_api_tests/test_operators_and_elementwise_functions.py::test_round array_api_tests/test_operators_and_elementwise_functions.py::test_trunc array_api_tests/test_operators_and_elementwise_functions.py::test_ceil array_api_tests/test_operators_and_elementwise_functions.py::test_floor array_api_tests/test_operators_and_elementwise_functions.py::test_exp array_api_tests/test_operators_and_elementwise_functions.py::test_sin array_api_tests/test_operators_and_elementwise_functions.py::test_tan array_api_tests/test_operators_and_elementwise_functions.py::test_tanh array_api_tests/test_creation_functions.py::test_zeros array_api_tests/test_creation_functions.py::test_zeros_like array_api_tests/test_creation_functions.py::test_full_like 
array_api_tests/test_operators_and_elementwise_functions.py::test_positive array_api_tests/test_operators_and_elementwise_functions.py::test_isnan array_api_tests/test_operators_and_elementwise_functions.py::test_equal "array_api_tests/test_has_names.py::test_has_names[array_method-__pos__]"
pytest array_api_tests/meta/test_broadcasting.py array_api_tests/meta/test_equality_mapping.py array_api_tests/meta/test_signatures.py array_api_tests/meta/test_special_cases.py array_api_tests/test_constants.py array_api_tests/meta/test_utils.py array_api_tests/test_creation_functions.py::test_ones array_api_tests/test_creation_functions.py::test_ones_like array_api_tests/test_data_type_functions.py::test_result_type array_api_tests/test_array_object.py::test_scalar_casting array_api_tests/test_creation_functions.py::test_zeros array_api_tests/test_creation_functions.py::test_zeros_like array_api_tests/test_creation_functions.py::test_full_like
53 changes: 24 additions & 29 deletions examples/LogisticRegression/LR.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@

import numbers
import numpy as np
import pykokkos as pk
import warnings

from joblib import Parallel, effective_n_jobs
Expand Down Expand Up @@ -62,12 +61,8 @@
)


def asarray(arr, dtype=pk.double):
arr = np.asarray(arr)

view = pk.View(arr.shape, dtype)
view[:] = arr
return view
def asarray(arr, dtype=np.float64):
return np.asarray(arr, dtype=dtype)


def _check_solver(solver, penalty, dual):
Expand Down Expand Up @@ -267,13 +262,13 @@ def _logistic_regression_path(
The "copy" parameter was removed.
"""
if isinstance(Cs, numbers.Integral):
Cs = pk.logspace(-4, 4, Cs)
Cs = np.logspace(-4, 4, Cs)

solver = _check_solver(solver, penalty, dual)

_, n_features = X.shape

classes = pk.unique(y)
classes = np.unique(y)

random_state = check_random_state(random_state)

Expand Down Expand Up @@ -301,9 +296,9 @@ def _logistic_regression_path(
# multinomial case this is not necessary.

if multi_class == "ovr":
w0 = pk.zeros(n_features + int(fit_intercept), dtype=X.dtype)
w0 = np.zeros(n_features + int(fit_intercept), dtype=X.dtype)
mask = y == pos_class
y_bin = pk.ones(y.shape, dtype=X.dtype)
y_bin = np.ones(y.shape, dtype=X.dtype)
if solver in ["lbfgs", "newton-cg"]:
# HalfBinomialLoss, used for those solvers, represents y in [0, 1] instead
# of in [-1, 1].
Expand Down Expand Up @@ -333,11 +328,11 @@ def _logistic_regression_path(
lbin = LabelBinarizer()
Y_multi = asarray(lbin.fit_transform(y))
if Y_multi.shape[1] == 1:
Y_multi = pk.hstack(
pk.negative(pk.subtract(Y_multi, asarray([1]))), Y_multi
Y_multi = np.hstack(
(np.negative(np.subtract(Y_multi, asarray([1]))), Y_multi)
)

w0 = pk.zeros((classes.size, n_features + int(fit_intercept)), dtype=X.dtype)
w0 = np.zeros((classes.size, n_features + int(fit_intercept)), dtype=X.dtype)

if coef is not None:
# it must work both giving the bias term and not
Expand Down Expand Up @@ -384,7 +379,7 @@ def _logistic_regression_path(
# i.e. 1d-arrays. LinearModelLoss expects classes to be contiguous and
# reconstructs the 2d-array via w0.reshape((n_classes, -1), order="F").
# As w0 is F-contiguous, ravel(order="F") also avoids a copy.
w0 = pk.ravel(w0, order="F")
w0 = np.ravel(w0, order="F")

loss = LinearModelLoss(
base_loss=HalfMultinomialLoss(n_classes=classes.size),
Expand All @@ -397,7 +392,7 @@ def _logistic_regression_path(
func = loss.loss
grad = loss.gradient
hess = loss.gradient_hessian_product # hess = [gradient, hessp]
warm_start_sag = {"coef": pk.transpose(w0)}
warm_start_sag = {"coef": np.transpose(np.array(w0))}
else:
target = y_bin
if solver == "lbfgs":
Expand All @@ -412,15 +407,15 @@ def _logistic_regression_path(
func = loss.loss
grad = loss.gradient
hess = loss.gradient_hessian_product # hess = [gradient, hessp]
warm_start_sag = {"coef": pk.expand_dims(w0, axis=1)}
warm_start_sag = {"coef": np.expand_dims(w0, axis=1)}

coefs = list()

n_iter = pk.zeros(len(Cs), dtype=pk.int32)
n_iter = np.zeros(len(Cs), dtype=np.int32)
for i, C in enumerate(Cs):
if solver == "lbfgs":
l2_reg_strength = 1.0 / C
iprint = [-1, 50, 1, 100, 101][pk.searchsorted([0, 1, 2, 3], verbose)]
iprint = [-1, 50, 1, 100, 101][np.searchsorted([0, 1, 2, 3], verbose)]
opt_res = optimize.minimize(
func,
np.asarray(w0),
Expand Down Expand Up @@ -471,9 +466,9 @@ def _logistic_regression_path(
)
coef_ = asarray(coef_)
if fit_intercept:
w0 = pk.hstack(pk.ravel(coef_), intercept_)
w0 = np.hstack((np.ravel(coef_), intercept_))
else:
w0 = pk.ravel(coef_)
w0 = np.ravel(coef_)

elif solver in ["sag", "saga"]:
if multi_class == "multinomial":
Expand Down Expand Up @@ -518,7 +513,7 @@ def _logistic_regression_path(
if multi_class == "multinomial":
n_classes = max(2, classes.size)
if solver in ["lbfgs", "newton-cg"]:
multi_w0 = pk.reshape(w0, (n_classes, -1), order="F")
multi_w0 = np.reshape(w0, (n_classes, -1), order="F")
else:
multi_w0 = w0
coefs.append(asarray(multi_w0))
Expand Down Expand Up @@ -829,7 +824,7 @@ def fit(self, X, y, sample_weight=None):
"Setting penalty='none' will ignore the C and l1_ratio parameters"
)
# Note that check for l1_ratio is done right above
C_ = pk.inf
C_ = np.inf
penalty = "l2"
else:
C_ = self.C
Expand Down Expand Up @@ -862,7 +857,7 @@ def fit(self, X, y, sample_weight=None):

X = asarray(X)
y = asarray(y)
self.classes_ = pk.unique(y)
self.classes_ = np.unique(np.array(y))

multi_class = _check_multi_class(self.multi_class, solver, len(self.classes_))

Expand Down Expand Up @@ -969,7 +964,7 @@ def fit(self, X, y, sample_weight=None):
)

fold_coefs_, _, n_iter_ = zip(*fold_coefs_)
self.n_iter_ = pk.col(asarray(n_iter_), 0)
self.n_iter_ = np.array(n_iter_)

n_features = X.shape[1]
if multi_class == "multinomial":
Expand All @@ -984,7 +979,7 @@ def fit(self, X, y, sample_weight=None):
self.intercept_ = self.coef_[:, -1]
self.coef_ = self.coef_[:, :-1]
else:
self.intercept_ = pk.zeros(n_classes)
self.intercept_ = np.zeros(n_classes)

return self

Expand Down Expand Up @@ -1024,7 +1019,7 @@ def predict_proba(self, X):
if decision.ndim == 1:
# Workaround for multi_class="multinomial" and binary outcomes
# which requires softmax prediction with only a 1D decision.
decision_2d = pk.hstack(pk.negative(decision), decision)
decision_2d = np.hstack((np.negative(decision), decision))
else:
decision_2d = decision
return softmax(decision_2d, copy=False)
Expand All @@ -1045,7 +1040,7 @@ def predict_log_proba(self, X):
Returns the log-probability of the sample for each class in the
model, where classes are ordered as they are in ``self.classes_``.
"""
return pk.log(self.predict_proba(X))
return np.log(self.predict_proba(X))

def predict(self, X):
"""
Expand All @@ -1065,7 +1060,7 @@ def predict(self, X):
else:
indices = scores.argmax(axis=1)

return pk.index(self.classes_, asarray(indices, dtype=pk.int32))
return self.classes_[np.array(indices, dtype=np.int32)]


def main():
Expand Down
Loading