From 22321e79832dc6054f8e4734222a5c1ab1a6f1ef Mon Sep 17 00:00:00 2001
From: David Cortes
Date: Mon, 27 Oct 2025 08:16:54 +0100
Subject: [PATCH] use larger bfgs memory

---
 daal4py/sklearn/linear_model/logistic_path.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/daal4py/sklearn/linear_model/logistic_path.py b/daal4py/sklearn/linear_model/logistic_path.py
index e1b7429ef7..650696cad4 100755
--- a/daal4py/sklearn/linear_model/logistic_path.py
+++ b/daal4py/sklearn/linear_model/logistic_path.py
@@ -249,6 +249,11 @@ def __logistic_regression_path(
         iprint = [-1, 50, 1, 100, 101][
             np.searchsorted(np.array([0, 1, 2, 3]), verbose)
         ]
+        # Note: this uses more correction pairs than the implementation in scikit-learn,
+        # which means a better approximation of the Hessian at the expense of slower updates.
+        # This is beneficial for high-dimensional convex problems without bound constraints,
+        # like the logistic regression being fitted here. Larger problems with sparse
+        # data (currently not supported) might benefit from increasing this number further.
         opt_res = optimize.minimize(
             func,
             w0,
@@ -257,6 +262,7 @@
             args=extra_args,
             options={
                 "maxiter": max_iter,
+                "maxcor": 50,
                 "maxls": 50,
                 "gtol": tol,
                 "ftol": 64 * np.finfo(float).eps,
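
For context on what the change does: "maxcor" is the L-BFGS memory size in SciPy's
L-BFGS-B, i.e. the number of (s, y) correction pairs kept to build the implicit
inverse-Hessian approximation; SciPy's default is 10. The standalone sketch below
(not part of the patch; the toy data, penalty strength, and tolerances are
assumptions chosen for illustration) compares the default memory against the 50
pairs used above on a small L2-regularized logistic loss:

import numpy as np
from scipy import optimize
from scipy.special import expit

rng = np.random.default_rng(0)
n_samples, n_features = 200, 500          # high-dimensional: more features than samples
X = rng.standard_normal((n_samples, n_features))
w_true = rng.standard_normal(n_features)
y = np.sign(X @ w_true + 0.1 * rng.standard_normal(n_samples))  # labels in {-1, +1}
lam = 1e-2                                # L2 penalty strength (arbitrary for the demo)

def loss_and_grad(w):
    # L2-regularized logistic loss; returns (value, gradient) so that jac=True works.
    z = y * (X @ w)
    loss = np.logaddexp(0.0, -z).sum() + 0.5 * lam * (w @ w)
    p = -expit(-z)                        # derivative of log(1 + exp(-z)) w.r.t. z
    grad = X.T @ (p * y) + lam * w
    return loss, grad

w0 = np.zeros(n_features)
for maxcor in (10, 50):                   # 10 is SciPy's default memory size
    res = optimize.minimize(
        loss_and_grad,
        w0,
        method="L-BFGS-B",
        jac=True,
        options={"maxiter": 1000, "maxcor": maxcor, "maxls": 50, "gtol": 1e-8},
    )
    print(f"maxcor={maxcor}: iterations={res.nit}, final loss={res.fun:.6f}")

The trade-off named in the patch comment shows up directly here: each iteration
with maxcor=50 does more work (the two-loop recursion touches up to 50 stored
pairs instead of 10), but the richer curvature model typically reduces the
iteration count on dense, unconstrained, high-dimensional convex problems.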