def test_methods(N, p, seed, opt):
    """Fit a model with optimizer ``opt`` and sanity-check the fit.

    Parameters
    ----------
    N : int
        Number of samples passed to ``make_intercept_data``.
    p : int
        Number of features passed to ``make_intercept_data``.
    seed : int
        RNG seed for reproducible data generation.
    opt : callable
        Optimizer with signature ``opt(X, y) -> coefs``.
    """
    X, y = make_intercept_data(N, p, seed=seed)
    coefs = opt(X, y)
    # Fix: do not shadow the feature-count parameter ``p`` with the
    # predicted-probability array; use a distinct local name.
    prob = sigmoid(X.dot(coefs).compute())
    y_sum = y.compute().sum()
    prob_sum = prob.sum()
    # For a well-calibrated fit, predicted probabilities should sum to
    # (approximately) the observed number of positives.
    assert np.isclose(y_sum, prob_sum, atol=1e-1)
def broadcast_lbfgs_weight():
    """Fit via L-BFGS against a live distributed cluster and sanity-check the fit.

    Spins up a two-worker test cluster, fits intercept data with ``lbfgs``
    (passing the distributed client so weights can be broadcast to workers),
    and checks that predicted probabilities sum to roughly the number of
    observed positives.

    NOTE(review): ``loop`` is not defined in this function's scope — it looks
    like a pytest ``loop`` fixture that should be a parameter (and the name
    lacks the usual ``test_`` prefix); verify against the surrounding test
    module.
    """
    # cluster() yields the scheduler info and a list of two worker infos.
    with cluster() as (s, [a, b]):
        with Client(s['address'], loop=loop) as c:
            X, y = make_intercept_data(1000, 10)
            # The client is handed to lbfgs so it can broadcast the weight
            # vector to workers rather than re-serializing it per task.
            coefs = lbfgs(X, y, dask_distributed_client=c)
            p = sigmoid(X.dot(coefs).compute())
            y_sum = y.compute().sum()
            p_sum = p.sum()
            # Probabilities should sum to ~number of positives.
            assert np.isclose(y_sum, p_sum, atol=1e-1)
def predict_proba(self, X):
    """Probability estimates for samples in X.

    Parameters
    ----------
    X : array-like, shape = [n_samples, n_features]

    Returns
    -------
    T : array-like, shape = [n_samples, n_classes]
        The probability of the sample for each class in the model.
    """
    # Map the raw decision scores through the logistic function.
    scores = self.decision_function(X)
    return sigmoid(scores)
def test_methods(N, p, seed, opt, is_cupy):
    """Fit a model with optimizer ``opt`` (optionally on CuPy) and sanity-check it.

    Parameters
    ----------
    N : int
        Number of samples passed to ``make_intercept_data``.
    p : int
        Number of features passed to ``make_intercept_data``.
    seed : int
        RNG seed for reproducible data generation.
    opt : callable
        Optimizer with signature ``opt(X, y) -> coefs``.
    is_cupy : bool
        When True, convert the data to dask-backed CuPy arrays
        (test is skipped if cupy is not installed).
    """
    X, y = make_intercept_data(N, p, seed=seed)
    if is_cupy:
        cupy = pytest.importorskip('cupy')
        X, y = to_dask_cupy_array_xy(X, y, cupy)
    coefs = opt(X, y)
    # Fix: do not shadow the feature-count parameter ``p`` with the
    # predicted-probability array; use a distinct local name.
    prob = sigmoid(X.dot(coefs).compute())
    y_sum = y.compute().sum()
    prob_sum = prob.sum()
    # Predicted probabilities should sum to ~number of observed positives.
    assert np.isclose(y_sum, prob_sum, atol=1e-1)
def predict_proba(self, X):
    """Probability estimates for samples in X.

    Parameters
    ----------
    X : array-like, shape = [n_samples, n_features]

    Returns
    -------
    T : array-like, shape = [n_samples, n_classes]
        The probability of the sample for each class in the model.
    """
    # Validate/coerce the input, then squash the linear scores through
    # the logistic function.
    checked = self._check_array(X)
    scores = dot(checked, self._coef)
    return sigmoid(scores)
def predict_proba(self, X):
    """Probability estimates for samples in X.

    Parameters
    ----------
    X : array-like, shape = [n_samples, n_features]

    Returns
    -------
    T : array-like, shape = [n_samples, n_classes]
        The probability of the sample for each class in the model.
    """
    # TODO: more work needed here to support multi_class
    raw = sigmoid(self.decision_function(X))
    # Stack into the [n_samples, n_classes] layout callers expect.
    return lr_prob_stack(raw)
def hessian(self, Xbeta, X):
    """Logistic hessian"""
    # Per-sample variance weights p*(1-p) of the Bernoulli likelihood.
    prob = sigmoid(Xbeta)
    weights = prob * (1 - prob)
    return dot(weights * X.T, X)
def gradient(self, Xbeta, X, y):
    """Logistic gradient"""
    # Gradient of the logistic loss: X^T (sigmoid(X beta) - y).
    residual = sigmoid(Xbeta) - y
    return dot(X.T, residual)
def hessian(Xbeta, X):
    """Logistic hessian: X^T diag(p*(1-p)) X, via broadcasting."""
    prob = sigmoid(Xbeta)
    weights = prob * (1 - prob)
    return dot(weights * X.T, X)
def gradient(Xbeta, X, y):
    """Logistic gradient: X^T (sigmoid(X beta) - y)."""
    residual = sigmoid(Xbeta) - y
    return dot(X.T, residual)
def predict_proba(self, X):
    """Return class-probability estimates for the samples in ``X``."""
    # Local import keeps the cupy-aware path free of a hard module-level
    # dependency on dask_glm.utils.
    from dask_glm.utils import sigmoid
    arr = self._input_to_dask_cupy_array(X)
    scores = self.decision_function(arr)
    return sigmoid(scores)