def g(w):
    # Restore the weight matrix from the flat parameter vector.
    W = w.reshape(W0.shape)
    # Negative sum of the per-example log-likelihood gradients.
    grads = [softmax.regression_ll_grad(x, y, W) for x, y in zip(X, P)]
    grad = -sum(grads)
    # Return the gradient flattened to match the shape of w.
    return grad.ravel()
def g(w):
    # Restore the weight matrix from the flat parameter vector.
    W = w.reshape(W0.shape)
    # Same as above, but threading the per-example value c from C through each call.
    grads = [softmax.regression_ll_grad(x, y, W, c) for x, y, c in zip(X, P, C)]
    grad = -sum(grads)
    # Zero the gradient entries where the mask M is False, so those weights are not updated.
    grad[~M] = 0.0
    # Return the gradient flattened to match the shape of w.
    return grad.ravel()
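The reshape/ravel round trip in g suggests the gradient is meant to be consumed by a flat-vector optimizer; the sketch below is a hypothetical illustration of that pattern using scipy.optimize.minimize, which the original snippet does not show. The names f_demo, g_demo, and W_init are placeholders, not the real objective, gradient, or data.

# Hypothetical usage sketch (assumption: a SciPy-style optimizer consumes the
# flat gradient; none of these names come from the original code).
import numpy as np
from scipy.optimize import minimize

W_init = np.zeros((3, 4))            # placeholder initial weight matrix

def f_demo(w):                       # placeholder scalar objective
    return 0.5 * np.dot(w, w)

def g_demo(w):                       # placeholder gradient of f_demo (flat, like g above)
    return w

res = minimize(f_demo, W_init.ravel(), jac=g_demo, method="L-BFGS-B")
W_opt = res.x.reshape(W_init.shape)  # recover the matrix shape from the flat result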