def create(**kwargs):
    """Build an SVM-style problem: hinge loss with an L2 (ridge) penalty.

    All keyword arguments are forwarded to
    problem_util.create_classification, which produces the data matrix A
    and the label vector b.

    Returns:
        cp.Problem minimizing hinge_loss(x, A, b) + 1 * sum_squares(x).
    """
    A, b = problem_util.create_classification(**kwargs)
    reg_weight = 1  # unit ridge weight
    w = cp.Variable(A.shape[1])
    objective = ep.hinge_loss(w, A, b) + reg_weight * cp.sum_squares(w)
    return cp.Problem(cp.Minimize(objective))
def create(**kwargs):
    """Construct a hinge-loss classification problem with ridge regularization.

    kwargs are passed straight through to problem_util.create_classification
    to generate (A, b); the regularization weight is fixed at 1.
    """
    A, b = problem_util.create_classification(**kwargs)
    x = cp.Variable(A.shape[1])
    lam = 1
    loss = ep.hinge_loss(x, A, b)
    penalty = lam * cp.sum_squares(x)
    return cp.Problem(cp.Minimize(loss + penalty))
def create(**kwargs):
    """Build a sparse SVM problem: hinge loss plus an L1 penalty.

    The regularization weight follows the scaling
        lam = 0.5 * sigma * sqrt(m * log(mu * n))
    with sigma fixed at 0.05, m and n read from kwargs, and mu
    defaulting to 1.

    Required kwargs: "m", "n" (also forwarded, with the rest, to
    problem_util.create_classification). Optional kwarg: "mu".
    """
    A, b = problem_util.create_classification(**kwargs)
    num_samples = kwargs["m"]
    num_features = kwargs["n"]
    sigma = 0.05
    mu = kwargs.get("mu", 1)
    lam = 0.5 * sigma * np.sqrt(num_samples * np.log(mu * num_features))
    w = cp.Variable(A.shape[1])
    objective = ep.hinge_loss(w, A, b) + lam * cp.norm1(w)
    return cp.Problem(cp.Minimize(objective))
def create(**kwargs):
    """L1-regularized hinge-loss classification (sparse SVM).

    Reads "m" and "n" from kwargs and sets the penalty weight to
    0.5 * 0.05 * sqrt(m * log(mu * n)), where mu defaults to 1.
    Remaining kwargs go to problem_util.create_classification.
    """
    A, b = problem_util.create_classification(**kwargs)
    m, n = kwargs["m"], kwargs["n"]
    sigma, mu = 0.05, kwargs.get("mu", 1)
    lam = 0.5 * sigma * np.sqrt(m * np.log(mu * n))
    x = cp.Variable(A.shape[1])
    f = ep.hinge_loss(x, A, b) + lam * cp.norm1(x)
    return cp.Problem(cp.Minimize(f))
# Test fixtures: a binary-classification weight vector and a multiclass
# weight matrix over data X.  NOTE(review): m and n are free here —
# presumably defined earlier in this file; verify against the full module.
k = 3
theta = cp.Variable(n)
Theta = cp.Variable(n, k)
alphas = np.linspace(1. / (k + 1), 1 - 1. / (k + 1), k)

np.random.seed(0)  # deterministic fixtures across runs
X = np.random.randn(m, n)
y_binary = np.random.randint(2, size=(m,)) * 2 - 1  # labels in {-1, +1}
y_multi = np.random.randint(k, size=(m,))           # labels in {0..k-1}

# TODO(mwytock): Need to handle axis=1 parameter
# lambda: ep.softmax_loss(Theta, X, y_multi),
# lambda: ep.multiclass_hinge_loss(Theta, X, y_multi),
FUNCTION_TESTS = [
    lambda: ep.hinge_loss(theta, X, y_binary),
    lambda: ep.logistic_loss(theta, X, y_binary),
    lambda: ep.poisson_loss(theta, X, y_multi),
]


def run_function(f):
    """Solve Minimize(f()) with cvxpy and with ep.solve; require the
    epsilon solver to report OPTIMAL and to match cvxpy's objective to a
    1e-2 relative tolerance."""
    prob = cp.Problem(cp.Minimize(f()))
    obj_val0 = prob.solve()
    status, obj_val1 = ep.solve(prob)
    tol = 1e-2
    assert_equal(status, OPTIMAL)
    assert_less(abs(obj_val1 - obj_val0) / (1 + abs(obj_val0)), tol)


def test_functions():
    # NOTE(review): the body of this loop was truncated in the source;
    # the conventional nose test-generator form is restored here —
    # TODO confirm against the original file.
    for f in FUNCTION_TESTS:
        yield run_function, f
k = 3 theta = cp.Variable(n) Theta = cp.Variable(n, k) alphas = np.linspace(1. / (k + 1), 1 - 1. / (k + 1), k) np.random.seed(0) X = np.random.randn(m, n) y_binary = np.random.randint(2, size=(m, )) * 2 - 1 y_multi = np.random.randint(k, size=(m, )) # TODO(mwytock): Need to handle axis=1 parameter # lambda: ep.softmax_loss(Theta, X, y_multi), # lambda: ep.multiclass_hinge_loss(Theta, X, y_multi), FUNCTION_TESTS = [ lambda: ep.hinge_loss(theta, X, y_binary), lambda: ep.logistic_loss(theta, X, y_binary), lambda: ep.poisson_loss(theta, X, y_multi), ] def run_function(f): prob = cp.Problem(cp.Minimize(f())) obj_val0 = prob.solve() status, obj_val1 = ep.solve(prob) tol = 1e-2 assert_equal(status, OPTIMAL) assert_less(abs(obj_val1 - obj_val0) / (1 + abs(obj_val0)), tol)