import cvxpy as cvx
import numpy as np


def nucmin_estimator(A, y, eta=None, **kwargs):
    """Estimate a low-rank matrix by nuclear-norm minimization.

    :param A: Stack of measurement matrices; the recovered matrix has shape
        ``(A.shape[1], A.shape[2])``.
    :param y: Observed measurement outcomes.
    :param eta: If given, require the fitted measurements to lie within
        ``eta`` of ``y`` instead of matching exactly.
    :param **kwargs: Additional keyword arguments passed to ``problem.solve``.
    :returns: The recovered matrix as a numpy array.
    """
    x_sharp = cvx.Variable((A.shape[1], A.shape[2]))
    objective = cvx.Minimize(cvx.normNuc(x_sharp))

    if eta is None:
        constraints = [_expval(A, x_sharp) == y]
    else:
        # CVXPY does not support strict inequalities; use <= for the noise bound.
        constraints = [cvx.abs(_expval(A, x_sharp) - y) <= eta]

    problem = cvx.Problem(objective, constraints)
    problem.solve(**kwargs)

    if problem.status not in ['optimal']:
        raise ValueError("Optimization did not converge: " + problem.status)

    return np.array(x_sharp.value)
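# A minimal usage sketch with hypothetical data. The real `_expval` is defined
# elsewhere; here it is assumed to evaluate the linear measurement map
# <A_i, X> = trace(A_i^T X) for each measurement matrix A_i.
import cvxpy as cvx
import numpy as np


def _expval(A, x):
    # Hypothetical stand-in: one Frobenius inner product per measurement matrix.
    return cvx.hstack([cvx.trace(A[i].T @ x) for i in range(A.shape[0])])


rng = np.random.default_rng(0)
rank, dim, n_meas = 2, 6, 30
U = rng.standard_normal((dim, rank))
X_true = U @ U.T                             # low-rank ground truth
A = rng.standard_normal((n_meas, dim, dim))  # random measurement matrices
y = np.einsum('ijk,jk->i', A, X_true)        # noiseless measurement outcomes

X_hat = nucmin_estimator(A, y)               # eta=None -> exact equality constraints
print(np.linalg.norm(X_hat - X_true))        # reconstruction error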
def test_norm_nuc(self) -> None:
    """Test gradient for norm_nuc."""
    expr = cp.normNuc(self.A)
    self.A.value = [[10, 4], [4, 30]]
    self.assertItemsAlmostEqual(expr.grad[self.A].toarray(), [1, 0, 0, 1])
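# Why the expected gradient is the identity: the (sub)gradient of the nuclear
# norm at a full-rank matrix M is U @ V.T, where M = U @ diag(s) @ V.T is the
# SVD. For the symmetric positive-definite test matrix above, U equals V, so
# the product is the 2x2 identity, i.e. [1, 0, 0, 1] when flattened.
# A quick standalone check with numpy:
import numpy as np

M = np.array([[10.0, 4.0], [4.0, 30.0]])
U, s, Vt = np.linalg.svd(M)
print(np.round(U @ Vt, 6))  # -> [[1. 0.], [0. 1.]]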
# Set up hyperparameters
options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9}

# Call instance of PSO
optimizer = ps.single.GlobalBestPSO(n_particles=100, dimensions=p,
                                    options=options)

# Perform optimization over the unknown entries (indexed by omega_c)
best_cost, best_pos = optimizer.optimize(nnm, iters=100, Y=Y_low,
                                         omega_c=omega_c)

# CVXPY reference solution: nuclear-norm minimization with equality
# constraints on the known entries (indexed by omega)
known_value_indices = tuple(zip(*omega.tolist()))
known_values = Y_low[omega[:, 0], omega[:, 1]]

X = cp.Variable((m, n), pos=True)
objective_fn = cp.normNuc(X)
constraints = [
    X[known_value_indices] == known_values,
]
problem = cp.Problem(cp.Minimize(objective_fn), constraints)
problem.solve(gp=False)
#print("Solver reconstruct loss: ", np.linalg.norm((np.array(X.value) - Y_low), "fro"))

# Fill the unknown entries with the best PSO position and report the
# reconstruction error (a relative Frobenius norm, despite the "MSE" label below)
Y[omega_c[:, 0], omega_c[:, 1]] = best_pos
print("PSO reconstruct loss:", np.linalg.norm((Y - Y_low), "fro"))
result.append(
    [t, np.linalg.norm((Y - Y_low), "fro") / np.linalg.norm(Y_low, "fro")])
#result.append([t, "CVXPY", np.linalg.norm((np.array(X.value) - Y_low), "fro")])

df = pd.DataFrame(result, columns=["iter", "MSE"]).iloc[:, 1]
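# The PSO objective `nnm` is defined elsewhere in the script. As a hedged
# sketch of what it might look like: pyswarms calls the objective with the
# whole swarm (shape (n_particles, dimensions)) plus the keyword arguments
# passed to optimize(), and expects one cost per particle. A plausible
# nuclear-norm cost matching the call above:
import numpy as np

def nnm(swarm, Y, omega_c):
    """Hypothetical sketch: nuclear norm of the matrix completed with each
    particle's guesses for the unobserved entries omega_c."""
    costs = np.empty(swarm.shape[0])
    for i, particle in enumerate(swarm):
        Y_filled = Y.copy()
        Y_filled[omega_c[:, 0], omega_c[:, 1]] = particle
        costs[i] = np.linalg.norm(Y_filled, 'nuc')
    return costs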