def graph_implementation(arg_objs, size, data=None):
    """Reduces the atom to an affine expression and list of constraints.

    Parameters
    ----------
    arg_objs : list
        LinExpr for each argument.
    size : tuple
        The size of the resulting expression.
    data :
        Additional data required by the atom.

    Returns
    -------
    tuple
        (LinOp for objective, list of constraints)
    """
    A = arg_objs[0]
    n, _ = A.size
    # Requires that A is symmetric.
    obj, constraints = transpose.graph_implementation([A], (n, n))
    # A == A.T
    constraints.append(lu.create_eq(A, obj))
    # SDP constraint.
    t = lu.create_var((1, 1))
    prom_t = lu.promote(t, (n, 1))
    # A - I*t
    expr = lu.sub_expr(A, lu.diag_vec(prom_t))
    return (t, [SDP(expr)] + constraints)

def graph_implementation(arg_objs, size, data=None):
    """Reduces the atom to an affine expression and list of constraints.

    Parameters
    ----------
    arg_objs : list
        LinExpr for each argument.
    size : tuple
        The size of the resulting expression.
    data :
        Additional data required by the atom.

    Returns
    -------
    tuple
        (LinOp for objective, list of constraints)
    """
    A = arg_objs[0]
    n, _ = A.size
    # Requires that A is symmetric.
    # A == A.T
    obj, constraints = transpose.graph_implementation([A], (n, n))
    constraints.append(lu.create_eq(A, obj))
    # SDP constraint.
    t = lu.create_var((1, 1))
    prom_t = lu.promote(t, (n, 1))
    # I*t - A
    expr = lu.sub_expr(lu.diag_vec(prom_t), A)
    return (t, [SDP(expr)] + constraints)

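# The two reductions above encode the epigraph conditions A - t*I >> 0
# (t is a lower bound on the smallest eigenvalue) and t*I - A >> 0
# (t is an upper bound on the largest eigenvalue). A minimal numpy-only
# sanity check of those characterizations, independent of the cvxpy
# internals (lu, transpose, SDP) used above; the helper name below is
# illustrative, not part of the library:
import numpy as np

def _check_eigenvalue_bounds(n=4, seed=0):
    rng = np.random.default_rng(seed)
    B = rng.standard_normal((n, n))
    A = (B + B.T) / 2  # symmetric test matrix
    lo, hi = np.linalg.eigvalsh(A)[[0, -1]]  # smallest, largest eigenvalue
    # A - t*I is PSD exactly when t <= lambda_min(A).
    assert np.all(np.linalg.eigvalsh(A - (lo - 1e-8) * np.eye(n)) >= -1e-6)
    # t*I - A is PSD exactly when t >= lambda_max(A).
    assert np.all(np.linalg.eigvalsh((hi + 1e-8) * np.eye(n) - A) >= -1e-6)
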
def graph_implementation(arg_objs, size, data=None):
    """Reduces the atom to an affine expression and list of constraints.

    Parameters
    ----------
    arg_objs : list
        LinExpr for each argument.
    size : tuple
        The size of the resulting expression.
    data :
        Additional data required by the atom.

    Returns
    -------
    tuple
        (LinOp for objective, list of constraints)
    """
    A = arg_objs[0]  # n by m matrix.
    n, m = A.size
    # Create a matrix with Schur complement I*t - (1/t)*A.T*A.
    X = lu.create_var((n+m, n+m))
    t = lu.create_var((1, 1))
    I_n = lu.create_const(sp.eye(n), (n, n))
    I_m = lu.create_const(sp.eye(m), (m, m))
    # Expand A.T.
    obj, constraints = transpose.graph_implementation([A], (m, n))
    # Fix X using the fact that A must be affine by the DCP rules.
    # X[0:n, 0:n] == I_n*t
    index.block_eq(X, lu.mul_expr(I_n, t, (n, n)), constraints,
                   0, n, 0, n)
    # X[0:n, n:n+m] == A
    index.block_eq(X, A, constraints,
                   0, n, n, n+m)
    # X[n:n+m, 0:n] == obj (A.T)
    index.block_eq(X, obj, constraints,
                   n, n+m, 0, n)
    # X[n:n+m, n:n+m] == I_m*t
    index.block_eq(X, lu.mul_expr(I_m, t, (m, m)), constraints,
                   n, n+m, n, n+m)
    # Add SDP constraint.
    return (t, constraints + [SDP(X)])

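# The block matrix X above is [t*I_n, A; A.T, t*I_m]; for t > 0 its Schur
# complement is t*I_m - (1/t)*A.T*A, so X >> 0 exactly when t >= sigma_max(A).
# A small numpy check of that equivalence (helper name is illustrative):
import numpy as np

def _check_sigma_max_block(n=3, m=5, seed=0):
    rng = np.random.default_rng(seed)
    A = rng.standard_normal((n, m))
    t = np.linalg.norm(A, 2) + 1e-8  # sigma_max(A) plus a small margin
    X = np.block([[t * np.eye(n), A],
                  [A.T, t * np.eye(m)]])
    # Eigenvalues of X are t and t +/- sigma_i(A), so X is PSD here.
    assert np.all(np.linalg.eigvalsh(X) >= -1e-6)
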
def graph_implementation(arg_objs, size, data=None):
    """Reduces the atom to an affine expression and list of constraints.

    Parameters
    ----------
    arg_objs : list
        LinExpr for each argument.
    size : tuple
        The size of the resulting expression.
    data :
        Additional data required by the atom.

    Returns
    -------
    tuple
        (LinOp for objective, list of constraints)
    """
    A = arg_objs[0]
    rows, cols = A.size
    # Create the equivalent problem:
    #   minimize (trace(U) + trace(V))/2
    #   subject to:
    #       [U A; A.T V] is positive semidefinite
    X = lu.create_var((rows+cols, rows+cols))
    # Expand A.T.
    obj, constraints = transpose.graph_implementation([A], (cols, rows))
    # Fix X using the fact that A must be affine by the DCP rules.
    # X[0:rows, rows:rows+cols] == A
    index.block_eq(X, A, constraints,
                   0, rows, rows, rows+cols)
    # X[rows:rows+cols, 0:rows] == A.T
    index.block_eq(X, obj, constraints,
                   rows, rows+cols, 0, rows)
    diag = [index.get_index(X, constraints, i, i) for i in range(rows+cols)]
    half = lu.create_const(0.5, (1, 1))
    trace = lu.mul_expr(half, lu.sum_expr(diag), (1, 1))
    # Add SDP constraint.
    return (trace, [SDP(X)] + constraints)

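# In the formulation above, the optimal X attains (trace(U) + trace(V))/2 =
# ||A||_* (the nuclear norm): with the SVD A = P*diag(s)*Q.T, taking
# U = P*diag(s)*P.T and V = Q*diag(s)*Q.T makes [U A; A.T V] PSD and the
# objective equal to sum(s). A minimal numpy check of that feasible point
# (helper name is illustrative):
import numpy as np

def _check_nuclear_norm_block(rows=4, cols=3, seed=0):
    rng = np.random.default_rng(seed)
    A = rng.standard_normal((rows, cols))
    P, s, Qt = np.linalg.svd(A, full_matrices=False)
    U = P @ np.diag(s) @ P.T
    V = Qt.T @ np.diag(s) @ Qt
    # X = [P; Q] diag(s) [P; Q].T, hence positive semidefinite.
    X = np.block([[U, A], [A.T, V]])
    assert np.all(np.linalg.eigvalsh(X) >= -1e-8)
    assert np.isclose((np.trace(U) + np.trace(V)) / 2, s.sum())
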
def graph_implementation(arg_objs, size, data=None):
    """Reduces the atom to an affine expression and list of constraints.

    Creates the equivalent problem::

        maximize    sum(log(D[i, i]))
        subject to: D diagonal
                    diag(D) = diag(Z)
                    Z is upper triangular.
                    [D Z; Z.T A] is positive semidefinite

    The problem computes the LDL factorization:

    .. math::

        A = (Z^TD^{-1})D(D^{-1}Z)

    This follows from the inequality:

    .. math::

        \det(A) >= \det(D) + \det([D, Z; Z^T, A])/\det(D) >= \det(D)

    because (Z^TD^{-1})D(D^{-1}Z) is a feasible D, Z that achieves
    det(A) = det(D) and the objective maximizes det(D).

    Parameters
    ----------
    arg_objs : list
        LinExpr for each argument.
    size : tuple
        The size of the resulting expression.
    data :
        Additional data required by the atom.

    Returns
    -------
    tuple
        (LinOp for objective, list of constraints)
    """
    A = arg_objs[0]  # n by n matrix.
    n, _ = A.size
    X = lu.create_var((2 * n, 2 * n))
    Z = lu.create_var((n, n))
    D = lu.create_var((n, n))
    # Require that X is symmetric (which implies A is symmetric).
    # X == X.T
    obj, constraints = transpose.graph_implementation([X], (n, n))
    constraints.append(lu.create_eq(X, obj))
    # Require that X and A are PSD.
    constraints += [SDP(X), SDP(A)]
    # Fix Z as upper triangular, D as diagonal,
    # and diag(D) as diag(Z).
    for i in xrange(n):
        for j in xrange(n):
            if i == j:
                # D[i, j] == Z[i, j]
                Dij = index.get_index(D, constraints, i, j)
                Zij = index.get_index(Z, constraints, i, j)
                constraints.append(lu.create_eq(Dij, Zij))
            if i != j:
                # D[i, j] == 0
                Dij = index.get_index(D, constraints, i, j)
                constraints.append(lu.create_eq(Dij))
            if i > j:
                # Z[i, j] == 0
                Zij = index.get_index(Z, constraints, i, j)
                constraints.append(lu.create_eq(Zij))
    # Fix X using the fact that A must be affine by the DCP rules.
    # X[0:n, 0:n] == D
    index.block_eq(X, D, constraints, 0, n, 0, n)
    # X[0:n, n:2*n] == Z
    index.block_eq(X, Z, constraints, 0, n, n, 2 * n)
    # X[n:2*n, n:2*n] == A
    index.block_eq(X, A, constraints, n, 2 * n, n, 2 * n)
    # Add the objective sum(log(D[i, i])).
    log_diag = []
    for i in xrange(n):
        Dii = index.get_index(D, constraints, i, i)
        obj, constr = log.graph_implementation([Dii], (1, 1))
        constraints += constr
        log_diag.append(obj)
    obj = lu.sum_expr(log_diag)
    return (obj, constraints)

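# The LDL argument in the docstring can be checked numerically: with
# A = L*D*L.T (L unit lower triangular, D diagonal), Z = D*L.T is upper
# triangular with diag(Z) = diag(D), the block matrix [D Z; Z.T A] is PSD
# (its Schur complement A - Z.T*inv(D)*Z is zero), and sum(log(D[i, i]))
# equals log(det(A)). A small numpy sketch of those facts, assuming a
# positive definite A (helper name is illustrative):
import numpy as np

def _check_log_det_factorization(n=4, seed=0):
    rng = np.random.default_rng(seed)
    B = rng.standard_normal((n, n))
    A = B @ B.T + np.eye(n)              # positive definite test matrix
    C = np.linalg.cholesky(A)            # A = C*C.T with C lower triangular
    d = np.diag(C) ** 2                  # diagonal of D in A = L*D*L.T
    L = C / np.diag(C)                   # unit lower triangular factor
    D = np.diag(d)
    Z = D @ L.T                          # upper triangular, diag(Z) == d
    assert np.allclose(np.diag(Z), d)
    X = np.block([[D, Z], [Z.T, A]])
    assert np.all(np.linalg.eigvalsh(X) >= -1e-6)
    assert np.isclose(np.log(d).sum(), np.linalg.slogdet(A)[1])
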