def remove_redundant_rows(data):
    """Remove redundant constraints from A and G.

    Parameters
    ----------
    data : dict
        All the problem data.

    Returns
    -------
    str
        A status indicating if infeasibility was detected.
    """
    # Pull the relevant pieces out of the problem data.
    dims = data[s.DIMS]
    A = data[s.A]
    G = data[s.G]
    b = data[s.B]
    h = data[s.H]

    # Drop linearly dependent rows of A via a pivoted QR factorization.
    if A.shape[0] > 0:
        # Column pivoting improves numerical robustness.
        Q, R, P = scipy.linalg.qr(A.todense(), pivoting=True)
        # Keep only rows of R with non-negligible norm.
        keep = [i for i in range(R.shape[0])
                if np.linalg.norm(R[i, :]) > 1e-10]
        R = R[keep, :]
        Q = Q[:, keep]
        # Invert the pivot permutation (col -> var becomes var -> col).
        Pinv = np.zeros(P.size, dtype='int')
        for idx in range(P.size):
            Pinv[P[idx]] = idx
        # Undo the column pivoting so R matches the original variable order.
        R = R[:, Pinv]
        A = R
        b_old = b
        b = Q.T.dot(b)
        # If b is not in the range of Q, the problem is infeasible.
        if not np.allclose(b_old, Q.dot(b)):
            return s.INFEASIBLE
        dims[s.EQ_DIM] = int(b.shape[0])
        data["Q"] = intf.dense2cvxopt(Q)

    # Remove obviously redundant rows in G's <= constraints.
    if dims[s.LEQ_DIM] > 0:
        G = G.tocsr()
        G_leq = G[:dims[s.LEQ_DIM], :]
        h_leq = h[:dims[s.LEQ_DIM]].ravel()
        G_other = G[dims[s.LEQ_DIM]:, :]
        h_other = h[dims[s.LEQ_DIM]:].ravel()
        G_leq, h_leq, P_leq = compress_matrix(G_leq, h_leq)
        dims[s.LEQ_DIM] = int(h_leq.shape[0])
        data["P_leq"] = intf.sparse2cvxopt(P_leq)
        G = sp.vstack([G_leq, G_other])
        h = np.hstack([h_leq, h_other])

    # Store the (possibly reduced) A, b, G, h back into the problem data.
    data[s.A] = A
    data[s.G] = G
    data[s.B] = b
    data[s.H] = h
    return s.OPTIMAL
def remove_redundant_rows(data):
    """Remove redundant constraints from A and G.

    Parameters
    ----------
    data : dict
        All the problem data.

    Returns
    -------
    str
        A status indicating if infeasibility was detected.
    """
    dims = data[s.DIMS]
    # Convert A, b, G, h to scipy sparse matrices and numpy arrays.
    A = intf.DEFAULT_SPARSE_INTF.const_to_matrix(data[s.A],
                                                 convert_scalars=True)
    G = intf.DEFAULT_SPARSE_INTF.const_to_matrix(data[s.G],
                                                 convert_scalars=True)
    b = intf.DEFAULT_NP_INTF.const_to_matrix(data[s.B],
                                             convert_scalars=True)
    h = intf.DEFAULT_NP_INTF.const_to_matrix(data[s.H],
                                             convert_scalars=True)

    # Remove redundant rows in A via a pivoted QR factorization.
    if A.shape[0] > 0:
        # The pivoting improves robustness.
        Q, R, P = scipy.linalg.qr(A.todense(), pivoting=True)
        # Keep only rows of R with non-negligible norm.
        rows_to_keep = []
        for i in range(R.shape[0]):
            if np.linalg.norm(R[i, :]) > 1e-10:
                rows_to_keep.append(i)
        R = R[rows_to_keep, :]
        Q = Q[:, rows_to_keep]
        # Invert P from col -> var to var -> col.
        Pinv = np.zeros(P.size, dtype='int')
        for i in range(P.size):
            Pinv[P[i]] = i
        # Rearrange R back to the original variable (column) order.
        R = R[:, Pinv]
        A = R
        b_old = b
        b = Q.T.dot(b)
        # If b is not in the range of Q,
        # the problem is infeasible.
        if not np.allclose(b_old, Q.dot(b)):
            return s.INFEASIBLE
        # Cast to a plain Python int: CVXOPT's solvers reject
        # numpy integers in the dims dict.
        dims[s.EQ_DIM] = int(b.shape[0])
        data["Q"] = intf.CVXOPT_DENSE_INTF.const_to_matrix(
            Q, convert_scalars=True)

    # Remove obviously redundant rows in G's <= constraints.
    if dims[s.LEQ_DIM] > 0:
        G = G.tocsr()
        G_leq = G[:dims[s.LEQ_DIM], :]
        h_leq = h[:dims[s.LEQ_DIM]]
        G_other = G[dims[s.LEQ_DIM]:, :]
        h_other = h[dims[s.LEQ_DIM]:]
        G_leq, h_leq, P_leq = compress_matrix(G_leq, h_leq)
        # Cast to a plain Python int (see EQ_DIM above).
        dims[s.LEQ_DIM] = int(h_leq.shape[0])
        data["P_leq"] = intf.CVXOPT_SPARSE_INTF.const_to_matrix(
            P_leq, convert_scalars=True)
        # Scipy 0.13 can't stack empty arrays.
        if G_leq.shape[0] > 0 and G_other.shape[0] > 0:
            G = sp.vstack([G_leq, G_other])
        elif G_leq.shape[0] > 0:
            G = G_leq
        else:
            G = G_other
        h = np.vstack([h_leq, h_other])

    # Convert A, b, G, h to CVXOPT matrices.
    data[s.A] = intf.CVXOPT_SPARSE_INTF.const_to_matrix(A,
                                                        convert_scalars=True)
    data[s.G] = intf.CVXOPT_SPARSE_INTF.const_to_matrix(G,
                                                        convert_scalars=True)
    data[s.B] = intf.CVXOPT_DENSE_INTF.const_to_matrix(b,
                                                       convert_scalars=True)
    data[s.H] = intf.CVXOPT_DENSE_INTF.const_to_matrix(h,
                                                       convert_scalars=True)
    return s.OPTIMAL