def test_perfect_matching(self):
    """Check that the square, nonsingular subsystem admits a perfect
    matching, and that a known (constraint, variable) pair appears in it.
    """
    model = make_gas_expansion_model()
    first = model.streams.first()
    # Variables of the square, nonsingular subsystem: every pressure,
    # plus T, rho, and F for all streams except the first.
    variables = list(model.P.values())
    for comp in (model.T, model.rho, model.F):
        variables.extend(comp[s] for s in model.streams if s != first)
    constraints = list(model.component_data_objects(pyo.Constraint))
    imat = get_structural_incidence_matrix(variables, constraints)
    con_idx_map = ComponentMap(
        (con, idx) for idx, con in enumerate(constraints)
    )
    n_var = len(variables)
    # Map row (constraint) indices to column (variable) indices, then
    # translate back into components.
    row_col_map = maximum_matching(imat)
    matching = ComponentMap(
        (con, variables[row_col_map[con_idx_map[con]]])
        for con in constraints
    )
    matched_vars = ComponentSet(matching.values())
    # Perfect: every constraint matched, all matched variables distinct.
    self.assertEqual(len(matching), n_var)
    self.assertEqual(len(matched_vars), n_var)
    # The subset of variables and equations we have identified
    # do not have a unique perfect matching. But we at least know
    # this much.
    self.assertIs(matching[model.ideal_gas[0]], model.P[0])
def test_bordered(self):
    """An arrowhead matrix (diagonal plus full last row and last column)
    has a perfect matching covering all rows and columns.
    """
    N = 5
    rows, cols = [], []
    for i in range(N - 1):
        # Bottom-row, right-column, and diagonal entries for index i.
        rows.extend((N - 1, i, i))
        cols.extend((i, N - 1, i))
    data = [1] * len(rows)
    matrix = sps.coo_matrix((data, (rows, cols)), shape=(N, N))
    matching = maximum_matching(matrix)
    self.assertEqual(len(matching), N)
    matched_cols = set(matching.values())
    for i in range(N):
        self.assertIn(i, matching)
        self.assertIn(i, matched_cols)
def test_low_rank_nondecomposable_hessenberg(self):
    """
    |  x      |
    |x   x    |
    |  x   x  |
    |    x   x|
    |      x  |
    Sub- and super-diagonal only (empty main diagonal). For odd N a
    matrix with this structure does not have a perfect matching; the
    maximum matching leaves one row and one column uncovered.
    """
    N = 5
    rows, cols = [], []
    for i in range(N - 1):
        # Entry below the diagonal and entry above the diagonal.
        rows.extend((i + 1, i))
        cols.extend((i, i + 1))
    data = [1] * len(rows)
    matrix = sps.coo_matrix((data, (rows, cols)), shape=(N, N))
    matching = maximum_matching(matrix)
    matched_cols = set(matching.values())
    self.assertEqual(len(matching), N - 1)
    self.assertEqual(len(matched_cols), N - 1)
def test_imperfect_matching(self):
    """With every variable included (more variables than equations),
    the maximum matching still covers all equations.
    """
    model = make_gas_expansion_model()
    all_vars = list(model.component_data_objects(pyo.Var))
    all_cons = list(model.component_data_objects(pyo.Constraint))
    imat = get_structural_incidence_matrix(all_vars, all_cons)
    n_eqn = len(all_cons)
    matching = maximum_matching(imat)
    matched_cols = set(matching.values())
    # Every equation is matched to a distinct variable.
    self.assertEqual(len(matching), n_eqn)
    self.assertEqual(len(matched_cols), n_eqn)
def maximum_matching(self, variables=None, constraints=None):
    """Return a maximum matching of constraints and variables, as a
    ComponentMap from each matched constraint to its matched variable.
    """
    variables, constraints = self._validate_input(variables, constraints)
    submatrix = self._extract_submatrix(variables, constraints)
    # The underlying matching maps row (constraint) indices to column
    # (variable) indices; translate indices back into components.
    row_col_map = maximum_matching(submatrix.tocoo())
    return ComponentMap(
        (constraints[r], variables[c]) for r, c in row_col_map.items()
    )
def test_identity(self):
    """The identity matrix is matched along its diagonal, regardless of
    the sparse storage format passed in.
    """
    N = 5
    matrix = sps.identity(N)
    # Exercise the default (DIA), COO, and CSC formats.
    for converted in (matrix, matrix.tocoo(), matrix.tocsc()):
        matching = maximum_matching(converted)
        self.assertEqual(len(matching), N)
        for i in range(N):
            self.assertIn(i, matching)
            self.assertEqual(i, matching[i])
def test_low_rank_diagonal(self):
    """A diagonal matrix with one entry removed matches every remaining
    row to its own column.
    """
    N = 5
    omit = N // 2
    # Diagonal entries everywhere except at index `omit`.
    idx = [i for i in range(N) if i != omit]
    data = [1] * (N - 1)
    matrix = sps.coo_matrix((data, (idx, idx)), shape=(N, N))
    matching = maximum_matching(matrix)
    self.assertEqual(len(matching), N - 1)
    for i in idx:
        self.assertIn(i, matching)
        self.assertEqual(i, matching[i])
def test_low_rank_hessenberg(self):
    """
    |x x      |
    |         |
    |      x  |
    |        x|
    |x x x x x|
    One superdiagonal entry removed, so only N-1 rows can be matched.
    Know that first and last row and column will be in the imperfect
    matching.
    """
    N = 5
    omit = N // 2
    rows, cols = [], []
    for j in range(N):
        # Bottom row has an entry in every column.
        rows.append(N - 1)
        cols.append(j)
        if j == 0:
            # Top-left entry.
            rows.append(0)
            cols.append(j)
        elif j != omit:
            # Superdiagonal entry (skipped for the omitted column).
            rows.append(j - 1)
            cols.append(j)
    data = [1] * len(rows)
    matrix = sps.coo_matrix((data, (rows, cols)), shape=(N, N))
    matching = maximum_matching(matrix)
    matched_cols = set(matching.values())
    self.assertEqual(len(matching), N - 1)
    self.assertIn(0, matching)
    self.assertIn(N - 1, matching)
    self.assertIn(0, matched_cols)
    self.assertIn(N - 1, matched_cols)
def test_nondecomposable_hessenberg(self):
    """
    |x x      |
    |  x x    |
    |    x x  |
    |      x x|
    |x x x x x|
    Full diagonal and superdiagonal plus a dense bottom row; this
    matrix has a perfect matching.
    """
    N = 5
    rows, cols = [], []
    for j in range(N):
        # Bottom-row entry in every column.
        rows.append(N - 1)
        cols.append(j)
        # Diagonal entry. This duplicates (N-1, N-1); duplicate
        # coordinates are fine for COO construction.
        rows.append(j)
        cols.append(j)
        if j != 0:
            # Superdiagonal entry.
            rows.append(j - 1)
            cols.append(j)
    data = [1] * len(rows)
    matrix = sps.coo_matrix((data, (rows, cols)), shape=(N, N))
    matching = maximum_matching(matrix)
    matched_cols = set(matching.values())
    self.assertEqual(len(matching), N)
    for i in range(N):
        self.assertIn(i, matching)
        self.assertIn(i, matched_cols)
def test_hessenberg(self):
    """
    |x x      |
    |    x    |
    |      x  |
    |        x|
    |x x x x x|
    Superdiagonal plus top-left entry plus dense bottom row; this
    matrix has a perfect matching.
    """
    N = 5
    rows, cols = [], []
    for j in range(N):
        # Bottom-row entry in every column.
        rows.append(N - 1)
        cols.append(j)
        if j == 0:
            # Top-left entry.
            rows.append(0)
            cols.append(j)
        else:
            # Superdiagonal entry.
            rows.append(j - 1)
            cols.append(j)
    data = [1] * len(rows)
    matrix = sps.coo_matrix((data, (rows, cols)), shape=(N, N))
    matching = maximum_matching(matrix)
    self.assertEqual(len(matching), N)
    matched_cols = set(matching.values())
    for i in range(N):
        self.assertIn(i, matching)
        self.assertIn(i, matched_cols)
def block_triangularize(matrix, matching=None):
    """
    Computes the necessary information to permute a matrix to
    block-lower triangular form, i.e. a partition of rows and columns
    into an ordered set of diagonal blocks in such a permutation.

    Arguments
    ---------
    matrix: A SciPy sparse matrix. Must be square and must admit a
        perfect matching of rows and columns.
    matching: A perfect matching of rows and columns, in the form of a
        dict mapping row indices to column indices. If not provided,
        one is computed with maximum_matching.

    Returns
    -------
    Two dicts. The first maps each row index to the index of its block
    in a block-lower triangular permutation of the matrix. The second
    maps each column index to the index of its block in a block-lower
    triangular permutation of the matrix.

    Raises
    ------
    ValueError
        If the matrix is not square, or if no perfect matching exists.
    """
    nxb = nx.algorithms.bipartite
    nxc = nx.algorithms.components
    nxd = nx.algorithms.dag
    from_biadjacency_matrix = nxb.matrix.from_biadjacency_matrix
    M, N = matrix.shape
    if M != N:
        raise ValueError(
            "block_triangularize does not currently "
            "support non-square matrices. Got matrix with shape %s."
            % (matrix.shape, ))
    # Bipartite graph: nodes 0..M-1 are rows, nodes M..M+N-1 are columns.
    bg = from_biadjacency_matrix(matrix)
    if matching is None:
        matching = maximum_matching(matrix)
    len_matching = len(matching)
    if len_matching != M:
        raise ValueError("block_triangularize only supports matrices "
                "that have a perfect matching of rows and columns. "
                "Cardinality of maximal matching is %s" % len_matching)

    # Construct directed graph of rows
    dg = nx.DiGraph()
    dg.add_nodes_from(range(M))
    for n in dg.nodes:
        col_idx = matching[n]
        # Column nodes in the bipartite graph are offset by M.
        col_node = col_idx + M
        # For all rows that share this column
        for neighbor in bg[col_node]:
            if neighbor != n:
                # Add an edge towards this column's matched row
                dg.add_edge(neighbor, n)

    # Partition the rows into strongly connected components (diagonal
    # blocks)
    scc_list = list(nxc.strongly_connected_components(dg))
    node_scc_map = {n: idx for idx, scc in enumerate(scc_list) for n in scc}

    # Now we need to put the SCCs in the right order. We do this by
    # performing a topological sort on the DAG of SCCs.
    dag = nx.DiGraph()
    for i, c in enumerate(scc_list):
        dag.add_node(i)
    for n in dg.nodes:
        source_scc = node_scc_map[n]
        for neighbor in dg[n]:
            target_scc = node_scc_map[neighbor]
            if target_scc != source_scc:
                dag.add_edge(target_scc, source_scc)
                # Reverse direction of edge. This corresponds to
                # creating a block lower triangular matrix.

    scc_order = list(nxd.lexicographical_topological_sort(dag))
    scc_block_map = {c: i for i, c in enumerate(scc_order)}
    row_block_map = {n: scc_block_map[c] for n, c in node_scc_map.items()}
    # ^ This maps row indices to the blocks they belong to.

    # Invert the matching to map column indices back to row indices, so
    # each column inherits the block of its matched row.
    col_row_map = {c: r for r, c in matching.items()}
    # A perfect matching is a bijection, so the inverse has M entries.
    assert len(col_row_map) == M
    col_block_map = {c: row_block_map[col_row_map[c]] for c in range(N)}
    return row_block_map, col_block_map