def test_gc_overhead(self):
    # Test time overhead for graph conversion.
    dfs_time = 0.
    z3_time = 0.
    mm_time = 0.
    N = 20
    for i in range(N):
        rg = RandomGraph(9, 9, sparse=False, drawable=self.drawable)
        self.graph = rg.getGraph()
        if self.use_z3:
            solvable, time = self.convert_to_functional_graph_using_z3()
            z3_time += time
        for e in self.graph.edge_set:
            e.reset()
        start = timer()
        assert solvable == self.convert_to_functional_graph()
        end = timer()
        dfs_time += end - start
        self.graph = rg.getNxGraph()
        start = timer()
        biGraph.maximum_matching(self.graph)
        end = timer()
        mm_time += end - start
    print('Avg z3 time used: {}'.format(z3_time / N))
    print('Avg dfs time used: {}'.format(dfs_time / N))
    print('Avg mm time used: {}'.format(mm_time / N))
    return True
def max_card_matching(instance):
    """Given an instance, calculate the cardinality of the biggest matching.

    Note that this matching need not be, and probably won't be, stable.

    :param instance: The instance in question
    :type instance: Instance
    :rtype: int
    :return: The size of the largest cardinality matching.
    """
    graph = NxGraph()
    if instance.number_of_couples_left() != 0:
        raise Exception("max card matching does not currently support couples")
    for left in instance.single_agents_left:
        graph.add_node("l%s" % left.ident, bipartite=0)
    for right in instance.single_agents_right:
        for cap in range(right.capacity):
            graph.add_node("r%s_%d" % (right.ident, cap), bipartite=1)
    for left in instance.single_agents_left:
        for pref_group in left.preferences:
            for right_id in pref_group:
                for cap in range(instance.single_agent_right(right_id).capacity):
                    graph.add_edge("l%s" % left.ident,
                                   "r%s_%d" % (right_id, cap))
    size = 0
    for component in connected_component_subgraphs(graph):
        size += len(maximum_matching(component))
    return int(size / 2)
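Note that `connected_component_subgraphs` was removed from NetworkX in version 2.4. If this snippet is run against a newer release, a minimal shim along the following lines (an assumption for illustration, not part of the original project) restores the behaviour the function relies on:

import networkx as nx

def connected_component_subgraphs(graph):
    # Compatibility shim: yield each connected component as its own subgraph
    # copy, mirroring the helper that older NetworkX releases provided.
    for nodes in nx.connected_components(graph):
        yield graph.subgraph(nodes).copy()

Running the matching per component also sidesteps the `AmbiguousSolution` error that `maximum_matching` raises on disconnected graphs when no `top_nodes` container is supplied.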
def graph():
    G = nx.Graph()
    for name, flds in {
        name: {i for i, f in enumerate(fields) if len(f.difference(rule)) == 0}
        for name, rule in valid.items()
    }.items():
        for f in flds:
            G.add_edge(name, f)
    return bip.maximum_matching(G)
def part1():
    ingr_appearances, allergen_prospects = process_input()
    mm_edges = maximum_matching(Graph(allergen_prospects))
    ingr_without_allergens = ingr_appearances.keys() - (
        mm_edges.keys() - allergen_prospects.keys())
    # Uncomment the following to draw the graphs -
    # draw_bipartite_graph(Graph(allergen_prospects), allergen_prospects.keys())
    # draw_bipartite_graph(Graph(mm_edges.items()), allergen_prospects.keys())
    return sum([ingr_appearances[ingr] for ingr in ingr_without_allergens])
def part2():
    _, allergen_prospects = process_input()
    mm_edges = maximum_matching(Graph(allergen_prospects))
    ingr_with_allergens = [
        mm_edges[allergen] for allergen in allergen_prospects
    ]
    # Uncomment the following to draw the graphs -
    # draw_bipartite_graph(Graph(allergen_prospects), allergen_prospects.keys())
    # draw_bipartite_graph(Graph(mm_edges.items()), allergen_prospects.keys())
    return ','.join(sorted(ingr_with_allergens, key=lambda x: mm_edges[x]))
def halfint_lpvc2(graph: nx.Graph):
    """Deprecated half integral LP solver"""
    n = graph.number_of_nodes()
    doubled = make_double(graph)
    print("double graph created")
    mates = bipartite.maximum_matching(doubled)
    print("matching found")
    vc = bipartite.to_vertex_cover(doubled, mates)
    print("vc found")
    lpval = {v: 0.5 * ((v in vc) + (v + n in vc)) for v in graph.nodes_iter()}
    return lpval
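`make_double` is not shown in this excerpt. A plausible sketch, assuming nodes are labelled 0..n-1, is the standard bipartite double cover used for the half-integral vertex-cover LP (this is a reconstruction for illustration, not the project's actual helper):

import networkx as nx

def make_double(graph: nx.Graph) -> nx.Graph:
    # Assumed construction: node v is split into v (left) and v + n (right);
    # each original edge (u, v) becomes the cross edges (u, v + n) and (v, u + n).
    n = graph.number_of_nodes()
    doubled = nx.Graph()
    doubled.add_nodes_from(range(2 * n))
    for u, v in graph.edges():
        doubled.add_edge(u, v + n)
        doubled.add_edge(v, u + n)
    return doubled

With that construction, a minimum vertex cover of the doubled graph yields exactly the half-integral values computed above: x_v = 0.5 * ([v in cover] + [v + n in cover]).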
def birkhoff_von_neumann(Y, tol=0.0001):
    if Y.shape[0] != Y.shape[1]:
        raise ValueError('Y.shape[0] != Y.shape[1]')
    if np.any(Y < -tol):
        raise ValueError('np.any(Y < -tol)')
    Y = np.where(Y < tol, 0, Y)
    m = Y.shape[0]
    lambdas = []
    perms = []
    residuals = Y > tol
    while np.any(residuals):
        adj = residuals.astype(int)
        adj = sparse.csr_matrix(adj)
        G = bp.from_biadjacency_matrix(adj)
        M = bp.maximum_matching(G)
        M_ = [(kk, v - m) for kk, v in M.items() if kk < m]
        # this can happen due to numerical stability issues TODO add test
        if len(M_) < m:
            break
        # if tuples sorted by rows, then the columns are the permutation
        M_ = sorted(M_, key=itemgetter(0))
        rows, columns = zip(*M_)
        perm = np.array(columns)
        assert perm.shape == (m,)
        lambda_ = np.min(Y[rows, columns])
        P = np.zeros((m, m), dtype=float)
        P[rows, columns] = 1.
        lambdas.append(lambda_)
        perms.append(perm)
        Y -= lambda_ * P
        residuals = Y > tol
    return np.array(lambdas), np.array(perms)
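One pitfall worth noting for the loop above: on NetworkX 2.x, `bipartite.maximum_matching` raises `AmbiguousSolution` when the graph is disconnected and no `top_nodes` container is given, and the residual support here eventually shrinks to disjoint edges. A small standalone sketch of the safer call (the identity-matrix support is only an illustration):

import numpy as np
from scipy import sparse
from networkx.algorithms import bipartite as bp

# A permutation-matrix support gives a bipartite graph of disjoint edges,
# which is disconnected for m >= 2.
residuals = np.eye(3, dtype=bool)
m = residuals.shape[0]
G = bp.from_biadjacency_matrix(sparse.csr_matrix(residuals.astype(int)))

# Passing the row nodes 0..m-1 explicitly keeps the bipartite sets unambiguous.
M = bp.maximum_matching(G, top_nodes=range(m))
print(M)  # contains both directions, e.g. {0: 3, 1: 4, 2: 5, 3: 0, 4: 1, 5: 2}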
def part2():
    ticket_fields, my_ticket, nearby_tickets = process_input()
    # Create a set that contains the union of
    # the valid values for all of the fields
    all_valid_vals = set.union(*ticket_fields.values())
    valid_tickets = [
        t for t in nearby_tickets if is_ticket_valid(t, all_valid_vals)[0]
    ]
    graph = build_bipartite_graph(valid_tickets, ticket_fields)
    departures_product = 1
    mm_edges = maximum_matching(graph).items()
    for start_edge, end_edge in mm_edges:
        if isinstance(start_edge, str) and "departure" in start_edge:
            departures_product *= my_ticket[end_edge]
    # Uncomment the following to draw the graphs -
    # draw_bipartite_graph(graph, ticket_fields.keys())
    # draw_bipartite_graph(Graph(mm_edges), ticket_fields.keys())
    return departures_product
def _create_loops(self):
    points = []
    g_cities = nx.DiGraph()
    for city in self.cities:
        city = self.cities.get(city)
        g_cities.add_node('f-' + str(city.id), bipartite=0)
        g_cities.add_node('t-' + str(city.id), bipartite=1)
        points.append([city.x, city.y])
    points = np.array(points)
    vor = Voronoi(points, incremental=True)
    for point in vor.ridge_points:
        self.cities[point[0]].add_neighbor(self.cities[point[1]])
        self.cities[point[1]].add_neighbor(self.cities[point[0]])
        g_cities.add_edge('f-' + str(self.cities[point[0]]),
                          't-' + str(self.cities[point[1]]))
        g_cities.add_edge('f-' + str(self.cities[point[1]]),
                          't-' + str(self.cities[point[0]]))
    temp = bipartite.maximum_matching(g_cities)
    print(temp)
    del g_cities
    islands = nx.DiGraph()
    i = 0
    for key, value in temp.items():  # connect cities ...
        self.cities[int(key[2:])].connect_to(self.cities[int(value[2:])])
        islands.add_edge(self.cities[int(key[2:])],
                         self.cities[int(value[2:])])
        i += 1
        if i >= len(temp) / 2:
            break
    for i, c in enumerate(nx.recursive_simple_cycles(islands)):
        # for i, c in enumerate(nx.simple_cycles(islands)):
        loop = Loop(c, i)
        self._loops.append(loop)
        self.loops.add(i)
def max_satisfied_voters(self):
    """Calculates the maximum number of satisfied voters of an instance"""
    if self.v == 0:
        return 0
    # Construct bipartite graph
    for i, vote in enumerate(self.votes):
        loves_val = "cat" if "C" in vote[0] else "dog"
        self.graph.add_node(i, loves=loves_val, votes=vote)
    # Create edges for each conflict
    for node_1 in self.graph.nodes():
        for node_2 in self.graph.nodes():
            if node_1 == node_2:
                continue
            if self._conflict(node_1, node_2):
                self.graph.add_edge(node_1, node_2)
    matching = bipartite.maximum_matching(self.graph)
    matching = self._remove_duplicates(matching)
    return self.v - len(matching)
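The return value appears to rely on König's theorem: in the bipartite conflict graph (cat lovers versus dog lovers), the maximum number of mutually compatible voters equals the total number of voters minus the size of a maximum matching. A minimal illustration on a toy conflict graph (the node labels are arbitrary):

import networkx as nx
from networkx.algorithms import bipartite

# Toy conflict graph: voters 0 and 1 on one side, 2 and 3 on the other.
G = nx.Graph([(0, 2), (0, 3), (1, 3)])
matching = bipartite.maximum_matching(G, top_nodes=[0, 1])
matched_pairs = len(matching) // 2  # each matched pair appears in both directions

# König: the nodes outside a minimum vertex cover form a maximum independent
# set, and that cover has exactly matched_pairs vertices.
independent = set(G) - bipartite.to_vertex_cover(G, matching, top_nodes=[0, 1])
print(matched_pairs, len(independent))  # 2 2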
# read line
linha = input("")
# split elements
n, m = linha.split(" ")
# convert to integers
n = int(n)
m = int(m)
# add nodes
G.add_nodes_from(range(1, m + 1), bipartite=0)
for j in range(1, m + 1):
    # read line
    linha = input("")
    # split elements
    tam1, tam2 = linha.split(" ")
    # add nodes
    G.add_nodes_from([tam1, tam2], bipartite=1)
    # add edges
    G.add_edges_from([(j, tam1), (j, tam2)])
# end for
print("NO" if (len(bipartite.maximum_matching(G)) / 2 < m) else "YES")
# end for
# end main
def calculate_entity_mapping(G, method=None):
    """Given the networkx graph, calculate a dictionary mapping each row
    node to the most highly similar column node.

    :param G: A `networkx.Graph` comprising nodes from two entities,
        connected by equally weighted edges if the similarity was above a
        threshold.

    :param method: The method to use to solve the entity mapping. Options are:

        - 'flow' or None (default) - the `networkx.maximum_flow` method.
        - 'bipartite' - the `networkx.bipartite.maximum_matching` algorithm
          (fastest).
        - 'weighted' - the `networkx.max_weight_matching` algorithm (slowest
          but most accurate with close matches).

    :return: A dictionary mapping of row index to column index. If no mate
        is found, the node isn't included.
    """
    if method == 'bipartite':
        logging.info(
            'Solving entity matches with bipartite maximum matching solver')
        network = bipartite.maximum_matching(G)
        entity_map = _to_int_map(network, lambda network, node: network[node])

    elif method == 'weighted':
        logging.info(
            'Solving entity matches with networkx maximum weight matching solver')
        network = nx.max_weight_matching(G)
        entity_map = _to_int_map(network, lambda network, node: network[node])

    elif method == 'flow' or method is None:
        logging.info(
            'Solving entity matches with networkx maximum flow solver')
        # The maximum flow solver requires a SOURCE and SINK
        num_rows, num_cols = 0, 0
        for i, node in enumerate(G.nodes()):
            if node.startswith("row"):
                G.add_edge('start', node, capacity=1.0)
                num_rows += 1
            if node.startswith("col"):
                G.add_edge(node, 'end', capacity=1.0)
                num_cols += 1

        flow_value, network = nx.maximum_flow(G, 'start', 'end')

        # This method produces a quality metric `flow`, however
        # it needs to be compared to the number of entities
        if flow_value < num_rows:
            logging.info('Matching not perfect - {:.3f}'.format(flow_value))
        else:
            logging.info('Matching complete. (perfect matching)')

        def find_pair(network, node):
            # Make sure to deal with unconnected nodes
            possible_nodes = [n for n in network[node] if n != 'start']
            if len(possible_nodes) > 0:
                def get_score(node_name):
                    return network[node][node_name]
                return max(possible_nodes, key=get_score)
            else:
                return None

        entity_map = _to_int_map(network, find_pair)

    else:
        raise NotImplementedError("Haven't implemented that matching method")

    return entity_map
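For the 'bipartite' branch, a small standalone sketch (with made-up row/col node names, independent of surrounding helpers such as `_to_int_map`) shows what `maximum_matching` returns and how the row-to-column half of it becomes the entity map:

import networkx as nx
from networkx.algorithms import bipartite

# Toy similarity graph: an edge means the row/column pair scored above the threshold.
G = nx.Graph()
G.add_edges_from([("row0", "col1"), ("row1", "col0"), ("row1", "col1")])

# The result contains each mate in both directions ("row0" -> "col1" and
# "col1" -> "row0"); keeping only the row keys gives the row-to-column mapping.
mates = bipartite.maximum_matching(G, top_nodes=["row0", "row1"])
entity_map = {r: c for r, c in mates.items() if r.startswith("row")}
print(entity_map)  # {'row0': 'col1', 'row1': 'col0'}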
def display_maximum_matching(B):
    """Displays the maximum matching of a bipartite graph."""
    for k, v in bipartite.maximum_matching(B).items():
        if k < v:
            print("%d %d" % (k + 1, v + 1))
def maximalMatch(graph):
    match = bipartite.maximum_matching(graph)
    return match
                            or rr[1][0] <= f <= rr[1][1], fields[field]))
    field_name[(field, rule)] = res
    return res

for i in range(N):
    for j in range(N):
        rr = rul[j][1]
        field_name[(i, j)] = all(map(lambda f: rr[0][0] <= f <= rr[0][1]
                                     or rr[1][0] <= f <= rr[1][1], fields[i]))

# networkX bipartite matching algo
g = nx.Graph()
g.add_nodes_from(range(N), bipartite=0)
g.add_nodes_from(range(N, 2 * N), bipartite=1)
g.add_edges_from((i, j + N) for i in range(N) for j in range(N) if field_name[(i, j)])
matching = bipartite.maximum_matching(g)
ass = [x for _, x in sorted((i, j - N) for i, j in matching.items() if i < N)]
print(f"Part2: {dep()}")

# Sorting + backtracking
counts = [(i,
           sum(1 for j in range(N) if field_name[(i, j)]),
           sum(1 for j in range(N) if field_name[(j, i)])) for i in range(N)]
fields_sorted = list(map(lambda x: x[0], sorted(counts, key=lambda x: x[1])))
names_sorted = list(map(lambda x: x[0], sorted(counts, key=lambda x: x[2])))
ass = []  # index into fields_sorted
asss = set()
NUMS = [10, 4, 5, 8, 18, 17, 0, 7, 13, 15, 16, 14, 3, 12, 2, 6, 1, 19, 9, 11]
print("Computing by backtracking")
done = False