def test_max_flow_scale_free_directed(n):
    """Check our Goldberg solver against graph-tool's push-relabel on a
    directed scale-free graph of n vertices.

    The same seed is used twice so both solvers see an identical graph.
    """
    seed_number = randint(1, 1000)

    # Our implementation.
    graph, source, target = ScaleFree(
        n, directed=True, seed_number=seed_number).generate()
    max_flow = Goldberg(graph).get_max_flow(source, target)

    # Reference implementation on a regenerated, identical graph.
    graph, source, target = ScaleFree(
        n, directed=True, seed_number=seed_number).generate()
    res = gt.push_relabel_max_flow(graph, source, target, graph.ep.cap)
    res.a = graph.ep.cap.a - res.a  # residual capacities -> actual flow
    gt_max_flow = sum(res[e] for e in target.in_edges())

    assert max_flow == gt_max_flow
def get_algorithm(algorithm, graph):
    """Return a solver for *graph* selected by name.

    :param algorithm: one of 'generic', 'height', 'wave'
    :param graph: the graph the solver will operate on
    :raises ValueError: if *algorithm* is not a known name — the original
        fell through and silently returned None, hiding typos from callers.
    """
    if algorithm == 'generic':
        return Goldberg(graph)
    elif algorithm == 'height':
        return GoldbergHeight(graph)
    elif algorithm == 'wave':
        return GoldbergWave(graph)
    raise ValueError("unknown algorithm: " + repr(algorithm))
def test_max_flow_triangulation_delaunay_directed(n):
    """Check our Goldberg solver against graph-tool's push-relabel on a
    directed Delaunay triangulation of n vertices (same seed for both runs).
    """
    seed_number = randint(1, 1000)

    # Our implementation.
    graph, source, target = Triangulation(
        n, type="delaunay", directed=True, seed_number=seed_number).generate()
    max_flow = Goldberg(graph).get_max_flow(source, target)

    # Reference implementation on a regenerated, identical graph.
    graph, source, target = Triangulation(
        n, type="delaunay", directed=True, seed_number=seed_number).generate()
    res = gt.push_relabel_max_flow(graph, source, target, graph.ep.cap)
    res.a = graph.ep.cap.a - res.a  # residual capacities -> actual flow
    gt_max_flow = sum(res[e] for e in target.in_edges())

    assert max_flow == gt_max_flow
def test_max_flow_scale_random_undirected(size):
    """Check our Goldberg solver against graph-tool's push-relabel on an
    undirected random graph; size is a (vertices, edges) pair.

    The same seed is reused so both solvers see an identical graph.
    """
    seed_number = randint(1, 1000)

    # Our implementation.
    graph, source, target = Random(
        size[0], size[1], directed=False, seed_number=seed_number).generate()
    max_flow = Goldberg(graph).get_max_flow(source, target)

    # Reference implementation on a regenerated, identical graph.
    graph, source, target = Random(
        size[0], size[1], directed=False, seed_number=seed_number).generate()
    res = gt.push_relabel_max_flow(graph, source, target, graph.ep.cap)
    res.a = graph.ep.cap.a - res.a  # residual capacities -> actual flow
    gt_max_flow = sum(res[e] for e in target.in_edges())

    assert max_flow == gt_max_flow
# Temporal-complexity benchmark: cProfile the Goldberg solver on random
# directed graphs of 10-90 nodes (4 edges per vertex), 35 runs per size,
# appending each run's profile stats to a single report file.
#
# Fix vs original: the file was opened with a bare open()/close() pair and
# the builtin name `file` was shadowed — any exception inside the 9x35 loop
# leaked the handle.  `with` guarantees the file is closed.
with open(
        "temporal_complexity_data_goldberg_4 edges for each vertex_10-90_nodes",
        "w") as report:
    for nodes in [10, 20, 30, 40, 50, 60, 70, 80, 90]:
        for _ in range(35):
            # Goldberg version - using Random as graph generator
            seed_number = randint(1, 1000)
            generator = Random(nodes, nodes * 4, directed=True,
                               seed_number=seed_number)
            g, source, target = generator.generate()
            title = ('- Parte grafo versione Goldberg con ' + str(nodes) +
                     ' nodi e ' + str(len(g.get_edges())) +
                     ' archi - Random.')
            print(title)
            report.write(title)

            solver = Goldberg(graph=g)
            pr = cProfile.Profile()
            pr.enable()
            solver.get_max_flow(source, target)  # result unused; only timing matters
            pr.disable()

            # Dump cumulative-time-sorted stats for this run into the report.
            s = StringIO()
            ps = pstats.Stats(pr, stream=s).sort_stats('cumulative')
            ps.print_stats()
            report.write(s.getvalue())
# Spatial-complexity benchmark: average memory usage of the Goldberg solver
# on random directed graphs of growing size (4 edges per vertex), followed
# by the setup of a matplotlib plot of the measurements.
data = []
nodes = [50, 100, 150, 200, 250, 300, 350, 400]
edges = []
n_v = []
for idx, n_nodes in enumerate(nodes):
    seed_number = randint(1, 1000)
    generator = Random(n_nodes, n_nodes * 4, directed=True,
                       seed_number=seed_number)
    g, source, target = generator.generate()
    edges.append(str(len(g.get_edges())))
    title = '- Parte grafo con ' + str(
        n_nodes) + ' nodi e ' + edges[idx] + ' archi.\n'
    print(title)
    solver = Goldberg(g)
    # memory_profiler samples RSS while get_max_flow runs; keep the mean.
    usage = memory_usage((solver.get_max_flow, (source, target)))
    data.append(sum(usage) / len(usage))
    print(data[idx])
# (dead code kept from original, disabled)
#for i in range(0, len(nodes)):
#    complexity += [(data[0]/float(nodes[0]))**(1/2) * float(nodes[i])**(1/2)]
# n + |E| for each measured graph, used as the x-axis of the plot.
for idx in range(len(nodes)):
    n_v.append(nodes[idx] + int(edges[idx]))
f = plt.figure()
plt.xlabel('Edge size')
plt.ylabel('Memory utilization')
plt.title("Spatial complexity Goldberg implementation")
red_patch = mpatches.Patch(color='red', label='Empirical')