def convertToGraph(battleGraph):
    """Build a weighted DiGraph from a nested {src: {dst: weight}} mapping.

    battleGraph: dict of dicts, battleGraph[src][dst] -> edge weight.
    Returns a DiGraph with one weighted edge per (src, dst) pair.
    """
    # Flatten the nested mapping straight into (src, dst, weight) triples
    # (Python 2 dict API, matching the rest of this code).
    edges = [
        (src, dst, weight)
        for src, targets in battleGraph.iteritems()
        for dst, weight in targets.iteritems()
    ]
    graph = DiGraph()
    graph.add_weighted_edges_from(edges)
    return graph
def compute_matched_vol_per_pair(sent_vol):
    """Match volumes between pairs via min-cost flow.

    sent_vol: dict keyed by (from, to) pairs with the sent volume as value,
    used as edge capacities. Returns a dict with the same keys holding the
    flow assigned to each pair by the network simplex algorithm.
    """
    graph = DiGraph()
    # Uniform cost of -1 makes network_simplex maximize total matched flow.
    graph.add_weighted_edges_from((src, dst, -1) for src, dst in sent_vol.keys())
    for (src, dst), volume in sent_vol.items():
        graph[src][dst]['capacity'] = volume
    result = network_simplex(graph)
    flow = result[1]
    # Sanity check: no edge may carry more than its capacity (within EPS).
    for src, dst in sent_vol.keys():
        assert flow[src][dst] <= sent_vol[src, dst] + EPS
    return {(src, dst): flow[src][dst] for src, dst in sent_vol.keys()}
def test_write_pajek():
    """Write a small weighted digraph in pajek format and check the output."""
    graph = DiGraph()
    graph.add_weighted_edges_from([(1, 2, 0.5), (3, 1, 0.75)])
    with tempfile.NamedTemporaryFile(delete=False) as tmp:
        pajek.write_pajek(graph, tmp)
    content = open(tmp.name).read()
    os.unlink(tmp.name)
    nt.assert_true(re.search(r'\*vertices 3', content))
    nt.assert_true(re.search(r'\*arcs', content))
    # The infomap code barfs if the '*network' line is present, check for that
    nt.assert_false(re.search(r'\*network', content))
def test_write_pajek():
    """Write a small weighted digraph in pajek format and check the output.

    NOTE(review): this is a byte-for-byte duplicate of the test_write_pajek
    defined earlier in this file and shadows it — confirm whether one copy
    should be removed.
    """
    g = DiGraph()
    g.add_weighted_edges_from([(1, 2, 0.5), (3, 1, 0.75)])
    with tempfile.NamedTemporaryFile(delete=False) as outfile:
        pajek.write_pajek(g, outfile)
    content = open(outfile.name).read()
    os.unlink(outfile.name)
    nt.assert_true(re.search(r'\*vertices 3', content))
    nt.assert_true(re.search(r'\*arcs', content))
    # The infomap code barfs if the '*network' line is present, check for that
    nt.assert_false(re.search(r'\*network', content))
def main(): n = input("Enter number of nodes : ") # commands edges = [] N = None nodes = range(1, n + 1) print "Nodes created : ", nodes print "\n-> Enter edges using below syntax\n-> Enter 'N' if cost is unknown\n-> Enter 'end' with quote to continue" print "-> Enter 'nodes' to print all nodes\n-> Enter 'edges' to print all edges" print "syntax : \n>>> parent_node_no, child_node_no, edge_cost" while True: try: my_input = input(">>> ") if isinstance(my_input, tuple): edges.append(my_input) print "Edge ", my_input, "Added" elif isinstance(my_input, str) and my_input.lower() == 'end': break else: print my_input except NameError as error: print "Invalid Command!", error continue g = DiGraph() g.add_nodes_from(nodes) g.add_weighted_edges_from(edges) m = Minimax(g) # playing ... depth = len(m.depthMatrix) for i in range(depth - 1, 0, -1): value = m.minimax(i) if i % 2: print "Max palyed, found maximum value nodes ", m.closedList[-1] else: print "min palyed, found minimum value nodes ", m.closedList[-1] if value: print "Game end, root node value : ", value print "Path : " + ' - '.join(map(str, m.findPath()))
class Markov(object):
    """Word-level Markov chain backed by a weighted directed graph."""

    def __init__(self, text):
        '''Build a graph representing a Markov chain from a training corpus

        text: Training text, must be an iterable of 'statements', where a
        sentence is a meaningful grouping of words, e.g. a sentence, tweet,
        source code line etc.
        '''
        # Vertices are words; a directed edge (u, v) records that v appeared
        # right after u somewhere in the corpus. Edge weight = frequency.
        def pair_stream():
            '''Yield per-line iterables of (previous, next) word pairs,
            with START/END sentinels bracketing each statement.'''
            for statement in text:
                tokens = statement.split()
                yield zip([START] + tokens, tokens + [END])

        pair_counts = Counter(chain(*pair_stream()))
        self.graph = DiGraph()
        self.graph.add_weighted_edges_from(
            (prev, nxt, count) for (prev, nxt), count in pair_counts.items())

    def sentence(self):
        '''Generate a 'sentence' from the training data'''
        # Random walk from START, choosing the next word biased by edge
        # weight (frequency in the training text), until END is reached.
        def to_distribution(values):
            '''Scale values so they form a probability distribution.'''
            total = sum(values)
            return [v / total for v in values]

        word = START
        while True:
            successors = self.graph[word]
            candidates = list(successors.keys())
            frequencies = [data['weight'] for data in successors.values()]
            word = choice(candidates, p=to_distribution(frequencies))
            if word == END:
                break
            yield word
def load_data(filename: str) -> DiGraph:
    """Function for loading graph data from specified file

    Each line is expected to look like "node1;node2;weight"; whitespace
    inside node names is stripped out.

    Args:
        filename (str): filename where graph data is located

    Returns:
        DiGraph: It returns ready to go DiGraph instance
    """
    graph = DiGraph()
    # 'with' guarantees the file handle is closed (the original leaked it
    # via open(...).readlines() with no close).
    with open(filename, 'r') as handle:
        for line in handle:
            node1, node2, weight = line.replace("\n", "").split(";")
            node1 = node1.replace(" ", "")
            node2 = node2.replace(" ", "")
            # add_node is a no-op for an existing node, so the original
            # O(n) "not in graph.nodes()" membership checks are unnecessary.
            graph.add_node(node1)
            graph.add_node(node2)
            graph.add_weighted_edges_from([(node1, node2, float(weight))])
    return graph
def build_network(hash_table):
    """Build a weighted DiGraph from the distinct node objects in hash_table.

    Args:
        hash_table: mapping whose values are node objects exposing parallel
            `children` and `children_weights` sequences.

    Returns:
        DiGraph: one vertex per distinct node plus a weighted edge from each
        node to each of its children.
    """
    network = DiGraph()
    # Dedupe values in O(n) while preserving first-seen order; the original
    # used an O(n^2) `value not in nodes` list scan. (Graph nodes must be
    # hashable, so dict keys are safe here.)
    nodes = list(dict.fromkeys(hash_table.values()))
    weighted_edges = []
    for node in nodes:
        network.add_node(node)
        # Pair each child with its weight directly instead of indexing by
        # position; the original also built an unused `weights` list.
        for child, weight in zip(node.children, node.children_weights):
            weighted_edges.append((node, child, weight))
    network.add_weighted_edges_from(weighted_edges)
    return network
def main():
    """Demo: compute an optimal visiting order over a small road graph,
    print the resulting paths and weights, and draw the graph."""
    road_graph = DiGraph(name='Road Graph')
    roads = [
        ('0', 'a', 1),
        ('a', 'c', 1),
        ('c', 'a', 1),
        ('c', '0', 19),
        ('0', 'd', 3),
        ('d', '0', 3),
        ('d', 'b', 7),
        ('b', 'c', 7),
        ('a', '0', 9),
    ]
    road_graph.add_weighted_edges_from(roads)
    src = '0'
    # The original assigned trgs twice; the first value {'a','b','c','d'}
    # was dead code, immediately overwritten. A commented-out call to
    # optimal_order() was removed as dead code as well.
    trgs = {'a', 'd', 'c'}
    best = optimal_order_two(road_graph, src, trgs)
    tot = 0
    for i, path in enumerate(best):
        # path is a (node_sequence, weight) pair
        print('Path {}'.format(i))
        print('\t', end='')
        print('->'.join(path[0]))
        print('\t', end='')
        print("Weight: {:.3f}".format(path[1]))
        tot += path[1]
    print("Total Weight:{:.3f}".format(tot))
    plt.figure()
    nx.draw(road_graph, with_labels=True)
    plt.show()
def _obfuscate_graph(graph: nx.DiGraph, seed: int, **kwargs) -> nx.DiGraph:
    """Obfuscate a solution graph: relabel its nodes with random ids, merge
    in a large decoy graph with heavy edge weights, and hang dead-end
    branches off the solution nodes, hiding the original shortest path.

    Args:
        graph: the solution graph (mutated in place via relabelling).
        seed: seed for the deterministic pseudo-random generator.
        **kwargs: optional 'start' and 'end' node indices whose labels
            must be preserved.

    Returns:
        The obfuscated DiGraph (a composition of the relabelled solution
        graph, the decoy graph, and the dead-end branches).
    """
    log = logging.getLogger('_obfuscate_graph')
    local_random = Random(seed)
    # randomize all target indices, except for the start/end indices
    solution_nodes_count = len(graph.nodes)
    log.debug(f'solution_nodes_count: {solution_nodes_count}')
    try:
        start = int(kwargs['start'])
        end = int(kwargs['end'])
    except KeyError:
        old_solution_indices = tuple(graph.nodes.keys())
    else:
        # BUG FIX: the original tested `x != start or x != end`, which is
        # true for every node, so start/end got relabelled anyway.
        old_solution_indices = tuple(
            filter(lambda x: x != start and x != end, graph.nodes.keys()))
    log.debug(f'old_solution_indices: {old_solution_indices}')
    new_solution_indices = tuple(
        local_random.sample(range(10**6), solution_nodes_count))
    randomize_mapping = dict(zip(old_solution_indices, new_solution_indices))
    log.debug(f'randomize_mapping: {randomize_mapping}')
    nx.relabel_nodes(graph, randomize_mapping, copy=False)
    # create fake graph
    # all of it's edge weights have to be bigger than the number of
    # solution nodes, this guarantees a shortest path in the original nodes
    obf_nodes_count = 1000
    obf_nodes_ids = tuple(local_random.sample(range(10**6), obf_nodes_count))

    def random_key():
        # One random character used as each node's 'key' attribute.
        return local_random.choice(
            string.ascii_lowercase + string.digits + "@:/.:;")

    obf_nodes = tuple((idx, dict(key=random_key())) for idx in obf_nodes_ids)
    log.debug(f'obf_nodes: {obf_nodes}')
    obf_edges_weight_base = solution_nodes_count
    k = obf_nodes_count + solution_nodes_count
    # BUG FIX: tuple(*a, *b) raises TypeError (tuple takes one iterable);
    # splice the two id sequences with a tuple display instead.
    random_target_indices = (*obf_nodes_ids, *new_solution_indices)
    sources = random_target_indices
    # sample of size == population length -> a random permutation of it
    random_targets = local_random.sample(random_target_indices, k)

    def random_weight():
        # NOTE(review): random() can return values near 0, so this does NOT
        # guarantee weights > solution_nodes_count as the comment above
        # requires — confirm the intended lower bound.
        return obf_edges_weight_base * local_random.random() * obf_nodes_count

    random_weights = (dict(weight=random_weight()) for _ in range(k))
    obf_edges = tuple(zip(sources, random_targets, random_weights))
    log.debug(f'obf_edges: {obf_edges}')
    obf_graph = nx.DiGraph()
    obf_graph.add_nodes_from(obf_nodes)
    # BUG FIX: these triples carry an attribute dict, not a numeric weight;
    # add_weighted_edges_from would store weight={'weight': ...}. Use
    # add_edges_from, which accepts (u, v, attr_dict) triples.
    obf_graph.add_edges_from(obf_edges)
    graph: nx.DiGraph = nx.compose(graph, obf_graph)
    # add some nodes/edges to the original ones, that go nowhere,
    # but have low edge weights
    dead_end_node_count = solution_nodes_count * 3
    dead_end_node_ids = tuple(
        local_random.sample(range(10**6, 10**6 + 1_000), dead_end_node_count))
    for idx, source_id in enumerate(new_solution_indices):
        # BUG FIX: each solution node gets its own 3 dead-end nodes; the
        # original indexed dead_end_node_ids[idx + i], which overlaps
        # between consecutive solution nodes.
        new_nodes = ((dead_end_node_ids[idx * 3 + i], dict(key=random_key()))
                     for i in range(3))
        graph.add_nodes_from(new_nodes)
        # NOTE(review): reuses the heavy random_weight(), although the
        # comment above asks for low weights — confirm intent.
        random_weights = (dict(weight=random_weight()) for _ in range(3))
        sources = itertools.repeat(source_id)
        # BUG FIX: the dead-end edges must point at the dead-end nodes just
        # created; the original targeted obf_nodes_ids, leaving the new
        # dead-end nodes disconnected.
        targets = dead_end_node_ids[idx * 3:idx * 3 + 3]
        # attr-dict triples again, so add_edges_from (see above)
        graph.add_edges_from(zip(sources, targets, random_weights))
    log.debug(f'final graph: \n'
              f'nodes:{graph.nodes}\n'
              f'edges:{graph.edges}')
    return graph