def getDegreeValuesOf(g: gt.Graph):
    """Return the minimum, average and maximum total degree of ``g``.

    The total degree of a vertex is ``out_degree + in_degree`` (in_degree
    is 0 for undirected graphs, so this is simply the degree there).

    :param g: graph to inspect.
    :return: dict with keys ``"min_deg"``, ``"avg_deg"``, ``"max_deg"``.
    :raises ValueError: if the graph has no vertices (min/avg/max are
        undefined; the original code died with ZeroDivisionError here).
    """
    # Compute each vertex's total degree exactly once (the original
    # recomputed out_degree() + in_degree() up to three times per vertex
    # and materialized list(g.vertices()) twice just to count them).
    degrees = [v.out_degree() + v.in_degree() for v in g.vertices()]
    if not degrees:
        raise ValueError("graph has no vertices")
    return {
        "min_deg": min(degrees),
        "avg_deg": sum(degrees) / len(degrees),
        "max_deg": max(degrees),
    }
def graph_measures(graph: gt.Graph) -> pd.DataFrame:
    """Collect per-vertex topological measures into one DataFrame.

    Each column is one measure (group/author labels, degrees, pagerank,
    betweenness, closeness, eigenvector, HITS authority/hub, local
    clustering); each row is one vertex. NaNs are replaced with 0.
    """
    _, vp_authority, vp_hub = gt.hits(graph)
    # Property map feeding each output column, in column order.
    prop_by_column = {
        'tp_group': graph.vp.group_name,
        'tp_author': graph.vp.username,
        'tn_degree_in': graph.degree_property_map('in'),
        'tn_degree_out': graph.degree_property_map('out'),
        'tn_degree_total': graph.degree_property_map('total'),
        'tn_pagerank': gt.pagerank(graph),
        'tn_betweenness': gt.betweenness(graph)[0],
        'tn_closeness': gt.closeness(graph),
        'tn_eigenvector': gt.eigenvector(graph)[1],
        'tn_authority': vp_authority,
        'tn_hub': vp_hub,
        'tn_lcc': gt.local_clustering(graph),
    }
    columns = {name: [] for name in prop_by_column}
    for vertex in graph.vertices():
        for name, prop in prop_by_column.items():
            columns[name].append(prop[vertex])
    return pd.DataFrame(columns).fillna(0)
def calc_pagerank(g: gt.Graph) -> List[Tuple[int, str, float]]:
    """
    Return: sorted list of tuples, [(vertex_idx, wk_title, pagerank_value), ....]
    Sorted by pagerank value, highest first.
    """
    labels = g.vp['_graphml_vertex_id']  # same as wktitle
    scores = gt.pagerank(g)
    triples = [(g.vertex_index[v], labels[v], scores[v]) for v in g.vertices()]
    # Descending by the pagerank value (last tuple element); Python's sort
    # is stable, so ties keep vertex order exactly as before.
    triples.sort(key=lambda t: t[-1], reverse=True)
    return triples
class StackGraph(object):
    """Call graph built from a stack-trace log file (Python 2 code:
    note the print statements).

    Vertices hold FunctionWrapper instances ("functions" property) plus a
    display string; edges hold a CallList of (time, depth) samples.
    """

    def __init__(self):
        # Graph is created lazily in load().
        self.g = None

    def load(self, filename):
        """Parse `filename` and build the call graph.

        Expected line format (whitespace separated):
        ``t:<time> d:<depth> f1()->f2()->...`` — only the last two frames
        of each snapshot are added to the graph.
        """
        # Initialize the graph
        self.g = Graph()

        # Each node will store a FunctionWrapper() class instance.
        self.g.vertex_properties["functions"] = self.g.new_vertex_property("object")
        self.g.vertex_properties["display"] = self.g.new_vertex_property("string")

        # Each edge will store a [ ..tbd.. ] .
        self.g.edge_properties["calls"] = self.g.new_edge_property("object")

        # Load the log file and build the graph
        # NOTE(review): the file handle is never closed; a `with open(...)`
        # would be safer.
        i = 0
        f = open(filename, "rb")
        for line in f:
            i += 1
            try:
                # Skip any informational lines
                if "*" in line:
                    continue

                # Extract a call stack snapshot
                words = line.split()
                time = words[0][2:]   # strip the 2-char "t:"-style prefix
                depth = words[1][2:]  # strip the 2-char "d:"-style prefix
                stack = [FunctionWrapper(instring=item) for item in words[2].split("->")]

                # Add the top 2 functions to the graph, if necessary. Format: f1()->f2()
                f1, f2 = stack[-2], stack[-1]
                v1, v2 = None, None

                # Search for the vertices (linear scan over all vertices;
                # O(V) per input line)
                for v in self.g.vertices():
                    if self.g.vp.functions[v] == f1:
                        v1 = v
                    if self.g.vp.functions[v] == f2:
                        v2 = v
                    # NOTE(review): `is not None` would be the idiomatic test
                    if v1 != None and v2 != None:
                        break

                # Add new vertices if needed
                if v1 == None:
                    v1 = self.g.add_vertex()
                    self.g.vp.functions[v1] = f1
                    self.g.vp.display[v1] = f1.graphDisplayString()
                if v2 == None:
                    v2 = self.g.add_vertex()
                    self.g.vp.functions[v2] = f2
                    self.g.vp.display[v2] = f2.graphDisplayString()

                # Add the edge if necessary, and then add data to it
                # NOTE(review): when the edge already exists, `e` still refers
                # to the edge from a *previous* iteration (or is unbound on the
                # first line) — this looks like it should be
                # `e = self.g.edge(v1, v2)` in an else branch. Confirm intent.
                if not self.g.edge(v1, v2):
                    e = self.g.add_edge(v1, v2)
                    self.g.ep.calls[e] = CallList(v1, v2)
                self.g.ep.calls[e].addCall(time, depth)
            except Exception as e:
                print "Exception on line", i, ":", e
                print [str(x) for x in stack]
                exit()
def __init_properties(g: gt.Graph):
    """Attach the per-vertex "opinion" state and its display colour to ``g``.

    Adds an int vertex property "opinion" (initialised to 0) and a
    "vector<double>" vertex property "opinion_color" set to the colour for
    opinion 0 on every vertex. Returns the same graph.
    """
    # Integer opinion per vertex, default 0.
    g.vertex_properties["opinion"] = g.new_vertex_property("int", 0)
    # RGBA colour used for drawing.
    colors = g.new_vertex_property("vector<double>")
    g.vertex_properties["opinion_color"] = colors
    for vertex in g.vertices():
        # __color_map(0) is called per vertex, exactly as before.
        colors[vertex] = __color_map(0)
    return g
def get_pagerank_values(self):
    """Yield ``(qid, pagerank)`` pairs for every vertex of the graph built
    from ``self.__v.get_graph_edges()``.

    Build time and pagerank time are logged separately.
    """
    t0 = time.time()
    logger.info('Started call to get_pagerank')
    graph = Graph()
    # hashed ids come back as a vertex property mapping vertex -> qid
    qid_of = graph.add_edge_list(self.__v.get_graph_edges(),
                                 hashed=True,
                                 hash_type='int')
    logger.info('Delta time to build graph: {}s'.format(
        timedelta(seconds=(time.time() - t0))))
    t0 = time.time()
    scores = pagerank(graph)
    logger.info('Delta time to compute pagerank: {}s'.format(
        timedelta(seconds=(time.time() - t0))))
    for vertex in graph.vertices():
        yield qid_of[vertex], scores[vertex]
def rysuj_graf_wejsciowy(g, output=None, size=(600, 600), bez_napisow=False):
    """Draw a copy of the input graph ``g``.

    Each vertex is labelled "<vertex>: <liczba_kolorow>" unless
    ``bez_napisow`` (no-labels) is set, in which case labels are omitted.
    """
    gx = Graph(g)
    labels = gx.new_vertex_property('string')
    gx.vertex_properties['wyswietlany_tekst'] = labels
    for v in gx.vertices():
        labels[v] = v.__str__() + ': ' + \
            str(gx.vertex_properties['liczba_kolorow'][v])
    draw_kwargs = {
        'bg_color': [255., 255., 255., 1],
        'output_size': size,
        'output': output,
    }
    if not bez_napisow:
        draw_kwargs['vertex_text'] = labels
    graph_draw(gx, **draw_kwargs)
def rysuj_graf_wejsciowy(g, output=None, size=(600, 600), bez_napisow=False):
    """Draw a copy of the input graph ``g`` (duplicate of the variant above).

    Vertex labels are "<vertex>: <liczba_kolorow>"; when ``bez_napisow``
    is true the labels are left out of the rendering.
    """
    gx = Graph(g)
    text_prop = gx.new_vertex_property('string')
    gx.vertex_properties['wyswietlany_tekst'] = text_prop
    for vertex in gx.vertices():
        color_count = gx.vertex_properties['liczba_kolorow'][vertex]
        text_prop[vertex] = vertex.__str__() + ': ' + str(color_count)
    if bez_napisow:
        graph_draw(gx,
                   bg_color=[255., 255., 255., 1],
                   output_size=size,
                   output=output)
    else:
        graph_draw(gx,
                   vertex_text=text_prop,
                   bg_color=[255., 255., 255., 1],
                   output_size=size,
                   output=output)
class PointerProvenancePlot(Plot):
    """ Base class for plots using the pointer provenance graph. """

    def __init__(self, *args, **kwargs):
        super(PointerProvenancePlot, self).__init__(*args, **kwargs)
        self._cached_dataset_valid = False
        """Tells whether we need to rebuild the dataset when caching."""

    def init_parser(self, dataset, tracefile):
        # When a cached graph exists the parser is never invoked.
        if self.caching and os.path.exists(self._get_cache_file()):
            # if caching we will never use this
            return None
        return PointerProvenanceParser(dataset, tracefile)

    def init_dataset(self):
        """Create the empty directed provenance graph with a per-vertex
        "data" object property."""
        logger.debug("Init provenance graph for %s", self.tracefile)
        self.dataset = Graph(directed=True)
        vdata = self.dataset.new_vertex_property("object")
        self.dataset.vp["data"] = vdata
        return self.dataset

    def _get_cache_file(self):
        # Cache path derived from the trace file name.
        return self.tracefile + "_provenance_plot.gt"

    def build_dataset(self):
        """ Build the provenance tree.

        Three filtering passes over the parsed graph:
        1. mask kernel-mode nodes and null capabilities,
        2. merge cfromptr -> csetbounds parent/child pairs,
        3. mask remaining cfromptr nodes.
        Each pass applies a vertex filter and re-copies the mask so the
        next pass operates on the already-filtered view.
        """
        if self.caching:
            try:
                logger.debug("Load cached provenance graph")
                self.dataset = load_graph(self._get_cache_file())
            except IOError:
                # No cache yet: parse the trace and save the result.
                self.parser.parse()
                self.dataset.save(self._get_cache_file())
        else:
            self.parser.parse()

        num_nodes = self.dataset.num_vertices()
        logger.debug("Total nodes %d", num_nodes)
        vertex_mask = self.dataset.new_vertex_property("bool")

        # Pass 1: mark kernel-mode nodes and null capabilities for removal.
        progress = ProgressPrinter(num_nodes, desc="Search kernel nodes")
        for node in self.dataset.vertices():
            # remove null capabilities
            # remove operations in kernel mode
            vertex_data = self.dataset.vp.data
            node_data = vertex_data[node]
            if ((node_data.pc != 0 and node_data.is_kernel) or
                    (node_data.cap.length == 0 and node_data.cap.base == 0)):
                vertex_mask[node] = True
            progress.advance()
        progress.finish()
        # inverted=True: keep the vertices that were NOT masked.
        self.dataset.set_vertex_filter(vertex_mask, inverted=True)
        vertex_mask = self.dataset.copy_property(vertex_mask)

        num_nodes = self.dataset.num_vertices()
        logger.debug("Filtered kernel nodes, remaining %d", num_nodes)
        # Pass 2: collapse cfromptr -> csetbounds chains into one node.
        progress = ProgressPrinter(
            num_nodes, desc="Merge (cfromptr + csetbounds) sequences")
        for node in self.dataset.vertices():
            progress.advance()
            # merge cfromptr -> csetbounds subtrees
            num_parents = node.in_degree()
            if num_parents == 0:
                # root node
                continue
            elif num_parents > 1:
                logger.error("Found node with more than a single parent %s",
                             node)
                raise RuntimeError("Too many parents for a node")
            parent = next(node.in_neighbours())
            parent_data = self.dataset.vp.data[parent]
            node_data = self.dataset.vp.data[node]
            if (parent_data.origin == CheriNodeOrigin.FROMPTR and
                    node_data.origin == CheriNodeOrigin.SETBOUNDS):
                # the child must be unique to avoid complex logic
                # when merging, it may be desirable to do so with
                # more complex traces
                node_data.origin = CheriNodeOrigin.PTR_SETBOUNDS
                if parent.in_degree() == 1:
                    # Splice the node onto the grandparent, mask the parent.
                    next_parent = next(parent.in_neighbours())
                    vertex_mask[parent] = True
                    self.dataset.add_edge(next_parent, node)
                elif parent.in_degree() == 0:
                    vertex_mask[parent] = True
                else:
                    logger.error(
                        "Found node with more than a single parent %s",
                        parent)
                    raise RuntimeError("Too many parents for a node")
        progress.finish()
        self.dataset.set_vertex_filter(vertex_mask, inverted=True)
        vertex_mask = self.dataset.copy_property(vertex_mask)

        num_nodes = self.dataset.num_vertices()
        logger.debug("Merged (cfromptr + csetbounds), remaining %d",
                     num_nodes)
        # Pass 3: mask every remaining FROMPTR node.
        progress = ProgressPrinter(num_nodes, desc="Find short-lived cfromptr")
        for node in self.dataset.vertices():
            progress.advance()
            node_data = self.dataset.vp.data[node]
            if node_data.origin == CheriNodeOrigin.FROMPTR:
                vertex_mask[node] = True
            # if (node_data.origin == CheriNodeOrigin.FROMPTR and
            #         len(node_data.address) == 0 and
            #         len(node_data.deref["load"]) == 0 and
            #         len(node_data.deref["load"]) == 0):
            #     # remove cfromptr that are never stored or used in
            #     # a dereference
            #     remove_list.append(node)
        progress.finish()
        self.dataset.set_vertex_filter(vertex_mask, inverted=True)
# Read seed word lists: one word per line, skipping blanks.
# NOTE(review): '\uefef' looks like a typo for '\ufeff' (the UTF-8 BOM) —
# as written the intended BOM stripping does not happen; confirm.
for s in pos_file:
    s = s.strip(' \n\uefef')
    if len(s) != 0:
        positive.append(s)
for s in neg_file:
    s = s.strip(' \n\uefef')
    if len(s) != 0:
        negative.append(s)

# Per-vertex partition label (-1 negative / 0 neutral / +1 positive)
ver_part = pairs_graph.new_vertex_property("int")
not_deleted = pairs_graph.new_vertex_property("bool")  # not deleted prop
pos_vert = []
neg_vert = []
neu_vert = []

# Default every vertex to neutral and alive.
for v in pairs_graph.vertices():
    ver_part[v] = 0
    not_deleted[v] = True
# Mark the seed words; word_dict maps word -> vertex index.
for w in positive:
    ver_part[pairs_graph.vertex(word_dict[w])] = 1
for w in negative:
    ver_part[pairs_graph.vertex(word_dict[w])] = -1

# new property
pairs_graph.vertex_properties["part"] = ver_part
pairs_graph.vertex_properties["notdeleted"] = not_deleted
print("graph ready")
print("-------------------------------------------------")
# starting point:
class Network:
    """graph-tool network of teams (vertices) and match results (edges).

    Built either from ``nodes_info``/``links_info`` dictionaries or loaded
    from a previously saved file. Vertices carry "name"/"id" properties,
    edges carry an integer "weight"; pagerank and degree centrality are
    precomputed as vertex properties.
    """

    def __init__(self, nodes_info=None, links_info=None, file_name=None):
        self.g = Graph()
        if nodes_info and links_info:
            self.nodes_info = nodes_info
            self.links_info = links_info
            self.g.vertex_properties["name"] = self.g.new_vertex_property(
                'string')
            self.g.vertex_properties["id"] = self.g.new_vertex_property(
                'int32_t')
            self.g.edge_properties["weight"] = self.g.new_edge_property(
                'int32_t')
            self.create_network()
            # Pagerank weighted by the edge weights computed above.
            self.g.vertex_properties["pagerank"] = pagerank(
                self.g, weight=self.g.edge_properties["weight"])
            self.g.vertex_properties[
                "degree_centrality"] = self.degree_centrality()
        elif file_name:
            self.load_network(file_name)

    def create_network(self):
        """Populate the graph from self.nodes_info and self.links_info."""
        # Add Nodes
        for node in self.nodes_info:
            self.add_n(node)
        # Add Links
        for link in self.links_info:
            n_loser = 0
            n_winner = 0
            loser = link['loser']
            winner = link['winner']
            weight = link['rounds']
            # NOTE(review): linear scan over the id property to translate a
            # team id into a vertex index — O(V) per link; a dict would do.
            for team_id in self.g.vertex_properties.id:
                if loser == team_id:
                    break
                n_loser += 1
            for team_id in self.g.vertex_properties.id:
                if winner == team_id:
                    break
                n_winner += 1
            # Edge weight scales inversely with the number of rounds.
            self.add_l(n_loser, n_winner, 16 / weight * 100)

    def load_network(self, file_name):
        # Load a saved graph ("gt" binary format) from ../../network-graphs.
        new_file_name = '..' + sep + '..' + sep + 'network-graphs' + sep + file_name
        self.g.load(new_file_name, fmt="gt")

    def get_normalized_pagerank(self):
        """Return each vertex's pagerank divided by the maximum pagerank."""
        max_pgr = 0
        for pgr in self.g.vertex_properties.pagerank:
            if pgr > max_pgr:
                max_pgr = pgr
        return [
            self.g.vertex_properties.pagerank[v] / max_pgr
            for v in self.g.vertices()
        ]

    def add_n(self, node_info):
        # Add one vertex with its external id and display name.
        n = self.g.add_vertex()
        self.g.vertex_properties.id[n] = node_info['id']
        self.g.vertex_properties.name[n] = node_info['Team_Name']

    def add_l(self, loser, winner, weight):
        # Directed edge loser -> winner carrying the given weight.
        n1 = self.g.vertex(loser)
        n2 = self.g.vertex(winner)
        l = self.g.add_edge(n1, n2)
        self.g.edge_properties.weight[l] = weight

    def draw(self, output_file, fmt):
        graph_draw(self.g,
                   vertex_text=self.g.vertex_index,
                   output=output_file,
                   fmt=fmt)

    def save_network(self, file_name):
        """Save the graph under ../../network-graphs.

        Returns True on success, False on any failure.
        """
        try:
            new_file_name = '..' + sep + '..' + sep + 'network-graphs' + sep + file_name
            self.g.save(new_file_name, fmt="gt")
        except:
            return False
        return True

    # --- simple property-map accessors -----------------------------------
    def vp_pagerank(self):
        return self.g.vertex_properties.pagerank

    def vp_degree_cent(self):
        return self.g.vertex_properties.degree_centrality

    def vp_name(self):
        return self.g.vertex_properties.name

    def vp_id(self):
        return self.g.vertex_properties.id

    def ep_weight(self):
        return self.g.edge_properties.weight

    # Computes the basic characteristics of the network
    def get_basic_info(self):
        """Return density, mean degree and clustering coefficient.

        On any failure an empty dict is returned (NOTE(review): the bare
        except silently hides real errors).
        """
        info = {}
        try:
            n_vertices = self.g.num_vertices()
            n_edges = self.g.num_edges()
            density = n_edges / ((n_vertices * (n_vertices - 1)) / 2)
            mean_degree = (2 * n_edges) / n_vertices
            # Clustering coefficient computed "by hand": the mean of the
            # local coefficients produced by graph-tool.
            local_cc = local_clustering(self.g)
            clustering_coef = fsum(
                [local_cc[x] for x in self.g.vertices() if local_cc[x] != 0.0])
            clustering_coef /= n_vertices
            info["Número de times"] = n_vertices
            info["Número de confrontos"] = n_edges
            info["Densidade"] = density
            info["Grau médio"] = mean_degree
            info["Coeficiente de Clusterização"] = clustering_coef
        except:
            info.clear()
        return info

    def degree_centrality(self):
        """In-degree of each vertex divided by (V - 1), as a vertex property."""
        degree_centrality = self.g.new_vertex_property('float')
        for v in self.g.vertices():
            degree_centrality[v] = v.in_degree() / (self.g.num_vertices() - 1)
        return degree_centrality

    # Computes the degree distribution of the network
    def degree_distribution(self):
        """Return {in_degree: fraction of vertices with that in-degree}."""
        degree_dist = {}
        try:
            for v in self.g.vertices():
                if v.in_degree() not in degree_dist.keys():
                    degree_dist[v.in_degree()] = 1
                else:
                    degree_dist[v.in_degree()] += 1
            for k in degree_dist.keys():
                degree_dist[k] /= self.g.num_vertices()
        except:
            degree_dist.clear()
        return degree_dist
class SentenceGraph():
    """graph-tool backed graph of a sentence.

    Vertices are (word, part-of-speech) pairs; edges carry one of several
    typed relations — sentence adjacency, definition, parsed dependency,
    inter-sentence — plus a display colour and one boolean filter map per
    edge type so each relation can be viewed in isolation.
    """

    def __init__(self, sentence, directed=False, graph=None):
        # Create a SentenceGraph from an existing graph tool graph
        if graph is not None:
            self.sentence_graph = graph
            return

        # Create a new SentenceGraph from scratch
        self.sentence_graph = Graph(directed=directed)

        # Graph properties
        sentence_property = self.sentence_graph.new_graph_property("string", sentence)
        self.sentence_graph.graph_properties[SENTENCE_KEY] = sentence_property

        # Vertex properties
        word_property = self.sentence_graph.new_vertex_property("string")
        part_of_speech_property = self.sentence_graph.new_vertex_property("string")
        vertex_color_property = self.sentence_graph.new_vertex_property("vector<double>")
        self.sentence_graph.vertex_properties[WORD_KEY] = word_property
        self.sentence_graph.vertex_properties[PART_OF_SPEECH_KEY] = part_of_speech_property
        self.sentence_graph.vertex_properties[VERTEX_COLOR_KEY] = vertex_color_property

        # Edge properties
        sentence_edge_property = self.sentence_graph.new_edge_property("string")
        definition_edge_property = self.sentence_graph.new_edge_property("string")
        parsed_dependencies_edge_property = self.sentence_graph.new_edge_property("string")
        inter_sentence_edge_property = self.sentence_graph.new_edge_property("string")
        edge_color_property = self.sentence_graph.new_edge_property("vector<double>")
        dependency_edge_property = self.sentence_graph.new_edge_property("string")
        self.sentence_graph.edge_properties[SENTENCE_EDGE_KEY] = sentence_edge_property
        self.sentence_graph.edge_properties[DEFINITION_EDGE_KEY] = definition_edge_property
        self.sentence_graph.edge_properties[PARSED_DEPENDENCIES_EDGE_KEY] = parsed_dependencies_edge_property
        self.sentence_graph.edge_properties[INTER_SENTENCE_EDGE_KEY] = inter_sentence_edge_property
        self.sentence_graph.edge_properties[EDGE_COLOR_KEY] = edge_color_property
        self.sentence_graph.edge_properties[PARSE_TREE_DEPENDENCY_VALUE_KEY] = dependency_edge_property

        # Edge filter properties (one boolean map per edge type)
        definition_edge_filter_property = self.sentence_graph.new_edge_property("bool")
        inter_sentence_edge_filter_property = self.sentence_graph.new_edge_property("bool")
        parsed_dependencies_edge_filter_property = self.sentence_graph.new_edge_property("bool")
        sentence_edge_filter_property = self.sentence_graph.new_edge_property("bool")
        self.sentence_graph.edge_properties[FILTER_DEFINITION_EDGE_KEY] = definition_edge_filter_property
        self.sentence_graph.edge_properties[FILTER_INTER_SENTENCE_EDGE_KEY] = inter_sentence_edge_filter_property
        self.sentence_graph.edge_properties[FILTER_PARSED_DEPENDENCIES_EDGE_KEY] = parsed_dependencies_edge_filter_property
        self.sentence_graph.edge_properties[FILTER_SENTENCE_EDGE_KEY] = sentence_edge_filter_property

    def get_sentence(self):
        """Return the raw sentence string this graph was built from."""
        return self.sentence_graph.graph_properties[SENTENCE_KEY]

    def add_vertex(self, word, pos):
        """Add a vertex for (word, pos) and return it (no dedup check here)."""
        word_pos_tuple = (word, pos)  # NOTE(review): unused local
        # Create vertex, set properties
        word_vertex = self.sentence_graph.add_vertex()
        self.sentence_graph.vertex_properties[WORD_KEY][word_vertex] = word
        self.sentence_graph.vertex_properties[PART_OF_SPEECH_KEY][word_vertex] = pos
        # Default colour: blue.
        self.sentence_graph.vertex_properties[VERTEX_COLOR_KEY][word_vertex] = [0, 0, 1, 1]
        return word_vertex

    # NOTE(review): mutable default argument `color=[1, 0, 0, 1]` is shared
    # across calls — harmless while never mutated, but fragile.
    def set_vertex_color_from_word(self, word, pos, color=[1, 0, 0, 1]):
        word_vertex = self.get_vertex(word, pos)
        return self.set_vertex_color(word_vertex, color)

    def set_vertex_color(self, vertex, color=[1, 0, 0, 1]):
        self.sentence_graph.vertex_properties[VERTEX_COLOR_KEY][vertex] = color

    def set_vertices_color(self, vertices, color=[1, 0, 0, 1]):
        for vertex in vertices:
            self.set_vertex_color(vertex, color)

    def add_sentence_edge_from_words(self, word1, pos1, word2, pos2):
        return self.add_sentence_edge(self.get_vertex(word1, pos1),
                                      self.get_vertex(word2, pos2))

    def add_sentence_edge(self, word_vertex1, word_vertex2):
        """Add a sentence-adjacency edge; only its sentence filter is True."""
        sentence_edge = self.sentence_graph.add_edge(word_vertex1, word_vertex2, add_missing=False)
        # NOTE(review): this stores the Edge object itself into a "string"
        # property — presumably a tag marking the edge type; confirm intent.
        self.sentence_graph.edge_properties[SENTENCE_EDGE_KEY][sentence_edge] = sentence_edge
        # Green
        self.sentence_graph.edge_properties[EDGE_COLOR_KEY][sentence_edge] = [0.2, 1, 0.2, 1]
        self._set_edge_to_zero_in_all_filters(sentence_edge)
        self.sentence_graph.edge_properties[FILTER_SENTENCE_EDGE_KEY][sentence_edge] = True
        return sentence_edge

    def add_sentence_edges(self, sentence_vertices):
        # Chain consecutive vertices of the sentence together.
        for i in range(1, len(sentence_vertices)):
            self.add_sentence_edge(sentence_vertices[i - 1], sentence_vertices[i])

    def add_parsed_dependency_edge(self, word_vertex1, word_vertex2, dependency_relationship):
        """Add a dependency-parse edge labelled with its relationship."""
        parsed_dependency_edge = self.sentence_graph.add_edge(word_vertex1, word_vertex2, add_missing=False)
        self.sentence_graph.edge_properties[PARSED_DEPENDENCIES_EDGE_KEY][parsed_dependency_edge] = parsed_dependency_edge
        self.sentence_graph.edge_properties[PARSE_TREE_DEPENDENCY_VALUE_KEY][parsed_dependency_edge] = dependency_relationship
        # Blue
        self.sentence_graph.edge_properties[EDGE_COLOR_KEY][parsed_dependency_edge] = [0, 0, 1, 1]
        self._set_edge_to_zero_in_all_filters(parsed_dependency_edge)
        self.sentence_graph.edge_properties[FILTER_PARSED_DEPENDENCIES_EDGE_KEY][parsed_dependency_edge] = True
        return parsed_dependency_edge

    def add_parsed_dependency_edge_from_words(self, word1, pos1, word2, pos2, dependency_relationship):
        return self.add_parsed_dependency_edge(
            self.get_vertex(word1, pos1),
            self.get_vertex(word2, pos2),
            dependency_relationship)

    def add_definition_edge_from_words(self, word, pos, definition_word, definition_pos):
        return self.add_definition_edge(
            self.get_vertex(word, pos),
            self.get_vertex(definition_word, definition_pos))

    def _set_edge_to_zero_in_all_filters(self, edge):
        # Reset every per-type filter flag before the caller sets its own.
        self.sentence_graph.edge_properties[FILTER_DEFINITION_EDGE_KEY][edge] = False
        self.sentence_graph.edge_properties[FILTER_INTER_SENTENCE_EDGE_KEY][edge] = False
        self.sentence_graph.edge_properties[FILTER_PARSED_DEPENDENCIES_EDGE_KEY][edge] = False
        self.sentence_graph.edge_properties[FILTER_SENTENCE_EDGE_KEY][edge] = False

    def add_definition_edge(self, word_vertex, definition_word_vertex):
        """Add a word -> definition-word edge; only its definition filter is True."""
        definition_edge = self.sentence_graph.add_edge(word_vertex, definition_word_vertex, add_missing=False)
        self.sentence_graph.edge_properties[DEFINITION_EDGE_KEY][definition_edge] = definition_edge
        # Red
        self.sentence_graph.edge_properties[EDGE_COLOR_KEY][definition_edge] = [1, 0.1, 0.1, 1]
        self._set_edge_to_zero_in_all_filters(definition_edge)
        self.sentence_graph.edge_properties[FILTER_DEFINITION_EDGE_KEY][definition_edge] = True
        return definition_edge

    def add_definition_edges(self, word_vertex, definition_word_vertices):
        # Add edges from the word_vertex to all definition vertices and set
        # the definition edge property on each edge
        for definition_word_vertex in definition_word_vertices:
            self.add_definition_edge(word_vertex, definition_word_vertex)
        return self

    def add_inter_sentence_edge(self, sentence1_word_vertex, sentence2_word_vertex):
        """Add an edge that links words across two different sentences."""
        inter_sentence_edge = self.sentence_graph.add_edge(sentence1_word_vertex, sentence2_word_vertex, add_missing=False)
        self.sentence_graph.edge_properties[INTER_SENTENCE_EDGE_KEY][inter_sentence_edge] = inter_sentence_edge
        # Pink
        self.sentence_graph.edge_properties[EDGE_COLOR_KEY][inter_sentence_edge] = [1, 0.05, 1, 1]
        self._set_edge_to_zero_in_all_filters(inter_sentence_edge)
        self.sentence_graph.edge_properties[FILTER_INTER_SENTENCE_EDGE_KEY][inter_sentence_edge] = True
        return inter_sentence_edge

    def add_inter_sentence_edge_from_words(self, word1, pos1, word2, pos2):
        return self.add_inter_sentence_edge(
            self.get_vertex(word1, pos1),
            self.get_vertex(word2, pos2))

    def remove_vertex_by_word(self, word, pos):
        self.remove_vertex(self.get_vertex(word, pos))

    def remove_vertex(self, vertex):
        # NOTE(review): word/pos are read but never used.
        word = self.sentence_graph.vertex_properties[WORD_KEY][vertex]
        pos = self.sentence_graph.vertex_properties[PART_OF_SPEECH_KEY][vertex]
        self.sentence_graph.remove_vertex(vertex)

    def remove_edge(self, word1, pos1, word2, pos2):
        self.sentence_graph.remove_edge(self.get_edge(word1, pos1, word2, pos2))

    def contains(self, word, pos):
        return self.get_vertex(word, pos) is not None

    def get_vertex(self, word, pos):
        """Linear search for the vertex matching (word, pos); None if absent."""
        for vertex in self.sentence_graph.vertices():
            # NOTE(review): bare except/pass silently skips vertices whose
            # property lookup fails — confirm this is intentional.
            try:
                vertex_word = self.sentence_graph.vertex_properties[WORD_KEY][vertex]
                vertex_pos = self.sentence_graph.vertex_properties[PART_OF_SPEECH_KEY][vertex]
                if vertex_word == word and vertex_pos == pos:
                    return vertex
            except:
                pass
        return None

    def get_word_pos_tuple(self, vertex):
        return self.sentence_graph.vertex_properties[WORD_KEY][vertex],\
            self.sentence_graph.vertex_properties[PART_OF_SPEECH_KEY][vertex]

    def get_word_pos_tuple_by_index(self, index):
        return self.get_word_pos_tuple(self.get_vertex_by_index(index))

    def get_vertex_by_index(self, index):
        return self.sentence_graph.vertex(index)

    def get_vertices_iterator(self):
        return self.sentence_graph.vertices()

    def get_vertices(self):
        return [x for x in self.sentence_graph.vertices()]

    def get_vertex_out_neighbor_word_pos_tuples(self, vertex):
        return [self.get_word_pos_tuple(neighbor_vertex)
                for neighbor_vertex in self.get_vertex_out_neighbors(vertex)]

    def get_vertex_in_neighbor_word_pos_tuples(self, vertex):
        return [self.get_word_pos_tuple(neighbor_vertex)
                for neighbor_vertex in self.get_vertex_in_neighbors(vertex)]

    def get_vertex_out_neighbors(self, vertex):
        return [neighbor_vertex for neighbor_vertex in vertex.out_neighbours()]

    def get_vertex_in_neighbors(self, vertex):
        return [neighbor_vertex for neighbor_vertex in vertex.in_neighbours()]

    def get_word_pos_tuples(self):
        return [self.get_word_pos_tuple(v) for v in self.sentence_graph.vertices()]

    def get_num_vertices(self):
        return self.sentence_graph.num_vertices()

    def get_num_edges(self):
        return self.sentence_graph.num_edges()

    def get_edge(self, word1, pos1, word2, pos2):
        """Return the edge between the two (word, pos) vertices, or None."""
        vertex_1 = self.get_vertex(word1, pos1)
        vertex_2 = self.get_vertex(word2, pos2)
        return None\
            if vertex_1 is None or vertex_2 is None\
            else self.sentence_graph.edge(vertex_1, vertex_2)

    def get_edges_iterator(self):
        return self.sentence_graph.edges()

    def get_edges(self):
        return [x for x in self.sentence_graph.edges()]

    # --- edge filters: restrict the graph view to a single edge type ------
    def set_definition_edge_filter(self, inverted=False):
        self.sentence_graph.set_edge_filter(
            self.sentence_graph.edge_properties[FILTER_DEFINITION_EDGE_KEY],
            inverted=inverted)

    def set_inter_sentence_edge_filter(self, inverted=False):
        self.sentence_graph.set_edge_filter(
            self.sentence_graph.edge_properties[FILTER_INTER_SENTENCE_EDGE_KEY],
            inverted=inverted)

    def set_parsed_dependency_edge_filter(self, inverted=False):
        # NOTE(review): `self.sentence_edge` looks like a typo for
        # `self.sentence_graph` — as written this raises AttributeError.
        self.sentence_edge.set_edge_filter(
            self.sentence_graph.edge_properties[FILTER_PARSED_DEPENDENCIES_EDGE_KEY],
            inverted=inverted)

    def set_sentence_edge_filter(self, inverted=False):
        self.sentence_graph.set_edge_filter(
            self.sentence_graph.edge_properties[FILTER_SENTENCE_EDGE_KEY],
            inverted=inverted)

    def clear_filters(self):
        self.sentence_graph.clear_filters()

    def get_definition_edges(self):
        # NOTE(review): membership test against a PropertyMap is unusual —
        # verify this actually selects definition edges.
        return filter(lambda x: x in self.get_definition_edge_properties(),
                      self.get_edges())

    # --- property-map accessors -------------------------------------------
    def get_word_vertex_properties(self):
        return self.sentence_graph.vertex_properties[WORD_KEY]

    def get_pos_vertex_properties(self):
        return self.sentence_graph.vertex_properties[PART_OF_SPEECH_KEY]

    def get_color_vertex_properties(self):
        return self.sentence_graph.vertex_properties[VERTEX_COLOR_KEY]

    def get_sentence_edge_properties(self):
        return self.sentence_graph.edge_properties[SENTENCE_EDGE_KEY]

    def get_definition_edge_properties(self):
        return self.sentence_graph.edge_properties[DEFINITION_EDGE_KEY]

    def get_inter_sentence_edge_properties(self):
        return self.sentence_graph.edge_properties[INTER_SENTENCE_EDGE_KEY]

    def get_color_edge_properties(self):
        return self.sentence_graph.edge_properties[EDGE_COLOR_KEY]

    def get_vertex_index(self, vertex):
        return self.sentence_graph.vertex_index[vertex]

    def get_degree_properties(self, degree_type):
        return self.sentence_graph.degree_property_map(degree_type)

    def get_graph(self):
        return self.sentence_graph

    def copy(self):
        """Deep-copy: new SentenceGraph wrapping a copy of the underlying graph."""
        return SentenceGraph(
            sentence=self.sentence_graph.graph_properties[SENTENCE_KEY],
            graph=self.sentence_graph.copy())
def __absorptionStateReached(g: gt.Graph):
    """Return True when no vertex still holds the neutral opinion 0,
    i.e. the opinion dynamics have reached an absorption state."""
    opinions = g.vertex_properties["opinion"]
    return all(opinions[vertex] != 0 for vertex in g.vertices())
class TTC(AbstractMatchingAlgorithm):
    """This class searches for cycles where each student gets his best option.

    This takes a list of students, a list of schools and a ruleset
    (which is used to calculate priorities). This works by generating a
    directed graph, where each student points at his best option, and each
    school points at the student (or students) with the highest priority.
    """

    EDGE_WIDTH_SIZE_FACTOR = 700
    """Size factor (in the image) of each edge that is not part of the main cycle."""
    EDGE_WIDTH_CYCLE_SIZE = 10
    """Size factor (in the image) of each edge that takes part of the main cycle."""

    def __init__(self, generate_images=False, images_folder="TTC_images",
                 use_longest_cycle=True):
        """Initializes the algorithm.

        :param generate_images: If the process generates images or not.
        :type generate_images: bool
        :param images_folder: Where images are saved.
        :type images_folder: str
        :param use_longest_cycle: If the algorithm applies the longest cycle
            available, or the first one encountered.
        :type use_longest_cycle: bool
        """
        self.generate_images = generate_images
        self.images_folder = images_folder
        # NOTE(review): use_longest_cycle is stored but not read anywhere in
        # this class as shown — confirm it is consumed elsewhere.
        self.use_longest_cycle = use_longest_cycle
        self.__graph = None
        self.__vertices_by_school_id = None
        self.__vertices_by_student_id = None
        self.__students_by_id = None
        self.__schools_by_id = None
        self.__entity_id = None
        self.__entity_type = None

    def reset_variables(self):
        """Resets all variables."""
        self.__graph = Graph()
        self.__vertices_by_school_id = {}
        self.__vertices_by_student_id = {}
        self.__students_by_id = {}
        self.__schools_by_id = {}
        # Per-vertex external id and entity type ("st" student / "sc" school).
        self.__entity_id = self.__graph.new_vertex_property("int")
        self.__graph.vertex_properties["entity_id"] = self.__entity_id
        self.__entity_type = self.__graph.new_vertex_property("string")
        self.__graph.vertex_properties["entity_type"] = self.__entity_type

    def run(self, students, schools, ruleset):
        """Runs the algorithm.

        First it creates the graph, then it lists all the cycles available,
        after that it selects one cycle, and applies it. Finally, it starts
        the process again.

        :param students: List of students.
        :type students: list
        :param schools: List of school.
        :type schools: list
        :param ruleset: Set of rules used.
        :type ruleset: Ruleset
        """
        self.reset_variables()
        can_improve = True
        iteration_counter = 1
        while can_improve:
            self.structure_graph(students, schools)
            cycles = [c for c in all_circuits(self.__graph, unique=True)]
            # print("CYCLES", cycles, "iteration", iteration_counter)
            cycle_edges = []
            if cycles:
                for cycle in cycles:
                    # ToDo: Possible optimisation: apply all disjoint cycles at once
                    for current_v_index in range(len(cycle)):
                        # Walk the cycle pairwise, wrapping at the end.
                        next_v_index = (current_v_index + 1) % len(cycle)
                        from_v = self.__graph.vertex(cycle[current_v_index])
                        target_v = self.__graph.vertex(cycle[next_v_index])
                        edge = self.__graph.edge(from_v, target_v)
                        cycle_edges.append(edge)
                        # Student -> school edges inside a cycle are the
                        # assignments to apply.
                        if self.__entity_type[from_v] == "st":
                            sel_student = self.__students_by_id[
                                self.__entity_id[from_v]]
                            sel_school = self.__schools_by_id[
                                self.__entity_id[target_v]]
                            sel_student.assigned_school = sel_school
                            sel_school.assignation.append(sel_student)
                            # vertex_school_target_id = self.__entity_id[target_v]
                            # vertex_school_target = self.__schools_by_id[vertex_school_target_id]
                            # print("CYCLE: Student", sel_student.id, "School", sel_school.id)
                            # print("VVV: School {} -> School {} (Student {}) ".format(self.__entity_id[from_v], self.__entity_id[target_v], self.__entity_id[self.__graph.edge(from_v, target_v)]))
                if self.generate_images:
                    self.generate_image(cycle_edges,
                                        iteration_n=iteration_counter)
            else:
                can_improve = False
            # Rebuild the graph from scratch next iteration.
            self.__graph.clear()
            iteration_counter += 1

    def structure_graph(self, students, schools):
        """Creates a graph where students points to schools, and schools
        points to students.

        In the graph, each student points at his best option, and each
        school points at the student (or students) with the highest
        priority.

        :param students: List of students.
        :type students: list
        :param schools:
        :type schools: list
        """
        # Index students/schools by id once, on the first call.
        if not self.__students_by_id and not self.__schools_by_id:
            for student in students:
                self.__students_by_id[student.id] = student
            for school in schools:
                self.__schools_by_id[school.id] = school
        # Rebuild each school's preference queue every iteration.
        for school in schools:
            setattr(school, 'preferences',
                    StudentQueue(school, preference_mode=True))
        remaining_students = [
            student for student in students if not student.assigned_school
        ]
        for student in remaining_students:
            for pref_school in student.preferences:
                pref_school.preferences.append(student)
        # Each unassigned student points at his best school with free capacity.
        for student in remaining_students:
            v_source_student = self.create_vertex_student(student)
            pref_school = next(
                (school for school in student.preferences
                 if len(school.assignation.get_all_students()) < school.capacity),
                None)
            if pref_school:
                v_target_school = self.create_vertex_school(pref_school)
                self.create_edge(v_source_student, v_target_school)
        # Each school with free capacity points at its top-priority student.
        for school in schools:
            if len(school.assignation.get_all_students()) < school.capacity:
                v_source_school = self.create_vertex_school(school)
                pref_student = next(
                    iter(school.preferences.get_all_students()), None)
                if pref_student:
                    v_target_student = self.create_vertex_student(pref_student)
                    self.create_edge(v_source_school, v_target_student)
        # graph_draw(self.__graph,
        #            vertex_text=self.__entity_id, vertex_shape="circle",
        #            output_size=(1000, 1000), bg_color=[1., 1., 1., 1], output="graph.png")

    def create_vertex_student(self, student):
        """Defines a new student as a vertex in the graph (if it did not
        existed before)."""
        if student.id in self.__vertices_by_student_id:
            vertex = self.__vertices_by_student_id[student.id]
        else:
            vertex = self.__graph.add_vertex()
            self.__vertices_by_student_id[student.id] = vertex
            self.__entity_id[vertex] = student.id
            self.__entity_type[
                vertex] = "st"  # ToDo: There may be other ways to do this.
        return vertex

    def create_vertex_school(self, school):
        """Defines a new school as a vertex in the graph (if it did not
        existed before)."""
        if school.id in self.__vertices_by_school_id:
            vertex = self.__vertices_by_school_id[school.id]
        else:
            vertex = self.__graph.add_vertex()
            self.__vertices_by_school_id[school.id] = vertex
            self.__entity_id[vertex] = school.id
            self.__entity_type[vertex] = "sc"
        return vertex

    def create_edge(self, source_v, target_v):
        """Creates a directed edge between two vertices."""
        self.__graph.add_edge(source_v, target_v)

    def generate_image(self, cycle_edges, iteration_n=0):
        """Generates an image of a graph.

        :param cycle_edges: Edges which are part of the main cycle
            (they will be highlighted in red).
        :type cycle_edges: list
        :param iteration_n: Number of iteration of the algorithm
            (this is added in the filename of the image).
        :type iteration_n: int

        .. DANGER:: This is an experimental feature.
        """
        # Highlight cycle edges in red, dim the rest.
        edge_color = self.__graph.new_edge_property("vector<float>")
        edge_width = self.__graph.new_edge_property("int")
        for edge in self.__graph.edges():
            if edge in cycle_edges:
                edge_color[edge] = [1., 0.2, 0.2, 0.999]
                edge_width[edge] = 7
            else:
                edge_color[edge] = [0., 0., 0., 0.3]
                edge_width[edge] = 4
        # Students drawn as circles, schools as double circles.
        vertex_shape = self.__graph.new_vertex_property("string")
        vertex_size = self.__graph.new_vertex_property("int")
        for vertex in self.__graph.vertices():
            if self.__entity_type[vertex] == "st":
                vertex_shape[vertex] = "circle"
                vertex_size[vertex] = 1
            else:
                vertex_shape[vertex] = "double_circle"
                vertex_size[vertex] = 100
        # pos = sfdp_layout(self.__graph, C=10, p=5, theta=2, gamma=1)
        pos = arf_layout(self.__graph, d=0.2, a=3)
        graph_draw(
            self.__graph,
            pos=pos,
            vertex_text=self.__entity_id,
            vertex_font_size=1,  # ToDo: Move image related code outside the class.
            vertex_fill_color=[0.97, 0.97, 0.97, 1],
            vertex_color=[0.05, 0.05, 0.05, 0.95],
            vertex_shape=vertex_shape,
            edge_color=edge_color,
            edge_pen_width=edge_width,
            output_size=(1000, 1000),
            bg_color=[1., 1., 1., 1],
            output=self.generate_filename(iteration_n))

    def generate_filename(self, iteration_n):  # ToDo: Move this to utils
        """Returns a filename (which is used to generate the images)."""
        filename = "Graph (iteration {})".format(
            iteration_n) if iteration_n > 0 else "Graph"
        output_file = gen_filepath(self.images_folder,
                                   filename=filename,
                                   extension="png")
        return output_file
def load_instance(filepath):
    """Load an undirected, upgradeable-weighted graph instance from a text file.

    Expected format:
      - first line: ``n m`` (vertex count, edge count)
      - next ``m`` lines: ``v1 v2 w1 w2 w3`` (endpoints + three weight levels)
      - then the vertex costs, either all on one line, or one per line
        (first cost on the line following the edges, the rest one per line).

    :param filepath: path to the instance file
    :return: a graph-tool ``Graph`` with internal properties
        ``vp.is_upgraded``, ``vp.cost``, ``ep.weight``, ``ep.weight_2``,
        ``ep.weight_3`` and ``gp.total_cost`` (sum of all vertex costs).
    :raises AssertionError: if a header or edge line has the wrong token count.
    """
    g = Graph(directed=False)
    node_upgraded = g.new_vertex_property("bool")
    node_cost = g.new_vertex_property("float")
    edge_weight = g.new_edge_property("float")
    edge_weight_lv2 = g.new_edge_property("float")
    edge_weight_lv3 = g.new_edge_property("float")
    graph_total_cost = g.new_graph_property("float")
    graph_total_cost[g] = 0
    with open(filepath, "r") as f:
        first_line = f.readline()
        tokens = first_line.split(" ")
        assert len(tokens) == 2
        n = int(tokens[0])
        m = int(tokens[1])
        g.add_vertex(n)
        for _ in range(m):
            line = f.readline()
            tokens = line.split(" ")
            assert len(tokens) == 5
            v1 = int(tokens[0])
            v2 = int(tokens[1])
            e = g.add_edge(v1, v2)
            edge_weight[e] = float(tokens[2])
            edge_weight_lv2[e] = float(tokens[3])
            edge_weight_lv3[e] = float(tokens[4])
        # Identify how the vertex costs are formatted: all on one line, or
        # one per line. Filter out empty tokens so that double/trailing
        # spaces do not crash float() (the previous filter `x != " "` could
        # never match, since str.split(" ") never yields " ").
        line = f.readline()
        v_cost = [float(x) for x in line.split(" ") if x.strip()]
        if len(v_cost) > 1:
            # All costs on a single line (incorrectly formatted instance).
            for i in range(n):
                v = g.vertex(i)
                node_cost[v] = v_cost[i]
                node_upgraded[i] = False
        else:
            # One cost per line; the line read above holds vertex 0's cost.
            v = g.vertex(0)
            node_cost[v] = v_cost[0]
            node_upgraded[v] = False
            vertices = g.vertices()
            # Skip vertex 0, already handled. next() works on both py2/py3;
            # the previous `vertices.next()` was Python-2 only.
            next(vertices)
            for v in vertices:
                line = f.readline()
                node_cost[v] = float(line)
                node_upgraded[v] = False
    # Internalize the property maps so they are saved with the graph.
    g.vp.is_upgraded = node_upgraded
    g.vp.cost = node_cost
    g.ep.weight = edge_weight
    g.ep.weight_2 = edge_weight_lv2
    g.ep.weight_3 = edge_weight_lv3
    for v in g.vertices():
        graph_total_cost[g] += node_cost[v]
    g.gp.total_cost = graph_total_cost
    return g
class Network:
    """Directed pass network between players, backed by a graph-tool Graph.

    Vertices are players; edges are passes with accumulated weights. Pass
    origin/destination coordinates are accumulated per player so that the
    mean position can be derived lazily.
    """

    def __init__(self):
        self.g = Graph(directed=True)
        # player id -> vertex descriptor
        self.player_id_to_vertex = {}
        self.pairs = {}  # player pair: edge
        # property maps for additional information
        self.g.vertex_properties['player_id'] = self.g.new_vertex_property(
            "string")
        # accumulated (summed) coordinates per player
        self.g.vertex_properties['player_coords'] = self.g.new_vertex_property(
            "vector<float>")
        # mean coordinates, filled lazily in average_player_coords_pmap
        self.g.vertex_properties[
            'average_player_coords'] = self.g.new_vertex_property(
                "vector<float>")
        # number of coordinate samples accumulated per player
        self.g.vertex_properties[
            'player_n_coords'] = self.g.new_vertex_property("int")
        self.g.edge_properties['weight'] = self.g.new_edge_property("float")

    @property
    def edge_weights(self):
        # Accumulated pass scores per edge.
        return self.g.edge_properties['weight']

    @property
    def player_id_pmap(self):
        return self.g.vertex_properties['player_id']

    @property
    def player_coords_pmap(self):
        return self.g.vertex_properties['player_coords']

    @property
    def player_n_coords_pmap(self):
        return self.g.vertex_properties['player_n_coords']

    @property
    def average_player_coords_pmap(self):
        # lazy evaluation of means: summed coords / sample count.
        # NOTE(review): divides unconditionally — a vertex with zero samples
        # would divide by zero; presumably only called after add_passes.
        for v in self.g.vertices():
            self.g.vertex_properties['average_player_coords'][v] = np.asarray(
                self.player_coords_pmap[v]) / self.player_n_coords_pmap[v]
        return self.g.vertex_properties['average_player_coords']

    def add_players(self, pids: List[str]):
        """Add one vertex per player id and register the id -> vertex mapping.

        Returns the list of newly created vertex descriptors.
        """
        n = len(pids)
        vs = list(self.g.add_vertex(n))
        self.player_id_to_vertex.update({pids[i]: vs[i] for i in range(n)})
        for i in range(n):
            self.player_id_pmap[vs[i]] = pids[i]
        return vs

    def add_passes(self,
                   id_pairs: List[Tuple],
                   coords_pairs: List[Tuple],
                   pass_scores=None):
        """Record passes between players.

        :param id_pairs: (origin_id, dest_id) per pass
        :param coords_pairs: 4-tuples per pass — presumably
            (orig_x, orig_y, dest_x, dest_y); TODO confirm against caller.
        :param pass_scores: per-pass edge weight increment (defaults to 1).
        """
        pairs = [(self.player_id_to_vertex[i1], self.player_id_to_vertex[i2])
                 for i1, i2 in id_pairs]
        # append player coordinates
        n = len(coords_pairs)
        if pass_scores is None:
            pass_scores = [1 for _ in range(n)]
        for i in range(n):
            # remember orig and dest location
            # orig player
            coords = self.player_coords_pmap[pairs[i][0]]
            if len(coords) == 0:
                # first sample for this player
                coords = np.asarray([coords_pairs[i][0], coords_pairs[i][1]])
            else:
                # accumulate
                coords += np.asarray([coords_pairs[i][0], coords_pairs[i][1]])
            self.player_coords_pmap[pairs[i][0]] = coords
            self.player_n_coords_pmap[pairs[i][0]] += 1
            # dest player
            coords = self.player_coords_pmap[pairs[i][1]]
            if len(coords) == 0:
                coords = np.asarray([coords_pairs[i][2], coords_pairs[i][3]])
            else:
                # accumulate
                coords += np.asarray([coords_pairs[i][2], coords_pairs[i][3]])
            self.player_coords_pmap[pairs[i][1]] = coords
            self.player_n_coords_pmap[pairs[i][1]] += 1
            # if the edge exists, increment its weight instead of creating a new edge
            e = self.pairs.get(pairs[i])
            if e is not None:
                self.edge_weights[e] += pass_scores[i]
            else:
                e = self.g.add_edge(*pairs[i])
                self.pairs[pairs[i]] = e
                self.edge_weights[e] = pass_scores[i]

    def cleanup(self):
        """remove isolated vertices"""
        to_remove = []
        for v in self.g.vertices():
            if v.in_degree() + v.out_degree() == 0:
                to_remove.append(v)
        n = len(to_remove)
        # NOTE(review): fast=True reorders vertex indices, which invalidates
        # the descriptors cached in player_id_to_vertex/pairs — verify callers
        # do not use those mappings after cleanup().
        self.g.remove_vertex(to_remove, fast=True)
        print("Removed {0} isolated vertices".format(n))

    def save(self, file: str):
        """Persist the graph (with all property maps) as GraphML."""
        self.g.save(file, fmt='graphml')
def remove_leaves(G: Graph, del_list):
    """remove leaves (in-degree 0 or out-degree 0) from graph"""
    # Flag every leaf vertex in the filter map; vertices marked False are
    # excluded when del_list is used as a vertex filter.
    for vertex in G.vertices():
        is_leaf = vertex.out_degree() == 0 or vertex.in_degree() == 0
        if is_leaf:
            del_list[vertex] = False
class GeneralGraph():
    """
    General wrapper for graph-tool or networkx graphs to add edges and nodes
    according to constraints

    NOTE(review): several methods read attributes this class never sets
    (cost_instance, hard_constraints) and call methods it never defines
    (set_cost_rest, _compute_edges, add_start_and_dest) — presumably provided
    by a subclass; verify before using this class directly.
    """

    def __init__(self, directed=True, verbose=1):
        # Backend switch: graph-tool when GRAPH_TOOL is truthy, else networkx.
        self.graphtool = GRAPH_TOOL
        # Initialize graph
        if self.graphtool:
            self.graph = Graph(directed=directed)
            # combined (weighted-sum) edge cost, filled by sum_costs()
            self.weight = self.graph.new_edge_property("float")
        else:
            if directed:
                print("directed graph")
                self.graph = nx.DiGraph()
            else:
                self.graph = nx.Graph()
        # set metaparameter
        self.time_logs = {}
        self.verbose = verbose

    def set_edge_costs(self,
                       layer_classes=["resistance"],
                       class_weights=[1],
                       **kwargs):
        """
        Initialize edge cost variables
        :param classes: list of cost categories
        :param weights: list of weights for cost categories - must be of same
        shape as classes (if None, then equal weighting)

        NOTE(review): mutable default arguments — harmless here since they
        are only read, but fragile if ever mutated.
        """
        class_weights = np.array(class_weights)
        # set different costs:
        self.cost_classes = layer_classes
        if self.graphtool:
            # one float edge property per cost category
            self.cost_props = [
                self.graph.new_edge_property("float")
                for _ in range(len(layer_classes))
            ]
        # normalize weights to sum to 1
        self.cost_weights = class_weights / np.sum(class_weights)
        if self.verbose:
            print(self.cost_classes, self.cost_weights)
        # save weighted instance for plotting
        # (cost_instance / hard_constraints are assumed to be set elsewhere)
        self.instance = np.sum(
            np.moveaxis(self.cost_instance, 0, -1) * self.cost_weights,
            axis=2) * self.hard_constraints

    def set_shift(self,
                  start,
                  dest,
                  pylon_dist_min=3,
                  pylon_dist_max=5,
                  max_angle=np.pi / 2,
                  **kwargs):
        """
        Initialize shift variable by getting the donut values
        :param lower, upper: min and max distance of pylons
        :param vec: vector of diretion of edges
        :param max_angle: Maximum angle of edges to vec
        """
        vec = dest - start
        if self.verbose:
            print("SHIFT:", pylon_dist_min, pylon_dist_max, vec, max_angle)
        # half-donut of admissible relative pixel offsets (candidate edges)
        self.shifts = get_half_donut(pylon_dist_min,
                                     pylon_dist_max,
                                     vec,
                                     angle_max=max_angle)
        self.shift_tuples = self.shifts

    def set_corridor(self,
                     dist_surface,
                     start_inds,
                     dest_inds,
                     sample_func="mean",
                     sample_method="simple",
                     factor_or_n_edges=1):
        # set new corridor: restrict costs to pixels inside the corridor
        # and not forbidden by hard constraints
        corridor = (dist_surface > 0).astype(int)
        self.factor = factor_or_n_edges
        self.cost_rest = self.cost_instance * (self.hard_constraints >
                                               0).astype(int) * corridor
        # downsample
        tic = time.time()
        if self.factor > 1:
            self.cost_rest = CostUtils.downsample(self.cost_rest,
                                                  self.factor,
                                                  mode=sample_method,
                                                  func=sample_func)
        self.time_logs["downsample"] = round(time.time() - tic, 3)
        # repeat because edge artifacts
        self.cost_rest = self.cost_rest * (self.hard_constraints >
                                           0).astype(int) * corridor
        # add start and end TODO ugly
        self.cost_rest[:, dest_inds[0],
                       dest_inds[1]] = self.cost_instance[:, dest_inds[0],
                                                          dest_inds[1]]
        self.cost_rest[:, start_inds[0],
                       start_inds[1]] = self.cost_instance[:, start_inds[0],
                                                           start_inds[1]]

    def add_nodes(self, nodes):
        """
        Add vertices to the graph
        param nodes: list of node names if networkx, integer if graphtool
        """
        tic = time.time()
        # add nodes to graph
        if self.graphtool:
            _ = self.graph.add_vertex(nodes)
            self.n_nodes = len(list(self.graph.vertices()))
        else:
            self.graph.add_nodes_from(np.arange(nodes))
            self.n_nodes = len(self.graph.nodes())
        # verbose
        if self.verbose:
            print("Added nodes:", nodes, "in time:", time.time() - tic)
        self.time_logs["add_nodes"] = round(time.time() - tic, 3)

    def add_edges(self):
        # Build all candidate edges, one shift direction at a time.
        tic_function = time.time()
        n_edges = 0
        # kernels, posneg = ConstraintUtils.get_kernel(self.shifts,
        # self.shift_vals)
        # edge_array = []
        times_edge_list = []
        times_add_edges = []
        if self.verbose:
            print("n_neighbors:", len(self.shift_tuples))
        for i in range(len(self.shift_tuples)):
            tic_edges = time.time()
            # set cost rest if necessary (random graph)
            self.set_cost_rest()
            # compute shift and weights
            out = self._compute_edges(self.shift_tuples[i])
            # Error if -1 entries because graph-tool crashes with -1 nodes
            if np.any(out[:, :2].flatten() < 0):
                print(np.where(out[:, :2] < 0))
                raise RuntimeError
            n_edges += len(out)
            times_edge_list.append(round(time.time() - tic_edges, 3))
            # add edges to graph
            tic_graph = time.time()
            if self.graphtool:
                # columns 0,1 are endpoints, remaining columns fill cost_props
                self.graph.add_edge_list(out, eprops=self.cost_props)
            else:
                # networkx: collapse per-class costs into one weighted sum
                nx_edge_list = [(e[0], e[1], {
                    "weight": np.sum(e[2:] * self.cost_weights)
                }) for e in out]
                self.graph.add_edges_from(nx_edge_list)
            times_add_edges.append(round(time.time() - tic_graph, 3))
            # alternative: collect edges here and add alltogether
            # edge_array.append(out)
        # # alternative: add edges all in one go
        # tic_concat = time.time()
        # edge_lists_concat = np.concatenate(edge_array, axis=0)
        # self.time_logs["concatenate"] = round(time.time() - tic_concat, 3)
        # print("time for concatenate:", self.time_logs["concatenate"])
        # tic_graph = time.time()
        # self.graph.add_edge_list(edge_lists_concat, eprops=[self.weight])
        # self.time_logs["add_edges"] = round(
        #     (time.time() - tic_graph) / len(shifts), 3
        # )
        self.n_edges = len(list(self.graph.edges()))
        self._update_time_logs(times_add_edges, times_edge_list, tic_function)
        if self.verbose:
            print("DONE adding", n_edges, "edges:", time.time() - tic_function)

    def _update_time_logs(self, times_add_edges, times_edge_list,
                          tic_function):
        # Record mean and per-iteration timings gathered in add_edges().
        self.time_logs["add_edges"] = round(np.mean(times_add_edges), 3)
        self.time_logs["add_edges_times"] = times_add_edges
        self.time_logs["edge_list"] = round(np.mean(times_edge_list), 3)
        self.time_logs["edge_list_times"] = times_edge_list
        self.time_logs["add_all_edges"] = round(time.time() - tic_function, 3)
        if self.verbose:
            print("Done adding edges:", len(list(self.graph.edges())))

    def sum_costs(self):
        """
        Additive weighting of costs
        Take the individual edge costs, compute weighted sum --> self.weight
        """
        # add sum of all costs
        if not self.graphtool:
            # networkx edges already carry the weighted sum (see add_edges)
            return
        tic = time.time()
        summed_costs_arr = np.zeros(self.cost_props[0].get_array().shape)
        for i in range(len(self.cost_props)):
            prop = self.cost_props[i].get_array()
            summed_costs_arr += prop * self.cost_weights[i]
        self.weight.a = summed_costs_arr
        self.time_logs["sum_of_costs"] = round(time.time() - tic, 3)

    def remove_vertices(self, dist_surface, delete_padding=0):
        """
        Remove edges in a certain corridor (or all) to replace them by a
        refined surface
        @param dist_surface: a surface where each pixel value corresponds to
        the distance of the pixel to the shortest path
        @param delete_padding: define padding in which part of the corridor
        to delete vertices (cannot delete all because then graph unconnected)

        NOTE(review): parameters are currently unused — this clears ALL edges.
        """
        tic = time.time()
        self.graph.clear_edges()
        self.graph.shrink_to_fit()
        self.time_logs["remove_edges"] = round(time.time() - tic, 3)

    def get_pareto(self,
                   vary,
                   source,
                   dest,
                   out_path=None,
                   compare=[0, 1],
                   plot=1):
        """
        Arguments:
            vary: how many weights to explore
                    e.g 3 --> each cost class can have weight 0, 0.5 or 1
            source, dest: as always the source and destination vertex
            out_path: where to save the pareto figure(s)
            compare: indices of cost classes to compare
        Returns:
            paths: All found paths
            pareto: The costs for each combination of weights
        """
        tic = time.time()
        # initialize lists
        pareto = list()
        paths = list()
        cost_sum = list()
        # get the edge costs
        cost_arrs = [cost.get_array() for cost in self.cost_props]
        # [self.cost_props[comp].get_array() for comp in compare]

        # get vary weights between 0 and 1
        var_weights = np.around(np.linspace(0, 1, vary), 2)

        # construct weights array
        if len(compare) == 2:
            weights = [[v, 1 - v] for v in var_weights]
        elif len(compare) == 3:
            # all combinations (w0, w1, w2) on the simplex w0+w1+w2 = 1
            weights = list()
            for w0 in var_weights:
                for w1 in var_weights[var_weights <= 1 - w0]:
                    weights.append([w0, w1, 1 - w0 - w1])
        else:
            raise ValueError("argument compare can only have length 2 or 3")

        # w_avail: keep weights of non-compare classes, get leftover amount
        w_avail = np.sum(np.asarray(self.cost_weights)[compare])

        # compute paths for each combination of weights
        for j in range(len(weights)):
            # option 2: np.zeros(len(cost_arrs)) + non_compare_weight
            w = self.cost_weights.copy()
            # replace the ones we want to compare
            w[compare] = np.array(weights[j]) * w_avail
            # weighted sum of edge costs
            self.weight.a = np.sum(
                [cost_arrs[i] * w[i] for i in range(len(cost_arrs))], axis=0)
            # get shortest path
            # NOTE(review): this class's own get_shortest_path returns a
            # single value — the 3-tuple unpacking suggests a subclass
            # override; confirm.
            path, path_costs, _ = self.get_shortest_path(source, dest)
            # don't take cost_sum bc this is sum of original weighting
            pareto.append(np.sum(path_costs, axis=0)[compare])
            paths.append(path)
            # take overall sum of costs (unweighted) that this w leads to
            cost_sum.append(np.sum(path_costs))

        # print best weighting
        best_weight = np.argmin(cost_sum)
        w = self.cost_weights.copy()
        w[compare] = np.array(weights[best_weight]) * w_avail
        print("Best weights:", w, "with (unweighted) costs:",
              np.min(cost_sum))

        self.time_logs["pareto"] = round(time.time() - tic, 3)

        pareto = np.array(pareto)
        classes = [self.cost_classes[comp] for comp in compare]
        # Plotting
        if plot:
            if len(compare) == 2:
                plot_pareto_scatter_2d(pareto,
                                       weights,
                                       classes,
                                       cost_sum=cost_sum,
                                       out_path=out_path)
            elif len(compare) == 3:
                # plot_pareto_3d(pareto, weights, classes)
                plot_pareto_scatter_3d(pareto,
                                       weights,
                                       classes,
                                       cost_sum=cost_sum,
                                       out_path=out_path)
        return paths, weights, cost_sum

    def get_shortest_path(self, source, target):
        """
        Compute shortest path from source vertex to target vertex
        """
        tic = (time.time())
        # #if source and target are given as indices:
        if self.graphtool:
            vertices_path, _ = shortest_path(self.graph,
                                             source,
                                             target,
                                             weights=self.weight,
                                             negative_weights=True)
        else:
            try:
                vertices_path = nx.dijkstra_path(self.graph, source, target)
            except nx.exception.NetworkXNoPath:
                # NOTE(review): empty list here vs. a vertex list on success —
                # callers that unpack 3 values will fail on both branches;
                # presumably overridden in subclasses.
                return []
        self.time_logs["shortest_path"] = round(time.time() - tic, 3)
        return vertices_path

    def save_graph(self, OUT_PATH):
        """
        Save the graph in OUT_PATH
        """
        if self.graphtool:
            # internalize per-class costs and combined weight before saving
            for i, cost_class in enumerate(self.cost_classes):
                self.graph.edge_properties[cost_class] = self.cost_props[i]
            self.graph.edge_properties["weight"] = self.weight
            self.graph.save(OUT_PATH + ".xml.gz")
        else:
            nx.write_weighted_edgelist(self.graph,
                                       OUT_PATH + '.weighted.edgelist')

    def load_graph(self, IN_PATH):
        """
        Retrieve graph from IN_PATH
        """
        if self.graphtool:
            self.g_prev = load_graph(IN_PATH + ".xml.gz")
            self.weight_prev = self.g_prev.ep.weight
            # weight = G2.ep.weight[G2.edge(66, 69)]
        else:
            self.g_prev = nx.read_edgelist(IN_PATH + '.weighted.edgelist',
                                           nodetype=int,
                                           data=(('weight', float), ))

    # -----------------------------------------------------------------------
    # INTERFACE

    def single_sp(self, **kwargs):
        """ Function for full processing until shortest path """
        self.start_inds = kwargs["start_inds"]
        self.dest_inds = kwargs["dest_inds"]
        self.set_shift(self.start_inds, self.dest_inds, **kwargs)
        # self.set_corridor(
        #     np.ones(self.hard_constraints.shape) * 0.5,
        #     self.start_inds,
        #     self.dest_inds,
        #     factor_or_n_edges=1
        # )
        if self.verbose:
            print("1) Initialize shifts and instance (corridor)")
        self.set_edge_costs(**kwargs)
        # add vertices
        # NOTE(review): add_nodes(self, nodes) requires an argument —
        # this call would raise TypeError unless overridden in a subclass.
        self.add_nodes()
        if self.verbose:
            print("2) Initialize distances to inf and predecessors")
        self.add_edges()
        if self.verbose:
            print("3) Compute source shortest path tree")
            print("number of vertices and edges:", self.n_nodes, self.n_edges)
        # weighted sum of all costs
        self.sum_costs()
        # add_start_and_dest is expected from a subclass
        source_v, target_v = self.add_start_and_dest(self.start_inds,
                                                     self.dest_inds)
        # get actual best path
        path, path_costs, cost_sum = self.get_shortest_path(source_v,
                                                            target_v)
        if self.verbose:
            print("4) shortest path", cost_sum)
        return path, path_costs, cost_sum
# Script: load an edge list from a file, build an undirected graph and
# prepare layout/colors for drawing. (The final loop continues beyond this
# excerpt.)
print ("Carregando arquivo...")
g = Graph(directed=False)
edgelist = []
with open(args.edge_list) as f:
    for line in f:
        if(line):
            # NOTE(review): on Python 3, map() yields a lazy iterator;
            # add_edge_list(hashed=True) consumes it — verify this is py3.
            edgelist.append(map(int,line.split()))
# hashed=True: vertex labels come from the edge-list values; the returned
# property map holds each vertex's original label.
labels_vertices = g.add_edge_list(edgelist,hashed=True)
labels_vertices_str = g.new_vertex_property("string")
for v in g.vertices():
    labels_vertices_str[v] = str(labels_vertices[v])
# inverse mapping: label -> vertex (project helper)
labels_vertices_inv = mapeiaLabels(g,labels_vertices)
pos = sfdp_layout(g)
colors = trataCores(dict_map)
pos_new = trataPosicoes(g,pos,dict_map,labels_vertices_inv)
color = g.new_vertex_property("string")
for v in g.vertices():
    index = g.vertex_index[v]
    label = labels_vertices[v]
    if label not in colors:
        # fallback color for labels missing from the color map
        colors[label] = '#0c0cff'