Example no. 1
    def max_degree_sample(graph: Graph, num_vertices: int,
                          prev_state: DegreeWeightedSampleState,
                          args: argparse.Namespace) -> SampleState:
        """Max-degree sampling. Simply samples the highest-degree vertices.

        Parameters
        ----------
        graph : Graph
            the filtered graph from which to sample vertices
        num_vertices : int
            number of vertices in the unfiltered graph
        prev_state : DegreeWeightedSampleState
            the state of the previous sample in the stack. If there is no previous sample, an empty SampleState object
            should be passed in here.
        args : argparse.Namespace
            the command-line arguments provided by the user

        Returns
        -------
        state : SampleState
            the sample state with the sampled vertex ids (Note: these ids correspond to the filtered graph, and have
            to be mapped back to the unfiltered graph)
        """
        state = MaxDegreeSampleState(graph.num_vertices(), prev_state)
        sample_num = int(
            (num_vertices * (args.sample_size / 100)) / args.sample_iterations)
        vertex_degrees = graph.get_total_degrees(
            np.arange(graph.num_vertices()))
        vertex_degrees[state.sample_idx] = 0
        top_indices = np.argpartition(vertex_degrees,
                                      -sample_num)[-sample_num:]
        state.sample_idx = np.concatenate((state.sample_idx, top_indices))
        return state
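The top-k selection above hinges on np.argpartition. A minimal, self-contained sketch of just that step (the degree array and k below are made up for illustration and are not tied to the SampleState classes):

import numpy as np

degrees = np.array([3, 7, 1, 9, 4, 9, 2])    # hypothetical total degrees
k = 3
top_k = np.argpartition(degrees, -k)[-k:]    # indices of the k largest degrees, in no particular order
print(sorted(top_k.tolist()))                # [1, 3, 5]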
Example no. 2
def gen_cascade(g, p, source=None, stop_fraction=0.5):
    # Simulate a simple independent-cascade-style process: at each time step every
    # infected vertex infects each not-yet-infected neighbour with probability p,
    # until the infected fraction exceeds stop_fraction.
    if source is None:
        source = random.choice(np.arange(g.num_vertices()))
    infected = {source}
    infection_times = np.ones(g.num_vertices()) * -1
    infection_times[source] = 0
    time = 0
    edges = []
    while np.count_nonzero(infection_times != -1) / g.num_vertices() <= stop_fraction:
        infected_nodes_until_t = copy(infected)
        time += 1
        for i in infected_nodes_until_t:
            for j in g.vertex(i).all_neighbours():
                j = int(j)
                if j not in infected and random.random() <= p:
                    infected.add(j)
                    infection_times[j] = time
                    edges.append((i, j))

    tree = Graph(directed=True)
    for _ in range(g.num_vertices()):
        tree.add_vertex()
    for u, v in edges:
        tree.add_edge(u, v)
    return source, infection_times, tree
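A hedged usage sketch for gen_cascade on a tiny hand-built ring graph; the imports are the ones the function itself relies on, and with p=1.0 the cascade stops right after the first step:

import random
from copy import copy

import numpy as np
from graph_tool import Graph

g = Graph(directed=False)
g.add_vertex(4)
g.add_edge_list([(0, 1), (1, 2), (2, 3), (3, 0)])
source, infection_times, tree = gen_cascade(g, p=1.0, source=0, stop_fraction=0.5)
print(source, infection_times)   # 0 [ 0.  1. -1.  1.]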
Example no. 3
    def random_walk_sample(graph: Graph, num_vertices: int,
                           prev_state: RandomWalkSampleState,
                           args: argparse.Namespace) -> SampleState:
        """Random walk sampling. Start from a vertex and walk along the edges, sampling every vertex that is a part of
        the walk. With a probability of 0.15, restart the walk from the original vertex. To prevent getting stuck,
        after making N attempts, where N = the target number of vertices in the sample, change the starting vertex to a
        random vertex.

        Parameters
        ----------
        graph : Graph
            the filtered graph from which to sample vertices
        num_vertices : int
            number of vertices in the unfiltered graph
        prev_state : RandomWalkSampleState
            the state of the previous sample in the stack. If there is no previous sample, an empty SampleState object
            should be passed in here.
        args : argparse.Namespace
            the command-line arguments provided by the user

        Returns
        -------
        state : SampleState
            the sample state with the sampled vertex ids (Note: these ids correspond to the filtered graph, and have
            to be mapped back to the unfiltered graph)
        """
        state = RandomWalkSampleState(graph.num_vertices(), prev_state)
        sample_num = int(
            (num_vertices * (args.sample_size / 100)) / args.sample_iterations)
        sample_num += len(state.sample_idx)
        num_tries = 0
        start = np.random.randint(sample_num)  # start with a random vertex
        vertex = start

        while len(state.index_set) == 0 or len(
                state.index_set) % sample_num != 0:
            num_tries += 1
            if not state.sampled_marker[vertex]:
                state.index_set.append(vertex)
                state.sampled_marker[vertex] = True
            if num_tries % sample_num == 0:  # If the number of tries is large, restart from new random vertex
                start = np.random.randint(sample_num)
                vertex = start
                num_tries = 0
            elif np.random.random(
            ) < 0.15:  # With a probability of 0.15, restart at original node
                vertex = start
            elif len(
                    graph.get_out_neighbors(vertex)
            ) > 0:  # If the vertex has out neighbors, go to one of them
                vertex = np.random.choice(graph.get_out_neighbors(vertex))
            else:  # Otherwise, restart from the original vertex
                if len(
                        graph.get_out_neighbors(start)
                ) == 0:  # if original vertex has no out neighbors, change it
                    start = np.random.randint(sample_num)
                vertex = start

        state.sample_idx = np.asarray(state.index_set)
        return state
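A stripped-down sketch of the same walk-with-restart idea, independent of the SampleState bookkeeping above; the function name, the restart probability and the stopping rule are illustrative, and it assumes target is no larger than the number of reachable vertices:

import numpy as np
from graph_tool import Graph


def random_walk_with_restart(g: Graph, target: int, restart_p: float = 0.15) -> list:
    start = vertex = np.random.randint(g.num_vertices())
    sampled, seen, tries = [], set(), 0
    while len(sampled) < target:
        tries += 1
        if vertex not in seen:
            seen.add(vertex)
            sampled.append(int(vertex))
        neighbors = g.get_out_neighbors(vertex)
        if tries % target == 0 or len(neighbors) == 0:
            # too many attempts or a dead end: move to a fresh random start
            start = vertex = np.random.randint(g.num_vertices())
            tries = 0
        elif np.random.random() < restart_p:
            vertex = start                     # restart the walk at its origin
        else:
            vertex = int(np.random.choice(neighbors))
    return sampled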
Example no. 4
def golang_graph_to_graphtool_graph(edge_data):
    g = golang_graph_to_graph(edge_data)
    G = Graph(directed=False)
    handles = [G.add_vertex() for _ in g.nodes()]
    for u, v in g.edges():
        G.add_edge(handles[u], handles[v])
    return G
Example no. 5
 def __init__(self, protocol):
     self._protocol = protocol
     self._graph = Graph(directed=True)
     self._vertices = {}
     self._leaves = set()
     self._speed = Speed.ZERO
     self._construct_tree()
Example no. 6
    def __init__(self, name, edges, object_ids, weights, hidden_graph=None):
        """
        Params:
            name (str): unique string to name this dataset (for pickling and
                unpickling)
            edges (numpy.ndarray): numpy array of shape [num_edges, 2]
                containing the indices of nodes in all edges
            objects (List[str]): string object ids for all nodes
            weights (numpy.ndarray): numpy array of shape [num_edges]
                containing edge weights
            hidden_graph (GraphDataset): Graph data that should be excluded
                but not considered as negative edges. (i.e. train
                edges should not be in eval dataset but they shouldn't be
                counted as negatives either)
        """

        self.name = name
        self.edges = edges
        self.object_ids = np.asarray(object_ids)
        self.weights = weights
        self.hidden_graph = hidden_graph

        self.graph = Graph(directed=False)
        self.graph.add_vertex(len(object_ids))
        edge_weights = [[edge[0], edge[1], weight]
                        for edge, weight in zip(self.edges, self.weights)]
        self.weight_property = self.graph.new_edge_property("float")
        eprops = [self.weight_property]
        self.graph.add_edge_list(edge_weights, eprops=eprops)
        self.manifold_nns = None
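The constructor above pushes edge weights into graph-tool through the eprops argument of add_edge_list. A minimal standalone sketch of that pattern with toy numbers (not the GraphDataset API itself):

from graph_tool import Graph

g = Graph(directed=False)
g.add_vertex(3)
weight = g.new_edge_property("float")
# each row is [source, target, weight]; eprops tells add_edge_list where the extra column goes
g.add_edge_list([[0, 1, 0.5], [1, 2, 2.0]], eprops=[weight])
print([weight[e] for e in g.edges()])   # [0.5, 2.0]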
Example no. 7
    def init2(self, emacs_var_dict):
        self.emacs_var_dict = emacs_var_dict

        self.link_str = self.emacs_var_dict['links']
        self.g = Graph()
        self.label_ep = self.g.new_edge_property("string")
        self.links = self.link_str.split(";")

        link_tpls = [i.split(" -- ") for i in self.links]
        dumper([str(i) for i in link_tpls])

        self.g_id = self.g.add_edge_list(link_tpls,
                                         hashed=True,
                                         string_vals=True,
                                         eprops=[self.label_ep])

        self.adj = np.array([(int(i.source()), int(i.target()))
                             for i in self.g.edges()])
        self.node_names = [self.g_id[i] for i in self.g.vertices()]

        self.vd = {}
        for i in self.g.vertices():
            self.vd[self.g_id[i]] = int(i)

        # self.pos_vp = sfdp_layout(self.g, K=0.5)
        self.pos_vp = fruchterman_reingold_layout(self.g)
        self.base_pos_ar = self.pos_vp.get_2d_array((0, 1)).T
        self.qt_coords = self.nolz_pos_ar(self.base_pos_ar)

        dumper([str(self.qt_coords)])
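init2 above relies on add_edge_list with hashed=True, which maps arbitrary vertex names to indices and returns that mapping as a vertex property (the string_vals flag it passes comes from older graph-tool releases). A minimal standalone sketch with a toy edge list:

from graph_tool import Graph

g = Graph(directed=False)
name = g.add_edge_list([("a", "b"), ("b", "c")], hashed=True)
print([name[v] for v in g.vertices()])   # ['a', 'b', 'c']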
Example no. 8
 def split_gt():
     g = GTGraph()
     g.add_edge_list(adjacency)
     component_labels = label_components(g, directed=False)[0].a
     components = group(component_labels)
     result = mesh.submesh(components, only_watertight=only_watertight)
     return result
Example no. 9
def spc_querying_naive(g : graph_tool.Graph, paths, y, trust_own_predictions=True, weight=None, closed_interval=False):
    '''

    :param g:
    :param paths: list of paths
    :param y: ground truth
    :param weight:
    :return:
    '''
    known_labels = -np.ones(g.num_vertices())*np.inf
    budget = np.zeros(g.num_vertices())
    for i, path in enumerate(paths):
        if not trust_own_predictions or known_labels[path[0]] == -np.inf:
            budget[i] += 1
            known_labels[path[0]] = y[path[0]]
        if not trust_own_predictions or known_labels[path[-1]] == -np.inf:
            budget[i] += 1
            known_labels[path[-1]] = y[path[-1]]

        if known_labels[path[0]] == known_labels[path[-1]]:
            known_labels[path] = known_labels[path[0]]
        else:
            label_budget, new_labels = binarySearch(y[path], 0, len(path)-1, known_labels[path[0]], known_labels[path])
            known_labels[path] = new_labels
            budget[i] += label_budget
        if closed_interval:
            p = closure.compute_hull(g, np.where(known_labels == np.unique(y)[0])[0], weight, compute_closure=False)
            n = closure.compute_hull(g, np.where(known_labels == np.unique(y)[1])[0], weight, compute_closure=False)

            known_labels[p] = np.unique(y)[0]
            known_labels[n] = np.unique(y)[1]

    return known_labels, budget
Example no. 10
def build_graph_from_edges(edges):
    """returns Graph (a new one)
    """
    g = Graph()
    for u, v in edges:
        g.add_edge(u, v)
    return g
Example no. 11
 def __init__(self):
     self.g = Graph()
     self.dvertex_index = dict()
     self.vertex_label = self.g.new_vertex_property("string")
     self.g.vertex_properties["label"] = self.vertex_label
     self.edge_weight = self.g.new_edge_property("int")
     self.g.edge_properties["weight"] = self.edge_weight
Example no. 12
    def save(self, file_name, fmt="auto"):
        """ overload Graph.save to make output dotfiles pretty.
            This is entirely cosmetic. """
        u = self

        # add some properties to prettify dot output
        if fmt == "dot" or (fmt == "auto" and file_name.endswith(".dot")):
            u = GraphView(self)

            # add shape property according to vertex owners
            shape = u.new_vertex_property("string")
            for v in u.vertices():
                if u.vp.owner[v] == 1:
                    shape[v] = "box"
                else:
                    shape[v] = "diamond"
            u.vp.shape = shape

            # add label property according to priorities
            #u.vertex_properties['label'] = u.vertex_properties['priority']
            label = u.new_vertex_property("string")
            for v in u.vertices():
                prio = u.vertex_properties['priority'][v]
                name = u.vertex_index[v]
                label[v] = "%d (%d)" % (name, prio)
            u.vp.label = label

        Graph.save(u, file_name, fmt)
Example no. 13
def compute_shadow(g: gt.Graph,
                   A,
                   B,
                   weight=None,
                   dist_map=None,
                   comps=None,
                   hist=None,
                   B_hulls=None):
    A_closed = compute_hull(g, A, weight, dist_map, comps, hist)
    #B_closed = compute_hull(g, B, weight, dist_map, comps, hist)

    B_closed = np.zeros(g.num_vertices(), dtype=bool)
    B_closed[B] = True

    shadow = A_closed.copy()

    for x in range(g.num_vertices()):
        if A_closed[x] or B_closed[x]:
            continue
        if B_hulls is None:
            if np.any(
                    compute_hull(g, np.append(B, x), weight, dist_map, comps,
                                 hist, True) & A_closed):
                shadow[x] = True
        else:
            if np.any(B_hulls[x] & A_closed):
                shadow[x] = True

    return shadow
Example no. 14
    def uniform_random_sample(graph: Graph, num_vertices: int,
                              prev_state: UniformRandomSampleState,
                              args: argparse.Namespace) -> SampleState:
        """Uniform random sampling. All vertices are selected with the same probability.

        Parameters
        ----------
        graph : Graph
            the filtered graph from which to sample vertices
        num_vertices : int
            number of vertices in the unfiltered graph
        prev_state : UniformRandomSampleState
            the state of the previous sample in the stack. If there is no previous sample, an empty SampleState object
            should be passed in here.
        args : argparse.Namespace
            the command-line arguments provided by the user

        Returns
        -------
        state : SampleState
            the sample state with the sampled vertex ids (Note: these ids correspond to the filtered graph, and have
            to be mapped back to the unfiltered graph)
        """
        state = UniformRandomSampleState(graph.num_vertices(), prev_state)
        sample_num = int(
            (num_vertices * (args.sample_size / 100)) / args.sample_iterations)
        choices = np.setdiff1d(np.asarray(range(graph.num_vertices())),
                               state.sample_idx)
        state.sample_idx = np.concatenate(
            (state.sample_idx,
             np.random.choice(choices, sample_num, replace=False)),
            axis=None)
        return state
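The incremental draw above boils down to np.setdiff1d followed by np.random.choice without replacement. A minimal sketch of just that step, with toy sizes and no SampleState bookkeeping:

import numpy as np

already_sampled = np.array([0, 3])
candidates = np.setdiff1d(np.arange(10), already_sampled)   # vertices not drawn yet
new = np.random.choice(candidates, 3, replace=False)        # draw 3 more, uniformly
sample = np.concatenate((already_sampled, new))
print(sample)                                               # e.g. [0 3 7 1 5]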
Example no. 15
def build_closure(g, terminals, debug=False, verbose=False):
    # Build a closure graph over the terminal vertices: run a pruned BFS from each
    # terminal and connect it to the other terminals it reaches.
    terminals = list(terminals)
    # build closure
    gc = Graph(directed=False)
    gc.add_vertex(g.num_vertices())

    edges_with_weight = set()
    r2pred = {}

    for r in terminals:
        if debug:
            print('root {}'.format(r))
        vis = init_visitor(g, r)
        pbfs_search(g, source=r, terminals=terminals, visitor=vis)
        new_edges = set(get_edges(vis.dist, r, terminals))
        if debug:
            print('new edges {}'.format(new_edges))
        edges_with_weight |= new_edges
        r2pred[r] = vis.pred

    for u, v, c in edges_with_weight:
        gc.add_edge(u, v)

    eweight = gc.new_edge_property('int')
    weights = np.array([c for _, _, c in edges_with_weight])
    eweight.set_2d_array(weights)

    vfilt = gc.new_vertex_property('bool')
    vfilt.a = False
    for v in terminals:
        vfilt[v] = True
    gc.set_vertex_filter(vfilt)
    return gc, eweight, r2pred
Example no. 16
    def load(self, data):
        data = [x.strip() for x in data]
        # Create a new undirected graph
        self.graph = Graph(directed=False)

        # Label everything so I'm not going back and forth between hashes
        v_name = self.graph.new_vertex_property('string')
        self.graph.vertex_properties["name"] = v_name

        self.planets = dict()
        # Create all the vertexes first, so that creating the edges (orbits)
        # is easier
        for item in data:
            # Add vertexes, as needed
            src, dest = item.split(")")
            if src not in self.planets:
                v_src = self.graph.add_vertex()
                v_name[v_src] = src
                self.planets[src] = v_src
            if dest not in self.planets:
                v_dest = self.graph.add_vertex()
                v_name[v_dest] = dest
                self.planets[dest] = v_dest
            # Add edge
            self.graph.add_edge(self.planets[src], self.planets[dest])
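A hedged note on the input load() expects: each entry is a string of the form "A)B", meaning B orbits A (the owning class name below is purely illustrative):

data = ["COM)B", "B)C", "C)D"]   # builds a four-vertex chain COM-B-C-D
# orbit_map = OrbitMap()         # hypothetical owner of load()
# orbit_map.load(data)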
Example no. 17
def spc_querying_naive_multiclass(g : graph_tool.Graph, paths, y, trust_own_predictions=True, weight=None):
    '''

    :param g:
    :param paths: list of paths
    :param y: ground truth
    :param weight:
    :return:
    '''
    known_labels = -np.ones(g.num_vertices())*np.inf
    budget = np.zeros(g.num_vertices())
    for i, path in enumerate(paths):
        if not trust_own_predictions or known_labels[path[0]] == -np.inf:
            budget[i] += 1
            known_labels[path[0]] = y[path[0]]
        if not trust_own_predictions or known_labels[path[-1]] == -np.inf:
            budget[i] += 1
            known_labels[path[-1]] = y[path[-1]]

        if known_labels[path[0]] == known_labels[path[-1]]:
            known_labels[path] = known_labels[path[0]]
        else:
            mid, label_budget = binarySearch(y[path], 0, len(path)-1, known_labels[path[0]], known_labels[path])
            budget[i] += label_budget
            known_labels[path[0:mid+1]] = known_labels[path[0]]
            known_labels[path[mid+1:]] = known_labels[path[-1]]


    return known_labels, budget
Example no. 18
def diagram_to_graph(diagram):
    '''Convert specified chord diagram to the equivalent graph.
       Construct the graph with a vertex for each diagram chord, and an edge
       from the vertex for each node to the next greater node as though manually
       drawing the planar diagram from the chord diagram, such that edges go
       from nodes (1 -> 2), (2 -> 3),..., (n-1 -> n), (n -> 1).

       Arguments:
           diagram: A list of chords, where each chord is a node tuple.
       Returns:
           Returns the graph corresponding to the input diagram.
       Raises:
           None
    '''
    graph = Graph()

    # Create a vertex for each chord - same vertex stored for each node of chord
    vertex_by_node = {}
    for chord in diagram:
        vertex = graph.add_vertex()
        vertex_by_node[chord[0]] = vertex
        vertex_by_node[chord[1]] = vertex

    # As though drawing the planar diagram from the chord diagram by hand,
    # add an edge from 1st node to 2nd, 2nd to 3rd, and so on, until closing
    # the loop back to the 1st node.
    nodes = sorted(vertex_by_node.keys())
    n_nodes = len(nodes)
    for i in range(n_nodes):
        j = (i + 1) % n_nodes
        graph.add_edge(vertex_by_node[nodes[i]], vertex_by_node[nodes[j]])

    return graph
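A hedged usage sketch for diagram_to_graph: a two-chord diagram on nodes 1..4, each tuple being one chord:

example_diagram = [(1, 3), (2, 4)]
g = diagram_to_graph(example_diagram)
print(g.num_vertices(), g.num_edges())   # 2 4: one vertex per chord, one edge per node-to-next-node step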
Example no. 19
def find_cycles2(args, wires):
    # implemented with graph-tools
    g = Graph()
    t_a = datetime.now()
    lst = []
    for i in range(0, len(wires)):
        lst.append(g.add_vertex())

    for i in range(0, len(wires)):
        if wires[i].type != "inp":
            for j in range(len(wires[i].operands)):
                g.add_edge(lst[wires[i].operands[j].index],
                           lst[wires[i].index])

    cycles = []
    for c in all_circuits(g):
        if len(cycles) > 100000:
            logging.info("number of cycles is limited.")
            break
        cycles.append(c.tolist())

    t_b = datetime.now()
    logging.info("time of finding cycles: " + diff(t_a, t_b))
    logging.info("there are " + str(len(cycles)) + " cycles")
    if args.p:
        logging.info("list of cycles:")
        for cycle in cycles:
            tmp = ""
            for i in range(len(cycle)):
                tmp += wires[cycle[i]].name + " "
            logging.info(tmp)
        print()
    return cycles
Example no. 20
def s2(g: gt.Graph, weight_prop: gt.EdgePropertyMap, labels, budget=20):
    # S2-style querying: repeatedly query a vertex label, cut edges whose endpoints
    # carry different known labels, and finally run label propagation on the
    # original distance matrix.
    L = set()

    n = g.num_vertices()

    known_labels = -np.ones(n) * np.inf

    W = gt.topology.shortest_distance(g, weights=weight_prop).get_2d_array(
        range(n))  #original distance map

    x = np.random.choice(list(set(range(n)).difference(L)))
    while budget > 0:
        known_labels[x] = labels[x]
        L.add(x)
        if len(L) == n:
            break
        budget -= 1
        to_remove = []
        for e in g.get_out_edges(x):
            if known_labels[e[1]] >= 0 and known_labels[
                    e[1]] != known_labels[x]:
                to_remove.append(e)

        for e in to_remove:
            g.remove_edge(g.edge(e[0], e[1]))

        #mid_point = mssp(g, weight_prop, L, known_labels)

        if False:
            x = int(mid_point)
        else:
            x = np.random.choice(list(set(range(n)).difference(L)))

    return label_propagation(W, known_labels, np.unique(labels))
Example no. 21
def save_degree_distribution(args: argparse.Namespace, graph: Graph):
    """Saves the in and out degrees of all vertices in the graph.

    Parameters
    ----------
    args : argparse.Namespace
        the command-line arguments provided
    graph : Graph
        the graph object
    """
    write_header = False
    if not os.path.isfile(args.csv + ".csv"):
        directory = os.path.dirname(args.csv + ".csv")
        if directory not in [".", ""]:
            os.makedirs(directory, exist_ok=True)
        write_header = True
    num_vertices = graph.num_vertices()
    out_degrees = [v.out_degree() for v in graph.vertices()]
    in_degrees = [v.in_degree() for v in graph.vertices()]
    with open(args.csv + ".csv", "a") as details_file:
        writer = csv.writer(details_file)
        if write_header:
            writer.writerow(["Num Vertices", "In/Out", "Degree"])
        for degree in out_degrees:
            writer.writerow([num_vertices, "Out", degree])
        for degree in in_degrees:
            writer.writerow([num_vertices, "In", degree])
    exit()
Example no. 22
 def split_gt():
     g = GTGraph()
     g.add_edge_list(adjacency)
     component_labels = label_components(g, directed=False)[0].a
     components = group(component_labels)
     result = mesh.submesh(components, only_watertight=only_watertight)
     return result
Example no. 23
    def __init__(
            self,
            seed_str,
            name,
            file_extension='gml',
            vertex_schema={
                'gene': 'vector<bool>',
                'gen': 'int',
                'fitness': 'vector<long>',
                'score': 'long'
            },
            edge_schema={
                'label': 'string',
                'gen': 'int'
            }):

        self.seed = seed_str
        self.name = name
        self.file_extension = file_extension
        self.graph = Graph()

        # Create graph properties
        self.graph.gp.labels = self.graph.new_gp('vector<string>')
        self.graph.gp.labels = [seed_str]

        self.graph.gp.name = self.graph.new_gp('string')
        self.graph.gp.name = self.name

        # Create vertex properties
        for key in vertex_schema:
            self.graph.vp[key] = self.graph.new_vp(vertex_schema[key])

        # Create edge properties
        for key in edge_schema:
            self.graph.ep[key] = self.graph.new_ep(edge_schema[key])
Example no. 24
    def __init__(self):
        """
        Constructor of the abstract SegmentationGraph object.

        Returns:
            None
        """
        self.graph = Graph(directed=False)
        """graph_tool.Graph: a graph object storing the segmentation graph
        topology, geometry and properties (initially empty).
        """

        # Add "internal property maps" to the graph.
        # vertex property for storing the xyz coordinates of the corresponding
        # vertex:
        self.graph.vp.xyz = self.graph.new_vertex_property("vector<float>")
        # edge property for storing the distance between the connected vertices:
        self.graph.ep.distance = self.graph.new_edge_property("float")

        self.coordinates_to_vertex_index = {}
        """dict: a dictionary mapping the vertex coordinates (x, y, z) to the
        vertex index.
        """
        self.coordinates_pair_connected = set()
        """set: a set storing pairs of vertex coordinates that are
Example no. 25
def spc_querying_experiments(g: gt.Graph, weight_prop: gt.EdgePropertyMap, spc,
                             labels):
    print("correct labels: ", labels)
    print("================naive=================")
    a, b = spc_querying_naive(g, spc, labels)
    print("pred: ", a)
    print("queries: ", b, np.sum(b))
    print("correct: ", np.sum(a == labels),
          np.sum(a == labels) / g.num_vertices())
    print("================interval================")
    a, b = spc_querying_with_closure(g, spc, weight_prop, labels, False)
    print("pred: ", a)
    print("queries: ", b, np.sum(b))
    print("correct: ", np.sum(a == labels),
          np.sum(a == labels) / g.num_vertices())
    print("================closure================")
    #a, b = spc_querying_with_closure(g, spc, weight_prop, labels)
    #print("pred: ", a)
    #print("queries: ", b, np.sum(b))
    #print("correct: ", np.sum(a == labels), np.sum(a == labels) / g.num_vertices())
    #print("================s2================")
    new_labels = np.zeros(g.num_vertices())
    new_labels[labels == np.unique(labels)[1]] = 1
    s2_labelling = shortest_shortest_path_querying.s2(g, weight_prop, labels,
                                                      int(np.sum(b)))
    print("accuracy s2 after label_prop: ",
          np.sum(s2_labelling == new_labels) / g.num_vertices())
Example no. 26
def edges2graph(g, edges):
    # Build a directed graph on g's vertex set from the given edge list and keep
    # only the vertices touched by those edges (via filter_nodes_by_edges).
    tree = Graph(directed=True)
    for _ in range(g.num_vertices()):
        tree.add_vertex()
    for u, v in edges:
        tree.add_edge(int(u), int(v))

    return filter_nodes_by_edges(tree, edges)
Example no. 27
def avg_shortest_path(g: GT.Graph):
    if g.num_vertices() < 10_000:
        d = GT.topology.shortest_distance(
            g)  #NB: allocate memory, isn't an iterator!
        return sum((max(_d, key=lambda x: x if x != INF_INT else -1)
                    for _d in d)) / (g.num_vertices())
    else:
        return None  #TODO
Example no. 28
 def is_planar(self):
     """
     See the following library:
     https://graph-tool.skewed.de/static/doc/topology.html
     """
     edges = self.obtain_edge_list()
     graph = Graph()
     graph.add_vertex(len(self.adj_matrix))
     graph.add_edge_list(edges)
     return is_planar(graph)  # graph_tool.topology.is_planar, assumed imported
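A standalone sketch of a planarity check with graph_tool.topology.is_planar, the topology routine the docstring points to (toy 4-cycle):

from graph_tool import Graph
from graph_tool.topology import is_planar

g = Graph(directed=False)
g.add_vertex(4)
g.add_edge_list([(0, 1), (1, 2), (2, 3), (3, 0)])
print(is_planar(g))   # True: a 4-cycle is planar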
Example no. 29
 def __init__(self, mapfile):
     self._mapfile = mapfile
     self.DIRECTION_index = 6
     self.PATHCLASS_index = 20
     self.g = Graph()
     self.g.edge_properties["length"] = self.g.new_edge_property("double")
     self.g.edge_properties["level"] = self.g.new_edge_property("int")
     self.g.vertex_properties["pos"] = self.g.new_vertex_property("vector<double>")
     self.cross_pos_index = {}
Example no. 30
class Network:
    def __init__(self):
        self.g = Graph(directed=True)
        self.player_id_to_vertex = {}
        self.pairs = {}
        self.g.vertex_properties['player_id'] = \
            self.g.new_vertex_property("string")
        self.g.vertex_properties['player_coords'] = \
            self.g.new_vertex_property("vector<float>")
Example no. 31
 def components_graphtool():
     g = GTGraph()
     # make sure all the nodes are in the graph
     if min_len <= 1:
         g.add_vertex(node_count)
     g.add_edge_list(edges)
     component_labels = label_components(g, directed=False)[0].a
     components = grouping.group(component_labels, min_len=min_len)
     return components
Example no. 32
def build_closure(g, terminals, p=None, debug=False, verbose=False):
    """build the transitive closure on terminals"""
    def get_edges(dist, root, terminals):
        """get adjacent edges to root with weight"""
        return {(root, t, dist[t])
                for t in terminals if dist[t] != -1 and t != root}

    terminals = list(terminals)
    gc = Graph(directed=False)

    gc.add_vertex(g.num_vertices())

    edges_with_weight = set()
    r2pred = {}  # root to predecessor map (from bfs)

    # shortest path to all other nodes
    for r in terminals:
        if debug:
            print('root {}'.format(r))

        targets = list(set(terminals) - {r})
        dist_map, pred_map = shortest_distance(g,
                                               source=r,
                                               target=targets,
                                               weights=p,
                                               pred_map=True)
        dist_map = dict(zip(targets, dist_map))
        # print(dist_map)
        # print(pred_map)
        new_edges = get_edges(dist_map, r, targets)
        # if p is None:
        #     vis = init_visitor(g, r)
        #     bfs_search(g, source=r, visitor=vis)
        #     new_edges = set(get_edges(vis.dist, r, terminals))
        # else:
        #     print('weighted graph')

        if debug:
            print('new edges {}'.format(new_edges))
        edges_with_weight |= new_edges
        # r2pred[r] = vis.pred
        r2pred[r] = pred_map

    for u, v, c in edges_with_weight:
        gc.add_edge(u, v)

    # edge weights
    eweight = gc.new_edge_property('int')
    weights = np.array([c for _, _, c in edges_with_weight])
    eweight.set_2d_array(weights)

    vfilt = gc.new_vertex_property('bool')
    vfilt.a = False
    for v in terminals:
        vfilt[v] = True
    gc.set_vertex_filter(vfilt)
    return gc, eweight, r2pred
Example no. 33
def copy_edge_attributes(g_to: gt.Graph, edge_to: gt.Edge, g_from: gt.Graph,
                         edge_from: gt.Edge):
    for p_type, ep_name in g_from.ep.properties:
        if p_type != 'e':
            continue
        old_ep = g_from.ep[ep_name]
        if ep_name not in g_to.ep:
            g_to.ep[ep_name] = g_to.new_ep(old_ep.value_type())
        new_ep = g_to.ep[ep_name]
        new_ep[edge_to] = deepcopy(old_ep[edge_from])
Example no. 34
def copy_node_attributes(g_to: gt.Graph, node_to: gt.Vertex, g_from: gt.Graph,
                         node_from: gt.Vertex):
    for p_type, vp_name in g_from.vp.properties:
        if p_type != 'v':
            continue
        old_vp = g_from.vp[vp_name]
        if vp_name not in g_to.vp:
            g_to.vp[vp_name] = g_to.new_vp(old_vp.value_type())
        new_vp = g_to.vp[vp_name]
        new_vp[node_to] = deepcopy(old_vp[node_from])
Example no. 35
def build_minimum_tree(g, root, terminals, edges, directed=True):
    """remove redundant edges from `edges` so that root can reach each node in terminals
    """
    # build the tree
    t = Graph(directed=directed)

    for _ in range(g.num_vertices()):
        t.add_vertex()

    for (u, v) in edges:
        t.add_edge(u, v)

    # mask out redundant edges
    vis = init_visitor(t, root)
    pbfs_search(t, source=root, terminals=list(terminals), visitor=vis)

    minimum_edges = {e
                     for u in terminals
                     for e in extract_edges_from_pred(t, root, u, vis.pred)}
    # print(minimum_edges)
    efilt = t.new_edge_property('bool')
    efilt.a = False
    for u, v in minimum_edges:
        efilt[u, v] = True
    t.set_edge_filter(efilt)

    return filter_nodes_by_edges(t, minimum_edges)
Example no. 36
class RoadMap(object):
    def __init__(self, mapfile):
        self._mapfile = mapfile
        self.DIRECTION_index = 6
        self.PATHCLASS_index = 20
        self.g = Graph()
        self.g.edge_properties["length"] = self.g.new_edge_property("double")
        self.g.edge_properties["level"] = self.g.new_edge_property("int")
        self.g.vertex_properties["pos"] = self.g.new_vertex_property("vector<double>")
        self.cross_pos_index = {}

    def load(self):
        if self._mapfile[-3:] != 'shp':
            self.g = load_graph(self._mapfile)
            return

        try:
            sf = shapefile.Reader(self._mapfile)
        except Exception as e:
            print(str(e))
            return False
        roads_records = sf.shapeRecords()  # get the road segment records
        for road_record in roads_records:
            cross_s_index = self.add_cross(road_record.shape.points[0])
            cross_e_index = self.add_cross(road_record.shape.points[-1])
            self.add_road_edge(cross_s_index, cross_e_index, road_record)
            if int(road_record.record[self.DIRECTION_index]) == 0:  # if the road segment is a two-way road
                self.add_road_edge(cross_e_index, cross_s_index, road_record)
        return True

    def has_edge(self, s_vertex, e_vertex):
        if self.g.num_vertices() >= max(s_vertex, e_vertex):
            return self.g.edge(s_vertex, e_vertex)
        else:
            return None

    def add_cross(self, cross_pos):
        if cross_pos in self.cross_pos_index:
            return self.cross_pos_index.get(cross_pos)
        else:
            cross_index = self.g.add_vertex()
            self.g.vp.pos[cross_index] = cross_pos
            self.cross_pos_index[cross_pos] = cross_index
            return cross_index

    def add_road_edge(self, s_vertex, e_vertex, road):
        if self.has_edge(s_vertex, e_vertex):
            return self.g.edge(s_vertex, e_vertex)
        else:
            edge = self.g.add_edge(s_vertex, e_vertex)
            self.g.ep.level[edge] = int(road.record[self.PATHCLASS_index])
            self.g.ep.length[edge] = self.road_length(road)
            return edge

    @staticmethod
    def road_length(road):
        length = 0
        for sub_road in zip(road.shape.points[:-1], road.shape.points[1:]):
            length += distance.euclidean(sub_road[0], sub_road[1])
        return length
Example no. 37
def to_directed(g, t, root):
    # Orient the tree t away from root: collect the edges on the shortest paths
    # from root to every leaf and rebuild them as directed edges.
    new_t = Graph(directed=True)
    all_edges = set()
    leaves = [v for v in t.vertices()
              if (v.out_degree() + v.in_degree()) == 1 and v != root]
    for target in leaves:
        path = shortest_path(t, source=root, target=target)[0]
        edges = set(zip(path[:-1], path[1:]))
        all_edges |= edges

    for _ in range(g.num_vertices()):
        new_t.add_vertex()
    for u, v in all_edges:
        new_t.add_edge(int(u), int(v))
    return new_t
Example no. 38
    def _parse_data(self):
        """
        extract internal points (degree > 2) and endpoints (degree = 1)
        extract segments
        """
        if self.verts == None or self.edges == None:
            print 'please first call read_skel_file function'
        else:
            self.verts = np.array(self.verts, dtype=np.float)
            self.edges = np.array(self.edges, dtype=np.int)
            terminal_index = []
            junction_index = []
            self.skel_graph = Graph(directed=False)
            self.skel_graph.add_vertex(len(self.verts))
            for edge in self.edges :
                self.skel_graph.add_edge(self.skel_graph.vertex(edge[0]), self.skel_graph.vertex(edge[1]))

            for v in self.skel_graph.vertices():
                if v.out_degree() == 2 :
                    continue
                elif v.out_degree() == 1 :
                    terminal_index.append(int(v))
                elif v.out_degree() > 2 :
                    junction_index.append(int(v))

            self.terminal = self.verts[terminal_index]
            self.junction = self.verts[junction_index]
            self.terminal_index = terminal_index
            self.junction_index = junction_index
            self.feature_node_index = junction_index + terminal_index 
            self.feature_node = self.verts[self.feature_node_index]

            """
Example no. 39
def facets_gt(mesh):
    '''
    Returns lists of facets of a mesh. 
    Facets are defined as groups of faces which are both adjacent and parallel
    
    facets returned reference indices in mesh.faces
    If return_area is True, both the list of facets and their area are returned. 
    '''
    face_idx       = mesh.face_adjacency()
    normal_pairs   = mesh.face_normals[[face_idx]]
    parallel       = np.abs(np.sum(normal_pairs[:,0,:] * normal_pairs[:,1,:], axis=1) - 1) < TOL_PLANAR
    graph_parallel = GTGraph()
    graph_parallel.add_edge_list(face_idx[parallel])

    connected  = label_components(graph_parallel, directed=False)[0].a
    facets_idx = group(connected, min_length=2)
    return facets_idx
Example no. 40
def load_graph(infile):
    inmatrix = np.loadtxt(infile, dtype=np.dtype('uint32'), delimiter=" ")
    numv = np.amax(inmatrix[:,0:2])

    #print numv, inmatrix[:,0:2]

    g = Graph(directed=False)
    edge_weights = g.new_edge_property("double")
    g.edge_properties["weights"] = edge_weights
    vlist = list(g.add_vertex(numv))

    for i in inmatrix:
        edge = g.add_edge(vlist[i[0]-1], vlist[i[1]-1]) # need to convert from 1-based index in file to 0-based
        edge_weights[edge] = i[2]

    remove_parallel_edges(g)
    return g
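A hedged usage sketch for load_graph above; the file name is illustrative, the numpy and graph_tool imports (including remove_parallel_edges) are assumed from the snippet's module, indices in the file are 1-based, and the uint32 dtype means the weight column is read as an unsigned integer:

with open("toy_edges.txt", "w") as fh:
    fh.write("1 2 3\n2 3 4\n")            # one edge per line: u v weight
g = load_graph("toy_edges.txt")
print(g.num_vertices(), g.num_edges())    # 3 2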
Example no. 41
 def test_sredni_wspolczynnik_klasteryzacji_na_sztywno_graf_pelny(self):
     # self.assertEqual(7. / 15, self.stat.sredni_wspolczynnik_klasteryzacji_moj())
     # print self.stat.sredni_wspolczynnik_klasteryzacji_moj()
     g = Graph(directed=False)
     v0 = g.add_vertex()
     v1 = g.add_vertex()
     v2 = g.add_vertex()
     v3 = g.add_vertex()
     g.add_edge(v0, v1)
     g.add_edge(v0, v2)
     g.add_edge(v0, v3)
     g.add_edge(v1, v2)
     g.add_edge(v1, v3)
     g.add_edge(v2, v3)
     lc = local_clustering(g, undirected=True)
     self.assertEqual(1.0, vertex_average(g, lc)[0])
Example no. 42
def split_gt(mesh, check_watertight=True, only_count=False):
    g = GTGraph()
    g.add_edge_list(mesh.face_adjacency())    
    component_labels = label_components(g, directed=False)[0].a
    if check_watertight: 
        degree = g.degree_property_map('total').a
    meshes     = deque()
    components = group(component_labels)
    if only_count: return len(components)

    for i, current in enumerate(components):
        fill_holes = False
        if check_watertight:
            degree_3 = degree[current] == 3
            degree_2 = degree[current] == 2
            if not degree_3.all():
                if np.logical_or(degree_3, degree_2).all():
                    fill_holes = True
                else: 
                    continue

        # these faces have the original vertex indices
        faces_original = mesh.faces[current]
        face_normals   = mesh.face_normals[current]
        # we find the unique vertex indices, so we can reindex from zero
        unique_vert    = np.unique(faces_original)
        vertices       = mesh.vertices[unique_vert]
        replacement    = np.zeros(unique_vert.max()+1, dtype=np.int64)
        replacement[unique_vert] = np.arange(len(unique_vert))
        faces                    = replacement[faces_original]
        new_mesh = mesh.__class__(faces        = faces, 
                                  face_normals = face_normals, 
                                  vertices     = vertices)
        new_meta = deepcopy(mesh.metadata)
        if 'name' in new_meta:
            new_meta['name'] = new_meta['name'] + '_' + str(i)
        new_mesh.metadata.update(new_meta)
        if fill_holes: 
            try:              new_mesh.fill_holes(raise_watertight=True)
            except MeshError: continue
        meshes.append(new_mesh)
    return list(meshes)
Example no. 43
 def __init__(self, number_of_vertices, graph_type):
     super().__init__(number_of_vertices, graph_type)
     # Graph tool creates directed multigraph by default.
     self._graph = Graph()
     self._graph.add_vertex(number_of_vertices)
     self._graph.vertex_properties["cell"] = self._graph.new_vertex_property(
         "object", number_of_vertices * [BoardCell()]
     )
     self._graph.edge_properties["direction"
                                ] = self._graph.new_edge_property("object")
     self._graph.edge_properties["weight"
                                ] = self._graph.new_edge_property("int")
Example no. 44
    def components_graphtool():
        """
        Find connected components using graphtool
        """
        g = GTGraph()
        # make sure all the nodes are in the graph
        g.add_vertex(node_count)
        # add the edge list
        g.add_edge_list(edges)

        labels = np.array(label_components(g, directed=False)[0].a,
                          dtype=np.int64)[:node_count]

        # we have to remove results that contain nodes outside
        # of the specified node set and reindex
        contained = np.zeros(node_count, dtype=bool)
        contained[nodes] = True
        index = np.arange(node_count, dtype=np.int64)[contained]

        components = grouping.group(labels[contained], min_len=min_len)
        components = np.array([index[c] for c in components])

        return components
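The helper above leans on graph_tool's label_components; a standalone sketch on a tiny two-component graph:

import numpy as np
from graph_tool import Graph
from graph_tool.topology import label_components

g = Graph(directed=False)
g.add_vertex(5)
g.add_edge_list([(0, 1), (1, 2), (3, 4)])
labels = np.asarray(label_components(g, directed=False)[0].a)
print(labels)   # e.g. [0 0 0 1 1]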
Example no. 45
def user_network(storage, track, session):
    g = Graph()
    users = defaultdict(g.add_vertex)

    g.graph_properties["track"] = g.new_graph_property("string", track)
    g.graph_properties["session"] = g.new_graph_property("string", session)

    g.edge_properties["created_at"] = g.new_edge_property("int64_t")

    for tweet in storage:
        tweeter_id = tweet["user__id_str"]
        origin_id = tweet["retweeted_status__user__id_str"]

        created_at = arrow.get(tweet["created_at"], DATE_FORMAT).timestamp

        if origin_id:
            edge = g.add_edge(users[tweeter_id], users[origin_id])
            g.edge_properties["created_at"][edge] = created_at

    return g
Example no. 46
def graph_from_dataframes(vertex_df, edge_df):
    '''Re-creates a Graph object with PropertyMaps taken
    from the vertex_df and edge_df DataFrames

    Parameters:
    ===========
    vertex_df: a DataFrame with an index named 'vertex_index'
    edge_df: a DataFrame with a multi-index named ('source', 'target')

    Returns:
    ========
    graph: a graph-tool Graph with PropertyMaps copied
        from the columns of the input DataFrames
    '''

    graph = Graph(directed=True)

    vertex_index = vertex_df.index.get_level_values(level='vertex_index')
    vertices = graph.add_vertex(n=vertex_index.shape[0])
    for col in vertex_df.columns:
        in_type = vertex_df[col].dtype.name
        try:
            dtype = ALIASES[in_type]
        except KeyError:
            log.info('Data type {} not supported'.format(in_type))
            continue
        prop = graph.new_vertex_property(dtype)
        prop.fa = vertex_df[col]
        graph.vertex_properties[col] = prop

    src = edge_df.index.names.index('source')
    trgt = edge_df.index.names.index('target')
    ### TODO: use the list edge creation
    for tup in edge_df.index:
        source, target = tup[src], tup[trgt]
        try:
            edge = graph.add_edge(source, target)
        except ValueError:
            log.info('Invalid vertex in (source: {}, target: {})'.format(source, target))
    for col in edge_df.columns:
        in_type = edge_df[col].dtype.name
        try:
            dtype = ALIASES[in_type]
        except KeyError:
            log.info('Data type {} not supported'.format(in_type))
            continue
        prop = graph.new_edge_property(dtype)
        prop.fa = edge_df[col]
        graph.edge_properties[col] = prop

    return graph
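A hedged usage sketch for graph_from_dataframes; the DataFrames are shaped as the docstring describes, while ALIASES and log come from the surrounding module and are assumed to map numpy dtype names such as 'float64' to graph-tool value types:

import pandas as pd

vertex_df = pd.DataFrame({"x": [0.0, 1.0, 2.0]},
                         index=pd.Index([0, 1, 2], name='vertex_index'))
edge_df = pd.DataFrame({"w": [1.0, 2.0]},
                       index=pd.MultiIndex.from_tuples([(0, 1), (1, 2)],
                                                       names=('source', 'target')))
g = graph_from_dataframes(vertex_df, edge_df)
print(g.num_vertices(), g.num_edges())   # 3 2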
Example no. 47
    def create_graph(cls, edges, is_directed=True):
        """Create a graph-tool type graph from a list of edges"""
        g = Graph()
        g.set_directed(is_directed)
        label2index = dict()
        label = g.new_vertex_property('int32_t')
        g.vertex_properties['label'] = label

        for v1_label, v2_label in edges:
            cls.add_vertex(v1_label, label2index, g)
            cls.add_vertex(v2_label, label2index, g)

            v1, v2 = label2index[v1_label], label2index[v2_label]
            g.add_edge(v1, v2)

        return g, label2index
Example no. 48
def build_closure(g, terminals,
                  debug=False,
                  verbose=False):
    terminals = list(terminals)
    # build closure
    gc = Graph(directed=False)

    for _ in range(g.num_vertices()):
        gc.add_vertex()

    edges_with_weight = set()
    r2pred = {}

    for r in terminals:
        if debug:
            print('root {}'.format(r))
        vis = init_visitor(g, r)
        pbfs_search(g, source=r, terminals=terminals, visitor=vis)
        new_edges = set(get_edges(vis.dist, r, terminals))
        if debug:
            print('new edges {}'.format(new_edges))
        edges_with_weight |= new_edges
        r2pred[r] = vis.pred
    
    for u, v, c in edges_with_weight:
        gc.add_edge(u, v)
        
    eweight = gc.new_edge_property('int')
    weights = np.array([c for _, _, c in edges_with_weight])
    eweight.set_2d_array(weights)

    vfilt = gc.new_vertex_property('bool')
    vfilt.a = False
    for v in terminals:
        vfilt[v] = True
    gc.set_vertex_filter(vfilt)
    return gc, eweight, r2pred
Example no. 49
    def __init__(self, skel1, skel2, centricity=.5, length=.5, distorted=20.):
        if skel1 is not None and skel2 is not None :
            self.skel1 = skel1
            self.skel2 = skel2
            self.centricity_threhold = centricity
            self.length_threhold = length
            self.distorted_threhold = distorted
            self.skel1.calc_skel_properties()
            self.skel2.calc_skel_properties()
            # use index instead of real value
            skel1_index = np.arange(len(self.skel1.feature_node_index))
            skel2_index = np.arange(len(self.skel2.feature_node_index))
            junc1_num = len(skel1.junction_index)
            junc2_num = len(skel2.junction_index)

            #print 'skel1 normalized_verts\n', skel1.normalized_feature_verts
            #print 'skel2 normalized_verts\n', skel2.normalized_feature_verts

            #candidate matched pairs
            junction_pairs = []
            junc_term_pairs = []
            terminal_pairs = []
            for i, j in itertools.product(skel1_index, skel2_index):
                if self.test_node_centricity(c1=i, c2=j):
                    if i < junc1_num and j < junc2_num: # only junction nodes
                        junction_pairs.append([i,j])
                    elif i >= junc1_num and j >= junc2_num: # only terminal nodes
                        terminal_pairs.append([i,j])
                    else:
                        junc_term_pairs.append([i,j])

            self.junction_pairs = np.array(junction_pairs)
            self.terminal_pairs = np.array(terminal_pairs)
            self.junc_term_pairs = np.array(junc_term_pairs)
            #self.all_junc_pairs = np.vstack((self.junction_pairs, self.junc_term_pairs))

            self.vote_tree = Graph(directed=False)
            self.node_pair = self.vote_tree.new_vertex_property("vector<short>")

            self._construct_voting_tree()
        else:
            print 'need input two skeleton to match'
Example no. 50
	def __init__ (self, dicProp={"Name": "Graph", "Type": "None", "Weighted": False}, graph=None):
		''' init from properties '''
		self.dicProperties = deepcopy(dicProp)
		self.dicGetProp = { "Reciprocity": get_reciprocity, "Clustering": get_clustering, "Assortativity": get_assortativity,
							"Diameter": get_diameter, "SCC": get_num_scc, #"Spectral radius": get_spectral_radius, 
							"WCC": get_num_wcc, "InhibFrac": get_inhib_frac }
		self.dicGenGraph = { "Erdos-Renyi": gen_er, "Free-scale": gen_fs, "EDR": gen_edr }
		# create a graph
		if graph != None:
			# use the one furnished
			self.__graph = graph
			self.update_prop()
			self.bPropToDate = True
		elif dicProp["Type"] == "None":
			# create an empty graph
			self.__graph = Graph()
			self.bPropToDate = False
		else:
			# generate a graph of the requested type
			self.__graph = self.dicGenGraph[dicProp["Type"]](self.dicProperties)
			self.update_prop()
			self.set_name()
			self.bPropToDate = True
Example no. 51
def graph_from_dataframes(vertex_df, edge_df):
    '''Re-creates a Graph object with PropertyMaps taken
    from the vertex_df and edge_df DataFrames

    Parameters:
    ===========
    vertex_df: a DataFrame with an index named 'vertex_index'
    edge_df: a DataFrame with a multi-index named ('source', 'target')

    Returns:
    ========
    graph: a graph-tool Graph with PropertyMaps copied
        from the columns of the input DataFrames
    '''

    graph = Graph(directed=True)

    vertex_index = vertex_df.index.get_level_values(level='vertex_index')
    vertices = graph.add_vertex(n=vertex_index.shape[0])
    for col in vertex_df.columns:
        dtype = ALIASES[vertex_df[col].dtype.name]
        prop = graph.new_vertex_property(dtype)
        prop.a = vertex_df[col]
        graph.vertex_properties[col] = prop

    src = edge_df.index.names.index('source')
    trgt = edge_df.index.names.index('target')
    ### TODO: use the list edge creation
    for tup in edge_df.index:
        source, target = tup[src], tup[trgt]
        edge = graph.add_edge(source, target)

    for col in edge_df.columns:
        dtype = ALIASES[edge_df[col].dtype.name]
        prop = graph.new_edge_property(dtype)
        prop.a = edge_df[col]
        graph.edge_properties[col] = prop
    return graph
Example no. 52
    def parse_graph_from_string(self, graphML_string):
        dom = minidom.parseString(graphML_string)
        root = dom.getElementsByTagName("graphml")[0]
        graph = root.getElementsByTagName("graph")[0]
        name = graph.getAttribute('id')

        g = Graph(directed=False)

        vpos = g.new_vertex_property("vector<double>")
        vertex_by_id = {}
        for node in graph.getElementsByTagName("node"):
            id = node.getAttribute('id')
            n = g.add_vertex()
            vertex_by_id[id] = n

            #right now only the positions are available
            for attr in node.getElementsByTagName("data"):
                if attr.firstChild:
                    key=attr.getAttribute("key")
                    #n[key] = attr.firstChild.data
                    if(key=="x"):
                        x=attr.firstChild.data
                    elif(key=="y"):
                        y=attr.firstChild.data

            vpos[n] = (float(x), float(y))

        g.vertex_properties["pos"]=vpos

        # have to work around the directed graph written by the server
        for edge in graph.getElementsByTagName("edge"):
            source = vertex_by_id[edge.getAttribute('source')]
            dest = vertex_by_id[edge.getAttribute('target')]

            if g.edge(dest, source) is None:
                g.add_edge(source, dest)

        return g
Example no. 53
    def _filter_short_branch(self, filter=False, short=30):
        """
        Filter out very short branches. This may not be right for some models; for models with a flat part it is.
        I will test how this affects the final matching results.
        To delete nodes: swap each one with the last vertex, then delete the last.
        """
        if filter == False:
            self.verts = self.verts_init
            self.edges = self.edges_init
        else:
            init_graph = Graph(directed=False)
            init_graph.add_vertex(len(self.verts_init))
            for edge in self.edges_init:
                init_graph.add_edge(init_graph.vertex(edge[0]), init_graph.vertex(edge[1]))

            terminal_node = []
            for v in init_graph.vertices():
                if v.out_degree() == 1:
                    terminal_node.append(v)

            visitor = DepthVisitor()
            short_nodes = []
            for tn in terminal_node:
                search.dfs_search(init_graph, tn, visitor)
                tmp_node = visitor.get_short_branch(min_length=short)
                visitor.reset()
                for n in tmp_node:
                    short_nodes.append(n)

            ## get edges on the short paths
            short_nodes = list(set(short_nodes))
            short_edges = []
            temp_verts = self.verts_init[:]
            v_num = len(self.verts_init)
            if len(short_nodes):
                for v in reversed(sorted(short_nodes)):
                    for ve in init_graph.vertex(v).out_edges():
                        short_edges.append(ve)

                ## delete edges first, then vertex
                short_edges = list(set(short_edges))
                for e in short_edges:
                    init_graph.remove_edge(e)

                print 'deleting vertex',
                for v in reversed(sorted(short_nodes)):
                    print v,
                    temp_verts[int(v)] = temp_verts[v_num-1]
                    init_graph.remove_vertex(v, fast=True)
                    v_num -= 1
                print '\ndeleting related edges' # already done above, just info user
            else:
                print 'no short branches'

            ######## new vertices and edges ########
            self.verts = temp_verts[:v_num]
            self.edges = []
            for e in init_graph.edges():
                self.edges.append([int(e.source()), int(e.target())])
Example no. 54
class SkeletonData(object):
    """
    class to store and process skeleton data, such as that generated by the Starlab mean curvature skeleton
    """
    def __init__(self, fname=None, mesh_name=None, filter_sb=False):
        """
        @param filter_sb: whether to filter out short branches
        """
        if fname != None:
            self.skel_name = fname
            self.read_skel_file(fname)
            self._filter_short_branch(filter=filter_sb, short=10)
            self._parse_data()
            self.mesh_name = mesh_name
            self.vert_radius = None

    def calc_skel_properties(self):
        """
        calc all properties needed for matching
        """
        self.calc_node_centricity()
        self.calc_skel_radius()
        self.calc_path_length_ratio()
        self.calc_path_radius_ratio()
        self.normalize_skeleton()


    def read_skel_file(self, fname, dim=3):
        if fname == None:
            print 'please input skeleton file name'
            sys.exit(0)
        elif os.path.isfile(fname):
            self.verts_init = []
            self.edges_init = []
            with open(fname) as sf:
                for line in sf:
                    line = line.strip('\n')
                    line = line.split(' ')
                    if line[0] == '#':
                        continue
                    elif line[0] == 'v':
                        self.verts_init.append([x for x in line[1:(dim+1)]])
                    #### attention!! verts of edge start from 1 in files ####
                    elif line[0] == 'e':
                        self.edges_init.append([int(x)-1 for x in line[1:3]])
                    else:
                        print 'not support this format'
                        sys.exit(0)
        else:
            print 'no such file', fname
            sys.exit(0)


    def _filter_short_branch(self, filter=False, short=30):
        """
        Filter out very short branches. This may not be right for some models; for models with a flat part it is.
        I will test how this affects the final matching results.
        To delete nodes: swap each one with the last vertex, then delete the last.
        """
        if filter == False:
            self.verts = self.verts_init
            self.edges = self.edges_init
        else:
            init_graph = Graph(directed=False)
            init_graph.add_vertex(len(self.verts_init))
            for edge in self.edges_init:
                init_graph.add_edge(init_graph.vertex(edge[0]), init_graph.vertex(edge[1]))

            terminal_node = []
            for v in init_graph.vertices():
                if v.out_degree() == 1:
                    terminal_node.append(v)

            visitor = DepthVisitor()
            short_nodes = []
            for tn in terminal_node:
                search.dfs_search(init_graph, tn, visitor)
                tmp_node = visitor.get_short_branch(min_length=short)
                visitor.reset()
                for n in tmp_node:
                    short_nodes.append(n)

            ## get edges on the short paths
            short_nodes = list(set(short_nodes))
            short_edges = []
            temp_verts = self.verts_init[:]
            v_num = len(self.verts_init)
            if len(short_nodes):
                for v in reversed(sorted(short_nodes)):
                    for ve in init_graph.vertex(v).out_edges():
                        short_edges.append(ve)

                ## delete edges first, then vertex
                short_edges = list(set(short_edges))
                for e in short_edges:
                    init_graph.remove_edge(e)

                print 'deleting vertex',
                for v in reversed(sorted(short_nodes)):
                    print v,
                    temp_verts[int(v)] = temp_verts[v_num-1]
                    init_graph.remove_vertex(v, fast=True)
                    v_num -= 1
                print '\ndeleting related edges' # already done above, just info user
            else:
                print 'no short branches'

            ######## new vertices and edges ########
            self.verts = temp_verts[:v_num]
            self.edges = []
            for e in init_graph.edges():
                self.edges.append([int(e.source()), int(e.target())])


    def create_virtual_node(self):
        """
        Placeholder: intended to create virtual nodes for the feature nodes.
        """
        pass


    def _parse_data(self):
        """
        extract internal points (degree > 2) and endpoints (degree == 1)
        extract segments
        """
        if self.verts is None or self.edges is None:
            print 'please call read_skel_file first'
        else:
            self.verts = np.array(self.verts, dtype=float)
            self.edges = np.array(self.edges, dtype=int)
            terminal_index = []
            junction_index = []
            self.skel_graph = Graph(directed=False)
            self.skel_graph.add_vertex(len(self.verts))
            for edge in self.edges :
                self.skel_graph.add_edge(self.skel_graph.vertex(edge[0]), self.skel_graph.vertex(edge[1]))

            for v in self.skel_graph.vertices():
                if v.out_degree() == 2 :
                    continue
                elif v.out_degree() == 1 :
                    terminal_index.append(int(v))
                elif v.out_degree() > 2 :
                    junction_index.append(int(v))

            self.terminal = self.verts[terminal_index]
            self.junction = self.verts[junction_index]
            self.terminal_index = terminal_index
            self.junction_index = junction_index
            self.feature_node_index = junction_index + terminal_index 
            self.feature_node = self.verts[self.feature_node_index]

            """
            edge_vert_index = self.edges.flatten()
            print 'edge vertex index dtype', edge_vert_index.dtype
            if 0 in edge_vert_index:
                print 'vertex start from 0'
            else:
                print 'vertex start from 1'
            print 'skeleton vertex num', self.skel_graph.num_vertices()
            print 'skeleton edge num', self.skel_graph.num_edges()
            """
    
    def _calc_edge_length(self):
        """
        calc edge length and make it edge property map in graph-tool
        """
        vec = self.verts[self.edges[:,0]] - self.verts[self.edges[:,1]]
        edge_length = np.sqrt(np.sum(vec**2, axis=-1))
        self.edge_length_map = self.skel_graph.new_edge_property("double")
        self.edge_length_map.a = edge_length
    

    def calc_node_centricity(self):
        """
        calc node centricity of feature nodes (terminal and junction nodes):
        the mean geodesic distance from each feature node to all skeleton vertices,
        normalized by the maximum value (T1 in Oscar's EG 2010 paper)
        """
        self._calc_edge_length()
        node_centricity = []
        for n_idx in self.feature_node_index:
            dist = topology.shortest_distance(self.skel_graph, source=self.skel_graph.vertex(n_idx), weights=self.edge_length_map)
            node_centricity.append(dist.a.mean())

        node_centricity = np.array(node_centricity)
        self.node_centricity = node_centricity / np.max(node_centricity)


    def calc_skel_radius(self, mesh_name=None, dim=3):
        """
        estimate the radius at each skeleton vertex as the distance to its nearest mesh vertex
        """
        if mesh_name is not None:
            self.mesh_name = mesh_name

        if self.mesh_name is None:
            print 'please set mesh_name before calling calc_skel_radius'
        elif os.path.isfile(self.mesh_name):
            mesh = om.TriMesh()
            assert om.read_mesh(mesh, self.mesh_name)
            mesh_vertices = np.zeros((mesh.n_vertices(), dim), dtype=float)
            for n, vh in enumerate(mesh.vertices()):
                for i in xrange(3):
                    mesh_vertices[n, i] = mesh.point(vh)[i]

            nbrs = NearestNeighbors(n_neighbors=1, algorithm='ball_tree').fit(mesh_vertices)
            self.vert_radius, indices = nbrs.kneighbors(self.verts)
        else:
            print 'cannot find mesh file', self.mesh_name                
            sys.exit(0)


    def calc_path_radius(self, start, end):
        """
        utility function used by other methods:
        calc the **mean** skeleton vertex radius along the path between two feature nodes
        """
        if self.vert_radius is None:
            print 'please call calc_skel_radius first'
            return None
        elif start in self.feature_node_index and end in self.feature_node_index:
            v_list, e_list = topology.shortest_path(self.skel_graph, self.skel_graph.vertex(start), self.skel_graph.vertex(end), weights=self.edge_length_map)
            v_idx_list = []
            for v in v_list:
                v_idx_list.append(int(v))
            v_radius = self.vert_radius[v_idx_list]
            return v_radius.mean()
        else:
            print 'input vertex indices are not feature node indices'
            return None
    

    def calc_path_length_ratio(self):
        """
        for each feature node pair segment, calculate path length ratio
        normalized, to make it scale invariant
        """
        path_length = np.zeros((len(self.feature_node_index), len(self.feature_node_index)), dtype=float)
        for i, n_idx in enumerate(self.feature_node_index):
            for j, m_idx in enumerate(self.feature_node_index[i+1:], start=i+1):
                length = topology.shortest_distance(self.skel_graph, self.skel_graph.vertex(n_idx), self.skel_graph.vertex(m_idx), weights=self.edge_length_map)
                if length is not None:
                    path_length[i,j] = path_length[j,i] = length
                else:
                    print 'failed to compute path length'
                    return None

        ### extract path length from each feature node to junction nodes ###
        ### Careful!! path_length MUST start from junction node
        self.path_to_junction = path_length[:,:len(self.junction_index)]

        self.path_length_ratio = path_length / path_length.max()
        return self.path_length_ratio
    

    def calc_path_radius_ratio(self):
        """
        for each feature node pair segment, calculate path radius ratio   
        normalized, to make it scale invariant
        """
        path_radius = np.zeros((len(self.feature_node_index), len(self.feature_node_index)), dtype=float)
        for i, n_idx in enumerate(self.feature_node_index):
            for j, m_idx in enumerate(self.feature_node_index[i+1:], start=i+1):
                radius = self.calc_path_radius(n_idx, m_idx)
                if radius is not None:
                    path_radius[i, j] = path_radius[j, i] = radius
                else:
                    print 'failed to compute path radius'
                    return None

        self.path_radius_ratio = path_radius / path_radius.max()
        return self.path_radius_ratio


    def normalize_skeleton(self):
        """
        calc the pose-normalized skeleton to distinguish symmetric nodes,
        using multidimensional scaling (MDS)
        """
        v_num = len(self.verts)
        geodesic_dist = np.zeros((v_num, v_num))
        geodesic_dist_map = topology.shortest_distance(self.skel_graph, weights=self.edge_length_map)
        for i in xrange(v_num):
            geodesic_dist[i] = geodesic_dist_map[self.skel_graph.vertex(i)].a

        mds = manifold.MDS(n_components=3, max_iter=500, eps=1e-10, dissimilarity="precomputed", n_jobs=-2, n_init=1)
        verts_mean = self.verts - self.verts.mean(axis=0)
        normalized_verts = mds.fit(geodesic_dist, init=verts_mean).embedding_
        #scale = np.sqrt((verts_mean ** 2).sum()) / np.sqrt((normalized_verts ** 2).sum())
        #normalized_verts *= scale
        self.normalized_verts = normalized_verts
        self.normalized_feature_verts = normalized_verts[self.feature_node_index]
        return self.normalized_verts


    def write_file(self, file_path='./'):
        """
        save the skeleton (possibly after filtering) to file,
        in the same format as the Starlab mean curvature skeleton
        """
        file_name = os.path.basename(self.skel_name)
        full_name = file_path + file_name
        v_num = len(self.verts)
        e_num = len(self.edges)
        first_line = '# D:3 ' + 'NV:' + str(v_num) + ' NE:' + str(e_num) + '\n'
        with open(full_name, 'w') as f:
            f.write(first_line)
            for v in self.verts:
                line = 'v ' + str(v[0]) + ' ' + str(v[1]) + ' ' + str(v[2]) + '\n'
                f.write(line)

            for e in self.edges:
                line = 'e ' + str(e[0]+1) + ' ' + str(e[1]+1) + '\n'
                f.write(line)
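# Hedged usage sketch for the skeleton class above (not part of the original
# example). The class name and constructor are not visible in this snippet, so
# `Skeleton` and its arguments below are placeholders; the call order simply
# mirrors calc_skel_properties() and write_file().
#
#   skel = Skeleton('model.cg', mesh_name='model.off', filter_sb=True)  # hypothetical ctor
#   skel.calc_skel_properties()   # centricity, radius, length/radius ratios, MDS pose
#   skel.write_file('./filtered/')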
from random import randint

from Stack import Stack
from AllConstants import *
from main import number_requests
#number_requests = 10

all_child_graphs = []
output = open(str(number_frequency_bands) + "_Channels_" + str(simulated_on_network) + "_Network_Blockings_log_BK.txt","a")

if simulated_on_network == 1:
    ## Network is USIP ##

    ########### Creating as many layered graphs as there are channels ###########
    for i in range(number_frequency_bands):
        child_graph = Graph()
        vertices_set = child_graph.add_vertex(24)
        '''
        e01 = child_graph.add_edge(child_graph.vertex_index[0], child_graph.vertex_index[1])
        e02 = child_graph.add_edge(child_graph.vertex_index[0], child_graph.vertex_index[2])
        e12 = child_graph.add_edge(child_graph.vertex_index[1], child_graph.vertex_index[2])
        e13 = child_graph.add_edge(child_graph.vertex_index[1], child_graph.vertex_index[3])
        e24 = child_graph.add_edge(child_graph.vertex_index[2], child_graph.vertex_index[4])
        e34 = child_graph.add_edge(child_graph.vertex_index[3], child_graph.vertex_index[4])
        e35 = child_graph.add_edge(child_graph.vertex_index[3], child_graph.vertex_index[5])
        e45 = child_graph.add_edge(child_graph.vertex_index[4], child_graph.vertex_index[5])

        e10 = child_graph.add_edge(child_graph.vertex_index[1], child_graph.vertex_index[0])
        e20 = child_graph.add_edge(child_graph.vertex_index[2], child_graph.vertex_index[0])
        e21 = child_graph.add_edge(child_graph.vertex_index[2], child_graph.vertex_index[1])
        e31 = child_graph.add_edge(child_graph.vertex_index[3], child_graph.vertex_index[1])
Esempio n. 56
0
def facets_gt():
    graph_parallel = GTGraph()
    graph_parallel.add_edge_list(face_idx[parallel])
    connected = label_components(graph_parallel, directed=False)[0].a
    facets_idx = group(connected, min_len=2)
    return facets_idx
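# Hedged standalone sketch of the idea in facets_gt above (not from the original
# project): faces that are adjacent and (near-)parallel form one planar facet,
# which is exactly a connected component of the parallel-adjacency graph.
# `parallel_pairs` is an assumed (N, 2) integer array standing in for
# face_idx[parallel]; the external group(..., min_len=2) helper is replaced by
# an explicit filter.
import numpy as np
from graph_tool import Graph as GTGraph
from graph_tool.topology import label_components

def facets_from_parallel_pairs(parallel_pairs, num_faces):
    g = GTGraph(directed=False)
    g.add_vertex(num_faces)                    # one vertex per mesh face
    g.add_edge_list(parallel_pairs)            # edge = adjacent, parallel faces
    labels = label_components(g)[0].a          # component label per face
    facets = [np.flatnonzero(labels == c) for c in np.unique(labels)]
    return [f for f in facets if len(f) >= 2]  # keep facets with >= 2 faces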
Esempio n. 57
0
class GraphClass:

	#------------#
	# Initialize #
	#------------#

	def __init__ (self, dicProp={"Name": "Graph", "Type": "None", "Weighted": False}, graph=None):
		''' init from properties '''
		self.dicProperties = deepcopy(dicProp)
		self.dicGetProp = { "Reciprocity": get_reciprocity, "Clustering": get_clustering, "Assortativity": get_assortativity,
							"Diameter": get_diameter, "SCC": get_num_scc, #"Spectral radius": get_spectral_radius, 
							"WCC": get_num_wcc, "InhibFrac": get_inhib_frac }
		self.dicGenGraph = { "Erdos-Renyi": gen_er, "Free-scale": gen_fs, "EDR": gen_edr }
		# create a graph
		if graph is not None:
			# use the one furnished
			self.__graph = graph
			self.update_prop()
			self.bPropToDate = True
		elif dicProp["Type"] == "None":
			# create an empty graph
			self.__graph = Graph()
			self.bPropToDate = False
		else:
			# generate a graph of the requested type
			self.__graph = self.dicGenGraph[dicProp["Type"]](self.dicProperties)
			self.update_prop()
			self.set_name()
			self.bPropToDate = True

	@classmethod
	def from_graph_class(cls, graphToCopy):
		''' create new GraphClass instance as a deepcopy of another '''
		dicProperties = deepcopy(graphToCopy.get_dict_properties())
		gtGraph = graphToCopy.get_graph().copy()
		# create
		graphClass = cls(dicProperties, gtGraph)
		# set state of properties
		bPropToDate = deepcopy(graphToCopy.bPropToDate)
		bBetwToDate = deepcopy(graphToCopy.bBetwToDate)
		graphClass.bPropToDate = bPropToDate
		graphClass.bBetwToDate = bBetwToDate
		return graphClass

	def copy(self):
		''' returns a deepcopy of the graphClass instance '''
		graphCopy = GraphClass()
		graphCopy.set_graph(self.__graph.copy())
		graphCopy.update_prop()
		graphCopy.set_name(self.dicProperties["Name"]+'_copy')
		return graphCopy

	#---------------------------#
	# Manipulating the gt graph #
	#---------------------------#

	def set_graph(self, gtGraph):
		''' acquire a graph_tool graph as its own '''
		if isinstance(gtGraph, Graph):
			self.__graph = gtGraph
		else:
			raise TypeError("The object passed to 'set_graph' is not a < class 'graph_tool.Graph' > but a {}".format(gtGraph.__class__))

	def inhibitory_subgraph(self):
		''' create a GraphClass instance which graph contains only
		the inhibitory connections of the current instance's graph '''
		graph = self.__graph.copy()
		epropType = graph.new_edge_property("bool",-graph.edge_properties["type"].a+1)
		graph.set_edge_filter(epropType)
		inhibGraph = GraphClass()
		inhibGraph.set_graph(Graph(graph,prune=True))
		inhibGraph.set_prop("Weighted", True)
		return inhibGraph

	def excitatory_subgraph(self):
		''' create a GraphClass instance which graph contains only
		the excitatory connections of the current instance's graph '''
		graph = self.__graph.copy()
		epropType = graph.new_edge_property("bool",graph.edge_properties["type"].a+1)
		graph.set_edge_filter(epropType)
		excGraph = GraphClass()
		excGraph.set_graph(Graph(graph,prune=True))
		excGraph.set_prop("Weighted", True)
		return excGraph

	#-------------------------#
	# Set or update functions #
	#-------------------------#
		
	def set_name(self,name=""):
		''' set graph name '''
		if name != "":
			self.dicProperties["Name"] = name
		else:
			strName = self.dicProperties["Type"]
			tplUse = ("Nodes", "Edges", "Distribution")
			for key,value in self.dicProperties.items():
				if key in tplUse and (value.__class__ != dict):
					strName += '_' + key[0] + str(value)
				if key == "Clustering":
					strName += '_' + key[0] + str(around(value,4))
			self.dicProperties["Name"] = strName
		print(self.dicProperties["Name"])

	def update_prop(self, lstProp=[]):
		''' update part or all of the graph properties '''
		if lstProp:
			for strPropName in lstProp:
				if strPropName in self.dicGetProp.keys():
					self.dicProperties[strPropName] = self.dicGetProp[strPropName](self.__graph)
				else:
					print("Ignoring unknown property '{}'".format(strPropName))
		else:
			self.dicProperties.update({ strPropName: self.dicGetProp[strPropName](self.__graph) for strPropName in self.dicGetProp.keys() })
			self.bPropToDate = True

	#---------------#
	# Get functions #
	#---------------#

	## basic properties

	def get_name(self):
		return self.dicProperties["Name"]
	
	def num_vertices(self):
		return self.__graph.num_vertices()

	def num_edges(self):
		return self.__graph.num_edges()

	def get_density(self):
		return self.__graph.num_edges()/float(self.__graph.num_vertices()**2)

	def is_weighted(self):
		return self.dicProperties["Weighted"]

	## graph and adjacency matrix
	
	def get_graph(self):
		self.bPropToDate = False
		self.bBetwToDate = False
		self.bWBetwToDate = False
		return self.__graph

	def get_mat_adjacency(self):
		return adjacency(self.__graph, self.get_weights())

	## complex properties
	
	def get_prop(self, strPropName):
		if strPropName in self.dicProperties.keys():
			if not self.bPropToDate:
				self.dicProperties[strPropName] = self.dicGetProp[strPropName](self.__graph)
			return self.dicProperties[strPropName]
		else:
			print("Ignoring request for unknown property '{}'".format(strPropName))

	def get_dict_properties(self):
		return self.dicProperties

	def get_degrees(self, strType="total", bWeights=True):
		lstValidTypes = ["in", "out", "total"]
		if strType in lstValidTypes:
			return degree_list(self.__graph, strType, bWeights)
		else:
			print("Ignoring invalid degree type '{}'".format(strType))
			return None

	def get_betweenness(self, bWeights=True):
		if bWeights:
			if not self.bWBetwToDate:
				self.wBetweeness = betweenness_list(self.__graph, bWeights)
				self.bWBetwToDate = True
			return self.wBetweeness
		if not self.bBetwToDate and not bWeights:
			self.betweenness = betweenness_list(self.__graph, bWeights)
			self.bBetwToDate = True
			return self.betweenness

	def get_types(self):
		if "type" in self.graph.edge_properties.keys():
			return self.__graph.edge_properties["type"].a
		else:
			return repeat(1, self.__graph.num_edges())
	
	def get_weights(self):
		if self.dicProperties["Weighted"]:
			epropW = self.__graph.edge_properties["weight"].copy()
			epropW.a = multiply(epropW.a, self.__graph.edge_properties["type"].a)
			return epropW
		else:
			return self.__graph.edge_properties["type"].copy()
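# Hedged usage note for GraphClass above (not part of the original example).
# __init__ looks up helper functions (get_reciprocity, gen_er, ...) from its
# enclosing module, which is not shown here, so the class is not standalone.
# With the full module imported, typical use would look roughly like:
#
#   gc = GraphClass()                   # empty graph, properties not yet computed
#   gc.set_graph(existing_gt_graph)     # adopt an existing graph_tool.Graph
#   print(gc.num_vertices(), gc.num_edges(), gc.get_density())
#   gc.update_prop(["Reciprocity"])     # refresh a single cached property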
Esempio n. 58
0
def is_watertight_gt(mesh):
    g = GTGraph()
    g.add_edge_list(mesh.face_adjacency())    
    degree     = g.degree_property_map('total').a
    watertight = np.equal(degree, 3).all()
    return watertight
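# Hedged standalone sketch of the check in is_watertight_gt above: a closed
# triangle mesh is watertight when every face is adjacent to exactly three
# other faces, i.e. every vertex of the face-adjacency graph has total degree 3.
# `face_adjacency` is an assumed (N, 2) integer array of adjacent face pairs,
# standing in for mesh.face_adjacency().
import numpy as np
from graph_tool import Graph as GTGraph

def is_watertight_from_adjacency(face_adjacency, num_faces):
    g = GTGraph(directed=False)
    g.add_vertex(num_faces)                      # one vertex per face
    g.add_edge_list(face_adjacency)              # edge = two faces sharing a mesh edge
    degree = g.degree_property_map('total').a
    return bool(np.equal(degree, 3).all())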
Esempio n. 59
0
def main():
	conn = serial_interface.connect()

	cur_track = track.init_tracka()
	g = Graph()
	g.add_vertex(len(cur_track))
	for (vi, node) in enumerate(cur_track): node.i = vi

	n_title = g.new_vertex_property("string")
	n_color = g.new_vertex_property("string")
	n_pos = g.new_vertex_property("vector<double>")
	e_title = g.new_edge_property("string")
	e_dist = g.new_edge_property("double")

	for node in cur_track:
		v = g.vertex(node.i)
		n_title[v] = node.name
		if node.typ == track.NODE_EXIT:
			# Invert points to match our ASCII display.
			n_pos[v] = (-node.reverse.coord_x, -node.reverse.coord_y)
		else:
			n_pos[v] = (-node.coord_x, -node.coord_y)
		e = g.add_edge(g.vertex(node.i), g.vertex(node.reverse.i))
		if node.typ == track.NODE_SENSOR: n_color[v] = "blue"
		elif node.typ == track.NODE_BRANCH: n_color[v] = "orange"
		elif node.typ == track.NODE_MERGE: n_color[v] = "yellow"
		elif node.typ == track.NODE_ENTER: n_color[v] = "green"
		elif node.typ == track.NODE_EXIT: n_color[v] = "red"
		else: n_color[v] = "white"
		for edge in node.edge:
			if edge.src is None: continue
			e = g.add_edge(g.vertex(edge.src.i), g.vertex(edge.dest.i))
			e_dist[e] = edge.dist
			e_title[e] = "%.2f" % (edge.dist)

	win = graph_tool.draw.GraphWindow(g, n_pos, (640, 480), edge_text=e_title, vertex_fill_color=n_color, vertex_text=n_title)
	win.show_all()
	def destroy_callback(*args, **kwargs):
		win.destroy()
		Gtk.main_quit()

	def set_switch(sw, d):
		for node in cur_track:
			if node.typ == track.NODE_BRANCH and node.num == sw:
				node.switch_direction = d
				return
		print "WARN: Could not find switch %d" % sw

	class Train():
		num = -1
		vel = 0.
		speed = 0.
		edge = cur_track[0].edge[0]
		edge_dist = 0
		SPEEDX = 1.

		def __init__(self, num):
			self.num = num

		def update(self):
			# Crude deceleration model
			self.vel = self.vel + (0.018/self.SPEEDX)*(self.speed - self.vel)
			self.edge_dist += self.vel
			while True:
				e = self.e()
				if self.edge_dist < e_dist[e]: break
				if self.edge.dest.typ == track.NODE_SENSOR:
					conn.set_sensor_tripped(self.edge.dest.num)
				self.edge = self.edge.dest.edge[self.edge.dest.switch_direction]
				self.edge_dist -= e_dist[e]

		def draw(self, n_pos, da, cr):
			e = self.e()
			start, end = np.array(n_pos[e.source()]), np.array(n_pos[e.target()])
			alpha = self.edge_dist / e_dist[e]
			pos = start + alpha*(end - start)
			dp = win.graph.pos_to_device(pos) # dp: device position
			cr.rectangle(dp[0]-10, dp[1]-10, 20, 20)
			cr.set_source_rgb(102. / 256, 102. / 256, 102. / 256)
			cr.fill()
			cr.move_to(dp[0]-10, dp[1] + 10 - 12./2)
			cr.set_source_rgb(1., 1., 1.)
			cr.set_font_size(12)
			cr.show_text("%d" % self.num)
			cr.fill()
		def e(self): return g.edge(self.edge.src.i, self.edge.dest.i)
		def set_speed(self, speed): self.speed = speed/self.SPEEDX
		def toggle_reverse(self):
			self.edge = self.edge.reverse
			self.edge_dist = e_dist[self.e()] - self.edge_dist

	def find_train(train_number):
		for train in trains:
			if train.num == train_number:
				return train
		train = Train(train_number)
		trains.append(train)
		return train

	trains = [Train(12)]
	startup_time = time.time()
	accumulated_error = [0.]
	last_time = [time.time()]
	last_sensor_poll = [0]
	FPS = 30.
	def my_draw(da, cr):
		(typ, a1, a2) = conn.next_cmd()
		if typ is None: pass
		elif typ == 'set_speed': find_train(a1).set_speed(a2)
		elif typ == 'toggle_reverse': find_train(a1).toggle_reverse()
		elif typ == 'switch': set_switch(a1, a2)
		elif typ == 'sensor': last_sensor_poll[0] = round((time.time() - startup_time) * 1000)/1000
		else: print "Ignoring command %s" % typ
		cur_time = time.time()
		dt = cur_time - last_time[0] + accumulated_error[0]
		num_steps = int(dt*FPS)
		accumulated_error[0] = dt - num_steps/FPS
		for train in trains:
			for _ in range(0, num_steps): train.update()
			train.draw(n_pos, da, cr)
			cr.move_to(10., 10.)
			cr.set_source_rgb(0., 0., 0.)
			cr.set_font_size(12)
			cr.show_text("Last polled at %.3f" % last_sensor_poll[0])
		da.queue_draw()
		last_time[0] = cur_time

	win.connect("delete_event", destroy_callback)
	win.graph.connect("draw", my_draw)
	Gtk.main()