Example no. 1
import numpy as np
from graph_tool.centrality import pagerank
from graph_tool.clustering import local_clustering


def get_metric(ggt, metric, n_nodes, n_edges):
    if metric == "d":
        # Density: 2E / (N * (N - 1)) for an undirected graph
        if n_nodes <= 1:
            value = 0.0
        else:
            value = (2.0 * n_edges) / (n_nodes * (n_nodes - 1.0))
        ggt.gp[metric] = ggt.new_gp("float", val=value)
    elif metric == "dg":
        # Degree
        if n_nodes <= 1:
            value = np.zeros(n_nodes, dtype=np.float32)
        else:
            value = ggt.degree_property_map('total').get_array()
        ggt.vp[metric] = ggt.new_vp("double", vals=value)
    elif metric == "dgc":
        # Degree centrality: degree normalized by the maximum possible (N - 1)
        if n_nodes <= 1:
            value = np.zeros(n_nodes, dtype=np.float32)
        else:
            value = ggt.degree_property_map('total').get_array() / (n_nodes - 1.0)
        ggt.vp[metric] = ggt.new_vp("double", vals=value)
    elif metric == "cnw":
        # Clustering coefficient (non-weighted)
        value = local_clustering(ggt).get_array()
        ggt.vp[metric] = ggt.new_vp("double", vals=value)
    elif metric == "cw":
        # Clustering coefficient (weighted); requires an edge property 'weight'
        value = local_clustering(ggt, weight=ggt.ep.weight).get_array()
        ggt.vp[metric] = ggt.new_vp("double", vals=value)
    elif metric == "pgr":
        # PageRank
        value = pagerank(ggt).get_array()
        ggt.vp[metric] = ggt.new_vp("double", vals=value)
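A minimal usage sketch for get_metric (the three-vertex graph is illustrative, not from the source): attach the density metric to a small undirected graph and read it back.

from graph_tool import Graph

g = Graph(directed=False)
v0, v1, v2 = g.add_vertex(3)
g.add_edge(v0, v1)
g.add_edge(v1, v2)

get_metric(g, "d", g.num_vertices(), g.num_edges())
print(g.gp["d"])  # 2 * 2 / (3 * 2) = 0.666...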
Example no. 2
# Import aliases below are inferred from the usage of R and GT.
import random as R

import graph_tool as GT
from graph_tool.clustering import local_clustering


def clu_attack(g: GT.Graph):
    # IdNodes is a project-specific container, kept as in the original.
    lc = IdNodes(list(local_clustering(g)))
    if all(c == 0.0 for c in lc):
        # All coefficients are zero: fall back to random scores so the
        # attack still has an ordering to work with.
        for i in range(len(lc)):
            lc[i] = R.random()
        print("!!! clu_attack randomized !!!")
    return lc
Example no. 3
    def get_basic_info(self):
        info = {}

        try:
            n_vertices = self.g.num_vertices()
            n_edges = self.g.num_edges()
            density = n_edges / ((n_vertices * (n_vertices - 1)) / 2)
            mean_degree = (2 * n_edges) / n_vertices

            # Compute the average clustering coefficient "by hand", as the
            # mean of the local coefficients computed by graph-tool.
            local_cc = local_clustering(self.g)
            clustering_coef = fsum(local_cc[x] for x in self.g.vertices())
            clustering_coef /= n_vertices

            info["Número de times"] = n_vertices  # number of teams
            info["Número de confrontos"] = n_edges  # number of matchups
            info["Densidade"] = density  # density
            info["Grau médio"] = mean_degree  # mean degree
            info["Coeficiente de Clusterização"] = clustering_coef  # clustering coefficient
        except Exception:
            # On any failure, return an empty dict rather than partial results.
            info.clear()

        return info
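As a quick sanity check on the formulas above, a worked example (the K4 numbers are illustrative, not from the source):

# Complete graph with 4 vertices and 6 edges (K4).
n_vertices, n_edges = 4, 6
assert n_edges / ((n_vertices * (n_vertices - 1)) / 2) == 1.0  # density
assert (2 * n_edges) / n_vertices == 3.0                       # mean degree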
Example no. 4
import graph_tool.clustering as gtc
from graph_tool import GraphView
from graph_tool.stats import label_parallel_edges


def local_clustering_binary_undirected(g, nodes=None):
    '''
    Returns the undirected local clustering coefficient of some `nodes`.

    If `g` is directed, then it is converted to a simple undirected graph
    (no parallel edges).

    Parameters
    ----------
    g : :class:`~nngt.Graph`
        Graph to analyze.
    nodes : list, optional (default: all nodes)
        The list of nodes for which the clustering will be returned.

    Returns
    -------
    lc : :class:`numpy.ndarray`
        The list of clustering coefficients, one per node.

    References
    ----------
    .. [gt-local-clustering] :gtdoc:`clustering.local_clustering`
    '''
    # use undirected graph view, filter parallel edges
    u = GraphView(g.graph, directed=False)
    u = GraphView(u, efilt=label_parallel_edges(u).fa == 0)

    # compute clustering
    lc = gtc.local_clustering(u, weight=None, undirected=None).a

    if nodes is None:
        return lc

    return lc[nodes]
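The undirected-view plus parallel-edge-filter pattern above can be exercised directly in graph-tool. A minimal sketch, using an illustrative three-vertex graph (not from the source):

from graph_tool import Graph, GraphView
from graph_tool.clustering import local_clustering
from graph_tool.stats import label_parallel_edges

g = Graph(directed=True)
v0, v1, v2 = g.add_vertex(3)
for s, t in [(v0, v1), (v1, v0), (v1, v2), (v2, v0)]:
    g.add_edge(s, t)  # (v1, v0) becomes a parallel edge once direction is dropped

u = GraphView(g, directed=False)
u = GraphView(u, efilt=label_parallel_edges(u).fa == 0)
print(local_clustering(u).a)  # [1. 1. 1.]: the filtered view is a simple triangle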
Example no. 5
import json
import os
from statistics import mean  # inferred from usage; numpy.mean would also fit

import powerlaw
from graph_tool import load_graph
from graph_tool.clustering import global_clustering, local_clustering
from graph_tool.topology import pseudo_diameter


def metrics(file, use_cache=True):
    # use cache or recompute
    cache = os.path.splitext(file)[0] + ".json"
    if use_cache and os.path.isfile(cache):
        print('using cached metrics for', os.path.basename(file))
        with open(cache, "r") as fp:
            return json.load(fp)
    print('computing metrics for', os.path.basename(file))

    # read file
    g = load_graph(file)
    degrees = list(g.degree_property_map("out"))
    with open(file) as f:
        metalines = [next(f) for _ in range(13)]

    # gather data
    metrics = {}
    metrics['file'] = os.path.basename(file)
    metrics['edges'] = int(metalines[5].split()[-1])
    metrics['rounds'] = int(metalines[1].split()[-1])
    metrics['max_degree'] = max(degrees)
    metrics['avg_degree'] = mean(degrees)
    metrics['min_degree'] = min(degrees)
    metrics['local_clustering'] = mean(local_clustering(g).get_array())
    metrics['global_clustering'] = global_clustering(g)[0]
    metrics['pseudo_diameter'] = int(pseudo_diameter(g)[0])
    fit = powerlaw.Fit(degrees, discrete=True, verbose=False)
    metrics['exponent'] = fit.alpha
    metrics['KS'] = fit.power_law.KS()
    metrics['x_min'] = fit.xmin

    with open(cache, "w") as fp:
        json.dump(metrics, fp)

    return metrics
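Usage is then a repeated call; the second invocation is served from the JSON cache (the path below is hypothetical):

m = metrics("output/run-1.txt")                   # computes and writes output/run-1.json
m = metrics("output/run-1.txt")                   # loaded from the cache
m = metrics("output/run-1.txt", use_cache=False)  # forces recomputation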
Example no. 6
    def set_properties(self, subgraph):
        # Attach centrality, clustering, and articulation measures as
        # internal properties of the subgraph.
        v_betweenness, e_betweenness = betweenness(subgraph)
        subgraph.vertex_properties["vertex_betweenness"] = v_betweenness
        subgraph.edge_properties["edge_betweenness"] = e_betweenness
        v_closeness = closeness(subgraph)
        subgraph.vertex_properties["closeness"] = v_closeness
        l_clustering = local_clustering(subgraph)
        subgraph.vertex_properties["local_clustering"] = l_clustering
        bicomp, articulation, nc = label_biconnected_components(subgraph)
        subgraph.vertex_properties["articulation"] = articulation
        return subgraph
Example no. 7
    def test_sredni_wspolczynnik_klasteryzacji_na_sztywno_graf_pelny(self):
        # Polish: "average clustering coefficient, hard-coded complete graph".
        # Builds K4; every vertex's neighbours are all connected to each
        # other, so each local coefficient -- and their average -- is 1.0.
        g = Graph(directed=False)
        v0 = g.add_vertex()
        v1 = g.add_vertex()
        v2 = g.add_vertex()
        v3 = g.add_vertex()
        g.add_edge(v0, v1)
        g.add_edge(v0, v2)
        g.add_edge(v0, v3)
        g.add_edge(v1, v2)
        g.add_edge(v1, v3)
        g.add_edge(v2, v3)
        lc = local_clustering(g, undirected=True)
        self.assertEqual(1.0, vertex_average(g, lc)[0])
Example no. 8
def f_local_clustering(D,
                       stats,
                       options={
                           'features': [],
                           'skip_features': []
                       }):
    """Store the average local clustering coefficient of D in stats."""

    if 'local_clustering' not in options['features'] or (
            'skip_features' in options
            and 'local_clustering' in options['skip_features']):
        log.debug('Skipping local_clustering')
        return

    stats['local_clustering'] = vertex_average(D, local_clustering(D))[0]
    log.debug('done local_clustering')
Example no. 9
import arrow
import matplotlib.pyplot as plt
import pandas as pd
from IPython.display import Markdown, display
from graph_tool.clustering import local_clustering
from graph_tool.draw import graph_draw
from graph_tool.stats import vertex_average, vertex_hist
from graph_tool.topology import label_largest_component


def user_network_summary(g):
    span = "{:D MMM YYYY, HH:mm} - {:D MMM YYYY, HH:mm}".format(
        arrow.get(g.edge_properties["created_at"].a.min()),
        arrow.get(g.edge_properties["created_at"].a.max())
    )
    largest_component = label_largest_component(g, directed=False).a.sum()

    display(Markdown("### " + g.graph_properties["track"].replace("#", r"\#")))
    display(Markdown("#### " + span))

    graph_draw(g, inline=True, output_size=[1000, 1000],
               vertex_fill_color=[.2, .3, .9, .7], vertex_size=2)
    stats = pd.DataFrame([
        ["Vertices",
         g.num_vertices()],
        ["Edges",
         g.num_edges()],
        ["Avg. degree",
         float(g.num_edges()) / g.num_vertices()],
        ["Avg. clustering",
         vertex_average(g, local_clustering(g))[0]],
        ["Giant component share",
         "{:.1%}".format(largest_component / g.num_vertices())]
    ], columns=["Metric", "Value"])
    display(stats)

    bins = 20
    counts, _ = vertex_hist(g, "in", range(bins))

    plt.bar(range(1, bins), counts, align="center")

    plt.xticks(range(bins))
    plt.xlim([0.5, bins - 1])
    plt.title("Degree distribution")

    plt.show()
Example no. 10
def local_clustering_coeff(g, nodes=None):
    # Local clustering for all nodes, or only for the requested subset.
    lc = local_clustering(g).a
    if nodes is None:
        return lc
    return lc[nodes]
Example no. 11
def clu_protection(g: GT.Graph, nodes, n_protected):
    lc = list(local_clustering(g))
    if all(n == 0.0 for n in lc):
        # All coefficients are zero: protect a random sample instead.
        print("!!! clu_protection randomized !!!")
        return set(R.sample(range(len(lc)), n_protected))
    # i_of_bests and nodes.id_of are project-specific helpers.
    return {nodes.id_of(n) for n in i_of_bests(lc, n_protected)}
Example no. 12
import graph_tool.clustering as clustering
from graph_tool import Graph


def clustering_coefficient(g: Graph):
    # Weighted, direction-aware local clustering; g must carry a
    # 'weight' edge property.
    return clustering.local_clustering(g,
                                       weight=g.edge_properties['weight'],
                                       undirected=False)
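A minimal usage sketch (the directed triangle and unit weights are illustrative, not from the source); the 'weight' edge property must exist before the call:

g = Graph(directed=True)
v0, v1, v2 = g.add_vertex(3)
for s, t in [(v0, v1), (v1, v2), (v2, v0)]:
    g.add_edge(s, t)
w = g.new_edge_property("double")
w.a = 1.0  # unit weights
g.edge_properties['weight'] = w

print(clustering_coefficient(g).a)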
Example no. 13
# worldRank and worldSize come from the MPI setup earlier in the script
# (not shown); ranks other than 0 load the network dumped by rank 0.
if worldRank > 0:
    sleep(5)
    Graph = gt.load_graph("networkDump_%s.xml.gz" % selected)
    weightProp = Graph.edge_properties["w"]
    overlapProp = Graph.edge_properties["o"]
    kikjProp = Graph.edge_properties["kij"]

# Per-node properties, computed in parallel across the MPI ranks.
strengthProp = Graph.new_vertex_property("double")
degreeProp = Graph.new_vertex_property("double")
kkk = Graph.degree_property_map("total").a
sss = Graph.degree_property_map("total", weight=weightProp).a
degreeProp.a = kkk
strengthProp.a = sss

ccc = local_clustering(Graph).a
ccc = np.array(ccc, dtype=np.float64)
# -1 sentinels flag vertices this rank skips or does not own.
ccw = np.ones(Graph.num_vertices(), dtype=np.float64) * -1.
cww = np.ones(Graph.num_vertices(), dtype=np.float64) * -1.
knn = np.ones(Graph.num_vertices(), dtype=np.float64) * -1.
knw = np.ones(Graph.num_vertices(), dtype=np.float64) * -1.

iii = worldRank
if worldRank == 0:
    print("Computing nodes properties...")
# Round-robin partition: each rank takes every worldSize-th vertex.
for nodeI in Graph.get_vertices()[worldRank::worldSize]:
    # nodeFraction (defined upstream, not shown) subsamples the vertices.
    if np.random.rand() < nodeFraction:
        tmp_ccw = tmp_cww = .0
        tmp_knn = tmp_knw = tmp_wsum = .0

        tmp_strI = strengthProp[nodeI]
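The Graph.get_vertices()[worldRank::worldSize] slice above is a round-robin partition of the vertex array across MPI ranks; a standalone sketch of the same idiom (rank count is illustrative):

import numpy as np

vertices = np.arange(10)  # stand-in for Graph.get_vertices()
world_size = 3
for rank in range(world_size):
    print(rank, vertices[rank::world_size])
# 0 [0 3 6 9]
# 1 [1 4 7]
# 2 [2 5 8]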