示例#1
0
def calculate_centrality(graph):
    """Compute closeness and betweenness centrality for *graph*, both
    unweighted and weighted by the ``Distance`` edge property.

    Side effects: dumps each raw centrality array to its own text file
    (one value per line) and writes a combined per-vertex table to
    ``results.csv``.
    """
    def _dump(path, values):
        # One value per line. The original opened every file twice
        # ('w+' then 'r+') and never closed either handle; a single
        # write-mode 'with' open fixes the leak.
        with open(path, 'w') as fh:
            fh.writelines("%s\n" % item for item in values)

    cl_unweighted = gt.closeness(graph)
    cl_distance = gt.closeness(graph, weight=graph.ep.Distance)
    bt_unweighted, _ = gt.betweenness(graph)
    bt_distance, _ = gt.betweenness(graph, weight=graph.ep.Distance)

    _dump('cl_unweighted.txt', cl_unweighted.a)
    _dump('cl_distance.txt', cl_distance.a)
    _dump('bt_unweighted.txt', bt_unweighted.a)
    _dump('bt_distance.txt', bt_distance.a)

    # NOTE(review): 'wb' + csv.writer is the Python 2 idiom; under
    # Python 3 this raises TypeError — use open(..., 'w', newline='').
    # Kept as-is because this snippet appears to target Python 2.
    with open('results.csv', 'wb') as results:
        writer = csv.writer(results, delimiter=',')
        writer.writerow([
            'Name', 'Type', 'Longitude', 'Latitude', 'Closeness_Unweighted',
            'Closeness_Distance', 'Betweenness_Unweighted',
            'Betweenness_Distance'
        ])
        for v in graph.vertices():
            writer.writerow([
                graph.vp.name[v], graph.vp.Type[v], graph.vp.Longitude[v],
                graph.vp.Latitude[v], cl_unweighted[v], cl_distance[v],
                bt_unweighted[v], bt_distance[v]
            ])
示例#2
0
def graph_measures(graph: gt.Graph) -> pd.DataFrame:
    """Return a DataFrame with one row per vertex and one column per
    topological measure (group/author labels, in/out/total degree,
    pagerank, betweenness, closeness, eigenvector, HITS authority/hub,
    local clustering). Missing values are filled with 0.

    The original built the same table through a product()/get_attrs/
    append_val indirection; a direct comprehension is equivalent and
    far easier to follow.
    """
    _, vp_authority, vp_hub = gt.hits(graph)

    # Column name -> vertex property map holding that measure.
    measures = {
        'tp_group': graph.vp.group_name,
        'tp_author': graph.vp.username,
        'tn_degree_in': graph.degree_property_map('in'),
        'tn_degree_out': graph.degree_property_map('out'),
        'tn_degree_total': graph.degree_property_map('total'),
        'tn_pagerank': gt.pagerank(graph),
        'tn_betweenness': gt.betweenness(graph)[0],
        'tn_closeness': gt.closeness(graph),
        'tn_eigenvector': gt.eigenvector(graph)[1],
        'tn_authority': vp_authority,
        'tn_hub': vp_hub,
        'tn_lcc': gt.local_clustering(graph),
    }

    # One list per column, filled vertex-by-vertex in vertex order.
    columns = {
        key: [prop[v] for v in graph.vertices()]
        for key, prop in measures.items()
    }
    return pd.DataFrame(columns).fillna(0)
示例#3
0
 def generate_report(self, analysis_type, out_file):
     """Write one line per vertex ("<label> <value>") to *out_file* for
     the requested centrality analysis.

     Fixes over the original: the three degree branches were empty
     (``data`` stayed ``None``), and the closeness/betweenness results
     went into different variables while the output loop always read
     ``vp`` — every analysis type crashed with a NameError. All
     branches now populate ``data``, and the output file is closed via
     a context manager.
     """
     if not isinstance(out_file, str) or out_file == "":
         print("Invalid output path.")
         exit(1)
     if analysis_type == "total_degree":
         data = self.G.degree_property_map("total")
     elif analysis_type == "in_degree":
         data = self.G.degree_property_map("in")
     elif analysis_type == "out_degree":
         data = self.G.degree_property_map("out")
     elif analysis_type == "closeness":
         data = gt.closeness(self.G, weight=self.e_weights)
     elif analysis_type == "betweenness":
         # betweenness() returns (vertex_map, edge_map); keep the vertex map.
         data, _ = gt.betweenness(self.G, weight=self.e_weights)
     else:
         print('Invalid analysis type, select from:')
         print('total_degree')
         print('in_degree')
         print('out_degree')
         print('closeness')
         print('betweenness')
         exit(1)
     with open(out_file, 'w') as f:
         for v in self.G.vertices():
             print(self.v_labels[v], data[v], file=f)
示例#4
0
def closeness(rankCommands, Graph, conn, cur):
    """Rank all vertices of ``Graph.g`` by closeness centrality and
    persist the ranking via ``createTable``.

    Unreachable vertices (NaN closeness) are reported as 0.0.
    """
    gt.openmp_set_num_threads(4)  # run the algorithm on 4 OpenMP threads
    before_time = time.time()
    values = gt.closeness(Graph.g).get_array()
    idCl = dict()
    for vertex in Graph.g.vertices():
        score = values[vertex]
        # closeness() yields NaN for vertices with no reachable peers.
        idCl[Graph.indexIdDict[vertex]] = 0.0 if numpy.isnan(score) else score
    # print() call replaces the Python 2 print statement (a syntax
    # error under Python 3); output is identical on both interpreters.
    print("Total handling time is: ", (time.time() - before_time))
    slist = sorted(idCl, key=lambda key: idCl[key], reverse=True)
    createTable(rankCommands, slist, idCl, conn, cur)
示例#5
0
def closeness(rankCommands, Graph, conn, cur):
    """Rank all vertices of ``Graph.g`` by closeness centrality and
    persist the ranking via ``createTable``.

    Unreachable vertices (NaN closeness) are reported as 0.0.
    """
    gt.openmp_set_num_threads(4)  # run the algorithm on 4 OpenMP threads
    before_time = time.time()
    values = gt.closeness(Graph.g).get_array()
    idCl = dict()
    for vertex in Graph.g.vertices():
        score = values[vertex]
        # closeness() yields NaN for vertices with no reachable peers.
        idCl[Graph.indexIdDict[vertex]] = 0.0 if numpy.isnan(score) else score
    # print() call replaces the Python 2 print statement (a syntax
    # error under Python 3); output is identical on both interpreters.
    print("Total handling time is: ", (time.time() - before_time))
    slist = sorted(idCl, key=lambda key: idCl[key], reverse=True)
    createTable(rankCommands, slist, idCl, conn, cur)
def calculate_centrality(graph):
    """Compute closeness centrality weighted by the ``Time`` edge
    property, dump the raw array to a text file and write a per-vertex
    CSV with coordinates and the time-adjusted closeness.
    """
    # print() replaces the Python 2 print statements of the original.
    print('calculate closeness')
    cl_time = gt.closeness(graph, weight=graph.ep.Time)

    # Single write-mode open; the original opened the file twice
    # ('w+' then 'r+') and never closed either handle.
    with open('cl_time_adjusted.txt', 'w') as f:
        f.writelines("%s\n" % item for item in cl_time.a)

    # NOTE(review): 'wb' + csv.writer is the Python 2 idiom; under
    # Python 3 use open(..., 'w', newline='') instead. Kept as-is
    # because this snippet appears to target Python 2.
    with open('results_time_adjusted.csv', 'wb') as results:
        writer = csv.writer(results, delimiter=',')
        writer.writerow(['Name', 'Type', 'Longitude', 'Latitude', 'Closeness_Time'])
        for v in graph.vertices():
            writer.writerow([
                graph.vp.name[v], graph.vp.Type[v], graph.vp.Longitude[v],
                graph.vp.Latitude[v], cl_time[v],
            ])
def g_centrality_correlations(g):
    dgr = g.degree_property_map('total').a
    dgr = dgr / (g.num_vertices() - 1)
    btn = gt.betweenness(g, norm=True)[0].a
    cln = gt.closeness(g, norm=True, harmonic=False).a
    egn = gt.eigenvector(g)[1].a
    return dict(
        # dgr=dgr,
        # btn=btn,
        # cln=cln,
        # egn=egn,
        db_p=stats.pearsonr(dgr, btn),
        dc_p=stats.pearsonr(dgr, cln),
        de_p=stats.pearsonr(dgr, egn),
        db_s=stats.spearmanr(dgr, btn),
        dc_s=stats.spearmanr(dgr, cln),
        de_s=stats.spearmanr(dgr, egn),
        db_k=stats.kendalltau(dgr, btn),
        dc_k=stats.kendalltau(dgr, cln),
        de_k=stats.kendalltau(dgr, egn))
示例#8
0
def process(name, g):
    """Render *g* once per (vertex property, colormap) pair and dump a
    D3-style JSON file (``<name>.json``) with per-node coordinates and
    metrics plus index-based links.

    The original repeated five near-identical ``draw_graph`` calls per
    colormap and listed the same properties again when building the
    JSON nodes; both now iterate a single label -> property-map table.
    """
    # Layout and vertex metrics.
    vp_pos = gt.sfdp_layout(g)
    vp_deg = g.degree_property_map('total')
    vp_deg_log = g.new_vp('double')
    vp_deg_log.a = np.log10(vp_deg.a)
    vp_cls = gt.closeness(g)
    vp_page = gt.pagerank(g)
    vp_btw, _ = gt.betweenness(g, norm=False)  # edge map unused

    # Label -> property map; insertion order matches the original
    # drawing order (deg, deg_log, cls, page, btw).
    props = {
        'deg': vp_deg,
        'deg_log': vp_deg_log,
        'cls': vp_cls,
        'page': vp_page,
        'btw': vp_btw,
    }

    # One image per property per colormap.
    for cmap in [
            'viridis', 'plasma', 'inferno', 'YlGnBu', 'Blues', 'Greys',
            'Greens', 'Oranges'
    ]:
        for label, vprop in props.items():
            draw_graph(g,
                       vp_pos,
                       f'{name}.prop={label}.color={cmap}.png',
                       vp_color=vprop,
                       vcmap=cmap)

    # Construct dicts for D3-style JSON: position plus every metric.
    nodes = []
    for u in g.vertices():
        p = vp_pos[u]
        node = {'x': p[0], 'y': p[1]}
        node.update((label, vprop[u]) for label, vprop in props.items())
        nodes.append(node)

    vp_idx = g.vertex_index
    links = [{
        'source': vp_idx[e.source()],
        'target': vp_idx[e.target()],
    } for e in g.edges()]

    # Save D3-style JSON.
    with open(f'{name}.json', 'w') as f:
        json.dump({'nodes': nodes, 'links': links}, f)
示例#9
0
import graph_tool.all as gtool

# Load the political-blogs network and keep only its largest
# connected component.
blog_graph = gtool.collection.data["polblogs"]
blog_graph = gtool.GraphView(
    blog_graph, vfilt=gtool.label_largest_component(blog_graph))

# Closeness centrality drives fill colour, vertex size and draw order.
closeness_map = gtool.closeness(blog_graph)

gtool.graph_draw(blog_graph,
                 pos=blog_graph.vp["pos"],
                 vertex_fill_color=closeness_map,
                 vertex_size=gtool.prop_to_size(closeness_map, mi=5, ma=15),
                 vorder=closeness_map,
                 vcmap=matplotlib.cm.gist_heat,
                 output="political_closeness.pdf")
示例#10
0
def _plot_distribution(values, title, xlabel, out_path):
    # Bar plot of a centrality distribution, saved as PNG and closed
    # so successive plots don't stack on the same figure.
    plt.title(title)
    plt.ylabel('#Nodes')
    plt.xlabel(xlabel)
    plt.bar(*float_distribution(values, 40),
            width=(max(values) - min(values)) / 50)
    plt.savefig(out_path, format='png')
    plt.close()


betweenness = GT.betweenness(g)[0].get_array()
_plot_distribution(betweenness, "Betweenness distribution",
                   'Betweenness coefficient', "img/betweenness_dist.png")
print(f"top {TOP} betweenness nodes: {get_top(betweenness, TOP)}")
del betweenness

#############
# Closeness #
#############
closeness = GT.closeness(GT.extract_largest_component(g),
                         norm=False, harmonic=True).get_array()
_plot_distribution(closeness, "Closeness distribution",
                   'Closeness coefficient', "img/closeness_dist.png")
print(f"top {TOP} closeness nodes: {get_top(closeness, TOP)}")
del closeness

##############
# Clustering #
##############
clustering = list(GT.local_clustering(g))
示例#11
0
def harmonic_centrality(g):
    """Return the harmonic closeness centrality of every vertex of *g*
    as a flat array.

    NOTE(review): the original body contained pseudo-diameter code and
    print statements *after* the ``return`` — unreachable, so it has
    been removed (it referenced the global ``g_friend_LC``, suggesting
    a copy-paste leftover from the surrounding script).
    """
    return gt.closeness(g, harmonic=True).get_array()

#-- Closeness Distribution of Largest Component --#

if descClose_bool == True:

    print("\n\n#-- Closeness Distribution --#\n")

    vprop_closeness = gt.closeness(g_friend_LC)
    g_friend_LC.vp.closeness = vprop_closeness

    close_array = np.array(vprop_closeness.a)

    close_array_index_LC = np.where(close_array != 0)
    close_array_LC = close_array[close_array_index_LC]

    print("Avg Closeness Centrality: ",
          sum(close_array_LC) / len(close_array_LC))  # 0.2769106346214449
    print("Median Closeness Centrality: ",
          np.median(close_array_LC))  # 0.27148728827604496
    print("Mode Closeness Centrality: ",
          stats.mode(close_array_LC))  # 0.2712535187945024

    plt.hist(
            weight_map[e] = 1. * common_votes / len(dep1[5:])
            edges[(dep1[4],dep2[4])] = [weight_map[e], dep1, dep2] # adds for debuging

        except Exception, e:
            print str(e)


# conventional centrality analysis

# weighted total degree
degree = g.degree_property_map('total', weight=weight_map)

# vertex betweenness: returns (vertex_map, edge_map); [0] is used below
betweeness = gt.betweenness(g, weight=weight_map)

# closeness
closeness = gt.closeness(g, weight=weight_map)

# Katz centrality
katz = gt.katz(g, weight=weight_map)

# PageRank
pagerank = gt.pagerank(g, weight=weight_map)

# Column labels now match the data order: the original listed
# 'betweenness', 'closeness', 'degree' while the zip supplied degree
# first, so those three columns were silently mislabelled.
metrics = ['name', 'diap', 'degree', 'betweenness', 'closeness', 'katz', 'pagerank']
df = pd.DataFrame(list(zip(vertex_to_name.values(), diap, degree.a.tolist(),
                           betweeness[0].a.tolist(), closeness.a.tolist(),
                           katz.a.tolist(), pagerank.a.tolist())),
                  columns=metrics)

# DataFrame.sort was removed in pandas 0.20; sort_values is the
# equivalent modern API. Result is displayed, not assigned (notebook).
df.sort_values('pagerank', ascending=True)[:30]
# Inner-join the four centrality DataFrames on (ctry, year) and export
# the merged table to Google Drive (Colab path).
ctrlity_frame = [df_eigen_centrality, df_harmnic_centrality, df_betweenness_centrality, df_degree_centrality]
ctrlity_merged = reduce(lambda left,right: pd.merge(left, right, on=['ctry', 'year'],
                                            how='inner'), ctrlity_frame).fillna('0')

ctrlity_merged.to_csv("/content/drive/MyDrive/G11-MEA-Diffusion/dataMEA_Ctrlity/ctrlity_output.csv")

"""### visualization"""

#eigenvector centrality

ee, x = gt.eigenvector(gt_2018_univ)
x.a /= (x.a*10 - 0.7)/0.04 # follow the formula in the book 
gt.graph_draw(gt_2018_univ, vertex_fill_color=x, vcmap=matplotlib.cm.gist_earth, vorder=x) #

# Closeness is only defined on a connected graph, so restrict to the
# largest component first.
gc = gt.GraphView(gt_2018_univ, vfilt=gt.label_largest_component(gt_2018_univ))
c = gt.closeness(gc)
# NOTE(review): c.a /= c.a / 232 algebraically collapses every value
# to the constant 232 (x / (x/232) == 232), which defeats the
# colour/order mapping below — presumably a different scaling such as
# c.a /= 232 was intended; confirm against the source the formula was
# copied from.
c.a /= c.a / 232
gt.graph_draw(gc, vertex_fill_color=c, vcmap=matplotlib.cm.Oranges, vorder=c)

#betweenness centrality 

# NOTE(review): bare betweenness/graph_draw (no gt. prefix) — these
# presumably come from a star import earlier in the notebook; verify.
bv, be = betweenness(gt_2018_univ)
graph_draw(gt_2018_univ, pos=None, vertex_fill_color=bv, vcmap=matplotlib.cm.summer)

deg = gt_2018_univ.degree_property_map("total")
gt.graph_draw(gt_2018_univ, vertex_fill_color=deg, vorder=deg)

# https://colab.research.google.com/github/count0/colab-gt/blob/master/colab-gt.ipynb#scrollTo=6km1lWMF2kAm

# NOTE(review): IPython shell magic — invalid in a plain .py module;
# only works inside a notebook cell.
!apt-get install
示例#15
0
# Keep only the largest connected component, viewed as undirected,
# then prune to a standalone copy.
g = gt.GraphView(g, vfilt=gt.label_largest_component(g), directed=False)
g = gt.Graph(g, prune=True)

# Sanity-check that everything is as it "should be".
print('chequeando...', args.file)
print('vertices', g.num_vertices())  # number of vertices
print('edges', g.num_edges())  # number of links

weight = g.ep['weight']
width = gt.prop_to_size(weight, ma=.5)

# Set up some of the arguments for drawing the graph.

pos = g.vp['pos_sfdp_infomap']

# Weight-aware closeness drives vertex size; the negated size array is
# used as the vertex draw order.
vcl = gt.closeness(g, weight=weight)
vsize = gt.prop_to_size(vcl)
vorder = -vsize.a

# Print the top-15 nodes by closeness.
df = pd.DataFrame({'node': list(g.vertices()), 'closeness': list(vcl)})
df.sort_values(by='closeness', inplace=True, ascending=False)
print(df.head(15))

print('drawing...')
# Draw the graph into filename.png (the graph_draw call continues past
# the end of this excerpt).
gt.graph_draw(
    g,
    pos,
    output_size=(500, 400),
    vertex_size=vsize,
    vertex_fill_color=vsize,
示例#16
0
    gCoocNode = nx.Graph()
    # NOTE(review): gCoocNode is immediately rebound to gCooc, so the
    # empty Graph above is dead — possibly a copy was intended; verify.
    gCoocNode = gCooc

    # Convert the NetworkX co-occurrence graph to graph-tool.
    gtgCooc = nx2gt_module.nx2gt(gCooc)

    # Edge weights and stable vertex ids carried over from NetworkX.
    CoocEdgeWeight = gtgCooc.edge_properties['weight']
    CoocVertexId = gtgCooc.vertex_properties['id']
    CoocVertexIter = gtgCooc.vertices()
    print("4")  # numbered progress markers for a long-running batch job
    # Weighted, normalized betweenness (edge map ep is unused below).
    CoocBetween, ep = gt.betweenness(gtgCooc, weight=CoocEdgeWeight, norm=True)
    print("5")
    #ee, CoocEigen = gt.eigenvector(gtgCooc, weight=CoocEdgeWeight)
    print("6")
    #ee, CoocAuthority, CoocHub = gt.hits(gtgCooc, weight =CoocEdgeWeight)
    #CoocPagerank = gt.pagerank(gtgCooc, weight =CoocEdgeWeight)
    # Weighted closeness over the converted graph.
    CoocCloseness = gt.closeness(gtgCooc, weight=CoocEdgeWeight)

    print("7")
    CoocKatz = gt.katz(gtgCooc, weight=CoocEdgeWeight)
    CoocClustering = gt.local_clustering(gtgCooc)
    print("8")
    # Weighted total degree (in + out, summed edge weights).
    CoocDegree = gtgCooc.degree_property_map("total", weight=CoocEdgeWeight)
    print("9")
    print("where")
    print("A")
    # NOTE(review): Python 2 print statement amid print() calls —
    # confirm which interpreter this module targets.
    print len(nodeList)
    print("B")

    # One tuple per vertex; the tuple literal continues past the end
    # of this excerpt.
    tempCoocList = []
    for i in CoocVertexIter:
        temp = (str(j_id), CoocVertexId[i], CoocDegree[i], CoocBetween[i],