Example #1
def metrics(g: Graph, file):
    summary(g)

    degrees = g.degree()
    betweenness = g.betweenness()
    closeness = g.closeness()
    clustering_coef = g.transitivity_local_undirected()

    file.write("\nGLOBAL MEASUURES\n")
    file.write("Connected: " + str(g.is_connected()) + "\n")
    file.write("Density: " + str(g.density()) + "\n")
    file.write("Diameter: " + str(g.diameter()) + "\n")
    file.write("Clustering Coefficient: " + str(g.transitivity_undirected()) +
               "\n")
    file.write("Average Local Clustering Coefficient: " +
               str(g.transitivity_avglocal_undirected()) + "\n")
    file.write("Average Degree: " + str(mean(degrees)) + "\n")
    file.write("Max Degree: " + str(g.maxdegree()) + "\n")
    file.write("Average Betweenness: " + str(mean(g.betweenness())) + "\n")
    file.write("Max Betweenness: " + str(max(betweenness)) + "\n")
    file.write("Average Closeness: " + str(mean(closeness)) + "\n")
    file.write("Max Closeness: " + str(max(closeness)) + "\n")

    file.write("\nLOCAL MEASURES\n")
    file.write("Vertex with highest degree: " +
               str(g.vs.select(_degree=g.maxdegree())['name']) + "\n")
    file.write("Vertex with highest betweenness: " +
               str(g.vs.select(_betweenness=max(betweenness))['name']) + "\n")
    file.write("Vertex with highest closeness: " +
               str(g.vs.select(_closeness=max(closeness))['name']) + "\n")
    file.write("Vertex with highest clustering coefficient: " +
               str(g.vs[clustering_coef.index(max(clustering_coef))]['name']) +
               "\n")

    return degrees, betweenness, closeness, clustering_coef
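A minimal usage sketch for the metrics function above. It assumes metrics lives in a module that also imports Graph, summary and mean from igraph (the snippet does not show its imports); the test graph, vertex names and output file name are illustrative only.

from igraph import Graph

# Illustrative graph: Zachary's karate club, with synthetic vertex names so the
# 'name' lookups inside metrics() have something to return.
g = Graph.Famous("Zachary")
g.vs["name"] = ["v{}".format(i) for i in range(g.vcount())]

with open("metrics_report.txt", "w") as report:
    degrees, betweenness, closeness, clustering_coef = metrics(g, report)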
Example #2
def print_graph_details(graph, communities=None):
    # print("Components")
    # d = g.components('WEAK')
    # a = d.giant()
    # print()
    #
    #
    # print("Decompose")
    # d = g.decompose('WEAK', 122222222, 7)
    #
    # print(d)

    print("Degree distribution")
    degree_dist = graph.degree_distribution()
    bins = degree_dist._bins  # per-bin counts (relies on a private attribute of igraph's Histogram)
    plot_histogram(bins)

    print("Average clustering coefficient")
    i = graph.transitivity_avglocal_undirected()
    print(i)

    print("Vertices")
    i = graph.vcount()
    print(i)

    print("Edges")
    i = graph.ecount()
    print(i)

    if communities:
        print('Modularity')
        q = graph.modularity(communities)
        print(q)

    print("Average degree distribution")
    i = mean(graph.degree())
    print(i)

    print("Clique number")
    i = graph.clique_number()
    print(i)

    print("Density")
    i = graph.density()
    print(i)

    print("Max degree")
    max_degree = graph.maxdegree()
    print(max_degree)

    print("Person with max degree")
    print([v.attributes()['name'] for v in graph.vs(_degree_eq=max_degree)])

    print("Eigenvector centrality")
    i = mean(
        graph.eigenvector_centrality()
    )  # looks at a combination of a node's edges and the edges of that node's neighbors:
    # it cares whether you are a hub, but also how many hubs you are connected to
    print(i)
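A hedged invocation sketch for print_graph_details. It assumes plot_histogram and mean are available in the same module as the function (neither is shown above); the graph, its vertex names and the community structure are illustrative choices.

import igraph

g = igraph.Graph.Famous("Zachary")
g.vs["name"] = [str(i) for i in range(g.vcount())]    # the function reads a 'name' vertex attribute
communities = g.community_multilevel().membership    # one plausible vertex-to-community assignment
print_graph_details(g, communities)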
Example #3
def get_graph_statistics(graph):
    return {
        'Size': str(graph.vcount()) + ' vertices',
        'Volume': str(graph.ecount()) + ' edges',
        'Average degree': str(round(mean(graph.degree()), 2)) + ' edges/vertex',
        'Clustering coefficient': str(round(graph.transitivity_undirected() * 100, 2)) + '%',
        'Diameter': str(graph.diameter(directed=False)) + ' edges',
    }
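A short usage sketch for get_graph_statistics, assuming mean has been imported from igraph in the surrounding module; the random graph is illustrative.

import igraph

g = igraph.Graph.Erdos_Renyi(n=100, p=0.05)
for key, value in get_graph_statistics(g).items():
    print(key + ": " + value)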
Example #4
def degree_stats(graph):
    """
    Gathers graph statistics relative to its degree and returns a DegreeStat object.

    Args:
        graph - an igraph Graph.

    Returns:
        degreestats - a DegreeStat object.

    """

    #Basic operations
    vertex_num = graph.vcount()
    edge_num = graph.ecount()

    #Degrees
    if graph.is_directed():
        degrees = graph.degree()
        max_degree = graph.maxdegree()
        avg_degree = mean(degrees)
        in_degrees = graph.indegree()
        avg_in_degree = mean(in_degrees)
        out_degrees = graph.outdegree()
        avg_out_degree = mean(out_degrees)

        degreestats = metrique_stats.stats_class.DegreeStat(vertex_num=vertex_num, edge_num=edge_num,
                    degrees=degrees, max_degree=max_degree, avg_degree=avg_degree,
                    in_degrees=in_degrees, avg_in_degree=avg_in_degree,
                    out_degrees=out_degrees, avg_out_degree=avg_out_degree, directed=graph.is_directed())
    else:
        degrees = graph.degree()
        max_degree = graph.maxdegree()
        avg_degree = mean(degrees)
        hist = graph.degree_distribution()

        degreestats = metrique_stats.stats_class.DegreeStat(vertex_num=vertex_num, edge_num=edge_num,
                    degrees=degrees, max_degree=max_degree, avg_degree=avg_degree,
                    deg_hist=hist, directed=graph.is_directed())

    return degreestats
Example #5
def ecc_stats(graph):
    """
    Gathers graph statistics relative to eccentricity and returns an EccStat object.

    Args:
        graph - an igraph Graph.

    Returns:
        eccstats - an EccStat object.

    """

    ecc = graph.eccentricity()
    avg_ecc = mean(ecc)

    if graph.is_directed():
        in_ecc = graph.eccentricity(mode='IN')
        avg_in_ecc = mean(in_ecc)
        out_ecc = graph.eccentricity(mode='OUT')
        avg_out_ecc = mean(out_ecc)

        in_radius = graph.radius(mode='IN')
        out_radius = graph.radius(mode='OUT')

        diameter = graph.diameter(directed=True)

        eccstats = metrique_stats.stats_class.EccStat(ecc, avg_ecc, in_ecc, avg_in_ecc, out_ecc, avg_out_ecc,
                                                      in_radius, out_radius, graph.is_directed(), diameter)


    else:
        radius = graph.radius()
        diameter = graph.diameter(directed=False)

        eccstats = metrique_stats.stats_class.EccStat(ecc, avg_ecc, directed=graph.is_directed(), diameter=diameter, radius=radius)

    return eccstats
Example #6
File: ec.py Project: dguelde/python
def main():
	#FILENAME="yeastInter_st.txt"
	FILENAME="USairport_2010.txt"
	ITERATIONSPERNODE=2000 #iterations on each node
	matrix = readFile(FILENAME) #numpy matrix
	networkSize=len(matrix)
	g = igraph.Graph.Adjacency((matrix>0).tolist())
	c = igraph.mean(g.degree())
	p = 1./c #transmission probability

	epidemicSize=np.zeros(networkSize) #average cascade size per node
	cascadeSize=np.zeros(ITERATIONSPERNODE) #cascade size per run on patient Zero node
	possibleNewInfections=[] #neighbors of contagious nodes
	newInfections=[] #newly infected nodes at a single time t
	for patientZero in xrange(networkSize): #everybody gets a turn...
		print patientZero
		for iteration in xrange(ITERATIONSPERNODE):
			start = time.time()
			immunity=np.random.rand(networkSize) #immunity chance for nodes
			condition=np.zeros(networkSize) #0=susceptible, 1=contagious, 2=infected but not contagious
			condition[patientZero]=1
			newInfection=True
			while(newInfection):
				newInfection=False
				diseaseSpreaders=np.where(condition==1)
				condition[condition==1]=2 #not contagious any more
				try: #will throw an error if there are no neighbors (e.g. patient zero has no edges)
					#NOTE: `neighbors` is never defined in this snippet; it is presumably a per-vertex
					#neighbor list built elsewhere in the original script
					exposed=[neighbors[spreader] for spreader in diseaseSpreaders][0]
				except TypeError:
					continue
				exposed=np.intersect1d(exposed,np.where(condition==0)) #remove non-susceptible from list
				if(len(exposed)==0): continue #if no susceptible, finished
				newInfections=np.intersect1d(exposed,exposed[np.where(immunity[np.array(exposed)]<p)]) #cascade spreads as function of p
				condition[newInfections]=1 #contagious
				if newInfections.sum()>0:
					newInfection=True
			cascadeSize[iteration]=len(np.where(condition!=0)[0]) #if contageous or infected, you count as sick
		epidemicSize[patientZero]=np.average(cascadeSize)
	outputFile=FILENAME[:-4]+"_undirected_{}_iterations_results.txt".format(ITERATIONSPERNODE)
	with open(outputFile, 'w') as f:
		for index in range(0, networkSize):
			winner = np.argmax(epidemicSize)
			f.write('{} {}\n'.format(winner + 1, epidemicSize[winner]))
			epidemicSize[winner] = 0
Example #7
    def stepInteligente(self):
        """ Un paso del modelo SIS en el que solo ataca nodos
        con un grado superior a la media."""
        ## Se extiende la infeccion
        s_to_i = set()  # Inicializamos un conjunto para los traspasos

        ## Grado medio del grafo
        g_medio = mean(self.grafo.degree()) // 1

        ## Calculamos para cada vertice infectado
        ## Cuales de sus vecinos seran infectados
        for v in self.compartimentos["I"]:
            neis = self.grafo.neighbors(v)
            s_to_i.update([
                nodo for nodo in neis if self.grafo.vs[nodo].degree() > g_medio
            ])
            ## Aplicamos los cambios
        self.compartimentos.move_vertices(s_to_i, "I")

        ## Algunos de los infectados se curaran
        i_to_s = muestraAleatoria(self.compartimentos["I"], self.gamma)
        self.compartimentos.move_vertices(i_to_s, "S")
Example #8
    def stepInteligente(self):
        """ Un paso del modelo SIS en el que solo ataca nodos
        con un grado superior a la media."""
        ## Se extiende la infeccion
        s_to_i = set()  # Inicializamos un conjunto para los traspasos

        # Percentil 90 de la dist del grado
        # de los nodos
        g_alto = mean(self.grafo.degree()) // 1
        ## Calculamos para cada vertice infectado
        ## Cuales de sus vecinos seran infectados
        for v in self.compartimentos["I"]:
            neis = self.grafo.neighbors(v)
            for nodo in neis:
                aux = []
                if (self.grafo.vs[nodo].degree() > g_alto
                        and self.compartimentos.get_state(nodo) == 'S'):
                    aux.append(nodo)
            s_to_i.update(aux)
            # Ahora calculamos los infectados por el proceso
            # normal
            aux = []
            for nei in muestraAleatoria(neis, self.beta):
                if self.compartimentos.get_state(nei) == "S":
                    aux.append(nei)
            s_to_i.update(aux)

            # s_to_i.update([nodo for nodo in neis if
            #                self.grafo.vs[nodo].degree() > g_alto
            #               and self.compartimentos.get_state(nodo) == 'S'])
            ## Aplicamos los cambios
        self.compartimentos.move_vertices(s_to_i, "I")

        ## Algunos se recuperan
        i_to_r = muestraAleatoria(self.compartimentos["I"], self.gamma)
        self.compartimentos.move_vertices(i_to_r, "R")
Example #9
def smallworld():
    print("Small World Property")
    print(log(g.vcount()) / log(mean(g.degree())))
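The quantity printed above, log(N) / log(<k>), is the usual random-graph estimate of the average shortest-path length; a self-contained sketch (graph parameters illustrative) comparing the estimate with the measured value:

import igraph
from math import log

g = igraph.Graph.Erdos_Renyi(n=1000, p=0.01)
estimate = log(g.vcount()) / log(igraph.mean(g.degree()))
measured = g.average_path_length()
print("estimated:", round(estimate, 2), "measured:", round(measured, 2))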
Example #10
def calc_stats(network, edge_type, method, path):

    # if network is weighted
    if network.is_weighted():

        # variables to hold stats
        node_count = network.vcount()
        edge_count = network.ecount()
        directed_status = 'Directed' if network.is_directed() else 'Undirected'
        weighted_status = 'Yes' if network.is_weighted() else 'No'
        connected_status = 'Yes' if network.is_connected() else 'No'
        avg_degree = ig.mean(network.degree(loops=False))
        avg_weighted_degree = ig.mean(network.strength(weights='weight'))
        diameter = network.diameter(directed=False, weights='weight')
        radius = network.radius(mode='ALL')
        density = network.density()
        modularity = network.community_multilevel(weights='weight').modularity
        communities = len(network.community_multilevel(weights='weight'))
        components = len(network.components())
        closeness = ig.mean(network.closeness(weights='weight'))
        node_betweenness = ig.mean(
            network.betweenness(directed=False, weights='weight'))
        edge_betweenness = ig.mean(
            network.edge_betweenness(directed=False, weights='weight'))
        avg_clustering_coeff = network.transitivity_avglocal_undirected()
        eigenvector_centrality = ig.mean(
            network.eigenvector_centrality(directed=False, weights='weight'))
        avg_path_length = network.average_path_length(directed=False)

    # if network is not weighted
    else:

        # variables to hold stats
        node_count = network.vcount()
        edge_count = network.ecount()
        directed_status = 'Directed' if network.is_directed() else 'Undirected'
        weighted_status = 'Yes' if network.is_weighted() else 'No'
        connected_status = 'Yes' if network.is_connected() else 'No'
        avg_degree = ig.mean(network.degree(loops=False))
        avg_weighted_degree = ig.mean(network.strength())
        diameter = network.diameter(directed=False)
        radius = network.radius(mode='ALL')
        density = network.density()
        modularity = network.community_multilevel().modularity
        communities = len(network.community_multilevel())
        components = len(network.components())
        closeness = ig.mean(network.closeness())
        node_betweenness = ig.mean(network.betweenness(directed=False))
        edge_betweenness = ig.mean(network.edge_betweenness(directed=False))
        avg_clustering_coeff = network.transitivity_avglocal_undirected()
        eigenvector_centrality = ig.mean(
            network.eigenvector_centrality(directed=False))
        avg_path_length = network.average_path_length(directed=False)

    # variable to hold output file
    output_file = open('../../data/networks/{}/network/txt/'
                       '{}/{}/stats.txt'.format(path, edge_type, method),
                       mode='w')

    # write stats to file
    output_file.write('> Network Overview\n\n')
    output_file.write('- Nodes: {}\n'.format(node_count))
    output_file.write('- Edges: {}\n'.format(edge_count))
    output_file.write('- Type: {}\n'.format(directed_status))
    output_file.write('- Weighted: {}\n'.format(weighted_status))
    output_file.write('- Connected: {}\n'.format(connected_status))
    output_file.write('- Average Degree: {0:.3f}\n'.format(avg_degree))
    # if network is weighted
    if network.is_weighted():
        output_file.write(
            '- Average Weighted Degree: {0:.3f}\n'.format(avg_weighted_degree))
    output_file.write('- Diameter: {}\n'.format(diameter))
    output_file.write('- Radius: {}\n'.format(radius))
    output_file.write('- Density: {0:.3f}\n'.format(density))
    output_file.write('- Modularity: {0:.3f}\n'.format(modularity))
    output_file.write('- Communities: {}\n'.format(communities))
    output_file.write('- Weak Components: {}\n'.format(components))
    output_file.write('- Node Closeness: {0:.3f}\n'.format(closeness))
    output_file.write('- Node Betweenness: {0:.3f}\n'.format(node_betweenness))
    output_file.write(
        '- Edge Betweenness: {0:.3f}\n\n'.format(edge_betweenness))

    output_file.write('> Node Overview\n\n')
    output_file.write('- Average Clustering Coefficient: {0:.3f}\n'.format(
        avg_clustering_coeff))
    output_file.write(
        '- Eigenvector Centrality: {0:.3f}\n\n'.format(eigenvector_centrality))

    output_file.write('> Edge Overview\n\n')
    output_file.write(
        '- Average Path Length: {0:.3f}\n'.format(avg_path_length))

    # close output file
    output_file.close()
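A hedged call sketch for calc_stats. The edge_type, method and path values below are placeholders (they only determine the output path), and the nested directory under ../../data/networks/ must already exist for the open() call to succeed; the weight attribute is added so the weighted branch runs.

import igraph as ig

network = ig.Graph.Famous("Zachary")
network.es["weight"] = [1.0] * network.ecount()    # mark the graph as weighted
calc_stats(network, edge_type="cooccurrence", method="louvain", path="example")    # placeholder arguments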
Example #11
plt.close()

plt.plot(s_in, s_out, ".")
plt.savefig("../img/p9-s-s.png")
plt.close()

# D
g_undirected = g.as_undirected(mode="collapse", combine_edges=dict(weight=sum))

plt.hist(g_undirected.degree())
plt.savefig("../img/p9-k.png")
plt.close()

s_data = np.matrix(g_undirected.get_adjacency(attribute="weight").data)
s = np.squeeze(np.asarray(s_data.sum(axis=0, dtype=float)))
plt.hist(s, bins=3)
plt.savefig("../img/p9-s.png")
plt.close()

# E
plt.plot(s, g_undirected.degree(), ".")
plt.savefig("../img/p9-k-vs-s.png")
plt.close()

# F
# "x == x" filters out NaN values (vertices with degree < 2 have undefined local transitivity)
sin_peso = G.mean(filter(lambda x: x == x, g_undirected.transitivity_local_undirected()))
con_peso = G.mean(filter(lambda x: x == x, g_undirected.transitivity_local_undirected(weights="weight")))

print("without weights:", sin_peso)
print("with weights:", con_peso)
Example #12
 def get_average_degree(g):
     return igraph.mean(g.degree())
	plog("\tbuilding projection: {}".format(np.count_nonzero(B)), log_file)

	# Actual User-User Projection
	padj = np.matmul(B, B.transpose()) 
	np.fill_diagonal(padj, 0)
	clip_padj = np.where( padj > MIN_EDGE_VALUE, 1, 0)

	uuproj_graph = ig.Graph.Adjacency(clip_padj.tolist(), mode=ig.ADJ_MAX) 
	components = uuproj_graph.components()

	# print("\t{} nodes, {} edges".format(uuproj_graph.vcount(),uuproj_graph.ecount()))
	plog("\t{}".format(components.summary()), log_file)

	large_cc = components.giant()
	plog("\t{} nodes, {} edges:".format(large_cc.vcount(),large_cc.ecount()), log_file)
	plog("\t{} ave degree".format(ig.mean(large_cc.degree())), log_file)

	### Community Detection Algorithms
	## Fast Greedy
	fg_vdendr = large_cc.community_fastgreedy()
	plog("\tCD - fast greedy dendrogram:", log_file)
	plog("\t opt cut: {}".format(fg_vdendr.optimal_count), log_file)
	cut_oi = fg_vdendr.as_clustering(n=fg_vdendr.optimal_count)
	plog("\t opt cut q: {}".format(cut_oi.q), log_file)

	## Leading Eigenvector
	plog("\tCD - Leading Eigenvector:", log_file)
	for i in range(2, 5):
		le_vclust = large_cc.community_leading_eigenvector(clusters=i)
		plog("\t  {} clusters: {}".format(i, le_vclust.q), log_file)
Example #14
el = Graph.Read_Ncol('karate.txt', directed=True)

#convert the edge list to an igraph graph object
karate = igraph.Graph.Read_Ncol('karate.txt')
#summary(karate)

#no of vertices
print("No of vertices", karate.vcount())
#no of edges
print("No of edges", karate.ecount())

#plot the graph
igraph.plot(karate)

print("Degree of vertices", karate.degree())
print("Mean: ", mean(karate.degree()))
#print("Betweeness: ", karate.edge_betweenness())

#plotly.tools.set_credentials_file(username='******', api_key='yeBweYgKVZKMkFUfS3G2')
#sorted_list = sorted(karate.degree())

#find cliques
clique = karate.cliques()
#print clique
#maximal cliques
cliques = karate.maximal_cliques()
#print cliques

#find cliques of size exactly 4
karate.cliques(min=4, max=4)
	print("\tbuilding projection: {}".format(np.count_nonzero(B)))

	# Actual User-User Projection
	padj = np.matmul(B, B.transpose()) 
	np.fill_diagonal(padj, 0)
	clip_padj = np.where( padj > MIN_EDGE_VALUE, 1, 0)

	uuproj_graph = ig.Graph.Adjacency(clip_padj.tolist(), mode=ig.ADJ_MAX) 
	components = uuproj_graph.components()

	# print("\t{} nodes, {} edges".format(uuproj_graph.vcount(),uuproj_graph.ecount()))
	print("\t{}".format(components.summary()))

	print("\t{} nodes, {} edges:".format(uuproj_graph.vcount(),uuproj_graph.ecount()))
	print("\t{} ave degree".format(ig.mean(uuproj_graph.degree())))

	### Community Detection Algorithms
	print("community detection, all ccs:")
	## Fast Greedy
	fg_vdendr = uuproj_graph.community_fastgreedy()
	print("\tCD - fast greedy dendrogram:")
	print("\t opt cut: {}".format(fg_vdendr.optimal_count))
	cut_oi = fg_vdendr.as_clustering(n=fg_vdendr.optimal_count)
	print("\t opt cut q: {}".format(cut_oi.q))

	## Leading Eigenvector
	print("\tCD - Leading Eigenvector:")
	for i in range(2, 5):
		le_vclust = uuproj_graph.community_leading_eigenvector(clusters=i)
		print("\t  {} clusters: {}".format(i, le_vclust.q))
Example #16
def analyse_graphs(user_graphs, users):
    """
    Analysis of the edge usage graph of the users.
    """
    user_graph_data = []
    print("Analysing user graphs...")

    for i, user_graph in enumerate(user_graphs):
        nodes = user_graph.vcount()
        edges = user_graph.ecount()
        apl = user_graph.average_path_length(directed=True, unconn=True)
        diameter = user_graph.diameter(directed=True, unconn=True)
        average_deg = igraph.mean(user_graph.degree())
        # degree_dist = user_graph.degree_distribution()
        giant_component_size = max(user_graph.components().sizes())
        user_graph_data.append({
            "user": users[i],
            "node_count": nodes,
            "edge_count": edges,
            "apl": apl,
            "diameter": diameter,
            "avg_degree": average_deg,
            "giant_component_size": giant_component_size
        })

        # Plotting graph
        # Coloring edges
        colors = ["orange", "darkorange", "red", "blue"]
        for e in user_graph.es:
            weight = e['weight']
            if weight >= 15:
                e['color'] = colors[3]
            elif 8 <= weight < 15:
                e['color'] = colors[2]
            elif 3 <= weight < 8:
                e['color'] = colors[1]
            else:
                e['color'] = colors[0]

        # Styling graph
        visual_style = {
            "bbox": (3000, 3000),
            "margin": 17,
            "vertex_color": 'grey',
            "vertex_size": 20,
            "vertex_label_size": 8,
            "edge_curved": False,
            "edge_width": user_graph.es['weight']
        }
        # Set the layout
        try:
            layout = user_graph.layout("kk")
            visual_style["layout"] = layout
            save_name = f'postgres_{users[i]}.png'
            igraph.plot(user_graph, SAVE_PATH + save_name, **visual_style)
            print(f"Graph from {users[i]} analysed and plotted to {save_name}")
        except MemoryError:
            print(f"Memory error. Skipping to plot {users[i]}'s graph.")
            continue
        # Saving results
        with open(SAVE_PATH + 'scaffold_results_postgres.json', 'w') as fp:
            json.dump(user_graph_data, fp, indent=4)
Example #17
    def getDegreeDistribution(self):
        return self.graph.degree_distribution()

    def getAveragePath(self):
        return self.graph.average_path_length()

    def getDegree(self):
        degree = self.graph.degree()
        self.graph.vs["degree"] = degree
        return degree

    def getClusterCoeficient(self):
        clusterCoeficient = self.graph.transitivity_local_undirected()
        self.graph.vs["clusterCoeficient"] = clusterCoeficient

        return clusterCoeficient

    def getAverageClustering(self):
        return self.graph.transitivity_undirected()


#nodes = 1331
#edges = 1515
nodes = 587
edges = 1270
g = Graph(nodes, edges)
print(igraph.mean(g.getDegree()))
print(g.getAverageShortestPathMean())
print(g.getAverageClustering())
print(g.getDiameter())
#igraph.plot(g.graph)
Example #18
print "Hello World"
print "Jehovah is my God"
print "I love my HoneyBee"
print "I adore Phillip and Nathan"
print "I love serving in the Temple and Courtyard of Jehovah all my day"
print 'God "Jehovah" is great and very much to be praised'
# print 'Jehovah is good to all'
print "Hens", 6+10/6
print "Roosters", 100-25*3%4
print 'Perc', 4%3
print 3+2+1-5+4%2-1/4+6
print "What is 5-7?", 5-7
print 5 > -2
print 5>=-2
print 5<=-2
from igraph import Graph, mean
g = Graph.GRG(100, 0.2)
mean(g.degree())

Example #19
def main():
    # ---- MAIN PART ----
    # Directed graph, because the rotation transformation applied here is irreversible.
    graph = igraph.Graph(512, directed=True)

    for i in range(512):
        state = get_state(i)
        # neighbours with inverting rows
        for j in range(3):
            state = get_state(i)
            for k in range(3):
                invert(state, j, k)
            neighbour_id = get_id(state)
            graph.add_edge(i, neighbour_id)

        # neighbours with inverting columns
        for j in range(3):
            state = get_state(i)
            for k in range(3):
                invert(state, k, j)
            neighbour_id = get_id(state)
            graph.add_edge(i, neighbour_id)

        # neighbours with inverting diagonals
        state = get_state(i)
        for j in range(3):
            invert(state, j, j)
        neighbour_id = get_id(state)
        graph.add_edge(i, neighbour_id)

        state = get_state(i)
        for j in range(3):
            invert(state, 2 - j, j)
        neighbour_id = get_id(state)
        graph.add_edge(i, neighbour_id)

        # neighbours with rotating
        # rows
        for j in range(3):
            state = get_state(i)
            rotate(state, j, True)
            neighbour_id = get_id(state)
            graph.add_edge(i, neighbour_id)
        # or columns
        for j in range(3):
            state = get_state(i)
            rotate(state, j, False)
            neighbour_id = get_id(state)
            graph.add_edge(i, neighbour_id)

    # remove multiple edges and self-loops
    graph.simplify(multiple=True, loops=True)
    graph.vs["label"] = range(graph.vcount())

    # Check whether graph is fully connected
    components = graph.components()
    component_sizes = components.sizes()

    print(component_sizes)
    # print(components.giant())

    # Print statistics
    diam = graph.diameter(directed=False, unconn=False)
    apl = graph.average_path_length(directed=False, unconn=False)
    cl = graph.transitivity_undirected()
    print(graph.summary())
    print(graph.vcount(), graph.ecount())
    print("diameter: ", diam, " path length: ", apl, " clustering: ", cl)
    # print(graph.degree())
    print("average degree: ", igraph.mean(graph.degree()))

    for component in components:
        print(component)

    # Plot graph
    layout = graph.layout("kk")
    visual_style = {
        "vertex_size": 40,
        "edge_width": 2,
        "layout": layout,
        "bbox": (4000, 4000)
    }

    igraph.plot(graph, "graphall.pdf", **visual_style)
Example #20
 def getAverageDegree(self):
     return igraph.mean(self.graph.degree())
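getAverageDegree above belongs to a class that is not shown; a minimal self-contained sketch of such a wrapper (the GraphWrapper name and constructor are assumptions made for illustration):

import igraph

class GraphWrapper:
    def __init__(self, graph):
        self.graph = graph    # an igraph.Graph instance

    def getAverageDegree(self):
        return igraph.mean(self.graph.degree())

print(GraphWrapper(igraph.Graph.Famous("Zachary")).getAverageDegree())    # ~4.59 for the karate-club graph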
Example #21
print(i)

print("Vertices")
i = g.vcount()
print(i)

print("Edges")
i = g.ecount()
print(i)

print('Modularity')
q = g.modularity(p)
print(q)

print("Average degree distribution")
i = mean(g.degree())
print(i)

print("Clique number")
i = g.clique_number()
print(i)

print("Density")
i = g.density()
print(i)

print("Max degree")
max_degree = g.maxdegree()
print(max_degree)

print("Person with max degree")
Example #22
def get_avgdegree(G):
    return round(ig.mean(G.degree()), 2)
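A one-line check for get_avgdegree, using an illustrative graph; Zachary's karate club has 34 vertices and 78 edges, so the rounded mean degree is 4.59.

import igraph as ig

print(get_avgdegree(ig.Graph.Famous("Zachary")))    # 4.59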
Example #23
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import igraph as G
import random

g = G.Graph.Erdos_Renyi(400, 1)

deleted = 0
while G.mean(g.degree()) > 1:
    deleted += 1
    g.delete_edges(random.randint(0, g.ecount() - 1))

print "Removed", deleted, "edges"