    def __preprocess(self):
        self.__graph = Graph.Read_GML(self.__graph_path)
        self.__vcount = self.__graph.vcount()
        self.__ecount = self.__graph.ecount()
        if self.__return_vnge:
            self.__von_Neumann_entropy = von_Neumann_entropy(self.__graph)

        tik = time.time()
        self.__one_dimensional_structural_entropy = one_dimensional_structural_entropy(self.__graph)
        tok = time.time()
        self.__time_structural_information = tok - tik

        tik = time.time()
        self.__approx_entropy_by_finger_hat = finger_hat_entropy(self.__graph)
        tok = time.time()
        self.__time_finger_hat = tok - tik

        tik = time.time()
        self.__approx_entropy_by_finger_tilde = finger_tilde_entropy(self.__graph)
        tok = time.time()
        self.__time_finger_tilde = tok - tik

        entropy_by_slaq = np.zeros(10)
        tik = time.time()
        for i in range(10):
            entropy_by_slaq[i] = slaq.vnge(self.__graph)
        tok = time.time()
        self.__time_slaq = (tok - tik) / 10
        self.__approx_entropy_by_slaq = entropy_by_slaq.mean()
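The repeated tik/tok bookkeeping above could be factored into a small context manager. A minimal sketch under that assumption (the timed helper is hypothetical, not part of the original class):

from contextlib import contextmanager
import time

@contextmanager
def timed(result):
    # fills result['elapsed'] on exit, mirroring the tik/tok pattern above
    tik = time.time()
    try:
        yield
    finally:
        result['elapsed'] = time.time() - tik

# usage sketch:
# stats = {}
# with timed(stats):
#     entropy = one_dimensional_structural_entropy(graph)
# print(stats['elapsed'])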
Example #2
def read_gml(path):
    g = Graph.Read_GML(path)
    adj = g.get_adjacency()
    n = adj.shape[0]
    a = np.array([adj[i] for i in range(n)])

    return a
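For reference, igraph's Matrix object exposes its rows through the .data attribute, so the row-by-row loop above can be collapsed into a single conversion. A minimal equivalent sketch (the read_gml_adjacency name is illustrative only):

import numpy as np
from igraph import Graph

def read_gml_adjacency(path):
    # Matrix.data is a plain list of rows, so np.array handles it directly
    return np.array(Graph.Read_GML(path).get_adjacency().data)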
Example #3
def import_graph(graph_loc):
    if isinstance(graph_loc, Graph):
        return graph_loc
    extension = graph_loc.split('.')[-1]
    if extension == 'edgelist':
        return Graph.Read_Edgelist(graph_loc)
    elif extension == 'gml':
        return Graph.Read_GML(graph_loc)
Example #4
def read_gml(filename):
    g = Graph.Read_GML(filename)
    edgelist = g.get_edgelist()
    # np.int was removed in NumPy 1.24; use an explicit integer dtype
    data = np.zeros((len(edgelist), 2), dtype=np.int64)
    for i, edge in enumerate(edgelist):
        data[i, 0] = edge[0]
        data[i, 1] = edge[1]
    return data
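Since get_edgelist() already returns (source, target) pairs, the same array can be built in one step. A minimal equivalent sketch (the read_gml_edges name is illustrative only):

import numpy as np
from igraph import Graph

def read_gml_edges(filename):
    # np.array converts the list of (source, target) tuples directly
    return np.array(Graph.Read_GML(filename).get_edgelist(), dtype=np.int64)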
Example #5
def import_graph(folder, ego):
    home = os.path.expanduser('~')
    if not os.path.isfile('%s/GALLERY/%s/%s/Graphs/friends.gml' %
                          (home, folder, ego)):
        return Graph.Formula('')
    graph = Graph.Read_GML('%s/GALLERY/%s/%s/Graphs/friends.gml' %
                           (home, folder, ego))
    graph['folder'] = folder
    graph['ego'] = ego
    return graph
Example #6
    def __preprocess(self):
        self.__graph = Graph.Read_GML(self.__graph_path)
        self.__ecount = self.__graph.ecount()
        self.__vcount = self.__graph.vcount()
        self.__sorted_degree_sequence = list(range(self.__vcount))
        self.__von_Neumann_entropy = von_Neumann_entropy(self.__graph)

        if self.__method == 'greedy':
            self.__sorted_degree_sequence.sort(
                key=lambda x: self.__graph.degree(x))
            for deg in self.__graph.vs.degree():
                self.__sum_dlogd += self.__xlogx(deg)
            self.__structural_information = log(
                2 * self.__ecount, 2) - self.__sum_dlogd / (2 * self.__ecount)
Example #7
    def __generate_anomalous_graph(self, idx):
        anomalous_graph = Graph.Read_GML(
            f'datasets/synthetic/anomaly-BA-{idx}.gml')

        N = anomalous_graph.vcount()
        anomalous_source = np.random.choice(N)

        # exclude the source itself and already-connected vertices, and
        # sample without replacement so the injected edges are distinct
        possible_targets = [
            i for i in range(N)
            if i != anomalous_source
            and not anomalous_graph.are_connected(i, anomalous_source)
        ]
        anomalous_targets = np.random.choice(possible_targets,
                                             self.__ddos_param,
                                             replace=False)

        new_edges = [(anomalous_source, t) for t in anomalous_targets]
        anomalous_graph.add_edges(new_edges)

        return anomalous_graph
Example #8
    def run(self):
        self.__start()

        for w in self.__weight_range:
            g = Graph.Read_GML(self.__graph_path)
            g.es['weight'] = 1.0
            for e in g.get_edgelist():
                g[e[0], e[1]] = np.random.uniform(1, w)

            laplacian = np.asarray(g.laplacian(weights='weight'))
            structural_information = self.__compute_structural_information(
                laplacian)
            von_Neumann_entropy = self.__compute_von_Neumann_entropy(laplacian)
            entropy_gap = structural_information - von_Neumann_entropy
            logger.info(
                f'structural information: ({structural_information:8.7f}), von Neumann entropy: ({von_Neumann_entropy:8.7f}), entropy gap: ({entropy_gap:8.7f})'
            )

        self.__quit()
Example #9
    def all_shortest_paths_and_LCS(self, lcs_pref_value="shortest_path"):
        nx.write_gml(self.G.graph, "data/temp_graph.gml")
        ig = Graph.Read_GML("data/temp_graph.gml")
        dist = np.array(ig.shortest_paths())
        self._dist_top_down = dist
        concept_pos = [self.G.get_position(key) for key in self.G._concepts]
        sim_m = dist[:, concept_pos]
        s = sim_m.shape
        if lcs_pref_value != "shortest_path":
            return self.sim_leastCommonSubsummer(sim_m, lcs_pref_value)
        l = s[1]
        M = np.zeros((l, l))
        LCS = np.zeros((l, l))
        for i in range(l):
            M_i_j = sim_m + sim_m[:, i][np.newaxis].T
            arg_min_j = np.argmin(M_i_j, axis=0)
            M[i, :] = M_i_j[arg_min_j, np.arange(l)]
            LCS[i, :] = arg_min_j
        self._SIM = M
        self._LCS = LCS
        print("done calculating all shortest distances")
        return M, LCS
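If a recent python-igraph is available (0.9+), the temporary GML round trip above can be avoided. A minimal sketch of the alternative, not part of the original class:

import numpy as np
from igraph import Graph

def networkx_to_distance_matrix(nx_graph):
    # Graph.from_networkx converts a networkx graph directly (python-igraph >= 0.9)
    ig = Graph.from_networkx(nx_graph)
    # all-pairs shortest-path lengths, as in the method above
    return np.array(ig.shortest_paths())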
Example #10
    def import_graph(self, graph_loc):
        extension = graph_loc.split('.')[-1]
        if extension == 'edgelist':
            return Graph.Read_Edgelist(graph_loc)
        elif extension == 'gml':
            return Graph.Read_GML(graph_loc)
        elif extension == 'graphml':
            return Graph.Read(graph_loc)
        elif extension == 'dl':
            with open(graph_loc, 'r') as to_read:
                data_reached = False
                edge_list = []
                for line in to_read:
                    if data_reached:
                        # split on whitespace so the trailing newline is dropped
                        edge = line.split()[0:2]
                        if edge in edge_list or [edge[1], edge[0]] in edge_list:
                            continue
                        edge_list.append(edge)
                    elif line == 'data:\n':
                        data_reached = True
            return Graph.TupleList(edge_list, directed=False)
Example #11
    def __init__(self,
                 fname,
                 g_fname,
                 config,
                 interaction=list(),
                 debug=False):
        ''' g_fname : string -- filename of input graph in GML
            config  : list of tuples -- node to qbit mapping
            interaction : list of tuples -- qbit pairs
            steps  : maximum number of steps for mapping '''
        self.fname = fname
        self.g = Graph.Read_GML(g_fname)

        self.config = config
        self.interaction = interaction
        self.gate_count = len(interaction)
        self.steps = -1
        self.qbits = []
        # update graph with qbit positions based on config
        for (node, qbit) in config:
            v = self.g.vs.find(node)
            v['qbit'] = qbit
            self.qbits.append(qbit)
        self.debug = debug
Example #12
mapa.drawcountries(linewidth = 1.2, zorder = 2)
# draw state borders
mapa.drawstates(linewidth = 0.5, zorder = 2)
# continent color
mapa.fillcontinents(color = '#c0f772', zorder = 1)
# ocean color
mapa.drawmapboundary(fill_color = '#49c4d1', zorder = 0)

# grid divisions
a = 68 # number of divisions in latitude
b = 82 # number of divisions in longitude

# open the graph
rd = os.path.dirname(os.path.dirname(os.getcwd()))
rd = rd + '/Resultados e Dados/Australia'
grafo = gr.Read_GML(rd + '/LatLong05/grafosMes/grafo47_Nov-2006.gml')

# list of vertex longitudes
longs = grafo.vs['longitude']

# list of vertex latitudes
lats = grafo.vs['latitude']

# list of vertex data (degrees)
dados = grafo.degree()

del grafo

# turn the data list into a matrix
dados = np.array(dados)
dados = dados.reshape(a, b)
Example #13
    max_partitions = None

    while times:
        temp_partitions = master.GRAPH_SETTINGS['detection_func'](graph, **master.GRAPH_SETTINGS['func_args'])
        modularity = temp_partitions.modularity
        if max_modularity < modularity:
            max_modularity = modularity
            max_partitions = temp_partitions

        times -= 1

    return max_partitions


if __name__ == '__main__':
    test_graph = Graph.Read_GML(master.GRAPH_SETTINGS['path'])
    partitions = choose_partitions(test_graph)

    # for part in partitions:
    #    logger.info(f'{part}')
    # test_graph.write_gml('test/before.gml')
    # print(test_graph.get_edgelist())
    i0 = 1
    i1 = 2
    #############################################################
    #fastadd    radomadd      mindegreeadd     betweenessadd    maxdegreeadd
    combine = CommunityCombine(graph=test_graph.copy(), partitions=partitions, index0=i0, index1=i1, **master.GRAPH_SETTINGS)
    combine.run()
    eval = CommunityEvaluation(graph=test_graph.copy(), partitions=partitions, index0=i0, index1=i1, ffname='fastadd',
                               **master.GRAPH_SETTINGS)
    eval.run()
Example #14
rd = rd + '/Resultados e Dados/Amazonas'

# list of graph file names
f = os.listdir(rd + "/grafosMes")
# sort the names chronologically
f = ordenaNomesArquivos(f)

# lists for the graph data
deg = []
bet = []
clo = []

# collect the data from the graphs
for arquivo in f:
    # open the monthly graph
    g = Graph.Read_GML(rd + "/grafosMes/" + arquivo)
    # collect the metrics of all vertices
    deg.append(g.degree())
    bet.append(g.betweenness())
    clo.append(g.closeness())
    del g
del f

# normalized data

# totals
t_deg = total2d(deg)
t_bet = total2d(bet)
t_clo = total2d(clo)

# z-scores
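The ordenaNomesArquivos helper is not included in this excerpt. A minimal sketch of what it might do, assuming the monthly files follow the 'grafo<index>_<Month>-<Year>.gml' pattern seen in the other examples, with the numeric index increasing chronologically:

import re

def ordenaNomesArquivos(nomes):
    # sort 'grafo<idx>_<Month>-<Year>.gml' names by their numeric index,
    # which appears to follow chronological order
    return sorted(nomes, key=lambda nome: int(re.search(r'grafo(\d+)_', nome).group(1)))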
Example #15
        in_edges += subgraph.ecount()

    print(f"fraction: {in_edges / graph.ecount()}")
    print("Degree Distribution: ")
    print(graph.degree_distribution())
    return parts.modularity


def desc_learners(learners: List[Learner]):
    result = list()

    for learner in learners:
        result.append(
            [
                [round(val, 4) for val in learner._rows],
                [round(val, 4) for val in learner._cols],
            ]
        )

    return json.dumps(result)


if __name__ == '__main__':

    graph_name = "4_50_1_10_0.9"
    test_graph = Graph.Read_GML(f"../data/gaussian/{graph_name}.gml")
    desc(test_graph)
    # test_graph.name = graph_name
    #
    # get_edges(test_graph, 1, fast_resistance, 1000, 1000)
Example #16
from igraph import Graph
from igraph import summary
from igraph import plot, mean
import igraph
# note: plotly.plotly requires plotly < 4; in plotly 4+ it moved to chart_studio.plotly
import plotly.plotly as py
import plotly.graph_objs as go
import plotly

karate = Graph.Read_GML("karate.gml")
igraph.plot(karate)
#read edge list from a data file
el = Graph.Read_Ncol('karate.txt', directed=True)

#convert the edgelist to an igraph graph object
g = igraph.Graph.Read_Ncol('karate.txt')
#summary(karate)

#no of vertices
print("No of vertices", karate.vcount())
#no of edges
print("No of edges", karate.ecount())

#plot the graph
igraph.plot(g)

print("Degree of vertices", karate.degree())
print("Mean: ", mean(karate.degree()))
#print("Betweeness: ", karate.edge_betweenness())

#plotly.tools.set_credentials_file(username='******', api_key='yeBweYgKVZKMkFUfS3G2')
Example #17
init_iter_num = 10000
iter_num = 1000
available_action = 10
edge_sum = 500
one_time_edge_num = 5

graph_names = [
    # ('gaussian', '6_50_1_20_0.9'),
    # ('gaussian', '11_28_1_5_0.9'),
    ('real', 'dblp_202'),
    # ('lfr', '500_2.5_1.5_0.1_5_40'),
    # ('gaussian', '10_50_1_10_0.9'),
    # ('gaussian', '4_50_1_10_0.9'),
    # ('lfr', '200_2.5_1.5_0.2_5_30')
]
for data_dir, graph_name in graph_names:
    graph = Graph.Read_GML(f"data/{data_dir}/{graph_name}.gml")

    for i in range(repeat):
        copy_graph = graph.copy()
        copy_graph.name = f"{graph_name}_{one_time_edge_num}"
        runner = AdaptRunner(graph=copy_graph,
                             iter_num=iter_num,
                             init_iter_num=init_iter_num,
                             available_action=available_action,
                             edge_sum=edge_sum,
                             mode=13,
                             one_time_edge_num=one_time_edge_num,
                             init_with_membership=False)
        runner.run()
Example #18
    return math.floor((y - lat_i) * 2)


# directory for results and data
rd = os.path.dirname(os.getcwd())
rdAu = rd + '/Resultados e Dados/Australia'

# read the GML files of the Australia graphs
fAu = os.listdir(rdAu + "/grafosMes")  # list of graph file names
fAu = ordenaNomesArquivos(fAu)  # sort the names chronologically

# take the first graph for the test
arquivo = fAu[0]

# open the graph
g = Graph.Read_GML(rdAu + "/LatLong05/grafosMes/" + arquivo)

print(g.summary())

labelsToInclude = []

for y in range(34, 66):
    for x in range(14, 64):
        labelsToInclude.append(str(x) + ',' + str(y))

verticesToDelete = []

for v in g.vs:
    if v['name'] not in labelsToInclude:
        verticesToDelete.append(v.index)
Example #19
# generate the coordinates of the grid divisions
y = np.linspace(lat_i, lat_f, num=a)
x = np.linspace(long_i, long_f, num=b)

# turn the coordinate lists into matrices
xx, yy = np.meshgrid(x, y)

numMapa = 0

# iterate over the files
for arquivo in f:
    ano = substringEntreChars(arquivo, '-', '.')
    mes = substringEntreChars(arquivo, '_', '-')

    # open the graph
    grafo = gr.Read_GML(rd + '/grafosMes/' + arquivo)

    # list of vertex longitudes
    longs = grafo.vs['longitude']
    # list of vertex latitudes
    lats = grafo.vs['latitude']
    # lists of vertex data
    dados_degree = grafo.degree()
    dados_betweenness = grafo.betweenness()
    dados_closeness = grafo.closeness()

    del grafo

    # turn the data lists into matrices
    dados_degree = np.array(dados_degree)
    dados_degree = dados_degree.reshape(a, b)
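The substringEntreChars helper is not shown in this excerpt. A minimal sketch consistent with its use above (for a name like 'grafo47_Nov-2006.gml', the pair ('_', '-') yields the month and ('-', '.') the year); the implementation details are an assumption:

def substringEntreChars(s, c1, c2):
    # return the substring between the first occurrence of c1 and the
    # following occurrence of c2, e.g. ('grafo47_Nov-2006.gml', '_', '-') -> 'Nov'
    start = s.index(c1) + 1
    return s[start:s.index(c2, start)]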
Example #20
# collect the data for Africa
for arquivo in fAf: # iterate over all monthly graphs in order
    # store the year of the graph
    A = substringEntreChars(arquivo, '-', '.')
    dados['ano'].append(A)

    # store the month of the graph
    M = substringEntreChars(arquivo, '_', '-')
    dados['mes'].append(M) # store the month of the graph

    # store the region of the graph
    dados['regiao'].append(0) # 0 -> Africa

    # open the graph
    g = Graph.Read_GML(rdAf + "/grafosMes/" + arquivo)

    # compute and store the mean degree
    listaDado = g.degree() # listaDado holds the degrees
    dado = somaLista(listaDado)/len(listaDado) # dado holds the mean
    dados['mean_degree'].append(dado)

    # compute and store the degree variance
    var = sum((i - dado) ** 2 for i in listaDado) / len(listaDado)
    dados['variance_degree'].append(var)
    del var

    # compute and store the mean betweenness
    listaDado = g.betweenness()
    dado = somaLista(listaDado)/len(listaDado)
    dados['mean_betweenness'].append(dado)
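somaLista is another helper missing from the excerpt; from its use above it is simply a list sum, so a one-line sketch suffices (assumption):

def somaLista(lista):
    # sum of the elements of a list (equivalent to the built-in sum)
    return sum(lista)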
Example #21
    def __preprocess(self):
        for i in range(self.__num_graph):
            self.__graph_stream[i] = Graph.Read_GML(
                f'datasets/synthetic/anomaly-BA-{i}.gml')
Example #22
# generate the coordinates of the grid divisions
y = np.linspace(lat_i, lat_f, num = a)
x = np.linspace(long_i, long_f, num = b)

# turn the coordinate lists into matrices
xx, yy = np.meshgrid(x, y)

numMapa = 0

# iterate over the files
for arquivo in f:
    ano = substringEntreChars(arquivo, '-', '.')
    mes = substringEntreChars(arquivo, '_', '-')

    # open the graph
    grafo = gr.Read_GML(rd + '/LatLong05/grafosMes/' + arquivo)

    # list of vertex longitudes
    longs = grafo.vs['longitude']
    # list of vertex latitudes
    lats = grafo.vs['latitude']
    # lists of vertex data
    dados_degree = grafo.degree()
    dados_betweenness = grafo.betweenness()
    dados_closeness = grafo.closeness()

    del grafo

    # turn the data lists into matrices
    dados_degree = np.array(dados_degree)
    dados_degree = dados_degree.reshape(a, b)
Example #23
from core.gutil import GUtil
from core.learning.social_learning import SocialLearning
from igraph import Graph
import pickle
from core.learning.social_learning import Learner


def generate_learners(graph, rounds, actions, output_path=None):
    gutil = GUtil(graph)
    slearning = SocialLearning(gutil, actions, True, None)
    slearning.emerge(rounds)

    if not output_path:
        return slearning.learners

    file_name = output_path + f"/{graph.name}.learners"
    with open(file_name, 'wb') as f:
        obj = {'learners': slearning.learners, 'payoff': slearning.payoff}
        pickle.dump(obj, f)
    return slearning.learners


if __name__ == '__main__':
    graph_name = "300_2.5_1.5_0.1_5_50"
    graph = Graph.Read_GML("../data/lfr/" + graph_name + ".gml")
    graph.name = graph_name

    learners = generate_learners(graph, 10000, 5, '../data/learners')
    from collections import Counter
    print(Counter(learner.action for learner in learners))
Example #24
mapa.drawcountries(linewidth=1.2, zorder=2)
# draw state borders
mapa.drawstates(linewidth=0.5, zorder=2)
# continent color
mapa.fillcontinents(color='#c0f772', zorder=1)
# ocean color
mapa.drawmapboundary(fill_color='#49c4d1', zorder=0)

# grid divisions
a = 68  # number of divisions in latitude
b = 82  # number of divisions in longitude

# open the graph
rd = os.path.dirname(os.path.dirname(os.getcwd()))
rd = rd + '/Resultados e Dados/Australia'
grafo = gr.Read_GML(rd + '/LatLong05/grafosMes/grafo84_Dez-2009.gml')

# list of vertex longitudes
longs = grafo.vs['longitude']

# list of vertex latitudes
lats = grafo.vs['latitude']

# list of vertex data (degrees)
dados = grafo.degree()

del grafo

# turn the data list into a matrix
dados = np.array(dados)
dados = dados.reshape(a, b)
Example #25
def load_unipartite_undirected_gml(file):
    graph = Graph.Read_GML(file)
    graph = delete_self_edges(graph)
    return graph
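delete_self_edges is not defined in this excerpt. A minimal sketch using igraph's is_loop, under the assumption that the helper only strips loop edges:

def delete_self_edges(graph):
    # remove loop edges (u, u); everything else is left untouched
    loops = [e.index for e, is_loop in zip(graph.es, graph.is_loop()) if is_loop]
    graph.delete_edges(loops)
    return graph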
Example #26
    path = expanduser('~/data/three/%s/statuses.jsons' % ego)

    if isfile(path):
        f = open(path, 'rb')
    else:
        gz = path + ".gz"
        f = gzip.open(gz, 'rb')
    return f


for ego in listdir('%s/data/three/' % home):
    if ego[0] != 'a':
        continue

    print(ego)
    graph = Graph.Read_GML('%s/GALLERY/three/%s/Graphs/friends.gml' %
                           (home, ego))
    if len(graph.vs) == 0:
        continue
    if 'name' not in graph.vs[0].attribute_names():
        continue
    if 'cluster' not in graph.vs[0].attribute_names():
        continue
    cluster_per_alter = {v['name']: int(v['cluster']) for v in graph.vs}
    clusters_per_status = {}

    nb_per_cluster = [0] * (max([int(v['cluster']) for v in graph.vs]) + 1)
    for v in graph.vs:
        nb_per_cluster[int(v['cluster'])] += 1

    statuses = open_statuses(ego)
    for line in statuses:
Example #27
# collect the data for Africa
for arquivo in fAf:  # iterate over all monthly graphs in order
    # store the year of the graph
    A = substringEntreChars(arquivo, '-', '.')
    dados['ano'].append(A)

    # store the month of the graph
    M = substringEntreChars(arquivo, '_', '-')
    dados['mes'].append(M)  # store the month of the graph

    # store the region of the graph
    dados['regiao'].append(0)  # 0 -> Africa

    # open the graph
    g = Graph.Read_GML(rdAf + "/grafosMes/" + arquivo)

    # compute and store the mean degree
    listaDado = g.degree()  # listaDado holds the degrees
    dado = somaLista(listaDado) / len(listaDado)  # dado holds the mean
    dados['mean_degree'].append(dado)

    # compute and store the degree variance
    var = sum((i - dado)**2 for i in listaDado) / len(listaDado)
    dados['variance_degree'].append(var)
    del var

    # compute and store the mean betweenness
    listaDado = g.betweenness()
    dado = somaLista(listaDado) / len(listaDado)
    dados['mean_betweenness'].append(dado)