def CD_each_step(dynNetSN:tn.DynGraphSN,method=None):
    """
    Apply a community detection at each step

    Compute communities at each snapshot and return a dynamic community object containing them.

    :param dynNetSN: a dynamic network as a DynGraphSN
    :param method: a function, the community detection algorithm to use. Default: the Louvain algorithm. Must return a list of sets of nodes, or a dictionary {community name: set of nodes}
    :return: a DynCommunitiesSN object
    """
    if method is None:
        method = best_partition

    coms = DynCommunitiesSN()
    for SNt in dynNetSN.snapshots():
        coms.set_communities( SNt)
        if len(dynNetSN.snapshots(SNt).edges())>0:
            partition = method(dynNetSN.snapshots(SNt))
            if isinstance(partition,dict): #louvain is returning a different format
                asNodeSets = affiliations2nodesets(partition)
                partition = [asNodeSets[c] for c in asNodeSets]
            for nodes in partition:
                coms.add_community(SNt, nodes)
    return coms
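
A minimal usage sketch (assumptions: tnetwork is importable as tn, CD_each_step is in scope, and the toy interactions are invented for illustration):

import tnetwork as tn

dg = tn.DynGraphSN()
dg.add_interaction("a", "b", 1)  # edge (a, b) in the snapshot at time 1
dg.add_interaction("b", "c", 1)
dg.add_interaction("a", "c", 2)

coms = CD_each_step(dg)             # Louvain on each snapshot by default
print(coms.snapshot_communities())  # communities indexed by snapshot time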
Example #2
def read_graph_link_stream(inputFile: str) -> DynGraphSN:
    """
    Format used by SOCIOPATTERN

    This format is a variation of the snapshot format in which all snapshots are stored in a single file, adapted for observations made
    at a high framerate (each individual snapshot is not meaningful on its own).

    Format:
    ::

        DATE1	N1	N2
        DATE1	N2	N3
        DATE2	N1	N2
        DATE3	N1	N2
        DATE3	N2	N4
        DATE3	N5	N2

    :param inputFile: address of the file to read
    :return: DynGraphSN
    """
    theDynGraph = DynGraphSN()
    with open(inputFile) as f:
        for l in f:
            l = l.rstrip("\n").split("\t")
            date = int(l[0])
            n1 = l[1]
            n2 = l[2]
            theDynGraph.add_interaction(n1, n2, date)
    return theDynGraph
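
For illustration, a hedged round-trip sketch: write a tiny SOCIOPATTERNS-style file, then parse it back (the file name is arbitrary):

with open("stream.tsv", "w") as f:
    f.write("1\tA\tB\n")
    f.write("1\tB\tC\n")
    f.write("2\tA\tB\n")

dg = read_graph_link_stream("stream.tsv")
print(len(dg.snapshots()))  # 2 snapshots: dates 1 and 2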
Example #3
def CD_each_step(dynNetSN: tn.DynGraphSN, method=None, multithread=False):
    """
    Apply a community detection at each step

    Compute communities at each snapshot and return a dynamic community object containing them.

    :param dynNetSN: a dynamic network as a DynGraphSN
    :param method: a function, the community detection algorithm to use. Default: the Louvain algorithm. Must return a list of sets of nodes, or a dictionary {community name: set of nodes}
    :param multithread: if True, detect communities on the snapshots in parallel
    :return: a DynCommunitiesSN object
    """
    if method is None:
        method = best_partition
    coms = DynCommunitiesSN()

    if multithread:
        procs_to_use = int(mp.cpu_count())
        print("Multi-thread, number of processors: ", procs_to_use)

        pool = mp.Pool(procs_to_use)

        allComs = pool.starmap_async(__compute_communities,
                                     [(SNt, dynNetSN.snapshots(SNt), method)
                                      for SNt in dynNetSN.snapshots()]).get()
        pool.close()
    else:
        bar = progressbar.ProgressBar(max_value=len(dynNetSN.snapshots()))
        count = 0
        bar.update(0)
        allComs = []
        for SNt in dynNetSN.snapshots():
            allComs.append(
                __compute_communities(SNt, dynNetSN.snapshots(SNt), method))
            bar.update(count)
            sys.stdout.flush()
            count += 1
        bar.update(count)
        sys.stdout.flush()

    unique_id = 0
    for SNt, partition in allComs:
        coms.set_communities(SNt, {
            str(unique_id) + "_" + str(i): com
            for i, com in enumerate(partition)
        })
        unique_id += 1

    return coms
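
The helper __compute_communities is not shown in this listing. A minimal sketch consistent with how it is called above, mirroring the single-snapshot logic of example #1 (it must return a (time, partition) pair, where partition is a list of node sets):

def __compute_communities(t, graph, method):
    # hypothetical reconstruction of the missing helper
    if len(graph.edges()) == 0:
        return (t, [])
    partition = method(graph)
    if isinstance(partition, dict):  # louvain returns a node -> community dict
        as_node_sets = affiliations2nodesets(partition)
        partition = [as_node_sets[c] for c in as_node_sets]
    return (t, partition)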
Example #4
def quality_at_each_step(dynamicCommunities: tn.DynCommunitiesSN,
                         dynamicGraph: tn.DynGraphSN,
                         score=None):
    """
    Compute a community quality at each step

    :param dynamicCommunities: dynamic communities as SN
    :param dynamicGraph: the corresponding dynamic graph as SN
    :param score: score to use, default: Modularity
    :return: pair (scores, sizes)
    """

    if score is None:
        score = nx.algorithms.community.modularity
    scores = []
    sizes = []

    #for each step
    for t, affils in dynamicCommunities.snapshot_communities().items():
        g = dynamicGraph.snapshots(t)
        partition = list(affils.values())
        try:
            sc = score(g, partition)
            scores.append(sc)
        except Exception:  #score undefined, e.g. empty graph or invalid partition
            scores.append(None)
        sizes.append(len(g.nodes))

    return scores, sizes
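
A hedged usage sketch, reusing CD_each_step from the earlier examples to produce the communities being scored:

import tnetwork as tn

dg = tn.DynGraphSN()
dg.add_interaction("a", "b", 0)
dg.add_interaction("b", "c", 0)
dg.add_interaction("a", "b", 1)

communities = CD_each_step(dg)
scores, sizes = quality_at_each_step(communities, dg)
print(scores)  # one modularity value (or None) per snapshot
print(sizes)   # number of nodes in each snapshot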
Example #5
def write_as_SN_E(graph: tn.DynGraphSN, filename):
    """

    :param filename:
    :return:
    """
    nodes = list(graph.cumulated_graph().nodes())
    dict_nodes = {n: i for i, n in enumerate(nodes)}
    times = list(graph.change_times())
    dict_times = {t: i for i, t in enumerate(times)}

    interactions = []
    for t, g in graph.snapshots().items():
        renamed = [[dict_nodes[e[0]], dict_nodes[e[1]]] for e in g.edges()]
        interactions.append(renamed)
    with open(filename, "w") as f:
        json.dump({
            "nodes": nodes,
            "times": times,
            "interactions": interactions
        }, f)
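
A hedged sketch of the resulting JSON for a two-snapshot toy graph (the exact node and time indices depend on enumeration order):

import tnetwork as tn

dg = tn.DynGraphSN()
dg.add_interaction("a", "b", 0)
dg.add_interaction("b", "c", 1)
write_as_SN_E(dg, "graph.json")
# graph.json now contains something like:
# {"nodes": ["a", "b", "c"], "times": [0, 1],
#  "interactions": [[[0, 1]], [[1, 2]]]}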
Example #6
def transversal_network_leidenalg(dyn_graph: tn.DynGraphSN,
                                  interslice_weight=1,
                                  elapsed_time=False):
    """
    Multiplex community detection reimplemented in leidenalg

    Algorithm described in [1]
    (see method `mucha_original` for more information)
    This function uses the implementation in the leidenalg library instead of the original Matlab implementation.
    It requires the leidenalg library (including igraph) to be installed.
    It is usually slower than the original implementation (but does not require Matlab).

    [1] Mucha, P. J., Richardson, T., Macon, K., Porter, M. A., & Onnela, J. P. (2010).
    Community structure in time-dependent, multiscale, and multiplex networks.
    science, 328(5980), 876-878.

    :param dyn_graph: dynamic network as a DynGraphSN
    :param interslice_weight: weight of the coupling links between a node and its copy in the next snapshot
    :param elapsed_time: if True, also return the computation time
    :return: a DynCommunitiesSN object (and a dictionary of durations if elapsed_time is True)
    """
    print("preprocessing transversal network leidenalg ")

    graphs = dyn_graph.snapshots()
    igraph_graphs = sortedcontainers.SortedDict()
    for t, g in graphs.items():
        igraph_graphs[t] = __from_nx_to_igraph(g)

    start_time = time.time()
    print("calling external code")

    coms, scores = la.find_partition_temporal(
        list(igraph_graphs.values()),
        la.ModularityVertexPartition,
        interslice_weight=interslice_weight,
        vertex_id_attr="name")
    duration = time.time() - start_time
    print("postprocessing ")

    to_return = tn.DynCommunitiesSN()
    ts = list(igraph_graphs.keys())
    for i in range(len(coms)):
        t = ts[i]
        partition = single_list_community2nodesets(coms[i],
                                                   igraph_graphs[t].vs["name"])
        to_return.set_communities(t, partition)

    print("sucessfully finished transversal network leidenalg  ")

    if elapsed_time:
        return (to_return, {"total": duration})
    return to_return
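
A hedged usage sketch; it assumes the leidenalg and python-igraph packages are installed:

import tnetwork as tn

dg = tn.DynGraphSN()
dg.add_interaction("a", "b", 0)
dg.add_interaction("b", "c", 0)
dg.add_interaction("a", "b", 1)
dg.add_interaction("b", "c", 1)

coms, timing = transversal_network_leidenalg(dg, interslice_weight=1,
                                             elapsed_time=True)
print(timing["total"], "seconds")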
Example #7
def _write_for_dynamo(dynGraph: tn.DynGraphSN, outputDir: str):
    """
    """
    allGraphs = list(dynGraph.snapshots().values())
    sn_dir = os.path.join(outputDir, "sn")
    diff_dir = os.path.join(outputDir, "diff")

    # create the directories if necessary, then empty them
    os.makedirs(sn_dir, exist_ok=True)
    os.makedirs(diff_dir, exist_ok=True)

    for d in (sn_dir, diff_dir):
        for f in os.listdir(d):
            os.remove(os.path.join(d, f))

    all_nodes = set()
    allGraphs_copy = []
    for g in allGraphs:
        all_nodes.update(set(g.nodes()))
    nodes_dict = {v: i for i, v in enumerate(all_nodes)}
    for g in allGraphs:
        allGraphs_copy.append(nx.relabel_nodes(g, nodes_dict))

    for i, g in enumerate(allGraphs_copy):
        f = open(os.path.join(sn_dir, str(i + 1) + ".edges"), "w+")
        for e in g.edges():
            ee = sorted(e)
            f.write(str(ee[0]) + "   " + str(ee[1]) + "\n")

        f.close()

        if i > 0:
            f = open(os.path.join(diff_dir, str(i + 1) + ".diff"), "w+")
            added_edges = set(g.edges()) - set(allGraphs_copy[i - 1].edges())
            removed_edges = set(allGraphs_copy[i - 1].edges()) - set(g.edges())
            for e in added_edges:
                ee = sorted(e)
                f.write("   +   " + str(ee[0]) + "   " + str(ee[1]) + "\n")

            for e in removed_edges:
                ee = sorted(e)

                f.write("   -   " + str(ee[0]) + "   " + str(ee[1]) + "\n")
            f.close()

    return nodes_dict
Example #8
def write_snapshots(dynGraph: DynGraphSN, outputDir: str, format: str = None):
    """
    Write one file per snapshot
    
    Write a dynamic graph as a directory containing one file for each snapshot. The format of files can be chosen.
    
    :param dynGraph: a dynamic graph
    :param outputDir: address of the directory to write
    :param format: default edgelist, choose among edges(edgelist)|ncol|gexf|gml|pajek|graphML
    """
    if format is None:
        format = "edges"
    allGraphs = dynGraph.snapshots()
    for g in allGraphs:
        _write_network_file(allGraphs[g],
                            os.path.join(outputDir, str(g)),
                            out_format=format)
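
A hedged usage sketch (the output directory is assumed to already exist; one file per snapshot is written, named after its time index):

import tnetwork as tn

dg = tn.DynGraphSN()
dg.add_interaction("a", "b", 0)
dg.add_interaction("b", "c", 1)
write_snapshots(dg, "snapshots_dir")  # writes snapshots_dir/0 and snapshots_dir/1 as edge lists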
Example #9
def _write(dynGraph: tn.DynGraphSN, dir):
    allGraphs = list(dynGraph.snapshots().values())

    all_nodes = set()
    for g in allGraphs:
        all_nodes.update(set(g.nodes))
    nodes_dict = {v: i for i, v in enumerate(all_nodes)}

    for i, g in enumerate(allGraphs):
        gg = nx.relabel_nodes(g, nodes_dict)
        nx.set_edge_attributes(gg, 1, "weight")  #write_edgelist below expects an edge attribute
        nx.write_edgelist(gg,
                          os.path.join(dir,
                                       str(i + 1) + ".ncol"),
                          data=["weight"])

    return nodes_dict
Example #10
def _write_for_dynmoga(dynGraph: tn.DynGraphSN, outputDir: str):
    """
    """
    _create_and_clean_directory(outputDir)

    dyn_graph_normalized, dic_nodes, dic_time = dynGraph.normalize_to_integers(
        nodes_start_at=1, time_start_at=1)

    for i in dic_time.keys():
        path = os.path.join(outputDir, "nets.t0" + str(i) + ".edges")
        nx.write_edgelist(dyn_graph_normalized.snapshots(i), path, data=False)

        f = open(os.path.join(outputDir, "coms.t0" + str(i) + ".comm1"), "w+")
        for j, n in enumerate(list(dic_nodes.keys())):
            f.write(str(n) + " " + str(j + 1) + "\n")
        f.close()

    return dic_nodes, dic_time
Example #11
def dynmoga(dynGraph: tn.DynGraphSN, elapsed_time=False):
    """
    Dynmoga Algorithm

    Requires Matlab

    :param dynGraph: a dynamic graph as a DynGraphSN
    :param elapsed_time: if True, also return the computation time
    :return: a DynCommunitiesSN object
    """
    dir = os.path.dirname(__file__)
    dir = os.path.join(dir, "temp", "dynmoga")

    dic_nodes, dic_times = _write_for_dynmoga(dynGraph, dir)

    T = len(dynGraph.snapshots())

    output_file = os.path.join(dir, "dynmoga_output.mat")
    duration = _runMatlabCode(os.path.join(dir, "coms"),
                              os.path.join(dir, "nets"), T, output_file)

    start = time.time()
    load_address = output_file
    dyn_coms = _load_dynmoga(load_address, dic_nodes, dic_times, dynGraph)

    dyn_coms.create_standard_event_graph()

    dyn_coms._relabel_coms_from_continue_events(typedEvents=False)

    duration2 = time.time() - start

    if elapsed_time:
        return dyn_coms, {"total": duration + duration2}
    return dyn_coms
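
A hedged usage sketch; as the docstring notes, this requires a working Matlab installation:

import tnetwork as tn

dg = tn.DynGraphSN()
dg.add_interaction("a", "b", 1)
dg.add_interaction("b", "c", 1)
dg.add_interaction("a", "b", 2)

coms, timing = dynmoga(dg, elapsed_time=True)
print(timing["total"], "seconds")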
Example #12
File: PAF.py Project: Yquetzal/tnetwork
def read_coms_dynamo(dynGraph: tn.DynGraphSN, input_dir, nodes_dict):
    nodes_dict = {v: k for k, v in nodes_dict.items()}
    coms = tn.DynCommunitiesSN()
    i = 1
    for t, g in dynGraph.snapshots().items():
        communities_this_step = {}
        file_Addr = os.path.join(input_dir,
                                 "runDynamicModularity_com_" + str(i))
        i += 1
        with open(file_Addr) as f:
            for id_line, l in enumerate(f.readlines()):
                l = l.rstrip("\n")

                real_node = nodes_dict[id_line]
                if real_node in g.nodes:
                    communities_this_step.setdefault(l, set())
                    communities_this_step[l].add(real_node)
        coms.set_communities(t, communities_this_step)
    coms.create_standard_event_graph(threshold=0.3)
    coms._relabel_coms_from_continue_events(typedEvents=False)
    return coms
Example #13
def write_snapshots_single_file(dynGraph: DynGraphSN,
                                outputFile: str,
                                both_directions=False):
    """
    Write a single file with all edges from all steps

    Format:
    time n1 n2 1
    :param dynGraph: a dynamic graph
    :param outputFile: address of the file to write
    """
    f = open(outputFile, "w")
    allGraphs = dynGraph.snapshots()
    for t, g in allGraphs.items():
        for e in g.edges():
            weights = " " + str(1)
            f.write(
                str(t) + " " + str(e[0]) + " " + str(e[1]) + weights + "\n")
            if both_directions:
                f.write(
                    str(t) + " " + str(e[1]) + " " + str(e[0]) + weights +
                    "\n")
    f.close()
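
For illustration, a hedged sketch of the file produced for a two-edge snapshot at time 3 (edge order within the file may vary):

import tnetwork as tn

dg = tn.DynGraphSN()
dg.add_interaction("a", "b", 3)
dg.add_interaction("b", "c", 3)
write_snapshots_single_file(dg, "all_edges.txt")
# all_edges.txt now reads:
# 3 a b 1
# 3 b c 1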
Example #14
def transversal_network_mucha_original(dyn_graph: tn.DynGraphSN,
                                       om=0.5,
                                       form="local",
                                       elapsed_time=False,
                                       matlab_session=None):
    """
    Multiplex community detection, Mucha et al.

    Algorithm described in [1]

    Brief summary: a single network is created by adding links between the copies of each node in the different snapshots. A modified modularity optimization algorithm is then run
    on this network.

    This function requires Matlab to be installed,
    as well as the Matlab engine for Python; see the setup instructions at
    https://fr.mathworks.com/help/matlab/matlab_external/install-the-matlab-engine-for-python.html
    (you can find the value of matlabroot by typing matlabroot in your Matlab console)

    If you do not have Matlab, you can use transversal_network_leidenalg instead, which is slower but requires only a package installation

    [1] Mucha, P. J., Richardson, T., Macon, K., Porter, M. A., & Onnela, J. P. (2010).
    Community structure in time-dependent, multiscale, and multiplex networks.
    science, 328(5980), 876-878.

    :param dyn_graph: dynamic network as a DynGraphSN
    :param om: weight of the coupling links between copies of the same node in different snapshots
    :param form: how nodes are coupled across snapshots: "local" (consecutive snapshots only), "global" (all pairs of snapshots), or "local_relaxed"
    :param elapsed_time: if True, also return the computation time
    :param matlab_session: an existing matlab session to reuse, if any
    :return: a DynCommunitiesSN object (and a dictionary of durations if elapsed_time is True)
    """
    print("preprocessing MUCHA ")

    # Original example from the genlouvain website:
    # N = length(A{1});
    # T = length(A);
    # B = spalloc(N*T, N*T, N*N*T + 2*N*T);
    # twomu = 0;
    # for s=1:T
    #     k = sum(A{s});
    #     twom = sum(k);
    #     twomu = twomu + twom;
    #     indx = [1:N] + (s-1)*N;
    #     B(indx, indx) = A{s} - gamma*k'*k/twom;
    # end
    # twomu = twomu + 2*omega*N*(T-1);
    # B = B + omega*spdiags(ones(N*T, 2), [-N, N], N*T, N*T);
    # [S, Q] = genlouvain(B);
    # Q = Q/twomu;
    # S = reshape(S, N, T);

    graphs = dyn_graph.snapshots()

    nodeOrderAllSN = []
    listModularityMatrices = []

    #for each graph in order
    for t, gT in enumerate(graphs):
        g = graphs[gT]
        nodeOrder = list(g.nodes())
        if len(nodeOrder) > 0:
            nodeOrderAllSN += [(t, n) for n in nodeOrder]

            gmat = nx.to_scipy_sparse_matrix(g,
                                             nodelist=nodeOrder,
                                             format="dok")
            k = gmat.sum(axis=0)  #degrees of nodes
            twom = k.sum(axis=1)  #sum of degrees
            nullModel = k.transpose() * k / twom
            listModularityMatrices.append(gmat - nullModel)

    #Concatenate all null modularity matrices
    B = scipy.sparse.block_diag(listModularityMatrices, format="dok")
    listModularityMatrices = None


    #add the link between same nodes in different timestamps
    multipleAppearances = {}  #for each node, list of indices where it appears

    ordered_real_times = dyn_graph.snapshots_timesteps()
    for (i, (t, n)) in enumerate(nodeOrderAllSN):
        multipleAppearances.setdefault(n, []).append((i, t))

    if form == "global":
        for (n, nAppearences) in multipleAppearances.items():
            for (i, t) in nAppearences:
                for (j, t) in nAppearences:
                    if i != j:
                        B[i, j] = om
    if form == "local":
        #print(multipleAppearances)
        for (n, orderedAppearences) in multipleAppearances.items():
            #print(orderedAppearences)
            for i in range(0, len(orderedAppearences) - 1):
                #BE CAREFUL, modified recently
                ii, t = orderedAppearences[i]
                ii_next, t_next = orderedAppearences[i + 1]
                #index_t = ordered_real_times.index(t)

                if ordered_real_times[t + 1] == ordered_real_times[t_next]:
                    B[ii, ii_next] = om

    if form == "local_relaxed":
        for (n, orderedAppearences) in multipleAppearances.items():
            for i in range(0, len(orderedAppearences) - 1):
                ii, t = orderedAppearences[i]
                ii_next, t_next = orderedAppearences[i + 1]
                B[ii, ii_next] = om

    #print("saving temp file")
    #numpy.savetxt("test.csv", B, fmt="%.2f", delimiter=",")
    #print("file saved")

    #B = scipy.sparse.coo_matrix(B)
    print("calling external code")

    (S, duration) = _runMatlabCode(B, matlab_session=matlab_session)
    #print("transforming back to dynamic net")

    DCSN = tn.DynCommunitiesSN()
    times = dyn_graph.snapshots_timesteps()
    for i in range(len(S)):
        DCSN.add_affiliation(nodeOrderAllSN[i][1], S[i],
                             times[nodeOrderAllSN[i][0]])

    print("sucessfully finished MUCHA ")

    if elapsed_time:
        return (DCSN, {"total": duration})
    return DCSN
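
For intuition, the supra-modularity matrix B assembled above can be sketched directly for two tiny three-node snapshots. This is an illustration of the construction (here with symmetric coupling, for simplicity), not the function itself:

import networkx as nx
import numpy as np
import scipy.sparse

om = 0.5
snapshots = [nx.path_graph(3), nx.cycle_graph(3)]  # same 3 nodes in both slices

blocks = []
for g in snapshots:
    a = nx.to_numpy_array(g)
    k = a.sum(axis=0)                            # node degrees
    blocks.append(a - np.outer(k, k) / k.sum())  # per-slice modularity block

B = scipy.sparse.block_diag(blocks, format="dok")
for i in range(3):  # couple each node to its own copy in the next slice
    B[i, 3 + i] = om
    B[3 + i, i] = om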
Example #15
def estrangement_confinement(dyn_graph: tn.DynGraphSN,
                             tolerance=0.00001,
                             convergence_tolerance=0.01,
                             delta=0.05,
                             elapsed_time=False,
                             **kwargs):
    """
    Estrangement confinement

    Algorithm introduced in [1]. Uses original code.



    [1] Kawadia, V., & Sreenivasan, S. (2012).
    Sequential detection of temporal communities by estrangement confinement.
    Scientific reports, 2, 794.

    :param dyn_graph: dynamic network as a DynGraphSN
    :param delta: see original article
    :param convergence_tolerance: see original article
    :param tolerance: see original article
    :param elapsed_time: if True, also return the computation time
    :return: a DynCommunitiesSN object (and a dictionary of durations if elapsed_time is True)
    """
    print("preprocessing estrangement confinement")

    #write files
    dir = os.path.dirname(__file__)
    dir_graphs = os.path.join(dir, "temp", "estrangement", "graph")
    result_file = os.path.join(dir, "temp", "estrangement", "result.log")
    clean_create_dir(dir_graphs)
    clear_file(result_file)

    all_nodes = set()
    allGraphs = dyn_graph.snapshots()
    for g in allGraphs.values():
        all_nodes = all_nodes.union(g.nodes())
    node_dict = {v: k for k, v in enumerate(all_nodes)}
    node_dict_reversed = {v: k for k, v in node_dict.items()}

    for i, g in enumerate(allGraphs.values()):
        nx.set_edge_attributes(g, 1, "weight")
        g_copy = nx.relabel_nodes(g, node_dict, copy=True)
        _write_network_file(g_copy,
                            os.path.join(dir_graphs, str(i)),
                            out_format="ncol",
                            weight=["weight"])
    start_time = time.time()
    print("calling external code")

    ECA(dir_graphs,
        result_file,
        tolerance=tolerance,
        convergence_tolerance=convergence_tolerance,
        delta=delta,
        **kwargs)
    print("postprocessing")
    duration = time.time() - start_time

    with open(result_file, 'r') as fr:
        result = eval(fr.read())  #the result file contains a python literal written by ECA
    to_return = tn.DynCommunitiesSN()
    for t, affils in result.items():
        partitions = tn.utils.community_utils.affiliations2nodesets(affils)
        for c, nodes in partitions.items():
            partitions[c] = [node_dict_reversed[x] for x in nodes]
        to_return.set_communities(t, partitions)


    print("sucessfully estrangement confinement")

    if elapsed_time:
        return (to_return, {"total": duration})
    return to_return
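
A hedged usage sketch; it assumes the bundled ECA implementation and its dependencies are available:

import tnetwork as tn

dg = tn.DynGraphSN()
dg.add_interaction("a", "b", 0)
dg.add_interaction("b", "c", 0)
dg.add_interaction("a", "b", 1)

coms = estrangement_confinement(dg, delta=0.05)
print(coms.snapshot_communities())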
Example #16
def rollingCPM(dynNetSN: DynGraphSN, k=3):
    """

    This method is based on Palla et al. [1]. It first computes overlapping communities in each snapshot using the
    clique percolation algorithm, and then matches communities in successive steps using a method based on the
    union graph.

    [1] Palla, G., Barabási, A. L., & Vicsek, T. (2007).
    Quantifying social group evolution.
    Nature, 446(7136), 664.

    :param dynNetSN: a dynamic network (DynGraphSN)
    :param k: the size of the cliques used as community building blocks
    :return: DynCommunitiesSN
    """

    DynCom = DynCommunitiesSN()
    old_communities = None
    old_graph = nx.Graph()

    graphs = dynNetSN.snapshots()

    for (date, graph) in graphs.items():
        communitiesAtT = list(
            _get_percolated_cliques(graph, k)
        )  #get the percolated cliques (communities) as a list of sets of nodes
        for c in communitiesAtT:
            DynCom.add_community(date, c)

        if old_communities is None:  #if first snapshot
            old_graph = graph
            dateOld = date
            old_communities = communitiesAtT

        else:
            if len(communitiesAtT) > 0:  #if there is at least one community
                union_graph = nx.compose(
                    old_graph, graph
                )  #create the union graph of the current and the previous
                communities_union = list(
                    _get_percolated_cliques(
                        union_graph,
                        k))  #get the communities of the union graph

                jaccardBeforeAndUnion = _included(
                    old_communities,
                    communities_union)  #we only care if the value is above 0
                jaccardUnionAndAfter = _included(
                    communitiesAtT,
                    communities_union)  #we only care if the value is above 0

                for c in jaccardBeforeAndUnion:  #for each community in the union graph
                    matched = []
                    born = []
                    killed = []

                    allJaccards = set()
                    for oldC in jaccardBeforeAndUnion[c]:
                        for newC in jaccardUnionAndAfter[c]:
                            allJaccards.add(
                                ((oldC, newC), _singleJaccard(oldC, newC))
                            )  #compute jaccard between candidates before and after
                    allJaccards = sorted(allJaccards,
                                         key=itemgetter(1),
                                         reverse=True)
                    sortedMatches = [k[0] for k in allJaccards]

                    oldCToMatch = dict(
                        jaccardBeforeAndUnion[c])  #get all coms before
                    newCToMatch = dict(
                        jaccardUnionAndAfter[c])  #get all new coms
                    while len(
                            sortedMatches
                    ) > 0:  #as long as there are pairs of unmatched communities
                        matchedKeys = sortedMatches[
                            0]  #pair of communities with the highest jaccard
                        matched.append(matchedKeys)  #this pair will be matched

                        del oldCToMatch[matchedKeys[
                            0]]  #delete chosen com from possible to match
                        del newCToMatch[matchedKeys[1]]
                        sortedMatches = [
                            k for k in sortedMatches
                            if len(set(matchedKeys) & set(k)) == 0
                        ]  #keep only pairs of unmatched communities

                    if len(oldCToMatch) > 0:
                        killed.append(list(oldCToMatch.keys())[0])
                    if len(newCToMatch) > 0:
                        born.append(list(newCToMatch.keys())[0])

                    for aMatch in matched:
                        DynCom.events.add_event(
                            (dateOld, DynCom._com_ID(dateOld, aMatch[0])),
                            (date, DynCom._com_ID(date, aMatch[1])), dateOld,
                            date, "continue")

                    for kil in killed:  #these are actual merges (unmatched old communities are "merged" into new ones)
                        for com in jaccardUnionAndAfter[c]:
                            DynCom.events.add_event(
                                (dateOld, DynCom._com_ID(dateOld, kil)),
                                (date, DynCom._com_ID(date, com)), dateOld,
                                date, "merged")

                    for b in born:  #these are actual splits (unmatched new communities are "split" from old ones)
                        for com in jaccardBeforeAndUnion[c]:
                            DynCom.events.add_event(
                                (dateOld, DynCom._com_ID(dateOld, com)),
                                (date, DynCom._com_ID(date, b)), dateOld, date,
                                "split")

            old_graph = graph
            dateOld = date
            old_communities = communitiesAtT
    DynCom._relabel_coms_from_continue_events()

    return DynCom
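
A hedged usage sketch; with k=3 every community is seeded by a triangle, so the toy graph below contains one in both snapshots:

import tnetwork as tn

dg = tn.DynGraphSN()
for u, v in [("a", "b"), ("b", "c"), ("a", "c"), ("c", "d")]:
    dg.add_interaction(u, v, 0)
    dg.add_interaction(u, v, 1)

coms = rollingCPM(dg, k=3)
print(coms.snapshot_communities())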
Example #17
def muchaOriginal(dynNetSN: tn.DynGraphSN,
                  om=0.5,
                  form="local",
                  elapsed_time=False):
    print("INITIALISING MUCHA ")

    #dynNetSN.remove_nodes_from(dynNetSN.isolates())

    graphs = dynNetSN.snapshots()

    nodeOrderAllSN = []
    listModularityMatrices = []

    #for each graph in order
    for i, gT in enumerate(graphs):
        g = graphs[gT]
        nodeOrder = list(g.nodes())
        nodeOrderAllSN += [(i, n) for n in nodeOrder]

        gmat = nx.to_numpy_matrix(g, nodelist=nodeOrder)

        #
        k = gmat.sum(axis=0)  #degrees of nodes
        twom = k.sum(axis=1)  #sum of degrees
        nullModel = k.transpose() * k / twom
        listModularityMatrices.append(gmat - nullModel)

    #Concatenate all null modularity matrices
    B = scipy.linalg.block_diag(*listModularityMatrices)
    #B = scipy.sparse.block_diag(listModularityMatrices)

    #add the link between same nodes in different timestamps
    multipleAppearances = {}  #for each node, list of indices where it appears
    for (i, (t, n)) in enumerate(nodeOrderAllSN):
        multipleAppearances.setdefault(n, []).append(i)

    if form == "global":
        for (n, nAppearences) in multipleAppearances.items():
            for i in nAppearences:
                for j in nAppearences:
                    if i != j:
                        B[i, j] = om
    if form == "local":
        #print(multipleAppearances)
        for (n, nAppearences) in multipleAppearances.items():
            orderedAppearences = nAppearences
            for i in range(0, len(orderedAppearences) - 1, 1):
                B[orderedAppearences[i], orderedAppearences[i + 1]] = om

    print("saving temp file")
    numpy.savetxt("test.csv", B, fmt="%.2f", delimiter=",")
    print("file saved")

    #B = scipy.sparse.coo_matrix(B)

    (S, duration) = runMatlabCode(B)
    print("transforming back to dynamic net")

    DCSN = tn.DynCommunitiesSN()
    for i in range(len(S)):
        DCSN.add_affiliation(nodeOrderAllSN[i][1], S[i], nodeOrderAllSN[i][0])

    if elapsed_time:
        return (DCSN, {"total": duration})
    return DCSN


Example #18
def rollingCPM(dynNetSN: DynGraphSN, k=3, elapsed_time=False):
    """

    This method is based on Palla et al. [1]. It first computes overlapping communities in each snapshot using the
    clique percolation algorithm, and then matches communities in successive steps using a method based on the
    union graph.

    [1] Palla, G., Barabási, A. L., & Vicsek, T. (2007).
    Quantifying social group evolution.
    Nature, 446(7136), 664.

    :param dynNetSN: a dynamic network (DynGraphSN)
    :param k: the size of the cliques used as community building blocks
    :param elapsed_time: if True, will return a tuple (communities,time_elapsed)
    :return: DynCommunitiesSN
    """

    DynCom = DynCommunitiesSN()
    old_communities = None
    old_graph = nx.Graph()

    graphs = dynNetSN.snapshots()

    time_Steps = {}
    start = time.time()
    step2 = start

    total_percolation = 0
    total_match = 0

    pool = mp.Pool(mp.cpu_count())

    allComs = pool.starmap_async(__compute_communities,
                                 [(SNt, dynNetSN.snapshots(SNt), k)
                                  for SNt in graphs]).get()
    print("CD detection done", len(allComs))
    pool.close()

    com_ids = dict()
    for (date, communitiesAtT) in allComs:
        #print("------------",date)
        #for (date, graph) in graphs.items():

        #communitiesAtT = list(_get_percolated_cliques(graph, k)) #get the percolated cliques (snapshot_affiliations) as a list of set of nodes
        step1 = time.time()
        total_percolation += step1 - step2
        for current_com in communitiesAtT:
            id = DynCom.add_community(date, current_com)
            com_ids[(date, current_com)] = id

        if old_communities is None:  #if first snapshot
            old_graph = graphs[date]
            dateOld = date
            old_communities = communitiesAtT

        else:
            if len(communitiesAtT) > 0:  #if there is at least one community
                union_graph = nx.compose(
                    old_graph, graphs[date]
                )  #create the union graph of the current and the previous
                communities_union = list(
                    _get_percolated_cliques(
                        union_graph,
                        k))  #get the communities of the union graph

                jaccardBeforeAndUnion = _included(
                    old_communities,
                    communities_union)  #we only care if the value is above 0
                jaccardUnionAndAfter = _included(
                    communitiesAtT,
                    communities_union)  #we only care if the value is above 0

                already_assigned = set()
                for current_com in jaccardBeforeAndUnion:  #for each community in the union graph
                    matched = []
                    born = []
                    killed = []

                    allJaccards = set()
                    for oldC in jaccardBeforeAndUnion[
                            current_com]:  #for communities included in it in t-1
                        for newC in jaccardUnionAndAfter[
                                current_com]:  # and t+1
                            if not oldC in already_assigned and not newC in already_assigned:
                                allJaccards.add(
                                    ((oldC, newC), _singleJaccard(
                                        oldC,
                                        newC)))  #compute jaccard between those

                    allJaccards = sorted(allJaccards,
                                         key=itemgetter(1),
                                         reverse=True)
                    sortedMatches = [
                        k[0] for k in allJaccards
                    ]  #list of pairs of communities in t-1 and t+1 ordered by decreasing jaccard

                    oldCToMatch = dict(jaccardBeforeAndUnion[current_com]
                                       )  #get all coms before
                    newCToMatch = dict(
                        jaccardUnionAndAfter[current_com])  #get all new coms
                    while len(
                            sortedMatches
                    ) > 0:  #as long as there are pairs of unmatched communities (t-1, t+1) included in the current com
                        matchedKeys = sortedMatches[
                            0]  #pair of communities with the highest jaccard
                        matched.append(matchedKeys)  #this pair will be matched

                        del oldCToMatch[matchedKeys[
                            0]]  #delete chosen com from possible to match
                        del newCToMatch[matchedKeys[1]]
                        sortedMatches = [
                            k for k in sortedMatches
                            if len(set(matchedKeys) & set(k)) == 0
                        ]  #keep only pairs of unmatched communities

                    if len(oldCToMatch) > 0:
                        killed.append(list(oldCToMatch.keys())[0])
                    if len(newCToMatch) > 0:
                        born.append(list(newCToMatch.keys())[0])

                    for aMatch in matched:
                        #print("--",aMatch)
                        already_assigned.add(aMatch[0])
                        already_assigned.add(aMatch[1])

                        DynCom.events.add_event(
                            (dateOld, com_ids[(dateOld, aMatch[0])]),
                            (date, com_ids[(date, aMatch[1])]), dateOld, date,
                            "continue")

                    for kil in killed:  #these are actual merges (unmatched old communities are "merged" into new ones)
                        for com in jaccardUnionAndAfter[current_com]:
                            DynCom.events.add_event(
                                (dateOld, com_ids[(dateOld, kil)]),
                                (date, com_ids[(date, com)]), dateOld, date,
                                "merged")

                    for b in born:  #these are actual splits (unmatched new communities are "split" from old ones)
                        for com in jaccardBeforeAndUnion[current_com]:
                            DynCom.events.add_event(
                                (dateOld, com_ids[(dateOld, com)]),
                                (date, com_ids[(date, b)]), dateOld, date,
                                "split")
            step2 = time.time()
            total_match += step2 - step1

            old_graph = graphs[date]
            dateOld = date
            old_communities = communitiesAtT

    end = time.time()
    time_Steps["total"] = end - start
    time_Steps["CD"] = total_percolation
    time_Steps["match"] = total_match

    DynCom._relabel_coms_from_continue_events()

    if elapsed_time:
        return (DynCom, time_Steps)
    return DynCom
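
As in example #3, the __compute_communities helper used by the starmap call is not shown in this listing. A minimal sketch consistent with how it is called here (it must return a (date, list-of-communities) pair):

def __compute_communities(date, graph, k):
    # hypothetical reconstruction: percolate k-cliques in one snapshot
    return (date, list(_get_percolated_cliques(graph, k)))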