def consecutive_sn_similarity(dynamicCommunity: tn.DynCommunitiesSN, score=None):
    """
    Similarity between partitions in consecutive snapshots.

    Compute a similarity score between all pairs of successive partitions.

    :param dynamicCommunity: the dynamic partition to evaluate
    :param score: the score to use for computing the similarity between each pair of snapshots. default: Overlapping NMI
    :return: pair (list of scores, list of partition sizes, averaged over the two partitions of each pair)
    """
    if score is None:
        score = onmi  # we use onmi because the number of labels can differ between snapshots

    scores = []
    sizes = []

    # for each pair of successive snapshots
    com_snapshots = list(dynamicCommunity.snapshot_communities().values())
    for i in range(len(com_snapshots) - 1):
        partition_before = list(com_snapshots[i].values())
        partition_after = list(com_snapshots[i + 1].values())

        elts_before = sum(len(x) for x in partition_before)
        elts_after = sum(len(x) for x in partition_after)

        scores.append(score(partition_before, partition_after))
        sizes.append((elts_after + elts_before) / 2)

    return scores, sizes
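# Illustrative sketch (not part of the original module): aggregating the
# per-transition similarities returned above into a single smoothness value,
# weighting each transition by the average size of the two partitions involved.
# The function name is hypothetical; it relies only on consecutive_sn_similarity.
def _example_average_consecutive_similarity(dyn_coms: tn.DynCommunitiesSN):
    scores, sizes = consecutive_sn_similarity(dyn_coms)
    if len(scores) == 0:
        return None  # fewer than two snapshots: no transition to evaluate
    # size-weighted average, so that large snapshots weigh more than small ones
    return sum(sc * sz for sc, sz in zip(scores, sizes)) / sum(sizes)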
def quality_at_each_step(dynamicCommunities: tn.DynCommunitiesSN, dynamicGraph: tn.DynGraphSN, score=None):
    """
    Compute a community quality score at each step

    :param dynamicCommunities: dynamic communities as SN
    :param dynamicGraph: the dynamic graph as SN on which the quality is computed
    :param score: score to use, default: Modularity
    :return: pair (scores, sizes)
    """
    if score is None:
        score = nx.algorithms.community.modularity

    scores = []
    sizes = []

    # for each step
    for t, affils in dynamicCommunities.snapshot_communities().items():
        g = dynamicGraph.snapshots(t)
        partition = list(affils.values())
        try:
            sc = score(g, partition)
            scores.append(sc)
        except Exception:
            # the score cannot always be computed (e.g., the partition does not cover all nodes of the snapshot)
            scores.append(None)
        sizes.append(len(g.nodes))

    return scores, sizes
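# Illustrative sketch (not part of the original module): computing the quality
# trajectory with the default modularity score and summarizing it, skipping the
# None entries produced when the score could not be computed for a snapshot.
# The function name is hypothetical; it relies only on quality_at_each_step.
def _example_average_quality(dyn_coms: tn.DynCommunitiesSN, dyn_graph: tn.DynGraphSN):
    scores, sizes = quality_at_each_step(dyn_coms, dyn_graph)
    valid = [(sc, sz) for sc, sz in zip(scores, sizes) if sc is not None]
    if len(valid) == 0:
        return None
    # weight each snapshot by its number of nodes
    return sum(sc * sz for sc, sz in valid) / sum(sz for _, sz in valid)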
def write_com_SN(dyn_communities: tn.DynCommunitiesSN, output_dir, asNodeSet=True):
    """
    Write dynamic communities as a directory, one file per snapshot

    Write dynamic snapshot_affiliations as a directory containing one file for each snapshot.
    Two possible formats:

    **Affiliations:**
    ::

        node1   com1 com2
        node2   com1
        node3   com2 com3 com4

    **Node Sets:**
    ::

        com:com1    n1 n2 n3
        com:another_com    n1 n4 n5

    :param dyn_communities: the dynamic communities to write
    :param output_dir: address of the directory to write
    :param asNodeSet: if True, write node sets; otherwise, write affiliations
    """
    os.makedirs(output_dir, exist_ok=True)
    all_partitions = dyn_communities.snapshot_communities()
    for t, p in all_partitions.items():
        if asNodeSet:
            write_communities_as_nodeset(p, os.path.join(output_dir, str(t)))
        else:
            p = nodesets2affiliations(p)
            write_communities_as_affiliations(p, os.path.join(output_dir, str(t)))
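# Illustrative sketch (not part of the original module): writing the same dynamic
# communities twice, once per supported format, into sibling directories. The
# directory names are arbitrary; only write_com_SN and os.path.join are used.
def _example_write_both_formats(dyn_coms: tn.DynCommunitiesSN, base_dir: str):
    write_com_SN(dyn_coms, os.path.join(base_dir, "as_node_sets"), asNodeSet=True)
    write_com_SN(dyn_coms, os.path.join(base_dir, "as_affiliations"), asNodeSet=False)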
def nb_node_change(dyn_com: tn.DynCommunitiesSN):
    """
    Compute the total number of node changes

    Measure of smoothness at the level of nodes, adapted to evaluate glitches: each time a node
    belongs to a different community than in the previous snapshot counts as one change.

    :param dyn_com: the dynamic communities
    :return: total number of node changes
    """
    coms_by_nodes = {}
    for t, coms in dyn_com.snapshot_communities().items():
        for com, nodes in coms.items():
            for n in nodes:
                coms_by_nodes.setdefault(n, [com])
                if coms_by_nodes[n][-1] != com:
                    coms_by_nodes[n].append(com)

    nb_changes = 0
    for n in coms_by_nodes:
        nb_changes += len(coms_by_nodes[n]) - 1

    return nb_changes
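# Illustrative sketch (not part of the original module): normalizing the raw number
# of node changes by the total number of (node, time) affiliations, to compare the
# smoothness of solutions of different sizes. Uses only nb_node_change and the
# snapshot_communities() accessor already used above; the name is hypothetical.
def _example_node_change_rate(dyn_com: tn.DynCommunitiesSN):
    total_affiliations = sum(
        len(nodes)
        for coms in dyn_com.snapshot_communities().values()
        for nodes in coms.values()
    )
    if total_affiliations == 0:
        return None
    return nb_node_change(dyn_com) / total_affiliations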
def longitudinal_similarity(dynamicCommunityReference: tn.DynCommunitiesSN, dynamicCommunityObserved: tn.DynCommunitiesSN, score=None, convert_coms_sklearn_format=True):
    """
    Longitudinal similarity

    The longitudinal similarity between two dynamic partitions is computed by considering each
    couple (node, time) as an element belonging to a cluster; a cluster can therefore contain
    nodes at different times.
    It takes into account the fact that the reference might be incomplete by removing from the
    partition to evaluate all (node, time) couples not present in the reference.

    :param dynamicCommunityReference: the dynamic partition used as reference (ground truth)
    :param dynamicCommunityObserved: the dynamic partition to evaluate (result of an algorithm)
    :param score: community comparison score, by default the adjusted NMI (sklearn)
    :param convert_coms_sklearn_format: if the score expects clusters represented as in sklearn
        (one label per element), True. If False, the score will receive lists of sets of nodes.
    :return: score
    """
    if score is None:
        score = lambda x, y: sklearn.metrics.adjusted_mutual_info_score(x, y, average_method="arithmetic")

    affilReference = []
    affilToEvaluate = []

    if convert_coms_sklearn_format:
        comsToEvaluate = dynamicCommunityObserved.snapshot_affiliations()

        # for each step
        for t, affils in dynamicCommunityReference.snapshot_affiliations().items():
            # for each node
            for n, comId in affils.items():
                affilReference.append(str(list(comId)[0]))
                if n in comsToEvaluate[t]:
                    affilToEvaluate.append(str(list(comsToEvaluate[t][n])[0]))
                else:
                    print("node not in partition to evaluate: ", str(n), " ", str(t))
                    affilToEvaluate.append("-1")
    else:
        affilReference = {}
        affilToEvaluate = {}
        for t, coms in dynamicCommunityReference.snapshot_communities().items():
            all_nodes = set()
            for id, nodes in coms.items():
                node_sn = {(n, t) for n in nodes}
                all_nodes.update(node_sn)
                affilReference.setdefault(id, set()).update(node_sn)
            for id, nodes in dynamicCommunityObserved.snapshot_communities(t).items():
                node_sn = {(n, t) for n in nodes}
                affilToEvaluate.setdefault(id, set()).update(node_sn & all_nodes)
        affilReference = list(affilReference.values())
        affilToEvaluate = list(affilToEvaluate.values())

    return score(affilReference, affilToEvaluate)
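# Illustrative sketch (not part of the original module): comparing a detected
# dynamic partition against a ground truth with both calling conventions of
# longitudinal_similarity. The first call uses the default sklearn adjusted NMI on
# (node, time) labels; the second shows how a score taking lists of node sets
# (here the onmi function used above, an assumed import) could be plugged in.
def _example_longitudinal_comparison(reference: tn.DynCommunitiesSN, detected: tn.DynCommunitiesSN):
    ami = longitudinal_similarity(reference, detected)
    overlapping = longitudinal_similarity(reference, detected, score=onmi,
                                          convert_coms_sklearn_format=False)
    return ami, overlapping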