def test_build_matrice_of_subgraph(graphes_GR_LG):
    """
    Check, for each (GR, LG) pair, that the subgraph matrix built around a
    randomly chosen node only contains edges that exist in the full line
    graph LG.

    Returns a DataFrame indexed by graph id with an "OK"/"NOK" result column.
    """
    resultats = dict()
    print("TEST build_matrice_of_subgraph => debut")
    for graphe in graphes_GR_LG:
        mat_LG, prob = graphe[1], graphe[5]
        num_graph = graphe[8] + "_p_" + str(prob)
        sommets = creat_gr.sommets_mat_LG(mat_LG, etat=0)
        # pick one node at random to build its subgraph
        _, nom_sommet_alea = random.choice(list(enumerate(sommets.keys())))
        mat_subgraph = algo_couv.build_matrice_of_subgraph(
            nom_sommet=nom_sommet_alea,
            sommets_k_alpha=sommets)
        aretes_LG = fct_aux.aretes(mat_GR=mat_LG,
                                   orientation=False, val_0_1=1)
        aretes_subgraph = fct_aux.aretes(mat_GR=mat_subgraph,
                                         orientation=False, val_0_1=1)
        # the subgraph is valid iff every one of its edges appears in LG
        res = "OK" if aretes_LG.intersection(aretes_subgraph) == aretes_subgraph \
                   else "NOK"
        resultats[num_graph] = {"res": res}
    print("TEST build_matrice_of_subgraph => FIN")
    return pd.DataFrame.from_dict(resultats).T
def calculate_hamming_distance(mat_LG, mat_LG_k):
    """
    Identify the set of edges that differ between the two graphs.

    aretes_ajout = edges added to LG (present in mat_LG_k only)
    aretes_supp  = edges removed from LG (present in mat_LG only)

    Both arguments must have the same type: either pandas adjacency
    DataFrames, lists of edges, or sets of edges.  Any other combination
    falls through and returns an empty set.

    Returns
    -------
    set : the union of added and removed edges (the Hamming difference).
    """
    aretes_modifs = set()
    aretes_ajout = set()
    aretes_supp = set()
    if isinstance(mat_LG, pd.DataFrame) and isinstance(mat_LG_k, pd.DataFrame):
        aretes_LG = fct_aux.aretes(mat_LG, orientation=False, val_0_1=1)
        aretes_LG_k = fct_aux.aretes(mat_LG_k, orientation=False, val_0_1=1)
        aretes_ajout = aretes_LG.union(aretes_LG_k) - aretes_LG
        aretes_supp = aretes_LG.union(aretes_LG_k) - aretes_LG_k
        aretes_modifs = aretes_ajout.union(aretes_supp)
    elif isinstance(mat_LG, list) and isinstance(mat_LG_k, list):
        aretes_ajout = set(mat_LG).union(set(mat_LG_k)) - set(mat_LG)
        aretes_supp = set(mat_LG).union(set(mat_LG_k)) - set(mat_LG_k)
        # BUG FIX: this branch previously never assigned aretes_modifs,
        # so list inputs always yielded an empty set.
        aretes_modifs = aretes_ajout.union(aretes_supp)
    elif isinstance(mat_LG, set) and isinstance(mat_LG_k, set):
        aretes_ajout = mat_LG.union(mat_LG_k) - mat_LG
        aretes_supp = mat_LG.union(mat_LG_k) - mat_LG_k
        aretes_modifs = aretes_ajout.union(aretes_supp)
    return aretes_modifs
def test_grouped_cliques_by_node(graphes_GR_LG):
    """
    Compute a clique cover of each line graph, then check
    fct_aux.grouped_cliques_by_node on the nodes whose state equals `etat`.

    Returns a DataFrame (one row per graph) listing the selected node names
    and, per node, its list of cliques.
    """
    etat = 2
    dico_df = dict()
    for graphe in graphes_GR_LG:
        num_graph = graphe[8] + "_p_" + str(graphe[5])
        mat_LG = graphe[1]
        aretes_LG = fct_aux.aretes(mat_LG)
        sommets_LG = creat_gr.sommets_mat_LG(mat_LG)
        # clique cover of the line graph
        cliqs_couverts, aretes, sommets = algo_couv.clique_covers(
            mat_LG, aretes_LG, sommets_LG, True)
        noms_sommets = fct_aux.node_names_by_state(sommets=sommets,
                                                   etat_1=etat)
        dico_cliques = fct_aux.grouped_cliques_by_node(
            cliques=cliqs_couverts,
            noms_sommets_1=noms_sommets)
        ligne = {"noms_sommets": noms_sommets}
        for nom, liste_cliques in dico_cliques.items():
            ligne[nom] = liste_cliques
        dico_df[num_graph] = ligne
    return pd.DataFrame.from_dict(dico_df).T
def algo_Welsh_Powel(matA):
    """
    Color the nodes of the graph so that two adjacent arcs get different
    colors (Welsh-Powell style greedy coloring).

    Helper semantics (defined elsewhere in this module):
    existe_node_adj_in_liste_Node_NonAdj collects the nodes of "ma_liste"
    (nodes non-adjacent to a given node) that are adjacent to each other;
    if such nodes exist, one of them shares the given node's color and the
    others receive different color numbers.

    Parameters
    ----------
    matA : pd.DataFrame adjacency matrix (nodes as columns).

    Returns
    -------
    (liste_noeuds_decroissant, dico_color_noeud) :
        nodes sorted by decreasing degree, and the node -> color mapping.
    """
    liste_noeuds = matA.columns.tolist()
    liste_arcs_ = fct_aux.aretes(matA, orientation=True)

    # 1. order nodes by decreasing degree
    dico_degre_noeud = dict()
    for noeud in liste_noeuds:
        dico_degre_noeud[noeud] = fct_aux.degre_noeud(liste_arcs_, noeud)
    sorted_tuple_ordre_decroissant = sorted(dico_degre_noeud.items(),
                                            key=lambda x: x[1],
                                            reverse=True)
    liste_noeuds_decroissant = list()
    for tuple_ in sorted_tuple_ordre_decroissant:
        liste_noeuds_decroissant.append(tuple_[0])

    # 2. assign colors to nodes
    color = 0
    dico_color_noeud = dict()
    liste_noeuds_decroissant_copy = liste_noeuds_decroissant.copy()
    for noeud in liste_noeuds_decroissant_copy:
        # initialisation: no node is colored yet
        dico_color_noeud[noeud] = None
    # NOTE(review): pop() takes nodes from the *end* of the list, i.e.
    # lowest-degree first — confirm this ordering is intentional.
    while liste_noeuds_decroissant_copy:
        noeud = liste_noeuds_decroissant_copy.pop()
        if dico_color_noeud[noeud] == None:
            dico_color_noeud[noeud] = color
            # nodes not adjacent to `noeud` and not yet colored
            liste_node_nonAdj = liste_node_NonAdj_NonColorie(
                noeud, matA, dico_color_noeud)
            # among those, nodes that are adjacent to one another
            liste_nodes_u_v = existe_node_adj_in_liste_Node_NonAdj(
                list(liste_node_nonAdj), liste_arcs_)
            if len(liste_nodes_u_v) != 0:
                nodeAdjANoeud = liste_node_nonAdj.intersection(liste_nodes_u_v)
                for node_u in nodeAdjANoeud:
                    dico_color_noeud[node_u] = color
                # NOTE(review): both loops assign the same `color`; verify
                # the mutually-adjacent nodes were meant to share it.
                for u in liste_nodes_u_v:
                    dico_color_noeud[u] = color
                color += 1
            else:
                # all non-adjacent uncolored nodes can share this color
                for noeud_nonAdj in liste_node_nonAdj:
                    dico_color_noeud[noeud_nonAdj] = color
                color += 1
    return liste_noeuds_decroissant, dico_color_noeud
def generer_matrice_with_mean_degre(dim_mat, degre_moy):
    """
    Generate a symmetric 0/1 adjacency matrix of size dim_mat whose rows
    are filled up to roughly `degre_moy` ones, make the graph connected,
    then orient it.

    Parameters
    ----------
    dim_mat : int, number of nodes.
    degre_moy : target mean degree per node.

    Returns
    -------
    pd.DataFrame : oriented adjacency matrix with string node labels.
    """
    # NOTE(review): randint(0, 1, ...) always produces zeros (high bound is
    # exclusive) — the matrix starts empty and is filled by the loop below.
    # Confirm randint(0, 2, ...) was not intended.
    mat_ = np.random.randint(0, 1, (dim_mat, dim_mat))
    proba = degre_moy / mat_.shape[0]
    ind_diagonale = 0
    cpt_row = 0
    index_row = 0
    for row in mat_:
        # how many more ones this row needs to reach the target degree
        degre_row_max = math.floor(proba * mat_.shape[0]) - \
                        sum(x == 1 for x in row)
        index_row = None
        for nbre_case1 in range(0, degre_row_max):
            # positions still at 0 in this row
            ind_items0 = [i[0] for i in enumerate(row) if i[1] == 0]
            if len(ind_items0) != 0:
                index_row = random.choice(ind_items0)
                row[index_row] = 1
                # mirror the edge to keep the matrix symmetric
                mat_[:, cpt_row][index_row] = 1
                ind_diagonale += 1
        cpt_row += 1
    # no self-loops
    np.fill_diagonal(mat_, 0)
    noeuds = [str(i) for i in range(mat_.shape[0])]
    mat = pd.DataFrame(mat_, index=noeuds, columns=noeuds)
    aretes = fct_aux.aretes(mat, orientation=True)
    # make the graph connected, then orient it
    mat = graphe_connexe(mat, aretes)
    matA = orienter_graphe(mat, noeuds, aretes)
    G_ = nx.Graph(fct_aux.aretes(matA, orientation=True))
    matA.index.rename("nodes", inplace=True)
    # NOTE(review): this appends a row labeled "nodes" holding the node
    # labels — verify downstream readers expect that extra row.
    matA.loc["nodes"] = [str(i) for i in matA.index]
    return matA
def creer_graphe(nbre_sommets_GR=5, nbre_moyen_liens=(2, 5),
                 chemin_matrice=""):
    """
    Create a root graph GR and its line graph LG.

    Returns (mat_GR, mat_LG, sommets, aretes).
    """
    mat_LG, mat_GR = generer_reseau(nbre_sommets_GR,
                                    nbre_moyen_liens,
                                    chemin_matrice)
    return (mat_GR,
            mat_LG,
            sommets_mat_LG(mat_LG),
            fct_aux.aretes(mat_LG, orientation=False))
def generer_reseau(nbre_sommets_GR, nbre_moyen_liens, chemin_matrice):
    """
    Build the root graph and its line graph, persisting both as CSV files
    under `chemin_matrice` (the directory is created if missing).

    Returns (mat_LG, mat_GR).
    """
    mat_GR = geneMatA.genererMatriceA(nbre_sommets_GR, nbre_moyen_liens)
    arcs = fct_aux.aretes(mat_GR, orientation=True)
    mat_LG = creer_mat_LG(arcs)

    dossier = Path(chemin_matrice)
    if not dossier.is_dir():
        dossier.mkdir(parents=True, exist_ok=True)

    mat_LG.to_csv(chemin_matrice + NOM_MATE_LG,
                  index_label=INDEX_COL_MATE_LG)
    mat_GR.to_csv(chemin_matrice + NOM_MAT_GR)
    return mat_LG, mat_GR
def graphe_connexe(mat_, aretes):
    """
    Make the graph described by `mat_` connected.

    If the graph built from `aretes` is already connected, `mat_` is
    returned unchanged.  Otherwise one random edge is added between every
    pair of *distinct* connected components, mutating `mat_` in place.

    Parameters
    ----------
    mat_ : pd.DataFrame, symmetric 0/1 adjacency matrix.
    aretes : iterable of node pairs, the edge list matching `mat_`.

    Returns
    -------
    pd.DataFrame : the (possibly modified) adjacency matrix.
    """
    G = nx.Graph(aretes)
    if nx.is_connected(G):
        return mat_
    components = list(nx.connected_components(G))
    for ind_i in range(len(components) - 1):
        # BUG FIX: the inner range previously started at ind_i, pairing a
        # component with itself — which could set diagonal (self-loop)
        # entries or add useless intra-component edges.
        for ind_j in range(ind_i + 1, len(components)):
            node_1 = random.choice(list(components[ind_i]))
            node_2 = random.choice(list(components[ind_j]))
            # single .loc[row, col] indexer instead of chained
            # .loc[row][col], whose assignment may hit a copy
            mat_.loc[node_1, node_2] = 1
            mat_.loc[node_2, node_1] = 1
    return mat_
def add_remove_edges(mat_LG, aretes_LG, k_erreur, prob):
    """
    Add and/or remove k_erreur edges of the graph depending on prob:

    prob = 0      -> deletions only.
    prob in ]0,1[ -> deletions and additions.
    prob = 1      -> additions only.

    Returns (mat_LG_k, aretes_LG, aretes_modifiees) where aretes_modifiees
    maps "aretes_supprimees"/"aretes_ajoutees" to the edges touched.
    """
    aretes_modifiees = {"aretes_supprimees": [],
                        "aretes_ajoutees": []}
    mat_LG_k = mat_LG.copy()
    aretes_LG = list(aretes_LG)

    nbre_aretes_a_supp = math.ceil(k_erreur * (1 - prob))
    nbre_aretes_a_ajout = k_erreur - nbre_aretes_a_supp

    # delete edges picked uniformly at random
    for _ in range(0, nbre_aretes_a_supp):
        id_arete, arete = random.choice(list(enumerate(aretes_LG)))
        aretes_LG.pop(id_arete)
        u, v = tuple(arete)[0], tuple(arete)[1]
        mat_LG_k.loc[u, v] = 0
        mat_LG_k.loc[v, u] = 0
        aretes_modifiees["aretes_supprimees"].append(arete)

    # candidate additions = non-edges of the ORIGINAL graph, so a freshly
    # deleted edge can never be re-added in the same call
    not_aretes_LG = list(fct_aux.aretes(mat_LG, orientation=False, val_0_1=0))
    for _ in range(0, nbre_aretes_a_ajout):
        id_arete, arete = random.choice(list(enumerate(not_aretes_LG)))
        not_aretes_LG.pop(id_arete)
        aretes_LG.append(arete)
        u, v = tuple(arete)[0], tuple(arete)[1]
        mat_LG_k.loc[u, v] = 1
        mat_LG_k.loc[v, u] = 1
        aretes_modifiees["aretes_ajoutees"].append(arete)

    return mat_LG_k, set(aretes_LG), aretes_modifiees
def test_calculate_hamming_distance(graphes_GR_LG):
    """
    Perturb each line graph with add_remove_edges and check that
    calculate_hamming_distance recovers exactly the set of modified edges.
    """
    dico_df = dict()
    for graphe in graphes_GR_LG:
        mat_LG, prob, k_erreur = graphe[1], graphe[5], graphe[6]
        num_graph = graphe[8] + "_p_" + str(prob)
        aretes_LG = fct_aux.aretes(mat_LG, orientation=False, val_0_1=1)
        mat_LG_k, aretes_LG_k, aretes_modifiees = \
            gr_disco_simi.add_remove_edges(mat_LG, aretes_LG,
                                           k_erreur, prob)
        aretes_modifs = gr_disco_simi.calculate_hamming_distance(mat_LG,
                                                                 mat_LG_k)
        # expected difference, rebuilt from the reported modifications
        aretes_modifs_cal = set(aretes_modifiees["aretes_supprimees"]).union(
            set(aretes_modifiees["aretes_ajoutees"]))
        if aretes_modifs == aretes_modifs_cal:
            res = "OK"
            print("TEST : DH OK")
        else:
            res = "NOK"
            print("TEST : DH NOK")
        dico_df[num_graph] = {
            "prob": prob,
            "k_erreur": k_erreur,
            "res": res,
            "DH": len(aretes_modifs),
            "aretes_modifs": aretes_modifs,
            "aretes_modifs_cal": aretes_modifs_cal,
            "aretes_ajoutees": aretes_modifiees["aretes_ajoutees"],
            "aretes_supprimees": aretes_modifiees["aretes_supprimees"]
        }
    return pd.DataFrame.from_dict(dico_df).T
def test_update_edges_neighbor(graphes_GR_LG):
    """
    For each graph: partition around a random node, verify the resulting
    cliques, apply update_edges_neighbor, then cross-check the set of
    deleted edges against the edges remaining in the node structure.

    Returns a DataFrame (one row per graph) with an "OK"/"NOK" result.
    """
    dico_df = dict()
    for graphe_GR_LG in graphes_GR_LG:
        mat_LG = graphe_GR_LG[1]
        prob = graphe_GR_LG[5]
        num_graph = graphe_GR_LG[8]+"_p_"+str(prob)
        sommets = creat_gr.sommets_mat_LG(mat_LG, etat=0)
        # pick a random node to partition around
        id_nom_som, nom_sommet_alea = random.choice(list(
                                        enumerate(sommets.keys())))
        aretes_LG = fct_aux.aretes(mat_GR=mat_LG,
                                   orientation=False,
                                   val_0_1=1)
        cliques = algo_couv.partitionner(
                        sommet = sommets[nom_sommet_alea],
                        sommets_k_alpha = sommets,
                        aretes_LG_k_alpha = aretes_LG,
                        DBG= True
                        )
        cliques_coh = []
        bool_clique, bool_coherent, cliques_coh = \
            algo_couv.verify_cliques(
                            cliques = cliques,
                            nom_sommet = nom_sommet_alea)
        # keep at most two coherent cliques (C1, C2)
        C1, C2 = set(), set()
        if len(cliques_coh) == 1:
            C1 = cliques_coh[0]
        elif len(cliques_coh) == 2:
            C1, C2 = cliques_coh[0], cliques_coh[1]
        aretes_LG_res, sommets = algo_couv.update_edges_neighbor(
                                    C1 = C1,
                                    C2 = C2,
                                    aretes = aretes_LG,
                                    sommets = sommets)
        # edges reported as removed by update_edges_neighbor
        aretes_supps_res = aretes_LG.union(aretes_LG_res) - aretes_LG_res
        # rebuild a matrix from the mutated node structures, then compute
        # the remaining edges and compare with aretes_LG
        aretes_supps_cal = set()
        mat_res = fct_aux.convert_sommet_to_df(sommets_k_alpha=sommets)
        aretes_restantes = fct_aux.aretes(mat_GR=mat_res,
                                          orientation=False,
                                          val_0_1=1)
        aretes_supps_cal = aretes_LG.union(aretes_restantes) - aretes_restantes
        res = ""
        if aretes_supps_cal == aretes_supps_res:
            res = 'OK'
        else:
            res = 'NOK'
        dico_df[num_graph] = {"nom_sommet":nom_sommet_alea,
                              "voisins":set(sommets[nom_sommet_alea].voisins),
                              "cliques":cliques,
                              "cliques_coh":cliques_coh,
                              "aretes_supps_res": aretes_supps_res,
                              "aretes_supps_cal": aretes_supps_cal,
                              "res":res
                              }
        print("TEST update_edge, num_graph={}, res={}".format(num_graph,res))
    return pd.DataFrame.from_dict(dico_df).T
def genererMatriceA_nbreAreteMinMax(dimMat, nb_lien=(2, 5)):
    """
    dimMat: number of nodes in the graph.
    nb_lien: (min, max) number of edges per node.
    TODO check the result is a connected graph.

    Builds a random symmetric 0/1 matrix whose per-node degree is clamped
    into [nb_lien[0], nb_lien[-1]], then orients the edges using the
    coloring produced by algo_Welsh_Powel.
    """
    liste_noeuds = [str(i) for i in range(dimMat)]
    matA = pd.DataFrame(columns=liste_noeuds, index=liste_noeuds)
    # random lower-triangular start (diagonal excluded)
    mat_ = np.random.randint(0, 2, (dimMat, dimMat))
    mat_ = np.tril(mat_, k=-1)
    for i in range(dimMat):
        if i <= int(dimMat / 2):
            # first half: adjust the column below the diagonal
            l = list(mat_[i + 1:, i])
            while l.count(1) > nb_lien[-1]:
                # too many edges: switch a random 1 off
                # (the comprehension's `i` is scoped to it in Python 3 and
                # does not clobber the outer loop index)
                indices = [i for i, x in enumerate(l) if x == 1]
                rand_index = indices[rd.randint(0, len(indices) - 1)]
                l[rand_index] = 0
            while l.count(1) < nb_lien[0]:
                # too few edges: switch a random 0 on
                indices = [i for i, x in enumerate(l) if x == 0]
                rand_index = indices[rd.randint(0, len(indices) - 1)]
                l[rand_index] = 1
            mat_[i + 1:, i] = np.asarray(l)
        else:
            # second half: adjust the row left of the diagonal
            l = list(mat_[i, :i])
            while l.count(1) > nb_lien[-1]:
                indices = [i for i, x in enumerate(l) if x == 1]
                rand_index = indices[rd.randint(0, len(indices) - 1)]
                l[rand_index] = 0
            while l.count(1) < nb_lien[0]:
                indices = [i for i, x in enumerate(l) if x == 0]
                rand_index = indices[rd.randint(0, len(indices) - 1)]
                l[rand_index] = 1
            mat_[i, :i] = np.asarray(l)
    # mirror the lower triangle to make the matrix symmetric
    for i in range(1, dimMat):
        for j in range(0, i):
            mat_[j, i] = mat_[i, j]
    mat = pd.DataFrame(mat_)
    mat.columns = liste_noeuds
    mat.index = liste_noeuds
    # orient the edges using the Welsh-Powell coloring: keep arc noeud->w
    # only when noeud's color is strictly smaller
    dico = dict()
    for noeud in liste_noeuds:
        dico[noeud] = 0
    liste_arcs_ = fct_aux.aretes(mat, orientation=True)
    liste_noeuds_decroissant, dico_color_noeud = algo_Welsh_Powel(mat)
    liste_noeuds_decroissant.reverse()
    for noeud in liste_noeuds_decroissant:
        liste_w = fct_aux.voisins(liste_arcs_, noeud)
        for w in liste_w:
            if dico_color_noeud[noeud] < dico_color_noeud[w]:
                matA.loc[noeud][w] = 1
    matA.fillna(0, inplace=True)
    matA.index.rename("nodes", inplace=True)
    # NOTE(review): this appends an extra row labeled "nodes" holding the
    # node labels — confirm downstream consumers expect it.
    matA.loc["nodes"] = [str(i) for i in matA.index]
    return matA
def execute_algos(mat_GR, mat_LG, chemin_matrice, mode, critere, prob,
                  k_erreur, nbre_graphe, num_graph_G_k, alpha,
                  number_items_pi1_pi2, DBG):
    """
    Run the cover and the correction algorithms, according to the given
    parameters, on `alpha` independently perturbed copies of mat_LG.

    For each copy: perturb k_erreur edges, save the matrix, compute a
    clique cover, optionally correct it, then measure the line distance
    (dc) and the Hamming distance (dh).  Per-copy tuples are accumulated
    and summary statistics are appended to a distribution file.

    Returns the list of per-copy result tuples when DBG is true, else [].
    """
    print("num_graph_G_k={} <=== debut ===>".format(num_graph_G_k))
    start_G_k = time.time()
    results_k_alpha = []
    moy_dh, moy_dc = 0, 0
    # np.inf marks "no distance accumulated yet"
    sum_dh, sum_dc = np.inf, np.inf
    aretes_LG = fct_aux.aretes(mat_LG, orientation=False, val_0_1=1)
    for alpha_ in range(0, alpha):
        result_k_alpha = None
        # perturb k_erreur edges of LG according to prob
        mat_LG_k_alpha, aretes_LG_k_alpha, aretes_modifiees = \
            add_remove_edges(mat_LG, aretes_LG, k_erreur, prob)
        mat_LG_k_alpha.to_csv(chemin_matrice + \
                              NOM_MATE_LG_k_alpha + str(alpha_) + EXTENSION,
                              index_label = INDEX_COL_MATE_LG)
        sommets_k_alpha = creat_gr.sommets_mat_LG(mat_LG_k_alpha)

        # cover algorithm
        cliques_couvertures, aretes_LG_k_alpha_res, sommets_k_alpha_res = \
            algoCouverture.clique_covers(mat_LG_k_alpha,
                                         aretes_LG_k_alpha,
                                         sommets_k_alpha,
                                         DBG)
        sommets_trouves_couv = []
        sommets_absents_couv = set()
        # NOTE(review): cliqs_couv / cliqs_cor are initialized but never
        # populated, so their lengths in the result tuple are always 0.
        etat0_couv, etat1_couv, etat_1_couv, etat2_couv, etat3_couv, cliqs_couv = \
            set(), set(), set(), set(), set(), set()
        sommets_trouves_couv, sommets_absents_couv, \
        etat0_couv, etat1_couv, etat_1_couv, etat2_couv, etat3_couv = \
            analyse_resultat(cliques_couvertures,
                             sommets_k_alpha_res,
                             set(mat_GR.columns))

        # correction algorithm
        sommets_trouves_cor = []
        sommets_absents_cor = set()
        etat0_cor, etat1_cor, etat_1_cor, etat2_cor, etat3_cor, cliqs_cor = \
            set(), set(), set(), set(), set(), set()
        aretes_LG_k_alpha_cor = []
        # NOTE(review): the existence test uses the pre-cover
        # sommets_k_alpha, not sommets_k_alpha_res — confirm intended.
        if fct_aux.is_exists_sommet(sommets=sommets_k_alpha, etat_1=-1):
            aretes_LG_k_alpha_cor = aretes_LG_k_alpha_res.copy()
            cliques_couvertures_1 = list(cliques_couvertures.copy())
            # the uncovered edges are kept as 2-element cliques
            aretes_res_non_effacees = list(
                map(frozenset, aretes_LG_k_alpha_res))
            cliques_couvertures_1.extend(aretes_res_non_effacees)
            sommets_tmp = creat_gr.sommets_mat_LG(mat_LG_k_alpha)
            sommets_k_alpha_1 = fct_aux.modify_state_sommets_mat_LG(
                                    sommets=sommets_tmp,
                                    sommets_res=sommets_k_alpha_res)
            cliques_couvertures_cor, \
            aretes_LG_k_alpha_cor, \
            sommets_k_alpha_cor, \
            cliques_par_nom_sommets_k_alpha_cor, \
            dico_sommets_corriges = \
                algoCorrection.correction_algo(
                    cliques_couvertures=set(cliques_couvertures_1),
                    aretes_LG_k_alpha=aretes_LG_k_alpha,
                    sommets_LG=sommets_k_alpha_1,
                    mode_correction=mode,
                    critere_correction=critere,
                    number_items_pi1_pi2=number_items_pi1_pi2,
                    DBG=DBG
                    )
            sommets_trouves_cor, sommets_absents_cor, \
            etat0_cor, etat1_cor, etat_1_cor, etat2_cor, etat3_cor = \
                analyse_resultat(cliques_couvertures_cor,
                                 sommets_k_alpha_cor,
                                 set(mat_GR.columns))

        # distance computation
        # aretes_LG_k_alpha_cor is the edge set after correction; one could
        # also compare against aretes_LG_k_alpha
        dc_alpha = len(
            calculate_hamming_distance(mat_LG=aretes_LG_k_alpha,
                                       mat_LG_k=aretes_LG_k_alpha_cor)
            )
        dh_alpha = len(
            calculate_hamming_distance(mat_LG=aretes_LG,
                                       mat_LG_k=aretes_LG_k_alpha_cor))

        # result tuple for this k_alpha execution
        result_k_alpha = (
            num_graph_G_k, k_erreur, alpha_, mode, critere, prob,
            len(sommets_trouves_couv), len(sommets_absents_couv),
            len(etat0_couv), len(etat1_couv), len(etat_1_couv),
            len(etat2_couv), len(etat3_couv),
            len(sommets_trouves_cor), len(sommets_absents_cor),
            len(etat0_cor), len(etat1_cor), len(etat_1_cor),
            len(etat2_cor), len(etat3_cor),
            len(cliqs_couv), len(cliqs_cor),
            dc_alpha,
            dh_alpha,
            )
        results_k_alpha.append(result_k_alpha)
        sum_dc = dc_alpha if sum_dc == np.inf else sum_dc + dc_alpha
        sum_dh = dh_alpha if sum_dh == np.inf else sum_dh + dh_alpha
        pass # for alpha_

    moy_dc = sum_dc / alpha
    moy_dh = sum_dh / alpha
    if moy_dh == 0 and moy_dc == 0:
        correl_dc_dh = 1
    else:
        correl_dc_dh = abs(moy_dh - moy_dc) / max(moy_dh, moy_dc)

    # append to a file that can be read while it is still being written
    nbre_sommets_LG = len(mat_LG.columns)
    chemin_dist = chemin_matrice + "../.." + "/" + "distribution" + "/"
    path = Path(chemin_dist)
    path.mkdir(parents=True, exist_ok=True) if not path.is_dir() else None
    # NOTE(review): the file handle is never closed explicitly — relies on
    # garbage collection to flush; consider a `with` block.
    f = open(chemin_dist + \
             "distribution_moyDistLine_moyHamming_k_" + \
             str(k_erreur) + \
             ".txt","a")
    f.write(str(num_graph_G_k) + ";" \
            + str(k_erreur) + ";" \
            + str( round(moy_dc,2) ) + ";" \
            + str( round(moy_dh,2) ) + ";" \
            # str( len() ) + ";" + \
            + str(nbre_sommets_LG) + ";" \
            + str(len(aretes_LG)) + ";" \
            + str(correl_dc_dh) + ";" \
            + str( time.time()-start_G_k ) \
            + "\n"
            )

    print("results_k_alpha={}".format(len(results_k_alpha)))
    print("num_graph_G_k={} <=== Termine :runtime={} ===>".format(
            num_graph_G_k,
            round(time.time() - start_G_k, 4)))
    if DBG:
        return results_k_alpha
    else:
        return []
    pass # execute_algo
def test_add_remove_edges(graphes_GR_LG):
    """
    Check that add_remove_edges reports exactly the edges it added and
    removed, by recomputing both sets from the before/after edge sets.

    Returns a DataFrame (one row per graph) with the comparison details.
    """
    dico_df = dict()
    for graphe_GR_LG in graphes_GR_LG:
        mat_LG = graphe_GR_LG[1]
        prob = graphe_GR_LG[5]
        k_erreur = graphe_GR_LG[6]
        num_graph = graphe_GR_LG[8]+"_p_"+str(prob)
        aretes_LG = fct_aux.aretes(mat_LG, orientation=False, val_0_1=1)
        mat_LG_k, aretes_LG_k, aretes_modifiees = gr_disco_simi.add_remove_edges(
                                                    mat_LG,
                                                    aretes_LG,
                                                    k_erreur,
                                                    prob
                                                    )
        ### debug cross-check : to delete
        # recompute the perturbed edge set directly from the matrix
        aretes_LG_k_from_mat_LG_k = fct_aux.aretes(mat_LG_k,
                                                   orientation=False,
                                                   val_0_1=1)
        aretes_diff_from_mat_LG_k = aretes_LG_k_from_mat_LG_k - \
            aretes_LG_k_from_mat_LG_k.intersection(aretes_LG_k)
        ### explicit for-loop cross-check ===> begin : to delete
        aretes_ajouts_for , aretes_supps_for = set(), set()
        for arete in aretes_LG:
            if arete not in aretes_LG_k:
                aretes_supps_for.add(arete)
        for arete_k in aretes_LG_k:
            if arete_k not in aretes_LG:
                aretes_ajouts_for.add(arete_k)
        ### explicit for-loop cross-check ===> end
        # set-algebra versions of the added/removed edge sets
        aretes_ajout_LG_cal = aretes_LG.union(aretes_LG_k) - aretes_LG
        aretes_supp_LG_cal = aretes_LG_k.union(aretes_LG) - aretes_LG_k
        res = ""
        if aretes_ajout_LG_cal == set(aretes_modifiees["aretes_ajoutees"]) and \
           aretes_supp_LG_cal == set(aretes_modifiees["aretes_supprimees"]) :
            res = "OK"
            print("TEST : add_remove_edges OK")
        else:
            res = "NOK"
            print("TEST : add_remove_edges NOK")
        dico_df[num_graph] = {
            "nbre_aretes_diff": len(aretes_LG) - len(aretes_LG_k),
            "nbre_aretes_diff_from_mat": len(aretes_diff_from_mat_LG_k),
            "prob":prob,
            "k_erreur":k_erreur,
            "res":res,
            "aretes_ajout_LG_cal": aretes_ajout_LG_cal,
            "aretes_supp_LG_cal": aretes_supp_LG_cal,
            "aretes_ajoutees": set(aretes_modifiees["aretes_ajoutees"]),
            "aretes_supprimees": set(aretes_modifiees["aretes_supprimees"]),
            "aretes_LG":aretes_LG,
            "aretes_LG_k":aretes_LG_k,
            "aretes_ajouts_for":aretes_ajouts_for,
            "aretes_supps_for":aretes_supps_for
            }
    df_test_ajout_supp_k_aretes = pd.DataFrame.from_dict(dico_df).T
    return df_test_ajout_supp_k_aretes
def test_update_sommets_LG(graphes_GR_LG):
    """
    Verify that graph modifications are propagated into sommets_LG.

    Procedure:
    - build sommets_LG from mat_LG
    - remove/add k edges of mat_LG => mat_LG_k
    - apply update_sommets_LG => sommets_LG_new
    - check that the neighborhoods in sommets_LG_new changed accordingly
      compared to sommets_LG.
    """
    dico_df = dict()
    for graphe_GR_LG in graphes_GR_LG:
        print("TEST is_state ET selected_node")
        mat_LG = graphe_GR_LG[1]
        prob = graphe_GR_LG[5]
        k_erreur = graphe_GR_LG[6]
        sommets_LG = creat_gr.sommets_mat_LG(mat_LG, etat=2)
        aretes_LG = fct_aux.aretes(mat_GR=mat_LG,
                                   orientation=False,
                                   val_0_1=1)
        mat_LG_k_alpha, aretes_LG_k_alpha, aretes_modifiees = \
            gr_disco_simi.add_remove_edges(mat_LG,
                                           aretes_LG,
                                           k_erreur,
                                           prob)
        sommets_k_alpha = creat_gr.sommets_mat_LG(mat_LG_k_alpha, etat=2)
        # NOTE(review): DBG is not defined in this function's scope — this
        # raises NameError unless DBG is a module-level global; verify.
        cliques_couvertures_res, aretes_LG_k_alpha_res, sommets_k_alpha_res = \
            algo_couv.clique_covers(mat_LG_k_alpha,
                                    aretes_LG_k_alpha,
                                    sommets_k_alpha,
                                    DBG)
        # keep uncovered edges alongside the cliques
        if aretes_LG_k_alpha_res:
            cliques_couvertures_res = cliques_couvertures_res.union(
                                        aretes_LG_k_alpha_res)
        cliques_par_nom_sommets_k = fct_aux.grouped_cliques_by_node(
                                        cliques_couvertures_res,
                                        sommets_LG.keys())
        sommets_new = algo_corr.update_sommets_LG(
                        sommets_LG,
                        cliques_couvertures_res,
                        cliques_par_nom_sommets_k)
        res_supp, res_ajout = 'OK', 'OK'
        # removed edges must no longer appear in the updated neighborhoods
        for arete_modif in aretes_modifiees['aretes_supprimees']:
            som_0, som_1 = list(arete_modif)[0], list(arete_modif)[1]
            if som_0 in sommets_new[som_1].voisins or \
               som_1 in sommets_new[som_0].voisins:
                res_supp = "NOK"
        # added edges must not have been present in the ORIGINAL sommets_LG
        for arete_modif in aretes_modifiees['aretes_ajoutees']:
            som_0, som_1 = list(arete_modif)[0], list(arete_modif)[1]
            if som_0 in sommets_LG[som_1].voisins or \
               som_1 in sommets_LG[som_0].voisins:
                res_ajout = "NOK"
        num_graph = graphe_GR_LG[8]+"_p_"+str(prob)
        dico_df[num_graph] = {
            "nbre_sommets": len(mat_LG.columns),
            "res_ajout": res_ajout,
            "res_supp": res_supp
            }
    return pd.DataFrame.from_dict(dico_df).T
def test_modify_state_sommets_mat_LG(graphes_GR_LG):
    """
    Run cover then correction on each unperturbed line graph and check
    that the per-state node counts are identical before and after the
    correction step.

    Returns a DataFrame with one row per graph and an "OK"/"NOK" column.
    """
    results_k_alpha = []
    cols = ['num_graph',
            'sommets_trouves_couv','sommets_absents_couv',
            'etat0_couv','etat1_couv','etat_1_couv',
            'etat2_couv','etat3_couv',
            'sommets_trouves_cor','sommets_absents_cor',
            'etat0_cor','etat1_cor','etat_1_cor',
            'etat2_cor','etat3_cor','res']
    for graphe_GR_LG in graphes_GR_LG:
        num_graph = graphe_GR_LG[8]+"_p_"+str(graphe_GR_LG[5])
        mat_LG = graphe_GR_LG[1]
        mat_GR = graphe_GR_LG[0]
        aretes_LG = fct_aux.aretes(mat_LG)
        sommets_LG = creat_gr.sommets_mat_LG(mat_LG)
        # clique cover
        cliqs_couverts, aretes, sommets = \
            algo_couv.clique_covers(mat_LG, aretes_LG, sommets_LG,True)
        # node states after the cover
        sommets_trouves_couv=[]
        sommets_absents_couv=set()
        etat0_couv, etat1_couv, etat_1_couv, etat2_couv, etat3_couv = \
            set(), set(), set(), set(), set()
        sommets_trouves_couv, sommets_absents_couv, \
        etat0_couv, etat1_couv, etat_1_couv, etat2_couv, etat3_couv = \
            gr_disco_simi.analyse_resultat(cliqs_couverts,
                                           sommets,
                                           set(mat_GR.columns))
        # correction step
        sommets_tmp = creat_gr.sommets_mat_LG(mat_LG)
        sommets_k_alpha_1 = fct_aux.modify_state_sommets_mat_LG(
                                sommets=sommets_tmp,
                                sommets_res=sommets)
        # NOTE(review): keyword `cliques_couverture=` (singular) and the
        # 3-value unpack differ from correction_algo's usage elsewhere in
        # this file (`cliques_couvertures=`, 5 return values) — confirm
        # which algo_corr version this test targets.
        cliques_couvertures_cor, \
        aretes_LG_k_alpha_cor,\
        sommets_k_alpha_cor = \
            algo_corr.correction_algo(
                cliques_couverture=set(cliqs_couverts),
                aretes_LG_k_alpha=aretes_LG,
                sommets_LG=sommets_k_alpha_1
                )
        # node states after the correction
        sommets_trouves_cor, sommets_absents_cor, \
        etat0_cor, etat1_cor, etat_1_cor, etat2_cor, etat3_cor = \
            gr_disco_simi.analyse_resultat(cliques_couvertures_cor,
                                           sommets_k_alpha_cor,
                                           set(mat_GR.columns))
        if etat0_couv == etat0_cor and etat1_couv == etat1_cor and \
           etat_1_couv == etat_1_cor and etat2_couv == etat2_cor and \
           etat3_couv == etat3_cor :
            res = "OK"
        else:
            res = "NOK"
        result_k_alpha = (num_graph,
                          len(sommets_trouves_couv),len(sommets_absents_couv),
                          len(etat0_couv),len(etat1_couv),len(etat_1_couv),
                          len(etat2_couv),len(etat3_couv),
                          len(sommets_trouves_cor),len(sommets_absents_cor),
                          len(etat0_cor),len(etat1_cor),len(etat_1_cor),
                          len(etat2_cor),len(etat3_cor),
                          res
                          )
        results_k_alpha.append(result_k_alpha)
    df = pd.DataFrame(results_k_alpha, columns=cols)
    return df
    pass
def test_algo_covers(graphes_GR_LG) :
    """
    Test the clique_covers function and also is_exists_sommet.

    For each graph, the cover is valid when every clique's edge labels
    (node names of the form "a_b") share exactly one common root-graph
    node, and all root-graph nodes are recovered that way.
    """
    # split an LG node name "a_b" into its two root-graph endpoints
    f=lambda x: set(x.split("_"))
    etat_recherche_1 = -1  # candidate states: 0,1,2,3,-1
    dico_df = dict()
    for graphe_GR_LG in graphes_GR_LG:
        prob = graphe_GR_LG[5]
        num_graph = graphe_GR_LG[8]+"_p_"+str(prob)
        mat_LG = graphe_GR_LG[1]
        aretes_LG = fct_aux.aretes(mat_LG)
        sommets_LG = creat_gr.sommets_mat_LG(mat_LG)
        start = time.time()
        # clique cover
        cliqs_couverts, aretes, sommets = \
            algo_couv.clique_covers(mat_LG, aretes_LG, sommets_LG,True)
        # is_exists_sommet check
        exist_som_1 = None
        exist_som_1 = fct_aux.is_exists_sommet(sommets=sommets,
                                               etat_1=etat_recherche_1)
        # modify_state_sommets_mat_LG not tested here: unclear what the
        # expected comparison would be.
        runtime = round(time.time() - start, 2)
        som_trouves=[]
        for cliq in cliqs_couverts:
            # NOTE(review): `aretes` (the cover's leftover edges) is
            # clobbered here — the value stored under "aretes_res" below is
            # whatever this loop last assigned; confirm intended.
            aretes = list(map(f, cliq))
            sommet_commun = None
            sommet_commun = set.intersection(*aretes)
            if sommet_commun != None and len(sommet_commun) == 1:
                som_trouves.append(sommet_commun.pop())
        # count the nodes in each specific state
        etat0, etat1, etat_1, etat2, etat3 = set(), set(), set(), set(), set()
        for nom_som, sommet in sommets.items():
            if sommet.etat == 0:
                etat0.add(nom_som)
            elif sommet.etat == 1:
                etat1.add(nom_som)
            elif sommet.etat == 2:
                etat2.add(nom_som)
            elif sommet.etat == 3:
                etat3.add(nom_som)
            elif sommet.etat == -1:
                etat_1.add(nom_som)
        mat_GR = graphe_GR_LG[0]
        som_GR = set(mat_GR.columns)
        som_absents = som_GR.union(som_trouves) - set(som_trouves)
        res = ""
        if som_GR == set(som_trouves):
            res = 'OK'
        else:
            res = 'NOK'
        print("TEST cliques_cover num_graphe={} runtime={}, ==>res={}, exist_som={}".format(
                num_graph,runtime,res, exist_som_1))
        dico_df[num_graph] = {"res":res,
                              "nbre_som_GR":len(som_GR),
                              "nbre_som_trouves":len(som_trouves),
                              "som_absents":som_absents,
                              "aretes_res":aretes,
                              "etat0": len(etat0),
                              "etat1": len(etat1),
                              "etat2": len(etat2),
                              "etat3": len(etat3),
                              "etat_1": len(etat_1),
                              "exist_som_1": exist_som_1,
                              "runtime":runtime
                              }
    return pd.DataFrame.from_dict(dico_df).T