def mise_a_jour_voisinage_sommets(dico_gamma_noeud, cliques, matE_k_alpha, aretes_matE_k_alpha):
    """
    Remove the edges of the given cliques (C1 and C2) from matE_k_alpha and
    refresh the vertex neighbourhoods accordingly.

    Parameters
    ----------
    dico_gamma_noeud : dict
        Neighbourhood map as produced by fct_aux.gamma_noeud
        (presumably {vertex: [degree, set_of_neighbours]} -- confirm against
        fct_aux). Returned unchanged when no clique triggered a deletion.
    cliques : iterable
        Iterable of cliques (each an iterable of vertices); ``None`` entries
        are skipped.
    matE_k_alpha : pandas.DataFrame
        Adjacency matrix, modified IN PLACE: both symmetric cells of every
        edge inside a clique are set to 0.
    aretes_matE_k_alpha : iterable
        Current edge list of matE_k_alpha; recomputed when at least one
        clique was processed.

    Returns
    -------
    tuple
        (dico_gamma_noeud, list_of_edges) after the deletions.
    """
    modified = False
    for C in cliques:
        if C is None:
            continue
        # Zero out both symmetric cells for every pair of vertices of the clique.
        for u, v in it.combinations(C, 2):
            matE_k_alpha.loc[u, v] = 0
            matE_k_alpha.loc[v, u] = 0
        modified = True
    # Recompute the edge list and the neighbourhoods once, after all the
    # deletions.  The original recomputed both for every clique, which was
    # redundant work producing the same final result; it also accumulated
    # the deleted edges in an `aretes_supp` list that was never used.
    if modified:
        aretes_matE_k_alpha = fct_aux.liste_arcs(matE_k_alpha)
        dico_gamma_noeud = fct_aux.gamma_noeud(matE_k_alpha, aretes_matE_k_alpha)
    return dico_gamma_noeud, list(aretes_matE_k_alpha)
def simulation_G0_k(matE_G0_k, k, alpha_min, identifiant, arg_params):
    """
    Goal: test the correlation modification on the particular graph G0_k
    with the following node-selection methods:
        * minimum degree with permutation ("degreMin")
        * minimum cost with permutation ("coutMin")
        * random ("aleatoire")

    N = 100

    matE_G0_k : ADJACENCY MATRIX of G0_k, k being the depth of the graph G0.

    arg_params = {"number_permutations_nodes_1": 10 (30, 100), "biais": True,
                  "algoGreedy": False,
                  "mode_select_noeuds_1": "coutMin" or "degreMin" or "aleatoire",
                  "number_items_pi1_pi2": 1,
                  "methode_deleted_add_edges": 0, "SEUIL_PROBA": 0.8,
                  "proba_seuil": proba_seuil,
                  "coef_fct_cout": (exposant, facteur_multiplicatif)}

    NOTE(review): the body reads arg_params["methode_delete_add_edges"]
    while the sketch above spells the key "methode_deleted_add_edges" --
    confirm the real key name against the callers.
    """
    # Column headers of the tracking / debug dataframe.
    headers_df = ["G_cpt", "k", "alpha", "nbre_aretes_matE", "nbre_aretes_matE_k_alpha", "deleted_edges",\
                  "nbre_aretes_L_G", "nbre_aretes_diff_matE_k_alpha_LG",\
                  "dist_line", "aretes_diff_matE_k_alpha_LG",\
                  "nbre_aretes_diff_matE_LG", "hamming", "aretes_diff_matE_LG",\
                  "C","som_cout_min","noeuds_corriges",\
                  "min_hamming","mean_hamming","max_hamming","ecart_type",\
                  "max_cout","max_permutation",\
                  "dico_som_min_permutations","dico_dual_arc_sommet","ordre_noeuds_traites","C_old"]

    # Create the result directory for the chosen method.
    path_distr_chemin = str(arg_params["mode_select_noeuds_1"])+"_particulier/"+"data_p_"+\
                        str(arg_params["proba_seuil"])+"/distribution/"
    path_distr = Path(path_distr_chemin)
    path_distr.mkdir(parents=True, exist_ok=True)

    df_debug = pd.DataFrame(columns=headers_df)
    # NOTE(review): cpt_df_debug is never incremented, so the
    # `cpt_df_debug % 100 == 0` test further down is always true and the
    # debug dataframe is saved on every call -- confirm whether intended
    # (simulation_iourte uses args["k"] for the same purpose).
    cpt_df_debug = 0
    G_cpt = "G_" + str(k) + "_" + str(alpha_min)

    # Create the directories holding the datasets and the matrices,
    # e.g. rep = methode_correction_nodes_1/data_p_XX/G_10 with 10 = cpt_graphe_genere
    path = Path(str(arg_params["mode_select_noeuds_1"])+"_particulier/"+\
                "data_p_"+str(arg_params["proba_seuil"])+"/"+G_cpt+'/datasets/')
    path.mkdir(parents=True, exist_ok=True)
    path = Path(str(arg_params["mode_select_noeuds_1"])+"_particulier/"+\
                "data_p_"+str(arg_params["proba_seuil"])+"/"+G_cpt+'/matrices/')
    path.mkdir(parents=True, exist_ok=True)

    # Initialise the aggregated metrics.
    aretes_matE = len(fct_aux.liste_arcs(matE_G0_k))
    moy_distline = 0
    moy_hamming = 0
    sum_distline = 0   # NOTE(review): never read afterwards
    sum_hamming = 0    # NOTE(review): never read afterwards
    correl_dl_dh = 0
#    chemin_datasets = str(arg_params["mode_select_noeuds_1"])+"/"+\
#                      "data_p_"+str(arg_params["proba_seuil"])+"/"+G_cpt+"/datasets/";
#    chemin_matrices = str(arg_params["mode_select_noeuds_1"])+"/"+\
#                      "data_p_"+str(arg_params["proba_seuil"])+"/"+G_cpt+"/matrices/";
#    #### TO DELETE IF IT WORKS
    try:
        # Modify k correlations (delete/add k cells of the matrix).
        # TODO: change modif_k_cases so that a deleted_edge never repeats
        #       ==> A BIT COMPLICATED because the processes are independent.
        matE_k_alpha, dico_deleted_add_edges = simu.modif_k_cases(matE_G0_k.copy(), k, \
                            arg_params["methode_delete_add_edges"], arg_params["proba_seuil"])
        deleted_edges = list(dico_deleted_add_edges.values())
        dico_proba_cases = simu.ajouter_proba_matE(matE_k_alpha,
                                                   dico_deleted_add_edges,
                                                   arg_params["SEUIL_PROBA"])

        # Cliques produced by the covering algorithm.
        liste_cliques = list()
        dico_cliq = dict()
        ordre_noeuds_traites = []  # because liste_cliques = []
        for noeud in matE_k_alpha.columns.tolist():
            dico_cliq[noeud] = -1
        aretes_matE_alpha = fct_aux.liste_arcs(matE_k_alpha)
        dico_gamma_noeud = fct_aux.gamma_noeud(matE_k_alpha, aretes_matE_alpha)

        # Covering algorithm according to arg_params["mode_select_noeuds_1"].
        dico_permutations = dict()
        dico_permutations = decouvClique.solution_methode_nodes_1(dico_gamma_noeud,\
                                liste_cliques, aretes_matE_alpha, ordre_noeuds_traites, \
                                dico_cliq, dico_proba_cases, arg_params)

        # BEGIN: select the node permutation with the smallest Hamming distance.
        dico_sol = dict()
        dico_sol = simu.best_permutation(dico_permutations, matE_G0_k, matE_k_alpha)
        # END: selection of the best permutation.

        # Average dist_line and hamming over the k deleted edges.
        moy_distline = dico_sol["dist_line"]
        moy_hamming = dico_sol["hamming"]
        if moy_hamming == 0 and moy_distline == 0:
            correl_dl_dh = 1
        else:
            correl_dl_dh = abs(moy_hamming - moy_distline) / max(
                                moy_hamming, moy_distline)

        # Append to a file that can be read while it is still being written.
        f = open(
            path_distr_chemin + "distribution_moyDistLine_moyHamming_k_" +
            str(k) + ".txt", "a")
        f.write(G_cpt+";"+str(k)+";"+str(moy_distline)+";"+str(moy_hamming)+";"+str(aretes_matE)+\
                ";"+str(correl_dl_dh)+"\n")
        f.close()

        # For debugging / logging: group permutations by values[6]
        # (presumably the minimum cost of the permutation -- confirm against
        # decouvClique.solution_methode_nodes_1).
        dico_som_min_permutations = dict()
        for l_noeuds_1, values in dico_permutations.items():
            if values[6] not in dico_som_min_permutations.keys():
                dico_som_min_permutations[values[6]] = [l_noeuds_1]
            else:
                dico_som_min_permutations[values[6]].append(l_noeuds_1)
        dico_dual_arc_sommet = mesures.nommage_arcs(matE_G0_k)

        # One debug row per simulated graph; column order follows headers_df.
        df_debug.loc[len(df_debug.index)] = [G_cpt, k, alpha_min, \
                     dico_sol["nbre_aretes_matE"], dico_sol["nbre_aretes_matE_k_alpha"], \
                     deleted_edges,\
                     dico_sol["nbre_aretes_LG"], dico_sol["nbre_aretes_diff_matE_k_alpha_LG"],\
                     dico_sol["dist_line"], dico_sol["liste_aretes_diff_matE_k_alpha_LG"], \
                     dico_sol["nbre_aretes_diff_matE_LG"], dico_sol["hamming"],\
                     dico_sol["liste_aretes_diff_matE_LG"],\
                     dico_sol["C"], dico_sol["som_cout_min"], \
                     dico_sol["noeuds_corriges"], \
                     dico_sol["min_hamming"],dico_sol["mean_hamming"], \
                     dico_sol["max_hamming"],dico_sol["ecart_type"],\
                     dico_sol["max_cout"], dico_sol["max_permutation"],\
                     dico_som_min_permutations, dico_dual_arc_sommet,\
                     dico_sol["ordre_noeuds_traites"], dico_sol["C_old"]]
        if cpt_df_debug % 100 == 0:
            simu.save_df(df_debug, path_distr_chemin, identifiant, headers_df)
            df_debug = pd.DataFrame(columns=headers_df)
            print("save %s fois" % cpt_df_debug)
    except Exception as e:
        # Any failure in the pipeline produces an "error" debug row instead
        # of aborting the whole simulation batch.
        print("####### EmptyDataError ", G_cpt, ": e = ", e, " ####### ")
        df_debug.loc[len(df_debug.index)] = [G_cpt, k, alpha_min, \
                     "error", "error", \
                     "error",\
                     "error","error" ,\
                     "error","error" , \
                     "error","error" ,\
                     "error",\
                     "error", "error", \
                     "error", \
                     "error","error", "error",\
                     "error",\
                     "error","error" ,\
                     "error","error","error", "error"]
    pass
def simulation_iourte(matE_G_k, dico_proba_cases, args):
    """
    Correct the graph G_k according to the parameters in `args`.

    The keys of `args` read here are: "k", "k_deep", "path_save",
    "mode_select_noeuds_1".  Results are appended to
    <path_save><mode>_iourte/distribution_moyDistLine_G_k.txt and a debug
    dataframe is periodically flushed via simu50.save_df.
    """
    # Column headers of the tracking / debug dataframe.
    headers_df = ["G_cpt", "nbre_aretes_G_k", "nbre_aretes_LG", \
                  "dist_line", "nbre_aretes_diff_G_k_LG", \
                  "aretes_diff_G_k_LG", "C", "len(C)", "som_cout_min",\
                  "noeuds_corriges", "ordre_noeuds_traites",\
                  "min_DL", "mean_DL", "max_DL", "ecart_type",\
                  "max_cout", "max_permutation",\
                  "dico_som_min_permutations"];
    df_debug = pd.DataFrame( columns = headers_df);
    G_cpt = "G_"+str(args["k"]);

    # Create the directory holding the distribution file.
    path_distr = Path(args["path_save"]+args["mode_select_noeuds_1"]+"_iourte/");
    path_distr.mkdir(parents=True, exist_ok=True);
    path_save = args["path_save"]+args["mode_select_noeuds_1"]+"_iourte/";

    # Initialise variables.
    aretes_matE_G_k = fct_aux.liste_arcs(matE_G_k);
    try:
        ordre_noeuds_traites = []  # because liste_cliques = []
        cliques = [];              # because the graph is a "iourte" graph
        dico_cliq = dict();
        for noeud in matE_G_k.columns:
            dico_cliq[noeud] = -1
        dico_gamma_noeud = fct_aux.gamma_noeud(matE_G_k, aretes_matE_G_k)

        # Correction algorithm according to args["mode_select_noeuds_1"].
        dico_permutations = dict();
        dico_permutations = decouvClique.solution_methode_nodes_1(dico_gamma_noeud,\
                                cliques, aretes_matE_G_k, ordre_noeuds_traites, \
                                dico_cliq, dico_proba_cases, args);

        # BEGIN: select the node permutation with the smallest Hamming distance.
        dico_sol = dict()
        dico_sol = best_permutation_iourte(dico_permutations, matE_G_k)
        # END: selection of the best permutation.

        # Compare aretes_matE_G_k with aretes_LG (as a percentage).
        cpt_aretes_G_k_notIn_LG = 0;
        cpt_aretes_G_k_notIn_LG = comparaison_aretes_G_k_LG(aretes_matE_G_k,
                                                            dico_sol["aretes_LG"]);

        # Append to a file that can be read while it is still being written.
        f = open(path_save+"distribution_moyDistLine_G_k.txt","a")
        f.write(G_cpt+";"+str(args["k"])+";"+str(dico_sol["dist_line"])+";"\
                +str(len(aretes_matE_G_k))+";"+str(cpt_aretes_G_k_notIn_LG)+"\n")
        f.close();

        # For debugging / logging: group permutations by values[6].
        dico_som_min_permutations = dict();
        for l_noeuds_1, values in dico_permutations.items():
            if values[6] not in dico_som_min_permutations.keys():
                dico_som_min_permutations[values[6]] = [l_noeuds_1]
            else:
                dico_som_min_permutations[values[6]].append(l_noeuds_1)

        # One debug row per corrected graph; column order follows headers_df.
        df_debug.loc[len(df_debug.index)] = [\
                     G_cpt, len(aretes_matE_G_k),\
                     dico_sol["nbre_aretes_LG"], dico_sol["dist_line"],\
                     dico_sol["nbre_aretes_diff_matE_G_k_LG"],\
                     dico_sol["aretes_diff_matE_G_k_LG"],\
                     dico_sol["C"], len(dico_sol["C"]), dico_sol["som_cout_min"],\
                     dico_sol["noeuds_corriges"], dico_sol["ordre_noeuds_traites"],\
                     dico_sol["min_line"], dico_sol["mean_line"],\
                     dico_sol["max_line"], dico_sol["ecart_type"],\
                     dico_sol["max_cout"], dico_sol["max_permutation"],\
                     dico_som_min_permutations]
#        CPT_DF_DEBUG += 1;
        # Flush the debug dataframe every 100 graphs (keyed on args["k"]).
        if args["k"] % 100 == 0:
            simu50.save_df(df_debug, path_save, args["k_deep"], headers_df)
            df_debug = pd.DataFrame( columns = headers_df)
            print("save {} fois".format( args["k"] ))
    except Exception as e:
        # Any failure produces an "error" debug row which is saved immediately.
        print("####### EmptyDataError ", G_cpt, ": e = ", e," ####### ");
        df_debug.loc[len(df_debug.index)] = [G_cpt, len(aretes_matE_G_k), \
                         "error", "error", "error", "error", "error" ,\
                         "error", "error", "error", "error", "error",\
                         "error", "error", "error", "error","error", \
                         "error"];
        simu50.save_df(df_debug, path_save, args["k_deep"], headers_df)
    pass
def simulation_p_correl_k(matE_LG, matA_GR, dico_sommet_arete, chemin_matrices, chemin_datasets, mode, p_correl, k_erreur, num_graph, rep_base, dico_parametres_new):
    """
    Run the covering and correction algorithms on graphs in which k edges
    have been deleted (p_correl = 1), alpha (< alpha_max) times.

    Returns
    -------
    list
        One entry per successful alpha iteration, each produced by
        sauvegarder_parametres_execution.
    """
    path_distr = rep_base+"/../"+"distribution/";
    path = Path(path_distr);
    path.mkdir(parents=True, exist_ok=True)
    print("path_distr = {}".format(path_distr))
    aretes_matE = fct_aux.liste_arcs(matE_LG);
    list_returns = list();
    for alpha in range(dico_parametres_new["alpha_max"]):
        num_graph_alpha = num_graph +"_"+str(alpha)
        print("num_graph={}, k = {}, alpha = {} ==> debut".format(
                num_graph, k_erreur, alpha))
        start = time.time();
        try:
            # --- Phase 0: perturb the line graph (delete/add k cells). ---
            matE_k_alpha = None;
            dico_proba_cases = dict();
            matE_k_alpha, \
            dico_deleted_add_edges = \
                    fct_aux.modif_k_cases(
                        matE_LG.copy(),
                        k_erreur,
                        dico_parametres_new["methode_delete_add_edges"],
                        p_correl)
            dico_proba_cases = fct_aux.ajouter_proba_matE(
                                    matE_k_alpha,
                                    dico_deleted_add_edges,
                                    dico_parametres_new["loi_stats"],
                                    p_correl,
                                    dico_parametres_new["correl_seuil"])
            matE_k_alpha.to_csv(chemin_matrices\
                                +"matE_"+str(k_erreur)+"_"+str(alpha)+".csv")

            # --- Phase 1: covering algorithm. ---
            # Add the remaining (too numerous) edges ==> OK
            # Check the covering algorithm ==> OK
            print("1")
            dico_couverture = algo_couv.algo_decomposition_en_cliques(
                                    matE_k_alpha,
                                    dico_sommet_arete,
                                    seuil_U=10,
                                    epsilon=0.75,
                                    chemin_datasets=chemin_datasets,
                                    chemin_matrices=chemin_matrices,
                                    ascendant_1=True,
                                    simulation=True,
                                    dico_proba_cases=dico_proba_cases,
                                    dico_parametres_new=dico_parametres_new
                                    )
            dico_couverture["k_erreur"] = k_erreur;
            dico_couverture["C_old"] = dico_couverture["C"].copy();
            # Vertices whose state is -1 still need correcting.
            dico_couverture["sommets_a_corriger"] = \
                [k for k, v in dico_couverture["etats_sommets"].items()
                 if v == -1];
            dico_couverture["nbre_sommets_a_corriger"] = \
                len(dico_couverture["sommets_a_corriger"]);

            # --- Phase 2: correction algorithm for k = 1. ---
            print("2")
            dico_correction = dico_couverture;
            dico_sommets_corriges = dict();
            if -1 in dico_couverture['etats_sommets'].values():
                # Some vertices need correction: merge the leftover edges
                # into C and run the clique-correction algorithm.
                aretes_matE_k_alpha = fct_aux.liste_arcs(matE_k_alpha);
                dico_correction["C"] = dico_correction["C"] \
                                        + dico_correction['aretes_restantes'];
                dico_correction["C"] = list(map(set, dico_correction["C"]));
                dico_correction["sommets_par_cliqs_avec_aretes"] = \
                    fct_aux.couverture_par_sommets(
                        sommets_matE = list(dico_correction["etats_sommets"].keys()),
                        C = dico_correction["C"]);
                dico_correction["aretes_matE"] = aretes_matE;
                dico_correction["aretes_Ec"] = aretes_matE_k_alpha;
                dico_correction["dico_gamma_sommets"] = fct_aux.gamma_noeud(
                                                            matE_k_alpha,
                                                            aretes_matE_k_alpha);
                dico_correction, dico_sommets_corriges = \
                    algo_corr.correction_cliques(
                        dico_correction,
                        dico_parametres_new);
            elif -1 not in dico_couverture['etats_sommets'].values() and \
                len(dico_correction['aretes_restantes']) > 0:
                # No vertex to correct but leftover edges remain:
                # only merge them into C.
                dico_correction["C"] = \
                    dico_correction["C"] \
                    + dico_correction['aretes_restantes'];
                dico_correction["sommets_par_cliqs_avec_aretes"] = \
                    fct_aux.couverture_par_sommets(
                        sommets_matE = list(dico_correction["etats_sommets"].keys()),
                        C = dico_correction["C"]);
            elif -1 not in dico_couverture['etats_sommets'].values() and \
                len(dico_correction['aretes_restantes']) == 0:
                # Clean covering: nothing to correct, nothing leftover.
                dico_correction["sommets_par_cliqs_avec_aretes"] = \
                    fct_aux.couverture_par_sommets(
                        sommets_matE = list(dico_correction["etats_sommets"].keys()),
                        C = dico_correction["C"]);

            # --- Phase 3: compute DH (Hamming) and DC (correction) distances. ---
            print("3")
            aretes_cliques = fct_aux.determiner_aretes_cliques(dico_correction["C"]);
            aretes_matE_k_alpha = fct_aux.liste_arcs(matE_k_alpha);
            aretes_matE_LG = fct_aux.liste_arcs(matE_LG);
            print("4")
            dc, set_dc = calculer_distance_hamming(
                            aretes_cliques,
                            aretes_matE_k_alpha);
            dh, set_dh = calculer_distance_hamming(
                            aretes_cliques,
                            aretes_matE_LG);
            X1 = abs(dc - k_erreur);
            # -1 flags an undefined correlation (division by zero).
            correl_dc_dh = abs(dh - X1)/(k_erreur + dc) if k_erreur+dc != 0 else -1;
            print("k={}, ".format(k_erreur)\
                  +"moy_dc={}, ".format(dc) \
                  +"moy_dh={}, ".format(dh) \
                  +"moy_dc-k={}, ".format(X1) \
                  +"moy_dc+k={}, ".format( k_erreur+dc ) \
                  +"corr={} ".format( round(correl_dc_dh,2) )
                  )

            # Save this (k, alpha) run into the distribution file.
            sauvegarder_execution_k_alpha(path_distr, num_graph, k_erreur,
                                          alpha, dc, dh, len(aretes_matE_LG),
                                          correl_dc_dh, start)
            print("5")

            # Neighbours of the endpoints of deleted/added edges and their states.
            dico_voisins_etats_supp,\
            dico_voisins_etats_ajout = \
                determiner_voisins_etats_aretes_modifiees(
                    matE_k_alpha,
                    dico_deleted_add_edges,
                    dico_couverture["etats_sommets"]
                    )
            print("6")

            # Vertices at -1 that are neighbours of the modified edges' endpoints.
            sommets_1_vois, \
            sommets_1_non_vois = determiner_sommets_1_avec_conditions(
                                    dico_correction["etats_sommets"],
                                    matE_k_alpha,
                                    dico_correction["sommets_par_cliqs_avec_aretes"],
                                    dico_deleted_add_edges
                                    );
            print("7")

            # Save the parameters of this execution.
            num_graph_alpha = num_graph +"_"+str(alpha)
            list_returns.append(sauvegarder_parametres_execution(
                                    num_graph_alpha,
                                    k_erreur,
                                    alpha,
                                    dc, dh,
                                    time.time() - start,
                                    len(aretes_matE_LG),
                                    dico_deleted_add_edges,
                                    dico_correction,
                                    dico_voisins_etats_supp,
                                    dico_voisins_etats_ajout,
                                    sommets_1_vois,
                                    sommets_1_non_vois))
            print("8")
        except Exception as e :
            # Best-effort loop: log the failing (graph, alpha) and continue.
            # NOTE(review): dico_parametres_new["coef_fct_cout"][2] indexes a
            # tuple documented elsewhere as (exposant, facteur_multiplicatif),
            # i.e. length 2 -- index 2 would raise here; confirm the tuple's
            # actual arity.
            print("####### EmptyDataError {}".format(dico_parametres_new["coef_fct_cout"][2]) \
                  +" {},".format(dico_parametres_new["mode_select_noeuds_1"]) \
                  +" seuil={}".format(dico_parametres_new["correl_seuil"]) \
                  +" p={}".format(p_correl) \
                  +" k={}".format(k_erreur) \
                  +" num_graph={}".format(num_graph) \
                  +" alpha={}".format(alpha) \
                  +" e={}".format(e)
                  )
    return list_returns;
    pass
###############################################################################
#      simulation of graphs with k=1 deleted edges ---> end
###############################################################################
def algo_decomposition_en_cliques(matE_k_alpha, dico_sommet_arete,
                                  seuil_U=10, epsilon=0.75,
                                  chemin_datasets="", chemin_matrices="",
                                  ascendant_1=True, simulation=True,
                                  dico_proba_cases=None,
                                  dico_parametres_new=None):
    """
    Compute a clique decomposition of the vertices of the graph matE_k_alpha.

    Invariants enforced by the covering:
      * a vertex belongs to at most 2 cliques;
      * an edge belongs to exactly 1 clique;
      * each vertex carries one of 5 states {0, 1, 2, 3, -1}.

    Parameters
    ----------
    matE_k_alpha : pandas.DataFrame
        Adjacency matrix of the (possibly perturbed) line graph.
    dico_sommet_arete : dict
        Vertex -> dual edge mapping, forwarded to the update arguments.
    seuil_U, epsilon, chemin_datasets, chemin_matrices, ascendant_1, simulation :
        Forwarded to the covering machinery through ``arguments_MAJ``.
    dico_proba_cases, dico_parametres_new : dict or None
        Optional dictionaries.  BUGFIX: these previously used ``dict()`` as
        default values -- a mutable default is shared across calls and any
        mutation would leak between invocations.  ``None`` sentinels replace
        them; behaviour is otherwise identical.  Neither dict is read in this
        function body; they are kept for interface compatibility with callers
        (e.g. simulation_p_correl_k passes both explicitly).

    Returns
    -------
    dict
        Covering result with keys "C", "etats_sommets", "aretes_restantes",
        "ordre_noeuds_traites", "sommets_par_cliqs" (empty covering when the
        graph is a "double" graph, i.e. admits 2 coverings), or whatever
        couverture_en_cliques returns otherwise.
    """
    # Mutable-default-argument guard (see docstring).
    dico_proba_cases = dict() if dico_proba_cases is None else dico_proba_cases
    dico_parametres_new = (dict() if dico_parametres_new is None
                           else dico_parametres_new)

    # Initialise the state (cliq) and ver maps and the clique set C.
    etats_sommets = dict()
    dico_ver = dict()
    for sommet in matE_k_alpha.columns:  # number of vertices in the graph
        etats_sommets[sommet] = 0
        dico_ver[sommet] = 0

    # Dataset fusion (disabled: fct_aux.liste_grandeurs(chemin_datasets)).
    liste_grandeurs = []
    arguments_MAJ = {
        "dico_sommet_arete": dico_sommet_arete,
        "df_fusion": dict(),
        "seuil_U": seuil_U,
        "epsilon": epsilon,
        "chemin_dataset": chemin_datasets,
        "simulation": simulation,
        "grandeurs": liste_grandeurs
        }

    # Copy E0 <- Ec.
    aretes_matE_k_alpha = fct_aux.liste_arcs(matE_k_alpha)
    dico_gamma_noeud = fct_aux.gamma_noeud(
                            matE_k_alpha,
                            aretes_matE_k_alpha)  # {"2":[3,{"1","3","4"}],....}

    if is_isomorphe_graphe_double(aretes_matE_k_alpha):
        # A "double" graph is a graph with 2 coverings; it must be handled
        # with Verif_correl or an ORACLE, so return an empty covering.
        return {
            "C": list(),
            "etats_sommets": etats_sommets,
            "aretes_restantes": aretes_matE_k_alpha,
            "ordre_noeuds_traites": list(),
            "sommets_par_cliqs": dict()
            }
    else:
        dico_couverture = couverture_en_cliques(etats_sommets,
                                                dico_gamma_noeud,
                                                aretes_matE_k_alpha,
                                                matE_k_alpha.copy(),
                                                dico_ver,
                                                arguments_MAJ)
        return dico_couverture
def simulation_graphe_cellule(args):
    """
    Correct a "cell" graph and log the correction metrics.

    Keys of `args` read here: "k", "path_save", "mode_select_noeuds_1",
    "M_G_nn" (adjacency DataFrame), "aretes_G_nn_k" (edge list),
    "dico_proba_cases", "dico_sommets_row_col" (vertex -> "row_col" label,
    presumably -- confirm against the caller), "debug".

    Results are appended to
    <path_save><mode>_cellule/distribution_moyDistLine_G_k.txt and a debug
    dataframe is periodically flushed via simu50.save_df.
    """
    # Column headers of the tracking / debug dataframe.
    headers_df = ["G_cpt", "nbre_aretes_G_k", "nbre_aretes_LG", \
                  "dist_line", "nbre_aretes_diff_G_k_LG", \
                  "aretes_diff_G_k_LG", "orientation_aretes_corrigees","count_cliques",\
                  "C", "len(C)", "som_cout_min",\
                  "noeuds_corriges", "ordre_noeuds_traites",\
                  "min_DL", "mean_DL", "max_DL", "ecart_type",\
                  "max_cout", "max_permutation",\
                  "dico_som_min_permutations"]
    df_debug = pd.DataFrame(columns=headers_df)
    G_cpt = "G_" + str(args["k"])

    # Create the directory holding the distribution file.
    path_distr = Path(args["path_save"] + args["mode_select_noeuds_1"]
                      + "_cellule/")
    path_distr.mkdir(parents=True, exist_ok=True)
    path_save = args["path_save"] + args["mode_select_noeuds_1"] + "_cellule/"

    try:
        ordre_noeuds_traites = []  # because liste_cliques = []
        cliques = []               # because the graph is a "iourte" graph
        dico_cliq = dict()
        for noeud in args["M_G_nn"].columns:
            dico_cliq[noeud] = -1
        dico_gamma_noeud = fct_aux.gamma_noeud(\
                            args["M_G_nn"], args["aretes_G_nn_k"])

        # Correction algorithm according to args["mode_select_noeuds_1"].
        dico_permutations = dict()
        dico_permutations = decouvClique.solution_methode_nodes_1(dico_gamma_noeud,\
                                cliques, args["aretes_G_nn_k"], ordre_noeuds_traites, \
                                dico_cliq, args["dico_proba_cases"], args)

        # BEGIN: select the node permutation with the smallest Hamming distance.
        dico_sol = dict()
        dico_sol = best_permutation_cellule(dico_permutations, args["M_G_nn"])
        # END: selection of the best permutation.

        # Compare aretes_matE_G_k with aretes_LG (as a percentage).
        cpt_aretes_G_k_notIn_LG = 0
        cpt_aretes_G_k_notIn_LG = comparaison_aretes_G_k_LG(\
                                    args["aretes_G_nn_k"],
                                    dico_sol["aretes_LG"])

        # Append to a file that can be read while it is still being written.
        f = open(path_save + "distribution_moyDistLine_G_k.txt", "a")
        f.write(G_cpt+";"+str(args["k"])+";"+str(dico_sol["dist_line"])+";"\
                +str(len(args["aretes_G_nn_k"]))+";"+str(cpt_aretes_G_k_notIn_LG)+"\n")
        f.close()

        #----------- debugging / logging -------------
        # Group permutations by values[6].
        dico_som_min_permutations = dict()
        for l_noeuds_1, values in dico_permutations.items():
            if values[6] not in dico_som_min_permutations.keys():
                dico_som_min_permutations[values[6]] = [l_noeuds_1]
            else:
                dico_som_min_permutations[values[6]].append(l_noeuds_1)

        # How many cliques of size 4, 3, 2 and 1.
        count_cliques = Counter([len(C) for C in dico_sol["C"]])

        # How the edges were added/removed
        # (vertically / diagonally / horizontally).
        notes_aretes_diff = []
        orientation_aretes_corrigees = ""
        # Only the first 150 differing edges are scored.
        for arete_diff in list(dico_sol["aretes_diff_matE_G_k_LG"])[:150]:
            sommet_gauche = arete_diff[0]
            sommet_droit = arete_diff[1]
            print("sommet_gauche={}, sommet_droit={}".format(
                    sommet_gauche, sommet_droit))
            # NOTE(review): when a vertex is found in dico_sommets_row_col it
            # is converted to int here, but the `som = ...` line below calls
            # .split("_") on it, which raises AttributeError on an int (the
            # error is swallowed by the outer except).  Also the left vertex
            # takes split("_")[0] while the right one takes split("_")[1] --
            # confirm both against the intended "row_col" scoring.
            if arete_diff[0] in args["dico_sommets_row_col"].keys():
                sommet_gauche = int(
                    args["dico_sommets_row_col"][arete_diff[0]].split("_")[0])
            if arete_diff[1] in args["dico_sommets_row_col"].keys():
                sommet_droit = int(
                    args["dico_sommets_row_col"][arete_diff[1]].split("_")[1])
            som = 0
            som = int(sommet_gauche.split("_")[0])+int(sommet_gauche.split("_")[1])+\
                  int(sommet_droit.split("_")[0])+int(sommet_droit.split("_")[1])
            notes_aretes_diff.append(som)
        # Majority vote on the parity of the scores.
        if countEven(notes_aretes_diff) > countOdd(notes_aretes_diff):
            orientation_aretes_corrigees = "PAIR"
        elif countEven(notes_aretes_diff) < countOdd(notes_aretes_diff):
            orientation_aretes_corrigees = "IMPAIR"
        else:
            orientation_aretes_corrigees = "NONE"

        # One debug row per corrected graph; column order follows headers_df.
        df_debug.loc[len(df_debug.index)] = [\
                     G_cpt, len(args["aretes_G_nn_k"]),\
                     dico_sol["nbre_aretes_LG"], dico_sol["dist_line"],\
                     dico_sol["nbre_aretes_diff_matE_G_k_LG"],\
                     dico_sol["aretes_diff_matE_G_k_LG"],\
                     orientation_aretes_corrigees,\
                     count_cliques,\
                     dico_sol["C"], len(dico_sol["C"]), dico_sol["som_cout_min"],\
                     dico_sol["noeuds_corriges"], dico_sol["ordre_noeuds_traites"],\
                     dico_sol["min_line"], dico_sol["mean_line"],\
                     dico_sol["max_line"], dico_sol["ecart_type"],\
                     dico_sol["max_cout"], dico_sol["max_permutation"],\
                     dico_som_min_permutations]
        # Flush the debug dataframe every 100 graphs (keyed on args["k"]).
        if args["k"] % 100 == 0:
            simu50.save_df(df_debug, path_save, args["k"], headers_df)
            df_debug = pd.DataFrame(columns=headers_df)
            print("save {} fois".format(args["k"]))

        if args["debug"]:
            # Draw the corrected line graph for visual inspection.
            edges_C = aretes_C(dico_sol["C"])
            #mylabels = get_labels()
            print("C={}, \n edges_C={}, \n aretes_G_nn_k={}, \n dico_sommets_row_col={}"\
                  .format(dico_sol["C"], edges_C, args["aretes_G_nn_k"], \
                          args["dico_sommets_row_col"]))
            G = nx.Graph()
#            G.add_edges_from(edges_C);
            G.add_edges_from(dico_sol["aretes_LG"])
            nx.draw(G, node_size=500, with_labels=True)
            plt.savefig(path_save + "Graph_" + str(args["k"]) + ".png",
                        format="PNG")
            plt.clf()
        #----------- debugging / logging -------------
    except Exception as e:
        # Any failure produces an "error" debug row which is saved immediately.
        print("####### EmptyDataError ", G_cpt, ": e = ", e, " ####### ")
        df_debug.loc[len(df_debug.index)] = [G_cpt, len(args["aretes_G_nn_k"]), \
                         "error", "error", "error", "error", "error" ,\
                         "error", "error", "error", "error", "error",\
                         "error", "error", "error", "error","error", \
                         "error", "error", "error"]
        simu50.save_df(df_debug, path_save, args["k"], headers_df)