def main(graph_name):
    """Simple infection (SI-like) model of idea adoption on the hospital shift network.

    Reads the shift graph from ``graph_name`` (GML), sweeps the
    (prob_infection, prob_Immune) parameter grid (here pinned to a single
    pair via the *_min/*_max/delta settings), runs ``Niter`` stochastic
    realizations per pair, writes the averaged adoption curve, and compares
    it against the empirical adopter curve.

    NOTE: Python 2 code (print statements, ``print >> file``), NetworkX 1.x
    ``G.node[n]`` attribute API.  Relies on module-level helpers/imports:
    nx, random, numpy, look_for_T3_weekends,
    compare_real_evol_vs_simus_to_be_called, calculate_envelope_set_curves,
    histograma_gral_negv_posit, histograma_bines_gral.
    """

    G = nx.read_gml(graph_name)

    for_testing_fixed_set = "YES"  # when YES, fixed values param, to get all statistics on final distances etc
    # change the range for the parameters accordingly

    envelopes = "YES"

    Niter = 1000  # 100 iter seems to be enough (no big diff. with respect to 1000it)

    percent_envelope = 95.

    list_id_weekends_T3 = look_for_T3_weekends(
        G)  # T3 doesnt share fellows in the weekend (but they are the exception)

    Nbins = 20  # for the histogram of sum of distances

    cutting_day = 175  # i use this only for the filenames

    all_team = "NO"  # as adopters or not

    dir_real_data = '../Results/'
    dir = "../Results/weight_shifts/infection/"

    delta_end = 3.  # >= than + or - dr difference at the end of the evolution (NO realization ends up closer than this!!!! if 2, i get and empty list!!!)

    if for_testing_fixed_set == "NO":
        # the landscape file only makes sense when actually sweeping the parameter space
        output_file3 = "../Results/weight_shifts/Landscape_parameters_infection_" + str(
            Niter) + "iter.dat"
        file3 = open(output_file3, 'wt')
        file3.close()

    ######################################################################################
    # I read the file of the actual evolution of the idea spreading in the hospital:   ##
    ######################################################################################
    if all_team == "YES":
        print "remember that now i use the file of adopters without fellows\n../Results/Actual_evolution_adopters_NO_fellows_only_attendings.dat"
        exit()
    else:
        filename_actual_evol = "../Results/Actual_evolution_adopters_NO_fellows_only_attendings.dat"

    file1 = open(
        filename_actual_evol, 'r'
    )  ## i read the file: list_dates_and_names_current_adopters.txt (created with: extract_real_evolution_number_adopters.py)
    list_lines_file = file1.readlines()

    # empirical number of adopters per day, in file order
    list_actual_evol = []
    for line in list_lines_file:  # [1:]:   # i exclude the first row
        num_adopters = float(line.split(" ")[1])
        list_actual_evol.append(num_adopters)

    ##################################################################

    #../Results/weight_shifts/infection/Average_time_evolution_Infection_training_p0.8_Immune0.3_1000iter_2012_avg_ic_day125.dat
    # these values are the optimum fit over the 152 days
    prob_min = 0.1
    prob_max = 0.101
    delta_prob = 0.1

    prob_Immune_min = 0.00
    prob_Immune_max = 0.001
    delta_prob_Immune = 0.1

    dict_filenames_tot_distance = {
    }  # i will save the filename as key and the tot distance from that curve to the original one

    prob_Immune = prob_Immune_min
    while prob_Immune <= prob_Immune_max:
        print "prom Immune:", prob_Immune

        prob_infection = prob_min
        while prob_infection <= prob_max:
            print " p:", prob_infection

            if for_testing_fixed_set == "YES":
                output_file2 = dir + "Average_time_evolution_Infection_train_test_p" + str(
                    prob_infection) + "_" + "Immune" + str(
                        prob_Immune) + "_" + str(Niter) + "iter_2012.dat"
            else:
                output_file2 = dir + "Average_time_evolution_Infection_p" + str(
                    prob_infection) + "_" + "Immune" + str(
                        prob_Immune) + "_" + str(Niter) + "iter_2012.dat"
            file2 = open(output_file2, 'wt')
            file2.close()

            # list_final_I_values_fixed_p=[]   # i dont care about the final values right now, but about the whole time evol
            list_lists_t_evolutions = []

            list_dist_fixed_parameters = []
            list_abs_dist_at_ending_point_fixed_parameters = []
            list_dist_at_ending_point_fixed_parameters = []
            list_final_num_infected = []
            # list_abs_dist_at_cutting_day=[]

            for iter in range(Niter):

                # print "     iter:", iter

                # NOTE: comment this block out when sweeping the WHOLE parameter space
                # file_name_indiv_evol=output_file2.strip("Average_").split('.dat')[0]+"_indiv_iter"+str(iter)+".dat"
                # file4 = open(file_name_indiv_evol,'wt')
                # file4.close()
                ##########################################

                ########### set I.C.
                list_I = []  # list infected doctors
                max_order = 0
                for n in G.nodes():
                    G.node[n]["status"] = "S"  # all nodes are Susceptible
                    if G.node[n]['type'] == "shift":
                        if G.node[n]['order'] > max_order:
                            max_order = G.node[n][
                                'order']  # to get the last shift-order for the time loop
                    else:
                        # the two seed doctors start infected
                        if G.node[n]['label'] == "Wunderink" or G.node[n][
                                "label"] == "Weiss":
                            G.node[n]["status"] = "I"
                            list_I.append(G.node[n]['label'])

                list_single_t_evolution = []
                list_single_t_evolution.append(
                    2.0)  # I always start with TWO infected doctors!!

                for n in G.nodes(
                ):  # i make some DOCTORs IMMUNE (anyone except Weiss and Wunderink)
                    if (G.node[n]['type'] == "A") or (G.node[n]['type'] == "F"):
                        if G.node[n]['label'] != "Wunderink" and G.node[n][
                                "label"] != "Weiss":
                            rand = random.random()
                            if rand < prob_Immune:
                                G.node[n]["status"] = "Immune"

                ################# the dynamics starts:
                t = 1
                while t <= max_order:  # loop over shifts, in order
                    for n in G.nodes():
                        if G.node[n]['type'] == "shift" and G.node[n][
                                'order'] == t:
                            shift_lenght = int(G.node[n]['shift_lenght'])
                            if shift_lenght == 2 and n not in list_id_weekends_T3:
                                shift_lenght = 1  # because during weekends, the fellow does rounds one day with Att1 and the other day with Att2. (weekend shifts for T3 are two day long, with no sharing fellows)
                                # print "one-day weekend", G.node[n]['label'], G.node[n]['shift_lenght']

                            flag_possible_infection = 0
                            for doctor in G.neighbors(
                                    n
                            ):  # first i check if any doctor is infected in this shift
                                if G.node[doctor]["status"] == "I":
                                    flag_possible_infection = 1

                            if flag_possible_infection:
                                for doctor in G.neighbors(
                                        n
                                ):  # then the doctors in that shift, gets infected with prob_infection
                                    for i in range(
                                            shift_lenght
                                    ):  # i repeat the infection process several times, to acount for shift lenght
                                        if G.node[doctor]["status"] == "S":
                                            rand = random.random()
                                            if rand < prob_infection:
                                                G.node[doctor]["status"] = "I"
                                                if G.node[doctor][
                                                        "type"] == "A":  # fellows participate in the dynamics, but i only consider the attendings as real adopters
                                                    list_I.append(
                                                        G.node[doctor]
                                                        ["label"])

                    # if for_testing_fixed_set=="YES":
                    #     if t==cutting_day:
                    #         list_abs_dist_at_cutting_day.append(abs(float(list_actual_evol[-1])-float(len(list_I))))
                    #         print abs(float(list_actual_evol[-1])-float(len(list_I))), float(list_actual_evol[t]),float(len(list_I))

                    list_single_t_evolution.append(float(len(list_I)))
                    t += 1
                ######## end t loop

                # NOTE: comment this block out when sweeping the WHOLE parameter space
                # file4 = open(file_name_indiv_evol,'at')
                # for i in range(len(list_single_t_evolution)):  # time step by time step
                #     print >> file4, i, list_single_t_evolution[i], prob_infection, prob_Immune
                # file4.close()
                ########################################################

                list_lists_t_evolutions.append(list_single_t_evolution)

                list_dist_fixed_parameters.append(
                    compare_real_evol_vs_simus_to_be_called.compare_two_curves(
                        list_actual_evol, list_single_t_evolution))

                list_abs_dist_at_ending_point_fixed_parameters.append(
                    abs(list_single_t_evolution[-1] - list_actual_evol[-1])
                )  # i save the distance at the ending point between the current simu and actual evol

                list_dist_at_ending_point_fixed_parameters.append(
                    list_single_t_evolution[-1] - list_actual_evol[-1]
                )  # i save the distance at the ending point between the current simu and actual evol

                list_final_num_infected.append(list_single_t_evolution[-1])
            ######## end loop Niter

            # [mean curve-to-curve dist, its SD, mean abs ending-point dist]
            list_pair_dist_std_delta_end = []

            list_pair_dist_std_delta_end.append(
                numpy.mean(list_dist_fixed_parameters)
            )  # average dist between the curves over Niter
            list_pair_dist_std_delta_end.append(
                numpy.std(list_dist_fixed_parameters))

            list_pair_dist_std_delta_end.append(
                numpy.mean(list_abs_dist_at_ending_point_fixed_parameters))

            if for_testing_fixed_set == "NO":  # i print out the landscape
                file3 = open(output_file3, 'at')
                print >> file3, prob_infection, prob_Immune, numpy.mean(
                    list_abs_dist_at_ending_point_fixed_parameters
                ), numpy.mean(list_dist_fixed_parameters), numpy.mean(
                    list_final_num_infected), numpy.std(
                        list_final_num_infected)
                file3.close()

            if (numpy.mean(list_abs_dist_at_ending_point_fixed_parameters)
                ) <= delta_end:  # i only consider situations close enough at the ending point
                dict_filenames_tot_distance[
                    output_file2] = list_pair_dist_std_delta_end

            # write the averaged time evolution, time step by time step
            file2 = open(output_file2, 'at')
            for s in range(len(list_single_t_evolution)):
                list_fixed_t = []
                for iter in range(Niter):
                    list_fixed_t.append(list_lists_t_evolutions[iter][s])
                print >> file2, s, numpy.mean(list_fixed_t)
            file2.close()

            print "printed out: ", output_file2
            # raw_input()

            if envelopes == "YES":
                calculate_envelope_set_curves.calculate_envelope(
                    list_lists_t_evolutions, percent_envelope, "Infection",
                    [prob_infection, prob_Immune])

            if for_testing_fixed_set == "YES":
                num_valid_endings = 0.
                for item in list_abs_dist_at_ending_point_fixed_parameters:
                    if item <= delta_end:  # i count how many realizations i get close enough at the ending point
                        num_valid_endings += 1.

                print "average distance of the optimum in the testing segment:", numpy.mean(
                    list_dist_fixed_parameters), numpy.std(
                        list_dist_fixed_parameters
                    ), list_dist_fixed_parameters, "\n"
                print "fraction of realizations that end within delta_doctor:", num_valid_endings / Niter, "mean ending dist:", numpy.mean(
                    list_dist_at_ending_point_fixed_parameters
                ), "SD final dist", numpy.std(
                    list_dist_at_ending_point_fixed_parameters
                ), list_dist_at_ending_point_fixed_parameters, "\n"

                histogram_filename = "../Results/weight_shifts/histogr_raw_distances_ending_infection_p" + str(
                    prob_infection) + "_" + "Immune" + str(
                        prob_Immune) + "_" + str(Niter) + "iter_day" + str(
                            cutting_day) + ".dat"
                histograma_gral_negv_posit.histograma(
                    list_dist_at_ending_point_fixed_parameters,
                    histogram_filename)

                histogram_filename2 = "../Results/weight_shifts/histogr_sum_dist_traject_infection_p" + str(
                    prob_infection) + "_" + "Immune" + str(
                        prob_Immune) + "_" + str(Niter) + "iter_day" + str(
                            cutting_day) + ".dat"
                histograma_bines_gral.histograma_bins(
                    list_dist_fixed_parameters, Nbins, histogram_filename2)

                output_file10 = "../Results/weight_shifts/Summary_results_training_segment_infection_p" + str(
                    prob_infection) + "_" + "Immune" + str(
                        prob_Immune) + "_" + str(Niter) + "iter_day" + str(
                            cutting_day) + ".dat"
                file10 = open(output_file10, 'wt')

                print >> file10, "Summary results from train-testing infection with", Niter, "iter, and with values for the parameters: prob_inf ", prob_infection, " prob immune: ", prob_Immune, "\n"
                print >> file10, "average distance of the optimum in the testing segment:", numpy.mean(
                    list_dist_fixed_parameters), numpy.std(
                        list_dist_fixed_parameters
                    ), list_dist_fixed_parameters, "\n"
                print >> file10, "fraction of realizations that end within delta_doctor:", num_valid_endings / Niter, "mean ending dist:", numpy.mean(
                    list_dist_at_ending_point_fixed_parameters
                ), "SD final dist", numpy.std(
                    list_dist_at_ending_point_fixed_parameters
                ), list_dist_at_ending_point_fixed_parameters, "\n"
                print >> file10, "written optimum train_test evolution file:", output_file2
                print >> file10, "written histogram file: ", histogram_filename
                file10.close()

                print "written Summary file: ", output_file10
                print "written histogram file: ", histogram_filename
                print "written histogram file: ", histogram_filename2

            prob_infection += delta_prob
        ######## end loop over prob_infection
        prob_Immune += delta_prob_Immune
    ######## end loop over prob_Immune

    if for_testing_fixed_set == "NO":  # only if i am exploring the whole landscape, i need to call this function, otherwise, i already know the optimum
        compare_real_evol_vs_simus_to_be_called.pick_minimum_same_end(
            dict_filenames_tot_distance, "Infection_weight", all_team, Niter,
            None)  # last argument doesnt apply (cutting day)

    if for_testing_fixed_set == "NO":
        print "written landscape file:", output_file3
def main(graph_name):
    """Infection-with-memory (dose/threshold) model of adoption on the shift network.

    Unlike the plain infection model, each contact with an infected doctor
    adds ``dose`` to the susceptible doctor's ``infec_value``; the doctor
    becomes infected once that accumulated value reaches their random
    ``personal_threshold``.  The simulation starts at ``cutting_day`` from
    the empirically inferred set of adopters (real initial conditions) and
    runs only the testing segment, comparing against
    ``list_actual_evol[cutting_day:]``.

    NOTE: Python 2 code (print statements, ``print >> file``), NetworkX 1.x
    ``G.node[n]`` API.  Relies on module-level helpers/imports: nx, random,
    numpy, look_for_T3_weekends, compare_real_evol_vs_simus_to_be_called,
    calculate_envelope_set_curves, histograma_gral_negv_posit,
    histograma_bines_gral.
    """

    G = nx.read_gml(graph_name)

    cutting_day = 243  # i use this only for the filenames

    for_testing_fixed_set = "YES"  # when YES, fixed values param, to get all statistics on final distances etc
    # change the range for the parameters accordingly

    envelopes = "YES"

    Niter = 1000  # 100 iter seems to be enough (no big diff. with respect to 1000it)

    percent_envelope = 95.

    list_id_weekends_T3 = look_for_T3_weekends(
        G)  # T3 doesnt share fellows in the weekend (but they are the exception)

    Nbins = 1000  # for the histogram of sum of distances

    all_team = "NO"  # as adopters or not

    dir_real_data = '../Results/'
    dir = "../Results/weight_shifts/infection/"

    delta_end = 3.  # >= than + or - dr difference at the end of the evolution (NO realization ends up closer than this!!!! if 2, i get and empty list!!!)

    ######################################################################################
    # I read the file of the actual evolution of the idea spreading in the hospital:   ##
    ######################################################################################
    filename_actual_evol = "../Data/Attendings_Orders_from_inference_list_adopters_day.dat"

    file1 = open(
        filename_actual_evol, 'r'
    )  ## i read the file: list_dates_and_names_current_adopters.txt (created with: extract_real_evolution_number_adopters.py)
    list_lines_file = file1.readlines()

    # day -> list of adopter labels on that day; plus the adopter-count curve
    dict_days_list_empirical_adopters = {}
    list_actual_evol = []
    for line in list_lines_file:  # [1:]:   # i exclude the first row
        day = int(line.split(" ")[0])
        num_adopters = float(line.split(" ")[1])
        list_actual_evol.append(num_adopters)

        list_current_adopters = []
        for element in line.split(
                " "
        )[2:]:  # i need to ignore the empty columns from the original datafile
            if element:
                if element != '\n':
                    list_current_adopters.append(element.strip('\n'))
        dict_days_list_empirical_adopters[day] = list_current_adopters

    # only the segment after the cutting day is compared against the simulation
    list_actual_evol_testing = list_actual_evol[cutting_day:]

    ##################################################################

    prob_min = 0.8
    prob_max = 0.801
    delta_prob = 0.1

    prob_Immune_min = 0.10
    prob_Immune_max = 0.101
    delta_prob_Immune = 0.1

    # threshold is not personal, and set randomly to a value (0,1)
    dose_min = 0.2  # of a single encounter with an infected (starting from zero doesnt make sense)
    dose_max = 0.201
    delta_dose = 0.101

    prob_Immune = prob_Immune_min
    while prob_Immune <= prob_Immune_max:
        print "prom Immune:", prob_Immune

        prob_infection = prob_min
        while prob_infection <= prob_max:
            print " p:", prob_infection

            dose = dose_min
            while dose <= dose_max:
                print " dose:", dose

                output_file2 = dir + "Average_time_evolution_Infection_memory_p" + str(
                    prob_infection) + "_Immune" + str(
                        prob_Immune
                    ) + "_FIXED_threshold_from_distrib_dose" + str(
                        dose) + "_" + str(Niter) + "iter_day" + str(
                            cutting_day) + "_A_F_inferred_middle_real_ic.dat"
                file2 = open(output_file2, 'wt')
                file2.close()

                # list_final_I_values_fixed_p=[]   # i dont care about the final values right now, but about the whole time evol
                list_lists_t_evolutions = []

                list_dist_fixed_parameters_testing_segment = []
                list_abs_dist_at_ending_point_fixed_parameters = []
                list_dist_at_ending_point_fixed_parameters = []
                list_final_num_infected = []
                list_abs_dist_point_by_point_indiv_simus_to_actual = []
                list_dist_point_by_point_indiv_simus_to_actual = []
                # list_abs_dist_at_cutting_day=[]

                for iter in range(Niter):

                    # print "     iter:", iter

                    ########### set I.C.
                    list_I = []  # list infected doctors
                    max_order = 0
                    for n in G.nodes():
                        G.node[n]["status"] = "S"  # all nodes are Susceptible
                        G.node[n][
                            "infec_value"] = 0.  # when this value goes over the infect_threshold, the dr is infected
                        G.node[n]["personal_threshold"] = random.random(
                        )  # for a dr to become infected
                        if G.node[n]['type'] == "shift":
                            if G.node[n]['order'] > max_order:
                                max_order = G.node[n][
                                    'order']  # to get the last shift-order for the time loop
                        else:
                            # seed the empirically known adopters at the cutting day
                            if G.node[n][
                                    'label'] in dict_days_list_empirical_adopters[
                                        cutting_day]:
                                G.node[n]["infec_value"] = G.node[n][
                                    "personal_threshold"] + 1.
                                G.node[n]["status"] = "I"
                                list_I.append(G.node[n]['label'])

                    list_single_t_evolution = []
                    old_num_adopters = len(
                        dict_days_list_empirical_adopters[cutting_day])
                    list_single_t_evolution.append(
                        old_num_adopters)  # i start from the empirical number of adopters at the cutting day

                    for n in G.nodes(
                    ):  # i make some DOCTORs IMMUNE (anyone except the empirical adopters)
                        if (G.node[n]['type'] == "A") or (G.node[n]['type']
                                                          == "F"):
                            if G.node[n][
                                    'label'] not in dict_days_list_empirical_adopters[
                                        cutting_day]:
                                rand = random.random()
                                if rand < prob_Immune:
                                    G.node[n]["status"] = "Immune"

                    ################# the dynamics starts:
                    shift_length = 5  # i know the first shift (order 0) is of length 5
                    t = cutting_day
                    while t <= max_order:  # loop over shifts, in order
                        for n in G.nodes():
                            if G.node[n]['type'] == "shift" and G.node[n][
                                    'order'] == t:
                                # NOTE(review): attribute is spelled 'shift_length' here but
                                # 'shift_lenght' elsewhere in this file — confirm against the GML
                                shift_length = int(G.node[n]['shift_length'])
                                if shift_length == 2 and n not in list_id_weekends_T3:
                                    shift_length = 1  # because during weekends, the fellow does rounds one day with Att1 and the other day with Att2. (weekend shifts for T3 are two day long, with no sharing fellows)

                                flag_possible_infection = 0
                                for doctor in G.neighbors(
                                        n
                                ):  # first i check if any doctor is infected in this shift
                                    if G.node[doctor]["status"] == "I":
                                        flag_possible_infection = 1

                                if flag_possible_infection:
                                    for doctor in G.neighbors(
                                            n
                                    ):  # then the doctors in that shift, gets infected with prob_infection
                                        for i in range(shift_length):
                                            if G.node[doctor]["status"] == "S":
                                                rand = random.random()
                                                if rand < prob_infection:  # with prob p the infection occurres
                                                    G.node[doctor][
                                                        "infec_value"] += dose  # and bumps the infection_value of that susceptible dr
                                                    if G.node[doctor][
                                                            "infec_value"] >= G.node[
                                                                doctor][
                                                                    "personal_threshold"]:  # the threshold for infection is personal
                                                        G.node[doctor][
                                                            "status"] = "I"
                                                        # if G.node[doctor]["type"]=="A":   # fellows participate in the dynamics, but i only consider the attendings as real adopters
                                                        list_I.append(
                                                            G.node[doctor]
                                                            ["label"])

                        new_num_adopters = len(list_I)

                        # i estimate that adoption happens in the middle of the shift,
                        # so the daily curve keeps the old count for the first days of
                        # the shift and the new count for the rest
                        if shift_length == 5:
                            if t + 5 < max_order:
                                list_single_t_evolution.append(
                                    old_num_adopters)
                            if t + 4 < max_order:
                                list_single_t_evolution.append(
                                    old_num_adopters)
                            if t + 3 < max_order:
                                list_single_t_evolution.append(
                                    new_num_adopters)
                            if t + 2 < max_order:
                                list_single_t_evolution.append(
                                    new_num_adopters)
                            if t + 1 < max_order:
                                list_single_t_evolution.append(
                                    new_num_adopters)
                            t += 5
                        elif shift_length == 4:
                            if t + 4 < max_order:
                                list_single_t_evolution.append(
                                    old_num_adopters)
                            if t + 3 < max_order:
                                list_single_t_evolution.append(
                                    old_num_adopters)
                            if t + 2 < max_order:
                                list_single_t_evolution.append(
                                    new_num_adopters)
                            if t + 1 < max_order:
                                list_single_t_evolution.append(
                                    new_num_adopters)
                            t += 4
                        elif shift_length == 3:
                            if t + 3 < max_order:
                                list_single_t_evolution.append(
                                    old_num_adopters)
                            if t + 2 < max_order:
                                list_single_t_evolution.append(
                                    new_num_adopters)
                            if t + 1 < max_order:
                                list_single_t_evolution.append(
                                    new_num_adopters)
                            t += 3
                        elif shift_length == 2:
                            if t + 2 < max_order:
                                list_single_t_evolution.append(
                                    old_num_adopters)
                            if t + 1 < max_order:
                                list_single_t_evolution.append(
                                    new_num_adopters)
                            t += 2
                        elif shift_length == 1:
                            if t + 1 < max_order:
                                list_single_t_evolution.append(
                                    new_num_adopters)
                            t += 1

                        old_num_adopters = new_num_adopters
                    ######## end t loop

                    list_lists_t_evolutions.append(list_single_t_evolution)

                    # i only run the testing segment
                    list_dist_fixed_parameters_testing_segment.append(
                        compare_real_evol_vs_simus_to_be_called.
                        compare_two_curves(list_actual_evol_testing,
                                           list_single_t_evolution))

                    list_abs_dist_at_ending_point_fixed_parameters.append(
                        abs(list_single_t_evolution[-1] -
                            list_actual_evol_testing[-1])
                    )  # i save the distance at the ending point between the current simu and actual evol

                    list_dist_at_ending_point_fixed_parameters.append(
                        list_single_t_evolution[-1] -
                        list_actual_evol_testing[-1]
                    )  # i save the distance at the ending point between the current simu and actual evol

                    list_final_num_infected.append(list_single_t_evolution[-1])

                    for index in range(len(list_single_t_evolution)):
                        list_abs_dist_point_by_point_indiv_simus_to_actual.append(
                            abs(list_single_t_evolution[index] -
                                list_actual_evol_testing[index]))
                        list_dist_point_by_point_indiv_simus_to_actual.append(
                            list_single_t_evolution[index] -
                            list_actual_evol_testing[index])
                ######## end loop Niter

                # write the averaged time evolution (x axis shifted back to calendar days)
                file2 = open(output_file2, 'at')
                for s in range(len(list_single_t_evolution)):
                    list_fixed_t = []
                    for iter in range(Niter):
                        list_fixed_t.append(list_lists_t_evolutions[iter][s])
                    print >> file2, s + cutting_day, numpy.mean(list_fixed_t)
                file2.close()

                print "printed out: ", output_file2
                # raw_input()

                if envelopes == "YES":
                    calculate_envelope_set_curves.calculate_envelope(
                        list_lists_t_evolutions, percent_envelope, "Infection",
                        [prob_infection, prob_Immune])

                num_valid_endings = 0.
                for item in list_abs_dist_at_ending_point_fixed_parameters:
                    if item <= delta_end:  # i count how many realizations i get close enough at the ending point
                        num_valid_endings += 1.

                print "average distance of the optimum in the testing segment:", numpy.mean(
                    list_dist_fixed_parameters_testing_segment), numpy.std(
                        list_dist_fixed_parameters_testing_segment
                    ), list_dist_fixed_parameters_testing_segment, "\n"
                print "fraction of realizations that end within delta_doctor:", num_valid_endings / Niter, "mean ending dist:", numpy.mean(
                    list_dist_at_ending_point_fixed_parameters
                ), "SD final dist", numpy.std(
                    list_dist_at_ending_point_fixed_parameters
                ), list_dist_at_ending_point_fixed_parameters, "\n"

                histogram_filename = "../Results/weight_shifts/histogr_raw_distances_ending_infection_memory_p" + str(
                    prob_infection) + "_Immune" + str(
                        prob_Immune) + "_threshold_from_distrib_dose" + str(
                            dose) + "_" + str(Niter) + "iter_day" + str(
                                cutting_day
                            ) + "_A_F_inferred_middle_real_ic.dat"
                histograma_gral_negv_posit.histograma(
                    list_dist_at_ending_point_fixed_parameters,
                    histogram_filename)

                # histogram_filename2="../Results/weight_shifts/histogr_sum_dist_traject_infection_memory_p"+str(prob_infection)+"_Immune"+str(prob_Immune)+"_threshold_from_distrib_dose"+str(dose)+"_"+str(Niter)+"iter_day"+str(cutting_day)+"_A_F_inferred_middle.dat"
                # histograma_bines_gral.histograma_bins(list_dist_fixed_parameters,Nbins,histogram_filename2)

                histogram_filename3 = "../Results/weight_shifts/histogr_sum_dist_testing_segment_infection_memory_p" + str(
                    prob_infection) + "_Immune" + str(
                        prob_Immune) + "_threshold_from_distrib_dose" + str(
                            dose) + "_" + str(Niter) + "iter_day" + str(
                                cutting_day
                            ) + "_A_F_inferred_middle_real_ic.dat"

                # print list_dist_fixed_parameters_testing_segment
                histograma_bines_gral.histograma_bins_zero(
                    list_dist_fixed_parameters_testing_segment, Nbins,
                    histogram_filename3)

                print min(list_dist_fixed_parameters_testing_segment), max(
                    list_dist_fixed_parameters_testing_segment)

                histogram_filename4 = "../Results/weight_shifts/histogr_abs_dist_point_by_point_infection_memory_p" + str(
                    prob_infection) + "_Immune" + str(
                        prob_Immune) + "_threshold_from_distrib_dose" + str(
                            dose) + "_" + str(Niter) + "iter_day" + str(
                                cutting_day
                            ) + "_A_F_inferred_middle_real_ic.dat"
                histograma_gral_negv_posit.histograma(
                    list_abs_dist_point_by_point_indiv_simus_to_actual,
                    histogram_filename4)

                histogram_filename5 = "../Results/weight_shifts/histogr_dist_point_by_point_infection_memory_p" + str(
                    prob_infection) + "_Immune" + str(
                        prob_Immune) + "_threshold_from_distrib_dose" + str(
                            dose) + "_" + str(Niter) + "iter_day" + str(
                                cutting_day
                            ) + "_A_F_inferred_middle_real_ic.dat"
                histograma_gral_negv_posit.histograma(
                    list_dist_point_by_point_indiv_simus_to_actual,
                    histogram_filename5)

                output_file10 = "../Results/weight_shifts/Summary_results_infection_memory_p" + str(
                    prob_infection) + "_Immune" + str(
                        prob_Immune) + "_threshold_from_distrib_dose" + str(
                            dose) + "_" + str(Niter) + "iter_day" + str(
                                cutting_day
                            ) + "_A_F_inferred_middle_real_ic.dat"
                file10 = open(output_file10, 'wt')

                print >> file10, "Summary results from best fit infection _memory with", Niter, "iter, and with values for the parameters: prob_inf ", prob_infection, " prob immune: ", prob_Immune, "\n"
                print >> file10, "average distance of the optimum in the testing segment:", numpy.mean(
                    list_dist_fixed_parameters_testing_segment), numpy.std(
                        list_dist_fixed_parameters_testing_segment
                    ), list_dist_fixed_parameters_testing_segment, "\n"
                print >> file10, "fraction of realizations that end within delta_doctor:", num_valid_endings / Niter, "mean ending dist:", numpy.mean(
                    list_dist_at_ending_point_fixed_parameters
                ), "SD final dist", numpy.std(
                    list_dist_at_ending_point_fixed_parameters
                ), list_dist_at_ending_point_fixed_parameters, "\n"
                print >> file10, "written optimum best fit evolution file:", output_file2
                print >> file10, "written histogram file: ", histogram_filename
                file10.close()

                print "written Summary file: ", output_file10

                dose += delta_dose
            ######## end loop over dose
            prob_infection += delta_prob
        ######## end loop over prob_infection
        prob_Immune += delta_prob_Immune
    ######## end loop over prob_Immune
def main(graph_name): G = nx.read_gml( graph_name ) # about the "order" in the shift nodes: not all orders exist, only every 5/2 days. thats why shifts have length that we use to weight the interactions accordingly list_id_weekends_T3 = look_for_T3_weekends( G ) # T3 doesnt share fellows in the weekend (but they are the exception) Niter = 1000 dir_real_data = '../Results/' time_window_ahead = 4 # number of days in which there will be no Adopters on call basic_intervention_start_day = 20 # and then plus minus a small random number random_start = "YES" # if no, all iter with same initial re-seeding (intervention) day num_reseeds = 1 # per intervention min_bump = 0.0 # for the doctors that are re-seeded max_bump = 1.0 #same scale as the status, and the adoption threshold delta_bump = 0.03 all_team = "NO" # as adopters or not ###################################################################################### # I read the file of the actual evolution of the idea spreading in the hospital: ## ###################################################################################### if all_team == "YES": print "remember that now i use the file of adopters without fellows\n../Results/Actual_evolution_adopters_NO_fellows_only_attendings.dat" exit() else: filename_actual_evol = "../Results/Actual_evolution_adopters_NO_fellows_only_attendings.dat" file1 = open( filename_actual_evol, 'r' ) ## i read the file: list_dates_and_names_current_adopters.txt (created with: extract_real_evolution_number_adopters.py) list_lines_file = file1.readlines() list_actual_evol = [] for line in list_lines_file: # [1:]: # i exclude the first row num_adopters = float(line.split(" ")[1]) list_actual_evol.append(num_adopters) ################################################################## # i use the best fit (over the 250-day curve): # from only Att counted as adopters: 
../Results/weight_shifts/persuasion/alpha0.10_damping0.00/Time_evol_Persuasion_alpha0.1_damping0.0_mutual0.5_threshold0.3_1000iter.dat THIS IS THE THIRD BEST SOLUTION, BUT ENDS UP CLOSER, SO I PREFER TO USE THIS, TO CHECK THE PERFORMANCE WHEN BUMP=0 alpha_F = 0.10 # alpha=0: nobody changes their mind alpha_A = alpha_F damping = 0.0 #its harder to go back from YES to NO again. =1 means no effect, =0.5 half the movement from Y->N than the other way around, =0 never go back from Y to N mutual_encouragement = 0.50 # when two Adopters meet, they convince each other even more threshold = 0.50 # larger than, to be an Adopte print "\n\nPersuasion process on network, with Niter:", Niter, "\n" dir = "../Results/weight_shifts/persuasion/" output_file2 = "../Results/weight_shifts/Final_distance_vs_bump_alpha" + str( alpha_F) + "_damping" + str(damping) + "_mutual_encourg" + str( mutual_encouragement ) + "_threshold" + str(threshold) + "_num_reseed_per_shift" + str( num_reseeds) + "_" + str(Niter) + "iter_intervention_start" + str( basic_intervention_start_day) + "_window" + str( time_window_ahead) + ".dat" file2 = open(output_file2, 'wt') file2.close() bump = min_bump while bump <= max_bump: print "bump:", bump output_file = dir + "Time_evolutions_Persuasion_alpha" + str( alpha_F) + "_damping" + str(damping) + "_mutual_encourg" + str( mutual_encouragement ) + "_threshold" + str(threshold) + "_num_reseed_per_shift" + str( num_reseeds) + "_" + str( Niter) + "iter_intervention_start" + str( basic_intervention_start_day) + "_window" + str( time_window_ahead) + "_bump" + str(bump) + ".dat" file = open(output_file, 'wt') file.close() list_distances_150day = [] list_distances_150day_Att_fellow = [] list_distances_200day = [] list_distances_200day_Att_fellow = [] list_ending_distances = [] list_ending_distances_Att_fellow = [] tot_number_interventions = 0 tot_number_interventions_Att = 0 tot_number_successful_interventions = 0 tot_number_successful_interventions_Att = 0 
num_successfully_bumped = 0 num_successfully_bumped_Att = 0 time_evol_number_adopters_ITER = [ ] # list of complete single realizations of the dynamics for iter in range(Niter): print " iter: ", iter if random_start == "YES": # i pick the first intervention day sign = random.random() if sign < 0.5: sign = -1. else: sign = 1. delta_day = random.random() * 5. start_intervention = int( basic_intervention_start_day + sign * delta_day ) # i let the system evolve freely for a some time before i start re-seeding else: start_intervention = basic_intervention_start_day list_t = [] time_evol_number_adopters = [ ] # for a single realization of the dynamics ONLY ATTENDING ADOPTERS time_evol_number_tot_adopters = [ ] # Attendings and fellows as adopters num_adopters, seed_shift, max_shift = set_ic( G, threshold ) # i establish who is Adopter and NonAdopter initially, and count how many shifts i have total time_evol_number_adopters.append(float(num_adopters)) list_t.append(0) time_evol_number_tot_adopters.append( float(num_adopters)) # Attendings and fellows as adopters next_intervention_day = start_intervention # the dynamics starts: t = int(seed_shift) + 1 while t <= max_shift: # loop over shifts, in chronological order (the order is the day index since seeding_day) old_num_Att_Adopters = 0 old_num_Adopters = 0 #count number of adopters before an intervention for n in G.nodes(): try: if G.node[n]["status"] == "Adopter": old_num_Adopters += 1. if G.node[n]["type"] == "A": old_num_Att_Adopters += 1. 
except KeyError: pass #to ignore the shift-nodes flag_future = look_ahead( G, time_window_ahead, t ) # i evaluate the next few days, to see if any adopter will be on call: if not, i re-seed some more if flag_future == "YES" and t >= start_intervention and t == next_intervention_day: flag_Att = intervention( G, t, bump, threshold, num_reseeds) # num of Att intervened next_intervention_day += time_window_ahead # i dont reseed everyday of the look_ahead window, just the first day of it tot_number_interventions += 1 tot_number_interventions_Att += flag_Att num_Att_Adopters = 0 num_Adopters = 0 for n in G.nodes(): try: if G.node[n][ "status"] == "Adopter": #first i check if any doctor is an adopter in this shift num_Adopters += 1. if G.node[n][ "type"] == "A": #first i check if any doctor is an adopter in this shift num_Att_Adopters += 1. except KeyError: pass if old_num_Adopters < num_Adopters: tot_number_successful_interventions += 1 if num_Adopters - old_num_Adopters > num_reseeds: print "how did i bumped more doctors than", num_reseed, "?? ", t, ": ", num_Adopters - old_num_Adopters if old_num_Att_Adopters < num_Att_Adopters: tot_number_successful_interventions_Att += 1 list_t.append(t) for n in G.nodes(): if G.node[n]['type'] == "shift" and G.node[n][ 'order'] == t: # i look for the shift corresponding to that time step shift_lenght = int(G.node[n]['shift_lenght']) if shift_lenght == 2 and n not in list_id_weekends_T3: shift_lenght = 1 # because during weekends, the fellow does rounds one day with Att1 and the other day with Att2. 
(weekend shifts for T3 are two day long, with no sharing fellows) flag_possible_persuasion = 0 for doctor in G.neighbors(n): if G.node[doctor][ "status"] == "Adopter": #first i check if any doctor is an adopter in this shift flag_possible_persuasion = 1 break if flag_possible_persuasion == 1: list_doctors = [] for doctor in G.neighbors( n): # for all drs in that shift list_doctors.append(doctor) pairs = itertools.combinations( list_doctors, 2) # cos the shift can be 2 but also 3 doctors for pair in pairs: doctor1 = pair[0] doctor2 = pair[1] if G.node[doctor1]['status'] != G.node[doctor2][ 'status']: # if they think differently, # there will be persuasion persuasion( G, damping, doctor1, doctor2, alpha_A, alpha_F, threshold, shift_lenght ) # i move their values of opinion update_opinions( G, threshold, doctor1, doctor2 ) # i update status and make sure the values of the vectors stay between [0,1] else: # if two Adopters meet, they encourage each other (if two NonAdopters, nothing happens) mutual_reinforcement( G, mutual_encouragement, doctor1, doctor2, shift_lenght) list_Att_Adopters = [] list_Adopters = [] #count how many i have at this time for n in G.nodes(): try: if G.node[n]["status"] == "Adopter": list_Adopters.append(G.node[n]["label"]) if G.node[n]["type"] == "A": list_Att_Adopters.append(G.node[n]["label"]) except: pass # if the node is a shift, it doesnt have a 'status' attribute time_evol_number_adopters.append(float(len(list_Att_Adopters))) time_evol_number_tot_adopters.append(float( len(list_Adopters))) # Attendings and fellows as adopters t += 1 ############# end while loop over t time_evol_number_adopters_ITER.append(time_evol_number_adopters) list_distances_150day.append( time_evol_number_adopters[-93] - list_actual_evol[-93]) # because last day is 243 list_distances_150day_Att_fellow.append( time_evol_number_tot_adopters[-93] - list_actual_evol[-93]) list_distances_200day.append(time_evol_number_adopters[-43] - list_actual_evol[-43]) 
list_distances_200day_Att_fellow.append( time_evol_number_tot_adopters[-43] - list_actual_evol[-43]) list_ending_distances.append( time_evol_number_adopters[-1] - list_actual_evol[-1]) # ONLY att adopters list_ending_distances_Att_fellow.append( time_evol_number_tot_adopters[-1] - list_actual_evol[-1]) # att + fellow adopters print iter, t, time_evol_number_tot_adopters[ -1], time_evol_number_adopters[-1], list_actual_evol[-1] print "diff. at day 200:", time_evol_number_adopters[ -43] - list_actual_evol[-43], time_evol_number_adopters[ -43], list_actual_evol[ -43], "diff. at the end:", time_evol_number_adopters[ -1] - list_actual_evol[-1], time_evol_number_adopters[ -1], list_actual_evol[ -1], " if i count tot Att+Fellows at end:", time_evol_number_tot_adopters[ -1], "for bump:", bump ##############end loop Niter parameters = [ alpha_F, damping, mutual_encouragement, threshold, bump, time_window_ahead ] calculate_envelope_set_curves.calculate_envelope( time_evol_number_adopters_ITER, 95, "Persuasion_intervention", parameters) file = open(output_file, 'wt') for i in range( len(time_evol_number_adopters)): #time step by time step list_fixed_t = [] for iteracion in range( Niter): #loop over all independent iter of the process list_fixed_t.append( time_evol_number_adopters_ITER[iteracion] [i]) # i collect all values for the same t, different iter print >> file, list_t[i], numpy.mean(list_fixed_t), numpy.std( list_fixed_t), alpha_F, damping, mutual_encouragement file.close() try: fraction_success_interv = float( tot_number_successful_interventions) / float( tot_number_interventions) #averages over Niter except ZeroDivisionError: fraction_success_interv = 0 try: fraction_success_interv_Att = float( tot_number_successful_interventions_Att) / float( tot_number_interventions_Att) #averages over Niter except ZeroDivisionError: fraction_success_interv_Att = 0 file2 = open(output_file2, 'at') print >> file2, bump, 
numpy.mean(list_distances_150day),numpy.std(list_distances_150day), \ numpy.mean(list_distances_150day_Att_fellow),numpy.std(list_distances_150day_Att_fellow), \ numpy.mean(list_distances_200day),numpy.std(list_distances_200day), \ numpy.mean(list_distances_200day_Att_fellow),numpy.std(list_distances_200day_Att_fellow),\ numpy.mean(list_ending_distances),numpy.std(list_ending_distances), \ numpy.mean(list_ending_distances_Att_fellow), numpy.std(list_ending_distances_Att_fellow),\ float(tot_number_interventions)/float(Niter) , float(tot_number_interventions_Att)/float(Niter) ,fraction_success_interv, fraction_success_interv_Att,bump/float(threshold) file2.close() print "fraction successful interventions", fraction_success_interv, "fraction successful interventions on Att", fraction_success_interv_Att print " written:", output_file bump += delta_bump ########################## end loop over bump print "\nwritten:", output_file2
def main(graph_name): G = nx.read_gml(graph_name) list_id_weekends_T3=look_for_T3_weekends(G) # T3 doesnt share fellows in the weekend (but they are the exception) percent_envelope=95. Niter=1000 cutting_day=125 min_sum_dist=20 # to compute number of realizations that have a sum of distances smaller than this Nbins=200 # for the histogram of sum of distances envelopes="NO" delta_end=3. # >= than + or - dr difference at the end of the evolution dir_real_data='../Results/' ###################################################################################### # I read the file of the actual evolution of the idea spreading in the hospital: ## ###################################################################################### filename_actual_evol="../Data/Attendings_Orders_from_inference_list_adopters_day.dat" # "../Results/Actual_evolution_adopters_from_inference.dat" file1=open(filename_actual_evol,'r') ## i read the file: list_dates_and_names_current_adopters.txt (created with: extract_real_evolution_number_adopters.py) list_lines_file=file1.readlines() dict_days_list_empirical_adopters={} list_actual_evol=[] for line in list_lines_file: # [1:]: # i exclude the first row day=int(line.split(" ")[0]) num_adopters= float(line.split(" ")[1]) list_actual_evol.append(num_adopters) list_current_adopters=[] for element in line.split(" ")[2:]: # i need to ignore the empty columns from the original datafile if element: if element != '\n': list_current_adopters.append(element.strip('\n')) dict_days_list_empirical_adopters[day]=list_current_adopters list_actual_evol_testing=list_actual_evol[cutting_day:] ################################################################## #../Results/weight_shifts/persuasion/Time_evolutions_Persuasion_training_alpha0.5_damping0.4_mutual_encourg0.5_threshold0.5_unif_distr_1000iter_2012_seed31Oct_finalnetwork_day125.dat #OJO!!! 
NECESITO DOS DECIMALES SIEMPRE, PARA QUE CUADRE CON EL NOMBRE DE LOS SUB-DIRECTORIOS DONDE LO GUARDO alpha_F_min=0.50 #0.15 # alpha=0: nobody changes their mind alpha_F_max=0.501 #0.351 delta_alpha_F=0.10 #AVOID 1.0 OR THE DYNAMICS GETS TOTALLY STUCK AND IT IS NOT ABLE TO PREDICT SHIT! min_damping=0.600 #0.0 #its harder to go back from YES to NO again. =1 means no effect, =0.5 half the movement from Y->N than the other way around, =0 never go back from Y to N max_damping=0.601 #0.451 delta_damping=0.10 min_mutual_encouragement=0.40 #0.50 # when two Adopters meet, they convince each other even more max_mutual_encouragement=0.401 # 0.51 # KEEP THIS FIXED VALUES FOR NOW delta_mutual_encouragement=0.10 threshold_min=0.50 #0.50 # larger than, to be an Adopte threshold_max=0.501 # 0.51 # KEEP THIS FIXED VALUES FOR NOW delta_threshold=0.10 # AVOID 1.0 OR THE DYNAMICS GETS TOTALLY STUCK AND IT IS NOT ABLE TO PREDICT SHIT print "\n\nPersuasion process on network, with Niter:",Niter threshold=threshold_min while threshold<= threshold_max: print "thershold:",threshold alpha_F=alpha_F_min while alpha_F<= alpha_F_max: # i explore all the parameter space, and create a file per each set of valuesllkl alpha_A=1.0*alpha_F print " alpha_F:",alpha_F mutual_encouragement=min_mutual_encouragement while mutual_encouragement <= max_mutual_encouragement: print " mutual_encouragement:",mutual_encouragement damping=min_damping while damping <= max_damping: print " damping:",damping dir="../Results/weight_shifts/persuasion/alpha%.2f_damping%.2f/" % (alpha_F, damping ) output_file=dir+"Time_evol_Persuasion_alpha"+str(alpha_F)+"_damping"+str(damping)+"_mutual"+str(mutual_encouragement)+"_threshold"+str(threshold)+"_"+str(Niter)+"iter_"+str(cutting_day)+"_A_F_inferred_middle_real_ic.dat" file = open(output_file,'wt') file.close() time_evol_number_adopters_ITER=[] # list of complete single realizations of the dynamics list_dist_fixed_parameters_testing_segment=[] 
list_dist_abs_at_ending_point_fixed_parameters=[] list_dist_at_ending_point_fixed_parameters=[] list_final_num_adopt=[] list_abs_dist_point_by_point_indiv_simus_to_actual=[] list_dist_point_by_point_indiv_simus_to_actual=[] #list_abs_dist_at_cutting_day=[] for iter in range(Niter): # print " ",iter num_realizations_sum_dist_small=0. time_evol_number_adopters=[] # for a single realization of the dynamics num_adopters , max_shift= set_ic(G,threshold,cutting_day,dict_days_list_empirical_adopters) time_evol_number_adopters.append(float(num_adopters)) old_num_adopters=num_adopters # the dynamics starts: shift_length=5 #i know the first shift (order 0) is of length 5 t=cutting_day while t<= max_shift: # loop over shifts, in chronological order (the order is the day index since seeding_day) # print 't:',t for n in G.nodes(): if G.node[n]['type']=="shift" and G.node[n]['order']==t: # i look for the shift corresponding to that time step shift_length=int(G.node[n]['shift_length']) if shift_length==2 and n not in list_id_weekends_T3: shift_length=1 # because during weekends, the fellow does rounds one day with Att1 and the other day with Att2. 
(weekend shifts for T3 are two day long, with no sharing fellows) # print "one-day weekend", G.node[n]['label'],G.node[n]['shift_length'] flag_possible_persuasion=0 for doctor in G.neighbors(n): if G.node[doctor]["status"]=="Adopter": #first i check if any doctor is an adopter in this shift flag_possible_persuasion=1 break if flag_possible_persuasion==1: list_doctors=[] for doctor in G.neighbors(n): # for all drs in that shift list_doctors.append(doctor) pairs=itertools.combinations(list_doctors,2) # cos the shift can be 2 but also 3 doctors for pair in pairs: doctor1=pair[0] doctor2=pair[1] if G.node[doctor1]['status'] != G.node[doctor2]['status']: # if they think differently, # there will be persuasion persuasion(G,damping,doctor1,doctor2,alpha_A,alpha_F,threshold,shift_length) # i move their values of opinion update_opinions(G,threshold,doctor1,doctor2) # i update status and make sure the values of the vectors stay between [0,1] else: # if two Adopters meet, they encourage each other (if two NonAdopters, nothing happens) mutual_reinforcement(G,mutual_encouragement,doctor1,doctor2,shift_length) # else: # print " no persuasion possible during shift (no adopters present)!" 
list_Adopters=[] for n in G.nodes(): try: if G.node[n]["status"]=="Adopter": if G.node[n]["label"] not in list_Adopters :#and G.node[n]["type"]=="A": list_Adopters.append(G.node[n]["label"]) except: pass # if the node is a shift, it doesnt have a 'status' attribute new_num_adopters=len(list_Adopters) if shift_length==5: # i estimate that adoption happens in the middle of the shift if t+5 < max_shift: time_evol_number_adopters.append(old_num_adopters) if t+4 < max_shift: time_evol_number_adopters.append(old_num_adopters) if t+3 < max_shift: time_evol_number_adopters.append(new_num_adopters) if t+2 < max_shift: time_evol_number_adopters.append(new_num_adopters) if t+1 < max_shift: time_evol_number_adopters.append(new_num_adopters) t+=5 elif shift_length==4: if t+4 < max_shift: time_evol_number_adopters.append(old_num_adopters) if t+3 < max_shift: time_evol_number_adopters.append(old_num_adopters) if t+2 < max_shift: time_evol_number_adopters.append(new_num_adopters) if t+1 < max_shift: time_evol_number_adopters.append(new_num_adopters) t+=4 elif shift_length==3: if t+3 < max_shift: time_evol_number_adopters.append(old_num_adopters) if t+2 < max_shift: time_evol_number_adopters.append(new_num_adopters) if t+1 < max_shift: time_evol_number_adopters.append(new_num_adopters) t+=3 elif shift_length==2: if t+2 < max_shift: time_evol_number_adopters.append(old_num_adopters) if t+1 < max_shift: time_evol_number_adopters.append(new_num_adopters) t+=2 elif shift_length==1: if t+1 < max_shift: time_evol_number_adopters.append(new_num_adopters) t+=1 old_num_adopters=new_num_adopters ############## end while loop over t time_evol_number_adopters_ITER.append(time_evol_number_adopters) # now i only run the testing segment! 
dist=compare_real_evol_vs_simus_to_be_called.compare_two_curves( list_actual_evol_testing,time_evol_number_adopters) list_dist_fixed_parameters_testing_segment.append(dist) if dist < min_sum_dist: num_realizations_sum_dist_small+=1 list_dist_abs_at_ending_point_fixed_parameters.append( abs(time_evol_number_adopters[-1]-list_actual_evol_testing[-1]) ) list_dist_at_ending_point_fixed_parameters.append(time_evol_number_adopters[-1]-list_actual_evol_testing[-1]) list_final_num_adopt.append(time_evol_number_adopters[-1]) for index in range(len(time_evol_number_adopters)): list_abs_dist_point_by_point_indiv_simus_to_actual.append(abs(time_evol_number_adopters[index]-list_actual_evol_testing[index])) list_dist_point_by_point_indiv_simus_to_actual.append(time_evol_number_adopters[index]-list_actual_evol_testing[index]) #######################end loop over Niter file = open(output_file,'wt') for i in range(len(time_evol_number_adopters)): #time step by time step list_fixed_t=[] for iteracion in range (Niter): #loop over all independent iter of the process list_fixed_t.append(time_evol_number_adopters_ITER[iteracion][i]) # i collect all values for the same t, different iter print >> file,i+cutting_day,numpy.mean(list_fixed_t),numpy.std(list_fixed_t), alpha_F,damping,mutual_encouragement file.close() print "printed out: ",output_file if envelopes=="YES": calculate_envelope_set_curves.calculate_envelope(time_evol_number_adopters_ITER,percent_envelope,"Persuasion",[alpha_F,damping,mutual_encouragement,threshold]) num_valid_endings=0. for item in list_dist_abs_at_ending_point_fixed_parameters: if item <= delta_end: # i count how many realizations i get close enough at the ending point num_valid_endings+=1. 
print "average distance of the optimum in the testing segment:",numpy.mean(list_dist_fixed_parameters_testing_segment),numpy.std(list_dist_fixed_parameters_testing_segment),list_dist_fixed_parameters_testing_segment,"\n" print "fraction of realizations that end within delta_doctor:",num_valid_endings/Niter,"mean ending dist:",numpy.mean(list_dist_at_ending_point_fixed_parameters), "SD final dist",numpy.std(list_dist_at_ending_point_fixed_parameters),list_dist_at_ending_point_fixed_parameters histogram_filename="../Results/weight_shifts/histogr_raw_distances_ending_persuasion_alpha"+str(alpha_F)+"_damping"+str(damping)+"_mutual_encourg"+str(mutual_encouragement)+"_threshold"+str(threshold)+"_"+str(Niter)+"iter_day"+str(cutting_day)+"_A_F_inferred_middle_real_ic.dat" histograma_gral_negv_posit.histograma(list_dist_at_ending_point_fixed_parameters, histogram_filename) # histogram_filename2="../Results/weight_shifts/histogr_sum_dist_traject_persuasion_alpha"+str(alpha_F)+"_damping"+str(damping)+"_mutual_encourg"+str(mutual_encouragement)+"_threshold"+str(threshold)+"_"+str(Niter)+"iter_alphaA_eq_alphaF_day"+str(cutting_day)+"_A_F_inferred_middle.dat" # histograma_bines_gral.histograma_bins(list_dist_fixed_parameters,Nbins,histogram_filename2) histogram_filename3="../Results/weight_shifts/histogr_sum_dist_testing_segment_persuasion_alpha"+str(alpha_F)+"_damping"+str(damping)+"_mutual_encourg"+str(mutual_encouragement)+"_threshold"+str(threshold)+"_"+str(Niter)+"iter_day"+str(cutting_day)+"_A_F_inferred_middle_real_ic.dat" histograma_bines_gral.histograma_bins_zero(list_dist_fixed_parameters_testing_segment,Nbins,histogram_filename3) print min(list_dist_fixed_parameters_testing_segment),max(list_dist_fixed_parameters_testing_segment) 
histogram_filename4="../Results/weight_shifts/histogr_abs_dist_point_by_point_persuasion_alpha"+str(alpha_F)+"_damping"+str(damping)+"_mutual_encourg"+str(mutual_encouragement)+"_threshold"+str(threshold)+"_"+str(Niter)+"iter_day"+str(cutting_day)+"_A_F_inferred_middle_real_ic.dat" histograma_gral_negv_posit.histograma(list_abs_dist_point_by_point_indiv_simus_to_actual, histogram_filename4) histogram_filename5="../Results/weight_shifts/histogr_dist_point_by_point_persuasion_alpha"+str(alpha_F)+"_damping"+str(damping)+"_mutual_encourg"+str(mutual_encouragement)+"_threshold"+str(threshold)+"_"+str(Niter)+"iter_day"+str(cutting_day)+"_A_F_inferred_middle_real_ic.dat" histograma_gral_negv_posit.histograma(list_dist_point_by_point_indiv_simus_to_actual, histogram_filename5) output_file10="../Results/weight_shifts/Summary_results_persuasion_alpha"+str(alpha_F)+"_damping"+str(damping)+"_mutual_encourg"+str(mutual_encouragement)+"_threshold"+str(threshold)+"_"+str(Niter)+"iter_day"+str(cutting_day)+"_A_F_inferred_middle_real_ic.dat" file10 = open(output_file10,'wt') print >> file10, "Summary results from best fit persuasion with",Niter, "iter, and with values for the parameters: alpha ",alpha_F," damping: ",damping," mutual_encourg: ",mutual_encouragement," threshold:",threshold print >> file10, "average distance of the optimum in the testing segment:",numpy.mean(list_dist_fixed_parameters_testing_segment),numpy.std(list_dist_fixed_parameters_testing_segment),list_dist_fixed_parameters_testing_segment print >> file10, "fraction of realizations that end within delta_doctor:",num_valid_endings/Niter,"mean ending dist:",numpy.mean(list_dist_at_ending_point_fixed_parameters), "SD final dist",numpy.std(list_dist_at_ending_point_fixed_parameters),list_dist_at_ending_point_fixed_parameters print >> file10, "written optimum train_test evolution file:",output_file print >> file10,"written histogram file: ",histogram_filename # print >> file10,"written histogram file: 
",histogram_filename2 file10.close() print "written optimum train_test evolution file:",output_file print "written summary file: ",output_file10 damping += delta_damping mutual_encouragement += delta_mutual_encouragement alpha_F += delta_alpha_F threshold += delta_threshold
def main(graph_name): G = nx.read_gml(graph_name) list_id_weekends_T3 = look_for_T3_weekends( G ) # T3 doesnt share fellows in the weekend (but they are the exception) percent_envelope = 95. Niter = 1000 cutting_day = 125 Nbins = 200 # for the histogram of sum of distances for_testing_fixed_set = "YES" # when YES, fixed values param and get all statistics on final distances etc envelopes = "NO" delta_end = 3. # >= than + or - dr difference at the end of the evolution dir_real_data = '../Results/' all_team = "NO" # as adopters or not NO now means i use the file without fellows, only attendings if for_testing_fixed_set == "NO": output_file3 = "../Results/weight_shifts/Landscape_parameters_persuasion_" + str( Niter) + "iter_A_F_inferred.dat" file3 = open(output_file3, 'wt') file3.close() ###################################################################################### # I read the file of the actual evolution of the idea spreading in the hospital: ## ###################################################################################### if all_team == "YES": print "remember that now i use the file of adopters without fellows\n../Results/Actual_evolution_adopters_NO_fellows_only_attendings.dat" exit() else: filename_actual_evol = "../Results/Actual_evolution_adopters_from_inference.dat" file1 = open( filename_actual_evol, 'r' ) ## i read the file: list_dates_and_names_current_adopters.txt (created with: extract_real_evolution_number_adopters.py) list_lines_file = file1.readlines() list_actual_evol = [] for line in list_lines_file: # [1:]: # i exclude the first row num_adopters = float(line.split("\t")[1]) list_actual_evol.append(num_adopters) ################################################################## #../Results/weight_shifts/persuasion/Time_evolutions_Persuasion_training_alpha0.2_damping0.0_mutual_encourg0.5_threshold0.7_unif_distr_1000iter_2012_seed31Oct_finalnetwork_day125.dat 
#../Results/weight_shifts/persuasion/Time_evolutions_Persuasion_training_alpha0.5_damping0.4_mutual_encourg0.5_threshold0.5_unif_distr_1000iter_2012_seed31Oct_finalnetwork_day125.dat #OJO!!! NECESITO DOS DECIMALES SIEMPRE, PARA QUE CUADRE CON EL NOMBRE DE LOS SUB-DIRECTORIOS DONDE LO GUARDO alpha_F_min = 0.10 #0.15 # alpha=0: nobody changes their mind alpha_F_max = 0.101 #0.351 delta_alpha_F = 0.10 #AVOID 1.0 OR THE DYNAMICS GETS TOTALLY STUCK AND IT IS NOT ABLE TO PREDICT SHIT! min_damping = 0.00 #0.0 #its harder to go back from YES to NO again. =1 means no effect, =0.5 half the movement from Y->N than the other way around, =0 never go back from Y to N max_damping = 0.001 #0.451 delta_damping = 0.10 min_mutual_encouragement = 0.000 #0.50 # when two Adopters meet, they convince each other even more max_mutual_encouragement = 0.001 # 0.51 # KEEP THIS FIXED VALUES FOR NOW delta_mutual_encouragement = 0.10 threshold_min = 0.50 #0.50 # larger than, to be an Adopte threshold_max = 0.501 # 0.51 # KEEP THIS FIXED VALUES FOR NOW delta_threshold = 0.10 # AVOID 1.0 OR THE DYNAMICS GETS TOTALLY STUCK AND IT IS NOT ABLE TO PREDICT SHIT print "\n\nPersuasion process on network, with Niter:", Niter dict_filenames_tot_distance = { } # i will save the filename as key and the tot distance from that curve to the original one threshold = threshold_min while threshold <= threshold_max: print "thershold:", threshold alpha_F = alpha_F_min while alpha_F <= alpha_F_max: # i explore all the parameter space, and create a file per each set of valuesllkl alpha_A = 1.0 * alpha_F print " alpha_F:", alpha_F mutual_encouragement = min_mutual_encouragement while mutual_encouragement <= max_mutual_encouragement: print " mutual_encouragement:", mutual_encouragement damping = min_damping while damping <= max_damping: print " damping:", damping dir = "../Results/weight_shifts/persuasion/alpha%.2f_damping%.2f/" % ( alpha_F, damping) if for_testing_fixed_set == "YES": output_file = dir + 
"Time_evol_Persuasion_alpha" + str( alpha_F ) + "_damping" + str(damping) + "_mutual" + str( mutual_encouragement ) + "_threshold" + str(threshold) + "_" + str( Niter) + "iter_alphaA_eq_alphaF_A_F_inferred.dat" else: output_file = dir + "Time_evol_Persuasion_alpha" + str( alpha_F ) + "_damping" + str(damping) + "_mutual" + str( mutual_encouragement ) + "_threshold" + str(threshold) + "_" + str( Niter) + "iter_alphaA_eq_alphaF_A_F_inferred.dat" file = open(output_file, 'wt') file.close() time_evol_number_adopters_ITER = [ ] # list of complete single realizations of the dynamics list_dist_fixed_parameters = [] list_dist_fixed_parameters_testing_segment = [] list_dist_abs_at_ending_point_fixed_parameters = [] list_dist_at_ending_point_fixed_parameters = [] list_final_num_adopt = [] list_abs_dist_point_by_point_indiv_simus_to_actual = [] list_dist_point_by_point_indiv_simus_to_actual = [] #list_abs_dist_at_cutting_day=[] for iter in range(Niter): print " ", iter list_t = [] time_evol_number_adopters = [ ] # for a single realization of the dynamics num_adopters, seed_shift, max_shift = set_ic( G, threshold ) # i establish who is Adopter and NonAdopter initially, and count how many shifts i have total time_evol_number_adopters.append(float(num_adopters)) # print "initial number of adopters:", num_adopters list_t.append(0) ########OJO~!!!!!!!!!! COMENTAR ESTO CUANDO ESTOY BARRIENDO TOOOOOOOOOODO EL ESPACIO DE PARAMETROS # file4 = open(output_file.split('.dat')[0]+"_indiv_iter"+str(iter)+".dat",'wt') # file4.close() ########################################## # the dynamics starts: t = int(seed_shift ) + 1 # the first time step is just IC.??? 
while t <= max_shift: # loop over shifts, in chronological order (the order is the day index since seeding_day) # print 't:',t list_t.append(t) for n in G.nodes(): if G.node[n]['type'] == "shift" and G.node[n][ 'order'] == t: # i look for the shift corresponding to that time step shift_length = int( G.node[n]['shift_length']) if shift_length == 2 and n not in list_id_weekends_T3: shift_length = 1 # because during weekends, the fellow does rounds one day with Att1 and the other day with Att2. (weekend shifts for T3 are two day long, with no sharing fellows) # print "one-day weekend", G.node[n]['label'],G.node[n]['shift_length'] flag_possible_persuasion = 0 for doctor in G.neighbors(n): if G.node[doctor][ "status"] == "Adopter": #first i check if any doctor is an adopter in this shift flag_possible_persuasion = 1 break if flag_possible_persuasion == 1: list_doctors = [] for doctor in G.neighbors( n ): # for all drs in that shift list_doctors.append(doctor) pairs = itertools.combinations( list_doctors, 2 ) # cos the shift can be 2 but also 3 doctors for pair in pairs: doctor1 = pair[0] doctor2 = pair[1] if G.node[doctor1][ 'status'] != G.node[doctor2][ 'status']: # if they think differently, # there will be persuasion persuasion( G, damping, doctor1, doctor2, alpha_A, alpha_F, threshold, shift_length ) # i move their values of opinion update_opinions( G, threshold, doctor1, doctor2 ) # i update status and make sure the values of the vectors stay between [0,1] else: # if two Adopters meet, they encourage each other (if two NonAdopters, nothing happens) mutual_reinforcement( G, mutual_encouragement, doctor1, doctor2, shift_length) # else: # print " no persuasion possible during shift (no adopters present)!" 
list_Adopters = [ ] #count how many i have at this time for n in G.nodes(): try: if G.node[n]["status"] == "Adopter": if G.node[n][ "label"] not in list_Adopters: # and G.node[n]["type"]=="A": list_Adopters.append( G.node[n]["label"]) except: pass # if the node is a shift, it doesnt have a 'status' attribute # if for_testing_fixed_set=="YES": # if t==cutting_day: # list_abs_dist_at_cutting_day.append(abs(float(list_actual_evol[-1])-float(len(list_Adopters)))) # print abs(float(list_actual_evol[-1])-float(len(list_Adopters))), float(list_actual_evol[t]),float(len(list_Adopters)) time_evol_number_adopters.append( float(len(list_Adopters))) t += 1 ############## end while loop over t time_evol_number_adopters_ITER.append( time_evol_number_adopters) list_dist_fixed_parameters.append( compare_real_evol_vs_simus_to_be_called. compare_two_curves(list_actual_evol, time_evol_number_adopters)) list_dist_fixed_parameters_testing_segment.append( compare_real_evol_vs_simus_to_be_called. compare_two_curves_testing_segment( list_actual_evol, time_evol_number_adopters, cutting_day)) list_dist_abs_at_ending_point_fixed_parameters.append( abs(time_evol_number_adopters[-1] - list_actual_evol[-1])) list_dist_at_ending_point_fixed_parameters.append( time_evol_number_adopters[-1] - list_actual_evol[-1]) list_final_num_adopt.append( time_evol_number_adopters[-1]) ########OJO~!!!!!!!!!! 
COMENTAR ESTO CUANDO ESTOY BARRIENDO TOOOOOOOOOODO EL ESPACIO DE PARAMETROS # file4 = open(output_file.split('.dat')[0]+"_indiv_iter"+str(iter)+".dat",'at') # for i in range(len(time_evol_number_adopters)): #ime step by time step # print >> file4, i,time_evol_number_adopters[i], alpha_F,damping,mutual_encouragement #file4.close() ######################################################## for index in range(len(time_evol_number_adopters)): list_abs_dist_point_by_point_indiv_simus_to_actual.append( abs(time_evol_number_adopters[index] - list_actual_evol[index])) list_dist_point_by_point_indiv_simus_to_actual.append( time_evol_number_adopters[index] - list_actual_evol[index]) #######################end loop over Niter list_pair_dist_std_delta_end = [] list_pair_dist_std_delta_end.append( numpy.mean(list_dist_fixed_parameters) ) # average dist between the curves over Niter list_pair_dist_std_delta_end.append( numpy.std(list_dist_fixed_parameters)) list_pair_dist_std_delta_end.append( numpy.mean( list_dist_abs_at_ending_point_fixed_parameters)) if for_testing_fixed_set == "NO": file3 = open(output_file3, 'at') # i print out the landscape print >> file3, alpha_F, damping, mutual_encouragement, threshold, numpy.mean( list_dist_abs_at_ending_point_fixed_parameters ), numpy.mean(list_dist_fixed_parameters), numpy.mean( list_final_num_adopt), numpy.std( list_final_num_adopt ), numpy.std(list_final_num_adopt) / numpy.mean( list_final_num_adopt) file3.close() if ( numpy.mean( list_dist_abs_at_ending_point_fixed_parameters) ) <= delta_end: # i only consider situations close enough at the ending point dict_filenames_tot_distance[ output_file] = list_pair_dist_std_delta_end file = open(output_file, 'wt') for i in range(len(time_evol_number_adopters) ): #time step by time step list_fixed_t = [] for iteracion in range( Niter ): #loop over all independent iter of the process list_fixed_t.append( time_evol_number_adopters_ITER[iteracion][i] ) # i collect all values for the same t, 
different iter print >> file, list_t[i], numpy.mean( list_fixed_t), numpy.std( list_fixed_t ), alpha_F, damping, mutual_encouragement file.close() print "printed out: ", output_file if envelopes == "YES": calculate_envelope_set_curves.calculate_envelope( time_evol_number_adopters_ITER, percent_envelope, "Persuasion", [ alpha_F, damping, mutual_encouragement, threshold ]) if for_testing_fixed_set == "YES": num_valid_endings = 0. for item in list_dist_abs_at_ending_point_fixed_parameters: if item <= delta_end: # i count how many realizations i get close enough at the ending point num_valid_endings += 1. print "average distance of the optimum in the testing segment:", numpy.mean( list_dist_fixed_parameters), numpy.std( list_dist_fixed_parameters ), list_dist_fixed_parameters, "\n" print "fraction of realizations that end within delta_doctor:", num_valid_endings / Niter, "mean ending dist:", numpy.mean( list_dist_at_ending_point_fixed_parameters ), "SD final dist", numpy.std( list_dist_at_ending_point_fixed_parameters ), list_dist_at_ending_point_fixed_parameters histogram_filename = "../Results/weight_shifts/histogr_raw_distances_ending_persuasion_alpha" + str( alpha_F) + "_damping" + str( damping) + "_mutual_encourg" + str( mutual_encouragement ) + "_threshold" + str(threshold) + "_" + str( Niter) + "iter_alphaA_eq_alphaF_day" + str( cutting_day) + "_A_F_inferred.dat" histograma_gral_negv_posit.histograma( list_dist_at_ending_point_fixed_parameters, histogram_filename) histogram_filename2 = "../Results/weight_shifts/histogr_sum_dist_traject_persuasion_alpha" + str( alpha_F) + "_damping" + str( damping) + "_mutual_encourg" + str( mutual_encouragement ) + "_threshold" + str(threshold) + "_" + str( Niter) + "iter_alphaA_eq_alphaF_day" + str( cutting_day) + "_A_F_inferred.dat" histograma_bines_gral.histograma_bins( list_dist_fixed_parameters, Nbins, histogram_filename2) histogram_filename3 = "../Results/weight_shifts/histogr_sum_dist_testing_segment_persuasion_alpha" + 
str( alpha_F) + "_damping" + str( damping) + "_mutual_encourg" + str( mutual_encouragement ) + "_threshold" + str(threshold) + "_" + str( Niter) + "iter_alphaA_eq_alphaF_day" + str( cutting_day) + "_A_F_inferred.dat" histograma_bines_gral.histograma_bins_zero( list_dist_fixed_parameters_testing_segment, Nbins, histogram_filename3) histogram_filename4 = "../Results/weight_shifts/histogr_abs_dist_point_by_point_persuasion_alpha" + str( alpha_F) + "_damping" + str( damping) + "_mutual_encourg" + str( mutual_encouragement ) + "_threshold" + str(threshold) + "_" + str( Niter) + "iter_alphaA_eq_alphaF_day" + str( cutting_day) + "_A_F_inferred.dat" histograma_gral_negv_posit.histograma( list_abs_dist_point_by_point_indiv_simus_to_actual, histogram_filename4) histogram_filename5 = "../Results/weight_shifts/histogr_dist_point_by_point_persuasion_alpha" + str( alpha_F) + "_damping" + str( damping) + "_mutual_encourg" + str( mutual_encouragement ) + "_threshold" + str(threshold) + "_" + str( Niter) + "iter_alphaA_eq_alphaF_day" + str( cutting_day) + "_A_F_inferred.dat" histograma_gral_negv_posit.histograma( list_dist_point_by_point_indiv_simus_to_actual, histogram_filename5) output_file10 = "../Results/weight_shifts/Summary_results_persuasion_alpha" + str( alpha_F) + "_damping" + str( damping) + "_mutual_encourg" + str( mutual_encouragement ) + "_threshold" + str(threshold) + "_" + str( Niter) + "iter_alphaA_eq_alphaF_day" + str( cutting_day) + "_A_F_inferred.dat" file10 = open(output_file10, 'wt') print >> file10, "Summary results from best fit persuasion with", Niter, "iter, and with values for the parameters: alpha ", alpha_F, " damping: ", damping, " mutual_encourg: ", mutual_encouragement, " threshold:", threshold print >> file10, "average distance of the optimum in the testing segment:", numpy.mean( list_dist_fixed_parameters), numpy.std( list_dist_fixed_parameters ), list_dist_fixed_parameters print >> file10, "fraction of realizations that end within delta_doctor:", 
num_valid_endings / Niter, "mean ending dist:", numpy.mean( list_dist_at_ending_point_fixed_parameters ), "SD final dist", numpy.std( list_dist_at_ending_point_fixed_parameters ), list_dist_at_ending_point_fixed_parameters print >> file10, "written optimum train_test evolution file:", output_file print >> file10, "written histogram file: ", histogram_filename print >> file10, "written histogram file: ", histogram_filename2 file10.close() print "written optimum train_test evolution file:", output_file print "written summary file: ", output_file10 damping += delta_damping mutual_encouragement += delta_mutual_encouragement alpha_F += delta_alpha_F threshold += delta_threshold if for_testing_fixed_set == "NO": # only if i am exploring the whole landscape, i need to call this function, otherwise, i already know the optimum compare_real_evol_vs_simus_to_be_called.pick_minimum_same_end( dict_filenames_tot_distance, "Persuasion_weight", all_team, Niter, None) #last argument, cutting day (it doesnt apply) if for_testing_fixed_set == "NO": print "written landscape file:", output_file3
def main(graph_name):
    """Run Niter realizations of the simple SI-with-immunity infection
    dynamics on the shift graph and pass all time evolutions to the
    envelope calculator.

    graph_name -- path to a GML file readable by networkx (nodes carry
    'type', 'order', 'label' attributes; 'shift' nodes link the doctors
    working together).
    Side effects: output is produced by
    calculate_envelope_set_curves.calculate_envelope.
    NOTE(review): indentation reconstructed from a whitespace-mangled
    source; statement nesting inside the t-loop assumed to mirror the
    sibling infection-memory script — confirm against original.
    """

    G = nx.read_gml(graph_name)

    percent_envelope = 95.  # percentile width handed to the envelope calculator

    #../Results/save/Average_time_evolution_Infection_p0.3_Immune0.15_1000iter_2012.dat
    prob_infection = 0.2  # per-encounter infection probability
    prob_Immune = 0.1  # probability a doctor starts out Immune

    Niter = 1000

    # i create the empty list of list for the Niter temporal evolutions
    num_shifts = 0
    for n in G.nodes():
        G.node[n]["status"] = "S"
        if G.node[n]['type'] == "shift":
            num_shifts += 1

    # list_final_I_values_fixed_p=[]   # i dont care about the final values right now, but about the whole time evol
    list_lists_t_evolutions = []  # one time-evolution list per realization

    for iter in range(Niter):
        print " iter:", iter

        list_I = []  # list infected doctors
        list_ordering = []
        list_s = []
        list_A = []
        list_F = []

        ########### set I.C.
        max_order = 0
        for n in G.nodes():
            G.node[n]["status"] = "S"  # all nodes are Susceptible
            if G.node[n]['type'] == "shift":
                list_s.append(n)
                if G.node[n]['order'] > max_order:
                    max_order = G.node[n]['order']  # last shift index: bound for the time loop
            else:
                # the two seed adopters are always infected from the start
                if G.node[n]['label'] == "Wunderink" or G.node[n]["label"] == "Weiss":
                    G.node[n]["status"] = "I"
                    list_I.append(G.node[n]['label'])
                ######################## WHAT ABOUT SMITH AND SPORN???
                if G.node[n]['type'] == "A":
                    list_A.append(n)
                if G.node[n]['type'] == "F":
                    list_F.append(n)

        list_single_t_evolution = []
        list_single_t_evolution.append(2.0)  # I always start with TWO infected doctors!!

        for n in G.nodes():  # i make some DOCTORs INMUNE (anyone except Weiss and Wunderink)
            if (G.node[n]['type'] == "A") or (G.node[n]['type'] == "F"):
                if G.node[n]['label'] != "Wunderink" and G.node[n]["label"] != "Weiss":  # these particular two cant be immune
                    rand = random.random()
                    if rand < prob_Immune:
                        G.node[n]["status"] = "Immune"

        # print max_order

        ################# the dynamics starts:
        t = 1
        while t <= max_order:  # loop over shifts, in order
            for n in G.nodes():
                if G.node[n]['type'] == "shift" and G.node[n]['order'] == t:
                    flag_possible_infection = 0
                    for doctor in G.neighbors(n):  # first i check if any doctor is infected in this shift
                        if G.node[doctor]["status"] == "I":
                            flag_possible_infection = 1

                    if flag_possible_infection:
                        for doctor in G.neighbors(n):  # then the doctors in that shift, gets infected with prob_infection
                            if G.node[doctor]["status"] == "S":
                                rand = random.random()
                                if rand < prob_infection:
                                    G.node[doctor]["status"] = "I"
                                    list_I.append(G.node[doctor]["label"])

            # record the cumulative number of infected after this time step
            list_single_t_evolution.append(float(len(list_I)))  # /(len(list_A)+len(list_F)))
            t += 1

        list_lists_t_evolutions.append(list_single_t_evolution)
    ######## end Niter

    calculate_envelope_set_curves.calculate_envelope(
        list_lists_t_evolutions, percent_envelope, "Infection",
        [prob_infection, prob_Immune])
def main(graph_name):
    """Run Niter realizations of the persuasion (opinion) dynamics on the
    shift graph with one fixed parameter set, write the averaged adopter
    curve to a .dat file and compute its envelopes.

    graph_name -- path to a GML file readable by networkx.
    Side effects: writes output_file (one line per time step:
    t, mean, std, alpha_F, damping, mutual_encouragement) and delegates
    to calculate_envelope_set_curves.calculate_envelope.
    Relies on project helpers defined elsewhere in this file:
    set_ic, persuasion, update_opinions, mutual_reinforcement.
    NOTE(review): indentation reconstructed from a whitespace-mangled
    source — confirm nesting against the original script.
    """

    G = nx.read_gml(graph_name)

    percent_envelope = 95.
    Niter = 1000

    #"../Results/save/Time_evolutions_Persuasion_alpha0.25_damping0.0_mutual_encourg0.5_threshold0.5_unif_distr_1000iter_2012_seed31Oct_finalnetwork.dat"

    alpha_F = 0.1  # alpha=0: nobody changes their mind
    alpha_A = 1. * alpha_F
    damping = 0.0  # its harder to go back from YES to NO again. =1 means no effect, =0.5 half the movement from Y->N than the other way around, =0 never go back from Y to N
    mutual_encouragement = 0.0  # when two Adopters meet, they convince each other even more
    threshold = 0.5  # larger than, to be an Adopter

    print "\n\nPersuasion process on network, with Niter:", Niter

    dir = "../Results/weight_shifts/"
    output_file = dir + "Time_evolutions_Persuasion_alpha" + str(
        alpha_F) + "_damping" + str(damping) + "_mutual_encourg" + str(
            mutual_encouragement) + "_threshold" + str(
                threshold) + "_unif_distr_" + str(
                    Niter) + "iter_2012_seed31Oct_finalnetwork.dat"
    file = open(output_file, 'wt')
    file.close()

    time_evol_number_adopters_ITER = []  # list of complete single realizations of the dynamics

    for iter in range(Niter):
        print "iter:", iter
        list_t = []
        time_evol_number_adopters = []  # for a single realization of the dynamics

        num_adopters, seed_shift, max_shift = set_ic(
            G, threshold)  # i establish who is Adopter and NonAdopter initially, and count how many shifts i have total

        time_evol_number_adopters.append(float(num_adopters))
        # print "initial number of adopters:", num_adopters
        list_t.append(0)

        ######## WATCH OUT!!!! comment this out when sweeping the WHOLE parameter space
        # file2 = open(output_file.split('.dat')[0]+"_indiv_iter"+str(iter)+".dat",'wt')
        # file2.close()
        ##########################################

        # the dynamics starts:
        t = int(seed_shift) + 1  # the first time step is just IC.???
        while t <= max_shift:  # loop over shifts, in chronological order (the order is the day index since seeding_day)
            list_t.append(t)
            for n in G.nodes():
                if G.node[n]['type'] == "shift" and G.node[n][
                        'order'] == t:  # i look for the shift corresponding to that time step
                    flag_possible_persuasion = 0
                    for doctor in G.neighbors(n):
                        if G.node[doctor][
                                "status"] == "Adopter":  # first i check if any doctor is an adopter in this shift
                            flag_possible_persuasion = 1
                            break

                    if flag_possible_persuasion == 1:
                        list_doctors = []
                        for doctor in G.neighbors(n):  # for all drs in that shift
                            list_doctors.append(doctor)

                        pairs = itertools.combinations(
                            list_doctors, 2)  # cos the shift can be 2 but also 3 doctors
                        for pair in pairs:
                            doctor1 = pair[0]
                            doctor2 = pair[1]

                            if G.node[doctor1]['status'] != G.node[doctor2][
                                    'status']:  # if they think differently, there will be persuasion
                                persuasion(G, damping, doctor1, doctor2,
                                           alpha_A, alpha_F,
                                           threshold)  # i move their values of opinion
                                update_opinions(
                                    G, threshold, doctor1, doctor2
                                )  # i update status and make sure the values of the vectors stay between [0,1]
                            else:  # if two Adopters meet, they encourage each other (if two NonAdopters, nothing happens)
                                mutual_reinforcement(G, mutual_encouragement,
                                                     doctor1, doctor2)

            list_Adopters = []  # count how many i have at this time
            for n in G.nodes():
                try:
                    if G.node[n]["status"] == "Adopter":
                        if G.node[n]["label"] not in list_Adopters:
                            list_Adopters.append(G.node[n]["label"])
                except:
                    pass  # if the node is a shift, it doesnt have a 'status' attribute

            time_evol_number_adopters.append(float(len(list_Adopters)))
            t += 1
        ############## end while loop over t

        ######## WATCH OUT!!!! comment this out when sweeping the WHOLE parameter space
        #file2 = open(output_file.split('.dat')[0]+"_indiv_iter"+str(iter)+".dat",'at')
        #for i in range(len(time_evol_number_adopters)):  # time step by time step
        #    print >> file2, i, time_evol_number_adopters[i], alpha_F, damping, mutual_encouragement
        #file.close()
        ########################################################

        time_evol_number_adopters_ITER.append(time_evol_number_adopters)
    # end loop over Niter

    # average the Niter realizations time step by time step
    # NOTE(review): uses len() of the LAST realization — assumes all
    # realizations have the same length; verify set_ic returns a fixed
    # seed_shift/max_shift per graph.
    file = open(output_file, 'wt')
    for i in range(len(time_evol_number_adopters)):  # time step by time step
        list_fixed_t = []
        for iteracion in range(Niter):  # loop over all independent iter of the process
            list_fixed_t.append(
                time_evol_number_adopters_ITER[iteracion]
                [i])  # i collect all values for the same t, different iter

        print >> file, list_t[i], numpy.mean(list_fixed_t), numpy.std(
            list_fixed_t), alpha_F, damping, mutual_encouragement
    file.close()

    calculate_envelope_set_curves.calculate_envelope(
        time_evol_number_adopters_ITER, percent_envelope, "Persuasion",
        [alpha_F, damping, mutual_encouragement, threshold])
def main(graph_name):
    """Test-segment infection run seeded with EMPIRICAL initial conditions
    at cutting_day: attendings adopted by cutting_day (from the data file)
    plus the most frequent fellow adopters estimated from a previous
    training simulation. Runs Niter realizations from t=cutting_day to the
    last shift, compares against the actual adoption curve, and writes the
    averaged curve, histograms and a summary file.

    graph_name -- path to a GML file readable by networkx.
    Relies on project helpers: look_for_T3_weekends,
    compare_real_evol_vs_simus_to_be_called, histograma_gral_negv_posit,
    histograma_bines_gral, calculate_envelope_set_curves.
    NOTE(review): indentation reconstructed from a whitespace-mangled
    source; nesting of the shift-length interpolation ladder assumed —
    confirm against the original.
    """

    G = nx.read_gml(graph_name)

    cutting_day = 175  # i use this only for the filenames

    for_testing_fixed_set = "YES"  # when YES, fixed values param, to get all statistics on final distances etc
    # change the range for the parameters accordingly

    envelopes = "YES"

    Niter = 1000  # 100 iter seems to be enough (no big diff. with respect to 1000it)

    percent_envelope = 95.

    list_id_weekends_T3 = look_for_T3_weekends(
        G)  # T3 doesnt share fellows in the weekend (but they are the exception)

    Nbins = 1000  # for the histogram of sum of distances

    all_team = "NO"  # as adopters or not

    dir_real_data = '../Results/'
    dir = "../Results/weight_shifts/infection/"

    delta_end = 3.  # >= than + or - dr difference at the end of the evolution (NO realization ends up closer than this!!!! if 2, i get and empty list!!!)

    ######################################################################################
    # I read the file of the actual evolution of the idea spreading in the hospital:    ##
    ######################################################################################

    filename_actual_evol = "../Data/Actual_evolution_adopters_NO_fellows_only_attendings_with_list_names.csv"  # "../Results/Actual_evolution_adopters_from_inference.dat"

    file1 = open(
        filename_actual_evol, 'r'
    )  # i read the file: list_dates_and_names_current_adopters.txt (created with: extract_real_evolution_number_adopters.py)
    list_lines_file = file1.readlines()

    dict_days_list_empirical_att_adopters = {}  # day -> list of attending adopter names (Title-cased)

    list_actual_evol = []
    for line in list_lines_file:  # [1:]:   # i exclude the first row
        day = int(line.split(" ")[0])
        num_adopters = float(line.split(" ")[1])
        list_actual_evol.append(num_adopters)

        list_current_att_adopters = []
        for element in line.split(
                " ")[2:]:  # i need to ignore the empty columns from the original datafile
            if element:
                if element != '\n':
                    list_current_att_adopters.append(
                        element.strip('\n').title())

        dict_days_list_empirical_att_adopters[day] = list_current_att_adopters

    list_actual_evol_testing = list_actual_evol[cutting_day:]

    ##################################################################

    #../Results/weight_shifts/infection/Average_time_evolution_Infection_training_p0.8_Immune0.3_1000iter_2012_avg_ic_day125.dat    THESE VALUES ARE THE OPTIMUM FIT FOR THE 152 DAYS

    prob_min = 1.00
    prob_max = 1.001
    delta_prob = 0.1

    prob_Immune_min = 0.700
    prob_Immune_max = 0.7001
    delta_prob_Immune = 0.1

    prob_Immune = prob_Immune_min
    while prob_Immune <= prob_Immune_max:
        print "prom Immune:", prob_Immune

        prob_infection = prob_min
        while prob_infection <= prob_max:
            print " p:", prob_infection

            output_file2 = dir + "Average_time_evolution_Infection_p" + str(
                prob_infection) + "_" + "Immune" + str(
                    prob_Immune) + "_" + str(Niter) + "iter_day" + str(
                        cutting_day) + "_Att_only_middle_real_ic.dat"
            file2 = open(output_file2, 'wt')
            file2.close()

            ########## i read the list of frequent adopters from simulations, to estimate the ic for fellows
            filename_list_simu_adopt = "../Results/weight_shifts/infection/List_adopters_fellows_descending_frequency_Infection_training_p" + str(
                prob_infection) + "_Immune" + str(
                    prob_Immune) + "_1000iter_2012_avg_ic_day" + str(
                        cutting_day) + "_Att_only_middle.dat"

            # print filename_list_simu_adopt
            file_list_simu_adopt = open(filename_list_simu_adopt, 'r')
            list_lines_file = file_list_simu_adopt.readlines()

            list_sorted_fellow_adopters = []
            for line in list_lines_file[1:]:  # i exclude the first row
                adopter = line.split(" ")[0]
                list_sorted_fellow_adopters.append(adopter)
            # print "list sorted fellows:",list_sorted_fellow_adopters

            # header row looks like "... Avg # F adopters <x> ..." — parse <x>
            num_simu_Fellow_adopters_cutting_day = int(
                round(
                    float(list_lines_file[0].split("Avg # F adopters ")
                          [1].split(" ")[0])))
            #print "avg simu number Fellow adopters:",num_simu_Fellow_adopters_cutting_day,int( round(num_simu_Fellow_adopters_cutting_day))
            ################

            list_lists_t_evolutions = []
            list_dist_fixed_parameters_testing_segment = []
            list_abs_dist_at_ending_point_fixed_parameters = []
            list_dist_at_ending_point_fixed_parameters = []
            list_final_num_infected = []
            list_abs_dist_point_by_point_indiv_simus_to_actual = []
            list_dist_point_by_point_indiv_simus_to_actual = []

            for iter in range(Niter):
                # print "     iter:",iter

                ########### set I.C. according to the empirical data
                dict_name_node = {}  # doctor label -> node id, for seeding fellows by name
                list_I = []
                max_order = 0
                for n in G.nodes():
                    G.node[n]["status"] = "S"  # all nodes are Susceptible
                    if G.node[n]['type'] == "shift":
                        if G.node[n]['order'] > max_order:
                            max_order = G.node[n][
                                'order']  # to get the last shift-order for the time loop
                    else:
                        dict_name_node[G.node[n]["label"]] = n
                        # attendings that had adopted by cutting_day start as infected
                        if G.node[n][
                                'label'] in dict_days_list_empirical_att_adopters[
                                    cutting_day]:
                            G.node[n]["status"] = "I"
                            list_I.append(G.node[n]['label'])

                # seed the estimated number of fellow adopters, most frequent first
                list_fellows = []
                for i in range(num_simu_Fellow_adopters_cutting_day):
                    fellow_adopter = list_sorted_fellow_adopters[
                        i]  # this list is sorted from more to less frequent adopter
                    node = dict_name_node[fellow_adopter]
                    G.node[node]["status"] = "I"
                    list_fellows.append(fellow_adopter)

                list_single_t_evolution = []
                old_num_adopters = len(list_I)
                list_single_t_evolution.append(
                    old_num_adopters)  # I always start with TWO infected doctors!!

                for n in G.nodes():  # i make some DOCTORs INMUNE (anyone except Weiss and Wunderink)
                    if (G.node[n]['type'] == "A") or (G.node[n]['type'] == "F"):
                        if (G.node[n]['label'] not in
                                dict_days_list_empirical_att_adopters[
                                    cutting_day]) and (G.node[n]['label']
                                                       not in list_fellows):
                            rand = random.random()
                            if rand < prob_Immune:
                                G.node[n]["status"] = "Immune"

                ################# the dynamics starts:
                shift_length = 5  # i know the first shift (order 0) is of length 5
                t = cutting_day
                while t <= max_order:  # loop over shifts, in order
                    for n in G.nodes():
                        if G.node[n]['type'] == "shift" and G.node[n][
                                'order'] == t:
                            shift_length = int(G.node[n]['shift_length'])

                            if shift_length == 2 and n not in list_id_weekends_T3:
                                shift_length = 1  # because during weekends, the fellow does rounds one day with Att1 and the other day with Att2. (weekend shifts for T3 are two day long, with no sharing fellows)
                                # print "one-day weekend", G.node[n]['label'],G.node[n]['shift_length']

                            flag_possible_infection = 0
                            for doctor in G.neighbors(
                                    n):  # first i check if any doctor is infected in this shift
                                if G.node[doctor]["status"] == "I":
                                    flag_possible_infection = 1

                            if flag_possible_infection:
                                for doctor in G.neighbors(
                                        n):  # then the doctors in that shift, gets infected with prob_infection
                                    for i in range(
                                            shift_length):  # i repeat the infection process several times, to acount for shift length
                                        if G.node[doctor]["status"] == "S":
                                            rand = random.random()
                                            if rand < prob_infection:
                                                G.node[doctor]["status"] = "I"
                                                if G.node[doctor][
                                                        "type"] == "A":  # fellows participate in the dynamics, but i only consider the attendings as real adopters
                                                    list_I.append(
                                                        G.node[doctor]
                                                        ["label"])

                    new_num_adopters = len(list_I)

                    # expand the per-shift count into per-day samples; adoption
                    # is booked from the middle of the shift onwards
                    if shift_length == 5:  # i estimate that adoption happens in the middle of the shift
                        if t + 5 < max_order:
                            list_single_t_evolution.append(old_num_adopters)
                        if t + 4 < max_order:
                            list_single_t_evolution.append(old_num_adopters)
                        if t + 3 < max_order:
                            list_single_t_evolution.append(new_num_adopters)
                        if t + 2 < max_order:
                            list_single_t_evolution.append(new_num_adopters)
                        if t + 1 < max_order:
                            list_single_t_evolution.append(new_num_adopters)
                        t += 5
                    elif shift_length == 4:
                        if t + 4 < max_order:
                            list_single_t_evolution.append(old_num_adopters)
                        if t + 3 < max_order:
                            list_single_t_evolution.append(old_num_adopters)
                        if t + 2 < max_order:
                            list_single_t_evolution.append(new_num_adopters)
                        if t + 1 < max_order:
                            list_single_t_evolution.append(new_num_adopters)
                        t += 4
                    elif shift_length == 3:
                        if t + 3 < max_order:
                            list_single_t_evolution.append(old_num_adopters)
                        if t + 2 < max_order:
                            list_single_t_evolution.append(new_num_adopters)
                        if t + 1 < max_order:
                            list_single_t_evolution.append(new_num_adopters)
                        t += 3
                    elif shift_length == 2:
                        if t + 2 < max_order:
                            list_single_t_evolution.append(old_num_adopters)
                        if t + 1 < max_order:
                            list_single_t_evolution.append(new_num_adopters)
                        t += 2
                    elif shift_length == 1:
                        if t + 1 < max_order:
                            list_single_t_evolution.append(new_num_adopters)
                        t += 1

                    old_num_adopters = new_num_adopters
                ######## end t loop

                list_lists_t_evolutions.append(list_single_t_evolution)

                # now i only run the testing segment!
                list_dist_fixed_parameters_testing_segment.append(
                    compare_real_evol_vs_simus_to_be_called.compare_two_curves(
                        list_actual_evol_testing, list_single_t_evolution))

                list_abs_dist_at_ending_point_fixed_parameters.append(
                    abs(list_single_t_evolution[-1] -
                        list_actual_evol_testing[-1])
                )  # i save the distance at the ending point between the current simu and actual evol

                list_dist_at_ending_point_fixed_parameters.append(
                    list_single_t_evolution[-1] - list_actual_evol_testing[-1]
                )  # i save the distance at the ending point between the current simu and actual evol

                list_final_num_infected.append(list_single_t_evolution[-1])

                # NOTE(review): assumes the simulated and actual testing
                # curves have the same length — confirm for this graph.
                for index in range(len(list_single_t_evolution)):
                    list_abs_dist_point_by_point_indiv_simus_to_actual.append(
                        abs(list_single_t_evolution[index] -
                            list_actual_evol_testing[index]))
                    list_dist_point_by_point_indiv_simus_to_actual.append(
                        list_single_t_evolution[index] -
                        list_actual_evol_testing[index])
            ######## end loop Niter

            # average over the Niter realizations, day by day
            file2 = open(output_file2, 'at')
            for s in range(len(list_single_t_evolution)):
                list_fixed_t = []
                for iter in range(Niter):
                    list_fixed_t.append(list_lists_t_evolutions[iter][s])
                print >> file2, s + cutting_day, numpy.mean(list_fixed_t)
            file2.close()
            print "printed out: ", output_file2

            if envelopes == "YES":
                calculate_envelope_set_curves.calculate_envelope(
                    list_lists_t_evolutions, percent_envelope, "Infection",
                    [prob_infection, prob_Immune])

            num_valid_endings = 0.
            for item in list_abs_dist_at_ending_point_fixed_parameters:
                if item <= delta_end:  # i count how many realizations i get close enough at the ending point
                    num_valid_endings += 1.

            print "average distance of the optimum in the testing segment:", numpy.mean(
                list_dist_fixed_parameters_testing_segment), numpy.std(
                    list_dist_fixed_parameters_testing_segment
                ), list_dist_fixed_parameters_testing_segment, "\n"

            print "fraction of realizations that end within delta_doctor:", num_valid_endings / Niter, "mean ending dist:", numpy.mean(
                list_dist_at_ending_point_fixed_parameters
            ), "SD final dist", numpy.std(
                list_dist_at_ending_point_fixed_parameters
            ), list_dist_at_ending_point_fixed_parameters, "\n"

            histogram_filename = "../Results/weight_shifts/histogr_raw_distances_ending_infection_p" + str(
                prob_infection) + "_" + "Immune" + str(
                    prob_Immune) + "_" + str(Niter) + "iter_day" + str(
                        cutting_day) + "_Att_only_middle_real_ic.dat"
            histograma_gral_negv_posit.histograma(
                list_dist_at_ending_point_fixed_parameters, histogram_filename)

            histogram_filename3 = "../Results/weight_shifts/histogr_sum_dist_testing_segment_infection_p" + str(
                prob_infection) + "_" + "Immune" + str(
                    prob_Immune) + "_" + str(Niter) + "iter_day" + str(
                        cutting_day) + "_Att_only_middle_real_ic.dat"
            histograma_bines_gral.histograma_bins_zero(
                list_dist_fixed_parameters_testing_segment, Nbins,
                histogram_filename3)

            print min(list_dist_fixed_parameters_testing_segment), max(
                list_dist_fixed_parameters_testing_segment)

            histogram_filename4 = "../Results/weight_shifts/histogr_abs_dist_point_by_point_infection_p" + str(
                prob_infection) + "_" + "Immune" + str(
                    prob_Immune) + "_" + str(Niter) + "iter_day" + str(
                        cutting_day) + "_Att_only_middle_real_ic.dat"
            histograma_gral_negv_posit.histograma(
                list_abs_dist_point_by_point_indiv_simus_to_actual,
                histogram_filename4)

            histogram_filename5 = "../Results/weight_shifts/histogr_dist_point_by_point_infection_p" + str(
                prob_infection) + "_" + "Immune" + str(
                    prob_Immune) + "_" + str(Niter) + "iter_day" + str(
                        cutting_day) + "_Att_only_middle_real_ic.dat"
            histograma_gral_negv_posit.histograma(
                list_dist_point_by_point_indiv_simus_to_actual,
                histogram_filename5)

            output_file10 = "../Results/weight_shifts/Summary_results_infection_p" + str(
                prob_infection) + "_" + "Immune" + str(
                    prob_Immune) + "_" + str(Niter) + "iter_day" + str(
                        cutting_day) + "_Att_only_middle_real_ic.dat"
            file10 = open(output_file10, 'wt')

            print >> file10, "Summary results from best fit infection with", Niter, "iter, and with values for the parameters: prob_inf ", prob_infection, " prob immune: ", prob_Immune, "\n"

            print >> file10, "average distance of the optimum in the testing segment:", numpy.mean(
                list_dist_fixed_parameters_testing_segment), numpy.std(
                    list_dist_fixed_parameters_testing_segment
                ), list_dist_fixed_parameters_testing_segment, "\n"

            print >> file10, "fraction of realizations that end within delta_doctor:", num_valid_endings / Niter, "mean ending dist:", numpy.mean(
                list_dist_at_ending_point_fixed_parameters
            ), "SD final dist", numpy.std(
                list_dist_at_ending_point_fixed_parameters
            ), list_dist_at_ending_point_fixed_parameters, "\n"

            print >> file10, "written optimum best fit evolution file:", output_file2
            print >> file10, "written histogram file: ", histogram_filename
            file10.close()

            print "written Summary file: ", output_file10

            prob_infection += delta_prob
        prob_Immune += delta_prob_Immune
def main(graph_name):
    """Infection-with-memory (dose/threshold) dynamics: each successful
    contact adds `dose` to a doctor's accumulated "infec_value"; the
    doctor becomes infected once that value reaches `infect_threshold`.
    Sweeps p, Immune, threshold and dose (here all ranges collapsed to a
    single fixed point), runs Niter realizations per combination, and
    writes averaged curves, histograms and a summary file.

    graph_name -- path to a GML file readable by networkx.
    Relies on project helpers: look_for_T3_weekends,
    compare_real_evol_vs_simus_to_be_called, histograma_gral_negv_posit,
    histograma_bines_gral, calculate_envelope_set_curves.
    NOTE(review): indentation reconstructed from a whitespace-mangled
    source — confirm nesting against the original. The GML attribute is
    spelled 'shift_lenght' here (vs 'shift_length' in the sibling script);
    left as-is since it must match the graph file.
    """

    G = nx.read_gml(graph_name)

    for_testing_fixed_set = "YES"  # when YES, fixed values param, to get all statistics on final distances etc
    # change the range for the parameters accordingly

    envelopes = "NO"

    Niter = 1000

    percent_envelope = 95.

    list_id_weekends_T3 = look_for_T3_weekends(
        G)  # T3 doesnt share fellows in the weekend (but they are the exception)

    cutting_day = 175

    all_team = "NO"  # as adopters or not

    dir_real_data = '../Results/'
    dir = "../Results/weight_shifts/infection/"

    delta_end = 3.  # >= than + or - dr difference at the end of the evolution (NO realization ends up closer than this!!!! if 2, i get and empty list!!!)

    Nbins = 20  # for the histogram of sum of distances

    if for_testing_fixed_set == "NO":
        output_file3 = "../Results/weight_shifts/Landscape_parameters_infection_memory_fixed_dose_thr_" + str(
            Niter) + "iterFIXED_Thr0.2_Imm0.0.dat"
        file3 = open(output_file3, 'wt')
        file3.close()

    ######################################################################################
    # I read the file of the actual evolution of the idea spreading in the hospital:    ##
    ######################################################################################

    if all_team == "YES":
        print "remember that now i use the file of adopters without fellows\n../Results/Actual_evolution_adopters_NO_fellows_only_attendings.dat"
        exit()
    else:
        filename_actual_evol = "../Results/Actual_evolution_adopters_NO_fellows_only_attendings.dat"

    file1 = open(
        filename_actual_evol, 'r'
    )  # i read the file: list_dates_and_names_current_adopters.txt (created with: extract_real_evolution_number_adopters.py)
    list_lines_file = file1.readlines()

    list_actual_evol = []
    for line in list_lines_file:  # [1:]:   # i exclude the first row
        num_adopters = float(line.split(" ")[1])
        list_actual_evol.append(num_adopters)

    ################################################################################

    prob_min = 0.3
    prob_max = 0.301
    delta_prob = 0.1

    prob_Immune_min = 0.00
    prob_Immune_max = 0.001
    delta_prob_Immune = 0.1

    dose_min = 0.7  # of a single encounter with an infected (starting from zero doesnt make sense)
    dose_max = 0.701
    delta_dose = 0.01

    ########## KEEP FIXED TO ONE
    infect_threshold_min = 1.00  # i can define the dose in units of the threshold
    infect_threshold_max = 1.001
    delta_infect_threshold = 0.1
    ############

    dict_filenames_tot_distance = {
    }  # i will save the filename as key and the tot distance from that curve to the original one

    prob_Immune = prob_Immune_min
    while prob_Immune <= prob_Immune_max:
        print "prom Immune:", prob_Immune

        prob_infection = prob_min
        while prob_infection <= prob_max:
            print " p:", prob_infection

            infect_threshold = infect_threshold_min
            while infect_threshold <= infect_threshold_max:
                print " threshold:", infect_threshold

                dose = dose_min
                while dose <= dose_max:
                    print " dose:", dose

                    if for_testing_fixed_set == "YES":
                        output_file2 = dir + "Average_time_evolution_Infection_memory_train_test_p" + str(
                            prob_infection) + "_Immune" + str(
                                prob_Immune) + "_FIXED_threshold" + str(
                                    infect_threshold) + "_dose" + str(
                                        dose) + "_" + str(Niter) + "iter.dat"
                    else:
                        output_file2 = dir + "Average_time_evolution_Infection_memory_p" + str(
                            prob_infection) + "_Immune" + str(
                                prob_Immune) + "_FIXED_threshold" + str(
                                    infect_threshold) + "_dose" + str(
                                        dose) + "_" + str(Niter) + "iter.dat"
                    file2 = open(output_file2, 'wt')
                    file2.close()

                    num_shifts = 0
                    for n in G.nodes():
                        G.node[n]["status"] = "S"
                        G.node[n][
                            "infec_value"] = 0.  # when this value goes over the infect_threshold, the dr is infected
                        if G.node[n]['type'] == "shift":
                            num_shifts += 1

                    list_lists_t_evolutions = [
                    ]  # i create the empty list of list for the Niter temporal evolutions

                    list_dist_fixed_parameters = []
                    list_abs_dist_at_ending_point_fixed_parameters = []
                    list_dist_at_ending_point_fixed_parameters = []
                    list_final_num_infected = []

                    for iter in range(Niter):
                        # print "     iter:",iter

                        list_I = []  # list infected doctors
                        list_ordering = []
                        list_s = []

                        ########### set I.C.
                        max_order = 0
                        for n in G.nodes():
                            G.node[n]["status"] = "S"  # all nodes are Susceptible
                            if G.node[n]['type'] == "shift":
                                list_s.append(n)
                                if G.node[n]['order'] > max_order:
                                    max_order = G.node[n]['order']
                            else:
                                # the two seeds start both infected and over threshold
                                if G.node[n]['label'] == "Wunderink" or G.node[
                                        n]["label"] == "Weiss":
                                    G.node[n]["status"] = "I"
                                    G.node[n][
                                        "infec_value"] = infect_threshold + 1.
                                    list_I.append(G.node[n]['label'])

                        list_single_t_evolution = []
                        list_single_t_evolution.append(
                            2.0)  # I always start with TWO infected doctors!!

                        for n in G.nodes():  # i make some DOCTORs INMUNE (anyone except Weiss and Wunderink)
                            if (G.node[n]['type'] == "A") or (G.node[n]['type']
                                                              == "F"):
                                if G.node[n]['label'] != "Wunderink" and G.node[
                                        n]["label"] != "Weiss":
                                    rand = random.random()
                                    if rand < prob_Immune:
                                        G.node[n]["status"] = "Immune"

                        ################# the dynamics starts:
                        t = 1
                        while t <= max_order:  # loop over shifts, in order
                            for n in G.nodes():
                                if G.node[n]['type'] == "shift" and G.node[n][
                                        'order'] == t:
                                    shift_lenght = int(
                                        G.node[n]['shift_lenght'])

                                    if shift_lenght == 2 and n not in list_id_weekends_T3:
                                        shift_lenght = 1  # because during weekends, the fellow does rounds one day with Att1 and the other day with Att2. (weekend shifts for T3 are two day long, with no sharing fellows)

                                    flag_possible_infection = 0
                                    for doctor in G.neighbors(
                                            n):  # first i check if any doctor is infected in this shift
                                        if G.node[doctor]["status"] == "I":
                                            flag_possible_infection = 1

                                    if flag_possible_infection:
                                        for doctor in G.neighbors(
                                                n):  # then the doctors in that shift, gets infected with prob_infection
                                            for i in range(shift_lenght):
                                                if G.node[doctor][
                                                        "status"] == "S":
                                                    rand = random.random()
                                                    if rand < prob_infection:  # with prob p the infection occurres
                                                        G.node[doctor][
                                                            "infec_value"] += dose  # and bumps the infection_value of that susceptible dr

                                                        if G.node[doctor][
                                                                "infec_value"] >= infect_threshold:  # becomes infected
                                                            G.node[doctor][
                                                                "status"] = "I"

                                                            if G.node[doctor][
                                                                    "type"] == "A":  # fellows participate in the dynamics, but i only consider the attendings as real adopters
                                                                list_I.append(
                                                                    G.node[
                                                                        doctor]
                                                                    ["label"])

                            # for node in G.nodes():
                            #     if G.node[node]['type']!="shift":
                            #         print t, G.node[node]['label'], G.node[node]["infec_value"]
                            #raw_input()

                            list_single_t_evolution.append(float(len(list_I)))
                            t += 1
                        ######## end t loop

                        list_lists_t_evolutions.append(list_single_t_evolution)

                        list_dist_fixed_parameters.append(
                            compare_real_evol_vs_simus_to_be_called.
                            compare_two_curves(list_actual_evol,
                                               list_single_t_evolution))

                        list_abs_dist_at_ending_point_fixed_parameters.append(
                            abs(list_single_t_evolution[-1] -
                                list_actual_evol[-1])
                        )  # i save the distance at the ending point between the current simu and actual evol

                        list_dist_at_ending_point_fixed_parameters.append(
                            list_single_t_evolution[-1] - list_actual_evol[-1]
                        )  # i save the distance at the ending point between the current simu and actual evol

                        list_final_num_infected.append(
                            list_single_t_evolution[-1])
                    ######## end loop Niter

                    list_pair_dist_std_delta_end = []

                    list_pair_dist_std_delta_end.append(
                        numpy.mean(list_dist_fixed_parameters)
                    )  # average dist between the curves over Niter
                    list_pair_dist_std_delta_end.append(
                        numpy.std(list_dist_fixed_parameters))

                    list_pair_dist_std_delta_end.append(
                        numpy.mean(
                            list_abs_dist_at_ending_point_fixed_parameters))

                    if for_testing_fixed_set == "NO":
                        file3 = open(output_file3, 'at')  # i print out the landscape
                        print >> file3, prob_infection, prob_Immune, numpy.mean(
                            list_abs_dist_at_ending_point_fixed_parameters
                        ), numpy.mean(list_dist_fixed_parameters), numpy.mean(
                            list_final_num_infected), numpy.std(
                                list_final_num_infected
                            ), numpy.std(list_final_num_infected) / numpy.mean(
                                list_final_num_infected)
                        file3.close()

                    if (numpy.mean(
                            list_abs_dist_at_ending_point_fixed_parameters)
                        ) <= delta_end:  # i only consider situations close enough at the ending point
                        dict_filenames_tot_distance[
                            output_file2] = list_pair_dist_std_delta_end

                    # average over the Niter realizations, shift by shift
                    file2 = open(output_file2, 'at')
                    for s in range(len(list_single_t_evolution)):
                        list_fixed_t = []
                        for iter in range(Niter):
                            list_fixed_t.append(
                                list_lists_t_evolutions[iter][s])
                        print >> file2, s, numpy.mean(list_fixed_t)
                    file2.close()
                    print "printed out: ", output_file2

                    if for_testing_fixed_set == "YES":
                        num_valid_endings = 0.
                        for item in list_abs_dist_at_ending_point_fixed_parameters:
                            if item <= delta_end:  # i count how many realizations i get close enough at the ending point
                                num_valid_endings += 1.

                        print "average distance of the optimum in the testing segment:", numpy.mean(
                            list_dist_fixed_parameters), numpy.std(
                                list_dist_fixed_parameters
                            ), list_dist_fixed_parameters, "\n"

                        print "fraction of realizations that end within delta_doctor:", num_valid_endings / Niter, "mean ending dist:", numpy.mean(
                            list_dist_at_ending_point_fixed_parameters
                        ), "SD final dist", numpy.std(
                            list_dist_at_ending_point_fixed_parameters
                        ), list_dist_at_ending_point_fixed_parameters, "\n"

                        histogram_filename = "../Results/weight_shifts/histogr_raw_distances_ending_infection_memory_p" + str(
                            prob_infection
                        ) + "_Immune" + str(prob_Immune) + "_threshold" + str(
                            infect_threshold) + "_dose" + str(
                                dose) + "_" + str(Niter) + "iter_day" + str(
                                    cutting_day) + ".dat"
                        histograma_gral_negv_posit.histograma(
                            list_dist_at_ending_point_fixed_parameters,
                            histogram_filename)

                        histogram_filename2 = "../Results/weight_shifts/histogr_sum_dist_traject_infection_memory_p" + str(
                            prob_infection
                        ) + "_Immune" + str(prob_Immune) + "_threshold" + str(
                            infect_threshold) + "_dose" + str(
                                dose) + "_" + str(Niter) + "iter_day" + str(
                                    cutting_day) + ".dat"
                        histograma_bines_gral.histograma_bins(
                            list_dist_fixed_parameters, Nbins,
                            histogram_filename2)

                        output_file10 = "../Results/weight_shifts/Summary_results_training_segment_infection_memory_distrib_p" + str(
                            prob_infection) + "_" + "FIXED_Immune" + str(
                                prob_Immune) + "_FIXED_threshold" + str(
                                    infect_threshold
                                ) + "_dose" + str(dose) + "_" + str(
                                    Niter) + "iter_day" + str(cutting_day) + ".dat"
                        file10 = open(output_file10, 'wt')

                        print >> file10, "Summary results from train-testing infection with", Niter, "iter, and with values for the parameters: prob_inf ", prob_infection, " prob immune: ", prob_Immune, "infect. threshold:", infect_threshold, "dose:", dose, "\n"

                        print >> file10, "average distance of the optimum in the testing segment:", numpy.mean(
                            list_dist_fixed_parameters), numpy.std(
                                list_dist_fixed_parameters
                            ), list_dist_fixed_parameters, "\n"

                        print >> file10, "fraction of realizations that end within delta_doctor:", num_valid_endings / Niter, "mean ending dist:", numpy.mean(
                            list_dist_at_ending_point_fixed_parameters
                        ), "SD final dist", numpy.std(
                            list_dist_at_ending_point_fixed_parameters
                        ), list_dist_at_ending_point_fixed_parameters, "\n"

                        print >> file10, "written optimum train_test evolution file:", output_file2
                        print >> file10, "written histogram file: ", histogram_filename
                        file10.close()

                        print "written Summary file: ", output_file10
                        print "written histogram file: ", histogram_filename

                    if envelopes == "YES":
                        calculate_envelope_set_curves.calculate_envelope(
                            list_lists_t_evolutions, percent_envelope,
                            "Infection_memory_fixed", [
                                prob_infection, prob_Immune, infect_threshold,
                                dose
                            ])

                    dose += delta_dose
                infect_threshold += delta_infect_threshold
            prob_infection += delta_prob
        prob_Immune += delta_prob_Immune

    if for_testing_fixed_set == "NO":  # only if i am exploring the whole landscape, i need to call this function, otherwise, i already know the optimum
        compare_real_evol_vs_simus_to_be_called.pick_minimum_same_end(
            dict_filenames_tot_distance, "Infection_memory", all_team, Niter,
            None)
        print "written landscape file:", output_file3