Example #1
def main(graph_name):

    cutting_day = 243  # to separate training and testing

    Niter = 1000

    G = nx.read_gml(graph_name)

    list_id_weekends_T3 = look_for_T3_weekends(
        G
    )  # T3 doesn't share fellows on the weekend (but they are the exception)

    all_team = "NO"  # whether the whole team counts as adopters or not

    dir_real_data = '../Results/'

    dir = "../Results/weight_shifts/infection/"

    delta_end = 3.  # maximum +/- difference in number of adopting drs allowed at the end of the evolution (no realization ends up closer than this; with 2 I get an empty list)

    ######################################################################################
    #  I read the file of the actual evolution of the idea spreading in the hospital:   ##
    ######################################################################################

    filename_actual_evol = "../Data/Actual_evolution_adopters_NO_fellows_only_attendings.dat"

    file1 = open(
        filename_actual_evol, 'r'
    )  ## i read the file:  list_dates_and_names_current_adopters.txt  (created with: extract_real_evolution_number_adopters.py)
    list_lines_file = file1.readlines()

    list_actual_evol = []
    for line in list_lines_file:  # use list_lines_file[1:] to exclude the header row

        num_adopters = float(line.split("\t")[1])
        list_actual_evol.append(num_adopters)

    list_actual_evol_training = list_actual_evol[:cutting_day]
    #   list_actual_evol_testing=list_actual_evol[(cutting_day-1):]   #i dont use this

    ##################################################################

    #../Results/network_final_schedule_withTeam3/infection/Average_time_evolution_Infection_p0.9_Immune0.5_1000iter_2012.dat

    prob_min = 1.00
    prob_max = 1.0
    delta_prob = 0.1

    prob_Immune_min = 0.0
    prob_Immune_max = 0.0
    delta_prob_Immune = 0.1

    #######
    infect_threshold_min = 1.0  # this is fixed, because the dose can be defined in units of it
    infect_threshold_max = 1.01
    delta_infect_threshold = 0.1
    #######

    # dose of a single encounter with an infected doctor (it can't be zero or it doesn't make sense)
    dose_min = 0.200  #infect_threshold_min
    dose_max = 0.201  #######infect_threshold_min/10.
    delta_dose = 0.1  ##infect_threshold_min/10.

    fixed_param = ""  # or "FIXED_Pimm0_"  # tag for the Results file that contains the sorted list of best parameters

    print_landscape = "NO"  # or "YES"  # print the landscape for the whole parameter exploration

    print_training_evol = "YES"  # or "NO"  # once I know the best fit for the training segment, I run it again to get the curve

    if print_landscape == "YES":
        output_file3 = "../Results/weight_shifts/Landscape_parameters_infection_memory_train_" + fixed_param + "_" + str(
            Niter) + "iter_Att_only_middle_day" + str(cutting_day) + ".dat"
        file3 = open(output_file3, 'wt')

        file3.close()

    list_dist_at_ending_point_fixed_parameters = []
    dict_filenames_tot_distance = {
    }  # i will save the filename as key and the tot distance from that curve to the original one
    dict_filenames_prod_distances = {}

    prob_Immune = prob_Immune_min
    while prob_Immune <= prob_Immune_max:

        print "prob Immune:", prob_Immune

        prob_infection = prob_min
        while prob_infection <= prob_max:

            print "  p:", prob_infection

            infect_threshold = infect_threshold_min

            print "  threshold:", infect_threshold

            dose = dose_min
            while dose <= dose_max:

                print "  dose:", dose

                output_file2 = dir + "Average_time_evolution_Infection_memory_training_p" + str(
                    prob_infection) + "_Immune" + str(
                        prob_Immune) + "_FIXED_threshold" + str(
                            infect_threshold) + "_dose" + str(
                                dose) + "_" + str(Niter) + "iter_day" + str(
                                    cutting_day) + "_Att_only_middle.dat"

                #  I don't need to write it, because I will use the whole file from the full fit, with the parameter values that the testing up to day 125 gives me

                output_file4 = dir + "List_adopters_fellows_descending_frequency_Infection_memory_training_p" + str(
                    prob_infection) + "_Immune" + str(
                        prob_Immune) + "_FIXED_threshold" + str(
                            infect_threshold) + "_dose" + str(
                                dose) + "_" + str(Niter) + "iter_day" + str(
                                    cutting_day) + "_Att_only_middle.dat"

                num_Att_adopters = 0.
                num_F_adopters = 0.
                dict_att_freq_adoption_end = {
                }  # to keep track of which attending is an adopter at the end (to use along with the real IC)
                dict_fellow_freq_adoption_end = {
                }  # to keep track of which fellow is an adopter at the end (to use along with the real IC)
                for n in G.nodes():
                    doctor = G.node[n]["label"]
                    if G.node[n]['type'] == "F":
                        dict_fellow_freq_adoption_end[doctor] = 0.
                    elif G.node[n]['type'] == "A":
                        dict_att_freq_adoption_end[doctor] = 0.

                list_lists_t_evolutions = []

                list_dist_fixed_parameters = []
                list_dist_abs_at_ending_point_fixed_parameters = []
                list_final_num_infected = []

                for iter in range(Niter):

                    #    print "     iter:",iter

                    ########### set I.C.

                    list_I = []  #list infected doctors

                    for n in G.nodes():
                        G.node[n]["status"] = "S"  # all nodes are Susceptible
                        G.node[n]["infec_value"] = 0.
                        if G.node[n]['type'] == "shift":
                            pass

                        else:
                            if G.node[n]['label'] == "Wunderink" or G.node[n][
                                    "label"] == "Weiss":
                                G.node[n]["status"] = "I"
                                G.node[n][
                                    "infec_value"] = infect_threshold + 1.
                                list_I.append(G.node[n]['label'])

                    list_single_t_evolution = []
                    list_single_t_evolution.append(
                        2.0)  # I always start with TWO infected doctors!!
                    old_num_adopters = 2

                    for n in G.nodes(
                    ):  # I make some doctors immune (anyone except Weiss and Wunderink)
                        if (G.node[n]['type'] == "A") or (G.node[n]['type']
                                                          == "F"):
                            if G.node[n]['label'] != "Wunderink" and G.node[n][
                                    "label"] != "Weiss":
                                rand = random.random()
                                if rand < prob_Immune:
                                    G.node[n]["status"] = "Immune"

                    ################# the dynamics starts:

                    shift_length = 5  #i know the first shift (order 0) is of length 5

                    t = 0
                    while t < cutting_day:  # loop over shifts, in order   just until cutting day (training segment)

                        for n in G.nodes():
                            if G.node[n]['type'] == "shift" and G.node[n][
                                    'order'] == t:
                                shift_length = int(G.node[n]['shift_length'])
                                effective_shift_length = shift_length

                                if shift_length == 2 and n not in list_id_weekends_T3:
                                    effective_shift_length = 1  # because during weekends, the fellow does rounds one day with Att1 and the other day with Att2 (weekend shifts for T3 are two days long, with no shared fellows)

                                flag_possible_infection = 0
                                for doctor in G.neighbors(
                                        n
                                ):  #first i check if any doctor is infected in this shift
                                    if G.node[doctor]["status"] == "I":
                                        flag_possible_infection = 1

                                if flag_possible_infection:
                                    for doctor in G.neighbors(
                                            n
                                    ):  # then each doctor in that shift gets infected with prob_infection

                                        for i in range(
                                                effective_shift_length
                                        ):  # I repeat the infection process several times, to account for shift length
                                            if G.node[doctor]["status"] == "S":
                                                rand = random.random()
                                                if rand < prob_infection:  # with prob p the infection occurs

                                                    G.node[doctor][
                                                        "infec_value"] += dose  # and bumps the infection_value of that susceptible dr

                                                    if G.node[doctor][
                                                            "infec_value"] >= infect_threshold:  # becomes  infected

                                                        G.node[doctor][
                                                            "status"] = "I"

                                                        if G.node[doctor][
                                                                "type"] == "A":  # fellows participate in the dynamics, but i only consider the attendings as real adopters

                                                            list_I.append(
                                                                G.node[doctor]
                                                                ["label"])

                        new_num_adopters = len(list_I)

                        if shift_length == 5:  # i estimate that adoption happens in the middle of the shift
                            if t + 5 < cutting_day:
                                list_single_t_evolution.append(
                                    old_num_adopters)
                            if t + 4 < cutting_day:
                                list_single_t_evolution.append(
                                    old_num_adopters)
                            if t + 3 < cutting_day:
                                list_single_t_evolution.append(
                                    new_num_adopters)
                            if t + 2 < cutting_day:
                                list_single_t_evolution.append(
                                    new_num_adopters)
                            if t + 1 < cutting_day:
                                list_single_t_evolution.append(
                                    new_num_adopters)
                            t += 5

                        elif shift_length == 4:
                            if t + 4 < cutting_day:
                                list_single_t_evolution.append(
                                    old_num_adopters)
                            if t + 3 < cutting_day:
                                list_single_t_evolution.append(
                                    old_num_adopters)

                            if t + 2 < cutting_day:
                                list_single_t_evolution.append(
                                    new_num_adopters)

                            if t + 1 < cutting_day:
                                list_single_t_evolution.append(
                                    new_num_adopters)
                            t += 4

                        elif shift_length == 3:
                            if t + 3 < cutting_day:
                                list_single_t_evolution.append(
                                    old_num_adopters)

                            if t + 2 < cutting_day:
                                list_single_t_evolution.append(
                                    new_num_adopters)

                            if t + 1 < cutting_day:
                                list_single_t_evolution.append(
                                    new_num_adopters)

                            t += 3

                        elif shift_length == 2:
                            if t + 2 < cutting_day:
                                list_single_t_evolution.append(
                                    old_num_adopters)

                            if t + 1 < cutting_day:
                                list_single_t_evolution.append(
                                    new_num_adopters)

                            t += 2

                        elif shift_length == 1:
                            if t + 1 < cutting_day:
                                list_single_t_evolution.append(
                                    new_num_adopters)

                            t += 1

                        old_num_adopters = new_num_adopters

                    ######## end t loop

                    list_lists_t_evolutions.append(list_single_t_evolution)

                    list_dist_fixed_parameters.append(
                        compare_real_evol_vs_simus_to_be_called.
                        compare_two_curves(list_actual_evol_training,
                                           list_single_t_evolution))

                    list_dist_abs_at_ending_point_fixed_parameters.append(
                        abs(list_single_t_evolution[-1] -
                            list_actual_evol_training[-1])
                    )  # i save the distance at the ending point between the current simu and actual evol

                    #  print "actual:",len(list_actual_evol_training),"  simu:",len(list_single_t_evolution)   # 125, 125

                    list_final_num_infected.append(list_single_t_evolution[-1])

                    list_dist_at_ending_point_fixed_parameters.append(
                        list_single_t_evolution[-1] -
                        list_actual_evol_training[-1]
                    )  # i save the distance at the ending point between the current simu and actual evol

                    for n in G.nodes():
                        doctor = G.node[n]["label"]
                        if G.node[n]['type'] != "shift":
                            if G.node[n]['status'] == "I":
                                if G.node[n]['type'] == "F":
                                    dict_fellow_freq_adoption_end[doctor] += 1.
                                    num_F_adopters += 1.
                                elif G.node[n]['type'] == "A":
                                    dict_att_freq_adoption_end[doctor] += 1.
                                    num_Att_adopters += 1.

                ######## end loop Niter for the training phase

                list_pair_dist_std_delta_end = []

                list_pair_dist_std_delta_end.append(
                    numpy.mean(list_dist_fixed_parameters)
                )  # average dist between the curves over Niter
                list_pair_dist_std_delta_end.append(
                    numpy.std(list_dist_fixed_parameters))

                list_pair_dist_std_delta_end.append(
                    numpy.mean(list_dist_abs_at_ending_point_fixed_parameters))

                if print_landscape == "YES":
                    file3 = open(output_file3,
                                 'at')  # i print out the landscape
                    print >> file3, prob_infection, prob_Immune, infect_threshold, dose, numpy.mean(
                        list_dist_abs_at_ending_point_fixed_parameters
                    ), numpy.mean(list_dist_fixed_parameters), numpy.mean(
                        list_final_num_infected), numpy.std(
                            list_final_num_infected
                        ), numpy.std(list_final_num_infected) / numpy.mean(
                            list_final_num_infected)
                    file3.close()

                if print_training_evol == "YES":
                    file2 = open(output_file2, 'wt')
                    for s in range(len(list_single_t_evolution)):
                        list_fixed_t = []
                        for iter in range(Niter):
                            list_fixed_t.append(
                                list_lists_t_evolutions[iter][s])
                        print >> file2, s, numpy.mean(list_fixed_t)
                        last_adoption_value = numpy.mean(list_fixed_t)
                    file2.close()
                    print "written evolution file:", output_file2

                    print "\nFraction of times each fellow was an adopter at the end of the training segment:"
                    for n in G.nodes():
                        if G.node[n]['type'] == "F":
                            doctor = G.node[n]["label"]
                            dict_fellow_freq_adoption_end[
                                doctor] = dict_fellow_freq_adoption_end[
                                    doctor] / float(Niter)

                    sorted_list_tuples = sorted(
                        dict_fellow_freq_adoption_end.iteritems(),
                        key=operator.itemgetter(1),
                        reverse=True)

                    file4 = open(output_file4, 'wt')
                    print >> file4, last_adoption_value, "(value adoption among Att at cutting day)", "Avg # F adopters", num_F_adopters / Niter, "Avg # A adopters", num_Att_adopters / Niter
                    for pair in sorted_list_tuples:
                        print >> file4, pair[0], pair[1]
                    file4.close()
                    print "written adoption frequency file for fellows:", output_file4

                value = numpy.mean(list_dist_fixed_parameters) * numpy.mean(
                    list_dist_abs_at_ending_point_fixed_parameters
                )  # if SD=0 it is a problem, because then that is the minimum value, but not the optimum I am looking for

                dict_filenames_prod_distances[output_file2] = value

                if (
                        numpy.mean(
                            list_dist_abs_at_ending_point_fixed_parameters)
                ) <= delta_end:  # i only consider situations close enough at the ending point

                    dict_filenames_tot_distance[
                        output_file2] = list_pair_dist_std_delta_end

                dose += delta_dose
            prob_infection += delta_prob
        prob_Immune += delta_prob_Immune

    if print_training_evol == "NO":  #if i am exploring the whole space

        string_name = "infection_memory_training_" + fixed_param + str(
            Niter
        ) + "iter_day" + str(
            cutting_day
        ) + "_Att_only_middle.dat"  # for the "Results" file with the sorted list of files
        list_order_dict = compare_real_evol_vs_simus_to_be_called.pick_minimum_same_end(
            dict_filenames_tot_distance, string_name, Niter, cutting_day)

        # it returns a list of tuples like this:  ('../Results/network_final_schedule_withTeam3_local/infection/Average_time_evolution_Infection_training_p0.7_Immune0.0_2iter_2012.dat', [2540.0, 208.0, 1.0]), the best set of parameters being the first one of the elements in the list.

        list_order_dict2 = compare_real_evol_vs_simus_to_be_called.pick_minimum_prod_distances(
            dict_filenames_prod_distances, string_name, Niter, cutting_day)

    if print_landscape == "YES":
        print "printed out landscape file:", output_file3
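
Note: each example shows only the body of main; the imports and the command-line driver are not part of the listing. The sketch below is an assumption, not part of the original code: it shows the scaffolding these scripts rely on. The project-specific helpers (look_for_T3_weekends, set_ic, persuasion, update_opinions, mutual_reinforcement) and modules (compare_real_evol_vs_simus_to_be_called, histograma_gral_negv_posit, histograma_bines_gral) are assumed to live alongside the script and are not reproduced here.

# Hypothetical scaffolding (not in the original listing): the imports the
# examples rely on, plus a command-line entry point that passes the GML
# schedule network to main().
import sys
import random
import operator
import itertools

import numpy
import networkx as nx   # the examples use the old G.node[n][...] attribute API, so a pre-2.x networkx is assumed

# project-specific helper modules, assumed importable from the same package:
# import compare_real_evol_vs_simus_to_be_called
# import histograma_gral_negv_posit
# import histograma_bines_gral

if __name__ == '__main__':
    if len(sys.argv) > 1:
        main(sys.argv[1])   # e.g. python this_script.py my_schedule.gml  (hypothetical filename)
    else:
        print "Usage: python this_script.py  path/to/schedule_network.gml"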
Example #2
def main(graph_name):

   cutting_day=175  # to separate training and testing

   G = nx.read_gml(graph_name)

   all_team="NO"   # whether the whole team counts as adopters or not

   list_id_weekends_T3=look_for_T3_weekends(G)  # T3 doesn't share fellows on the weekend (but they are the exception)

   dir_real_data='../Results/'
   Nbins=20   # for the histogram of the sum of distances

   delta_end=3.  # maximum +/- difference in number of adopting drs allowed at the end of the evolution (no realization ends up closer than this; with 2 I get an empty list)

   Niter=1000

   fixed_param=""   # or "FIXED_Pimm0_"  # tag for the Results file that contains the sorted list of best parameters


######################################################################################
#  I read the file of the actual evolution of the idea spreading in the hospital:   ##
######################################################################################

   filename_actual_evol="../Results/Actual_evolution_adopters_from_inference.dat"

   file1=open(filename_actual_evol,'r')   # I read the file: list_dates_and_names_current_adopters.txt (created with extract_real_evolution_number_adopters.py)
   list_lines_file=file1.readlines()
            

   list_actual_evol=[]
   for line in list_lines_file:   # use list_lines_file[1:] to exclude the header row

      num_adopters= float(line.split("\t")[1])
      list_actual_evol.append(num_adopters)

   list_actual_evol_training=list_actual_evol[:cutting_day]
  # list_actual_evol_testing=list_actual_evol[(cutting_day-1):]  # I don't need this one

##################################################################


   prob_min=0.10
   prob_max=1.01
   delta_prob=0.1

   prob_Immune_min=0.0
   prob_Immune_max=1.001
   delta_prob_Immune=0.1

   # here the threshold is personal, drawn randomly from (0,1) for each doctor

   # dose of a single encounter with an infected doctor (it can't be zero or it doesn't make sense)
   dose_min=0.05      # infect_threshold_min
   dose_max=1.001     # infect_threshold_min/10.
   delta_dose=0.05    # infect_threshold_min/10.

   dir="../Results/weight_shifts/infection/"

   dict_filenames_tot_distance={}   # I save the filename as key and the total distance from that curve to the original one as value
   dict_filenames_prod_distances={}

  

   prob_Immune=prob_Immune_min
   while prob_Immune<= prob_Immune_max:
        
      print "prob Immune:",prob_Immune

      prob_infection=prob_min
      while prob_infection<= prob_max:
                 
            print "  p:",prob_infection                              
            
            dose=dose_min
            while dose <= dose_max:
               
               print "  dose:",dose

               output_file2=dir+"Average_time_evolution_Infection_memory_training_p"+str(prob_infection)+"_"+"Immune"+str(prob_Immune)+"_threshold_from_distrib_dose"+str(dose)+"_"+str(Niter)+"iter_day"+str(cutting_day)+"_A_F_inferred.dat"
             #  file2 = open(output_file2,'wt')                                       
              # file2.close()
               
               list_lists_t_evolutions=[]     # I create the empty list of lists for the Niter temporal evolutions
               
               list_dist_fixed_parameters=[]
               list_dist_at_ending_point_fixed_parameters=[]
               list_dist_abs_at_ending_point_fixed_parameters=[]
              
               
               for iter in range(Niter):
            
              #    print "     iter:",iter

            ########### set I.C.


                  list_I=[]  #list infected doctors
                  max_order=0
                  for n in G.nodes():
                     G.node[n]["status"]="S"  # all nodes are Susceptible
                     G.node[n]["infec_value"]=0. 
                     G.node[n]["personal_threshold"]=random.random()  # for a dr to become infected

                     if G.node[n]['type']=="shift":                        
                        if  G.node[n]['order']>max_order:
                           max_order=G.node[n]['order'] # to get the last shift-order for the time loop
                     else:
                        if G.node[n]['label']=="Wunderink"  or G.node[n]["label"]=="Weiss":           
                           G.node[n]["status"]="I"                       
                           G.node[n]["infec_value"]=G.node[n]["personal_threshold"]+ 1.
                           list_I.append(G.node[n]['label'])
          

            
           
                  list_single_t_evolution=[]
                  list_single_t_evolution.append(2.0)  # I always start with TWO infected doctors!!


                  for n in G.nodes():   # I make some doctors immune (anyone except Weiss and Wunderink)
                     if (G.node[n]['type']=="A") or ( G.node[n]['type']=="F"):
                        if G.node[n]['label']!="Wunderink"  and G.node[n]["label"]!="Weiss": 
                           rand=random.random()
                           if rand< prob_Immune:
                              G.node[n]["status"]="Immune"
                              
                  ################# the dynamics starts: 
            
                  t=1
                  while t< cutting_day:  # loop over shifts, in order           
                     for n in G.nodes():
                        if G.node[n]['type']=="shift" and G.node[n]['order']==t:
                           shift_length=int(G.node[n]['shift_length'])

                           if shift_length==2 and n not in list_id_weekends_T3:
                              shift_length=1   # because during weekends, the fellow does rounds one day with Att1 and the other day with Att2 (weekend shifts for T3 are two days long, with no shared fellows)

                           flag_possible_infection=0
                           for doctor in G.neighbors(n): #first i check if any doctor is infected in this shift
                              if G.node[doctor]["status"]=="I":
                                 flag_possible_infection=1
                                

                           if flag_possible_infection:
                              for doctor in G.neighbors(n): # then each doctor in that shift gets infected with prob_infection

                                 for i in range(shift_length):   # I repeat the infection process several times, to account for shift length
                                    if G.node[doctor]["status"]=="S":
                                       rand=random.random()
                                       if rand<prob_infection:  # with prob p the infection occurs
                                          
                                          G.node[doctor]["infec_value"]+=dose  # and bumps the infection_value of that susceptible dr
                                          
                                          if G.node[doctor]["infec_value"]>= G.node[doctor]["personal_threshold"]:  # the threshold for infection is personal
                                             
                                             G.node[doctor]["status"]="I"
                                             
                                        
                                                
                                             list_I.append(G.node[doctor]["label"])
                                                

                     list_single_t_evolution.append(float(len(list_I)))

                     t+=1   
                     ######## end t loop

                  list_lists_t_evolutions.append(list_single_t_evolution)
             
 
                  #print "actual:",len(list_actual_evol_training),"  simu:",len(list_single_t_evolution)
                  list_dist_fixed_parameters.append(compare_real_evol_vs_simus_to_be_called.compare_two_curves( list_actual_evol_training,list_single_t_evolution))
                  
                  list_dist_abs_at_ending_point_fixed_parameters.append( abs(list_single_t_evolution[-1]-list_actual_evol_training[-1]) )   # i save the distance at the ending point between the current simu and actual evol
                  list_dist_at_ending_point_fixed_parameters.append( list_single_t_evolution[-1]-list_actual_evol_training[-1])    # i save the distance at the ending point between the current simu and actual evol

                           
                  ######## end loop Niter for the training phase
      
               list_pair_dist_std_delta_end=[]
               
               list_pair_dist_std_delta_end.append(numpy.mean(list_dist_fixed_parameters) )   # average dist between the curves over Niter
               list_pair_dist_std_delta_end.append(numpy.std(list_dist_fixed_parameters) )
               
               list_pair_dist_std_delta_end.append(numpy.mean(list_dist_abs_at_ending_point_fixed_parameters))
               

               value=numpy.mean(list_dist_fixed_parameters)*numpy.mean(list_dist_abs_at_ending_point_fixed_parameters)   # if SD=0 it is a problem, because then that is the minimum value, but not the optimum I am looking for
    
               dict_filenames_prod_distances[output_file2]=  value

               
               if (numpy.mean(list_dist_abs_at_ending_point_fixed_parameters)) <= delta_end:  # i only consider situations close enough at the ending point   
                  
                  dict_filenames_tot_distance[output_file2]=list_pair_dist_std_delta_end
                                                                                          

                  histogram_filename="../Results/weight_shifts/histogr_raw_distances_ending_infection_memory_training_p"+str(prob_infection)+"_"+"Immune"+str(prob_Immune)+"_threshold_from_distrib_dose"+str(dose)+"_"+str(Niter)+"iter_day"+str(cutting_day)+"_A_F_inferred.dat"
                  histograma_gral_negv_posit.histograma(list_dist_at_ending_point_fixed_parameters,histogram_filename)
                  
                  histogram_filename2="../Results/weight_shifts/histogr_sum_dist_traject_infection_memory_training_p"+str(prob_infection)+"_"+"Immune"+str(prob_Immune)+"_threshold_from_distrib_dose"+str(dose)+"_"+str(Niter)+"iter_day"+str(cutting_day)+"_A_F_inferred.dat"
                  
                  histograma_bines_gral.histograma_bins(list_dist_fixed_parameters,Nbins,histogram_filename2)
                  
                  
                  print  "written histogram file: ",histogram_filename
                  print  "written histogram file: ",histogram_filename2


               dose+= delta_dose          
            prob_infection+= delta_prob
      prob_Immune+= delta_prob_Immune

   string_name="infection_memory_training_"+fixed_param+str(Niter)+"iter_day"+str(cutting_day)+"_A_F_inferred.dat"   # for the "Results" file with the sorted list of files
   
   list_order_dict= compare_real_evol_vs_simus_to_be_called.pick_minimum_same_end(dict_filenames_tot_distance,string_name,Niter,cutting_day)
# it returns a list of tuples like this:  ('../Results/network_final_schedule_withTeam3_local/infection/Average_time_evolution_Infection_training_p0.7_Immune0.0_2iter_2012.dat', [2540.0, 208.0, 1.0]), the best set of parameters being the first one of the elements in that list.

   list_order_dict2= compare_real_evol_vs_simus_to_be_called.pick_minimum_prod_distances(dict_filenames_prod_distances,string_name,Niter,cutting_day)


   

   prob_infection=float(list_order_dict[0][0].split("_p")[1].split("_")[0])
   prob_Immune=float(list_order_dict[0][0].split("_Immune")[1].split("_")[0]) 
   dose=float(list_order_dict[0][0].split("_dose")[1].split("_")[0])
 
  
   
  
   print "\nOptimum parameters (old method) at day",cutting_day," are: p=",prob_infection," Pimmune=",prob_Immune," infection threshold from distribution, and dose=",dose
   


  # optimum_filename=list_order_dict2[0][0]

   prob_infection=float(list_order_dict2[0][0].split("_p")[1].split("_")[0])
   prob_Immune=float(list_order_dict2[0][0].split("_Immune")[1].split("_")[0])  
   dose=float(list_order_dict2[0][0].split("_dose")[1].split("_")[0])
 
  
 
   print "Optimum parameters (product of distances and SDs) at day",cutting_day," are: p=",prob_infection," Pimmune=",prob_Immune," infection threshold from distribution, and dose=",dose
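
The last block of Example #2 recovers the optimum parameter values by splitting the winning filename on the _p, _Immune and _dose tags that were embedded when the file was named. A small helper in the same spirit, hypothetical and not part of the original code, makes that parsing reusable:

def params_from_filename(filename):
    # Hypothetical helper mirroring the splits used above: recover the parameter
    # values embedded in an output filename such as
    # "..._p0.7_Immune0.2_threshold_from_distrib_dose0.45_1000iter_day175_A_F_inferred.dat".
    prob_infection = float(filename.split("_p")[1].split("_")[0])
    prob_Immune = float(filename.split("_Immune")[1].split("_")[0])
    dose = float(filename.split("_dose")[1].split("_")[0])
    return prob_infection, prob_Immune, dose

# usage sketch:  prob_infection, prob_Immune, dose = params_from_filename(list_order_dict[0][0])

Example #3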
def main(graph_name):

    cutting_day = 175  # to separate training and testing

    G = nx.read_gml(graph_name)

    list_id_weekends_T3 = look_for_T3_weekends(
        G
    )  # T3 doesn't share fellows on the weekend (but they are the exception)

    all_team = "NO"  # whether the whole team counts as adopters or not
    Nbins = 20  # for the histogram of sum of distances

    dir_real_data = '../Results/'

    dir = "../Results/weight_shifts/infection/"

    delta_end = 3.  # maximum +/- difference in number of adopting drs allowed at the end of the evolution (no realization ends up closer than this; with 2 I get an empty list)

    Niter_training = 1000

    fixed_param = ""  # or "FIXED_Pimm0_"  # tag for the Results file that contains the sorted list of best parameters

    output_file3 = "../Results/weight_shifts/Landscape_parameters_infection_train_test_" + str(
        Niter_training) + "iter.dat"
    file3 = open(output_file3, 'wt')

    file3.close()

    ######################################################################################
    #  I read the file of the actual evolution of the idea spreading in the hospital:   ##
    ######################################################################################

    if all_team == "YES":
        print "remember that now i use the file of adopters without fellows\n../Results/Actual_evolution_adopters_NO_fellows_only_attendings.dat"
        exit()

    else:
        filename_actual_evol = "../Results/Actual_evolution_adopters_NO_fellows_only_attendings.dat"

    file1 = open(
        filename_actual_evol, 'r'
    )  ## i read the file:  list_dates_and_names_current_adopters.txt  (created with: extract_real_evolution_number_adopters.py)
    list_lines_file = file1.readlines()

    list_actual_evol = []
    for line in list_lines_file:  # [1:]:   # i exclude the first row

        num_adopters = float(line.split(" ")[1])
        list_actual_evol.append(num_adopters)

    list_actual_evol_training = list_actual_evol[:cutting_day]
    #   list_actual_evol_testing=list_actual_evol[(cutting_day-1):]   #i dont use this

    ##################################################################

    #../Results/network_final_schedule_withTeam3/infection/Average_time_evolution_Infection_p0.9_Immune0.5_1000iter_2012.dat

    prob_min = 0.0
    prob_max = 1.01
    delta_prob = 0.1

    prob_Immune_min = 0.00
    prob_Immune_max = 1.01
    delta_prob_Immune = 0.1

    list_dist_at_ending_point_fixed_parameters = []
    dict_filenames_tot_distance = {
    }  # i will save the filename as key and the tot distance from that curve to the original one
    dict_filenames_prod_distances = {}

    prob_Immune = prob_Immune_min
    while prob_Immune <= prob_Immune_max:

        print "prob Immune:", prob_Immune

        prob_infection = prob_min
        while prob_infection <= prob_max:

            print "  p:", prob_infection

            output_file2 = dir + "Average_time_evolution_Infection_training_p" + str(
                prob_infection) + "_" + "Immune" + str(
                    prob_Immune) + "_" + str(
                        Niter_training) + "iter_2012_avg_ic_day" + str(
                            cutting_day) + ".dat"
            #   file2 = open(output_file2,'wt')   # I don't need to write it, because I will use the whole file from the full fit, with the parameter values that the testing up to day 125 gives me
            #  file2.close()

            # I create the empty list of lists for the Niter temporal evolutions
            num_shifts = 0
            num_Drs = 0.
            for n in G.nodes():
                G.node[n]["status"] = "S"
                if G.node[n]['type'] == "shift":
                    num_shifts += 1
                else:
                    num_Drs += 1.

        #  list_final_I_values_fixed_p=[]  # i dont care about the final values right now, but about the whole time evol
            list_lists_t_evolutions = []

            list_dist_fixed_parameters = []
            list_dist_abs_at_ending_point_fixed_parameters = []
            list_final_num_infected = []

            for iter in range(Niter_training):

                #   print "     iter:",iter

                list_I = []  #list infected doctors
                list_ordering = []
                list_s = []
                list_A = []
                list_F = []

                ########### set I.C.

                max_order = 0
                for n in G.nodes():
                    G.node[n]["status"] = "S"  # all nodes are Susceptible
                    if G.node[n]['type'] == "shift":
                        list_s.append(n)
                        if G.node[n]['order'] > max_order:
                            max_order = G.node[n]['order']
                    else:
                        if G.node[n]['label'] == "Wunderink" or G.node[n][
                                "label"] == "Weiss":
                            G.node[n]["status"] = "I"
                            list_I.append(G.node[n]['label'])

                        if G.node[n]['type'] == "A":
                            list_A.append(n)

                        if G.node[n]['type'] == "F":
                            list_F.append(n)

                list_single_t_evolution = []
                list_single_t_evolution.append(
                    2.0)  # I always start with TWO infected doctors!!

                for n in G.nodes(
                ):  # I make some doctors immune (anyone except Weiss and Wunderink)
                    if (G.node[n]['type'] == "A") or (G.node[n]['type']
                                                      == "F"):
                        if G.node[n]['label'] != "Wunderink" and G.node[n][
                                "label"] != "Weiss":
                            rand = random.random()
                            if rand < prob_Immune:
                                G.node[n]["status"] = "Immune"

            #   print max_order

            ################# the dynamics starts:

                t = 1
                while t < cutting_day:  # loop over shifts, in order   just until cutting day (training segment)
                    for n in G.nodes():
                        if G.node[n]['type'] == "shift" and G.node[n][
                                'order'] == t:

                            shift_lenght = int(G.node[n]['shift_lenght'])

                            if shift_lenght == 2 and n not in list_id_weekends_T3:
                                shift_lenght = 1  # because during weekends, the fellow does rounds one day with Att1 and the other day with Att2 (weekend shifts for T3 are two days long, with no shared fellows)
                            #  print "one-day weekend", G.node[n]['label'],G.node[n]['shift_lenght']

                            flag_possible_infection = 0
                            for doctor in G.neighbors(
                                    n
                            ):  #first i check if any doctor is infected in this shift
                                if G.node[doctor]["status"] == "I":
                                    flag_possible_infection = 1

                            if flag_possible_infection:
                                for doctor in G.neighbors(
                                        n
                                ):  # then each doctor in that shift gets infected with prob_infection
                                    for i in range(shift_lenght):
                                        if G.node[doctor]["status"] == "S":
                                            rand = random.random()
                                            if rand < prob_infection:
                                                G.node[doctor]["status"] = "I"
                                                if G.node[doctor][
                                                        "type"] == "A":
                                                    list_I.append(
                                                        G.node[doctor]
                                                        ["label"])

                    list_single_t_evolution.append(float(
                        len(list_I)))  #/(len(list_A)+len(list_F)))

                    t += 1

                ######## end t loop

                list_lists_t_evolutions.append(list_single_t_evolution)

                list_dist_fixed_parameters.append(
                    compare_real_evol_vs_simus_to_be_called.compare_two_curves(
                        list_actual_evol_training, list_single_t_evolution))

                list_dist_abs_at_ending_point_fixed_parameters.append(
                    abs(list_single_t_evolution[-1] -
                        list_actual_evol_training[-1])
                )  # i save the distance at the ending point between the current simu and actual evol

                #  print "actual:",len(list_actual_evol_training),"  simu:",len(list_single_t_evolution)   # 125, 125

                list_final_num_infected.append(list_single_t_evolution[-1])

                list_dist_at_ending_point_fixed_parameters.append(
                    list_single_t_evolution[-1] - list_actual_evol_training[-1]
                )  # i save the distance at the ending point between the current simu and actual evol

            ######## end loop Niter for the training phase

            list_pair_dist_std_delta_end = []

            list_pair_dist_std_delta_end.append(
                numpy.mean(list_dist_fixed_parameters)
            )  # average dist between the curves over Niter
            list_pair_dist_std_delta_end.append(
                numpy.std(list_dist_fixed_parameters))

            list_pair_dist_std_delta_end.append(
                numpy.mean(list_dist_abs_at_ending_point_fixed_parameters))

            file3 = open(output_file3, 'at')  # i print out the landscape
            print >> file3, prob_infection, prob_Immune, numpy.mean(
                list_dist_abs_at_ending_point_fixed_parameters
            ), numpy.mean(list_dist_fixed_parameters), numpy.mean(
                list_final_num_infected
            ), numpy.std(list_final_num_infected), numpy.std(
                list_final_num_infected) / numpy.mean(list_final_num_infected)
            file3.close()

            histogram_filename = "../Results/weight_shifts/histogr_raw_distances_ending_test_train_infection_p" + str(
                prob_infection) + "_Immune" + str(prob_Immune) + "_" + str(
                    Niter_training) + "iter_day" + str(cutting_day) + ".dat"
            histograma_gral_negv_posit.histograma(
                list_dist_at_ending_point_fixed_parameters, histogram_filename)

            histogram_filename2 = "../Results/weight_shifts/histogr_sum_dist_traject_infection_training_p" + str(
                prob_infection
            ) + "_" + "Immune" + str(prob_Immune) + "_" + str(
                Niter_training) + "iter_day" + str(cutting_day) + ".dat"

            histograma_bines_gral.histograma_bins(list_dist_fixed_parameters,
                                                  Nbins, histogram_filename2)

            print "written histogram file: ", histogram_filename
            print "written histogram file: ", histogram_filename2

            value = numpy.mean(list_dist_fixed_parameters) * numpy.mean(
                list_dist_abs_at_ending_point_fixed_parameters
            )  # if SD=0 it is a problem, because then that is the minimum value, but not the optimum I am looking for

            dict_filenames_prod_distances[output_file2] = value

            if (
                    numpy.mean(list_dist_abs_at_ending_point_fixed_parameters)
            ) <= delta_end:  # i only consider situations close enough at the ending point

                dict_filenames_tot_distance[
                    output_file2] = list_pair_dist_std_delta_end

                print numpy.mean(list_dist_abs_at_ending_point_fixed_parameters
                                 ), "added scenario:", output_file2

        # file2 = open(output_file2,'at')
        #for s in range(len(list_single_t_evolution)):
        #   list_fixed_t=[]
        #  for iter in range (Niter_training):
        #     list_fixed_t.append(list_lists_t_evolutions[iter][s])
        #print >> file2, s,numpy.mean(list_fixed_t)
        #file2.close()

            prob_infection += delta_prob
        prob_Immune += delta_prob_Immune

    list_order_dict = compare_real_evol_vs_simus_to_be_called.pick_minimum_same_end(
        dict_filenames_tot_distance, "Infection_training_weight", all_team,
        Niter_training, cutting_day)

    # it returns a list of tuples like this:  ('../Results/network_final_schedule_withTeam3_local/infection/Average_time_evolution_Infection_training_p0.7_Immune0.0_2iter_2012.dat', [2540.0, 208.0, 1.0]), the best set of parameters being the first one of the elements in the list.

    string_name = "infection_training_" + fixed_param + str(
        Niter_training) + "iter_day" + str(
            cutting_day
        ) + ".dat"  # for the "Results" file with the sorted list of files

    list_order_dict2 = compare_real_evol_vs_simus_to_be_called.pick_minimum_prod_distances(
        dict_filenames_prod_distances, string_name, all_team, Niter_training,
        cutting_day)

    optimum_filename = list_order_dict[0][0]
    prob_infection = float(list_order_dict[0][0].split("_p")[1].split("_")[0])
    prob_Immune = float(
        list_order_dict[0][0].split("_Immune")[1].split("_")[0])

    print "Optimum parameters (old method) at day", cutting_day, " are: p=", prob_infection, " and Pimmune=", prob_Immune

    #  I already know the optimum; now I run the dynamics with those values, starting from the average state at the cutting point, and test:

    optimum_filename = list_order_dict2[0][0]
    prob_infection = float(list_order_dict2[0][0].split("_p")[1].split("_")[0])
    prob_Immune = float(
        list_order_dict2[0][0].split("_Immune")[1].split("_")[0])

    print "Optimum parameters (product of distances along_traject and at the end) at day", cutting_day, " are: p=", prob_infection, " and Pimmune=", prob_Immune

    print "Run that simulation with the optimum parameter set:", optimum_filename

    print "printed out landscape file:", output_file3

    output_file10 = "../Results/weight_shifts/Summary_results_training_segment_infection_p" + str(
        prob_infection) + "_" + "Immune" + str(prob_Immune) + "_" + str(
            Niter_training) + "iter_avg_ic_day" + str(cutting_day) + ".dat"
    file10 = open(output_file10, 'wt')

    print >> file10, "Summary results from train-testing infection with", Niter_training, "iter, using all the individual cutting points as IC, and with values for the parameters:  prob_inf", prob_infection, " prob immune:", prob_Immune, "\n"

    print >> file10, "Look for the file (or run that simulation) with the optimum parameter set:", optimum_filename
    file10.close()
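
Examples #1 and #4 record the adoption curve day by day through one "if t+k < cutting_day" block per shift length, on the assumption that adoption happens in the middle of the shift: the first half of the shift days keep the old adopter count and the remaining days get the new one. The helper below is a hypothetical, compact sketch of that bookkeeping, equivalent in intent to those blocks (the originals pair each appended value with a slightly different day guard near the cutting day, so this is not a drop-in replacement):

def append_shift_days(series, old_count, new_count, t, shift_length, cutting_day):
    # Hypothetical sketch of the per-shift-length blocks used in Examples #1 and #4:
    # adoption is assumed to happen in the middle of the shift, so the first half
    # of the shift days keep the old adopter count and the rest get the new one;
    # days at or beyond cutting_day are not recorded.
    switch_day = shift_length // 2   # e.g. days 1-2 of a 5-day shift keep the old count
    for day in range(1, shift_length + 1):
        if t + day < cutting_day:
            series.append(old_count if day <= switch_day else new_count)
    return t + shift_length   # the caller advances t by the full shift length

# usage sketch inside the "while t < cutting_day" loop:
#   t = append_shift_days(list_single_t_evolution, old_num_adopters, new_num_adopters,
#                         t, shift_length, cutting_day)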
Example #4
def main(graph_name):
 

   G = nx.read_gml(graph_name)
 
   list_id_weekends_T3=look_for_T3_weekends(G)  # T3 doesn't share fellows on the weekend (but they are the exception)


   cutting_day=243     # to separate training and testing



   Niter=1000

   delta_end=3  # maximum +/- difference in number of adopting drs allowed at the end of the evolution

   dir_real_data='../Results/'
   dir="../Results/weight_shifts/persuasion/"  


  
   Nbins=20   # for the histogram of sum of distances

######################################################################################
#  I read the file of the actual evolution of the idea spreading in the hospital:   ##
######################################################################################



  
   filename_actual_evol="../Data/Actual_evolution_adopters_NO_fellows_only_attendings.dat"
  


   file1=open(filename_actual_evol,'r')         ## i read the file:  list_dates_and_names_current_adopters.txt  (created with: extract_real_evolution_number_adopters.py)
   list_lines_file=file1.readlines()
            

   list_actual_evol=[]  
   for line in list_lines_file:   # use list_lines_file[1:] to exclude the header row
     
      num_adopters= float(line.split("\t")[1])          
      list_actual_evol.append(num_adopters)

   list_actual_evol_training=list_actual_evol[:cutting_day]

##################################################################


#../Results/network_final_schedule_withTeam3/Time_evolutions_Persuasion_alpha0.2_damping0.0_mutual_encourg0.7_threshold0.4_unif_distr_50iter_2012_seed31Oct_finalnetwork.dat

 
   alpha_F_min=0.100    # alpha=0: nobody changes their mind
   alpha_F_max=0.1001
   delta_alpha_F=0.10   # avoid 1.0 or the dynamics gets totally stuck and cannot predict anything
   

   min_damping=0.000   #0.0     # it's harder to go back from YES to NO again: =1 means no effect, =0.5 half the movement from Y->N compared to the other way around, =0 never go back from Y to N
   max_damping=0.0001    #0.451
   delta_damping=0.10  
      

   min_mutual_encouragement=0.300   # when two Adopters meet, they convince each other even more
   max_mutual_encouragement=0.301   
   delta_mutual_encouragement=0.10
   
   
   threshold_min=0.500   # one must be larger than this to be an Adopter
   threshold_max=0.501
   delta_threshold=0.10  # avoid 1.0 or the dynamics gets totally stuck and cannot predict anything
 
   fixed_param="FIXED_threshold0.5_damping0_"    # or ""  # for the Results file that contains the sorted list of best parameters


   print_landscape="NO"   # or "YES"  # print the landscape for the whole parameter exploration

   print_training_evol= "YES"   # or "NO"  # once I know the best fit for the training segment, I run it again to get the curve

   if print_landscape =="YES":
      
      output_file3="../Results/weight_shifts/Landscape_parameters_persuasion_train_"+fixed_param+str(Niter)+"iter_Att_only_middle_day"+str(cutting_day)+".dat"  
      file3 = open(output_file3,'wt')        
      file3.close()

 
   
   
   print "\n\nPersuasion process on network, with Niter:",Niter
   
   
   dict_filenames_tot_distance={}   # i will save the filename as key and the tot distance from that curve to the original one
   dict_filenames_prod_distances={}   


  

   threshold=threshold_min
   while   threshold<= threshold_max:
      print   "threshold:",threshold

      alpha_F=alpha_F_min
      while alpha_F<= alpha_F_max:            # I explore the whole parameter space and create a file for each set of values
        alpha_A=1.0*alpha_F
        print "  alpha_F:",alpha_F

        mutual_encouragement=min_mutual_encouragement  
        while  mutual_encouragement <= max_mutual_encouragement:
          print "    mutual_encouragement:",mutual_encouragement

          damping=min_damping
          while   damping <= max_damping:
            print "      damping:",damping


         
#            dir="../Results/weight_shifts/persuasion/alpha%.2f_damping%.2f/"  % (alpha_F, damping )
           
            output_file=dir+"Time_evolutions_Persuasion_training_alpha"+str(alpha_F)+"_damping"+str(damping)+"_mutual_encourg"+str(mutual_encouragement)+"_threshold"+str(threshold)+"_"+str(Niter)+"iter_"+str(cutting_day)+"_Att_only_middle.dat"  



            output_file4=dir+"List_adopters_fellows_descending_frequency_persuasion_training_alpha"+str(alpha_F)+"_damping"+str(damping)+"_mutual_encourg"+str(mutual_encouragement)+"_threshold"+str(threshold)+"_"+str(Niter)+"iter_"+str(cutting_day)+"_Att_only_middle.dat"  


            num_Att_adopters=0.
            num_F_adopters=0.
            dict_att_freq_adoption_end={}   # to keep track of which attending is an adopter at the end (to use along with the real IC)
            dict_fellow_freq_adoption_end={}   # to keep track of which fellow is an adopter at the end (to use along with the real IC)
            for n in G.nodes():              
               doctor=G.node[n]["label"]       
               if G.node[n]['type'] =="F":                      
                  dict_fellow_freq_adoption_end[doctor]=0.
               elif G.node[n]['type'] =="A":                      
                  dict_att_freq_adoption_end[doctor]=0.



            time_evol_number_adopters_ITER=[]  # list of complete single realizations of the dynamics
            list_dist_fixed_parameters=[]
            list_dist_at_ending_point_fixed_parameters=[]
            list_dist_abs_at_ending_point_fixed_parameters=[]

           
            list_networks_at_cutting_day=[]

            list_final_num_adopt=[]


            for iter in range(Niter):

               # print "         ",iter
              
           
                time_evol_number_adopters=[]   # for a single realization of the dynamics               

                num_adopters , seed_shift ,max_shift= set_ic(G,threshold)   # I establish who is an Adopter and who is a NonAdopter initially, and count how many shifts I have in total

                time_evol_number_adopters.append(float(num_adopters))               
               

                old_num_adopters=num_adopters

                
               ########### the dynamics starts:                 
             
                shift_length=5    #i know the first shift (order 0) is of length 5

                t=0   
                while t< cutting_day:  # loop over shifts, in chronological order  (the order is the day index since seeding_day) 
                                             
                    for n in G.nodes():
                        if G.node[n]['type']=="shift" and G.node[n]['order']==t:  # I look for the shift corresponding to that time step (not all t's exist as 'orders' in the network, just the days corresponding to the beginning of each shift)

                            shift_length=int(G.node[n]['shift_length'])
                            effective_shift_length=shift_length

                            if shift_length==2 and n not in list_id_weekends_T3:
                               effective_shift_length=1   # because during weekends, the fellow does rounds one day with Att1 and the other day with Att2 (weekend shifts for T3 are two days long, with no shared fellows)



                            flag_possible_persuasion=0
                            for doctor in G.neighbors(n):                               
                                if G.node[doctor]["status"]=="Adopter":   #first i check if any doctor is an adopter in this shift         
                                    flag_possible_persuasion=1                               
                                    break

                            if flag_possible_persuasion==1:
                                list_doctors=[]
                                for doctor in G.neighbors(n):   # for all drs in that shift
                                    list_doctors.append(doctor)
                                
                                
                                pairs=itertools.combinations(list_doctors,2)    # because the shift can have 2 but also 3 doctors
                                for pair in pairs:
                                    doctor1=pair[0]
                                    doctor2=pair[1]
                                                                                        
                                    if G.node[doctor1]['status'] != G.node[doctor2]['status']:  # if they think differently, 
                                                                                              # there will be persuasion
                                        persuasion(G,damping,doctor1,doctor2,alpha_A,alpha_F,threshold,effective_shift_length)   # I move their opinion values
                                        update_opinions(G,threshold,doctor1,doctor2) #  I update the status and make sure the opinion values stay within [0,1]
                                  
                                    else:  # if two Adopters meet, they encourage each other (if two NonAdopters, nothing happens)
                                   
                                       mutual_reinforcement(G,mutual_encouragement,doctor1,doctor2,shift_length)
                                                                 
                    list_ALL_Adopters=[]       
                    list_Adopters=[]        
                    for n in G.nodes():              
                       try:
                          if  G.node[n]["status"]=="Adopter":                                                    
                             if G.node[n]["label"] not in list_Adopters and G.node[n]["type"]=="A":
                                list_Adopters.append(G.node[n]["label"])
                             list_ALL_Adopters.append(G.node[n]["label"])

                       except: pass  # if the node is a shift, it doesnt have a 'status' attribute                   
                    new_num_adopters=len(list_Adopters)
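                    # record the adopter count day by day up to cutting_day; adoption is
                    # assumed to happen in the middle of the shift, so the first days of
                    # the shift keep the previous count (old_num_adopters) and the
                    # remaining days get the updated count (new_num_adopters)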


                    if  shift_length==5: # i estimate that adoption happens in the middle of the shift
                       if t+5 < cutting_day:
                          time_evol_number_adopters.append(old_num_adopters) 
                       if t+4 < cutting_day:
                          time_evol_number_adopters.append(old_num_adopters) 
                       if t+3 < cutting_day:
                          time_evol_number_adopters.append(new_num_adopters)
                       if t+2 < cutting_day:
                          time_evol_number_adopters.append(new_num_adopters)
                       if t+1 < cutting_day:
                          time_evol_number_adopters.append(new_num_adopters)
                       t+=5
                      
        
                    elif  shift_length==4:
                        if t+4 < cutting_day:
                           time_evol_number_adopters.append(old_num_adopters)                     
                        if t+3 < cutting_day:
                           time_evol_number_adopters.append(old_num_adopters) 

                        if t+2 < cutting_day:
                           time_evol_number_adopters.append(new_num_adopters)                       
                       
                        if t+1 < cutting_day:
                           time_evol_number_adopters.append(new_num_adopters)
                        t+=4
                     
 
                    elif  shift_length==3:
                        if t+3 < cutting_day:
                           time_evol_number_adopters.append(old_num_adopters)                     
                       
                        if t+2 < cutting_day:
                           time_evol_number_adopters.append(new_num_adopters)
                       
                        if t+1 < cutting_day:
                           time_evol_number_adopters.append(new_num_adopters)
                       
                        t+=3
                      


                    elif  shift_length==2:
                        if t+2 < cutting_day:
                           time_evol_number_adopters.append(old_num_adopters)                     
                       
                        if t+1 < cutting_day:
                           time_evol_number_adopters.append(new_num_adopters)
                       
                      
                        t+=2
                      

                    elif  shift_length==1:                      
                        if t+1 < cutting_day:
                           time_evol_number_adopters.append(new_num_adopters)                       
                       
                        t+=1
                      

                    old_num_adopters=new_num_adopters

                ############## end while loop over t
               

               
               
                time_evol_number_adopters_ITER.append(time_evol_number_adopters)


                list_final_num_adopt.append(time_evol_number_adopters[-1])
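                # three goodness-of-fit records per realization: the distance between the
                # whole simulated curve and the real training curve, the absolute gap in
                # the number of adopters at the cutting day, and that same gap with sign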

               
                list_dist_fixed_parameters.append(compare_real_evol_vs_simus_to_be_called.compare_two_curves( list_actual_evol_training,time_evol_number_adopters))
               
                list_dist_abs_at_ending_point_fixed_parameters.append( abs(time_evol_number_adopters[-1]-list_actual_evol_training[-1]) )

                list_dist_at_ending_point_fixed_parameters.append( time_evol_number_adopters[-1]-list_actual_evol_training[-1]) 


                
                for n in G.nodes():              
                    doctor= G.node[n]["label"]  
                    if G.node[n]['type'] != "shift":
                      if  G.node[n]['status'] =="Adopter":                                    
                         if G.node[n]['type'] =="F":                                                                                              
                            dict_fellow_freq_adoption_end[doctor]   += 1.  
                            num_F_adopters+=1.
                         elif G.node[n]['type'] =="A":                                                                                              
                            dict_att_freq_adoption_end[doctor]   += 1.                        
                            num_Att_adopters+=1.

            #######################   end loop Niter for the training phase


            list_pair_dist_std_delta_end=[]
        
            list_pair_dist_std_delta_end.append(numpy.mean(list_dist_fixed_parameters) )   # average dist between the curves over Niter
            list_pair_dist_std_delta_end.append(numpy.std(list_dist_fixed_parameters) )

            list_pair_dist_std_delta_end.append(numpy.mean(list_dist_abs_at_ending_point_fixed_parameters))

         

                     
            value=numpy.mean(list_dist_fixed_parameters) *numpy.mean(list_dist_abs_at_ending_point_fixed_parameters) # if SD=0, it is a problem, because then that is the minimum value, but not the optimum i am looking for!!
        
            dict_filenames_prod_distances[output_file]=  value                  
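            # value = (mean curve distance) x (mean absolute end-point gap); parameter
            # sets are ranked later by this product via pick_minimum_prod_distances, so
            # smaller is better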


            if (numpy.mean(list_dist_abs_at_ending_point_fixed_parameters)) <= delta_end:  # i only consider situations close enough at the ending point   
               dict_filenames_tot_distance[output_file]=list_pair_dist_std_delta_end 
  
          
            if print_landscape =="YES":
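               # one line per parameter set: alpha_F, damping, mutual_encouragement,
               # threshold, mean end-point gap, mean curve distance, and mean / std /
               # relative std of the final number of adopters over the Niter realizations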
               file3 = open(output_file3,'at')          # i print out the landscape           
               print >> file3, alpha_F, damping, mutual_encouragement, threshold,numpy.mean(list_dist_abs_at_ending_point_fixed_parameters), numpy.mean(list_dist_fixed_parameters),  numpy.mean(list_final_num_adopt),numpy.std(list_final_num_adopt),  numpy.std(list_final_num_adopt)/numpy.mean(list_final_num_adopt)
               file3.close()



            if print_training_evol=="YES":
               file = open(output_file,'wt')        
               for i in range(len(time_evol_number_adopters)):  #time step by time step
                  list_fixed_t=[]
                  for iteracion in range (Niter): #loop over all independent iter of the process
                     list_fixed_t.append(time_evol_number_adopters_ITER[iteracion][i])  # i collect all values for the same t, different iter  

                  print >> file, i,numpy.mean(list_fixed_t),numpy.std(list_fixed_t), alpha_F,damping,mutual_encouragement       
                  last_adoption_value=numpy.mean(list_fixed_t)
               file.close()
               print "written evolution file:", output_file
           

               print  "\nFraction of times each fellow was an adopter at the end of the training segment:"
               for n in G.nodes():              
                  if G.node[n]['type'] =="F":   
                     doctor= G.node[n]["label"]    
                     dict_fellow_freq_adoption_end[doctor]= dict_fellow_freq_adoption_end[doctor]/float(Niter)         
                     print doctor,"\t", dict_fellow_freq_adoption_end[doctor]
             
               print "(out of",len(dict_fellow_freq_adoption_end),"fellows)\n"
               sorted_list_tuples=sorted(dict_fellow_freq_adoption_end.iteritems(), key=operator.itemgetter(1),reverse=True)
             

               file4 = open(output_file4,'wt')
               print >> file4, last_adoption_value, "(avg adoption among Att at the cutting day)", "Avg # F adopters", num_F_adopters/Niter, "Avg # A adopters", num_Att_adopters/Niter
               for pair in sorted_list_tuples:
                  print >> file4, pair[0], pair[1]
               file4.close()
               print "written adoption frequency file for fellows:", output_file4


            damping += delta_damping
          mutual_encouragement += delta_mutual_encouragement
        alpha_F += delta_alpha_F
      threshold  += delta_threshold
    


   string_name="persuasion_training_"+fixed_param+str(Niter)+"iter_"+str(cutting_day)+"_Att_only_middle.dat"    # for the "Results" file with the sorted list of files
   list_order_dict=  compare_real_evol_vs_simus_to_be_called.pick_minimum_same_end(dict_filenames_tot_distance,string_name,Niter,cutting_day)


  
   
   list_order_dict2= compare_real_evol_vs_simus_to_be_called.pick_minimum_prod_distances(dict_filenames_prod_distances,string_name,Niter,cutting_day)

  



  
   if print_landscape =="YES":
      print "printed out landscape file:",output_file3


   print "\n\n"
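# ---------------------------------------------------------------------------
# Note: the training loops above and below score each parameter set with
# compare_real_evol_vs_simus_to_be_called.compare_two_curves(), whose code is
# not part of this file.  The helper below is only a minimal sketch of what a
# curve-to-curve distance of that kind could look like (an assumption for
# illustration, NOT the actual implementation of that module): the sum of the
# absolute per-day differences over the overlapping part of the real and
# simulated adoption curves.
def _example_curve_distance(real_curve, simulated_curve):
    """Illustrative only: L1 distance between two adoption curves."""
    overlap = min(len(real_curve), len(simulated_curve))
    return sum(abs(real_curve[i] - simulated_curve[i]) for i in range(overlap))
# e.g. _example_curve_distance([1, 2, 4], [1, 3, 3]) returns 2
# ---------------------------------------------------------------------------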
def main(graph_name):
 

   G = nx.read_gml(graph_name)
 
   list_id_weekends_T3=look_for_T3_weekends(G)  # T3 doesnt share fellows in the weekend  (but they are the exception)



   cutting_day=175  # to separate   training-testing

   Niter_training=1000
  

   delta_end=3  # >= than + or -  dr difference at the end of the evolution

   dir_real_data='../Results/'
   dir="../Results/weight_shifts/persuasion/"  


   all_team="NO"   # as adopters or not
   Nbins=20   # for the histogram of sum of distances


   fixed_param="FIXED_mutual0.5_damping.5_"    # or ""  # for the Results file that contains the sorted list of best parameters




  # fixed_parameters="mutual_encoug0.5_threshold0.5"   # for the Landscape text file CHANGE PARAMETERS ACCORDINGLY!!!

#output_file3="../Results/weight_shifts/Landscape_parameters_persuasion_train_test_"+str(fixed_parameters)+"_"+str(Niter_training)+"iter.dat"
   output_file3="../Results/weight_shifts/Landscape_parameters_persuasion_train_FIXED_damping0.1_threshold0.7_"+str(Niter_training)+"iter_alphaA_eq_alphaF.dat"  
   file3 = open(output_file3,'wt')        
   file3.close()

 


######################################################################################
#  I read the file of the actual evolution of the idea spreading in the hospital:   ##
######################################################################################



   if all_team=="YES":    
      print "remember that now i use the file of adopters without fellows\n../Results/Actual_evolution_adopters_NO_fellows_only_attendings.dat"
      exit()

   else:
      filename_actual_evol="../Results/Actual_evolution_adopters_NO_fellows_only_attendings.dat"
  


   file1=open(filename_actual_evol,'r')         ## i read the file:  list_dates_and_names_current_adopters.txt  (created with: extract_real_evolution_number_adopters.py)
   list_lines_file=file1.readlines()
            

   list_actual_evol=[]  
   for line in list_lines_file:      # [1:]:   # i exclude the first row   
     
      num_adopters= float(line.split(" ")[1])          
      list_actual_evol.append(num_adopters)

   list_actual_evol_training=list_actual_evol[:cutting_day]

##################################################################


#../Results/network_final_schedule_withTeam3/Time_evolutions_Persuasion_alpha0.2_damping0.0_mutual_encourg0.7_threshold0.4_unif_distr_50iter_2012_seed31Oct_finalnetwork.dat

 
   alpha_F_min=0.10   #   # alpha=0: nobody changes their mind
   alpha_F_max=0.9    
   delta_alpha_F=0.10    #AVOID 1.0 OR THE DYNAMICS GETS TOTALLY STUCK AND IT IS NOT ABLE TO PREDICT SHIT!
   

   min_damping=0.500   #0.0     # it's harder to go back from YES to NO again: =1 means no damping effect, =0.5 means the Y->N movement is half the size of the N->Y one, =0 means never going back from Y to N
   max_damping=0.501    #0.451
   delta_damping=0.10  
   
   


   min_mutual_encouragement=0.50   #  # when two Adopters meet, they convince each other even more
   max_mutual_encouragement=0.501   
   delta_mutual_encouragement=0.10
   
   
   threshold_min=0.10   #  # opinion must be larger than this to count as an Adopter
   threshold_max=0.901 
   delta_threshold=0.10   # AVOID 1.0 OR THE DYNAMICS GETS TOTALLY STUCK AND IT IS NOT ABLE TO PREDICT SHIT
 


   
   
   print "\n\nPersuasion process on network, with Niter:",Niter_training
   
   
   dict_filenames_tot_distance={}   # i will save the filename as key and the tot distance from that curve to the original one
   dict_filenames_prod_distances={}   


  

   threshold=threshold_min
   while   threshold<= threshold_max:
      print   "threshold:",threshold

      alpha_F=alpha_F_min
      while alpha_F<= alpha_F_max:            # i explore all the parameter space, and create a file per each set of values
        alpha_A=1.0*alpha_F   # attendings and fellows share the same alpha here (hence the "alphaA_eq_alphaF" tag in the output filenames)
        print "  alpha_F:",alpha_F

        mutual_encouragement=min_mutual_encouragement  
        while  mutual_encouragement <= max_mutual_encouragement:
          print "    mutual_encouragement:",mutual_encouragement

          damping=min_damping
          while   damping <= max_damping:
            print "      damping:",damping


         
#            dir="../Results/weight_shifts/persuasion/alpha%.2f_damping%.2f/"  % (alpha_F, damping )
           
            output_file=dir+"Time_evolutions_Persuasion_training_alpha"+str(alpha_F)+"_damping"+str(damping)+"_mutual_encourg"+str(mutual_encouragement)+"_threshold"+str(threshold)+"_unif_distr_"+str(Niter_training)+"iter_alphaA_eq_alphaF"+"_"+str(cutting_day)+".dat"         


           # file = open(output_file,'wt')     # i am not saving the train file, because i will just want to know 
            #file.close()          # the optimum parameter set and go look for the whole-250-day file
            


            time_evol_number_adopters_ITER=[]  # list of complete single realizations of the dynamics
            list_dist_fixed_parameters=[]
            list_dist_at_ending_point_fixed_parameters=[]
            list_dist_abs_at_ending_point_fixed_parameters=[]

           
            list_networks_at_cutting_day=[]

            list_final_num_adopt=[]


            for iter in range(Niter_training):

               # print "         ",iter
                list_t=[]
           
                time_evol_number_adopters=[]   # for a single realization of the dynamics

               


                num_adopters , seed_shift ,max_shift= set_ic(G,threshold)   # i establish who is Adopter and NonAdopter initially, and count how many shifts i have total

                time_evol_number_adopters.append(float(num_adopters))               
                list_t.append(0)



                
               ########### the dynamics starts:                 
                t=int(seed_shift)+1   # the dynamics starts on the shift right after the seeding shift; the seeding step itself is just the initial condition


                while t< cutting_day:  # loop over shifts, in chronological order  (the order is the day index since seeding_day) 
                         
                    list_t.append(t)
                    for n in G.nodes():
                        if G.node[n]['type']=="shift" and G.node[n]['order']==t:  # i look for the shift corresponding to that time step                    

                            shift_lenght=int(G.node[n]['shift_lenght'])
                           
                            if shift_lenght==2 and n not in list_id_weekends_T3:
                               shift_lenght=1   # because during weekends, the fellow does rounds one day with Att1 and the other day with Att2  (weekend shifts for T3 are two days long, with no shared fellows)



                            flag_possible_persuasion=0
                            for doctor in G.neighbors(n):                               
                                if G.node[doctor]["status"]=="Adopter":   #first i check if any doctor is an adopter in this shift         
                                    flag_possible_persuasion=1                               
                                    break

                            if flag_possible_persuasion==1:
                                list_doctors=[]
                                for doctor in G.neighbors(n):   # for all drs in that shift
                                    list_doctors.append(doctor)
                                
                                
                                pairs=itertools.combinations(list_doctors,2)    # cos the shift can be 2 but also 3 doctors 
                                for pair in pairs:
                                    doctor1=pair[0]
                                    doctor2=pair[1]
                                                                                        
                                    if G.node[doctor1]['status'] != G.node[doctor2]['status']:  # if they think differently, there will be persuasion
                                        persuasion(G,damping,doctor1,doctor2,alpha_A,alpha_F,threshold,shift_lenght)   # i move their values of opinion
                                        update_opinions(G,threshold,doctor1,doctor2)  # i update status and make sure the values of the vectors stay between [0,1]

                                    else:  # if two Adopters meet, they encourage each other (if two NonAdopters, nothing happens)
                                        mutual_reinforcement(G,mutual_encouragement,doctor1,doctor2,shift_lenght)
                                  
                               
                    list_all_Adopters=[]  #including fellows        
                    list_Adopters=[]        #NOT including fellows 
                    for n in G.nodes():              
                        try:
                            if  G.node[n]["status"]=="Adopter":                                                    
                                if G.node[n]["label"] not in list_Adopters and G.node[n]["type"]=="A":
                                    list_Adopters.append(G.node[n]["label"])
                        except: pass  # if the node is a shift, it doesnt have a 'status' attribute


        
                   


                    time_evol_number_adopters.append(float(len(list_Adopters)))

                    t+=1
   

                ############## end while loop over t
               


               
                time_evol_number_adopters_ITER.append(time_evol_number_adopters)


                list_final_num_adopt.append(time_evol_number_adopters[-1])

               
                list_dist_fixed_parameters.append(compare_real_evol_vs_simus_to_be_called.compare_two_curves( list_actual_evol_training,time_evol_number_adopters))
               
                list_dist_abs_at_ending_point_fixed_parameters.append( abs(time_evol_number_adopters[-1]-list_actual_evol_training[-1]) )

                list_dist_at_ending_point_fixed_parameters.append( time_evol_number_adopters[-1]-list_actual_evol_training[-1]) 



               
              
             

            #######################   end loop Niter for the training phase


            list_pair_dist_std_delta_end=[]
        
            list_pair_dist_std_delta_end.append(numpy.mean(list_dist_fixed_parameters) )   # average dist between the curves over Niter
            list_pair_dist_std_delta_end.append(numpy.std(list_dist_fixed_parameters) )

            list_pair_dist_std_delta_end.append(numpy.mean(list_dist_abs_at_ending_point_fixed_parameters))

         

                     
            value=numpy.mean(list_dist_fixed_parameters) *numpy.mean(list_dist_abs_at_ending_point_fixed_parameters) # if SD=0, it is a problem, because then that is the minimum value, but not the optimum i am looking for!!
        
            dict_filenames_prod_distances[output_file]=  value                  



            file3 = open(output_file3,'at')          # i print out the landscape           
            print >> file3, alpha_F, damping, mutual_encouragement, threshold,numpy.mean(list_dist_abs_at_ending_point_fixed_parameters), numpy.mean(list_dist_fixed_parameters),  numpy.mean(list_final_num_adopt),numpy.std(list_final_num_adopt),  numpy.std(list_final_num_adopt)/numpy.mean(list_final_num_adopt)
            file3.close()
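            # two diagnostics per parameter set: a histogram of the signed end-point
            # differences (simulated minus real at the cutting day) and a histogram with
            # Nbins bins of the curve-to-curve distances over the Niter_training runs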




            histogram_filename="../Results/weight_shifts/histogr_raw_distances_ending_test_train_alpha"+str(alpha_F)+"_damping"+str(damping)+"_mutual_encourg"+str(mutual_encouragement)+"_threshold"+str(threshold)+"_unif_distr_"+str(Niter_training)+"iter_alphaA_eq_alphaF"+"_"+str(cutting_day)+".dat"     
            histograma_gral_negv_posit.histograma(list_dist_at_ending_point_fixed_parameters,histogram_filename)
            
            histogram_filename2="../Results/weight_shifts/histogr_sum_dist_traject_infection_training_alpha"+str(alpha_F)+"_damping"+str(damping)+"_mutual_encourg"+str(mutual_encouragement)+"_threshold"+str(threshold)+"_unif_distr_"+str(Niter_training)+"iter_alphaA_eq_alphaF"+"_"+str(cutting_day)+".dat"     
            
            histograma_bines_gral.histograma_bins(list_dist_fixed_parameters,Nbins,histogram_filename2)


            print  "written histogram file: ",histogram_filename
            print  "written histogram file: ",histogram_filename2


            if (numpy.mean(list_dist_abs_at_ending_point_fixed_parameters)) <= delta_end:  # i only consider situations close enough at the ending point   

               dict_filenames_tot_distance[output_file]=list_pair_dist_std_delta_end 


             



   
          #  file = open(output_file,'wt')        
           # for i in range(len(time_evol_number_adopters)):  #time step by time step
            #    list_fixed_t=[]
             #   for iteracion in range (Niter_training): #loop over all independent iter of the process
              #      list_fixed_t.append(time_evol_number_adopters_ITER[iteracion][i])  # i collect all values for the same t, different iter  

               # print >> file, list_t[i],numpy.mean(list_fixed_t),numpy.std(list_fixed_t), alpha_F,damping,mutual_encouragement       
            #file.close()

           

          
            damping += delta_damping
          mutual_encouragement += delta_mutual_encouragement
        alpha_F += delta_alpha_F
      threshold  += delta_threshold
    



   list_order_dict=  compare_real_evol_vs_simus_to_be_called.pick_minimum_same_end(dict_filenames_tot_distance,"Persuasion_training_land_weight",all_team,Niter_training,cutting_day)


  
   string_name="_persuasion_training_"+fixed_param+str(Niter_training)+"iter_"+str(cutting_day)+".dat"            # for the "Results" file with the sorted list of files
   
   list_order_dict2= compare_real_evol_vs_simus_to_be_called.pick_minimum_prod_distances(dict_filenames_prod_distances,string_name,all_team,Niter_training,cutting_day)

  


#./Results/network_final_schedule_withTeam3_local/Time_evolutions_Persuasion_alpha0.4_damping0.4_mutual_encourg0.6_threshold0.5_unif_distr_2iter_2012_seed31Oct_finalnetwork.dat


   optimum_filename=list_order_dict[0][0]


   print optimum_filename   
   alpha_F=float(list_order_dict[0][0].split("_alpha")[1].split("_")[0])
   alpha_A=1.0*alpha_F   # alpha_A equals alpha_F in this exploration (same as in the training loop above)
   damping=float(list_order_dict[0][0].split("_damping")[1].split("_")[0])
   mutual_encouragement=float(list_order_dict[0][0].split("_mutual_encourg")[1].split("_")[0])
   threshold=float(list_order_dict[0][0].split("_threshold")[1].split("_")[0])
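   # e.g. (hypothetical filename, just to illustrate the splits above): from
   # "...Persuasion_training_alpha0.3_damping0.5_mutual_encourg0.5_threshold0.4_..."
   # the splits recover alpha_F=0.3, damping=0.5, mutual_encouragement=0.5, threshold=0.4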
  
  
            
                

  
   print "Optimum (old method) alpha=", alpha_F, " damping=",damping," mutual encourag=",mutual_encouragement," threshold",threshold
   
  
  
   optimum_filename=list_order_dict2[0][0]

   print optimum_filename   
   alpha_F=float(list_order_dict2[0][0].split("_alpha")[1].split("_")[0])
   alpha_A=1.0*alpha_F   # alpha_A equals alpha_F in this exploration (same as in the training loop above)
   damping=float(list_order_dict2[0][0].split("_damping")[1].split("_")[0])
   mutual_encouragement=float(list_order_dict2[0][0].split("_mutual_encourg")[1].split("_")[0])
   threshold=float(list_order_dict2[0][0].split("_threshold")[1].split("_")[0])
  
  
            
                

  
   print "Optimum (product distances and SDs) alpha=", alpha_F, " damping=",damping," mutual encourag=",mutual_encouragement," threshold",threshold
   
  
  





   output_file10="../Results/weight_shifts/Summary_results_train_test_persuasion_alpha"+str(alpha_F)+"_FIXED_damping"+str(damping)+"_mutual_encourg"+str(mutual_encouragement)+"_FIXED_threshold"+str(threshold)+"_"+str(Niter_training)+"iter_alphaA_eq_alphaF_day"+str(cutting_day)+".dat"         
   file10 = open(output_file10,'wt')    

   print >> file10, "Summary results from train-testing persuasion with",Niter_training, "iter, using the avg of the cutting points as IC, and with values for the parameters:  alpha ",alpha_F," damping: ",damping," mutual_encourg: ",mutual_encouragement," threshold:",threshold


   print >> file10,  "Look for the file with the optimum set of parameters (or run those simulations):",optimum_filename
  

   file10.close()




   print "Look for the file with the optimum set of parameters (or run those simulations):",optimum_filename
  

   print "printed out landscape file:",output_file3