def fitness_evaluate(population, V_l, E_l, investment, GA_Reinfoce_open,
                     GA_Construct_open, G):
    '''Calculate the fitness value based on equation (22); constraint (20) is handled as a negative penalty term.'''
    chromos_fitness = {}

    if GA_Reinfoce_open == 1:  #reinforce current network
        for eachChromo in population.keys():
            bridge_reliability = copy.deepcopy(G.resilience)
            for (i, j) in population[eachChromo].keys():
                #update resilience for corresponding bridge
                if population[eachChromo][i, j] == 1:
                    bridge_reliability[i, j] = 0.99

            #update the Fresilience_Path
            GAL, GALength_Path, GAFresilience_Path, GAPath_ADTT = PS.main(
                G.nodes, G.arcs, G.emergnode, G.length, bridge_reliability,
                G.ADTT)

            #normalize the path length
            GANormal_Path_Length = {}
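            #weight each path by (total length of all paths - its own length), rescaled so
            #the weights sum to the number of paths: shorter paths receive larger weights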
            for headnode, tailnode in GALength_Path.keys():
                GANormal_Path_Length[headnode, tailnode] = {}
                if len(GALength_Path[headnode, tailnode].keys()) == 1:
                    GANormal_Path_Length[headnode, tailnode][1] = 1
                elif len(GALength_Path[headnode, tailnode].keys()) > 1:
                    Temp_Sum = 0
                    for k, value in GALength_Path[headnode, tailnode].items():
                        Temp_Sum += value
                    Normal_sum = 0
                    for k, value in GALength_Path[headnode, tailnode].items():
                        Normal_sum += Temp_Sum - value
                    for k, value in GALength_Path[headnode, tailnode].items():
                        GANormal_Path_Length[headnode, tailnode][k] = (
                            (Temp_Sum - value) / Normal_sum) * len(
                                GALength_Path[headnode, tailnode].keys())

            #normalize ADTT
            GANormal_Path_ADTT = {}
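            #weight each path by its share of the pair's total ADTT, rescaled so the
            #weights sum to the number of paths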
            for headnode, tailnode in GAPath_ADTT.keys():
                GANormal_Path_ADTT[headnode, tailnode] = {}
                if len(GAPath_ADTT[headnode, tailnode].keys()) == 1:
                    GANormal_Path_ADTT[headnode, tailnode][1] = 1
                elif len(GAPath_ADTT[headnode, tailnode].keys()) > 1:
                    Temp_Sum = 0
                    for k, value in GAPath_ADTT[headnode, tailnode].items():
                        Temp_Sum += value
                    for k, value in GAPath_ADTT[headnode, tailnode].items():
                        GANormal_Path_ADTT[
                            headnode, tailnode][k] = (value / Temp_Sum) * len(
                                GAPath_ADTT[headnode, tailnode].keys())

            #inverse shortest distance from node i to the emergency nodes
            omega = {}
            for node in G.nodes:
                if node in G.emergnode:
                    omega[node] = 1
                else:
                    omega[node] = 0

            for head, tail in GALength_Path.keys():
                if head in G.emergnode or tail in G.emergnode:
                    omega[head] = max(omega[head],
                                      1 / GALength_Path[head, tail][1])
                    omega[tail] = max(omega[tail],
                                      1 / GALength_Path[head, tail][1])

            #compute the weight of each node
            GANodeWeight = {}
            sumomega = sum(omega.values())
            for node in G.nodes:
                GANodeWeight[node] = omega[node] / sumomega

            R_G_x = RE.resilience_evaluation(G.nodes, GAL,
                                             GANormal_Path_Length,
                                             GAFresilience_Path, GANodeWeight,
                                             GANormal_Path_ADTT)

            Total_cost = 0
            for headnode, tailnode in G.arcs:
                Total_cost += population[eachChromo][
                    headnode, tailnode] * G.cost[headnode, tailnode]
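            #G.arcs holds each bridge in both directions (see the headnode < tailnode
            #filters elsewhere), so Total_cost counts every selected bridge twice;
            #hence the budget check below against 2 * investment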

            if Total_cost > 2 * investment:
                Penalty_value = -1000000
            else:
                Penalty_value = 0

            chromos_fitness[eachChromo] = R_G_x + Penalty_value

            print Total_cost, chromos_fitness[eachChromo], sum(
                population[eachChromo].values())

    if GA_Construct_open == 1:  #construct new bridges
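        #NOTE: this branch expects q_l and u_l to be defined elsewhere (they are not
        #parameters of this function) and relies on the commented-out resilience call
        #below to set R_G_x; as written it does not update chromos_fitness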
        for eachChromo in population.keys():
            #add the new link into the network based on chromo
            print population[eachChromo]
            for (i, j) in population[eachChromo].keys():
                if (i, j) not in E_l and population[eachChromo][i, j] != 0:
                    E_l.append((i, j))
                    q_l[i, j] = 1
                if (i, j) in E_l and population[eachChromo][i, j] == 0:
                    E_l.remove((i, j))
                    del q_l[i, j]
        E_comp = copy.deepcopy(E_l)
        q_comp = copy.deepcopy(q_l)
        ##print E_l
        #R_G_x = RE.resilience_evalueation(V_l, E_comp, u_l, q_comp)

        print "eachChromo", R_G_x
        E_comp = copy.deepcopy(E_l)
        q_comp = copy.deepcopy(q_l)
        result = FE.friability_evaluation(V_l, E_comp, u_l, q_comp, R_G_x)
        F_max_x = result[1]
        print "eachChromo", F_max_x

    return chromos_fitness


def fitness_evaluate(population, V_l, E_l, investment, GA_Reinfoce_open, GA_Construct_open, G):
    """caculate the fitness value beased on equationn (22), constrain(20) is handled as a negetive penalty function"""
    chromos_fitness = {}

    if GA_Reinfoce_open == 1:  # reinforce current network
        for eachChromo in population.keys():
            bridge_reliability = {}
            for (i, j) in population[eachChromo].keys():
                # update resilience for corresponding bridge
                if population[eachChromo][i, j] == 1:
                    bridge_reliability[i, j] = 0.99
                else:
                    bridge_reliability[i, j] = G.resilience[i, j]

            # update the Fresilience_Path
            GAL, GALength_Path, GAFresilience_Path, GAPath_ADTT = PS.main(
                G.nodes, G.arcs, G.emergnode, G.length, bridge_reliability, G.ADTT
            )

            # normalize the path length
            GANormal_Path_Length = {}
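            # weight each path by (total length of all paths - its own length), rescaled so
            # the weights sum to the number of paths: shorter paths receive larger weights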
            for headnode, tailnode in GALength_Path.keys():
                GANormal_Path_Length[headnode, tailnode] = {}
                if len(GALength_Path[headnode, tailnode].keys()) == 1:
                    GANormal_Path_Length[headnode, tailnode][1] = 1
                elif len(GALength_Path[headnode, tailnode].keys()) > 1:
                    Temp_Sum = 0
                    for k, value in GALength_Path[headnode, tailnode].items():
                        Temp_Sum += value
                    Normal_sum = 0
                    for k, value in GALength_Path[headnode, tailnode].items():
                        Normal_sum += Temp_Sum - value
                    for k, value in GALength_Path[headnode, tailnode].items():
                        GANormal_Path_Length[headnode, tailnode][k] = ((Temp_Sum - value) / Normal_sum) * len(
                            GALength_Path[headnode, tailnode].keys()
                        )

            # normalize ADTT
            GANormal_Path_ADTT = {}
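            # weight each path by its share of the pair's total ADTT, rescaled so the
            # weights sum to the number of paths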
            for headnode, tailnode in GAPath_ADTT.keys():
                GANormal_Path_ADTT[headnode, tailnode] = {}
                if len(GAPath_ADTT[headnode, tailnode].keys()) == 1:
                    GANormal_Path_ADTT[headnode, tailnode][1] = 1
                elif len(GAPath_ADTT[headnode, tailnode].keys()) > 1:
                    Temp_Sum = 0
                    for k, value in GAPath_ADTT[headnode, tailnode].items():
                        Temp_Sum += value
                    for k, value in GAPath_ADTT[headnode, tailnode].items():
                        GANormal_Path_ADTT[headnode, tailnode][k] = (value / Temp_Sum) * len(
                            GAPath_ADTT[headnode, tailnode].keys()
                        )

            # inverse shortest distance from node i to the emergency nodes
            omega = {}
            for node in G.nodes:
                if node in G.emergnode:
                    omega[node] = 1
                else:
                    omega[node] = 0

            for head, tail in GALength_Path.keys():
                if head in G.emergnode or tail in G.emergnode:
                    omega[head] = max(omega[head], 1 / GALength_Path[head, tail][1])
                    omega[tail] = max(omega[tail], 1 / GALength_Path[head, tail][1])

            # compute the weight of each node
            GANodeWeight = {}
            sumomega = sum(omega.values())
            for node in G.nodes:
                GANodeWeight[node] = omega[node] / sumomega

            R_G_x = RE.resilience_evaluation(
                G.nodes, GAL, GANormal_Path_Length, GAFresilience_Path, GANodeWeight, GANormal_Path_ADTT
            )

            Total_cost = 0
            for headnode, tailnode in G.arcs:
                Total_cost += population[eachChromo][headnode, tailnode] * G.cost[headnode, tailnode]
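            # G.arcs holds each bridge in both directions (see the headnode < tailnode
            # filters elsewhere), so Total_cost counts every selected bridge twice;
            # hence the budget check below against 2 * investment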

            if Total_cost > 2 * investment:
                Penalty_value = -1000000
            else:
                Penalty_value = 0

            chromos_fitness[eachChromo] = R_G_x + Penalty_value

            print Total_cost, chromos_fitness[eachChromo], sum(population[eachChromo].values())

            g = open("GA_details_{}.txt".format(investment), "a")
            g.write(
                "{}\t {} \t {} \n".format(Total_cost, chromos_fitness[eachChromo], sum(population[eachChromo].values()))
            )
            g.close()

    if GA_Construct_open == 1:  # construct new bridges
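        # NOTE: this branch expects q_l and u_l to be defined elsewhere (they are not
        # parameters of this function) and relies on the commented-out resilience call
        # below to set R_G_x; as written it does not update chromos_fitness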
        for eachChromo in population.keys():
            # add the new link into the network based on chromo
            print population[eachChromo]
            for (i, j) in population[eachChromo].keys():
                if (i, j) not in E_l and population[eachChromo][i, j] != 0:
                    E_l.append((i, j))
                    q_l[i, j] = 1
                if (i, j) in E_l and population[eachChromo][i, j] == 0:
                    E_l.remove((i, j))
                    del q_l[i, j]
        E_comp = copy.deepcopy(E_l)
        q_comp = copy.deepcopy(q_l)
        ##print E_l
        # R_G_x = RE.resilience_evalueation(V_l, E_comp, u_l, q_comp)

        print "eachChromo", R_G_x
        E_comp = copy.deepcopy(E_l)
        q_comp = copy.deepcopy(q_l)
        result = FE.friability_evaluation(V_l, E_comp, u_l, q_comp, R_G_x)
        F_max_x = result[1]
        print "eachChromo", F_max_x

    return chromos_fitness
Example #3
def main():

    seed = 100
    random.seed(seed)
    np.random.seed(seed)

    SampleNum = 1000  #Number of Monte Carlo samples
    numNodes = 30  #Number of nodes
    emerg_nodes = [9, 17]

    #genetic algorithm parameters
    GA_Reinfoce_open = 1  #run GA to solve the reinforcement problem
    GA_Construct_open = 0  #run GA to solve the construction problem
    pareto = 1  #run GA to find the Pareto frontier when the objectives conflict
    #GA parameters
    num_population = 10  #number of populations
    max_iteration = 500  #maximum number of iterations
    max_time = 60  #maximum runtime
    crossover_rate = 0.7  #probability of crossover
    mutation_rate = 0.3  #probability of mutation
    top = 4  #number of chromos selected from the top of the ranked population
    investment = 0  #budget

    #V = range(1,numNodes+1) #set of nodes or vertices representing the cities in the network

    #set of edges or arcs representing the roads connecting the nodes in the network
    E = [(1, 2), (1, 4), (2, 5), (3, 5), (4, 9), (5, 6), (5, 9), (6, 10),
         (6, 11), (7, 11), (8, 9), (9, 10), (9, 12), (10, 13), (10, 14),
         (11, 14), (12, 13), (13, 16), (13, 17), (15, 16), (16, 19), (17, 18),
         (17, 19), (17, 22), (18, 20), (19, 21), (19, 22), (20, 22), (20, 23),
         (21, 26), (22, 24), (22, 29), (22, 30), (23, 25), (23, 24), (26, 27),
         (27, 28)]

    print "number of bridge", len(E)

    #****************************input end***************************************

    #create the network according to above input

    G = Network()
    G.addnode(numNodes)
    G.addemergenode(emerg_nodes)
    G.addarc(E)
    G.addlength()

    #divide the bridges into two classes of 19 and 18 bridges, respectively
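    #each class stores both (i, j) and (j, i), so 19 bridges correspond to 38 entries,
    #which is why the size checks below compare against 38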

    bridgeclass1, bridgeclass2 = [], []
    for headnode, tailnode in G.arcs:
        if headnode < tailnode:
            if random.random() < 0.5:
                if len(bridgeclass1) < 38:
                    bridgeclass1.append((headnode, tailnode))
                    bridgeclass1.append((tailnode, headnode))
                else:
                    bridgeclass2.append((headnode, tailnode))
                    bridgeclass2.append((tailnode, headnode))

            else:
                if len(bridgeclass2) < 38:
                    bridgeclass2.append((headnode, tailnode))
                    bridgeclass2.append((tailnode, headnode))
                else:
                    bridgeclass1.append((headnode, tailnode))
                    bridgeclass1.append((tailnode, headnode))

    #assign the same mean to every bridge within each class
    reliability_mean1, reliability_mean2 = [], []
    for headnode, tailnode in bridgeclass1:
        if headnode < tailnode:
            reliability_mean1.append(0.7)

    for headnode, tailnode in bridgeclass2:
        if headnode < tailnode:
            reliability_mean2.append(0.6)

    #use these mean values to generate 19 random numbers from a multivariate normal distribution (MVND)

    reliability_COV1 = []
    with open('reliability_COV1.txt', 'r') as f:
        for line in f:
            reliability_COV1.append(map(float, line.split(',')))

    reliability_COV2 = []
    with open('reliability_COV2.txt', 'r') as f:
        for line in f:
            reliability_COV2.append(map(float, line.split(',')))

    #check the length of mean and cov
    if len(reliability_mean1) == len(reliability_COV1):
        #change the coefficients of variation to covariance: var = (mean*cov)**2
        reliability_Covar1 = cov_to_covar(reliability_mean1, reliability_COV1)
        true_reliability_mean_list1 = MNDNgenerator(reliability_mean1,
                                                    reliability_COV1)
    elif len(reliability_mean1) == len(reliability_COV2):
        #change the coefficients of variation to covariance: var = (mean*cov)**2
        reliability_Covar1 = cov_to_covar(reliability_mean1, reliability_COV2)
        true_reliability_mean_list1 = MNDNgenerator(reliability_mean1,
                                                    reliability_COV2)
    else:
        sys.exit(
            "Numpy error: mean and cov must have the same length; mean is {} and cov is {}"
            .format(len(reliability_mean1), len(reliability_COV1)))

    #assign these random values to each bridge in class1
    bridge_true_reliability_mean = {}
    i = 0
    for headnode, tailnode in bridgeclass1:
        if headnode < tailnode:

            bridge_true_reliability_mean[
                headnode, tailnode] = true_reliability_mean_list1[i]
            bridge_true_reliability_mean[
                tailnode, headnode] = bridge_true_reliability_mean[headnode,
                                                                   tailnode]
            i += 1

    #use these mean values to generate the random numbers for class 2 from the MVND

    #check the length of mean and cov
    if len(reliability_mean2) == len(reliability_COV2):
        #change the coefficients of variation to covariance: var = (mean*cov)**2
        reliability_Covar2 = cov_to_covar(reliability_mean2, reliability_COV2)
        true_reliability_mean_list2 = MNDNgenerator(reliability_mean2,
                                                    reliability_COV2)
    elif len(reliability_mean2) == len(reliability_COV1):
        #change the coefficients of variation to covariance: var = (mean*cov)**2
        reliability_Covar2 = cov_to_covar(reliability_mean2, reliability_COV1)
        true_reliability_mean_list2 = MNDNgenerator(reliability_mean2,
                                                    reliability_COV1)
    else:
        sys.exit(
            "Numpy error: mean and cov must have the same length; mean is {} and cov is {}"
            .format(len(reliability_mean2), len(reliability_COV2)))

    #assign these random values to each bridge in class2
    i = 0
    for headnode, tailnode in bridgeclass2:
        if headnode < tailnode:
            if (headnode, tailnode) in bridge_true_reliability_mean.keys() or (
                    tailnode, headnode) in bridge_true_reliability_mean.keys():
                sys.exit("Duplicate edges in both bridge classes")
                print headnode, tailnode

            bridge_true_reliability_mean[
                headnode, tailnode] = true_reliability_mean_list2[i]
            bridge_true_reliability_mean[
                tailnode, headnode] = bridge_true_reliability_mean[headnode,
                                                                   tailnode]
            i += 1

    #use bridge_true_reliability_mean, together with the hazard correlation matrix, to generate a distribution for each bridge

    hazard_mean = []
    for headnode, tailnode in G.arcs:
        if headnode < tailnode:
            hazard_mean.append(bridge_true_reliability_mean[headnode,
                                                            tailnode])

    hazard_matrix = hazard_matrix_compute.generate_hazard()

    #generate ADT mean from a Uniform distribution
    ADT_mean = {}
    for headnode, tailnode in G.arcs:
        if headnode < tailnode:
            ADT_mean[headnode, tailnode] = random.randint(200, 3000)

    #compute the cost mean for each bridge from its length and reliability
    cost_mean = {}
    for headnode, tailnode in G.arcs:
        if headnode < tailnode:
            cost_mean[headnode, tailnode] = 5 * (
                G.length[headnode, tailnode] / 100 +
                (1 - bridge_true_reliability_mean[headnode, tailnode]))

    #export the parameters of bridges
    f = open('bridge_parameters.txt', 'w')
    f.write('bridge :  reliability, ADT, length, cost\n')
    for key, value in bridge_true_reliability_mean.items():
        if key[0] < key[1]:
            f.write('{}:{},{},{}, {}\n'.format(key, value, ADT_mean[key],
                                               G.length[key], cost_mean[key]))
    f.close()

    f1 = open('network original resilience.txt', 'w')
    f1.close()
    f2 = open('network GA Repair resilience.txt', 'w')
    f2.close()

    #Monte Carlo Sampling Starts---------------------------------------------------------------------------------
    for sample_ID in xrange(1, 2):  #single sample for testing; use xrange(1, SampleNum + 1) for the full run

        seed = 7
        random.seed(seed)
        np.random.seed(seed)

        #Monte Carlo Sampling on ADT
        ADT_sample = []

        for key, value in ADT_mean.items():
            #ADT_sample.append(value)
            #ADT_sample.append(random.normalvariate(value, 0.08*value))
            ADT_sample.append(random.normalvariate(value, 0.08 * value))
        #print ADT_sample

        G.addADTT(ADT_sample)

        #print G.ADTT

        #Monte Carlo Sampling on cost
        cost_sample = []
        for key, value in cost_mean.items():
            cost_sample.append(value)
            #cost_sample.append(random.normalvariate(value, 0.08*value))

        G.addcost(cost_sample)

        #print G.cost

        #Generate the Monte Carlo reliability sample for this iteration
        #check the length of mean and cov
        true_reliability_sample_list = []
        if len(hazard_mean) == len(hazard_matrix):
            #transfer hazard cov to hazard covar
            hazard_covar_matrix = cov_to_covar(hazard_mean, hazard_matrix)
            true_reliability_sample_list = MNDNgenerator(
                hazard_mean, hazard_covar_matrix)
        else:
            sys.exit(
                "Numpy error: mean and cov must have the same length; mean is {} and cov is {}"
                .format(len(hazard_mean), len(hazard_matrix)))

        #true_reliability_sample_list = hazard_mean

        G.addresilience(true_reliability_sample_list.tolist())
        #G.addresilience(true_reliability_sample_list)

        #G.resilience[1,2], G.resilience[2,1] = 1,1

        #for key, value in G.resilience.items():
        #G.resilience[key] = 0.99
        #G.resilience[23,24], G.resilience[24,23] = 0.99, 0.99
        #G.resilience[23,25], G.resilience[25,23] = 0.99, 0.99

        nodelist = copy.deepcopy(G.nodes)  #store the original node list
        edgelist = copy.deepcopy(G.arcs)  #store the original arc list

        maxADTT = max(G.ADTT.values())
        minADTT = min(G.ADTT.values())
        #Normal_ADTT = minnormalize(G.ADTT, maxADTT, minADTT, 1)
        #Normal_ADTT = sumnormalize(G.ADTT, 1)

        L, Length_Path, Fresilience_Path, Path_ADTT = PS.main(
            G.nodes, G.arcs, G.emergnode, G.length, G.resilience,
            G.ADTT)  #all independent paths between each node pair (i, j)

        f = open('Independet_Paths.txt', 'w')
        f.write('Sample ID {}\n'.format(sample_ID))
        for i, j in L.keys():
            f.write(
                'nodes pair: ({},{}), Total independent paths {} \n '.format(
                    i, j, len(L[i, j].keys())))
            for k in L[i, j].keys():
                f.write('{},{},{},{},{}\n'.format(k, L[i, j][k],
                                                  Length_Path[i, j][k],
                                                  Fresilience_Path[i, j][k],
                                                  Path_ADTT[i, j][k]))

        f.close()

        #get the max and min value from L_pk(i,j)
        #Max_Length_Path, Min_Length_Path, Sum_Length_Path = -float("inf"), float("inf"), 0
        #for key1,key2 in Length_Path.keys():
        #for key3,value in Length_Path[key1,key2].items():
        #Sum_Length_Path += value
        #if value < Min_Length_Path:
        #Min_Length_Path = value
        #if value > Max_Length_Path:
        #Max_Length_Path = value

        #Normal_Path_Length = {}    #normalized length of path
        #for key1,key2 in Length_Path.keys():
        #Normal_Path_Length[key1,key2] = {}
        #for key3,value in Length_Path[key1,key2].items():
        #Normal_Path_Length[key1,key2][key3] = (Max_Length_Path - value)/(Max_Length_Path - Min_Length_Path)

        #normalize the path length
        Normal_Path_Length = {}
        for headnode, tailnode in Length_Path.keys():
            Normal_Path_Length[headnode, tailnode] = {}
            if len(Length_Path[headnode, tailnode].keys()) == 1:
                Normal_Path_Length[headnode, tailnode][1] = 1
            elif len(Length_Path[headnode, tailnode].keys()) > 1:
                Temp_Sum = 0
                for k, value in Length_Path[headnode, tailnode].items():
                    Temp_Sum += value
                Normal_sum = 0
                for k, value in Length_Path[headnode, tailnode].items():
                    Normal_sum += Temp_Sum - value
                for k, value in Length_Path[headnode, tailnode].items():
                    Normal_Path_Length[headnode, tailnode][k] = (
                        (Temp_Sum - value) / Normal_sum) * len(
                            Length_Path[headnode, tailnode].keys())

        #normalize ADTT
        Normal_Path_ADTT = {}
        for headnode, tailnode in Path_ADTT.keys():
            Normal_Path_ADTT[headnode, tailnode] = {}
            if len(Path_ADTT[headnode, tailnode].keys()) == 1:
                Normal_Path_ADTT[headnode, tailnode][1] = 1
            elif len(Path_ADTT[headnode, tailnode].keys()) > 1:
                Temp_Sum = 0
                for k, value in Path_ADTT[headnode, tailnode].items():
                    Temp_Sum += value
                for k, value in Path_ADTT[headnode, tailnode].items():
                    Normal_Path_ADTT[headnode,
                                     tailnode][k] = (value / Temp_Sum) * len(
                                         Path_ADTT[headnode, tailnode].keys())

        #inverse shortest distance from node i to the emergency nodes
        omega = {}
        for node in G.nodes:
            if node in G.emergnode:
                omega[node] = 1
            else:
                omega[node] = 0

        for head, tail in Length_Path.keys():
            if head in G.emergnode or tail in G.emergnode:
                omega[head] = max(omega[head], 1 / Length_Path[head, tail][1])
                omega[tail] = max(omega[tail], 1 / Length_Path[head, tail][1])

        #compute the weight of each node
        NodeWeight = {}
        sumomega = sum(omega.values())
        for node in G.nodes:
            NodeWeight[node] = omega[node] / sumomega

        G_Resilience = resilience_evaluation(G.nodes, L, Normal_Path_Length,
                                             Fresilience_Path, NodeWeight,
                                             Normal_Path_ADTT)

        total_cost = sum(G.cost.values()) / 2.0
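        #each bridge cost appears in both directions in G.cost, hence the division by 2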

        #result = FE.friability_evaluation(V,E,u,q,R_G)

        #F_G = result[0]     #friability of the whole network
        #F_max = result[1]   #maximum friability of nodes

        print "The resilience of network G is: ", G_Resilience
        #print "The friability of network G is: ", F_G
        #print "The maximum friability of network G is: ", F_max

        f1 = open('network original resilience.txt', 'a')
        f1.write('{}\n'.format(G_Resilience))
        f1.close()

        if GA_Reinfoce_open == 1:  #case 1 reinforcement
            #next use GA to solve the optimization problem
            BinVar = []
            #for i in V:    #these loops find all the complementary edges
            #for j in V:
            #if j != i:
            #if (i,j) not in E:
            #BinVar.append((i,j))
            #BinVar.append((j,i))

            #BinVar = [(1,5),(1,3),(1,8),(1,10), (2,10),(3,10),(3,7),(6,8),(7,10)]

            for headnode, tailnode in G.arcs:
                if headnode < tailnode:
                    BinVar.append((headnode, tailnode))

            BinVar_swap = swap(BinVar)
            BinVar = combine(BinVar, BinVar_swap)
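            #swap/combine extend BinVar so both (i, j) and (j, i) appear for every candidate
            #bridge, matching how G.arcs and the chromosome keys store each bridge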

            #for (i,j) in BinVar:
            #q[i,j] = 0.99

            if pareto == 0:
                GA_iteration, GA_run_time, GA_best_fitness, GA_best_solution = GA.main(
                    nodelist, edgelist, BinVar, num_population, max_iteration,
                    max_time, crossover_rate, mutation_rate, top, seed,
                    investment, GA_Reinfoce_open, GA_Construct_open, G,
                    sample_ID)

                GA_best_solution2 = {}
                for headnode, tailnode in GA_best_solution.keys():
                    if headnode < tailnode:
                        if GA_best_solution[headnode, tailnode] == 1:
                            GA_best_solution2[headnode, tailnode] = 1

                Total_cost = 0
                for headnode, tailnode in GA_best_solution2.keys():
                    if headnode < tailnode:
                        Total_cost += GA_best_solution[
                            headnode, tailnode] * G.cost[headnode, tailnode]

                print 'GA_iteration', GA_iteration, 'GA_run_time', GA_run_time, 'GA_best_fitness', GA_best_fitness, 'Total number of bridges', sum(
                    GA_best_solution2.values()
                ), 'Total cost', Total_cost, 'GA_best_solution', GA_best_solution2

                f2 = open('network GA Repair resilience.txt', 'a')
                f2.write('{} \t {} \t {} \t {}\n'.format(
                    GA_best_fitness, sum(GA_best_solution2.values()),
                    Total_cost, GA_best_solution2))
                f2.close()

            if pareto == 1:
                for investment in range(0, 190, 3):
                    print investment

                    GA_iteration, GA_run_time, GA_best_fitness, GA_best_solution = GA.main(
                        nodelist, edgelist, BinVar, num_population,
                        max_iteration, max_time, crossover_rate, mutation_rate,
                        top, seed, investment, GA_Reinfoce_open,
                        GA_Construct_open, G, sample_ID)

                    GA_best_solution2 = {}
                    for headnode, tailnode in GA_best_solution.keys():
                        if headnode < tailnode:
                            if GA_best_solution[headnode, tailnode] == 1:
                                GA_best_solution2[headnode, tailnode] = 1

                    Total_cost = 0
                    for headnode, tailnode in GA_best_solution2.keys():
                        if headnode < tailnode:
                            Total_cost += GA_best_solution[
                                headnode, tailnode] * G.cost[headnode,
                                                             tailnode]

                    print 'budget', investment, 'GA_iteration', GA_iteration, 'GA_run_time', GA_run_time, 'GA_best_fitness', GA_best_fitness, 'Total number of bridges', sum(
                        GA_best_solution2.values()
                    ), 'Total cost', Total_cost, 'GA_best_solution', GA_best_solution2

                    f2 = open('network GA Repair resilience.txt', 'a')
                    f2.write('{} \t {} \t {} \t {} \t {}\n'.format(
                        investment, GA_best_fitness,
                        sum(GA_best_solution2.values()), Total_cost,
                        GA_best_solution2))
                    f2.close()