def single_experiment(size):
    # Original graph to clone several times, with the second best path strategy applied
    network_graph = rip_gen.generate_rip_graph(size)

    # This creates two more copies of the graph so I can apply each backup strategy on them
    network_graph_wbp = network_graph.subgraph(network_graph.nodes())
    network_graph_lbp = network_graph.subgraph(network_graph.nodes())
    lbp.least_overlapping_backup_path(network_graph_lbp)

    # Compute all the shortest paths in the graph as primary paths
    print "A graph with %d nodes and %d edges was generated." % (
        network_graph.number_of_nodes(), network_graph.number_of_edges())
    primary_paths = []
    for k, v in nx.shortest_path(network_graph).iteritems():
        for n, m in v.iteritems():
            if len(m) > 1:
                primary_paths.append(m)
    # print primary_paths

    # rip_gen.draw_graph(network_graph)

    global outfile
    # Changing the topology by deleting 10%, 30%, 50% and 70% of the edges
    for tp in [10, 30, 50, 70]:
        print "\n%d%% of topology change" % tp
        print "======================"
        reduced_graph = network_graph.subgraph(network_graph.nodes())
        deleted_edges = reduce_edges(reduced_graph, tp)
        print "%d edges deleted" % len(deleted_edges)
        print deleted_edges
        ap = detect_affected_paths(primary_paths, deleted_edges)
        # print ap
        print "Affection rate (paths affected after deletion): %d/%d = %.2f%%" % (
            len(ap), len(primary_paths), len(ap) * 100.0 / len(primary_paths))

        sbp.second_best_cost_backup_path(network_graph)
        global fail_count
        fail_count = 0
        for (s, d) in ap:
            check_backup_strategy(network_graph, reduced_graph, s, d)
        print "\nUsing SECOND BEST COST backup strategy...   Fail rate: %d/%d = %.2f%%" % (
            fail_count, len(ap), fail_count * 100.0 / len(ap))
        #outfile.write("\nUsing SECOND BEST COST backup strategy...   Fail rate: %d/%d = %.2f%%" % (fail_count, len(ap), fail_count*100.0/len(ap)))

        wbp.worst_cost_backup_path(network_graph_wbp)
        global fail_count
        fail_count = 0
        for (s, d) in ap:
            check_backup_strategy(network_graph_wbp, reduced_graph, s, d)
        print "Using WORST COST backup strategy...   Fail rate: %d/%d = %.2f%%" % (
            fail_count, len(ap), fail_count * 100.0 / len(ap))
        #outfile.write("Using WORST BEST COST backup strategy...   Fail rate: %d/%d = %.2f%%" % (fail_count, len(ap), fail_count*100.0/len(ap)))

        global fail_count
        fail_count = 0
        check_backup_strategy_lbp(network_graph_lbp, ap, deleted_edges)
        print "Using LEAST OVERLAPPING backup strategy...   Fail rate: %d/%d = %.2f%%" % (
            fail_count, len(ap), fail_count * 100.0 / len(ap))
def single_experiment(size):
    """Run one failure-recovery experiment on a random RIP graph of `size` nodes.

    NOTE(review): this is a byte-for-byte duplicate of the single_experiment
    defined earlier in this file. Being defined later, this definition silently
    shadows the earlier one; one of the two copies should be removed.
    """
    # Original graph to clone several times, with the second best path strategy applied
    network_graph = rip_gen.generate_rip_graph(size)

    # This creates two more copies of the graph so I can apply each backup strategy on them
    network_graph_wbp = network_graph.subgraph(network_graph.nodes())
    network_graph_lbp = network_graph.subgraph(network_graph.nodes())
    lbp.least_overlapping_backup_path(network_graph_lbp)

    # Compute all the shortest paths in the graph as primary paths
    print "A graph with %d nodes and %d edges was generated." % (network_graph.number_of_nodes(), network_graph.number_of_edges())
    primary_paths = []
    for k,v in nx.shortest_path(network_graph).iteritems():
        for n,m in v.iteritems():
            if len(m) > 1:
                primary_paths.append(m)
    # print primary_paths

    # rip_gen.draw_graph(network_graph)

    # NOTE(review): outfile is only referenced in commented-out writes below,
    # so this global declaration is currently dead.
    global outfile
    # Changing the topology by deleting 10%, 30%, 50% and 70% of the edges
    for tp in [10, 30, 50, 70]:
        print "\n%d%% of topology change" % tp
        print "======================"
        reduced_graph = network_graph.subgraph(network_graph.nodes())
        deleted_edges = reduce_edges(reduced_graph, tp)
        print "%d edges deleted" % len(deleted_edges)
        print deleted_edges
        ap = detect_affected_paths(primary_paths, deleted_edges)
        # print ap
        print "Affection rate (paths affected after deletion): %d/%d = %.2f%%" % (len(ap), len(primary_paths), len(ap)*100.0/len(primary_paths))

        # fail_count is mutated by the check_backup_strategy* helpers; the
        # repeated `global fail_count` statements below are redundant — one
        # declaration per function is enough.
        sbp.second_best_cost_backup_path(network_graph)
        global fail_count
        fail_count = 0
        for (s,d) in ap:
            check_backup_strategy(network_graph, reduced_graph, s, d)
        print "\nUsing SECOND BEST COST backup strategy...   Fail rate: %d/%d = %.2f%%" % (fail_count, len(ap), fail_count*100.0/len(ap))
        #outfile.write("\nUsing SECOND BEST COST backup strategy...   Fail rate: %d/%d = %.2f%%" % (fail_count, len(ap), fail_count*100.0/len(ap)))

        wbp.worst_cost_backup_path(network_graph_wbp)
        global fail_count
        fail_count = 0
        for (s,d) in ap:
            check_backup_strategy(network_graph_wbp, reduced_graph, s, d)
        print "Using WORST COST backup strategy...   Fail rate: %d/%d = %.2f%%" % (fail_count, len(ap), fail_count*100.0/len(ap))
        #outfile.write("Using WORST BEST COST backup strategy...   Fail rate: %d/%d = %.2f%%" % (fail_count, len(ap), fail_count*100.0/len(ap)))

        global fail_count
        fail_count = 0
        check_backup_strategy_lbp(network_graph_lbp, ap, deleted_edges)
        print "Using LEAST OVERLAPPING backup strategy...   Fail rate: %d/%d = %.2f%%" % (fail_count, len(ap), fail_count*100.0/len(ap))
                # NOTE(review): orphaned fragment — the enclosing function
                # (apparently the tail of a worst-cost backup next-hop
                # computation) is not visible here; `array`, `bnhv`, `nhv`
                # and `i` come from the missing surrounding scope.
                # Largest finite entry; sys.maxint is used as "infinity".
                worst_distance = np.max(array[array < sys.maxint])
                result = np.where(array == worst_distance)[0]
                bnhv[i] = result[0]

                # If unluckily there is just one different from infinity or two with same distance
                if bnhv[i] == nhv[i] and len(result) <= 1:
                    bnhv[i] = None
                elif bnhv[i] == nhv[i]:
                    # Backup hop must differ from the default hop, so take
                    # the second candidate with the same worst distance.
                    bnhv[i] = result[1]
        #print nhv
        #print bnhv


if __name__ == '__main__':

    network_graph = rip_gen.generate_rip_graph(6)

    np.set_printoptions(precision=1)

    worst_cost_backup_path(network_graph)

    #Comparing with bellman-ford for testing
    for n, nattr in network_graph.nodes(
            data=True):  # For each node n and attribute nattr
        print n
        print nattr['distance_matrix']
        print nattr['best_weights_vector']
        print nattr['default_next_hop']
        print nattr['backup_next_hop']

        print "\n"
                # NOTE(review): duplicate of the orphaned fragment above —
                # the enclosing function is not visible; `array`, `bnhv`,
                # `nhv` and `i` belong to the missing surrounding scope.
                # Largest finite entry; sys.maxint is used as "infinity".
                worst_distance = np.max(array[array < sys.maxint])
                result = np.where(array == worst_distance)[0]
                bnhv[i] = result[0]
                
                # If unluckily there is just one different from infinity or two with same distance
                if bnhv[i] == nhv[i] and len(result) <= 1:
                    bnhv[i] = None
                elif bnhv[i] == nhv[i]:
                    # Backup hop must differ from the default hop, so take
                    # the second candidate with the same worst distance.
                    bnhv[i] = result[1]
        #print nhv
        #print bnhv
        

# NOTE(review): duplicate __main__ demo block — the same driver appears
# earlier in the file; with both present the demo would run twice.
if __name__ == '__main__':    
    
    network_graph = rip_gen.generate_rip_graph(6)
    
    np.set_printoptions(precision=1)
    
    worst_cost_backup_path(network_graph)

    #Comparing with bellman-ford for testing
    for n, nattr in network_graph.nodes(data=True):  # For each node n and attribute nattr
        print n
        print nattr['distance_matrix']
        print nattr['best_weights_vector']
        print nattr['default_next_hop']
        print nattr['backup_next_hop']

        print "\n"