Example No. 1
def print_paths_to_database(paths, reverse_map, table_name):
    """Insert one row per path into `table_name`:
    (header, input_port, output_port, ports, no_of_ports, rules, no_of_rules)."""
    insert_string = "INSERT INTO %s VALUES (?, ?, ?, ?, ?, ?, ?)" % table_name
    
    queries = []
    for p_node in paths:
        path_string = ""
        for port in p_node["visits"]:
            path_string += ("%d " % port)
        path_string += ("%d " % p_node["port"])
        port_count = len(p_node["visits"]) + 1
        
        rl_id = ""
        for (n, r, s) in p_node["hdr"].applied_rule_ids:
            rl_id += (r + " ")
        rule_count = len(p_node["hdr"].applied_rule_ids)
        
        input_port = p_node["visits"][0]
        output_port = p_node["port"]
        output_hs = p_node["hdr"].copy()
        applied_rule_ids = list(output_hs.applied_rule_ids)
        input_hs = trace_hs_back(applied_rule_ids, output_hs, output_port)[0]
        header_string = json.dumps(parse_hs(juniperRouter(1).hs_format, input_hs.hs_list[0]))
        
        #header_string = byte_array_to_pretty_hs_string(input_hs.hs_list[0])
        queries.append((header_string, input_port, output_port, path_string, port_count, rl_id, rule_count))
    
    conn = sqlite3.connect(DATABASE_FILE, 6000)  # 6000-second busy timeout
    for query in queries:    
        conn.execute(insert_string, query)
        
    conn.commit()
    conn.close()
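The seven placeholders in insert_string line up with the seven-column test-packet tables created in Example No. 6 (header, input_port, output_port, ports, no_of_ports, rules, no_of_rules). As a quick sanity check, the rows can be read back with plain sqlite3; a minimal sketch, assuming the same database file and an illustrative table name (the script keeps the real name in a module-level constant):

import sqlite3

DATABASE_FILE = "work/internet2.sqlite"   # assumed; matches the -f default in Example No. 6
TABLE_TEST_PACKETS = "test_packets"       # hypothetical name; the real constant is defined elsewhere

conn = sqlite3.connect(DATABASE_FILE, 6000)
rows = conn.execute("SELECT input_port, output_port, no_of_ports, no_of_rules FROM %s"
                    % TABLE_TEST_PACKETS)
for (in_port, out_port, n_ports, n_rules) in rows:
    print("%d -> %d: %d ports, %d rules" % (in_port, out_port, n_ports, n_rules))
conn.close()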
Example No. 2
def load_internet2_backbone_port_to_id_map():
    """Parse Internet2/port_map.txt into (router -> {port name: id},
    port id string -> "router-port name") lookup tables."""
    f = open("Internet2/port_map.txt",'r')
    id_to_name = {}
    map = {}
    rtr = ""
    cs = juniperRouter(1)
    for line in f:
        if line.startswith("$"):
            rtr = line[1:].strip()
            map[rtr] = {}
        elif line != "":
            tokens = line.strip().split(":")
            map[rtr][tokens[0]] = int(tokens[-1])
            id_to_name[tokens[-1]] = "%s-%s"%(rtr,":".join(tokens[0:-1]))
            out_port = int(tokens[-1]) + cs.PORT_TYPE_MULTIPLIER * cs.OUTPUT_PORT_TYPE_CONST
            id_to_name["%s"%out_port] = "%s-%s"%(rtr,":".join(tokens[0:-1]))
    f.close()
    return (map, id_to_name)
Example No. 3
def load_internet2_backbone_port_to_id_map():
    """Parse work/Internet2/port_map.txt into (router -> {port name: id},
    port id string -> "router-port name") lookup tables."""
    f = open("work/Internet2/port_map.txt", 'r')
    id_to_name = {}
    map = {}
    rtr = ""
    cs = juniperRouter(1)
    for line in f:
        if line.startswith("$"):
            rtr = line[1:].strip()
            map[rtr] = {}
        elif line != "":
            tokens = line.strip().split(":")
            map[rtr][tokens[0]] = int(tokens[-1])
            id_to_name[tokens[-1]] = "%s-%s" % (rtr, ":".join(tokens[0:-1]))
            out_port = int(
                tokens[-1]
            ) + cs.PORT_TYPE_MULTIPLIER * cs.OUTPUT_PORT_TYPE_CONST
            id_to_name["%s" %
                       out_port] = "%s-%s" % (rtr, ":".join(tokens[0:-1]))
    f.close()
    return (map, id_to_name)
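The two loader variants above imply a simple text format for port_map.txt: a line beginning with `$` starts a new router section, and every other non-empty line maps an interface name to a numeric port id as `name:id`. A self-contained sketch of that format with illustrative values (the real file and ids are not shown in these examples):

# A compatible port_map.txt snippet; router and id values are illustrative.
sample = """$atla
xe-0/1/1:13
xe-1/0/2:10
"""

port_map = {}
rtr = ""
for line in sample.splitlines():
    if line.startswith("$"):
        rtr = line[1:].strip()              # "$atla" opens the section for router "atla"
        port_map[rtr] = {}
    elif line.strip():
        name, _, port_id = line.strip().rpartition(":")
        port_map[rtr][name] = int(port_id)

print(port_map["atla"]["xe-0/1/1"])         # 13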
Example No. 4
def main():
    """Run reachability on the Internet2 backbone from atla xe-0/1/1 to the output side of
    atla xe-1/0/2 with an all-wildcard test header, then print the paths found and the elapsed time."""
    global ntf_global
    global ttf_global
    global dst_port_ids_global

    ntf_global = load_internet2_backbone_ntf()
    ttf_global = load_internet2_backbone_ttf()
    (port_map, port_reverse_map) = load_internet2_backbone_port_to_id_map()
    cs = juniperRouter(1)
    output_port_addition = cs.PORT_TYPE_MULTIPLIER * cs.OUTPUT_PORT_TYPE_CONST

    all_x = byte_array_get_all_x(ntf_global.length)

    #cs.set_field(all_x, "vlan", 32, 0)
    #cs.set_field(all_x, "ip_dst", dotted_ip_to_int("64.57.28.243"), 8)
    #cs.set_field(all_x, "ip_src", dotted_ip_to_int("172.27.76.233"), 0)
    #cs.set_field(all_x, "tcp_dst", 22, 0)
    #cs.set_field(all_x, "ip_proto", 6, 0)

    test_pkt = headerspace(ntf_global.length)
    test_pkt.add_hs(all_x)

    src_port_id = port_map["atla"]["xe-0/1/1"]
    dst_port_ids_global = [port_map["atla"]["xe-1/0/2"] + output_port_addition]

    st = time()
    paths = find_reachability_multiprocess(src_port_id, test_pkt)

    #paths = find_reachability(ntf_global, ttf_global, src_port_id, dst_port_ids_global, test_pkt)
    en = time()
    print_loops(paths, port_reverse_map)
    print len(paths)

    #loops = detect_loop(ntf,ttf,loop_port_ids,port_reverse_map,None,output_port_addition)
    #en = time()
    #print_loops(loops, port_reverse_map)
    #print len(loops)

    print en - st
Example No. 5
def main():
    global ntf_global
    global ttf_global
    global dst_port_ids_global 
    
    ntf_global = load_internet2_backbone_ntf()
    ttf_global = load_internet2_backbone_ttf()
    (port_map,port_reverse_map) = load_internet2_backbone_port_to_id_map()
    cs = juniperRouter(1)
    output_port_addition = cs.PORT_TYPE_MULTIPLIER * cs.OUTPUT_PORT_TYPE_CONST
    
    all_x = byte_array_get_all_x(ntf_global.length)
    
    #cs.set_field(all_x, "vlan", 32, 0)
    #cs.set_field(all_x, "ip_dst", dotted_ip_to_int("64.57.28.243"), 8)
    #cs.set_field(all_x, "ip_src", dotted_ip_to_int("172.27.76.233"), 0)
    #cs.set_field(all_x, "tcp_dst", 22, 0)
    #cs.set_field(all_x, "ip_proto", 6, 0)
    
    test_pkt = headerspace(ntf_global.length)
    test_pkt.add_hs(all_x)
    
    src_port_id = port_map["atla"]["xe-0/1/1"]
    dst_port_ids_global = [port_map["atla"]["xe-1/0/2"]+output_port_addition]

    st = time()
    paths = find_reachability_multiprocess(src_port_id,test_pkt)
    
    #paths = find_reachability(ntf_global, ttf_global, src_port_id, dst_port_ids_global, test_pkt)
    en = time()
    print_loops(paths, port_reverse_map)
    print len(paths)
    
    #loops = detect_loop(ntf,ttf,loop_port_ids,port_reverse_map,None,output_port_addition)
    #en = time()
    #print_loops(loops, port_reverse_map)
    #print len(loops)
    
    print en-st
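Both main() variants convert an input port id into the id of the corresponding output-side port by adding cs.PORT_TYPE_MULTIPLIER * cs.OUTPUT_PORT_TYPE_CONST. A tiny sketch of that arithmetic with placeholder constants (the real values are attributes of juniperRouter and are not shown here):

# Placeholder values standing in for juniperRouter's PORT_TYPE_MULTIPLIER and
# OUTPUT_PORT_TYPE_CONST; only the additive pattern is taken from the code above.
PORT_TYPE_MULTIPLIER = 10000
OUTPUT_PORT_TYPE_CONST = 2
output_port_addition = PORT_TYPE_MULTIPLIER * OUTPUT_PORT_TYPE_CONST

input_port_id = 13                                    # illustrative id from a port map
output_port_id = input_port_id + output_port_addition
print(output_port_id)                                 # 20013 under these placeholder constants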
Example No. 6
def main():
    """Generate test packets for the Internet2 backbone: dump rules and links to a SQLite
    database, run reachability from sampled source ports in parallel, then globally compress
    the resulting rule lists and print summary statistics."""
    global src_port_ids_global
    global dst_port_ids_global
    global port_map_global
    global port_reverse_map_global
    global ntf_global
    global ttf_global
    global DATABASE_FILE

    parser = ArgumentParser(description="Generate Test Packets for Internet2")
    parser.add_argument("-p",
                        dest="percentage",
                        type=int,
                        default="100",
                        help="Percentage of test terminals")
    parser.add_argument("-f",
                        dest="filename",
                        default="internet2.sqlite",
                        help="Filename of the database")
    parser.add_argument("-e",
                        action="store_true",
                        default=False,
                        help="Edge port only")
    args = parser.parse_args()

    DATABASE_FILE = "work/%s" % args.filename

    cs = juniperRouter(1)
    output_port_addition = cs.PORT_TYPE_MULTIPLIER * cs.OUTPUT_PORT_TYPE_CONST

    # Load .tf files
    ntf_global = load_internet2_backbone_ntf()
    ttf_global = load_internet2_backbone_ttf()
    (port_map_global,
     port_reverse_map_global) = load_internet2_backbone_port_to_id_map()

    # Initialize the database
    if os.access(DATABASE_FILE, os.F_OK):
        os.remove(DATABASE_FILE)

    conn = sqlite3.connect(DATABASE_FILE)
    conn.execute(
        'CREATE TABLE %s (rule TEXT, input_port TEXT, output_port TEXT, action TEXT, file TEXT, line TEXT)'
        % TABLE_NETWORK_RULES)
    conn.execute(
        'CREATE TABLE %s (rule TEXT, input_port TEXT, output_port TEXT)' %
        TABLE_TOPOLOGY_RULES)
    conn.execute(
        'CREATE TABLE %s (header TEXT, input_port INTEGER, output_port INTEGER, ports TEXT, no_of_ports INTEGER, rules TEXT, no_of_rules INTEGER)'
        % TABLE_TEST_PACKETS)
    conn.execute(
        'CREATE TABLE %s (header TEXT, input_port INTEGER, output_port INTEGER, ports TEXT, no_of_ports INTEGER, rules TEXT, no_of_rules INTEGER)'
        % TABLE_TEST_PACKETS_LOCALLY_COMPRESSED)
    conn.execute('CREATE TABLE %s (rules TEXT, no_of_rules INTEGER)' %
                 TABLE_TEST_PACKETS_GLOBALLY_COMPRESSED)
    conn.execute('CREATE TABLE %s (rule TEXT)' % TABLE_RESULT_RULES)

    rule_count = 0
    for tf in ntf_global.tf_list:
        rule_count += len(tf.rules)
        for rule in tf.rules:
            query = "INSERT INTO %s VALUES (?, ?, ?, ?, ?, ?)" % TABLE_NETWORK_RULES
            conn.execute(
                query,
                (rule['id'], ' '.join(map(str, rule['in_ports'])), ' '.join(
                    map(str, rule['out_ports'])), rule['action'], rule["file"],
                 ' '.join(map(str, rule["line"]))))
    print "Total Rules: %d" % rule_count
    conn.commit()

    rule_count = len(ttf_global.rules)
    for rule in ttf_global.rules:
        query = "INSERT INTO %s VALUES (?, ?, ?)" % TABLE_TOPOLOGY_RULES
        conn.execute(query, (rule['id'], ' '.join(map(
            str, rule['in_ports'])), ' '.join(map(str, rule['out_ports']))))
    print "Total Links: %d" % rule_count

    # Generate all ports
    for rtr in port_map_global.keys():
        src_port_ids_global |= set(port_map_global[rtr].values())

    total_length = len(src_port_ids_global)
    if args.e:  # edge ports only: drop ports that appear as link endpoints in the topology
        for rule in ttf_global.rules:
            if rule['out_ports'][0] in src_port_ids_global:
                src_port_ids_global.remove(rule['out_ports'][0])

    new_length = len(src_port_ids_global) * args.percentage / 100
    src_port_ids_global = random.sample(src_port_ids_global, new_length)
    print "Total Length: %d" % total_length
    print "New Length: %d" % new_length

    for port in src_port_ids_global:
        port += output_port_addition
        dst_port_ids_global.add(port)

    #src_port_ids_global = [300013]
    #dst_port_ids_global = [320010]

    conn.commit()
    conn.close()

    # Run reachability
    start_time = time.time()

    pool = Pool()
    result = pool.map_async(find_test_packets, src_port_ids_global)

    # Close
    pool.close()
    pool.join()

    end_time = time.time()

    test_packet_count = result.get()
    total_paths = sum(test_packet_count)
    print "========== Before Compression ========="
    print "Total Paths = %d" % total_paths
    print "Average packets per port = %f" % (float(total_paths) /
                                             len(src_port_ids_global))
    print "Total Time = %fs" % (end_time - start_time)

    # Global compression
    start_time = time.time()

    conn = sqlite3.connect(DATABASE_FILE, 6000)
    result_rule_lists = []
    query = "SELECT rules FROM %s" % TABLE_TEST_PACKETS_LOCALLY_COMPRESSED
    rows = conn.execute(query)

    for row in rows:
        result_rule_lists.append(row[0].split())
    conn.close()

    chunk_size = 80000
    while True:
        print "Start a new round!"
        conn = sqlite3.connect(DATABASE_FILE, 6000)
        conn.execute('DROP TABLE IF EXISTS %s' % TABLE_SCRATCHPAD)
        conn.execute('CREATE TABLE %s (rules TEXT, no_of_rules INTEGER)' %
                     TABLE_SCRATCHPAD)
        conn.commit()
        conn.close()

        start_len = len(result_rule_lists)
        print start_len

        pool = Pool()
        no_of_chunks = len(result_rule_lists) / chunk_size + 1
        rule_list_chunks = chunks(result_rule_lists, no_of_chunks)
        result = pool.map_async(rule_lists_compress, rule_list_chunks)

        # Close
        pool.close()
        pool.join()
        result.get()

        print "End of this round."

        result_rule_lists = read_rule_lists_from_database(TABLE_SCRATCHPAD)

        end_len = len(result_rule_lists)
        # Stop once a compression round shrinks the list by less than 1%.
        if float(end_len) / float(start_len) > 0.99:
            break

    end_time = time.time()

    query = "INSERT INTO %s VALUES (?, ?)" % TABLE_TEST_PACKETS_GLOBALLY_COMPRESSED
    query2 = "INSERT INTO %s VALUES (?)" % TABLE_RESULT_RULES

    total_paths = len(result_rule_lists)
    total_length = 0

    conn = sqlite3.connect(DATABASE_FILE, 6000)
    conn.execute('DROP TABLE IF EXISTS %s' %
                 TABLE_TEST_PACKETS_GLOBALLY_COMPRESSED)
    conn.execute('CREATE TABLE %s (rules TEXT, no_of_rules INTEGER)' %
                 TABLE_TEST_PACKETS_GLOBALLY_COMPRESSED)

    for rule_list in result_rule_lists:
        total_length += len(rule_list)
        conn.execute(query, (" ".join(rule_list), len(rule_list)))
        for rule in rule_list:
            conn.execute(query2, (rule, ))

    conn.commit()
    conn.close()

    print "========== After Compression ========="
    print "Total Paths = %d" % total_paths
    print "Average packets per port = %f" % (float(total_paths) /
                                             len(src_port_ids_global))
    print "Average length of rule list = %f" % (float(total_length) /
                                                total_paths)
    print "Total Time = %fs" % (end_time - start_time)
Example No. 7
def main():  
    global src_port_ids_global
    global dst_port_ids_global
    global port_map_global
    global port_reverse_map_global
    global ntf_global
    global ttf_global
    global DATABASE_FILE
    
    parser = ArgumentParser(description="Generate Test Packets for Internet2")
    parser.add_argument("-p", dest="percentage", type=int,
                      default="100",
                      help="Percentage of test terminals")
    parser.add_argument("-f", dest="filename",
                      default="internet2.sqlite",
                      help="Filename of the database")
    parser.add_argument("-e", action="store_true",
                      default=False,
                      help="Edge port only")
    args = parser.parse_args()
    
    DATABASE_FILE = "work/%s" % args.filename
     
    cs = juniperRouter(1)
    output_port_addition = cs.PORT_TYPE_MULTIPLIER * cs.OUTPUT_PORT_TYPE_CONST
     
    # Load .tf files
    ntf_global = load_internet2_backbone_ntf()
    ttf_global = load_internet2_backbone_ttf()
    (port_map_global, port_reverse_map_global) = load_internet2_backbone_port_to_id_map()
    
    # Initialize the database
    if os.access(DATABASE_FILE, os.F_OK):
        os.remove(DATABASE_FILE)
    
    conn = sqlite3.connect(DATABASE_FILE)
    conn.execute('CREATE TABLE %s (rule TEXT, input_port TEXT, output_port TEXT, action TEXT, file TEXT, line TEXT)' % TABLE_NETWORK_RULES)
    conn.execute('CREATE TABLE %s (rule TEXT, input_port TEXT, output_port TEXT)' % TABLE_TOPOLOGY_RULES)
    conn.execute('CREATE TABLE %s (header TEXT, input_port INTEGER, output_port INTEGER, ports TEXT, no_of_ports INTEGER, rules TEXT, no_of_rules INTEGER)' % TABLE_TEST_PACKETS)
    conn.execute('CREATE TABLE %s (header TEXT, input_port INTEGER, output_port INTEGER, ports TEXT, no_of_ports INTEGER, rules TEXT, no_of_rules INTEGER)' % TABLE_TEST_PACKETS_LOCALLY_COMPRESSED)
    conn.execute('CREATE TABLE %s (rules TEXT, no_of_rules INTEGER)' % TABLE_TEST_PACKETS_GLOBALLY_COMPRESSED)
    conn.execute('CREATE TABLE %s (rule TEXT)' % TABLE_RESULT_RULES)

    rule_count = 0
    for tf in ntf_global.tf_list:
        rule_count += len(tf.rules)
        for rule in tf.rules:
            query = "INSERT INTO %s VALUES (?, ?, ?, ?, ?, ?)" % TABLE_NETWORK_RULES
            conn.execute(query, (rule['id'],' '.join(map(str, rule['in_ports'])), ' '.join(map(str, rule['out_ports'])), rule['action'], rule["file"], ' '.join(map(str, rule["line"]))))
    print "Total Rules: %d" % rule_count
    conn.commit()
    
    rule_count = len(ttf_global.rules) 
    for rule in ttf_global.rules:
        query = "INSERT INTO %s VALUES (?, ?, ?)" % TABLE_TOPOLOGY_RULES 
        conn.execute(query, (rule['id'],' '.join(map(str, rule['in_ports'])), ' '.join(map(str, rule['out_ports']))))  
    print "Total Links: %d" % rule_count
   
    # Generate all ports
    for rtr in port_map_global.keys():
        src_port_ids_global |= set(port_map_global[rtr].values())
    
    
    total_length = len(src_port_ids_global)
    if args.e:  # edge ports only: drop ports that appear as link endpoints in the topology
        for rule in ttf_global.rules:
            if rule['out_ports'][0] in src_port_ids_global:
                src_port_ids_global.remove(rule['out_ports'][0])    
    
    new_length = len(src_port_ids_global)* args.percentage / 100
    src_port_ids_global = random.sample(src_port_ids_global, new_length)
    print "Total Length: %d" % total_length
    print "New Length: %d" % new_length
    
    for port in src_port_ids_global:
        port += output_port_addition
        dst_port_ids_global.add(port)
    
    #src_port_ids_global = [300013]
    #dst_port_ids_global = [320010]
    
    conn.commit()
    conn.close()
    
    # Run reachability
    start_time = time.time()
    
    pool = Pool()
    result = pool.map_async(find_test_packets, src_port_ids_global)

    # Close
    pool.close()
    pool.join()
    
    end_time = time.time()
    
    test_packet_count = result.get()
    total_paths = sum(test_packet_count)    
    print "========== Before Compression ========="
    print "Total Paths = %d" % total_paths
    print "Average packets per port = %f" % (float(total_paths) / len(src_port_ids_global))
    print "Total Time = %fs" % (end_time - start_time)
    
    # Global compression
    start_time = time.time()
       
    conn = sqlite3.connect(DATABASE_FILE, 6000)    
    result_rule_lists = []
    query = "SELECT rules FROM %s"  % TABLE_TEST_PACKETS_LOCALLY_COMPRESSED
    rows = conn.execute(query)

    for row in rows:
        result_rule_lists.append(row[0].split())
    conn.close()
  
    chunk_size = 80000
    while True:
        print "Start a new round!"
        conn = sqlite3.connect(DATABASE_FILE, 6000)
        conn.execute('DROP TABLE IF EXISTS %s' % TABLE_SCRATCHPAD)
        conn.execute('CREATE TABLE %s (rules TEXT, no_of_rules INTEGER)' % TABLE_SCRATCHPAD)
        conn.commit()    
        conn.close()
        
        start_len = len(result_rule_lists)
        print start_len
        
        pool = Pool()        
        no_of_chunks = len(result_rule_lists) / chunk_size + 1      
        rule_list_chunks = chunks(result_rule_lists, no_of_chunks)            
        result = pool.map_async(rule_lists_compress, rule_list_chunks)

        # Close
        pool.close()
        pool.join()
        result.get()
        
        print "End of this round."
        
        result_rule_lists = read_rule_lists_from_database(TABLE_SCRATCHPAD)
        
        end_len = len(result_rule_lists)
        # Stop once a compression round shrinks the list by less than 1%.
        if float(end_len) / float(start_len) > 0.99:
            break

    end_time = time.time()
    
    query = "INSERT INTO %s VALUES (?, ?)" % TABLE_TEST_PACKETS_GLOBALLY_COMPRESSED
    query2 = "INSERT INTO %s VALUES (?)" % TABLE_RESULT_RULES
    
    total_paths = len(result_rule_lists)
    total_length = 0
    
    conn = sqlite3.connect(DATABASE_FILE, 6000)
    conn.execute('DROP TABLE IF EXISTS %s' % TABLE_TEST_PACKETS_GLOBALLY_COMPRESSED)
    conn.execute('CREATE TABLE %s (rules TEXT, no_of_rules INTEGER)' % TABLE_TEST_PACKETS_GLOBALLY_COMPRESSED)

    for rule_list in result_rule_lists:
        total_length += len(rule_list)
        conn.execute(query, (" ".join(rule_list), len(rule_list)))
        for rule in rule_list:
            conn.execute(query2, (rule,))
     
    conn.commit()    
    conn.close()
    
    print "========== After Compression ========="
    print "Total Paths = %d" % total_paths
    print "Average packets per port = %f" % (float(total_paths) / len(src_port_ids_global))
    print "Average length of rule list = %f" % (float(total_length) / total_paths)
    print "Total Time = %fs" % (end_time - start_time)