Example #1
File: minsets.py Project: p-john/UPF
def minsets_simulation(seed_dir, output_dir):
    seeds = os.listdir(seed_dir)
    rule_size = list(range(1, 101, 1))
    opts = ["hypersplit"]
    complete = ""
    binths = [2,4,8,16,32]

    states = {  "seeds" : 0,
                "opts" : 0,
                "rule_size" : 0,
                "complete" : 0,
                "binths" : 0
                }
    save_file = os.path.join("minimal_sets_simulation",output_dir, "minsets_sim.sav")
    sav = StateObject(save_file,states) # Loads old states if available
    state_loaded = False

    for id_s, seed in enumerate(seeds):
        if not state_loaded and sav.states["seeds"] > id_s:
            continue
        seed_file = os.path.join(seed_dir,seed)
        seed_name = os.path.basename(seed).split('_')[0]
        output = os.path.join("minimal_sets_simulation",output_dir, seed_name)
        if not os.path.exists(output):
            os.makedirs(output)
        for id_r, num_rules in enumerate(rule_size):
            if not state_loaded and sav.states["rule_size"] > id_r:
                continue
            # for id_c, cnt in enumerate(count):
            #     if not state_loaded and sav.states["count"] > id_c:
            #         continue
            # Generate ClassBench file
            set_name = os.path.basename(seed_file).split('_')[0]
            random_seed = random.randint(0, sys.maxsize)
            cb_file = os.path.join(output,(set_name + "_" + str(num_rules)))
            cb_filepath = generate_cb_file(seed_file, num_rules, 1, 0,
                                               0, random_seed, cb_file)
            # Generate Trace File
            scale = int(float(100000) / num_rules) + 1
            trace_filepath = generate_trace_file(cb_file, scale, 100000)
            for id_o, opt in enumerate(opts):
                if not state_loaded and sav.states["opts"] > id_o:
                    continue
                for id_b, binth in enumerate(binths):
                    if not state_loaded and sav.states["binths"] > id_b:
                        continue
                    state_loaded = True
                    # Test for each optimization
                    if opt == "hypersplit":
                        simulate_hypersplit(output, cb_filepath, trace_filepath,
                                                100, binth, complete)
                    sav.update_state({"binths" : id_b})
                sav.update_state({"opts" : id_o})
            simulate_original(output,cb_filepath,trace_filepath)
            os.unlink(cb_filepath)
            os.unlink(trace_filepath)
            # sav.update_state({"count" : id_c})
            sav.update_state({"rule_size" : id_r})
        sav.update_state({"seeds" : id_s})
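
Every driver in these examples resumes an interrupted run through a `StateObject` that persists the current loop indices. The UPF implementation of that class is not shown in these snippets; the following is only a minimal sketch of such a checkpoint helper, assuming a JSON file is an acceptable on-disk format:

import json
import os

class StateObject(object):
    """Hypothetical sketch: keeps a dict of loop indices and persists it to disk."""

    def __init__(self, save_file, states):
        self.save_file = save_file
        self.states = dict(states)
        # Load previously saved indices if a checkpoint file already exists.
        if os.path.exists(save_file):
            with open(save_file) as handle:
                self.states.update(json.load(handle))

    def update_state(self, partial):
        # Merge the new indices and write the checkpoint back to disk.
        self.states.update(partial)
        save_dir = os.path.dirname(self.save_file)
        if save_dir and not os.path.exists(save_dir):
            os.makedirs(save_dir)
        with open(self.save_file, "w") as handle:
            json.dump(self.states, handle)

With this interface, `sav.states` holds the last saved indices and `sav.update_state` checkpoints after each finished iteration, which matches how the drivers use it.
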
Example #2
File: minsets.py Project: p-john/UPF
def minsets_performance(seed_dir):

    seeds = os.listdir(seed_dir)
    rule_size = list(range(1, 101, 1))
    engines = ["iptables", "ipfw", "pf"]
    opts = ["hypersplit"]
    complete = ""
    count = list(range(0,10))
    states = {  "seeds" : 0,
                "engines" : 0,
                "opts" : 0,
                "rule_size" : 0,
                "complete" : 0,
                "count" : 0}
    save_file = os.path.join("minimal_sets_performance","minsets_perf.sav")
    sav = StateObject(save_file,states) # Loads old states if available
    last_seed = seeds[sav.states["seeds"]]
    last_engine = engines[sav.states["engines"]]
    last_opt = opts[sav.states["opts"]]
    last_num = rule_size[sav.states["rule_size"]]
    # Remove partial results of the interrupted combination before resuming
    for root, dirs, files in os.walk("minimal_sets_performance"):
        for x in files:
            if (last_seed in x and last_engine in x and last_opt in x
                    and str(last_num) in x):
                os.unlink(os.path.join(root, x))

    state_loaded = False
    for id_s, seed in enumerate(seeds):
        if not state_loaded and sav.states["seeds"] > id_s:
            continue
        seed_file = os.path.join(seed_dir,seed)
        seed_name = os.path.basename(seed).split('_')[0]
        output = os.path.join("minimal_sets_performance", seed_name)
        if not os.path.exists(output):
            os.makedirs(output)
        for id_r, num_rules in enumerate(rule_size):
            if not state_loaded and sav.states["rule_size"] > id_r:
                continue
            for id_c, cnt in enumerate(count):
                if not state_loaded and sav.states["count"] > id_c:
                    continue
                # Generate ClassBench file
                set_name = os.path.basename(seed_file).split('_')[0]
                random_seed = random.randint(0, sys.maxsize)
                cb_file = os.path.join(output,(set_name + "_" + str(num_rules)))
                cb_filepath = generate_cb_file(seed_file, num_rules, 1, 0,
                                               0, random_seed, cb_file)
                # Generate Trace File
                scale = int(float(100000) / num_rules) + 1
                trace_filepath = generate_trace_file(cb_file, scale, 100000)
                for id_o, opt in enumerate(opts):
                    if not state_loaded and sav.states["opts"] > id_o:
                        continue
                    for id_e, engine in enumerate(engines):
                        if not state_loaded and sav.states["engines"] > id_e:
                            continue
                        state_loaded = True
                        # Test for each optimization
                        if opt == "hypersplit":
                            test_hypersplit(output, cb_filepath, trace_filepath,
                                            engine, 100, 1, complete)
                        test_original(output,cb_filepath,trace_filepath,engine)
                        sav.update_state({"engines" : id_e})
                    sav.update_state({"opts" : id_o})
                os.unlink(cb_filepath)
                os.unlink(trace_filepath)
                sav.update_state({"count" : id_c})
            sav.update_state({"rule_size" : id_r})
        sav.update_state({"seeds" : id_s})
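
The `state_loaded` flag in these drivers implements a skip-ahead resume: every loop level skips iterations that lie before the saved index, and as soon as the innermost body runs once the flag turns the checks off. A stripped-down, illustrative sketch of the same pattern with two loop levels (`do_work` is a placeholder, not a UPF function):

def resumable_run(items_a, items_b, sav, do_work):
    # Illustration of the skip-ahead pattern used by the drivers above;
    # sav.states is expected to contain the keys "a" and "b".
    state_loaded = False
    for id_a, a in enumerate(items_a):
        if not state_loaded and sav.states["a"] > id_a:
            continue  # still before the checkpointed outer index
        for id_b, b in enumerate(items_b):
            if not state_loaded and sav.states["b"] > id_b:
                continue  # skip inner iterations that already completed
            state_loaded = True  # checkpoint reached, stop skipping
            do_work(a, b)
            sav.update_state({"b": id_b})
        sav.update_state({"a": id_a})
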
Example #3
def test_rulesets(seed_dir):
    seeds = os.listdir(seed_dir)
    seeds.remove("ipc1_seed")
    seeds.remove("fw1_seed")

    rule_size = list(range(250, 5001, 250))
    # block_sizes = list(range(50, 501, 50))
    block_sizes = [5000]
    # engines = ["iptables", "ipfw", "pf"]
    engines = ["ipfw"]
    # opts = ["simple_redundancy", "fdd_redundancy",
    #        "firewall_compressor", "saxpac", "hypersplit"]
    opts = ["hypersplit", "fwc_and_hyp"]
    # binths = ["2", "4", "8", "16", "32"]
    binths = ["4"]
    complete = ["", "--complete_transform"]
    use_hypersplit = ["", "--use_hypersplit"]
    count = list(range(0, 20))
    states = {
        "seeds": 0,
        "opts": 0,
        "binths": 0,
        "engines": 0,
        "rule_size": 0,
        "complete": 0,
        "count": 0,
        "block_sizes": 0,
    }
    save_file = os.path.join("ipfw_table_count", "ipfw_table.sav")
    sav = StateObject(save_file, states)  # Loads old states if available

    state_loaded = False
    for id_s, seed in enumerate(seeds):
        if not state_loaded and sav.states["seeds"] > id_s:
            continue
        seed_file = os.path.join(seed_dir, seed)
        seed_name = os.path.basename(seed).split("_")[0]
        output = os.path.join("ipfw_table_count", seed_name)
        if not os.path.exists(output):
            os.makedirs(output)

        for id_r, num_rules in enumerate(rule_size):
            if not state_loaded and sav.states["rule_size"] > id_r:
                continue
            for id_c, cnt in enumerate(count):
                if not state_loaded and sav.states["count"] > id_c:
                    continue
                # Generate ClassBench file
                set_name = os.path.basename(seed_file).split("_")[0]
                random_seed = cnt
                cb_file = os.path.join(output, (set_name + "_" + str(num_rules)))
                cb_filepath = generate_cb_file(seed_file, num_rules, 1, 0, 0, random_seed, cb_file)
                # Generate Trace File
                scale = int(float(100000) / num_rules) + 1
                trace_filepath = generate_trace_file(cb_file, scale, 100000)
                for id_b, block in enumerate(block_sizes):
                    if not state_loaded and sav.states["block_sizes"] > id_b:
                        continue
                    for id_e, engine in enumerate(engines):
                        if not state_loaded and sav.states["engines"] > id_e:
                            continue
                        for id_o, opt in enumerate(opts):
                            if not state_loaded and sav.states["opts"] > id_o:
                                continue
                            state_loaded = True
                            # Test for each optimization
                            if opt == "simple_redundancy":
                                test_simple_redundancy(output, cb_filepath, trace_filepath, engine, block, complete)
                            elif opt == "fdd_redundancy":
                                test_fdd_redundancy(output, cb_filepath, trace_filepath, engine, block, complete)
                            elif opt == "firewall_compressor":
                                test_fw_compressor(output, cb_filepath, trace_filepath, engine, block, complete)
                            elif opt == "saxpac":
                                for switch in use_hypersplit:
                                    test_saxpac(output, cb_filepath, trace_filepath, engine, block, switch, complete)
                            elif opt == "hypersplit":
                                for binth in binths:
                                    test_hypersplit(output, cb_filepath, trace_filepath, engine, block, binth, complete)
                            elif opt == "fwc_and_hyp":
                                for binth in binths:
                                    test_fwc_and_hyp(
                                        output, cb_filepath, trace_filepath, engine, block, binth, complete
                                    )
                            sav.update_state({"opts": id_o})
                        sav.update_state({"engines": id_e})
                    sav.update_state({"block_sizes": id_b})
                os.unlink(cb_filepath)
                os.unlink(trace_filepath)
                sav.update_state({"count": id_c})
            sav.update_state({"rule_size": id_r})
        sav.update_state({"seeds": id_s})
Example #4
def simulate_rulesets(seed_dir,output_dir):
    seeds = os.listdir(seed_dir)
    rule_size = list(range(200, 10001, 200))
    block_sizes = list(range(50,501,50))
    opts = ["simple_redundancy", "fdd_redundancy",
            "firewall_compressor", "saxpac", "hypersplit"]
    binths = ["2", "4", "8", "16", "32"]
    complete = ["", "--complete_transform"]
    use_hypersplit = ["", "--use_hypersplit"]
    count = list(range(0,10))
    states = {  "seeds" : 0,
                "opts" : 0,
                "binths" : 0,
                "rule_size" : 0,
                "complete" : 0,
                # "count" : 0,
                "block_sizes" : 0}
    save_file = os.path.join("simulate_rulesets",output_dir,"simulate_rulesets.sav")
    sav = StateObject(save_file,states) # Loads old states if available
    state_loaded = False

    for id_s,seed in enumerate(seeds):
        if not state_loaded and sav.states["seeds"] > id_s:
            continue
        seed_file = os.path.join(seed_dir,seed)
        seed_name = os.path.basename(seed).split('_')[0]
        output = os.path.join("simulate_rulesets",output_dir,seed_name)
        if not os.path.exists(output):
            os.makedirs(output)

        for id_r, num_rules in enumerate(rule_size):
            if not state_loaded and sav.states["rule_size"] > id_r:
                continue
            # for id_c, cnt in enumerate(count):
            #     if not state_loaded and sav.states["count"] > id_c:
            #         continue
            # Generate ClassBench file

            set_name = os.path.basename(seed_file).split('_')[0]
            random_seed = random.randint(0, sys.maxsize)
            cb_file = os.path.join(output,(set_name + "_" + str(num_rules)))
            cb_filepath = generate_cb_file(seed_file, num_rules, 1, 0,
                                               0, random_seed, cb_file)
            # Generate Trace File
            scale = int(float(100000) / num_rules) + 1
            trace_filepath = generate_trace_file(cb_file, scale, num_rules*10)
            for id_b, block in enumerate(block_sizes):
                if not state_loaded and sav.states["block_sizes"] > id_b:
                    continue
                for id_o, opt in enumerate(opts):
                    if not state_loaded and sav.states["opts"] > id_o:
                        continue
                    state_loaded = True
                    # Simulation for each optimization
                    if opt == "simple_redundancy":
                        # threading.Thread(target=simulate_simple_redundancy,
                        #                  args=(output,cb_filepath,trace_filepath,block,
                        #                  complete)).start()
                        simulate_simple_redundancy(output, cb_filepath,
                                                   trace_filepath, block, complete)
                    elif opt == "fdd_redundancy":
                        simulate_fdd_redundancy(output,cb_filepath,trace_filepath,
                                                block,complete)
                    elif opt == "firewall_compressor":
                        simulate_fw_compressor(output,cb_filepath,trace_filepath,
                                               block,complete)
                    elif opt == "saxpac":
                        for switch in use_hypersplit:
                            simulate_saxpac(output,cb_filepath,trace_filepath,
                                            block,switch, complete)
                    elif opt == "hypersplit":
                        for binth in binths:
                            simulate_hypersplit(output,cb_filepath,trace_filepath,
                                                block, binth, complete)
                    sav.update_state({"opts" : id_o})
                sav.update_state({"block_sizes" : id_b})
            simulate_original(output,cb_filepath,trace_filepath)
            os.unlink(cb_filepath)
            os.unlink(trace_filepath)
            # sav.update_state({"count" : id_c})
            sav.update_state({"rule_size" : id_r})
        sav.update_state({"seeds" : id_s})
Example #5
File: log_sender.py Project: p-john/UPF
                                            str(binth) + "_block_" + str(
        block_size)))
    cmd = "./UPF -i " + cb_filepath + " -cb -o " + output_file + " -" + \
          engine + " -optimize " + "hypersplit " + "--binth " + str(binth) \
          + " --block_size " + str(block_size)
    print(cmd)
    subprocess.call(cmd, shell=True)
    log_test(output_file, trace_filepath, engine, output_dir)
    os.system("rm " + output_file)


seed = args.seed
output_dir = args.output
cb_file = os.path.join(os.path.basename(seed).split('_')[0] + "_1000")
cb_filepath = generate_cb_file(seed,1000,1,0,0,5,cb_file)
trace_filepath = generate_trace_file(cb_filepath,1,100000)
engines = ["iptables","pf","ipfw","ipf"]
opts = ["simple_redundancy","fdd_redundancy","firewall_compressor","hypersplit","saxpac"]
binth = "4"
block = "100"
complete = ""
switch = "use_hypersplit"
output = os.path.join("log_temp")
packets = get_scapy_packets_from_tracefile(trace_filepath)

for engine in engines:
    for opt in opts:
        if opt == "simple_redundancy":
            log_simple_redundancy(output, cb_filepath,
                                  trace_filepath, engine,
                                  block, complete)
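
The command in the snippet above is built by string concatenation and executed with `shell=True`. As an alternative sketch (flag names copied from the concatenated string, not checked against UPF's documentation), the same invocation can be passed to `subprocess.call` as an argument list, which sidesteps shell quoting:

import subprocess

def run_hypersplit(cb_filepath, output_file, engine, binth, block_size):
    # Hypothetical helper mirroring the concatenated command shown above.
    cmd = ["./UPF", "-i", cb_filepath, "-cb", "-o", output_file,
           "-" + engine, "-optimize", "hypersplit",
           "--binth", str(binth), "--block_size", str(block_size)]
    print(" ".join(cmd))
    return subprocess.call(cmd)
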