def test_cfe_trials():
    PATCH_TYPES = [str(p) for p in GLOBAL_PATCH_TYPES]
    print "PATCH_TYPES:", PATCH_TYPES
    nose.tools.assert_equal(len(PATCH_TYPES), len(set(PATCH_TYPES)))
    all_named_types = set(PATCH_TYPES_AS_ORIGINAL + PATCH_TYPES_WITH_BACKDOOR +
                          PATCH_TYPES_WITH_RULES)
    nose.tools.assert_equal(len(all_named_types - set(PATCH_TYPES)), 0)

    tfolder = bin_location
    tests = utils.find_files(tfolder, "*", only_exec=True)

    bins = ["KPRCA_00044", "NRFIN_00073"]
    titerator = [t for t in tests if any([b in t for b in bins])]
    generated_patches = set()
    tests = []
    with patcherex.utils.tempdir() as td:
        for test in titerator:
            for patch_type in PATCH_TYPES:
                tests.append((test, patch_type, td))

        if sys.argv[-1] == "--single":
            res = []
            for test in tests:
                print "=" * 10, "testing", test
                r = try_one_patch(test)
                res.append(r)
        else:
            pool = multiprocessing.Pool(5)
            res = pool.map(try_one_patch, tests)
            for r in res:
                if r != None and r[0] == False:
                    print "ERROR:", "while running", res[1]

    generated_patches = [r[2] for r in res if r != None]
    # it is not impossible that two patches are exactly the same, but it is worth investigation
    nose.tools.assert_equal(
        len(set(generated_patches)),
        len(bins) * len(PATCH_TYPES) - len(PATCH_TYPES_EXPECTED_FAIL))
    print "Generated", len(generated_patches), "different patches of", len(
        PATCH_TYPES), "types:", PATCH_TYPES
def test_uniquelabels():
    """Check that every asm label found in patcherex's Python sources is
    defined in at most one file (networkrules.py is exempt).
    """
    all_py_files = utils.find_files(patcherex_main_folder, "*.py")
    blacklist = ["networkrules.py"]
    all_py_files = [
        f for f in all_py_files if os.path.basename(f) not in blacklist
    ]

    # Map label -> list of files in which it appears.
    labels_dict = defaultdict(list)
    for pyfile in all_py_files:
        # `with` guarantees the file handle is closed even on error.
        with open(pyfile, "r") as fp:
            content = fp.read()
        for label in utils.string_to_labels(content):
            labels_dict[label].append(pyfile)

    # Any label seen in more than one file is a duplicate.
    duplicates = {}
    for label, files in labels_dict.items():
        if len(files) > 1:
            print(label, [os.path.basename(x) for x in files])
            duplicates[label] = files
    nose.tools.assert_equal(len(duplicates), 0)
# --- Example #3 ---
def fname_unpatched_to_key(f):
    """Derive a lookup key from an unpatched binary's path.

    The last three path components are <cs>/<ctype>/<cb>; the challenge-set
    directory is not part of the key, so it is deliberately discarded.
    Returns the (ctype, cb) tuple.
    """
    _cs, ctype, cb = f.split(os.path.sep)[-3:]
    return (ctype, cb)


def fname_patched_to_key(f):
    """Derive a lookup key from a patched binary's path.

    The last four path components are <ctype>/<ptype>/<cs>/<cb>;
    all four are kept, in order, as the key tuple.
    """
    components = f.split(os.path.sep)
    ctype, ptype, cs, cb = components[-4:]
    return (ctype, ptype, cs, cb)


if __name__ == "__main__":
    # Folders holding the original and the patched binaries, respectively.
    unpatched_folder = sys.argv[1]
    patched_folder = sys.argv[2]

    # Map (ctype, cb) -> file size in bytes for every unpatched binary.
    unpatched_sizes = {}
    for fname in utils.find_files(unpatched_folder, "*", True):
        unpatched_sizes[fname_unpatched_to_key(fname)] = os.path.getsize(fname)

    # Map (ctype, ptype, cs, cb) -> file size for every patched binary.
    patched_sizes = {}
    for fname in utils.find_files(patched_folder, "*", True):
        patched_sizes[fname_patched_to_key(fname)] = os.path.getsize(fname)

    # For each patched binary record (original size, patched size, ratio),
    # matching on challenge type (key[0]) and binary name (key[3]).
    size_overheads = {}
    for key in patched_sizes:
        patched_size = patched_sizes[key]
        original_size = unpatched_sizes[(key[0], key[3])]
        size_overheads[key] = (original_size, patched_size,
                               patched_size / float(original_size))
# --- Example #4 ---
def filter_functionality(flist, blacklist):
    """Return flist with every entry in blacklist removed.

    If blacklist is None, filtering is disabled and flist is returned
    unchanged. Note that when a blacklist is given the result is built by
    set difference, so order is not preserved and duplicates are dropped.
    """
    # `is None` is the correct identity check; `== None` can misbehave with
    # objects overriding __eq__.
    if blacklist is None:
        return flist
    return list(set(flist) - set(blacklist))


if __name__ == "__main__":
    res_folder = sys.argv[1]
    blacklist_folder = None
    if len(sys.argv) == 3:
        blacklist_folder = sys.argv[2]

    if blacklist_folder is not None:
        blacklist = {}
        tfiles = utils.find_files(blacklist_folder, "*.txt")
        for f in tfiles:
            blacklist[os.path.basename(f)[:-4]] = file_to_list(f)
        blacklist["original"] = []
    else:
        blacklist = None

    results = defaultdict(list)
    times = defaultdict(dict)
    log_files = utils.find_files(res_folder, "*_log")
    for lfile in log_files:
        with open(lfile) as fp:
            content = fp.read()
        ret_code = int(content.split("==== RETCODE: ")[1].split()[0])
        log_payload = content.split("==== RETCODE: ")[0].split("\n")[-3].split(
            ":")[0]