def get_libraries(firmware_folder, work_dir):
    """Collect shared libraries from a firmware tree into a 'libs' folder.

    Args:
        firmware_folder: Path of the unpacked firmware tree to scan.
        work_dir: Working directory in which the ``libs`` folder is created.

    Returns:
        Path of the created library folder.
    """
    library_folder = os.path.join(work_dir, "libs")
    # BUG FIX: os.mkdir raised FileExistsError on a re-run; makedirs with
    # exist_ok tolerates an existing folder.
    os.makedirs(library_folder, exist_ok=True)
    executables, libraries = fhc.get_executable_files(firmware_folder)
    for lib in libraries:
        shutil.copy(lib, library_folder)
    print("[+] Created library folder at {}".format(library_folder))
    return library_folder
def get_vulnerabilities_directory(folder_name, ld_path):
    """Report bugs found in the CGI executables under *folder_name*.

    Scans the folder for executable files, keeps those whose path contains
    '.cgi', gathers their argument-taking functions asynchronously, and
    hands the fixed-up function list to the bug finder.

    Args:
        folder_name: Directory to scan for executables.
        ld_path: Library search path forwarded to the bug finder.

    Returns:
        Whatever get_bugs_from_functions produces for the collected funcs.
    """
    executables, shared_libs = fhc.get_executable_files(folder_name)
    cgi_files = [path for path in executables if '.cgi' in path]
    # Flatten the per-file function lists into one list of functions.
    gathered = [
        func
        for func_list in get_all_funcs_async(cgi_files)
        for func in func_list
    ]
    gathered = fix_functions(gathered)
    return get_bugs_from_functions(gathered, ld_path)
def main():
    """Cluster firmware functions and report those similar to known vulns.

    Command-line driver: extracts functions from every executable/shared
    library under the given directory, computes (or unpickles) the known
    vulnerabilities, clusters all functions, then prints every function
    that shares a cluster with a known-vulnerable function.

    NOTE(review): this definition is shadowed by a later ``main()`` in the
    same file; only the last one defined is callable.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("Directory")
    parser.add_argument("-L", "--LD_PATH", default="",
                        help="Path to libraries to load")
    parser.add_argument("-F", "--Function", default="")
    parser.add_argument("-V", "--Vuln_Pickle", default="")
    args = parser.parse_args()

    executables, shared_libs = fhc.get_executable_files(args.Directory)
    all_files = executables + shared_libs

    # Extract argument-taking functions from each binary in parallel and
    # tag every function dict with the file it came from.
    all_arg_funcs = []
    m_pool = Pool()
    print("Getting functions from executables")
    for func_list, file_name in zip(m_pool.map(fh.get_arg_funcs, all_files),
                                    all_files):
        for func in func_list:
            func['file_name'] = file_name
            all_arg_funcs.append(func)
    m_pool.close()
    m_pool.join()

    # BUG FIX: the original tested `args.Vuln_Pickle is ""`, an identity
    # comparison that relies on string interning; use truthiness instead.
    if not args.Vuln_Pickle:
        cores = psutil.cpu_count() - 1
        # Split available memory (MiB) evenly across the worker cores.
        mem_limit = (psutil.virtual_memory()[1] / (1024 * 1024)) / cores
        vulns = get_vulnerabilities(all_arg_funcs, cores, mem_limit,
                                    args.LD_PATH)
        with open("Directory_Vulnerabilities", 'wb') as f:
            pickle.dump(vulns, f, -1)
    else:
        print("[+] Loading from pickle file {}".format(args.Vuln_Pickle))
        # NOTE(review): pickle.load executes arbitrary code; only load
        # trusted pickle files.
        with open(args.Vuln_Pickle, 'rb') as f:
            vulns = pickle.load(f)

    print("[+] Getting sparse functions")
    all_functions = fhc.get_firmware_sparse(all_files)
    # Drop compiler/disassembler internal symbols and features that would
    # skew the clustering.
    bad_prefix = ["fcn.", "sub.", "loc.", "aav.", "sym._fini", "sym._init"]
    all_functions = [
        func for func in all_functions
        if not any(prefix in func['name'] for prefix in bad_prefix)
    ]
    bad_features = ['bits', 'calltype', 'maxbound', 'minbound', 'offset',
                    'size']
    for func in all_functions:
        for feat in bad_features:
            # pop with a default tolerates features that are already absent
            # (the original raised KeyError in that case).
            func.pop(feat, None)

    func_labels = fhc.plot_clustering(all_functions, True)
    file_names = [func['file_name'] for func in all_functions]
    func_names = [func['name'] for func in all_functions]
    clust_group = list(zip(file_names, func_names, func_labels))

    # Optionally restrict the vulnerability set to a single function name.
    if args.Function != "":
        for key in list(vulns.keys()):
            if args.Function not in key:
                vulns.pop(key)

    # Map each vulnerability's cluster label back to its identifier
    # ("file : function").
    vuln_labels = {}
    for func_ident in vulns.keys():
        file_name = func_ident.split(':')[0].rstrip(' ')
        func_name = func_ident.split(':')[1].lstrip(' ')
        try:
            label = list(
                filter(lambda x: x[0] in file_name and x[1] in func_name,
                       clust_group))[0]
            vuln_labels[label[2]] = func_ident
        except IndexError:
            # BUG FIX: was a bare `except:`; only the empty-filter case
            # (no cluster entry matched this vulnerability) is expected.
            pass

    # Print every function that shares a cluster with a vulnerability but
    # is not itself one of the known-vulnerable functions.
    for file_name, func_name, label in zip(file_names, func_names,
                                           func_labels):
        not_known_vuln = not any(func_name in ident for ident in vulns.keys())
        if label in vuln_labels.keys() and not_known_vuln:
            short_name = file_name.split('/')[-1]
            print("{:<30} | {:<30} | {} -> {}".format(
                short_name, func_name, label, vuln_labels[label]))
def main():
    """Cluster firmware functions and attach a similarity ranking to bugs.

    Command-line driver: loads (or computes and pickles) the per-file
    vulnerability results, clusters a trimmed feature set of all functions,
    picks the best-scoring centroid count, and for each reported bug
    attaches a distance-sorted list of functions in the same cluster.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("Directory")
    parser.add_argument("-L", "--LD_PATH", default="",
                        help="Path to libraries to load")
    parser.add_argument("-F", "--Function", default="")
    parser.add_argument("-V", "--Vuln_Pickle", default="")
    args = parser.parse_args()

    executables, shared_libs = fhc.get_executable_files(args.Directory)
    all_files = executables  # shared libraries deliberately excluded here

    if args.Vuln_Pickle:
        # NOTE(review): pickle.load executes arbitrary code; only load
        # trusted pickle files.
        with open(args.Vuln_Pickle, 'rb') as f:
            file_vulnerabilities = pickle.load(f)
    else:
        file_vulnerabilities = vd.process_file_or_folder(
            args.Directory, args.LD_PATH)
        with open('cluster_pickle', 'wb') as f:
            pickle.dump(file_vulnerabilities, f, -1)

    print("[+] Getting sparse functions")
    # Keep a deep copy of the full function data alongside a trimmed
    # feature set used for clustering/distance computations.
    all_functions = []
    all_trim_funcs = []
    for function_list in async_and_iter(async_get_sparse_file_data,
                                        all_files):
        all_functions.extend(copy.deepcopy(function_list))
        all_trim_funcs.extend(
            fhc.trim_funcs(function_list, function_list[0]['file_name']))
    all_functions = fhc.remove_non_needed_functions(all_functions,
                                                    remove_features=False)
    all_trim_funcs = fhc.remove_non_needed_functions(all_trim_funcs)

    print("[+] Clustering and scoring centroid counts")
    all_scores = async_and_iter_clusters(all_trim_funcs, 50)

    # Pick the centroid count with the lowest score, searching indices
    # 1 .. len-3 (the original's search window, preserved here).
    largest_dif = 200  # sentinel larger than any expected score
    large_index = 0
    for idx in range(1, len(all_scores) - 2):
        if largest_dif > all_scores[idx]['score']:
            largest_dif = all_scores[idx]['score']
            large_index = idx
    print("Largest drop at {} with {}".format(
        all_scores[large_index]['count'], largest_dif))
    Largest_Score_Drop = all_scores[large_index]

    function_distances = fhc.get_cosine_dist(all_trim_funcs)

    # Select which vulnerabilities to report on: a named function if given,
    # otherwise every entry flagged with a truthy 'result'.
    if args.Function:
        bugs = [x for x in file_vulnerabilities if args.Function in x['name']]
    else:
        bugs = [x for x in file_vulnerabilities if x['result']]

    # For each bug, rank every function in the same cluster by cosine
    # distance and attach the sorted list to the bug record.
    for file_vuln in bugs:
        vuln_index = get_function_index(file_vuln, all_functions)
        if vuln_index is None:
            continue
        vuln_cluster = Largest_Score_Drop['labels'][vuln_index]
        similar_list = get_functions_on_cluster(all_functions, vuln_cluster,
                                                Largest_Score_Drop['labels'])
        reduced_list = []
        for func in similar_list:
            func_distance = get_func_dist(file_vuln, func,
                                          function_distances, all_functions)
            reduced_list.append({
                'file_name': func['file_name'],
                'func_name': func['name'],
                'distance': func_distance
            })
        file_vuln['Similar_Funcs'] = sorted(reduced_list,
                                            key=lambda x: x['distance'])
    if bugs:
        print_function(bugs[0])