def testCrawlRealData(self):
    """Crawl a real memory dump fixture and verify the expected API hits.

    Loads ``example_dump.bin`` next to this test file, scans it with a
    minimal API DB, and checks the two known KernelBase.dll references.
    """
    this_dir = os.path.abspath(os.path.join(os.path.dirname(__file__)))
    binary_path = os.path.join(this_dir, "example_dump.bin")
    with open(binary_path, "rb") as f_in:
        test_binary = f_in.read()
    db_path = os.path.join(this_dir, "minimal_db.json")
    scout = ApiScout(db_path)
    expected = {
        u'Windows 7': [
            (256, 2105895504, u'KernelBase.dll', u'InterlockedIncrement', 32, None, 1),
            (264, 8792746496016, u'KernelBase.dll', u'WaitForSingleObjectEx', 64, None, 1),
        ]
    }
    # Crawl once and reuse the result: the original scanned the dump twice
    # (a leftover debug print plus the assertion).
    crawled = scout.crawl(test_binary)
    self.assertEqual(expected, crawled)
def main():
    """IDA plugin entry point: scan all segments for API references and annotate them.

    Collects parameters from the plugin form, loads the selected API DBs,
    crawls every IDA segment relative to the image base, then lets the user
    pick which hits to apply as names/Xrefs.
    """
    tools = IdaTools()
    parameters = tools.formGetParameters()
    if not parameters:
        return
    scout = ApiScout()
    scout.ignoreAslrOffsets(parameters["ignore_aslr_offset"])
    for db_path in parameters["api_dbs"]:
        scout.loadDbFile(db_path)
    bitness = "32bit and 64bit" if scout.has_64bit else "32bit"
    segments = tools.getAllMemoryFromIda()
    image_base = tools.getBaseAddress()
    collected = {}
    # Crawl each segment with its offset relative to the image base so that
    # absolute API addresses resolve correctly.
    for seg_start, seg_bytes in sorted(segments.items()):
        scout.setLoadOffset(seg_start - image_base)
        print("Scanning %d bytes @0x%x in %s mode." % (len(seg_bytes), seg_start, bitness))
        updateResults(collected, scout.crawl(seg_bytes))
    selected_apis = tools.formSelectResults(collected)
    if selected_apis:
        tools.importTypeLibraries()
        num_renamed, num_skipped, num_xrefs_adapted = tools.applyApiNames(selected_apis)
        print("Annotated %d APIs and adapted %d Xrefs(%d skipped)." % (num_renamed, num_xrefs_adapted, num_skipped))
    else:
        print("No APIs selected for annotation, closing.")
def main():
    """Command-line demo: crawl a memory dump with one or more API DBs and render hits.

    If no DB paths are given, all files found in "./dbs" are used. Without a
    binary path, the argparse help text is printed instead.
    """
    parser = argparse.ArgumentParser(
        description='Demo: Use apiscout with a prepared api database (created using DatabaseBuilder.py) to crawl a dump for imports and render the results.')
    parser.add_argument('-f', '--filter', type=int, default=0,
                        help='Filter out APIs that do not have a neighbour within N bytes.')
    parser.add_argument('-i', '--ignore_aslr', action='store_true',
                        help='Do not apply the per-module ASLR offset potentially contained in a API DB file.')
    parser.add_argument('binary_path', type=str, default='',
                        help='Path to the memory dump to crawl.')
    parser.add_argument('db_path', type=str, nargs='*',
                        help='Path to the DB(s). If no argument is given, use all files found in "./dbs"')
    args = parser.parse_args()
    if args.binary_path:
        # The dump is read as raw bytes, so start from an empty bytes object
        # (the original used a str "", mixing types when the file is missing).
        binary = b""
        if os.path.isfile(args.binary_path):
            with open(args.binary_path, "rb") as f_binary:
                binary = f_binary.read()
        if not args.db_path:
            args.db_path = get_all_db_files()
        scout = ApiScout()
        # override potential ASLR offsets that are stored in the API DB files.
        scout.ignoreAslrOffsets(args.ignore_aslr)
        # load DB file
        for db_path in args.db_path:
            scout.loadDbFile(db_path)
        # Fixed: the closing quote around the second placeholder was missing.
        print("Using '{}' to analyze '{}'.".format(args.db_path, args.binary_path))
        num_apis_loaded = scout.getNumApisLoaded()
        filter_info = " - neighbour filter: 0x%x" % args.filter if args.filter else ""
        print("Buffer size is {} bytes, {} APIs loaded{}.\n".format(
            len(binary), num_apis_loaded, filter_info))
        results = scout.crawl(binary)
        filtered_results = scout.filter(results, 0, 0, args.filter)
        print(scout.render(filtered_results))
    else:
        parser.print_help()
def testCrawlToyData(self):
    """crawl() should locate both a 32bit and a 64bit API marker in a synthetic buffer."""
    pad = b"\x00" * 0x10
    dword_hit = b"\x34\x12\x00\x00"                      # little-endian 0x1234
    qword_hit = b"\x78\x56\x00\x00\x00\x00\x00\x00"      # little-endian 0x5678
    test_binary = pad + dword_hit + pad + qword_hit + pad
    scout = ApiScout()
    scout.api_maps["test_1"] = {0x1234: ("test.dll", "TestApi", 32)}
    scout.api_maps["test_2"] = {0x5678: ("test2.dll", "TestApi2", 64)}
    scout.has_64bit = True
    # Offsets: 32bit hit at 0x10 (=16), 64bit hit at 0x10+4+0x10 (=36).
    expected = {
        'test_1': [(16, 4660, 'test.dll', 'TestApi', 32, None, 1)],
        'test_2': [(36, 22136, 'test2.dll', 'TestApi2', 64, None, 1)],
    }
    self.assertEqual(expected, scout.crawl(test_binary))
def testCrawlToyData(self):
    """crawl() should find a packed 32bit and 64bit API offset in a toy buffer.

    Fixed for Python 3: struct.pack() returns bytes, so the padding must be
    bytes literals as well -- concatenating str and bytes raises TypeError.
    """
    test_binary = (b"\00" * 0x10 + struct.pack("I", 0x1234)
                   + b"\00" * 0x10 + struct.pack("Q", 0x5678)
                   + b"\00" * 0x10)
    scout = ApiScout()
    scout.api_maps["test_1"] = {0x1234: ("test.dll", "TestApi", 32)}
    scout.api_maps["test_2"] = {0x5678: ("test2.dll", "TestApi2", 64)}
    scout.has_64bit = True
    results = {
        'test_2': [(36, 22136, 'test2.dll', 'TestApi2', 64)],
        'test_1': [(16, 4660, 'test.dll', 'TestApi', 32)]
    }
    self.assertEqual(results, scout.crawl(test_binary))
def main():
    """Full CLI demo: crawl a dump (or parse its import table) and render the results.

    Supports crawling mapped dumps against one or more API DBs, or -- with
    --import_table_only -- parsing the import table of an unmapped PE file.
    Optionally matches the hits against a WinApi1024 vector collection.
    """
    parser = argparse.ArgumentParser(description='Demo: Use apiscout with a prepared api database (created using DatabaseBuilder.py) to crawl a dump for imports and render the results.')
    parser.add_argument('-f', '--filter', type=int, default=0, help='Filter out APIs that do not have a neighbour within N bytes.')
    parser.add_argument('-i', '--ignore_aslr', action='store_true', help='Do not apply the per-module ASLR offset potentially contained in a API DB file.')
    parser.add_argument('-c', '--collection_file', type=str, default='', help='Optionally match the output against a WinApi1024 vector collection file.')
    parser.add_argument('-b', '--base_addr', type=str, default='', help='Set base address to given value (int or 0x-hex format).')
    parser.add_argument('-t', '--import_table_only', action='store_true', help='Do not crawl for API references but only parse the import table instead - assumes an unmapped PE file as input.')
    parser.add_argument('binary_path', type=str, default='', help='Path to the memory dump to crawl.')
    parser.add_argument('db_path', type=str, nargs='*', help='Path to the DB(s). If no argument is given, use all files found in "./dbs"')
    args = parser.parse_args()
    if args.binary_path:
        # The dump is read as raw bytes, so start from an empty bytes object
        # (the original used a str "", mixing types when the file is missing).
        binary = b""
        if os.path.isfile(args.binary_path):
            with open(args.binary_path, "rb") as f_binary:
                binary = f_binary.read()
        scout = ApiScout()
        base_addr = get_base_addr(args)
        # Fixed typo in user-facing message: "adress" -> "address".
        print("Using base address 0x{:x} to infer reference counts.".format(base_addr))
        scout.setBaseAddress(base_addr)
        # override potential ASLR offsets that are stored in the API DB files.
        scout.ignoreAslrOffsets(args.ignore_aslr)
        # load DB file(s): explicit paths win; otherwise fall back to ./dbs,
        # unless we only parse the import table (which needs no API DB).
        db_paths = []
        if args.db_path:
            db_paths = args.db_path
        elif not args.import_table_only:
            db_paths = get_all_db_files()
        for db_path in db_paths:
            scout.loadDbFile(db_path)
        # load WinApi1024 vector
        scout.loadWinApi1024(get_winapi1024_path())
        # scout the binary
        results = {}
        if args.import_table_only:
            print("Parsing Import Table for\n {}.".format(args.binary_path))
            results = scout.evaluateImportTable(binary, is_unmapped=True)
        else:
            print("Using \n {}\nto analyze\n {}.".format("\n ".join(db_paths), args.binary_path))
            num_apis_loaded = scout.getNumApisLoaded()
            filter_info = " - neighbour filter: 0x%x" % args.filter if args.filter else ""
            print("Buffer size is {} bytes, {} APIs loaded{}.\n".format(len(binary), num_apis_loaded, filter_info))
            results = scout.crawl(binary)
        filtered_results = scout.filter(results, 0, 0, args.filter)
        print(scout.render(filtered_results))
        print(scout.renderVectorResults(filtered_results))
        if args.collection_file:
            print(scout.renderResultsVsCollection(filtered_results, args.collection_file))
    else:
        parser.print_help()