def run(self, args, binary, data):
    """Display a Mach-O binary, or each architecture slice of a fat binary.

    Emits JSON when ``args.json`` is set, otherwise a human-readable report
    built from ``self.display_hashes`` / ``self.display_macho``.
    """
    # TODO: test if FAT binary
    if isinstance(binary, lief.MachO.FatBinary):
        if args.json:
            # One JSON object keyed by CPU type, one entry per slice.
            entries = ('"{}":{}'.format(part.header.cpu_type.name, lief.to_json(part))
                       for part in binary)
            print("{" + ",".join(entries) + "}")
        else:
            # Fat binary: header, hashes, then one section per slice.
            print("Fat binary")
            print("=" * 80)
            cpu_names = ' '.join(part.header.cpu_type.name for part in binary)
            print("This binary is a fat-binary containing {} binaries : {}".format(
                binary.size, cpu_names))
            self.display_hashes(data)
            print("")
            for part in binary:
                print(part.header.cpu_type.name)
                print("=" * 80)
                self.display_macho(part)
                print("")
    elif args.json:
        print(lief.to_json(binary))
    else:
        print("General Information")
        print("=" * 80)
        self.display_hashes(data)
        print("{:15} {} bytes".format("Size:", len(data)))
        self.display_macho(binary)
def each(self, target):
    """Parse an OAT file with LIEF, store its JSON dump in ``self.results``,
    extract the embedded DEX files, and add androguard class analysis.

    Always returns True; failures are logged instead of raised.
    """
    self.results = dict()
    try:
        # lief
        oat = lief.parse(target)
        # parse_int=str keeps very long integers as strings in the result dict.
        oat_dict = json.loads(lief.to_json(oat), parse_int=str)
        self.results.update(oat_dict)
        # add extracted dex files
        for idx, dex_file in enumerate(oat.dex_files):
            temp = tempdir()
            fname = 'classes_{}.dex'.format(idx)
            dex_filepath = os.path.join(temp, fname)
            dex_file.save(dex_filepath)
            if os.path.isfile(dex_filepath):
                self.add_extracted_file(dex_filepath)
        # androguard
        sha256, vm, vm_analysis = AnalyzeDex(target)
        aguard_dict = {
            'androguard': {
                'internal_classes': self._get_internal_classes(vm_analysis)
            }
        }
        self.results.update(aguard_dict)
    except Exception:
        # Fix: traceback.print_exc() prints to stderr and returns None, so the
        # log entry carried no message; format_exc() returns the traceback text.
        # Also narrowed the bare except so SystemExit/KeyboardInterrupt escape.
        self.log('error', traceback.format_exc())
    return True
def test_code_view_pdb(self):
    """Check the CodeView/PDB70 debug entry fields and their JSON serialization."""
    sample_path = get_sample('PE/PE64_x86-64_binary_ConsoleApplication1.exe')
    sample = lief.parse(sample_path)
    self.assertTrue(sample.has_debug)

    # Exactly one debug entry carries CodeView information.
    code_view_entries = [entry for entry in sample.debug if entry.has_code_view]
    self.assertTrue(len(code_view_entries) == 1)

    debug = code_view_entries[0]
    code_view = debug.code_view
    expected_signature = [245, 217, 227, 182, 71, 113, 1, 79,
                          162, 3, 170, 71, 124, 74, 186, 84]

    self.assertEqual(code_view.cv_signature, lief.PE.CODE_VIEW_SIGNATURES.PDB_70)
    self.assertEqual(code_view.signature, expected_signature)
    self.assertEqual(code_view.age, 1)
    self.assertEqual(
        code_view.filename,
        r"c:\users\romain\documents\visual studio 2015\Projects\HelloWorld\x64\Release\ConsoleApplication1.pdb")

    json_view = json.loads(lief.to_json(debug))
    self.assertDictEqual(json_view, {
        'addressof_rawdata': 8996,
        'characteristics': 0,
        'code_view': {
            'age': 1,
            'cv_signature': 'PDB_70',
            'filename': 'c:\\users\\romain\\documents\\visual studio 2015\\Projects\\HelloWorld\\x64\\Release\\ConsoleApplication1.pdb',
            'signature': expected_signature,
        },
        'major_version': 0,
        'minor_version': 0,
        'pointerto_rawdata': 5412,
        'sizeof_data': 125,
        'timestamp': 1459952944,
        'type': 'CODEVIEW',
    })
def scan(self, payload, **kwargs):
    """
    Scan a payload using LIEF

    :param bytes payload: Payload to be scanned
    :param **kwargs filename: Filename of the file to scan
    :param **kwargs path: Path of the file to scan

    :returns: Results from scan
    :rtype: dict or None
    """
    super().scan()

    # Resolve the display name: explicit filename wins, then basename of path.
    filename = 'unknown'
    if 'filename' in kwargs:
        filename = kwargs.pop('filename')
    elif 'path' in kwargs:
        filename = os.path.basename(kwargs.pop('path'))

    self.lief_open(payload, filename)

    # Not any of the supported file formats -> return None.
    if not self.binary:
        return None

    serializer = lief.abstract_to_json if self.abstract else lief.to_json
    return self.stoq.loads(serializer(self.binary))
def _inspect_linkages_this(filename, sysroot='', arch='native'):
    '''
    Return (filename, needed, resolved) for one binary, via LIEF's JSON dump.

    :param filename: path of the binary to inspect
    :param sysroot: sysroot prefix, trimmed before use
    :param arch: unused here; kept for interface compatibility
    :return: (filename, imported_libraries, imported_libraries) on success,
             (None, [], []) when the file is missing or parsing fails
    '''
    if not os.path.exists(filename):
        return None, [], []
    sysroot = _trim_sysroot(sysroot)
    try:
        binary = lief.parse(filename)
        # Future lief has this:
        # json_data = json.loads(lief.to_json_from_abstract(binary))
        json_data = json.loads(lief.to_json(binary))
        if json_data:
            return filename, json_data['imported_libraries'], json_data[
                'imported_libraries']
    # Fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        print('WARNING: liefldd: failed _inspect_linkages_this({})'.format(
            filename))
    return None, [], []
def main():
    """Parse the PE binary given on the command line and print its LIEF JSON dump."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('binary', help='PE binary')
    options = arg_parser.parse_args()

    parsed = lief.parse(options.binary)
    as_dict = json.loads(lief.to_json(parsed))
    print(json.dumps(as_dict, sort_keys=True, indent=4))
def each(self, target):
    """Parse *target* with LIEF and store its JSON dump in ``self.results``.

    Always returns True; failures are logged instead of raised.
    """
    self.results = dict()
    try:
        binary = lief.parse(target)
        # convert very long ints to str for Mongo
        binary_dict = json.loads(lief.to_json(binary), parse_int=str)
        self.results.update(binary_dict)
    except Exception:
        # Fix: traceback.print_exc() prints to stderr and returns None, so the
        # log entry carried no message; format_exc() returns the traceback text.
        # Also narrowed the bare except so SystemExit/KeyboardInterrupt escape.
        self.log('error', traceback.format_exc())
    return True
def main():
    """Validate and dump a VDEX file given on the command line as indented JSON."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('file', help='VDEX file')
    options = arg_parser.parse_args()

    # Reject anything that is not a VDEX container before parsing.
    if not lief.VDEX.is_vdex(options.file):
        print("{} is not a VDEX file".format(options.file))
        return 1

    parsed = lief.VDEX.parse(options.file)
    as_dict = json.loads(lief.to_json(parsed))
    print(json.dumps(as_dict, sort_keys=True, indent=4))
def test_json_serialization(self):
    """The JSON dump of the first signature must match the reference JSON file."""
    pe_path = get_sample(
        "PE/PE32_x86-64_binary_avast-free-antivirus-setup-online.exe")
    avast = lief.PE.parse(pe_path)

    reference_path = get_sample(
        "PE/PE32_x86-64_binary_avast-free-antivirus-setup-online-signature.json")
    with open(reference_path, "rb") as f:
        expected = json.load(f)

    actual = json.loads(lief.to_json(avast.signatures[0]))
    self.assertEqual(actual, expected)
def each(self, target):
    """Parse an ART file with LIEF and store its JSON dump in ``self.results``.

    Returns False when *target* is not an ART file, True otherwise;
    parsing failures are logged instead of raised.
    """
    self.results = dict()
    try:
        if not lief.ART.is_art(target):
            self.log('error', '{} is not a ART file'.format(target))
            return False
        binary = lief.ART.parse(target)
        # parse_int=str keeps very long integers as strings in the result dict.
        binary_dict = json.loads(lief.to_json(binary), parse_int=str)
        self.results.update(binary_dict)
    except Exception:
        # Fix: traceback.print_exc() prints to stderr and returns None, so the
        # log entry carried no message; format_exc() returns the traceback text.
        # Also narrowed the bare except so SystemExit/KeyboardInterrupt escape.
        self.log('error', traceback.format_exc())
    return True
def each(self, target):
    """Retype *target* to 'oat' if it is an OAT file, otherwise store its
    LIEF JSON dump in ``self.results``.

    Always returns True; failures are logged instead of raised.
    """
    self.results = dict()
    try:
        if lief.OAT.is_oat(target):
            self.change_type(target, 'oat')
            self.results = {'message': 'File type was changed to oat.'}
            return True
        binary = lief.parse(target)
        # parse_int=str keeps very long integers as strings in the result dict.
        binary_dict = json.loads(lief.to_json(binary), parse_int=str)
        self.results.update(binary_dict)
    except Exception:
        # Fix: traceback.print_exc() prints to stderr and returns None, so the
        # log entry carried no message; format_exc() returns the traceback text.
        # Also narrowed the bare except so SystemExit/KeyboardInterrupt escape.
        self.log('error', traceback.format_exc())
    return True
def each(self, target):
    """Parse a DEX file with LIEF and add androguard class analysis to
    ``self.results``.

    Always returns True; failures are logged instead of raised.
    """
    self.results = dict()
    try:
        # lief
        binary = lief.DEX.parse(target)
        # parse_int=str keeps very long integers as strings in the result dict.
        lief_dict = {'lief': json.loads(lief.to_json(binary), parse_int=str)}
        self.results.update(lief_dict)
        # androguard
        sha256, vm, vm_analysis = AnalyzeDex(target)
        aguard_dict = {
            'androguard': {
                'internal_classes': self._get_internal_classes(vm_analysis)
            }
        }
        self.results.update(aguard_dict)
    except Exception:
        # Fix: traceback.print_exc() prints to stderr and returns None, so the
        # log entry carried no message; format_exc() returns the traceback text.
        # Also narrowed the bare except so SystemExit/KeyboardInterrupt escape.
        self.log('error', traceback.format_exc())
    return True
def test_basic(self):
    """Forwarded exports of kernel32.dll: count and JSON serialization."""
    sample = lief.parse(get_sample('PE/PE32_x86_library_kernel32.dll'))
    exports = sample.get_export()

    forwarded = [entry for entry in exports.entries if entry.is_forwarded]
    self.assertEqual(len(forwarded), 82)

    # Test JSON Serialization of the first forwarded entry.
    serialized = json.loads(lief.to_json(forwarded[0]))
    self.assertTrue("forward_information" in serialized)

    forward_info = serialized["forward_information"]
    self.assertEqual(forward_info["library"], "NTDLL")
    self.assertEqual(forward_info["function"], "RtlInterlockedPushListSList")
def each(self, target):
    """LIEF static analysis plus PEiD-signature packer detection on *target*.

    Always returns True; failures are logged instead of raised.
    """
    self.results = dict()
    try:
        # lief analysis
        binary = lief.parse(target)
        # parse_int=str keeps very long integers as strings in the result dict.
        binary_dict = json.loads(lief.to_json(binary), parse_int=str)
        self.results.update(binary_dict)
        # packet detect using pefile (PEiD sigs)
        # sig file obtained here https://github.com/erocarrera/pefile/blob/wiki/PEiDSignatures.md
        # named after date downloaded & added to FAME
        signatures = peutils.SignatureDatabase(PE_ID_SIGS_FILE)
        pe = pefile.PE(target, fast_load=True)
        matches = signatures.match(pe, ep_only=True)
        packer_dict = {'packers': matches}
        self.results.update(packer_dict)
    except Exception:
        # Fix: traceback.print_exc() prints to stderr and returns None, so the
        # log entry carried no message; format_exc() returns the traceback text.
        # Also narrowed the bare except so SystemExit/KeyboardInterrupt escape.
        self.log('error', traceback.format_exc())
    return True
def each(self, target):
    """Parse a VDEX file with LIEF, store its JSON dump in ``self.results``,
    and extract the embedded DEX files.

    Returns False when *target* is not a VDEX file, True otherwise;
    failures are logged instead of raised.
    """
    self.results = dict()
    try:
        if not lief.VDEX.is_vdex(target):
            self.log('error', '{} is not a VDEX file'.format(target))
            return False
        vdex = lief.VDEX.parse(target)
        vdex_dict = json.loads(lief.to_json(vdex), parse_int=str)
        self.results.update(vdex_dict)
        # add extracted dex files
        for idx, dex_file in enumerate(vdex.dex_files):
            # Fix: the original did `tempdir = tempdir()`, rebinding the helper's
            # name to a path string — any second iteration would crash with
            # "'str' object is not callable". Use a distinct local name.
            temp = tempdir()
            fname = 'classes_{}.dex'.format(idx)
            dex_filepath = os.path.join(temp, fname)
            dex_file.save(dex_filepath)
            if os.path.isfile(dex_filepath):
                self.add_extracted_file(dex_filepath)
    except Exception:
        # Fix: traceback.print_exc() prints to stderr and returns None, so the
        # log entry carried no message; format_exc() returns the traceback text.
        # Also narrowed the bare except so SystemExit/KeyboardInterrupt escape.
        self.log('error', traceback.format_exc())
    return True
async def scan(self, payload: Payload, request: Request) -> WorkerResponse:
    """
    Scan a payload using LIEF

    Raises StoqPluginException when LIEF cannot parse the payload or does not
    support the file type; otherwise returns the parsed JSON as a WorkerResponse.
    """
    # Prefer the original filename from payload metadata; fall back to the id.
    filename = payload.results.payload_meta.extra_data.get(
        'filename', payload.results.payload_id)
    try:
        binary = lief.parse(raw=payload.content, name=filename)
    except lief.exception as err:
        raise StoqPluginException(f'Unable to parse payload: {err}')
    if binary is None:
        raise StoqPluginException('The file type isn\'t supported by LIEF')
    # Fix: compare by truthiness instead of `== True` (PEP 8 idiom);
    # behavior is unchanged for boolean flags.
    if self.abstract:
        results = lief.to_json_from_abstract(binary.abstract)
    else:
        results = lief.to_json(binary)
    return WorkerResponse(json.loads(results))
def _inspect_linkages_this(filename, sysroot='', arch='native'):
    '''
    Return (filename, needed, resolved) for one binary, via LIEF's JSON dump.

    :param filename: path of the binary to inspect
    :param sysroot: sysroot prefix, trimmed before use
    :param arch: unused here; kept for interface compatibility
    :return: (filename, imported_libraries, imported_libraries) on success,
             (None, [], []) when the file is missing or parsing fails
    '''
    if not os.path.exists(filename):
        return None, [], []
    sysroot = _trim_sysroot(sysroot)
    try:
        binary = lief.parse(filename)
        # Future lief has this:
        # json_data = json.loads(lief.to_json_from_abstract(binary))
        json_data = json.loads(lief.to_json(binary))
        if json_data:
            return filename, json_data['imported_libraries'], json_data['imported_libraries']
    # Fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        print('WARNING: liefldd: failed _inspect_linkages_this({})'.format(filename))
    return None, [], []
import lief
import json
import sys

# Require exactly one argument: the path of the file to dump.
if len(sys.argv) != 2:
    print("Usage: {} <file>".format(sys.argv[0]))
    sys.exit(1)

# Parse with LIEF and pretty-print the JSON representation.
parsed = lief.parse(sys.argv[1])
as_dict = json.loads(lief.to_json(parsed))
print(json.dumps(as_dict, sort_keys=True, indent=4))
def process_dir(src_dir, dst_dir, sample_type):
    """Run the full analysis pipeline on every file under *src_dir* and write
    per-file JSON reports into *dst_dir*.

    Stages per file: hashing/basic info, optional threat-intel lookup,
    static parsing (PE via peparser+cert check, ELF via LIEF), yara scans,
    optional TIP lookup, optional binee emulation (PE only), behavior capture,
    MITRE/playbook post-processing; finally the whole *dst_dir* is zipped.

    Returns False if any stage marked `failed`, True otherwise.

    NOTE(review): this body was reconstructed from a whitespace-collapsed
    source; the placement of the final zip/return at function level (after the
    os.walk loop) is the most plausible reading — confirm against the original.
    """
    print(("Processing: " + src_dir + " ..."))
    md5 = ""
    sha1 = ""
    sha2 = ""
    # Compile all yara rule sets once, up front.
    yara_scan = YaraScan()
    yara_rules = yara.compile('./yara_sigs/index.yar')
    yara_idrules = yara.compile('./yara_sigs/index_id.yar')
    yara_mitre_rules = yara.compile('./yara_sigs/index_mitre.yar')
    for root_dir, dirs, files in os.walk(src_dir):
        for filename in files:
            failed = False
            src_file = os.path.join(root_dir, filename)
            # --- Stage 1: hashes + basic info ---
            try:
                with open(src_file, 'rb') as f:
                    contents = f.read()
                file_size = len(contents)
                sha1 = hashlib.sha1(contents).hexdigest()
                sha2 = hashlib.sha256(contents).hexdigest()
                # MD5 is fed in 8 KiB chunks (the others hash in one call).
                md5_obj = hashlib.md5()
                for i in range(0, len(contents), 8192):
                    md5_obj.update(contents[i:i + 8192])
                md5 = md5_obj.hexdigest()
                basic_info = {
                    'MD5': md5,
                    'SHA1': sha1,
                    'SHA256': sha2,
                    'File Size': file_size
                }
                with open(
                        os.path.join(dst_dir, filename) + ".basic_info.json",
                        'w') as fw:
                    json.dump(basic_info, fw)
                print("basic info done")
            except Exception as e:
                print(("Error: " + str(e)))
                failed = True
            # --- Stage 2: threat-intel lookup by hash (optional) ---
            try:
                if md5 != "" and sha1 != "" and sha2 != "" and opts["config"][
                        "ENABLE_AV_OTX"] == 1:
                    # returns (alerts_dict, failure_flag)
                    retrun_val = threat_intel_lookup_file(md5, sha1, sha2)
                    final_alerts = retrun_val[0]
                    if retrun_val[1] == True:
                        failed = True
                    if len(final_alerts.keys()) > 0:
                        with open(
                                os.path.join(dst_dir, filename) +
                                ".threat_intel_file.json", 'w') as fw:
                            json.dump(final_alerts, fw)
                    else:
                        print("No, Threat Data found")
                    print("Threat Intel File done")
            except Exception as e:
                print(("Error: " + str(e)))
                failed = True
            # --- Stage 3: static parsing, dispatched on sample type ---
            if sample_type == "PE":
                try:
                    peparsed = peparser.parse(src_file)
                    with open(
                            os.path.join(dst_dir, filename) + ".static.json",
                            'w') as fp:
                        json.dump(peparsed, fp)
                    print("Static done")
                    with open(
                            os.path.join(dst_dir, filename) + ".cert.json",
                            'w') as fp:
                        # Digital-signature check; its collected fields are dumped.
                        digiSig = DigitalSignatureCheck()
                        digiSig.run(src_file)
                        json.dump(digiSig._REQ_DATA_FIELD, fp)
                    print("Cert done")
                except Exception as e:
                    print((str(e)))
                    print("No static data.. !!")
                    failed = True
            elif sample_type == "ELF":
                try:
                    # ELF static data comes from LIEF's JSON dump.
                    binary = lief.parse(src_file)
                    elfparsed = json.loads(lief.to_json(binary))
                    with open(
                            os.path.join(dst_dir, filename) + ".static.json",
                            'w') as fp:
                        json.dump(elfparsed, fp)
                    print("Linux Static done")
                except Exception as e:
                    print((str(e)))
                    print("No static data.. !!")
                    failed = True
            # --- Stage 4: yara scans (static + mitre) ---
            try:
                dst_file_static = os.path.join(dst_dir,
                                               filename) + ".yara.json"
                dst_file_mitre = os.path.join(dst_dir,
                                              filename) + ".mitre.json"
                # run yara rules on file
                process_file(yara_scan, yara_rules, yara_idrules,
                             yara_mitre_rules, src_file, dst_file_static,
                             dst_file_mitre)
            except Exception as e:
                print((str(e)))
                print("Yara Part did not run")
                failed = True
            # --- Stage 5: TIP lookup on the yara output (optional) ---
            try:
                tip_file = os.path.join(dst_dir, filename) + ".tip.json"
                tip_json = {}
                if opts["config"]["ENABLE_AV_OTX"] == 1 and os.path.exists(
                        dst_file_static):
                    ret_val = tip_lookup(dst_file_static)
                    tip_json = ret_val[0]
                    if ret_val[1]:
                        failed = True
                if (len(tip_json.keys()) > 0):
                    with open(tip_file, 'w') as fw:
                        json.dump(tip_json, fw)
            except Exception as e:
                print((str(e)))
                print("Lookup Part did not run")
            # --- Stage 6: binee emulation (PE only, optional) ---
            try:
                if opts["config"][
                        "ENABLE_EMULATION"] == 1 and sample_type == "PE":
                    dst_binee_file = os.path.abspath(
                        os.path.join(dst_dir, filename) + ".binee.json")
                    report_emulation_json = binee.emulate(
                        os.path.abspath(src_file), dst_binee_file)
                    if len(report_emulation_json.keys()) > 0:
                        report_emulation_file = os.path.abspath(
                            os.path.join(dst_dir, filename) +
                            ".emulation.json")
                        with open(report_emulation_file, 'w') as fw:
                            json.dump(report_emulation_json, fw)
            except Exception as e:
                print((str(e)))
                print("Emulation part did not run")
            # --- Stage 7: behavior capture ---
            try:
                dst_file = os.path.join(dst_dir, filename) + ".behav.json"
                get_behaviors(src_file, dst_file, dst_dir)
            except Exception as e:
                print((str(e)))
                print("Behavior part did not run..!!")
                failed = True
            # --- Stage 8: MITRE enrichment from behavior + playbook matching ---
            try:
                if os.path.exists(
                        os.path.join(dst_dir, filename) + ".behav.json"):
                    with open(
                            os.path.join(dst_dir, filename) + ".behav.json",
                            'rb') as fp:
                        file_data = fp.read()
                    json_data = {}
                    # API-based MITRE rules are matched against the behavior log.
                    yara_mitre_api = yara.compile(
                        '.\\yara_sigs\\mitre\\api_based.yar')
                    yara_scan.yara_sig_matched = {}
                    yara_mitre_api.match(
                        data=file_data,
                        callback=yara_scan.yara_callback_desc,
                        which_callbacks=yara.CALLBACK_MATCHES)
                    json_data['API_MITRE'] = yara_scan.yara_sig_matched
                    dst_file_mitre = os.path.join(dst_dir,
                                                  filename) + ".mitre.json"
                    try:
                        # Merge API-based matches into the existing mitre report.
                        with open(dst_file_mitre, 'rb') as fs:
                            mitre_matched_json = json.loads(fs.read())
                        dump_mitre = mitre_matched_json
                        for matched_tid in list(
                                json_data['API_MITRE'].keys()):
                            if matched_tid in mitre_matched_json.keys(
                            ):
                                dump_mitre[matched_tid].update(
                                    json_data['API_MITRE']
                                    [matched_tid])
                            else:
                                dump_mitre[matched_tid] = json_data[
                                    'API_MITRE'][matched_tid]
                    except:
                        # Merge failed: fall back to the report as it is on disk.
                        dst_file_mitre = os.path.join(
                            dst_dir, filename) + ".mitre.json"
                        with open(dst_file_mitre, 'rb') as fs:
                            dump_mitre = json.loads(fs.read())
                    with open(dst_file_mitre, 'wb') as fs:
                        fs.write(
                            json.dumps(dump_mitre,
                                       sort_keys=True,
                                       indent=4).encode('utf-8'))
                    dst_campaign_file = os.path.join(
                        dst_dir, filename) + ".campaign.json"
                    playbooksig(opts["config"]["PLAYBOOK_JSON"],
                                dst_file_mitre, dst_campaign_file)
                    print("Playbook part done")
                else:
                    # No behavior log: just re-serialize the mitre report
                    # (pretty-printed) and run the playbook matching.
                    dst_file_mitre = os.path.join(dst_dir,
                                                  filename) + ".mitre.json"
                    with open(dst_file_mitre, 'rb') as fs:
                        mitre_matched_json = json.loads(fs.read())
                    with open(dst_file_mitre, 'wb') as fs:
                        fs.write(
                            json.dumps(mitre_matched_json,
                                       sort_keys=True,
                                       indent=4).encode('utf-8'))
                    dst_campaign_file = os.path.join(
                        dst_dir, filename) + ".campaign.json"
                    playbooksig(opts["config"]["PLAYBOOK_JSON"],
                                dst_file_mitre, dst_campaign_file)
                    print("Playbook part done")
            except Exception as e:
                print((str(e)))
                print("MITRE and Playbook part did not work properly")
                failed = True
    # --- Final: zip the whole report directory ---
    # NOTE(review): `failed` here reflects only the last processed file (it is
    # reset per file) plus this zip step — confirm this matches the original.
    try:
        report_folder_name = dst_dir.split("\\")[-1]
        zipf = zipfile.ZipFile(
            os.path.join(opts["config"]["OUTPUT_DIR"],
                         report_folder_name + '.zip'), 'w',
            zipfile.ZIP_DEFLATED)
        zipdir(dst_dir, zipf)
        zipf.close()
    except Exception as e:
        print((str(e)))
        failed = True
    if failed:
        return False
    return True
def compute_lief(path):
    """Parse *path* as a PE with LIEF and return its JSON dump as a dict."""
    parsed = lief.PE.parse(path)
    serialized = lief.to_json(parsed)
    return json.loads(serialized)
def analyze(filepath):
    """Parse *filepath* with LIEF and return its JSON representation as a dict."""
    parsed = lief.parse(filepath)
    return json.loads(lief.to_json(parsed))
# Hash every collected file, then dump the LIEF JSON of each parseable PE.
for file in AllFiles:  # NOTE(review): `file` shadows the builtin; kept for compatibility
    hasher_md5 = hashlib.md5()
    hasher_sha256 = hashlib.sha256()
    with open(file, 'rb') as afile:
        buf = afile.read()
        hasher_md5.update(buf)
        hasher_sha256.update(buf)
    dictHashes['path'] = AllRootPath[i]
    dictHashes['hash_md5'] = hasher_md5.hexdigest()
    dictHashes['hash_sha256'] = hasher_sha256.hexdigest()
    # Fix: append a copy — the original appended `dictHashes` itself, so
    # AllFilesHashes held N references to ONE dict and every entry showed the
    # last file's path/hashes.
    AllFilesHashes.append(dict(dictHashes))
    i += 1
for p in AllFiles:
    try:
        pe = lief.parse(p)
        pe_json = to_json(pe)
        pp.pprint(json.loads(pe_json))
        AllPE.append(pe)
    except Exception:
        # Fix: narrowed the bare except (it also swallowed KeyboardInterrupt);
        # unparseable files are still skipped silently, as before.
        pass
i = 0
for p in AllPE:
    json_data = json.loads(to_json(p))
    print(AllFilesHashes)
    # NOTE(review): `md5` is a name from outside this block, so every report is
    # written to the same "file_<md5>" path — confirm this is intended.
    with open("file_" + md5, 'w') as f:
        f.write(json.dumps(json_data, indent=4))
    i += 1