def each(self, target):
    """Parse an OAT file with LIEF, extract its embedded DEX files and
    enrich the results with androguard class information."""
    self.results = dict()

    try:
        # lief: parse the OAT container and store its JSON representation.
        # parse_int=str avoids precision loss on very large integer fields.
        oat = lief.parse(target)
        oat_dict = json.loads(lief.to_json(oat), parse_int=str)
        self.results.update(oat_dict)

        # add extracted dex files
        for idx, dex_file in enumerate(oat.dex_files):
            temp = tempdir()
            fname = 'classes_{}.dex'.format(str(idx))
            dex_filepath = os.path.join(temp, fname)
            dex_file.save(dex_filepath)
            if os.path.isfile(dex_filepath):
                self.add_extracted_file(dex_filepath)

        # androguard
        sha256, vm, vm_analysis = AnalyzeDex(target)
        aguard_dict = {
            'androguard': {
                'internal_classes': self._get_internal_classes(vm_analysis)
            }
        }
        self.results.update(aguard_dict)
    except Exception:
        # BUG FIX: traceback.print_exc() writes to stderr and returns None,
        # so the log entry was always empty. format_exc() returns the text.
        # Also narrowed from a bare `except:`.
        self.log('error', traceback.format_exc())

    return True
def each(self, target):
    """Extract every member of a RAR archive, retrying protected entries
    with a couple of well-known passwords."""
    workdir = tempdir()
    archive = RarFile(target)

    for member in archive.namelist():
        try:
            archive.extract(member, workdir)
            extracted = os.path.join(workdir, member)
            if os.path.isfile(extracted):
                self.add_extracted_file(extracted)
        except RuntimeError:
            # Entry is most likely password protected: try common passwords.
            for candidate in ['virus', 'infected']:
                try:
                    extracted = archive.extract(member, workdir, pwd=candidate)
                    if os.path.isfile(extracted):
                        self.add_extracted_file(extracted)
                    break
                except RuntimeError:
                    pass
            else:
                self.log('error', 'Could not extract {}'.format(member))

    return True
def register_response_as(self, type, response, zipped=False):
    """Persist an HTTP response to disk and register the resulting file(s).

    When `zipped` is True the response body is treated as a ZIP archive and
    every member is extracted and registered individually; otherwise the raw
    body is registered as a single file.
    """
    if response.status_code != 200:
        self.log(
            'error', 'could not find {0} for task id {1}'.format(
                type, self.task_id))
        return

    tmpdir = tempdir()
    if zipped:
        # Spool the archive to disk, then extract each member.
        # `with` replaces the original unmanaged handle (leaked on error);
        # 'w+b' replaces 'a+b' -- we need a fresh read/write file, not append.
        with open(os.path.join(tmpdir, 'zip'), 'w+b') as f:
            for chunk in response.iter_content(1024):
                f.write(chunk)
            f.seek(0)
            z = zipfile.ZipFile(f)
            for name in z.namelist():
                filename = z.extract(name, tmpdir)
                self.register_files(type, filename)
    else:
        filename = os.path.join(tmpdir, 'cuckoo_response')
        with open(filename, "wb") as f:
            for chunk in response.iter_content(1024):
                f.write(chunk)
        self.register_files(type, filename)
def extract_info(self, report):
    """Summarize an UnpacMe report and optionally download unpacked PEs.

    Builds self.results['unpacked_executables'], one entry per result item
    with its sha256 and any malware / detection names. When
    self.collect_unpacked is set, each unpacked sample is fetched from the
    API and registered as a support file.
    """
    self.results['unpacked_executables'] = []
    if report.get('results'):
        for item in report['results']:
            sig = dict()
            # The item's sha256 doubles as the entry name.
            sig['name'] = item['hashes']['sha256']
            sig['malwares'] = []
            sig['detects'] = []
            if item.get('malware_id'):
                for mal in item['malware_id']:
                    sig['malwares'].append(mal['name'])
            if item.get('detectit'):
                for detect in item['detectit']:
                    sig['detects'].append(detect['name'])
            self.results['unpacked_executables'].append(sig)

            if self.collect_unpacked:
                # Download this item's unpacked sample, authenticated with
                # the configured API key.
                API_KEY_STRING = 'Key %s' % self.api_key
                auth_header = {'Authorization': API_KEY_STRING}
                r = requests.get(self.api_endpoint + 'download/' +
                                 item['hashes']['sha256'],
                                 headers=auth_header)
                sample_data = r.content
                tmpdir = tempdir()
                filename = os.path.join(tmpdir, 'unpacme_unpacked_executable')
                with open(filename, "wb") as f:
                    f.write(sample_data)
                self.register_files('unpacked_executable', filename)
def process_report(self):
    """Fetch the Joe Sandbox reports: a light JSON report for IOC
    extraction and a light HTML report for the execution graph."""
    download_url = self.base_url + 'analysis/download'
    error_fmt = 'could not find report for task id {}: {}'

    # Download JSON report to extract IOCs
    query = dict(self.joe_params)
    query['type'] = 'lightjson'
    response = urlopen(download_url, urlencode(query))
    if response.getcode() == 200:
        self.extract_iocs(response)
    else:
        self.log(
            'error',
            error_fmt.format(self.joe_params['webid'], response.read()))

    # Download HTML report to extract execution graph
    query['type'] = 'lighthtml'
    response = urlopen(download_url, urlencode(query))
    if response.getcode() == 200:
        report_path = os.path.join(tempdir(), 'joe_report.html')
        with open(report_path, 'w+b') as fd:
            copyfileobj(response, fd)
            fd.seek(0, 0)
            self.extract_graph(fd)
        self.add_support_file('Report', report_path)
    else:
        self.log(
            'error',
            error_fmt.format(self.joe_params['webid'], response.read()))
def each(self, target):
    """Run the dockerized extractor against the target, feeding it the
    configured password candidates and collecting its output."""
    self.results = {'warnings': []}

    # Create temporary directory to get results
    self.outdir = tempdir()
    self.results_dir = os.path.join(self.outdir, 'output')
    if not os.path.isdir(self.results_dir):
        os.mkdir(self.results_dir)

    # Make the password candidates available to the container.
    candidates_path = os.path.join(self.outdir, "passwords_candidates.txt")
    with open(candidates_path, "w+") as handle:
        handle.write(self.password_candidates)

    # Copy the sample into the shared volume; /data is the mount point.
    copyfile(target, os.path.join(self.outdir, os.path.basename(target)))
    target = os.path.join("/data/", os.path.basename(target))

    # execute docker container and persist its log output
    output = self.extract(target)
    self.save_output(output)
    return True
def each(self, target):
    """Download the file at the target URL, register it as an extracted
    file and tag the URL as a payload-delivery IOC."""
    self.add_ioc(target)

    response = requests.get(target, stream=True)
    if response.status_code != 200:
        raise ModuleExecutionError("Could not download file. Status: {}".format(response.status_code))

    workdir = tempdir()
    # Prefer the server-advertised filename, falling back to the URL tail.
    try:
        disposition = response.headers['content-disposition']
        filename = parse_header(disposition)[1]['filename']
    except KeyError:
        filename = target.split('/')[-1]
    if not filename:
        filename = "no_filename"

    destination = os.path.join(workdir, filename)
    with open(destination, 'wb') as fd:
        for chunk in response.iter_content(1024):
            fd.write(chunk)

    self.add_extracted_file(destination)
    self.add_ioc(target, 'payload_delivery')
    return True
def each(self, target):
    """Extract a ZIP archive, honoring extraction/analysis limits.

    APK files (detected by classes.dex + manifest entries) are retyped
    instead of extracted. Password-protected members are retried with each
    configured password candidate. Warnings accumulate in self.results.
    """
    self.results = {'warnings': []}
    tmpdir = tempdir()
    password_candidates = self.password_candidates.split("\n")
    zf = ZipFile(target)
    namelist = zf.namelist()
    if 'classes.dex' in namelist and 'META-INF/MANIFEST.MF' in namelist:
        # An APK is a ZIP archive: hand it to the apk pipeline instead.
        self.change_type(target, 'apk')
        self.results['warnings'].append(
            'File type was changed to apk, files were not extracted.')
    else:
        should_extract = len(namelist) <= self.maximum_extracted_files
        should_analyze = len(namelist) <= self.maximum_automatic_analyses
        if should_extract:
            for name in namelist:
                try:
                    filepath = zf.extract(name, tmpdir)
                    if os.path.isfile(filepath):
                        self.add_extracted_file(
                            filepath, automatic_analysis=should_analyze)
                except RuntimeError:
                    # Probably encrypted: retry with every known password.
                    for password in password_candidates:
                        try:
                            filepath = zf.extract(name, tmpdir, pwd=password)
                            if os.path.isfile(filepath):
                                self.add_extracted_file(
                                    filepath,
                                    automatic_analysis=should_analyze)
                            break
                        except RuntimeError:
                            pass
                    else:
                        # for/else: no password candidate worked.
                        self.results['warnings'].append(
                            u'Could not extract {} (password not known)'.
                            format(name))
            if not should_analyze:
                self.results['warnings'].append(
                    "Archive contains more than {} files ({}), so no analysis was automatically created."
                    .format(self.maximum_automatic_analyses, len(namelist)))
        else:
            self.results['warnings'].append(
                "Archive contains more than {} files ({}), so they were not extracted."
                .format(self.maximum_extracted_files, len(namelist)))
    if self.results['warnings']:
        self.results['files'] = namelist
    else:
        # No warnings: suppress the results pane entirely.
        self.results = None
    return True
def each(self, target):
    """Try to decrypt a password-protected Office document.

    Each password candidate is tried in turn; the first successful
    decryption is registered as an extracted file. Logs an error when no
    candidate works.
    """
    tmpdir = tempdir()
    password_candidates = self.password_candidates.split("\n")

    with open(target, "rb") as myfile:
        document = OfficeFile(myfile)
        for password in password_candidates:
            password = password.strip()
            try:
                document.load_key(password=password)
                # os.path.join is safer than manual separator concatenation.
                out_file = os.path.join(
                    tmpdir, "decrypted_" + os.path.basename(target))
                with open(out_file, "wb") as output:
                    document.decrypt(output)
                if os.path.isfile(out_file):
                    self.add_extracted_file(out_file)
                break
            except Exception:
                # Wrong password (or corrupt document): try the next one.
                # Narrowed from a bare `except:` so SystemExit and
                # KeyboardInterrupt are no longer swallowed.
                pass
        else:
            self.log(
                'error',
                'Could not extract {} (password not known)'.format(target))
    return True
def each(self, target):
    """Extract every non-multipart MIME part of an e-mail as a file."""
    with open(target) as fp:
        msg = email.message_from_file(fp)

    path_temp = tempdir()
    counter = 1
    for part in msg.walk():
        # multipart/* are just containers
        if part.get_content_maintype() == 'multipart':
            continue
        payload = part.get_payload(decode=True)
        # Some parts (e.g. nested message containers) decode to None;
        # the original crashed on fp.write(None).
        if payload is None:
            continue
        # Applications should really sanitize the given filename so that an
        # email message can't be used to overwrite important files
        filename = part.get_filename()
        if not filename:
            ext = mimetypes.guess_extension(part.get_content_type())
            if not ext:
                # Use a generic bag-of-bits extension
                ext = '.bin'
            filename = 'part-%03d%s' % (counter, ext)
            counter += 1
        # basename() keeps crafted names ("../../x") inside path_temp.
        filepath = os.path.join(path_temp, os.path.basename(filename))
        with open(filepath, 'wb') as out:
            out.write(payload)
        self.add_extracted_file(filepath)
def store_report_summary(self):
    """Download the Cuckoo HTML summary and attach it as a support file."""
    report_url = self.web_base_url + '/filereport/{}/htmlsummary/'.format(
        self.task_id)
    destination = os.path.join(tempdir(), 'cuckoo_report.html')
    # urlretrieve returns (local_path, headers); keep the path only.
    local_path = urlretrieve(report_url, destination)[0]
    self.add_support_file('Report', local_path)
def extract(self, target):
    """Unpack an ACE archive with acefile-unace and return the candidate
    paths of the extracted entries.

    SECURITY FIX: argument-list subprocess calls replace shell strings
    (os.system / os.popen), so a crafted archive path can no longer
    inject shell commands.
    """
    import subprocess

    tmpdir = tempdir()
    # Extract the archive into the temporary directory.
    subprocess.call(['acefile-unace', '-d', tmpdir, '-x', target])
    # List the archive contents to know which files were produced.
    listing = subprocess.check_output(
        ['acefile-unace', '-l', target]).decode('utf-8', 'replace')
    # Same mapping as before: one candidate path per listing line.
    return [tmpdir + '/' + name for name in listing.split('\n')]
def each(self, target):
    """Extract a ZIP archive, or retype it as apk when it looks like one.

    Password-protected members are retried with each configured candidate.
    """
    destination = tempdir()
    candidates = self.password_candidates.split("\n")
    archive = ZipFile(target)
    members = archive.namelist()

    if 'classes.dex' in members and 'META-INF/MANIFEST.MF' in members:
        # APKs are ZIP files; let the apk pipeline handle this one.
        self.change_type(target, 'apk')
        self.results = {'message': 'File type was changed to apk.'}
        return True

    for member in members:
        try:
            extracted = archive.extract(member, destination)
            if os.path.isfile(extracted):
                self.add_extracted_file(extracted)
        except RuntimeError:
            # Probably encrypted: try each password candidate in turn.
            for password in candidates:
                try:
                    extracted = archive.extract(member, destination,
                                                pwd=password)
                    if os.path.isfile(extracted):
                        self.add_extracted_file(extracted)
                    break
                except RuntimeError:
                    pass
            else:
                self.log(
                    'error',
                    'Could not extract {} (password not known)'.format(
                        member))
    return True
def each(self, target):
    """Parse an e-mail: register headers, extract URLs and save every
    explicit attachment as an extracted file."""
    with open(target, 'r') as f:
        msg = email.message_from_file(f)

    outdir = tempdir()

    # Extract Headers
    self.register_headers(msg, outdir)
    self.extract_urls(msg)

    # Extract Attachments
    counter = 1
    for part in msg.walk():
        # multipart/* are just containers
        if part.get_content_maintype() == 'multipart':
            continue
        content_disposition = part.get('Content-Disposition', None)
        if content_disposition and 'attachment' in content_disposition:
            payload = part.get_payload(decode=True)
            # Some parts decode to None; the original crashed writing it.
            if payload is None:
                continue
            filename = part.get_filename()
            if not filename:
                ext = mimetypes.guess_extension(part.get_content_type())
                if not ext:
                    # Use a generic bag-of-bits extension
                    ext = '.bin'
                filename = 'part-{}{}'.format(counter, ext)
                counter += 1
            # basename() keeps crafted names ("../../x") inside outdir.
            filepath = os.path.join(outdir, os.path.basename(filename))
            with open(filepath, 'wb') as out:
                out.write(payload)
            self.add_extracted_file(filepath)
def get_unpacked_executables(self):
    """Download the 'unpackpe' archive from Joe Sandbox and register every
    unpacked PE it contains (archive password: 'infected')."""
    url = self.base_url + 'analysis/download'
    params = dict(self.joe_params)
    params['type'] = 'unpackpe'
    response = urlopen(url, urlencode(params))
    if response.getcode() != 200:
        self.log(
            'error', 'could not find unpacked PEs for task id {}: {}'.format(
                self.joe_params['webid'], response.read()))
    else:
        tmpdir = tempdir()
        filepath = os.path.join(tmpdir, 'unpacked.zip')
        with open(filepath, 'w+b') as fd:
            copyfileobj(response, fd)
        try:
            unpacked_files = []
            # Context manager closes the archive handle (the original
            # leaked it).
            with ZipFile(filepath) as zf:
                for name in zf.namelist():
                    unpacked_files.append(
                        zf.extract(name, tmpdir, pwd='infected'))
            self.register_files('unpacked_executable', unpacked_files)
        except BadZipfile:
            # Not a valid archive (e.g. empty download): nothing to register.
            pass
def _store_internal_classes(self):
    """Dump the collected class/method information to a JSON support file."""
    output_path = os.path.join(tempdir(), 'internal_classes.json')
    with open(output_path, 'w') as handle:
        json.dump(self.results['internal_classes'], handle,
                  sort_keys=True, indent=4)
    self.add_support_file('Internal Classes & Methods', output_path)
def each(self, target):
    """Decompress a gzip file and register the result as an extracted file."""
    tmpdir = tempdir()
    # BUG FIX: `target` is a full path; os.path.join() with an absolute
    # second argument discards tmpdir entirely, so the output landed next
    # to the sample instead of inside the temporary directory.
    filepath = os.path.join(tmpdir, os.path.basename(target) + '-extracted')
    with gzip.open(target, 'rb') as f_in, open(filepath, 'wb') as f_out:
        shutil.copyfileobj(f_in, f_out)
    if os.path.isfile(filepath):
        self.add_extracted_file(filepath)
    return True
def temp_volume(target):
    """Create a temporary directory and copy the target to it.

    Meant to be mounted inside the Docker container to send the target and
    get the results.
    """
    volume = tempdir()
    os.makedirs(os.path.join(volume, 'output'))
    copy(target, os.path.join(volume, os.path.basename(target)))
    return volume
def each(self, target):
    """Decode a .jse (encoded JScript) file with the bundled decoder and
    register the decoded script."""
    if not target.lower().endswith('.jse'):
        return False

    import subprocess

    tmpdir = tempdir()
    # Drop the trailing 'e' of '.jse' so the output is named '*.js'.
    decoded_filename = target.split('/')[-1][:-1]
    dest_file = "{}/{}".format(tmpdir, decoded_filename)
    # SECURITY FIX: argument list instead of an os.system() shell string,
    # so a crafted sample path can no longer inject shell commands.
    subprocess.call(
        ['fame/modules/srozb/processing/decoder', target, dest_file])
    self.add_extracted_file(dest_file)
    return True
def __extract(self, name, checksum, countid, data):
    """Write a carved resource to a temp file and register it.

    Registration is retried up to five times (best effort); the path is
    returned either way so callers can reference the dump.
    """
    fpath = "%s/res%d_%s" % (tempdir(), countid, name)
    # `with` guarantees the handle is closed before registration.
    with open(fpath, 'wb') as f:
        f.write(data)
    for _ in range(5):
        try:
            self.add_extracted_file(fpath)
            break
        except Exception:
            # Narrowed from a bare `except:`; keep retrying on transient
            # registration failures without swallowing SystemExit.
            continue
    return fpath
def each(self, target):
    """Extract a RAR archive, honoring extraction/analysis limits.

    Password-protected members are retried with each configured password
    candidate; warnings accumulate in self.results.
    """
    self.results = {'warnings': []}
    tmpdir = tempdir()
    password_candidates = self.password_candidates.split("\n")
    rf = rarfile.RarFile(target)
    filelist = rf.infolist()
    should_extract = len(filelist) <= self.maximum_extracted_files
    should_analyze = len(filelist) <= self.maximum_automatic_analyses
    if should_extract:
        for f in filelist:
            try:
                filepath = rf.extract(f.filename, path=tmpdir)
                if os.path.isfile(filepath):
                    self.add_extracted_file(
                        filepath, automatic_analysis=should_analyze)
            except RuntimeError:
                # Probably encrypted: retry with every known password.
                for password in password_candidates:
                    try:
                        filepath = rf.extract(f.filename,
                                              path=tmpdir,
                                              pwd=password)
                        if os.path.isfile(filepath):
                            self.add_extracted_file(
                                filepath, automatic_analysis=should_analyze)
                        break
                    except RuntimeError:
                        pass
                else:
                    # for/else: no password candidate worked.
                    self.results['warnings'].append(
                        u'Could not extract {} (password not known)'.
                        format(f.filename))
        if not should_analyze:
            self.results['warnings'].append(
                "Archive contains more than {} files ({}), so no analysis was automatically created."
                .format(self.maximum_automatic_analyses, len(filelist)))
    else:
        self.results['warnings'].append(
            "Archive contains more than {} files ({}), so they were not extracted."
            .format(self.maximum_extracted_files, len(filelist)))
    if self.results['warnings']:
        # NOTE(review): stores RarInfo objects (the zip counterpart stores
        # plain names) -- confirm these serialize correctly downstream.
        self.results['files'] = filelist
    else:
        self.results = None
    return True
def each(self, target):
    """Retype the target as an apk when the archive layout matches one
    (classes.dex plus the jar manifest)."""
    tmpdir = tempdir()  # kept: creating the directory is a side effect
    archive = ZipFile(target)
    members = archive.namelist()
    looks_like_apk = ('classes.dex' in members
                      and 'META-INF/MANIFEST.MF' in members)
    if looks_like_apk:
        self.change_type(target, 'apk')
        self.results = {
            'message': 'File type was changed to apk.'
        }
        return True
def get_unpacked_executables(self):
    """Fetch the 'unpackpe' archive via the Joe Sandbox API and register
    every unpacked executable it contains (archive password: 'infected')."""
    try:
        data = self.joe.analysis_download(self.webid, "unpackpe")
        archive = io.BytesIO(data[1])
        destination = tempdir()
        with ZipFile(archive) as zf:
            extracted = [zf.extract(member, destination, pwd='infected')
                         for member in zf.namelist()]
        self.register_files('unpacked_executable', extracted)
    except Exception as err:
        raise ModuleExecutionError('Error encountered while processing unpacked executables:\n{}'.format(err))
def each(self, target):
    """Unpack UPX-compressed executables detected via libmagic."""
    workdir = tempdir()
    sniffer = magic.Magic(mime=False, uncompress=False)
    details = sniffer.from_file(target)

    # Not UPX? Nothing for this module to do.
    if 'UPX compressed' not in details:
        return False

    unpacked = os.path.join(
        workdir, 'unpacked_upx_%s' % os.path.basename(target))
    succeeded = self.unpack(target, unpacked)
    if os.path.isfile(unpacked) and succeeded:
        self.add_extracted_file(unpacked)
    return True
def each_with_type(self, target, filetype): self.results = { 'redirections': [], 'target': None } # Create temporary directory to get results self.outdir = tempdir() # Check if we're trying to analyze a local html file # if it is, the file is copied to the docker volume if filetype == "html": copyfile(target, os.path.join(self.outdir, "input.html")) target = "file:///data/input.html" # add http protocol if missing # requests lib needs it if filetype == "url" and not target.startswith('http'): target = 'http://{}'.format(target) if filetype == "url": self.add_ioc(target) # output dir results_dir = os.path.join(self.outdir, 'output') if not os.path.isdir(results_dir): os.mkdir(results_dir) # execute docker container output = self.preview(target) # save log output from dockerized app, extract potential redirections self.save_output(output) # save preview image screenshot = self.save_preview(results_dir) with open(os.path.join(results_dir, "output.html")) as f: parser = MyHTMLParser() parser.feed(f.read()) for url in parser.get_urls(): self.add_ioc(url) if len(self.results['redirections']) > 0: # save redirections as observable self.add_ioc(self.results['redirections'], ['redirection']) # save target as observable self.add_ioc(self.results['target']) return len(self.results['redirections']) > 0 or screenshot
def register_response_as(self, type, response, zipped=False):
    """Save an HTTP response body to a temp file and register it.

    `zipped` is accepted for API compatibility but not handled here.
    """
    if response.status_code != 200:
        self.log('error', 'could not find {0} for task id {1}'.format(type, self.task_id))
        return

    tmpdir = tempdir()
    filename = os.path.join(tmpdir, 'cuckoo_response')
    # Context manager ensures the handle is closed even if iter_content
    # raises mid-download (the original leaked it in that case).
    with open(filename, "wb") as f:
        for chunk in response.iter_content(1024):
            f.write(chunk)
    self.register_files(type, filename)
def each(self, target):
    """Verify an APK's signature against a freshly downloaded reference APK
    for the same package."""
    self.tmpdir = tempdir()
    self.results = dict()

    apk, vm, vm_analysis = AnalyzeAPK(target)
    self.results['package'] = apk.get_package()

    # Validate the target, then the reference, then compare certificates.
    self.validate_signature(target)
    reference = self.download_reference_apk()
    self.validate_signature(reference, "ref")

    # Short-circuit: certificates are only compared when both signatures
    # validated, exactly as before.
    self.results['verification_result'] = (
        self.results['target_status']
        and self.results['ref_status']
        and self.results['target_certificate'] ==
        self.results['ref_certificate'])
    return True
def each_dump(self): self.ignored_rules = list_value(self.ignored_rules) # Create file containing rules tmpdir = tempdir() rules_path = os.path.join(tmpdir, "rules") rules = open(rules_path, "w") rules.write(self.rules) rules.close() # Build a VadYaraScan plugin instance vad_yara_scan = self.configure_plugin( "windows.vadyarascan.VadYaraScan", yara_file="file://{}".format(rules_path)) rules = yarascan.YaraScan.process_yara_options( dict(vad_yara_scan.config)) for task in pslist.PsList.list_processes( context=vad_yara_scan.context, layer_name=vad_yara_scan.config["primary"], symbol_table=vad_yara_scan.config["nt_symbols"], ): layer_name = task.add_process_layer() layer = vad_yara_scan.context.layers[layer_name] for offset, rule_name, name, value in layer.scan( context=vad_yara_scan.context, scanner=yarascan.YaraScanner(rules=rules), sections=vad_yara_scan.get_vad_maps(task), ): if rule_name not in self.ignored_rules: self.results.append({ "rule": rule_name, "owner": task.ImageFileName.cast( "string", max_length=task.ImageFileName.vol.count, errors="replace"), "pid": task.UniqueProcessId, "variable": name, "hexdump": hexdump(value, result="return"), }) self.add_tag(rule_name) return len(self.results) > 0
def each(self, target):
    """Extract an ACE archive through the dockerized unace tool."""
    self.results = {'warnings': []}

    # Create temporary directory to get results
    self.outdir = tempdir()
    copyfile(target, os.path.join(self.outdir, "archive.ace"))

    # Path of the copied archive as seen from inside the container.
    target = "/data/archive.ace"

    # execute docker container, then persist its log output
    output = self.extract(target)
    self.save_output(output)
    return True
def process_report(self):
    """Download the Joe Sandbox reports: lightjson for IOCs, html for the
    execution graph; the HTML report is also attached as a support file."""
    try:
        # IOC extraction from the light JSON report.
        json_download = self.joe.analysis_download(self.webid,
                                                   type="lightjson")
        self.extract_iocs(io.BytesIO(json_download[1]))

        # Execution graph from the full HTML report.
        html_download = self.joe.analysis_download(self.webid, type="html")
        self.extract_graph(io.BytesIO(html_download[1]))

        # Keep the HTML report around as a support file.
        report_path = os.path.join(tempdir(), 'joe_report.html')
        with open(report_path, 'w+b') as fd:
            fd.write(html_download[1])
        self.add_support_file('Report', report_path)
    except Exception as error:
        raise ModuleExecutionError(
            'Error encountered while processing report:\n{}'.format(error))