def execute(self, request):
    """Look up the task's SHA1 in the NSRL database of known-good files.

    The sha1 digest is already present on the task object, so the sample
    itself is never downloaded. On a database hit the task is dropped from
    any further processing.
    """
    result = Result()
    try:
        matches = self.connection.query(request.sha1)
    except NSRLDatasource.DatabaseException:
        raise RecoverableError("Query failed")

    if matches:
        # Known-good file: stop all further processing of this task.
        request.drop()

        section = ResultSection(
            title_text="This file was found in the NSRL database. It is not malware.")
        section.score = SCORE.NOT

        # Show at most the first 10 database entries.
        for match in matches[:10]:
            section.add_line(match[0] + " - %s (%s) - v: %s - by: %s [%s]"
                             % (match[1], match[2], match[3], match[4], match[5]))
        overflow = len(matches) - 10
        if overflow > 0:
            section.add_line("And %s more..." % str(overflow))

        result.add_section(section)

    request.result = result
def parse_direct_db(self, response):
    """Assemble an AL Result from the individual lookup_* helpers.

    Each helper returns a displayable section (or None); the call-out and
    AV-hit helpers additionally return tags to attach to the result.
    """
    result = Result()

    source_section = self.lookup_source(response)
    if source_section:
        # Source frequency information was found.
        result.add_section(source_section)

    upatre_section = self.lookup_upatre_downloader(response)
    if upatre_section:
        # Upatre downloader data was found.
        result.add_section(upatre_section)

    callout_section, callout_tags = self.lookup_callouts(response)
    if callout_section:
        result.add_section(callout_section)
        # Attach domain, ip and port tags from the call-outs.
        for tag in callout_tags:
            result.append_tag(tag)

    spam_section = self.lookup_spam_feed(response)
    if spam_section:
        # Information from the SPAM feed.
        result.add_section(spam_section)

    av_section, av_tags = self.lookup_av_hits(response)
    if av_section:
        result.add_section(av_section)
        # Attach virus tags.
        for tag in av_tags:
            result.append_tag(tag)

    return result
def parse_results(self, response):
    """Convert a VirusTotal JSON report into an AL Result.

    Only acts on reports with response_code == 1 (report available); any
    other payload yields an empty Result.
    """
    res = Result()
    response = response.get('results', response)

    if response is None or response.get('response_code') != 1:
        return res

    av_hits = ResultSection(title_text='Anti-Virus Detections')
    url_section = ResultSection(
        SCORE.NULL,
        'Virus total report permalink',
        self.SERVICE_CLASSIFICATION,
        body_format=TEXT_FORMAT.URL,
        body=json.dumps({"url": response.get('permalink')}))
    res.add_section(url_section)

    scans = response.get('scans', response)
    av_hits.add_line('Found %d AV hit(s) from %d scans.'
                     % (response.get('positives'), response.get('total')))

    # One sub-section and one tag per engine that flagged the sample.
    for engine, details in sorted(scans.iteritems()):
        if details['detected']:
            virus_name = details['result']
            res.append_tag(VirusHitTag(virus_name, context="scanner:%s" % engine))
            av_hits.add_section(AvHitSection(engine, virus_name, SCORE.SURE))
    res.add_result(av_hits)

    return res
def icap_to_alresult(self, icap_result):
    """Parse a Kaspersky ICAP response into an AL Result.

    Virus hits carry 'X-Response-Info: blocked' plus an X-Virus-ID header
    with the detection name; misses come back as 'passed' with no
    X-Virus-ID section.
    """
    lines = icap_result.strip().splitlines()
    if len(lines) <= 3:
        raise Exception('Invalid result from Kaspersky ICAP server: %s' % str(icap_result))

    xri_key = 'X-Response-Info:'
    xvirus_key = 'X-Virus-ID:'
    x_response_info = None
    x_virus_id = None
    for line in lines:
        if line.startswith(xri_key):
            x_response_info = line[len(xri_key):].strip()
        elif line.startswith(xvirus_key):
            x_virus_id = line[len(xvirus_key):].strip()

    result = Result()
    if x_virus_id:
        # A virus id without a 'blocked' status is unexpected; log it but
        # still report the hit.
        if x_response_info != 'blocked':
            self.log.warn('found virus id but response was: %s', str(x_response_info))
        virus_name = x_virus_id.replace('INFECTED ', '')
        result.add_section(VirusHitSection(virus_name, SCORE.SURE))
        result.append_tag(VirusHitTag(virus_name))
    return result
def parse_results(self, response):
    """Convert a VirusTotal JSON report into an AL Result.

    Raises VTException when the public API rate limit was exceeded (204)
    or when a private-API-only call was attempted (203). Reports with
    response_code == 1 are rendered; anything else yields an empty Result.
    """
    res = Result()
    response = response.get('results', response)

    if response is not None:
        code = response.get('response_code')
        if code == 204:
            message = "You exceeded the public API request rate limit (4 requests of any nature per minute)"
            raise VTException(message)
        elif code == 203:
            message = "You tried to perform calls to functions for which you require a Private API key."
            raise VTException(message)
        elif code == 1:
            av_hits = ResultSection(title_text='Anti-Virus Detections')
            url_section = ResultSection(
                SCORE.NULL,
                'Virus total report permalink',
                self.SERVICE_CLASSIFICATION,
                body_format=TEXT_FORMAT.URL,
                body=json.dumps({"url": response.get('permalink')}))
            res.add_section(url_section)

            scans = response.get('scans', response)
            av_hits.add_line('Found %d AV hit(s) from %d scans.'
                             % (response.get('positives'), response.get('total')))

            # One sub-section and one tag per engine that flagged the sample.
            for engine, details in sorted(scans.iteritems()):
                if details['detected']:
                    virus_name = details['result']
                    res.append_tag(VirusHitTag(virus_name, context="scanner:%s" % engine))
                    av_hits.add_section(AvHitSection(engine, virus_name, SCORE.SURE))
            res.add_result(av_hits)

    return res
def execute(self, request):
    """Run the Manalyze binary against the sample and parse its JSON output.

    The raw tool output is captured once; if parsing fails, the unparsed
    output is placed in a fallback section before the error is re-raised
    so the task is still reported as failed.
    """
    local = request.download()
    self.result = request.result

    # Manalyze must be invoked from its own bin directory so that the
    # relative './manalyze' path (and its plugin lookup) resolves.
    local_dir = os.path.dirname(os.path.realpath(__file__)) + '/Manalyze/bin'
    os.chdir(local_dir)
    cmd_line = ['./manalyze', local, '-o', 'json', '-d', 'all', '--hashes']
    self.construct_plugins(cmd_line)

    # Fix: run the tool exactly once and keep its output. The old code
    # re-ran subprocess.check_output inside the except handler, which was
    # wasteful and could raise again, masking the original parse error.
    output = subprocess.check_output(cmd_line, preexec_fn=set_death_signal())
    try:
        result_section = self.parse(output=output)
    except Exception:
        # Fix: narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # are not intercepted. Surface the raw output for debugging, then
        # propagate — a JSON decoding failure must still fail the task.
        result_section = ResultSection(SCORE.NULL, "Summary")
        result_section.add_line(output)
        result_section.add_line("JSON Decoding Failed!")
        raise

    result = Result()
    result.add_section(result_section)
    request.result = result
def icap_to_alresult(self, icap_result):
    """Parse an FSecure ICAP response into an AL Result.

    Reads the X-FSecure scan-result / infection-name headers and records
    the engine version information carried by the ISTag header.
    """
    lines = icap_result.strip().splitlines()
    if len(lines) <= 3:
        raise Exception('Invalid result from FSecure ICAP server: %s' % str(icap_result))

    x_scan_result = 'X-FSecure-Scan-Result:'
    x_infection_name = 'X-FSecure-Infection-Name:'
    istag = 'ISTag:'

    infection_type = ''
    infection_name = ''
    for line in lines:
        if line.startswith(x_scan_result):
            infection_type = line[len(x_scan_result):].strip()
        elif line.startswith(x_infection_name):
            infection_name = line[len(x_infection_name):].strip().strip('"')
        elif line.startswith(istag):
            # Record engine/database version information for service context.
            version_info = line[len(istag):].strip()
            self._set_av_ver(version_info)

    result = Result()
    if infection_name:
        result.add_section(VirusHitSection(infection_name, SCORE.SURE,
                                           detection_type=infection_type))
        result.append_tag(VirusHitTag(infection_name))
    return result
def execute(self, request):
    """Analyse a Windows PE with Binary Ninja and report signature matches.

    Only files tagged executable/windows/* are processed; everything else
    returns without producing a result. Produces one section per matched
    API signature (with linear disassembly of the matched function as a
    supplementary file), a symbol x-ref summary section, and the Binary
    Ninja database as a supplementary file.
    """
    file_path = request.download()
    filename = os.path.basename(file_path)
    # Output paths: the Binary Ninja database and the disassembly dumps.
    bndb = os.path.join(self.working_directory, "%s.bndb" % filename)
    disas = os.path.join(self.working_directory, filename)
    # Reset any per-sample state left over from a previous execution.
    self.clean_structures()
    if request.tag.startswith("executable/windows/"):
        self.bv = binaryninja.BinaryViewType['PE'].open(file_path)
    else:
        # Unsupported file type for this service.
        return
    if self.bv is None:
        # Binary Ninja could not load the file.
        return
    result = Result()
    # Block until Binary Ninja's auto-analysis has completed.
    self.bv.update_analysis_and_wait()
    # Preparation
    self.linear_sweep()
    self.preprocess()
    self.symbol_usage()
    self.process_target_functions()
    # Check Signatures
    for sig in self.sigs:
        results = {}
        # check_api_sig fills `results` with matching functions for this
        # signature (keys/values are project-specific — see check_api_sig).
        self.check_api_sig(sig, results)
        if len(results) > 0:
            for res in results:
                # Section title: "<function name> - <signature name>";
                # "-A" suffixes are stripped from the function name.
                rn = "%s - %s" % (results[res].name.split("-A")[0], sig['name'])
                section = ResultSection(sig['score'], rn)
                if res in self.processed:
                    # Dump the linear disassembly of the matched function.
                    fn = "%s_%s" % (disas, rn.replace(" ", "_"))
                    with open(fn, "wb") as fp:
                        fp.write("\n".join("%s" % l for l in self.processed[res]))
                    request.add_supplementary(
                        fn, "Linear Disassembly of Matched Function", rn + ".disas")
                results[res].name = rn
                result.add_section(section)
    # Finalize Results and Store BNDB
    self.bv.create_database(bndb)
    request.add_supplementary(bndb, "Binary Ninja DB", filename + ".bndb")
    # Summary section: target-symbol cross-reference counts, most used first.
    section = ResultSection(self.apiscore, "Target Symbols X-refs")
    for sym in sorted(self.used_syms.items(), key=lambda x: x[1], reverse=True):
        section.add_line("%d\t%s" % (sym[1], sym[0]))
    result.add_section(section)
    request.result = result
    # Release Binary Ninja state now that the result has been submitted.
    self.clean_structures()
def execute(self, request):
    """Extract embedded/archived files from the sample.

    NOTE(review): this chunk appears truncated — the body ends inside the
    exception handler and never assigns request.result; confirm the full
    method in the original file before relying on this view.
    """
    result = Result()
    continue_after_extract = request.get_param('continue_after_extract')
    # Reset the password memo from any previous extraction.
    self._last_password = None
    local = request.download()
    password_protected = False
    white_listed = 0
    try:
        # extract() reports whether the archive was password protected and
        # how many contained files were white-listed.
        password_protected, white_listed = self.extract(request, local)
    except ExtractMaxExceeded, e:
        # Too many embedded files: report the limit breach as a section.
        result.add_section(ResultSection(score=SCORE["NULL"], title_text=str(e)))
def execute_batch(self, request_batch):
    """Scan a batch of files with BitDefender in a single folder pass.

    BitDefender scans a folder at a time, so all inputs are downloaded to
    one folder and scanned together. Each request is pre-marked successful
    with an empty Result; hits overwrite the result, and a RecoverableError
    from the scanner flips every request in the batch to failed.
    """
    batch_folder = request_batch.download()

    # Fix: the old comment said "mark all as failed" but the code marks all
    # as *successful* with an empty result; failures are set in the except
    # handler below. The default error_text only applies if a later stage
    # flips `successful` without setting its own message.
    for request in request_batch.requests:
        request.successful = True
        request.result = Result()
        request.error_is_recoverable = True
        # Fix: grammar ("Did not found" -> "Did not find"), matching the
        # wording used by the other batch scanners in this file.
        request.error_text = 'Did not find an entry for this file in the AV output'

    scanner = BitDefenderScanner(self.working_directory, self.exe_path)
    try:
        scan_results = scanner.scan_folder(batch_folder)
        for original_path, av_result in scan_results.results.iteritems():
            request = request_batch.find_by_local_path(original_path)
            if not request:
                self.log.error(
                    "Could not find task associated with path: %s\n.",
                    original_path)
                continue

            result = Result()
            for embedded_file, (is_virus, infection_type, infection_name,
                                _) in av_result.iteritems():
                if not is_virus:
                    continue
                # 'infected' is a confirmed detection; any other type
                # (e.g. suspicious) scores lower.
                score = SCORE.HIGH
                if infection_type == 'infected':
                    score = SCORE.SURE
                result.append_tag(VirusHitTag(infection_name))
                result.add_section(
                    VirusHitSection(infection_name, score, embedded_file,
                                    infection_type))
                # TODO(CVE / Exploit tag extraction)

            request.result = result
            request.successful = True
            request.task.report_service_context(self._av_info)
    except RecoverableError as rec_err:
        # Scanner-level failure: fail the whole batch recoverably.
        for request in request_batch.requests:
            request.successful = False
            request.error_text = rec_err.message
def execute(self, request):
    """Run binwalk over the sample and report carved/extracted content.

    One section is produced per binwalk scan module (offset/description
    dump), plus one shared section listing everything that was carved or
    extracted. Carved data and extracted files are resubmitted for
    analysis; extracted *directories* are zipped and attached as
    supplementary files instead.
    """
    local = request.download()
    al_result = Result()
    # Build binwalk's keyword arguments from the request parameters.
    command = self.construct_command(request)
    request.task.set_milestone("started", True)
    extract_section = ResultSection(SCORE.NULL, 'Extracted and Carved Files')
    for module in binwalk.scan(local, **command):
        section = ResultSection(SCORE.NULL, module.name, body_format=TEXT_FORMAT.MEMORY_DUMP)
        for result in module.results:
            section.add_line("0x%.8X : %s" % (result.offset, result.description))
            if(module.extractor.output.has_key(result.file.path)):
                # Carved data: binwalk copied raw bytes at this offset out
                # to a standalone file; resubmit it for analysis.
                if module.extractor.output[result.file.path].carved.has_key(result.offset):
                    extract_section.add_line("Carved data from offset 0x%X to %s" % (result.offset, module.extractor.output[result.file.path].carved[result.offset]))
                    file_name = module.extractor.output[result.file.path].carved[result.offset].split("/")[-1]
                    request.add_extracted(module.extractor.output[result.file.path].carved[result.offset], 'Carved File', file_name)
                # Extracted files: an extractor utility produced one or
                # more files from this offset.
                if module.extractor.output[result.file.path].extracted.has_key(result.offset) and \
                        len(module.extractor.output[result.file.path].extracted[result.offset].files) > 0:
                    path = module.extractor.output[result.file.path].extracted[result.offset].files[0]
                    extract = module.extractor.output[result.file.path].extracted[result.offset].command
                    extract_section.add_line("Extracted %d files from offset 0x%X to '%s' using '%s'" % (
                        len(module.extractor.output[result.file.path].extracted[result.offset].files),
                        result.offset, path, extract))
                    if(os.path.isdir(path)):
                        # Directories cannot be resubmitted directly: zip
                        # them and attach as a supplementary file.
                        file = zipfile.ZipFile("%s.zip" % path.split("/")[-1], 'w', zipfile.ZIP_DEFLATED)
                        self.zip_dir(path, file)
                        file.close()
                        request.add_supplementary(file.filename, extract, file.filename.split("/")[-1])
                    else:
                        request.add_extracted(path, extract, path.split("/")[-1])
        al_result.add_section(section)
    request.task.set_milestone("finished", True)
    al_result.add_section(extract_section)
    request.result = al_result
def execute_batch(self, request_batch):
    """Scan a batch of files with McAfee and attach per-request results.

    Requests are pre-marked successful with an empty Result; detections
    overwrite the result for the matching request.
    """
    self.log.info('Execute batch of size %d', len(request_batch.requests))
    request_batch.download()

    # Only scan files that actually downloaded successfully.
    paths_to_scan = []
    for request in request_batch.requests:
        if request.successful and request.local_path:
            paths_to_scan.append(request.local_path)

    # Pre-mark every request as successful with an empty (clean) result;
    # detections below replace the result for their request.
    # NOTE(review): the original comment here said "mark all as failed",
    # which contradicted the code.
    for request in request_batch.requests:
        request.successful = True
        request.error_is_recoverable = True
        request.result = Result()
        # request.error_text = 'Did not find an entry for this file in the AV output'

    scanner = McAfeeScanner(self.exe_path, self.dat_directory, self.working_directory)  # pylint: disable=E0602
    scan_results = scanner.scan_files(paths_to_scan)
    if not scan_results:
        # Nothing scanned; every request keeps its empty result.
        return

    for original_path, av_result in scan_results.results.iteritems():
        request = request_batch.find_by_local_path(original_path)
        if not request:
            self.log.error(
                'Could not find request associated with path %s',
                original_path)
            continue
        request.task.report_service_context(self._av_info)
        result = Result()
        # One tag + section per infected embedded file.
        for embedded_file, (is_virus, detection_type, virus_name,
                            _reserved) in av_result.iteritems():
            if not is_virus:
                continue
            result.append_tag(VirusHitTag(virus_name))
            result.add_section(
                VirusHitSection(virus_name, SCORE.SURE, embedded_file,
                                detection_type))
        request.result = result
        request.successful = True

    request_batch.delete_downloaded()
def execute(self, request):
    """Report whole-file and per-partition entropy as a colormap section."""
    path = request.download()
    with open(path, 'rb') as fin:
        (entropy, part_entropies) = calculate_partition_entropy(fin)

    # Colormap over the theoretical entropy range of 0-8 bits per byte.
    graph_body = {
        'type': 'colormap',
        'data': {
            'domain': [0, 8],
            'values': part_entropies
        }
    }
    entropy_section = ResultSection(
        SCORE.NULL,
        'Entropy.\tEntire File: {}'.format(round(entropy, 3)),
        self.SERVICE_CLASSIFICATION,
        body_format=TEXT_FORMAT.GRAPH_DATA,
        body=json.dumps(graph_body))

    result = Result()
    result.add_section(entropy_section)
    request.result = result
def execute(self, request):
    """Query the CFMD datasource by sha256 and report known-good matches."""
    result = Result()
    try:
        matches = self.connection.query(request.sha256)
    except CFMDDatasource.DatabaseException:
        raise RecoverableError("Query failed")

    if matches:
        section = ResultSection(
            title_text="This file was found in the %s. It is not malware." % CFMDDatasource.Name,
            score=SCORE['NOT'])
        # One entry per match: filename/size plus the three hashes,
        # separated by a blank line.
        for item in matches:
            section.add_line("%s (%s bytes)" % (item['filename'], item['size']))
            section.add_line(" MD5: %s" % item['md5'])
            section.add_line(" SHA1: %s" % item['sha1'])
            section.add_line(" SHA256: %s" % item['sha256'])
            section.add_line("")
        result.add_section(section)

    request.result = result
def execute(self, request):
    """Run Suricata over a PCAP and report alerts, domains, IPs and URLs.

    The PCAP is submitted to a running Suricata instance over its unix
    socket; the resulting eve.json is parsed into alert sections and
    network tags, and attached as a supplementary file.
    """
    file_path = request.download()
    result = Result()

    # restart Suricata if we need to
    self.start_suricata_if_necessary()

    # Update our rules if they're stale,
    self.reload_rules_if_necessary()

    # Strip frame headers from the PCAP, since Suricata sometimes has trouble parsing strange PCAPs
    stripped_filepath = self.strip_frame_headers(file_path)

    # Pass the pcap file to Suricata via the socket
    ret = self.suricata_sc.send_command(
        "pcap-file", {
            "filename": stripped_filepath,
            "output-dir": self.working_directory
        })
    if not ret or ret["return"] != "OK":
        self.log.exception("Failed to submit PCAP for processing: %s" % ret['message'])

    # Wait for the socket finish processing our PCAP
    # (pcap-current answers "None" once the queue is empty).
    while True:
        time.sleep(1)
        ret = self.suricata_sc.send_command("pcap-current")
        if ret and ret["message"] == "None":
            break

    alerts = {}       # signature_id -> list of "timestamp src -> dest" flow strings
    signatures = {}   # signature_id -> signature message
    domains = []
    ips = []
    urls = []

    # Parse the json results of the service
    for line in open(os.path.join(self.working_directory, 'eve.json')):
        record = json.loads(line)

        timestamp = dateparser.parse(record['timestamp']).isoformat(' ')
        src_ip = record['src_ip']
        src_port = record['src_port']
        dest_ip = record['dest_ip']
        dest_port = record['dest_port']

        if src_ip not in ips:
            ips.append(src_ip)
        if dest_ip not in ips:
            ips.append(dest_ip)

        if record['event_type'] == 'http':
            if 'hostname' not in record['http'] or 'url' not in record[
                    'http']:
                continue
            domain = record['http']['hostname']
            # Only hostnames that are not bare IPs count as domains.
            if domain not in domains and domain not in ips:
                domains.append(domain)
            url = "http://" + domain + record['http']['url']
            if url not in urls:
                urls.append(url)

        if record['event_type'] == 'dns':
            if 'rrname' not in record['dns']:
                continue
            domain = record['dns']['rrname']
            if domain not in domains and domain not in ips:
                domains.append(domain)

        if record['event_type'] == 'alert':
            if 'signature_id' not in record[
                    'alert'] or 'signature' not in record['alert']:
                continue
            signature_id = record['alert']['signature_id']
            signature = record['alert']['signature']

            if signature_id not in alerts:
                alerts[signature_id] = []
            if signature_id not in signatures:
                signatures[signature_id] = signature

            alerts[signature_id].append(
                "%s %s:%s -> %s:%s" % (timestamp, src_ip, src_port, dest_ip,
                                       dest_port))

    # Create the result sections if there are any hits
    if len(alerts) > 0:
        for signature_id, signature in signatures.iteritems():
            # Score by configured keyword lists; VHIGH_SCORE keywords
            # override SURE_SCORE ones when both match.
            score = SCORE.NULL
            tag_weight = TAG_WEIGHT.NULL

            if any(x in signature for x in self.cfg.get("SURE_SCORE").split()):
                score = SCORE.SURE
                tag_weight = TAG_WEIGHT.SURE
            if any(x in signature for x in self.cfg.get("VHIGH_SCORE").split()):
                score = SCORE.VHIGH
                tag_weight = TAG_WEIGHT.VHIGH

            # Show at most 10 flows per signature.
            section = ResultSection(score, '%s: %s' % (signature_id, signature))
            for flow in alerts[signature_id][:10]:
                section.add_line(flow)
            if len(alerts[signature_id]) > 10:
                section.add_line('And %s more flows' % (len(alerts[signature_id]) - 10))
            result.add_section(section)

            # Add a tag for the signature id and the message
            result.add_tag(TAG_TYPE.SURICATA_SIGNATURE_ID,
                           str(signature_id),
                           tag_weight,
                           usage=TAG_USAGE.IDENTIFICATION)
            result.add_tag(TAG_TYPE.SURICATA_SIGNATURE_MESSAGE,
                           signature,
                           tag_weight,
                           usage=TAG_USAGE.IDENTIFICATION)

    # Add tags for the domains, urls, and IPs we've discovered
    for domain in domains:
        result.add_tag(TAG_TYPE.NET_DOMAIN_NAME,
                       domain,
                       TAG_WEIGHT.VHIGH,
                       usage=TAG_USAGE.CORRELATION)
    for url in urls:
        result.add_tag(TAG_TYPE.NET_FULL_URI,
                       url,
                       TAG_WEIGHT.VHIGH,
                       usage=TAG_USAGE.CORRELATION)
    for ip in ips:
        result.add_tag(TAG_TYPE.NET_IP,
                       ip,
                       TAG_WEIGHT.VHIGH,
                       usage=TAG_USAGE.CORRELATION)

    # Add the original Suricata output as a supplementary file in the result
    request.add_supplementary(
        os.path.join(self.working_directory, 'eve.json'), 'json',
        'SuricataEventLog.json')

    # Add the stats.log to the result, which can be used to determine service success
    if os.path.exists(os.path.join(self.working_directory, 'stats.log')):
        request.add_supplementary(
            os.path.join(self.working_directory, 'stats.log'), 'log',
            'stats.log')

    request.result = result
def execute(self, request):
    """Demonstrate every major ResultSection feature of the service API.

    Builds a default text section, a colormap graph, URL sections (with
    tags and a scored subsection), a memory-dump section, and shows file
    resubmission plus supplementary-file attachment.
    """
    # Create a result object where all the sections will be stored
    result = Result()

    # ==================================================================
    # Default Section:
    #     Default section basically just dumps the text to the screen...
    #     All sections scores will be SUMed in the service result
    #     The Result classification will be the highest classification found in the sections
    default_section = ResultSection(SCORE.LOW,
                                    'Example of a default section',
                                    Classification.RESTRICTED)
    default_section.add_line("You can add line by line!")
    default_section.add_lines(["Or", "Multiple lines", "Inside a list!"])

    # ==================================================================
    # Color map Section:
    #     Creates a color map bar using a minimum and maximum domain
    cmap_min = 0
    cmap_max = 20
    color_map_data = {
        'type': 'colormap',
        'data': {
            'domain': [cmap_min, cmap_max],
            'values': [random.random() * cmap_max for _ in xrange(50)]
        }
    }
    section_color_map = ResultSection(SCORE.NULL,
                                      "Example of colormap result section",
                                      self.SERVICE_CLASSIFICATION,
                                      body_format=TEXT_FORMAT.GRAPH_DATA,
                                      body=json.dumps(color_map_data))

    # ==================================================================
    # URL section:
    #     Generate a list of clickable urls using a json encoded format
    url_section = ResultSection(SCORE.NULL,
                                'Example of a simple url section',
                                self.SERVICE_CLASSIFICATION,
                                body_format=TEXT_FORMAT.URL,
                                body=json.dumps({
                                    "name": "Google",
                                    "url": "https://www.google.com/"
                                }))

    # You can add tags to any section although those tag will be brought up to the result object
    # Tags are defined by a type, value and weight (confidence lvl)
    #     you can also add a classification and context if needed
    url_section.add_tag(TAG_TYPE.NET_DOMAIN_NAME, "google.com",
                        TAG_WEIGHT.LOW)
    url_section.add_tag(TAG_TYPE.NET_DOMAIN_NAME,
                        "bob.com",
                        TAG_WEIGHT.LOW,
                        classification=Classification.RESTRICTED)
    url_section.add_tag(TAG_TYPE.NET_DOMAIN_NAME,
                        "baddomain.com",
                        TAG_WEIGHT.LOW,
                        context=Context.BEACONS)

    # You may also want to provide a list of url! Also, No need to provide a name, the url link will be displayed
    urls = [{
        "url": "https://google.com/"
    }, {
        "url": "https://google.ca/"
    }, {
        "url": "https://microsoft.com/"
    }]
    url_section2 = ResultSection(
        SCORE.MED,
        'Example of a url section with multiple links',
        self.SERVICE_CLASSIFICATION,
        body_format=TEXT_FORMAT.URL,
        body=json.dumps(urls))
    # Add url_section2 as a subsection of url section
    # The score of the subsections will automatically be added to the parent section
    url_section.add_section(url_section2)

    # ==================================================================
    # Memory dump section:
    #     Dump whatever string content you have into a <pre/> html tag so you can do your own formatting
    data = hexdump(
        "This is some random text that we will format as an hexdump and you'll see "
        "that the hexdump formatting will be preserved by the memory dump section!"
    )
    memdump_section = ResultSection(SCORE.NULL,
                                    'Example of a memory dump section',
                                    self.SERVICE_CLASSIFICATION,
                                    body_format=TEXT_FORMAT.MEMORY_DUMP,
                                    body=data)

    # ==================================================================
    # Re-Submitting files to the system
    #     Adding extracted files will have them resubmitted to the system for analysis
    if request.srl != '8cf8277a71e85122bf7ea4610c7cfcc0bfb6dee799be50a41b2f4b1321b3317f':
        # This IF just prevents resubmitting the same file in a loop for this exemple...
        temp_path = tempfile.mktemp(dir=self.working_directory)
        with open(temp_path, "w") as myfile:
            myfile.write(data)
        request.add_extracted(temp_path,
                              "Extracted by some random magic!",
                              display_name="file.txt")

    # ==================================================================
    # Supplementary files
    #     Adding supplementary files will save them on the datastore for future
    #     reference but wont reprocess those files.
    temp_path = tempfile.mktemp(dir=self.working_directory)
    with open(temp_path, "w") as myfile:
        myfile.write(json.dumps(urls))
    request.add_supplementary(temp_path,
                              "These are urls as a JSON",
                              display_name="urls.json")

    # ==================================================================
    # Wrap-up:
    #     Add all sections to the Result object
    result.add_section(default_section)
    result.add_section(section_color_map)
    result.add_section(url_section)
    result.add_section(memdump_section)
    request.result = result
def populate_result(self, current_lines, filename, request):
    """Build a Result from sigcheck output lines for a single file.

    current_lines is the sigcheck stdout split into lines; line index 1
    carries the "\\tVerified:" status. filename is used for the optional
    signtool dump. Trusted-signer matches drop the task (unless filtering
    is ignored on the request).
    """
    result = Result()
    should_filter_out = False
    dump_sign_tool_output = False
    skip_detailed_output = False

    # Fix: guard the length BEFORE indexing. The original assigned
    # current_lines[1] ahead of the len() check, raising IndexError on
    # short output instead of returning an empty result.
    if len(current_lines) <= 1:
        return result
    status_line = current_lines[1]

    if status_line == "\tVerified:\tUnsigned":
        return result
    elif status_line.find("\tVerified:") != 0 or \
            status_line == "\tVerified:\tUntrusted Root" or \
            status_line == "\tVerified:\tUntrusted Authority" or \
            status_line == "\tVerified:\tUntrusted Certificate" or \
            status_line == "\tVerified:\tMalformed" or \
            status_line == "\tVerified:\tInvalid Chain":
        # This file has a signature but is not verified.
        result_section = ResultSection(
            score=SCORE.HIGH,
            title_text=("This file has an invalid/untrusted signature."
                        "The file might have been modified or the "
                        "signature is just a fake one."))
        dump_sign_tool_output = True
        result.report_heuristic(SigCheck.AL_SigCheck_001)
    elif status_line == "\tVerified:\tExpired":
        # Signed, but the certificate has expired.
        result_section = ResultSection(
            score=SCORE.LOW,
            title_text="This file has an expired signature.")
        dump_sign_tool_output = True
        result.report_heuristic(SigCheck.AL_SigCheck_002)
    elif status_line == "\tVerified:\tSigned":
        is_authorised_signers = False

        # Collect the signer names: the lines indented with two tabs that
        # immediately follow the "Signers:" tag.
        signers = []
        signers_tag_found = False
        i = 0
        while i < len(current_lines):
            if signers_tag_found:
                if current_lines[i][0:2] == '\t\t':
                    # Skip the first two tabs.
                    signers.append(current_lines[i][2:])
                else:
                    break
            elif current_lines[i].find("\tSigners:") == 0:
                signers_tag_found = True
            i += 1

        # The signer chain must match a trusted chain exactly (order and
        # content) to be filtered out automatically.
        for trusted_name_item in self.trusted_name_list:
            if trusted_name_item == signers:
                is_authorised_signers = True
                break

        if is_authorised_signers:
            result_section = ResultSection(
                score=SCORE.NOT,
                title_text="This file is signed with trusted signers")
            result.report_heuristic(SigCheck.AL_SigCheck_003)
            should_filter_out = True
        else:
            result_section = ResultSection(
                score=SCORE.INFO,
                title_text=
                "Signed with signers we don't automatically filter out")
            result.report_heuristic(SigCheck.AL_SigCheck_004)
    else:
        # Fix: the original format string had two %s placeholders but was
        # given a single argument, so this line raised TypeError instead
        # of logging. Supply the status line as the second argument.
        self.log.error(
            "The sigcheck output:\n%s\ncontained unexpected results %s"
            % ("\n".join(current_lines), status_line))
        result_section = ResultSection(
            score=SCORE.MED,
            title_text="Unexpected result from sigcheck ... to investigate."
        )
        result.report_heuristic(SigCheck.AL_SigCheck_005)

    if should_filter_out and not request.ignore_filtering:
        request.drop()

    if skip_detailed_output:
        result.add_section(result_section)
        return result

    # Expand our result with the sigcheck output.
    self._add_sigcheck_output(current_lines, result_section)

    # Optionally expand our result with the signtool output.
    if dump_sign_tool_output:
        self._add_signtool_output(filename, result_section)

    result.add_section(result_section)
    return result