def test_scan_url(self):
    """Smoke-test PublicApi.scan_url against a live URL."""
    api = PublicApi(API_KEY)
    try:
        report = api.scan_url('www.wired.com')
        print(json.dumps(report, sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def each_with_type(self, target, target_type):
    """Fetch a VirusTotal report for *target* and cache its key fields.

    target: a URL string (when target_type == "url") or a path to a
        local file whose SHA-256 is computed and looked up.
    target_type: "url" selects a URL report; any other value is treated
        as a file path.

    Returns True and fills self.results when a report exists, otherwise
    logs and returns False.
    """
    self.results = {}
    vt = VirusTotalPublicApi(self.api_key)
    if target_type == "url":
        response = vt.get_url_report(target)
    else:
        # The public API only accepts hashes, so hash the file locally.
        with open(target, "rb") as f:
            sha256 = hashlib.sha256(f.read()).hexdigest()
        response = vt.get_file_report(sha256)
    # if request successful
    # (outer response_code 200 = HTTP OK; inner 1 = report available)
    if response["response_code"] == 200 and response["results"][
            "response_code"] == 1:
        self.results["scan_date"] = response["results"]["scan_date"]
        self.results["permalink"] = response["results"]["permalink"]
        self.results["positives"] = response["results"]["positives"]
        self.results["total"] = response["results"]["total"]
        self.results["scans"] = response["results"]["scans"]
        return True
    self.log("debug", "no report found")
    return False
def test_scan_file_stringio(self):
    """Smoke-test scan_file with an in-memory (StringIO) EICAR payload.

    Fixed: the Python-2-only ``print`` statement is replaced with the
    ``print()`` function for consistency with the other tests here.
    """
    vt = PublicApi(API_KEY)
    try:
        print(json.dumps(vt.scan_file(StringIO.StringIO(EICAR)),
                         sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def test_scan_file_binary(self):
    """Smoke-test scan_file with an on-disk binary sample."""
    api = PublicApi(API_KEY)
    try:
        result = api.scan_file('virus_total_apis/test/test.exe')
        print(json.dumps(result, sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def get_VT_name(hashes):
    """Derive a ClamAV-style signature name from VirusTotal reports.

    Looks up every hash in *hashes*, builds a canonical unique name per
    report and, when at least two reports all agree, returns the name as
    "platform.category.unique_name".  Falls back to the generic name
    when no API key is set, reports disagree, the sample is a PUA, or
    anything else fails.

    Fixed: typos in the log messages ("Excpected" -> "Expected",
    "White" -> "While"), deprecated ``log.warn`` -> ``log.warning``,
    and a dead ``pass`` after the PUA log call.
    """
    try:
        vt = PublicApi(api_key=os.environ["VIRUSTOTAL_API_KEY"])
        generator = ComputeVtUniqueName()
        names = [
            generator.build_unique_name(vt.get_file_report(hash_) or "")
            for hash_ in hashes
        ]
        # Require at least two reports that all agree before trusting it.
        if len(names) >= 2 and all(names[0] == name for name in names[1:]):
            name = names[0]
            if name["pup"]:
                log.error(
                    "PUA signatures are not implemented yet. Expected name was: %s",
                    str(name))
            else:
                return "{}.{}.{}".format(name["platform"], name["category"],
                                         name["unique_name"])
    except KeyError:
        log.warning(
            "No VIRUSTOTAL_API_KEY specified. Falling back to generic name.")
    except Exception:
        log.exception(
            "While trying to compute VT name. Falling back to generic name.")
    return GENERIC_CLAMAV_MALWARE_NAME
def test_get_domain_report(self):
    """Smoke-test get_domain_report against a live domain."""
    api = PublicApi(API_KEY)
    try:
        report = api.get_domain_report('www.wired.com')
        print(json.dumps(report, sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def test_rescan_file(self):
    """Smoke-test rescan_file with the EICAR MD5."""
    api = PublicApi(API_KEY)
    try:
        report = api.rescan_file(EICAR_MD5)
        print(json.dumps(report, sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def test_scan_file_stream(self):
    """Smoke-test scan_file with an in-memory payload (from_disk=False)."""
    api = PublicApi(API_KEY)
    try:
        report = api.scan_file(EICAR, from_disk=False)
        print(json.dumps(report, sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def test_get_ip_report(self):
    """Smoke-test get_ip_report against a live IP address."""
    api = PublicApi(API_KEY)
    try:
        report = api.get_ip_report('23.6.113.133')
        print(json.dumps(report, sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def check_url(self, message, input):
    """Reply to *message* with a VirusTotal file report for every hash in
    the comma-separated *input* string."""
    client = VirusTotalPublicApi(self.API_KEY)
    try:
        hashes = [item.strip() for item in input.split(',')]
        for file_hash in hashes:
            results = client.get_file_report(file_hash).get("results")
            self.reply(
                message,
                "VirusTotal Scan Result \n"
                "Scan date: {scan_date} \n"
                "Detection rate: {positives} out of {total} \n"
                "SHA1: {sha1} \n"
                "MD5: {md5} \n"
                "Permalink: {permalink}".format(
                    scan_date=results.get("scan_date"),
                    positives=results.get("positives"),
                    total=results.get("total"),
                    sha1=results.get("sha1"),
                    md5=results.get("md5"),
                    permalink=results.get("permalink")))
    except Exception as e:
        self.reply(message, "Request for " + input + " raised an exception!")
def test_md5_hash(self):
    """Smoke-test get_file_report with the EICAR MD5.

    Fixed: Python-2-only ``print`` statement replaced with the
    ``print()`` function for consistency with the other tests.
    """
    vt = PublicApi(API_KEY)
    try:
        print(json.dumps(vt.get_file_report(EICAR_MD5),
                         sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def virustotal(conf, ipaddress, results):
    """This method updates the network results with the Virustotal reports.

    Args:
        conf (dict): dict of configuration
        ipaddress (string): ip address to analyze
        results (dict): dict where will put the results

    Returns:
        This method updates the results dict given
    """
    if conf["enabled"]:
        from virus_total_apis import PublicApi as VirusTotalPublicApi
        vt = VirusTotalPublicApi(conf["api_key"])
        # Error: {u'virustotal': {'error': SSLError(SSLEOFError(8, u'EOF
        # occurred in violation of protocol (_ssl.c:590)'),)}}')
        # TypeError: SSLError(SSLEOFError(8, u'EOF occurred in violation of
        # protocol (_ssl.c:590)'),) is not JSON serializable')
        try:
            r = vt.get_ip_report(ipaddress)
            # Serialize eagerly so non-JSON-serializable error objects in
            # the report raise TypeError here instead of downstream.
            report = json.dumps(r, ensure_ascii=False)
        except TypeError:
            log.error(
                "TypeError in VirusTotal report for ip {!r}".format(ipaddress))
        else:
            if report:
                results["virustotal"] = report
def virustotal(conf, attachments):
    """This method updates the attachments results with the Virustotal
    reports.

    Args:
        attachments (list): all attachments of email
        conf (dict): conf of this post processor

    Returns:
        This method updates the attachments list given
    """
    if not conf["enabled"]:
        return
    from virus_total_apis import PublicApi as VirusTotalPublicApi
    from .utils import reformat_virustotal
    client = VirusTotalPublicApi(conf["api_key"])
    for attachment in attachments:
        if attachment.get("is_filtered", False):
            continue
        # Top-level attachment report, keyed by SHA-1.
        report = client.get_file_report(attachment["sha1"])
        reformat_virustotal(report)
        if report:
            attachment["virustotal"] = report
        # Nested files (e.g. archive members) get their own reports.
        for inner in attachment.get("files", []):
            inner_report = client.get_file_report(inner["sha1"])
            reformat_virustotal(inner_report)
            if inner_report:
                inner["virustotal"] = inner_report
def send_hash(self, filehash):
    """Look up *filehash* on VirusTotal and return the raw report."""
    # Build the client from the API key stored on the instance.
    client = PublicApi(self.api)
    # Ask VirusTotal for the file report belonging to this hash.
    return client.get_file_report(filehash)
def main(url):
    """Fetch a VirusTotal domain report for *url* and return a fixed-order
    list of extracted fields:

    [api response code, whois result, subdomains, resolutions,
     whois data, whois timestamp, DNS record date, scan date,
     verbose message]

    On error, every slot holds a "not found"/NaN placeholder.

    Fixed: membership test uses ``in response`` instead of the
    ``in response.keys()`` anti-idiom.
    """
    apikey = fetchapikey()
    vt = VirusTotalPublicApi(apikey)
    response = vt.get_domain_report(url)
    if "error" in response:
        # Placeholder payload when the lookup failed entirely.
        apiresco = "NaN"
        whoisres = "NaN"
        subdomsi = []
        resolute = []
        whotulst = []
        whoitime = "WhoIs timestamp not found"
        dnsrecdt = "DNS record date not found"
        scandate = "Scan date not found"
        verbmesg = "Please check your internet connection and the URL for typos and try again"
        reply = [apiresco, whoisres, subdomsi, resolute, whotulst,
                 whoitime, dnsrecdt, scandate, verbmesg]
    else:
        apiresco = str(response["response_code"])
        whoisres = str(getwhoisres(response))
        whoitime = str(getwhoitime(response))
        dnsrecdt = str(getdnsrecdt(response))
        subdomsi = getsubdomsi(response)
        whotulst = getwhoisdat(response)
        resolute = getresolute(response)
        # Scan date is "now" rather than the report's own timestamp.
        scandate = str(time.ctime(int(time.time())))
        verbmesg = response["results"]["verbose_msg"]
        reply = [apiresco, whoisres, subdomsi, resolute, whotulst,
                 whoitime, dnsrecdt, scandate, verbmesg]
    return reply
def fetch_results(self):
    """Collect per-engine VirusTotal detections for self.__md5sum__.

    Returns [] when no API key is configured; otherwise a dict with
    'total_detection' and 'detection_info' (empty if the report is
    missing fields or the request failed).
    """
    cfg = Configuration()
    has_key, api_key = cfg.get_virustotal_apikey()
    if not has_key:
        return []
    self.__api_key__ = api_key
    client = VirusTotalPublicApi(self.__api_key__)
    response = client.get_file_report(self.__md5sum__)
    vt_results = {}
    detections = []
    try:
        if response["response_code"] == 200:
            # results.append({'total': response['results']['total']})
            for av_name, scan in response['results']['scans'].items():
                detections.append({
                    'av_name': av_name,
                    'detected': scan['detected'],
                    'version': scan['version'],
                    'result': scan['result'],
                })
            vt_results['total_detection'] = response['results']['positives']
            vt_results['detection_info'] = detections
            return vt_results
    except KeyError:
        pass
    return vt_results
def analizarRe(carpeta):
    """Recursively scan every file under *carpeta* against VirusTotal by
    MD5, printing (in Spanish) whether each file is flagged, then recurse
    into sub-directories.

    Fixed: removed the ``archivos``/``carpetas`` counters, which were
    incremented but never read.
    """
    # NOTE(review): hard-coded API key checked into source — should be
    # moved to configuration / environment.
    API_KEY = "a9089095456a6c812626239b837c894abcea66938853813118ebf16a5fff1690"
    api = PublicApi(API_KEY)
    for i in os.listdir(carpeta):
        ruta = os.path.join(carpeta, i)
        if os.path.isfile(ruta):
            print(i + ": ")
            with open(ruta, "rb") as f:
                file_hash = md5(f.read()).hexdigest()
            response = api.get_file_report(file_hash)
            if response["response_code"] == 200:
                if response["results"]["positives"] > 0:
                    print("Archivo malicioso.")
                else:
                    print("Archivo seguro.")
            else:
                print("No ha podido obtenerse el análisis del archivo.")
            print("==================================================")
    # Second pass: recurse into each sub-directory.
    for i in os.listdir(carpeta):
        if os.path.isdir(os.path.join(carpeta, i)):
            analizarRe(os.path.join(carpeta, i))
def check_virustotal(self, cr, uid, ids, context=None):
    """Fetch the VirusTotal scan summary for the quarantined record(s)
    and store it as an HTML list in `virustotal_summary`.

    OpenERP/Odoo 7-style signature (cr, uid, ids, context).
    Raises ConfigError when no active antivir.config record exists.
    """
    config_obj = self.pool.get('antivir.config')
    config_ids = config_obj.search(cr, uid, [('active_config', '=', True)],
                                   context=context)
    if config_ids:
        config = config_obj.browse(cr, uid, config_ids, context=context)
        if config[0].virustotal_api_url and config[0].virustotal_api_key:
            quarantine_item = self.browse(cr, uid, ids, context=context)
            vt = VirusTotalPublicApi(config[0].virustotal_api_key)
            # Look the quarantined file up by its stored SHA-256.
            response = vt.get_file_report(quarantine_item[0].SHA256)
            scans = response['results'].get('scans')
            if scans:
                # One <li> per AV engine (Python 2: dict.iteritems).
                scans_results = ["<li>[{}] detected:{} result:{}</li>".format(
                    str(key), str(val.get('detected')), str(val.get('result')))
                    for key, val in scans.iteritems()]
                virustotal_summary = "<ul>{}</ul>".format(''.join(scans_results))
            else:
                virustotal_summary = _("Couldn't fetch virustotal_summary, try again later.")
            self.write(cr, uid, ids, {'virustotal_summary': virustotal_summary},
                       context=context)
    else:
        raise ConfigError(_("There is no active config."))
def test_put_comments(self):
    """Smoke-test put_comments against the EICAR resource."""
    api = PublicApi(API_KEY)
    comment = 'This is just a test of the virus-total-api. https://github.com/blacktop/virustotal-api'
    try:
        report = api.put_comments(resource=EICAR_MD5, comment=comment)
        print(json.dumps(report, sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def test_hash_found(self):
    """Smoke-test get_file_report with a hash known to VirusTotal."""
    api = PublicApi(API_KEY)
    try:
        report = api.get_file_report('44cda81782dc2a346abd7b2285530c5f')
        print(json.dumps(report, sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def test_sha256_hash(self):
    """Smoke-test get_file_report with the EICAR SHA-256."""
    api = PublicApi(API_KEY)
    try:
        report = api.get_file_report(EICAR_SHA256)
        print(json.dumps(report, sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def submit2vt(filename):
    """Submit a new file to VT for scanning.

    Logs the API verdict, keeps a raw debug copy of the response and
    (optionally) indexes the result into Elasticsearch.

    Fixed: the debug file is managed with a context manager, and the
    bare ``except:`` around the ES indexing is narrowed to ``Exception``
    so KeyboardInterrupt/SystemExit still propagate.
    """
    # Check VT score
    vt = VirusTotalPublicApi(config['apiKey'])
    response = vt.scan_file(filename)
    # DEBUG: keep a raw copy of every VT response for troubleshooting
    with open('/tmp/vt.debug', 'a') as fp:
        fp.write(json.dumps(response, sort_keys=False, indent=4))
    if response['response_code'] == 200:
        writeLog("VT Reply: %s" % response['results']['verbose_msg'])
    else:
        writeLog('VT Error: %s' % response['error'])
    if config['esServer']:
        # Save results to Elasticsearch (best-effort).
        try:
            response['@timestamp'] = time.strftime("%Y-%m-%dT%H:%M:%S+01:00")
            res = es.index(index=config['esIndex'],
                           doc_type="VTresult",
                           body=json.dumps(response))
        except Exception:
            writeLog("Cannot index to Elasticsearch")
    return
def virus_check(path : str):
    """Return a {engine_name: verdict} dict for every VirusTotal engine
    that flagged the file at *path* (looked up by MD5).

    Fixed: dropped the pointless ``rf'{path}'`` wrapper around the path,
    the ``== True`` comparison, and the commented-out alternate reader.
    """
    # Read the file as raw bytes (works for .exe or any other file).
    with open(path, 'rb') as f:
        content = f.read()
    file_md5 = hashlib.md5(content).hexdigest()
    vt = VTApi(API_KEY)
    res = vt.get_file_report(file_md5)
    # The scan payload lives under the report's first top-level key.
    scans = res[next(iter(res))]['scans']
    detected = {}
    for engine, verdict in scans.items():
        if verdict['detected']:
            detected[engine] = verdict['result']
    return detected
def selftest_function(opts):
    """Selftest for the fn_virustotal package: fetch an IP report for a
    known address and return {"state": ..., "reason": ...}.

    Fixed: the "no response" guard now actually fires — the original
    ``response and type(response) is not dict`` test was never true for
    a None/empty response, so a missing response surfaced as an
    AttributeError instead of the intended reason.
    """
    TEST_IP = "8.8.8.8"
    options = opts.get("fn_virustotal", {})
    reason = ""
    try:
        vt = VirusTotal(options['api_token'], options['proxies'])
        response = vt.get_ip_report(TEST_IP)
        if not response or not isinstance(response, dict):
            state = "failure"
            reason = "no response"
        else:
            status = response.get('response_code', -1)
            if status != HTTP_OK:
                state = "failure"
                reason = state
            else:
                state = "success"
    except Exception as err:
        state = "failure"
        reason = str(err)
    result = {"state": state, "reason": reason}
    log.info(result)
    return result
def test_scan_file_stream(self):
    """Smoke-test scan_file with the EICAR payload.

    Fixed: Python-2-only ``print`` statement replaced with the
    ``print()`` function for consistency with the other tests.
    """
    vt = PublicApi(API_KEY)
    try:
        print(json.dumps(vt.scan_file(EICAR), sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def submit_hash_to_vt(usr_input):
    """Look *usr_input* (a file hash) up on VirusTotal and print a
    JIRA-formatted ({quote}/{code}) summary of the report.

    Fixed: all Python-2-only ``print`` statements converted to the
    ``print()`` function; output text is unchanged.
    """
    print("[*] Submiting " + usr_input + " to Virustotal...")
    vt = VirusTotalPublicApi(API_KEY)
    response = vt.get_file_report(usr_input)
    print("[*] Waiting for Report...")
    # Round-trip through JSON to get a plain-dict view of the response.
    json_scan_report1 = json.dumps(response, sort_keys=False, indent=4)
    json_scan_report2 = json.loads(json_scan_report1)
    scan_date = json_scan_report2['results']['scan_date']
    scan_id = json_scan_report2['results']['scan_id']
    sha1 = json_scan_report2['results']['sha1']
    sha256 = json_scan_report2['results']['sha256']
    positives = json_scan_report2['results']['positives']
    total = json_scan_report2['results']['total']
    permalink = json_scan_report2['results']['permalink']
    print('')
    print('{quote}')
    print('*Results:* \t' + str(positives) + ' | ' + str(total))
    print('*Scan Date:* \t' + str(scan_date))
    print('*SHA1:* \t' + sha1)
    print('*SHA256:* \t' + sha256)
    print('*Scan ID:* \t' + scan_id)
    print('*Permalink:* \n' + '{code}' + permalink + '{code}')
    print('{quote}')
    print("")
def validate_virus_total_account():
    """Validate each API key in VIRUSTOTAL_ACCOUNT by fetching the EICAR
    report, and mark every account dict with a 'valid' flag.

    Fixed: the identical 4xx and 5xx branches are merged, the bare
    ``except:`` is narrowed to ``Exception`` so interrupts propagate,
    and the commented-out MD5/SHA1 lines are removed.
    """
    normal("<*> VirusTotal API KEY Validation")
    EICAR = "X5O!P%@AP[4\PZX54(P^)7CC)7}$EICAR-STANDARD-ANTIVIRUS-TEST-FILE!$H+H*".encode(
        'utf-8')
    EICAR_SHA256 = hashlib.sha256(EICAR).hexdigest()
    for account in VIRUSTOTAL_ACCOUNT:
        vt = PublicApi(account['apikey'])
        try:
            vt_response = vt.get_file_report(EICAR_SHA256)
            normal("email : %s" % account['email'])
            normal("apikey : %s" % account['apikey'])
            # 4xx (rejected key) and 5xx (service error) both mean invalid.
            if int(vt_response['response_code'] / 100) in (4, 5):
                account['valid'] = False
                warning("valid : False")
                critical(vt_response['error'])
            else:
                account['valid'] = True
                messageBold("valid : True")
        except Exception:
            # Best-effort: skip accounts whose validation blew up.
            pass
def test_hash_not_found(self):
    """Smoke-test get_file_report with a well-formed but unknown hash."""
    api = PublicApi(API_KEY)
    try:
        report = api.get_file_report('A' * 32)
        print(json.dumps(report, sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def _lookup_iocs(self):
    """Caches the VirusTotal file report for every tracked IOC."""
    client = PublicApi(self._api_key)
    for ioc in self._all_iocs:
        self._threat_info_by_iocs[ioc] = client.get_file_report(ioc)
        # Public API rate limit: pause between requests.
        sleep(15)
def query_virus_total_db(self, md5_hash, file_name):
    """Fetch the VirusTotal file report for this object's stored hash.

    NOTE(review): the ``md5_hash`` and ``file_name`` parameters are
    accepted but IGNORED — the lookup and the error message use
    ``self.md5_hash`` and ``self.file_name`` instead.  Confirm whether
    the parameters or the instance attributes are the intended source
    before changing either.

    Returns the raw report dict, or None when the query raised (the
    error is logged and swallowed).
    """
    try:
        virus_total_instance = VirusTotalPublicApi(API_KEY)
        return virus_total_instance.get_file_report(self.md5_hash)
    except Exception as e:
        logging.error(
            f'Unable to query VirusTotal repository for {self.file_name} got Error {e}'
        )
def send_request(file, from_disk, original_path):
    """Upload *file* to VirusTotal, poll until its report is ready and
    whitelist *original_path* when the detection ratio is below 85%.

    file: payload accepted by PublicApi.scan_file (path or stream,
        depending on *from_disk*).
    original_path: the path recorded in logs and the whitelist.
    Returns 0.

    NOTE(review): loop nesting was reconstructed from a collapsed
    source — the "lost in queue" rescan branch is placed inside the
    report-polling loop (its ``break`` requires an enclosing loop);
    confirm against the original layout.
    """
    logger.add(" " * 10)
    logger.add("File {0}".format(original_path))
    api_key = str(config['Main']['ApiKey'])
    vt = VirusTotalPublicApi(api_key)
    # 000 is just 0 — sentinel meaning "no HTTP response yet".
    scanned_file = {'response_code': 000}
    report = {'response_code': 000}
    # Keep submitting until the API answers with HTTP 200.
    while scanned_file['response_code'] != 200:
        scanned_file = json.loads(
            json.dumps(vt.scan_file(this_file=file, from_disk=from_disk),
                       sort_keys=True, indent=4))
        try:
            logger.add("Response: scan_file: {0}".format(
                scanned_file['results']['verbose_msg']))
        except KeyError:
            logger.add("Error in scan_file: {0}".format(scanned_file))
            pass
    # Poll for the report; HTTP 204 means rate-limited, so back off 15s.
    while report['response_code'] != 200:
        report = vt.get_file_report(scanned_file['results']['resource'])
        if report['response_code'] == 204:
            logger.add("Sleeping for 15 seconds: {0}".format(report))
            sleep(15)
            continue
        try:
            logger.add("Response: get_file_report: {0} & {1}".format(
                report['response_code'], report['results']['response_code']))
        except KeyError:
            logger.add("Error in get_file_report: {0}".format(report))
            pass
        # HTTP 200 but inner response_code != 1 -> VT lost the submission.
        if report['response_code'] == 200 and report['results'][
                'response_code'] != 1:
            logger.add("API bad Response. File lost in queue, rescanning")
            check_one_file_on_virustotal([file])
            break
    try:
        # Percentage of engines that flagged the file.
        ratio = round(
            report['results']['positives'] / report['results']['total'] * 100,
            2)
        logger.add("Positives: {0} , Total {1}".format(
            report['results']['positives'], report['results']['total']))
        logger.add("Ratio is: {0}".format(ratio))
        # Below 85% detections the original path is whitelisted.
        if ratio < 85.0:
            add_to_whitelist(original_path)
    except KeyError:
        pass
    return 0
def processZipFile(filename):
    """Extract files from a ZIP archive and test them against VT"""
    zf = zipfile.ZipFile(filename)
    for f in zf.namelist():
        try:
            data = zf.read(f)
        except KeyError:
            # Member vanished or is unreadable: abort the whole archive.
            writeLog("Cannot extract %s from zip file %s" % (f, filename))
            return
        # Dump the extracted member into the (date-based) dump directory.
        fp = open(os.path.join(generateDumpDirectory(args.directory), f), 'wb')
        fp.write(data)
        fp.close()
        md5 = hashlib.md5(data).hexdigest()
        # Skip members we have already processed (tracked in SQLite).
        if dbMD5Exists(md5):
            writeLog("DEBUG: MD5 %s exists" % md5)
            continue
        writeLog("DEBUG: Extracted MD5 %s from Zip" % md5)
        vt = VirusTotalPublicApi(config['apiKey'])
        response = vt.get_file_report(md5)
        writeLog("DEBUG: VT Response received")
        if config['esServer']:
            # Save results to Elasticsearch (best-effort).
            try:
                response['@timestamp'] = time.strftime("%Y-%m-%dT%H:%M:%S+01:00")
                res = es.index(index=config['esIndex'],
                               doc_type="VTresult",
                               body=json.dumps(response))
            except:
                writeLog("Cannot index to Elasticsearch")
        writeLog("DEBUG: Step1")
        # DEBUG: keep a raw copy of the VT response.
        fp = open('/tmp/vt.debug', 'a')
        fp.write(json.dumps(response, sort_keys=False, indent=4))
        fp.close()
        writeLog("DEBUG: Step1: %s" % response['results']['response_code'])
        if response['response_code'] == 200:
            if response['results']['response_code']:
                # Known file: log its current score.
                positives = response['results']['positives']
                total = response['results']['total']
                scan_date = response['results']['scan_date']
                writeLog('File: %s (%s) Score: %s/%s Scanned: %s (%s)' %
                         (f, md5, positives, total, scan_date,
                          timeDiff(scan_date)))
            else:
                # Unknown file: submit it for scanning.
                submit2vt(os.path.join(generateDumpDirectory(args.directory), f))
                writeLog('File: %s (%s) not found, submited for scanning' %
                         (f, md5))
            dbAddMD5(md5, f)
        else:
            writeLog('VT Error: %s' % response['error'])
    # Analyze OLE documents if API is available
    parseOLEDocument(os.path.join(generateDumpDirectory(args.directory), filename))
    return
def test_sha256_hash(self):
    """Smoke-test get_file_report with the EICAR SHA-256.

    Fixed: Python-2-only ``print`` statement replaced with the
    ``print()`` function for consistency with the other tests.
    """
    vt = PublicApi(API_KEY)
    try:
        print(json.dumps(vt.get_file_report(EICAR_SHA256),
                         sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def analize_hash(nhash: str) -> json:
    """
    Ask the VT API for a report on the hash *nhash* and return it as a
    JSON string.
    :param nhash:
    :return:
    """
    client = VirusTotalPublicApi(VIRUSTOTAL_API_KEY)
    report = client.get_file_report(nhash)
    return json.dumps(report, sort_keys=False)
def analize_url(url: str) -> json:
    """
    Submit *url* to the VT API for scanning and return the raw response
    serialized as a JSON string.
    :param url:
    :return:
    """
    client = VirusTotalPublicApi(VIRUSTOTAL_API_KEY)
    report = client.scan_url(url)
    return json.dumps(report, sort_keys=False)
def url_report(url: str) -> json:
    """
    Fetch the VT report of a previously-scanned *url* and return it
    serialized as a JSON string.
    :param url:
    :return:
    """
    client = VirusTotalPublicApi(VIRUSTOTAL_API_KEY)
    report = client.get_url_report(url)
    return json.dumps(report, sort_keys=False)
def test_get_domain_report(self):
    """Smoke-test get_domain_report against a live domain.

    Fixed: Python-2-only ``print`` statement replaced with ``print()``.
    """
    vt = PublicApi(API_KEY)
    try:
        print(json.dumps(vt.get_domain_report('www.wired.com'),
                         sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def test_get_ip_report(self):
    """Smoke-test get_ip_report against a live IP address.

    Fixed: Python-2-only ``print`` statement replaced with ``print()``.
    """
    vt = PublicApi(API_KEY)
    try:
        print(json.dumps(vt.get_ip_report('23.6.113.133'),
                         sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def test_hash_not_found(self):
    """Smoke-test get_file_report with a well-formed but unknown hash.

    Fixed: Python-2-only ``print`` statement replaced with ``print()``.
    """
    vt = PublicApi(API_KEY)
    try:
        print(json.dumps(vt.get_file_report('A' * 32),
                         sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def test_scan_file_binary(self):
    """Smoke-test scan_file with an on-disk binary.

    Fixed: Python-2-only ``print`` statement replaced with ``print()``.
    """
    vt = PublicApi(API_KEY)
    try:
        print(json.dumps(vt.scan_file('test.exe'),
                         sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def test_md5_hash(self):
    """Smoke-test get_file_report with the EICAR MD5."""
    api = PublicApi(API_KEY)
    try:
        report = api.get_file_report(EICAR_MD5)
        print(json.dumps(report, sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def test_scan_url(self):
    """Smoke-test scan_url against a live URL.

    Fixed: Python-2-only ``print`` statement replaced with ``print()``.
    """
    vt = PublicApi(API_KEY)
    try:
        print(json.dumps(vt.scan_url('www.wired.com'),
                         sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def test_scan_file_binary(self):
    """Smoke-test scan_file with an on-disk binary sample.

    Fixed: removed a stray argument-less ``vt.scan_file()`` call that
    sat before the try block — it raised TypeError (missing required
    argument) outside the except handler and made the test error out
    unconditionally.
    """
    vt = PublicApi(API_KEY)
    try:
        print(
            json.dumps(vt.scan_file('virus_total_apis/test/test.exe'),
                       sort_keys=False,
                       indent=4))
    except Exception as e:
        self.fail(e)
def test_rescan_file(self):
    """Smoke-test rescan_file with the EICAR MD5."""
    api = PublicApi(API_KEY)
    try:
        report = api.rescan_file(EICAR_MD5)
        print(json.dumps(report, sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def test_hash_bad_input(self):
    """get_file_report should not raise for malformed resources."""
    api = PublicApi(API_KEY)
    try:
        # Same inputs, same order, as individual calls would produce.
        for bad_resource in ('This is not a hash', None, False, -1):
            print(json.dumps(api.get_file_report(bad_resource),
                             sort_keys=False, indent=4))
    except Exception as e:
        self.fail(e)
def get_result(API_KEY, HASH, full=False):
    """Return the VirusTotal detection summary for HASH.

    full=True returns the raw response dict; otherwise a dict with just
    'positives' and 'total' (empty strings when the report lacks them).

    Fixed: the bare ``except:`` is narrowed to the lookup errors an
    incomplete/absent report actually produces.
    """
    vt = VirusTotalPublicApi(API_KEY)
    response = vt.get_file_report(HASH)
    if full:
        return response
    try:
        return {
            "positives": response['results']['positives'],
            "total": response['results']['total']
        }
    except (KeyError, TypeError):
        return {
            "positives": "",
            "total": ""
        }
def vt_url(input):
    """Render the VT URL-report page for the comma-separated targets in
    *input* (only the first target's report is rendered)."""
    client = VirusTotalPublicApi("87ab79d0a21d9a7ae5c5558969c7d6b38defa1901b77d27796ae466b3823c776")
    try:
        targets = [item.strip() for item in input.split(",")]
        for ip in targets:
            results = client.get_url_report(ip).get("results")
            # Defang the echoed URL so it is not clickable in the page.
            defanged = results.get("url").replace(":", "[:]").replace(".", "[.]")
            return render_template(
                "vt-url.html",
                url_request=defanged,
                scan_date=results.get("scan_date"),
                positives=results.get("positives"),
                total=results.get("total"),
                link=results.get("permalink"),
            )
    except Exception as e:
        return render_template("vt-url.html", text="Error: Please try again.")
def vt_hash(input):
    """Render the VT hash-report page for the comma-separated hashes in
    *input* (only the first hash's report is rendered)."""
    client = VirusTotalPublicApi("87ab79d0a21d9a7ae5c5558969c7d6b38defa1901b77d27796ae466b3823c776")
    try:
        hashes = [item.strip() for item in input.split(",")]
        for file_hash in hashes:
            results = client.get_file_report(file_hash).get("results")
            return render_template(
                "vt-hash.html",
                sd=results.get("scan_date"),
                pos=results.get("positives"),
                total=results.get("total"),
                md5=results.get("md5"),
                sha1=results.get("sha1"),
                link=results.get("permalink"),
            )
    except Exception as e:
        return render_template("vt-hash.html", text="Error: Please try again.")
def link_to_virustotal(link, pkt):
    '''
    IN CASE WE FOUND GET link, WE SCAN IT
    '''
    # Python 2 code (print statements). Submits the URL and busy-polls
    # the report until VT says the scan finished.
    print 'SCANNING %s'%link
    virus_total_instance = PublicApi('2e1d7b6e998ed0a9830269571ecffa110e41dd8bf34b88ad41e40b4351165d18')
    REQ = virus_total_instance.scan_url(link)
    print 'Waiting for virustotal'
    while True:
        if 'Scan finished' in str(virus_total_instance.get_url_report(link)):
            print 'Scan finished!'
            REP = virus_total_instance.get_url_report(link)['results']['positives']
            break
        else:
            # NOTE(review): tight loop with no sleep — hammers the API
            # and will hit the public rate limit; consider a backoff.
            print 'Naaa not yet'
    # Zero positives -> accept the packet, otherwise drop it.
    if REP == '0' or REP == 0:
        print 'SCANNED %s - VERDICT OK [REP=%s]'%(link,REP)
        pkt.accept()
    else:
        print 'SCANNED %s - VERDICT KO [REP=%s]'%(link,REP)
        pkt.drop()
    # NOTE(review): the trailing ''' below opens a string that is not
    # terminated within this block — it presumably comments out code
    # that follows in the original file.  Confirm intent before removing.
    '''
def submit2vt(filename):
    """Submit a new file to VT for scanning.

    Keeps a raw debug copy of the response and (optionally) indexes the
    result into Elasticsearch.

    Fixed: the debug file is managed with a context manager, and the
    bare ``except:`` around the ES indexing is narrowed to ``Exception``
    so KeyboardInterrupt/SystemExit still propagate.
    """
    # Check VT score
    vt = VirusTotalPublicApi(config['apiKey'])
    response = vt.scan_file(filename)
    # DEBUG: keep a raw copy of every VT response for troubleshooting
    with open('/tmp/vt.debug', 'a') as fp:
        fp.write(json.dumps(response, sort_keys=False, indent=4))
    if config['esServer']:
        # Save results to Elasticsearch (best-effort).
        try:
            response['@timestamp'] = time.strftime("%Y-%m-%dT%H:%M:%S+01:00")
            res = es.index(index=config['esIndex'],
                           doc_type="VTresult",
                           body=json.dumps(response))
        except Exception:
            writeLog("Cannot index to Elasticsearch")
    return
def main(virus_key):
    """Walk the path given on the command line, run ClamAV and hashing
    on every file found and, when a VT key is configured, attach the
    VirusTotal report (looked up by MD5).  Returns {path: result_dict}.
    """
    ret_val = dict()
    targets = []
    starting_point = sys.argv[1]
    vt_client = None
    if virus_key != 'UNCONFIGURED':
        vt_client = VirusTotalPublicApi(virus_key)
    # directory: collect every file beneath it
    if os.path.isdir(starting_point):
        #print('directory',starting_point)
        for root, _, filenames in os.walk(starting_point):
            for filename in fnmatch.filter(filenames, '*'):
                targets.append(os.path.join(root, filename))
    # single file
    if os.path.isfile(starting_point):
        #print('file',starting_point)
        targets.append(starting_point)
    for target in targets:
        entry = {}
        entry['clamav_results'] = av_results(target).split(':')[-1].strip()
        entry['hash_results'] = hash_results(target)
        if vt_client is not None:
            entry['virustotal_report'] = vt_client.get_file_report(
                entry['hash_results']['md5'])
        else:
            entry['virustotal_report'] = None
        ret_val[target] = entry
    return ret_val
def virusTotalExtractor(fpath):
    """Return {"positives": n, "connection": bool} for the file at
    *fpath*, looked up on VirusTotal by the hash prelim() computes.

    Fixed: the offline branch returned the misspelled key "postitves",
    so callers reading "positives" got a KeyError; both branches now use
    "positives".  Also removed the unused JSON-serialization local.
    """
    connected = internet_on()
    file_md5 = prelim(fpath)
    if connected:
        vt = VirusTotalPublicApi(API_KEY)
        response = vt.get_file_report(file_md5)
        return {"positives": response["results"]["positives"],
                "connection": True}
    else:
        print("Internet Connection Not Found")
        return {"positives": 0,
                "connection": False}
def __init__(self, config, log):
    """Read the daemon configuration file and initialise SMTP
    credentials, the VirusTotal client, watched folders and logging.

    config/log: file paths; ``~`` and symlinks are expanded.
    The config file is read line-by-line in this fixed order:
    host, port, username, password, from address, to address,
    VT API key, download folder, storage folder.
    """
    if not isfile(realpath(expanduser(config))):
        print('Run \'sh configure.sh\' first!')
        exit()
    else:
        with open(realpath(expanduser(config)), 'r') as handler:
            self.host = handler.readline().strip('\n')
            self.port = int(handler.readline().strip('\n'))
            self.username = handler.readline().strip('\n')
            self.password = handler.readline().strip('\n')
            self.from_ = handler.readline().strip('\n')
            self.to = handler.readline().strip('\n')
            self.vt = PublicApi(handler.readline().strip('\n'))
            self.dlfolder = realpath(expanduser(handler.readline().strip('\n')))
            self.stfolder = realpath(expanduser(handler.readline().strip('\n')))
            handler.close()  # redundant inside `with`, kept as-is
        # PID file lives in the storage folder.
        Daemon.__init__(self, join(self.stfolder, "pidfile"))
        logging.basicConfig(filename=realpath(expanduser(log)),
                            format='%(asctime)s %(message)s',
                            datefmt='%m/%d/%Y %I:%M:%S %p',
                            level=logging.INFO)
class App(Daemon):
    """Daemon that watches a download folder, hashes new files, keeps a
    registry of known hashes, submits unknown files to VirusTotal and
    mails the scan reports.

    NOTE(review): the line breaks inside the triple-quoted email
    templates were reconstructed from a collapsed source — verify them
    against the original layout.
    """

    def __init__(self, config, log):
        """Read the daemon configuration file (fixed line order: host,
        port, username, password, from, to, VT API key, download folder,
        storage folder) and initialise the Daemon base and logging."""
        if not isfile(realpath(expanduser(config))):
            print('Run \'sh configure.sh\' first!')
            exit()
        else:
            with open(realpath(expanduser(config)), 'r') as handler:
                self.host = handler.readline().strip('\n')
                self.port = int(handler.readline().strip('\n'))
                self.username = handler.readline().strip('\n')
                self.password = handler.readline().strip('\n')
                self.from_ = handler.readline().strip('\n')
                self.to = handler.readline().strip('\n')
                self.vt = PublicApi(handler.readline().strip('\n'))
                self.dlfolder = realpath(expanduser(handler.readline().strip('\n')))
                self.stfolder = realpath(expanduser(handler.readline().strip('\n')))
                handler.close()  # redundant inside `with`, kept as-is
            Daemon.__init__(self, join(self.stfolder, "pidfile"))
            logging.basicConfig(filename=realpath(expanduser(log)),
                                format='%(asctime)s %(message)s',
                                datefmt='%m/%d/%Y %I:%M:%S %p',
                                level=logging.INFO)

    def run(self):
        """Main daemon loop: poll the download folder, hash new files,
        archive them into a per-date folder and write a VT report for
        each."""
        # Touch known_hashes
        if not isdir(self.stfolder):
            makedirs(self.stfolder)
        open(join(self.stfolder, "known_hashes"), "a").close()
        # Read out known_hashes; each line is "<sha256> > <rel path>"
        hash_handler = open(join(self.stfolder, "known_hashes"), "r+")
        hashlist = hash_handler.read().split("\n")
        hashlist = {i[:64]: i[67:] for i in hashlist}
        while True:
            dlcontent = listdir(self.dlfolder)
            # Rescan all 60 seconds for new files
            if not dlcontent:
                sleep(60)
                continue
            today = date.today()
            year = today.year
            month = today.month
            day = today.day
            datestring = str(year)+"-"+str(month)+"-"+str(day)
            for i in dlcontent:
                # Check if download of file hash finished
                # (size must be stable for one second)
                size = stat(join(self.dlfolder, i)).st_size
                sleep(1)
                if stat(join(self.dlfolder, i)).st_size != size:
                    continue
                with open(join(self.dlfolder, i), "rb") as h:
                    # Hash file in dl folder and look if it's known
                    hashit = sha256()
                    hashit.update(h.read())
                if hashit.hexdigest() in hashlist.keys():
                    # Just remove already known files
                    logging.info("Found already known file "+hashlist[hashit.hexdigest()])
                    remove(join(self.dlfolder, i))
                else:
                    # Save and scan unknown files and remember the hash
                    hashdigest = hashit.hexdigest()
                    hashlist[hashdigest] = join(datestring, i)
                    hash_handler.write(hashdigest+" > "+join(datestring, i)+"\n")
                    hash_handler.flush()
                    if not isdir(join(self.stfolder, datestring)):
                        makedirs(join(self.stfolder, datestring))
                    move(join(self.dlfolder, i), join(self.stfolder, datestring, i))
                    response = self.scan(hashdigest, join(self.stfolder, datestring, i), today)
                    if not isdir(join(self.stfolder, "reports", datestring)):
                        makedirs(join(self.stfolder, "reports", datestring))
                    open(join(self.stfolder, "reports", datestring, i), "a").close()
                    with open(join(self.stfolder, "reports", datestring, i), "r+") as report:
                        report.write(dumps(response, sort_keys=False, indent=4))
                        report.flush()

    def scan(self, hashdigest, filepath, today):
        """Fetch (or trigger) the VT report for *hashdigest*, mail it and
        return it.  Sleeps through API rate limits (HTTP 204)."""
        scan_flag = True
        while True:
            # First look if file is known to VirusTotal
            response = self.vt.get_file_report(hashdigest)
            if response["response_code"] == 204:
                logging.info("Submission limit reached. I'll sleep for 60 seconds")
                sleep(60)
            elif response["results"]["response_code"] == 1:
                # Rescan needed?
                #scan_date = datetime.strptime(response["results"]["scan_date"][:10],
                #"%Y-%m-%d")
                #if abs((today-scan_date).days) >= 30:
                #self.vt.rescan_file(hashdigest)
                #continue
                # Send report for unknown file
                msg = """From: %s
To: %s
Subject: Virustotal report

%s""" % (self.from_, self.to, dumps(response, sort_keys=False, indent=4))
                self.send(msg)
                logging.info("Sent report for "+filepath)
                return response
            else:
                # Submit the unknown file
                if scan_flag:
                    # Workaround for download bug
                    # Another test for unfinished downloads
                    # Sadly as the file is already moved
                    # the file is lost for analysis :(
                    with open(filepath, "rb") as h:
                        hashit = sha256()
                        hashit.update(h.read())
                    if hashit.hexdigest() != hashdigest:
                        logging.info("File for submission has another hash as in download folder!")
                        logging.info("Filepath is %s" % (filepath))
                        break
                    response = self.vt.scan_file(filepath)
                    msg = """From: %s
To: %s
Subject: Virustotal submit

Submitted unknown file %s with hash %s for scan.

%s""" % (self.from_, self.to, filepath, hashdigest, dumps(response, sort_keys=False, indent=4))
                    self.send(msg)
                    logging.info("Submitted unknown file "+filepath+" with hash "+hashdigest+" for scan")
                    logging.info("I will sleep know for 60 seconds and try to receive the result after that")
                    sleep(60)
                    scan_flag = False
                else:
                    logging.info("Scan seems not finished. Will sleep for another 30 seconds")
                    sleep(30)

    def send(self, msg):
        """Send *msg* through the configured SMTP server via STARTTLS."""
        smtp = smtplib.SMTP(self.host, self.port)
        smtp.starttls()
        smtp.login(self.username, self.password)
        smtp.sendmail(self.from_, self.to, msg)
def main():
    """Entry point: read an email from STDIN, unpack its MIME parts into
    a dump directory, extract URLs from text parts and check every
    interesting attachment against virustotal.com.

    Python 2 code (ConfigParser, ``ur''`` regex literal).  Uses the
    module-level globals declared below.
    """
    global args
    global config
    global es
    global verbose
    parser = argparse.ArgumentParser(
        description = 'Unpack MIME attachments from a file and check them against virustotal.com')
    parser.add_argument('-d', '--directory',
        dest = 'directory',
        help = 'directory where files will be extracted (default: /tmp) %%d,%%m,%%y can use used for dynamic names',
        metavar = 'DIRECTORY')
    parser.add_argument('-v', '--verbose',
        action = 'store_false',
        dest = 'verbose',
        help = 'verbose output',
        default = False)
    parser.add_argument('-c', '--config',
        dest = 'config_file',
        help = 'configuration file (default: /etc/mime2vt.conf)',
        metavar = 'CONFIG')
    parser.add_argument('-l', '--log',
        dest = 'dump_file',
        help = 'mail dump file (default /tmp/message.dump)',
        metavar = 'DUMPFILE')
    args = parser.parse_args()
    # Default values
    if not args.directory:
        args.directory = '/tmp'
    if not args.config_file:
        args.config_file = '/etc/mime2vt.conf'
    #writeLog('DEBUG: config_file = %s' % args.config_file)
    try:
        c = ConfigParser.ConfigParser()
        c.read(args.config_file)
        config['apiKey'] = c.get('virustotal', 'apikey')
        excludetypes = c.get('virustotal', 'exclude').split(',')
        # Elasticsearch config
        config['esServer'] = c.get('elasticsearch', 'server')
        config['esIndex'] = c.get('elasticsearch', 'index')
        config['dbPath'] = c.get('database', 'dbpath')
    except OSError as e:
        writeLog('Cannot read config file %s: %s' % (args.config_file, e.errno))
        # NOTE(review): bare `exit` is never CALLED (missing parentheses),
        # so execution continues with a partial config after this error.
        exit
    if config['esServer']:
        logging.basicConfig()
        es = Elasticsearch([config['esServer']])
    # Create the SQLite DB
    dbCreate()
    # Read the mail flow from STDIN
    data = "" . join(sys.stdin)
    msg = email.message_from_string(data)
    mailheaders = parseMailheaders(data)
    if args.dump_file:
        try:
            fp = open(args.dump_file, 'a')
        except OSError as e:
            writeLog('Cannot dump message to %s: %s' % (args.dump_file, e.errno))
        fp.write(data)
        fp.close()
    # Process MIME parts
    for part in msg.walk():
        contenttype = part.get_content_type()
        filename = part.get_param('name')
        writeLog("DEBUG: Found data: %s (%s)" % (contenttype, filename))
        data = part.get_payload(None, True)
        if data:
            md5 = hashlib.md5(data).hexdigest()
            # Skip attachments we have already processed.
            if dbMD5Exists(md5):
                writeLog("Skipping existing MD5 %s" % md5)
                continue
            # New: Extract URLS
            if contenttype in [ 'text/html', 'text/plain' ]:
                urls = []
                # Source: https://gist.github.com/uogbuji/705383
                GRUBER_URLINTEXT_PAT = re.compile(ur'(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:\'".,<>?\xab\xbb\u201c\u201d\u2018\u2019]))')
                lines = data.split('\n')
                for line in lines:
                    try:
                        #urls.append(re.search("(?P<url>https?://[^\s]+)", word).group("url"))
                        for url in GRUBER_URLINTEXT_PAT.findall(line):
                            if url[0]:
                                urls.append(url[0])
                    except:
                        pass
                fp = open('/var/tmp/urls.log', 'a')
                for url in urls:
                    fp.write("%s\n" % url)
                fp.close()
            # Process only interesting files
            # if contenttype not in ('text/plain', 'text/html', 'image/jpeg', 'image/gif', 'image/png'):
            if contenttype not in excludetypes:
                if not filename:
                    filename = md5
                # Give the dump file a sensible extension.
                mime_ext = mimetypes.guess_extension(contenttype)
                if not mime_ext:
                    # Use a generic bag-of-bits extension
                    mime_ext = '.bin'
                f_name, f_ext = os.path.splitext(filename)
                if not f_ext:
                    filename += mime_ext
                writeLog('Found interesting file: %s (%s)' % (filename, contenttype))
                fp = open(os.path.join(generateDumpDirectory(args.directory), filename), 'wb')
                fp.write(data)
                fp.close()
                if contenttype in ['application/zip', 'application/x-zip-compressed']:
                    # Process ZIP archive
                    writeLog('Processing zip archive: %s' % filename)
                    processZipFile(os.path.join(generateDumpDirectory(args.directory), filename))
                else:
                    # Check VT score
                    vt = VirusTotalPublicApi(config['apiKey'])
                    response = vt.get_file_report(md5)
                    # Save results to Elasticsearch (best-effort).
                    if config['esServer']:
                        try:
                            response['@timestamp'] = time.strftime("%Y-%m-%dT%H:%M:%S+01:00")
                            response['filename'] = filename
                            response['mail'] = mailheaders
                            res = es.index(index=config['esIndex'],
                                           doc_type="VTresult",
                                           body=json.dumps(response))
                        except:
                            writeLog("Cannot index to Elasticsearch")
                    # DEBUG: keep a raw copy of the VT response.
                    fp = open('/tmp/vt.debug', 'a')
                    fp.write(json.dumps(response, sort_keys=False, indent=4))
                    fp.close()
                    if response['response_code'] == 200:
                        if response['results']['response_code']:
                            # Known file: log its current score.
                            positives = response['results']['positives']
                            total = response['results']['total']
                            scan_date = response['results']['scan_date']
                            writeLog('File: %s (%s) Score: %s/%s Scanned: %s (%s)' %
                                     (filename, md5, positives, total,
                                      scan_date, timeDiff(scan_date)))
                        else:
                            # Unknown file: submit it for scanning.
                            submit2vt(os.path.join(generateDumpDirectory(args.directory), filename))
                            writeLog('File: %s (%s) not found, submited for scanning' %
                                     (filename, md5))
                        dbAddMD5(md5, filename)
                    else:
                        writeLog('VT Error: %s' % response['error'])
                    # Analyze OLE documents if API is available
                    parseOLEDocument(os.path.join(generateDumpDirectory(args.directory), filename))
def ajax_handler(request, command):
    """Dispatch an AJAX request from volutility.js to the matching handler.

    :param request: Django request; each branch reads its arguments from
                    ``request.POST``.
    :param command: name of the action to perform (e.g. 'runplugin', 'yara').
    :return: an HttpResponse or a rendered template fragment.
    """
    if command == 'pollplugins':
        if 'session_id' in request.POST:
            session_id = request.POST['session_id']
            plugin_rows = db.get_pluginbysession(ObjectId(session_id))
            return render(request, 'plugin_poll.html', {'plugin_output': plugin_rows})
        else:
            return HttpResponseServerError

    if command == 'dropplugin':
        if 'plugin_id' in request.POST:
            plugin_id = request.POST['plugin_id']
            # Reset the plugin record so it can be re-run from scratch.
            new_values = {'created': None, 'plugin_output': None, 'status': None}
            db.update_plugin(ObjectId(plugin_id), new_values)
            return HttpResponse('OK')

    if command == 'runplugin':
        if 'plugin_id' in request.POST and 'session_id' in request.POST:
            plugin_name = run_plugin(request.POST['session_id'], request.POST['plugin_id'])
            return HttpResponse(plugin_name)

    if command == 'plugin_dir':
        # Platform PATH seperator
        seperator = ':'
        if sys.platform.startswith('win'):
            seperator = ';'
        # Set Plugins
        if 'plugin_dir' in request.POST:
            plugin_dir = request.POST['plugin_dir']
            if os.path.exists(volrc_file):
                with open(volrc_file, 'a') as out:
                    output = '{0}{1}'.format(seperator, plugin_dir)
                    out.write(output)
                return HttpResponse(' No Plugin Path Provided')
            else:
                # Create new file.
                with open(volrc_file, 'w') as out:
                    output = '[DEFAULT]\nPLUGINS = {0}'.format(plugin_dir)
                    out.write(output)
                return HttpResponse(' No Plugin Path Provided')
        else:
            return HttpResponse(' No Plugin Path Provided')

    if command == 'filedetails':
        if 'file_id' in request.POST:
            file_id = request.POST['file_id']
            file_object = db.get_filebyid(ObjectId(file_id))
            file_datastore = db.search_datastore({'file_id': ObjectId(file_id)})
            # Merge any stored analysis results for this file.
            file_meta = {'vt': None, 'string_list': None, 'yara': None}
            for row in file_datastore:
                if 'vt' in row:
                    file_meta['vt'] = row['vt']
                if 'string_list' in row:
                    file_meta['string_list'] = row['string_list']
                if 'yara' in row:
                    file_meta['yara'] = row['yara']
            return render(request, 'file_details.html', {'file_details': file_object,
                                                         'file_id': file_id,
                                                         'file_datastore': file_meta})

    if command == 'hivedetails':
        # BUGFIX: was "if 'plugin_id' and 'rowid' in request.POST" which only
        # tests 'rowid' — the string literal 'plugin_id' is always truthy.
        if 'plugin_id' in request.POST and 'rowid' in request.POST:
            pluginid = request.POST['plugin_id']
            rowid = request.POST['rowid']
            plugin_details = db.get_pluginbyid(ObjectId(pluginid))
            key_name = 'hive_keys_{0}'.format(rowid)
            if key_name in plugin_details:
                # Cached result from an earlier run.
                hive_details = plugin_details[key_name]
            else:
                session_id = plugin_details['session_id']
                session = db.get_session(session_id)
                plugin_data = plugin_details['plugin_output']
                for row in plugin_data['rows']:
                    if str(row[0]) == rowid:
                        hive_offset = str(row[1])
                # Run the plugin
                vol_int = RunVol(session['session_profile'], session['session_path'])
                hive_details = vol_int.run_plugin('hivedump', hive_offset=hive_offset)
                # update the plugin / session
                # BUGFIX: was ObjectId(ObjectId(pluginid)) — double wrap.
                new_values = {key_name: hive_details}
                db.update_plugin(ObjectId(pluginid), new_values)
                # Update the session
                new_sess = {}
                new_sess['modified'] = datetime.now()
                db.update_session(session_id, new_sess)
            return render(request, 'hive_details.html', {'hive_details': hive_details})

    if command == 'virustotal':
        if not VT_KEY or not VT_LIB:
            return HttpResponse("Unable to use Virus Total. No Key or Library Missing. Check the Console for details")
        if 'file_id' in request.POST:
            file_id = request.POST['file_id']
            file_object = db.get_filebyid(ObjectId(file_id))
            sha256 = file_object.sha256
            vt = PublicApi(API_KEY)
            response = vt.get_file_report(sha256)
            vt_fields = {}
            # Guard on the transport code too: 'results' may be absent/partial
            # when the HTTP request itself failed (matches each_with_type()).
            if response.get('response_code') == 200 and response['results']['response_code'] == 1:
                vt_fields['permalink'] = response['results']['permalink']
                vt_fields['total'] = response['results']['total']
                vt_fields['positives'] = response['results']['positives']
                vt_fields['scandate'] = response['results']['scan_date']
                # Store the results in datastore
                store_data = {}
                store_data['file_id'] = ObjectId(file_id)
                store_data['vt'] = vt_fields
                update = db.create_datastore(store_data)
            return render(request, 'file_details_vt.html', {'vt_results': vt_fields})

    if command == 'yara':
        # BUGFIX: initialise both names — previously unbound (NameError) when
        # either key was missing from the POST data.
        file_id = None
        rule_file = None
        if 'file_id' in request.POST:
            file_id = request.POST['file_id']
        if 'rule_file' in request.POST:
            rule_file = request.POST['rule_file']
        if rule_file and file_id and YARA:
            file_object = db.get_filebyid(ObjectId(file_id))
            file_data = file_object.read()
            if os.path.exists(rule_file):
                rules = yara.compile(rule_file)
                matches = rules.match(data=file_data)
                results = []
                for match in matches:
                    for item in match.strings:
                        results.append({'rule': match.rule, 'offset': item[0],
                                        'string': string_clean_hex(item[2])})
            else:
                return render(request, 'file_details_yara.html',
                              {'yara': None, 'error': 'Could not find Rule File'})
            if len(results) > 0:
                # Store the results in datastore
                store_data = {}
                store_data['file_id'] = ObjectId(file_id)
                store_data['yara'] = results
                update = db.create_datastore(store_data)
            # BUGFIX: previously only returned when matches were found, so an
            # empty result set made the view return None (Django 500).
            return render(request, 'file_details_yara.html', {'yara': results})
        else:
            return HttpResponse('Either No file ID or No Yara Rule was provided')

    if command == 'strings':
        if 'file_id' in request.POST:
            file_id = request.POST['file_id']
            file_object = db.get_filebyid(ObjectId(file_id))
            file_data = file_object.read()
            # Printable runs of 4+ chars (space, digits, letters, -.:).
            regexp = '[\x20\x30-\x39\x41-\x5a\x61-\x7a\-\.:]{4,}'
            string_list = re.findall(regexp, file_data)
            # Store the list in datastore
            store_data = {}
            store_data['file_id'] = ObjectId(file_id)
            store_data['string_list'] = string_list
            # Write to DB
            db.create_datastore(store_data)
            return render(request, 'file_details_strings.html', {'string_list': string_list})

    if command == 'dropsession':
        if 'session_id' in request.POST:
            session_id = ObjectId(request.POST['session_id'])
            db.drop_session(session_id)
            return HttpResponse('OK')

    if command == 'memhex':
        if 'session_id' in request.POST:
            session_id = ObjectId(request.POST['session_id'])
            session = db.get_session(session_id)
            mem_path = session['session_path']
            # BUGFIX: was "'start_offset' and 'end_offset' in request.POST".
            if 'start_offset' in request.POST and 'end_offset' in request.POST:
                try:
                    # base 0: accepts decimal or 0x-prefixed hex offsets.
                    start_offset = int(request.POST['start_offset'], 0)
                    end_offset = int(request.POST['end_offset'], 0)
                    hex_cmd = 'hexdump -C -s {0} -n {1} {2}'.format(start_offset, end_offset - start_offset, mem_path)
                    hex_output = hex_dump(hex_cmd)
                    return HttpResponse(hex_output)
                except Exception as e:
                    return HttpResponse(e)

    if command == 'memhexdump':
        if 'session_id' in request.POST:
            session_id = ObjectId(request.POST['session_id'])
            session = db.get_session(session_id)
            mem_path = session['session_path']
            # BUGFIX: was "'start_offset' and 'end_offset' in request.POST".
            if 'start_offset' in request.POST and 'end_offset' in request.POST:
                try:
                    start_offset = int(request.POST['start_offset'], 0)
                    end_offset = int(request.POST['end_offset'], 0)
                    # BUGFIX: file handle was never closed.
                    with open(mem_path, 'rb') as mem_file:
                        # Get to start
                        mem_file.seek(start_offset)
                        file_data = mem_file.read(end_offset - start_offset)
                    response = HttpResponse(file_data, content_type='application/octet-stream')
                    response['Content-Disposition'] = 'attachment; filename="{0}-{1}.bin"'.format(start_offset, end_offset)
                    return response
                except Exception as e:
                    logger.error('Error Getting hex dump: {0}'.format(e))

    if command == 'addcomment':
        html_resp = ''
        # BUGFIX: was "'session_id' and 'comment_text' in request.POST".
        if 'session_id' in request.POST and 'comment_text' in request.POST:
            session_id = request.POST['session_id']
            comment_text = request.POST['comment_text']
            comment_data = {'session_id': ObjectId(session_id),
                            'comment_text': comment_text,
                            'date_added': datetime.now()}
            db.create_comment(comment_data)
            # now return all the comments for the ajax update
            for comment in db.get_commentbysession(ObjectId(session_id)):
                html_resp += '<pre>{0}</pre>'.format(comment['comment_text'])
            return HttpResponse(html_resp)

    if command == 'searchbar':
        # BUGFIX: was "'search_type' and 'search_text' and 'session_id' in ...".
        if ('search_type' in request.POST and 'search_text' in request.POST
                and 'session_id' in request.POST):
            search_type = request.POST['search_type']
            search_text = request.POST['search_text']
            session_id = request.POST['session_id']
            if search_type == 'plugin':
                results = {'rows': []}
                results['columns'] = ['Plugin Name', 'View Results']
                rows = db.search_plugins(search_text, session_id=ObjectId(session_id))
                for row in rows:
                    results['rows'].append([row['plugin_name'], '<a href="#" onclick="ajaxHandler(\'pluginresults\', {{\'plugin_id\':\'{0}\'}}, false ); return false">View Output</a>'.format(row['_id'])])
                return render(request, 'plugin_output.html', {'plugin_results': results})
            elif search_type == 'hash':
                pass
            elif search_type == 'registry':
                pass
            elif search_type == 'vol':
                # Run a vol command and get the output
                # SECURITY: search_text is user-supplied and interpolated into
                # a shell command — command injection risk; sanitise upstream.
                vol_output = getoutput('vol.py {0}'.format(search_text))
                results = {'rows': [['<pre>{0}</pre>'.format(vol_output)]],
                           'columns': ['Volitlity Raw Output']}
                # Consider storing the output here as well.
                return render(request, 'plugin_output.html', {'plugin_results': results})
            else:
                return HttpResponse('No valid search query found.')

    if command == 'pluginresults':
        if 'plugin_id' in request.POST:
            plugin_id = ObjectId(request.POST['plugin_id'])
            plugin_results = plugin_output(plugin_id)
            return render(request, 'plugin_output.html', {'plugin_results': plugin_results})

    return HttpResponse('No valid search query found.')
def ajax_handler(request, command):
    """Dispatch an AJAX request from volutility.js to the matching handler.

    :param request: Django request; each branch reads its arguments from
                    ``request.POST``.
    :param command: name of the action to perform (e.g. 'yara-string', 'dottree').
    :return: an HttpResponse or a rendered template fragment.
    """
    if command == 'pollplugins':
        if 'session_id' in request.POST:
            session_id = request.POST['session_id']
            plugin_rows = db.get_pluginbysession(ObjectId(session_id))
            return render(request, 'plugin_poll.html', {'plugin_output': plugin_rows})
        else:
            return HttpResponseServerError

    if command == 'dropplugin':
        if 'plugin_id' in request.POST:
            plugin_id = request.POST['plugin_id']
            # Reset the plugin record so it can be re-run from scratch.
            new_values = {'created': None, 'plugin_output': None, 'status': None}
            db.update_plugin(ObjectId(plugin_id), new_values)
            return HttpResponse('OK')

    if command == 'runplugin':
        if 'plugin_id' in request.POST and 'session_id' in request.POST:
            plugin_name = run_plugin(request.POST['session_id'], request.POST['plugin_id'])
            return HttpResponse(plugin_name)

    if command == 'plugin_dir':
        # Platform PATH seperator
        seperator = ':'
        if sys.platform.startswith('win'):
            seperator = ';'
        # Set Plugins
        if 'plugin_dir' in request.POST:
            plugin_dir = request.POST['plugin_dir']
            if os.path.exists(volrc_file):
                with open(volrc_file, 'a') as out:
                    output = '{0}{1}'.format(seperator, plugin_dir)
                    out.write(output)
                return HttpResponse(' No Plugin Path Provided')
            else:
                # Create new file.
                with open(volrc_file, 'w') as out:
                    output = '[DEFAULT]\nPLUGINS = {0}'.format(plugin_dir)
                    out.write(output)
                return HttpResponse(' No Plugin Path Provided')
        else:
            return HttpResponse(' No Plugin Path Provided')

    if command == 'filedetails':
        if 'file_id' in request.POST:
            file_id = request.POST['file_id']
            file_object = db.get_filebyid(ObjectId(file_id))
            file_datastore = db.search_datastore({'file_id': ObjectId(file_id)})
            # Merge any stored analysis results for this file.
            file_meta = {'vt': None, 'string_list': None, 'yara': None}
            for row in file_datastore:
                if 'vt' in row:
                    file_meta['vt'] = row['vt']
                if 'string_list' in row:
                    file_meta['string_list'] = row['string_list']
                if 'yara' in row:
                    file_meta['yara'] = row['yara']
            yara_list = os.listdir('yararules')
            return render(request, 'file_details.html', {'file_details': file_object,
                                                         'file_id': file_id,
                                                         'file_datastore': file_meta,
                                                         'yara_list': yara_list})

    if command == 'hivedetails':
        # BUGFIX: was "if 'plugin_id' and 'rowid' in request.POST" which only
        # tests 'rowid' — the string literal 'plugin_id' is always truthy.
        if 'plugin_id' in request.POST and 'rowid' in request.POST:
            pluginid = request.POST['plugin_id']
            rowid = request.POST['rowid']
            plugin_details = db.get_pluginbyid(ObjectId(pluginid))
            key_name = 'hive_keys_{0}'.format(rowid)
            if key_name in plugin_details:
                # Cached result from an earlier run.
                hive_details = plugin_details[key_name]
            else:
                session_id = plugin_details['session_id']
                session = db.get_session(session_id)
                plugin_data = plugin_details['plugin_output']
                for row in plugin_data['rows']:
                    if str(row[0]) == rowid:
                        hive_offset = str(row[1])
                # Run the plugin
                vol_int = RunVol(session['session_profile'], session['session_path'])
                hive_details = vol_int.run_plugin('hivedump', hive_offset=hive_offset)
                # update the plugin / session
                # BUGFIX: was ObjectId(ObjectId(pluginid)) — double wrap.
                new_values = {key_name: hive_details}
                db.update_plugin(ObjectId(pluginid), new_values)
                # Update the session
                new_sess = {}
                new_sess['modified'] = datetime.now()
                db.update_session(session_id, new_sess)
            return render(request, 'hive_details.html', {'hive_details': hive_details})

    if command == 'dottree':
        session_id = request.POST['session_id']
        session = db.get_session(ObjectId(session_id))
        vol_int = RunVol(session['session_profile'], session['session_path'])
        results = vol_int.run_plugin('pstree', output_style='dot')
        return HttpResponse(results)

    if command == 'virustotal':
        if not VT_KEY or not VT_LIB:
            return HttpResponse("Unable to use Virus Total. No Key or Library Missing. Check the Console for details")
        if 'file_id' in request.POST:
            file_id = request.POST['file_id']
            file_object = db.get_filebyid(ObjectId(file_id))
            sha256 = file_object.sha256
            vt = PublicApi(API_KEY)
            response = vt.get_file_report(sha256)
            vt_fields = {}
            # Guard on the transport code too: 'results' may be absent/partial
            # when the HTTP request itself failed (matches each_with_type()).
            if response.get('response_code') == 200 and response['results']['response_code'] == 1:
                vt_fields['permalink'] = response['results']['permalink']
                vt_fields['total'] = response['results']['total']
                vt_fields['positives'] = response['results']['positives']
                vt_fields['scandate'] = response['results']['scan_date']
                # Store the results in datastore
                store_data = {}
                store_data['file_id'] = ObjectId(file_id)
                store_data['vt'] = vt_fields
                update = db.create_datastore(store_data)
            return render(request, 'file_details_vt.html', {'vt_results': vt_fields})

    if command == 'yara-string':
        session_id = request.POST['session_id']
        if request.POST['yara-string'] != '':
            yara_string = request.POST['yara-string']
        else:
            yara_string = False
        # BUGFIX: yara_file was unbound (NameError at "elif yara_file" below)
        # when the posted 'yara-file' field was empty.
        yara_file = False
        if request.POST['yara-file'] != '':
            yara_file = os.path.join('yararules', request.POST['yara-file'])
        yara_hex = request.POST['yara-hex']
        if yara_hex != '':
            yara_hex = int(yara_hex)
        else:
            yara_hex = 256
        yara_reverse = request.POST['yara-reverse']
        if yara_reverse != '':
            yara_reverse = int(yara_reverse)
        else:
            yara_reverse = 0
        yara_case = request.POST['yara-case']
        if yara_case == 'true':
            yara_case = True
        else:
            yara_case = None
        yara_kernel = request.POST['yara-kernel']
        if yara_kernel == 'true':
            yara_kernel = True
        else:
            yara_kernel = None
        yara_wide = request.POST['yara-wide']
        if yara_wide == 'true':
            yara_wide = True
        else:
            yara_wide = None
        logger.debug('Yara String Scanner')
        try:
            session = db.get_session(ObjectId(session_id))
            vol_int = RunVol(session['session_profile'], session['session_path'])
            if yara_string:
                results = vol_int.run_plugin('yarascan', output_style='json',
                                             plugin_options={'YARA_RULES': yara_string,
                                                             'CASE': yara_case,
                                                             'ALL': yara_kernel,
                                                             'WIDE': yara_wide,
                                                             'SIZE': yara_hex,
                                                             'REVERSE': yara_reverse})
            elif yara_file:
                results = vol_int.run_plugin('yarascan', output_style='json',
                                             plugin_options={'YARA_FILE': yara_file,
                                                             'CASE': yara_case,
                                                             'ALL': yara_kernel,
                                                             'WIDE': yara_wide,
                                                             'SIZE': yara_hex,
                                                             'REVERSE': yara_reverse})
            else:
                return
            # Convert the hex-encoded 'Data' column back to a printable string.
            if 'Data' in results['columns']:
                row_loc = results['columns'].index('Data')
                for row in results['rows']:
                    try:
                        row[row_loc] = string_clean_hex(row[row_loc].decode('hex'))
                    except Exception as e:
                        logger.warning('Error converting hex to str: {0}'.format(e))
            return render(request, 'plugin_output.html',
                          {'plugin_results': results, 'plugin_id': None, 'bookmarks': []})
        except Exception as error:
            logger.error(error)

    if command == 'yara':
        # (removed leftover debug "print request.POST")
        file_id = rule_file = False
        if 'file_id' in request.POST:
            file_id = request.POST['file_id']
        if 'rule_file' in request.POST:
            rule_file = request.POST['rule_file']
        if rule_file and file_id and YARA:
            file_object = db.get_filebyid(ObjectId(file_id))
            file_data = file_object.read()
            # SECURITY: rule_file comes from POST data — path traversal risk
            # if not restricted to the 'yararules' directory upstream.
            rule_file = os.path.join('yararules', rule_file)
            if os.path.exists(rule_file):
                rules = yara.compile(rule_file)
                matches = rules.match(data=file_data)
                results = []
                for match in matches:
                    for item in match.strings:
                        results.append({'rule': match.rule, 'offset': item[0],
                                        'string': string_clean_hex(item[2])})
            else:
                return render(request, 'file_details_yara.html',
                              {'yara': None, 'error': 'Could not find Rule File'})
            if len(results) > 0:
                # Store the results in datastore
                store_data = {}
                store_data['file_id'] = ObjectId(file_id)
                store_data['yara'] = results
                update = db.create_datastore(store_data)
            # BUGFIX: previously only returned when matches were found, so an
            # empty result set made the view return None (Django 500).
            return render(request, 'file_details_yara.html', {'yara': results})
        else:
            return HttpResponse('Either No file ID or No Yara Rule was provided')

    if command == 'strings':
        if 'file_id' in request.POST:
            file_id = request.POST['file_id']
            file_object = db.get_filebyid(ObjectId(file_id))
            file_data = file_object.read()
            # Printable runs of 4+ chars (space, digits, letters, -.:).
            regexp = '[\x20\x30-\x39\x41-\x5a\x61-\x7a\-\.:]{4,}'
            string_list = re.findall(regexp, file_data)
            # Store the list in datastore
            store_data = {}
            store_data['file_id'] = ObjectId(file_id)
            store_data['string_list'] = string_list
            # Write to DB
            db.create_datastore(store_data)
            return render(request, 'file_details_strings.html', {'string_list': string_list})

    if command == 'dropsession':
        if 'session_id' in request.POST:
            session_id = ObjectId(request.POST['session_id'])
            db.drop_session(session_id)
            return HttpResponse('OK')

    if command == 'memhex':
        if 'session_id' in request.POST:
            session_id = ObjectId(request.POST['session_id'])
            session = db.get_session(session_id)
            mem_path = session['session_path']
            # BUGFIX: was "'start_offset' and 'end_offset' in request.POST".
            if 'start_offset' in request.POST and 'end_offset' in request.POST:
                try:
                    # base 0: accepts decimal or 0x-prefixed hex offsets.
                    start_offset = int(request.POST['start_offset'], 0)
                    end_offset = int(request.POST['end_offset'], 0)
                    hex_cmd = 'hexdump -C -s {0} -n {1} {2}'.format(start_offset, end_offset - start_offset, mem_path)
                    hex_output = hex_dump(hex_cmd)
                    return HttpResponse(hex_output)
                except Exception as e:
                    return HttpResponse(e)

    if command == 'memhexdump':
        if 'session_id' in request.POST:
            session_id = ObjectId(request.POST['session_id'])
            session = db.get_session(session_id)
            mem_path = session['session_path']
            # BUGFIX: was "'start_offset' and 'end_offset' in request.POST".
            if 'start_offset' in request.POST and 'end_offset' in request.POST:
                try:
                    start_offset = int(request.POST['start_offset'], 0)
                    end_offset = int(request.POST['end_offset'], 0)
                    # BUGFIX: file handle was never closed.
                    with open(mem_path, 'rb') as mem_file:
                        # Get to start
                        mem_file.seek(start_offset)
                        file_data = mem_file.read(end_offset - start_offset)
                    response = HttpResponse(file_data, content_type='application/octet-stream')
                    response['Content-Disposition'] = 'attachment; filename="{0}-{1}.bin"'.format(start_offset, end_offset)
                    return response
                except Exception as e:
                    logger.error('Error Getting hex dump: {0}'.format(e))

    if command == 'addcomment':
        html_resp = ''
        # BUGFIX: was "'session_id' and 'comment_text' in request.POST".
        if 'session_id' in request.POST and 'comment_text' in request.POST:
            session_id = request.POST['session_id']
            comment_text = request.POST['comment_text']
            comment_data = {'session_id': ObjectId(session_id),
                            'comment_text': comment_text,
                            'date_added': datetime.now()}
            db.create_comment(comment_data)
            # now return all the comments for the ajax update
            for comment in db.get_commentbysession(ObjectId(session_id)):
                html_resp += '<pre>{0}</pre>'.format(comment['comment_text'])
            return HttpResponse(html_resp)

    if command == 'searchbar':
        # BUGFIX: was "'search_type' and 'search_text' and 'session_id' in ...".
        if ('search_type' in request.POST and 'search_text' in request.POST
                and 'session_id' in request.POST):
            search_type = request.POST['search_type']
            search_text = request.POST['search_text']
            session_id = request.POST['session_id']
            logger.debug('{0} search for {1}'.format(search_type, search_text))
            if search_type == 'plugin':
                results = {'rows': []}
                results['columns'] = ['Plugin Name', 'View Results']
                rows = db.search_plugins(search_text, session_id=ObjectId(session_id))
                for row in rows:
                    results['rows'].append([row['plugin_name'], '<a href="#" onclick="ajaxHandler(\'pluginresults\', {{\'plugin_id\':\'{0}\'}}, false ); return false">View Output</a>'.format(row['_id'])])
                return render(request, 'plugin_output.html', {'plugin_results': results})
            if search_type == 'hash':
                pass
            if search_type == 'string':
                logger.debug('yarascan for string')
                # If search string ends with .yar assume a yara rule
                if any(ext in search_text for ext in ['.yar', '.yara']):
                    if os.path.exists(search_text):
                        try:
                            session = db.get_session(ObjectId(session_id))
                            vol_int = RunVol(session['session_profile'], session['session_path'])
                            results = vol_int.run_plugin('yarascan', output_style='json',
                                                         plugin_options={'YARA_FILE': search_text})
                            return render(request, 'plugin_output.html', {'plugin_results': results})
                        except Exception as error:
                            logger.error(error)
                    else:
                        logger.error('No Yara Rule Found')
                else:
                    try:
                        session = db.get_session(ObjectId(session_id))
                        vol_int = RunVol(session['session_profile'], session['session_path'])
                        results = vol_int.run_plugin('yarascan', output_style='json',
                                                     plugin_options={'YARA_RULES': search_text})
                        return render(request, 'plugin_output.html', {'plugin_results': results})
                    except Exception as error:
                        logger.error(error)
            if search_type == 'registry':
                logger.debug('Registry Search')
                try:
                    session = db.get_session(ObjectId(session_id))
                    vol_int = RunVol(session['session_profile'], session['session_path'])
                    results = vol_int.run_plugin('printkey', output_style='json',
                                                 plugin_options={'KEY': search_text})
                    return render(request, 'plugin_output.html', {'plugin_results': results})
                except Exception as error:
                    logger.error(error)
            if search_type == 'vol':
                # Run a vol command and get the output
                # SECURITY: search_text is user-supplied and interpolated into
                # a shell command — command injection risk; sanitise upstream.
                vol_output = getoutput('vol.py {0}'.format(search_text))
                results = {'rows': [['<pre>{0}</pre>'.format(vol_output)]],
                           'columns': ['Volitlity Raw Output']}
                # Consider storing the output here as well.
                return render(request, 'plugin_output.html', {'plugin_results': results})
            return HttpResponse('No valid search query found.')

    if command == 'pluginresults':
        if 'plugin_id' in request.POST:
            plugin_id = ObjectId(request.POST['plugin_id'])
            plugin_results = plugin_output(plugin_id)
            # BUGFIX: narrowed bare "except:" — a missing 'bookmarks' key
            # raises KeyError; a None record raises TypeError.
            try:
                bookmarks = db.get_pluginbyid(plugin_id)['bookmarks']
            except (KeyError, TypeError):
                bookmarks = []
            return render(request, 'plugin_output.html',
                          {'plugin_results': plugin_results,
                           'plugin_id': plugin_id,
                           'bookmarks': bookmarks})

    if command == 'bookmark':
        if 'row_id' in request.POST:
            plugin_id, row_id = request.POST['row_id'].split('_')
            plugin_id = ObjectId(plugin_id)
            row_id = int(row_id)
            # Get Bookmarks for plugin
            try:
                bookmarks = db.get_pluginbyid(plugin_id)['bookmarks']
            except (KeyError, TypeError):
                bookmarks = []
            # Update bookmarks (toggle membership of this row).
            if row_id in bookmarks:
                bookmarks.remove(row_id)
                bookmarked = 'remove'
            else:
                bookmarks.append(row_id)
                bookmarked = 'add'
            # Update Plugins
            # BUGFIX: plugin_id is already an ObjectId — no need to re-wrap.
            new_values = {'bookmarks': bookmarks}
            db.update_plugin(plugin_id, new_values)
            return HttpResponse(bookmarked)

    return HttpResponse('No valid search query found.')
pass shutil.copy( 'install\\CrossMgrVideo_Setup.exe', 'install\\' + newExeName ) six.print_( 'executable copied to: ' + newExeName ) # Create comprssed executable. os.chdir( 'install' ) newExeName = os.path.basename( newExeName ) newZipName = newExeName.replace( '.exe', '.zip' ) try: os.remove( newZipName ) except: pass z = zipfile.ZipFile(newZipName, "w") z.write( newExeName ) z.close() six.print_( 'executable compressed.' ) shutil.copy( newZipName, googleDrive ) from virus_total_apis import PublicApi as VirusTotalPublicApi API_KEY = '64b7960464d4dbeed26ffa51cb2d3d2588cb95b1ab52fafd82fb8a5820b44779' vt = VirusTotalPublicApi(API_KEY) print ( 'VirusTotal Scan' ) vt.scan_file( os.path.abspath(newExeName) )
def ajax_handler(request, command):
    """
    Dispatch AJAX requests issued by volutility.js.

    Each supported ``command`` reads its parameters from ``request.POST``
    and returns either a rendered template fragment or a plain
    ``HttpResponse``. Unknown or malformed requests fall through to a
    generic error response.

    :param request: Django request object (parameters are read from request.POST)
    :param command: action name, e.g. 'pollplugins', 'runplugin', 'virustotal',
                    'yara', 'strings', 'searchbar', 'bookmark', ...
    :return: HttpResponse
    """
    if command == 'pollplugins':
        if 'session_id' in request.POST:
            # Get the current session and the plugins already registered for it.
            session_id = request.POST['session_id']
            session = db.get_session(ObjectId(session_id))
            plugin_rows = db.get_pluginbysession(ObjectId(session_id))

            # Ask Volatility which plugins are compatible with this profile.
            profile = session['session_profile']
            session_path = session['session_path']
            vol_int = RunVol(profile, session_path)
            plugin_list = vol_int.list_plugins()

            # Plugin filter options
            plugin_filters = vol_interface.plugin_filters

            refresh_rows = False
            existing_plugins = [row['plugin_name'] for row in plugin_rows]

            # Create a DB entry for every compatible plugin we do not have yet.
            for plugin in plugin_list:
                # Ignore plugins we cant handle
                if plugin[0] in plugin_filters['drop']:
                    continue
                if plugin[0] in existing_plugins:
                    continue
                db_results = {'session_id': ObjectId(session_id),
                              'plugin_name': plugin[0],
                              'help_string': plugin[1],
                              'created': None,
                              'plugin_output': None,
                              'status': None}
                db.create_plugin(db_results)
                refresh_rows = True

            if refresh_rows:
                plugin_rows = db.get_pluginbysession(ObjectId(session_id))

            return render(request, 'plugin_poll.html', {'plugin_output': plugin_rows})
        else:
            # BUGFIX: original returned the response class itself; return an instance.
            return HttpResponseServerError()

    if command == 'dropplugin':
        if 'plugin_id' in request.POST:
            plugin_id = request.POST['plugin_id']
            # Reset the plugin entry so it can be re-run.
            new_values = {'created': None, 'plugin_output': None, 'status': None}
            db.update_plugin(ObjectId(plugin_id), new_values)
            return HttpResponse('OK')

    if command == 'runplugin':
        if 'plugin_id' in request.POST and 'session_id' in request.POST:
            plugin_name = run_plugin(request.POST['session_id'], request.POST['plugin_id'])
            return HttpResponse(plugin_name)

    if command == 'plugin_dir':
        # Platform PATH separator for the PLUGINS entry in the volrc file.
        separator = ';' if sys.platform.startswith('win') else ':'

        # Set Plugins
        if 'plugin_dir' in request.POST:
            plugin_dir = request.POST['plugin_dir']

            if os.path.exists(volrc_file):
                # Append the new path to the existing PLUGINS line.
                with open(volrc_file, 'a') as out:
                    out.write('{0}{1}'.format(separator, plugin_dir))
            else:
                # Create a new volrc file.
                with open(volrc_file, 'w') as out:
                    out.write('[DEFAULT]\nPLUGINS = {0}'.format(plugin_dir))
            # BUGFIX: original returned ' No Plugin Path Provided' even on success.
            return HttpResponse('Plugin Path Added')
        else:
            return HttpResponse(' No Plugin Path Provided')

    if command == 'filedetails':
        if 'file_id' in request.POST:
            file_id = request.POST['file_id']
            file_object = db.get_filebyid(ObjectId(file_id))
            file_datastore = db.search_datastore({'file_id': ObjectId(file_id)})

            vt_results = None
            yara_match = None
            string_list = None
            state = 'notchecked'
            for row in file_datastore:
                if 'vt' in row:
                    vt_results = row['vt']
                    state = 'complete'
                if 'yara' in row:
                    yara_match = row['yara']

            # New String Store
            new_strings = db.get_strings(file_id)
            if new_strings:
                string_list = new_strings._id

            yara_list = sorted(os.listdir('yararules'))
            return render(request, 'file_details.html', {'file_details': file_object,
                                                         'file_id': file_id,
                                                         'yara_list': yara_list,
                                                         'yara': yara_match,
                                                         'vt_results': vt_results,
                                                         'string_list': string_list,
                                                         'state': state,
                                                         'error': None})

    if command == 'hivedetails':
        # BUGFIX: "'plugin_id' and 'rowid' in request.POST" only tested 'rowid'
        # ('plugin_id' is a truthy literal); test both keys explicitly.
        if 'plugin_id' in request.POST and 'rowid' in request.POST:
            pluginid = request.POST['plugin_id']
            rowid = request.POST['rowid']
            plugin_details = db.get_pluginbyid(ObjectId(pluginid))
            key_name = 'hive_keys_{0}'.format(rowid)

            if key_name in plugin_details:
                # Cached from a previous request.
                hive_details = plugin_details[key_name]
            else:
                session_id = plugin_details['session_id']
                session = db.get_session(session_id)
                plugin_data = plugin_details['plugin_output']

                # Find the hive offset for the requested row.
                for row in plugin_data['rows']:
                    if str(row[0]) == rowid:
                        hive_offset = str(row[1])

                # Run the hivedump plugin against that offset.
                vol_int = RunVol(session['session_profile'], session['session_path'])
                hive_details = vol_int.run_plugin('hivedump', hive_offset=hive_offset)

                # Cache the result on the plugin document.
                # BUGFIX: dropped redundant double ObjectId() wrapping.
                db.update_plugin(ObjectId(pluginid), {key_name: hive_details})

                # Update the session
                db.update_session(session_id, {'modified': datetime.now()})

            return render(request, 'hive_details.html', {'hive_details': hive_details})

    if command == 'dottree':
        session_id = request.POST['session_id']
        session = db.get_session(ObjectId(session_id))
        vol_int = RunVol(session['session_profile'], session['session_path'])
        results = vol_int.run_plugin('pstree', output_style='dot')
        return HttpResponse(results)

    if command == 'timeline':
        logger.debug('Running Timeline')
        session_id = request.POST['session_id']
        session = db.get_session(ObjectId(session_id))
        vol_int = RunVol(session['session_profile'], session['session_path'])
        results = vol_int.run_plugin('timeliner', output_style='dot')
        return HttpResponse(results)

    if command == 'virustotal':
        if not config.api_key or not VT_LIB:
            logger.error('No Virustotal key provided in volutitliy.conf')
            return HttpResponse("Unable to use Virus Total. No Key or Library Missing. Check the Console for details")

        if 'file_id' in request.POST:
            file_id = request.POST['file_id']
            file_object = db.get_filebyid(ObjectId(file_id))
            sha256 = file_object.sha256
            vt = PublicApi(config.api_key)

            if 'upload' in request.POST:
                # Submit the sample itself for scanning.
                response = vt.scan_file(file_object.read(),
                                        filename=file_object.filename,
                                        from_disk=False)
                state = 'pending' if response['results']['response_code'] == 1 else 'error'
                return render(request, 'file_details_vt.html', {'state': state,
                                                                'vt_results': '',
                                                                'file_id': file_id})

            # Otherwise fetch the existing report by hash.
            response = vt.get_file_report(sha256)
            vt_fields = {}

            if response['results']['response_code'] == 1:
                vt_fields['permalink'] = response['results']['permalink']
                vt_fields['total'] = response['results']['total']
                vt_fields['positives'] = response['results']['positives']
                vt_fields['scandate'] = response['results']['scan_date']
                vt_fields['scans'] = response['results']['scans']

                # Store the results in datastore
                db.create_datastore({'file_id': ObjectId(file_id), 'vt': vt_fields})

                return render(request, 'file_details_vt.html', {'state': 'complete',
                                                                'vt_results': vt_fields,
                                                                'file_id': file_id})
            elif response['results']['response_code'] == -2:
                # Still Pending Analysis
                return render(request, 'file_details_vt.html', {'state': 'pending',
                                                                'vt_results': vt_fields,
                                                                'file_id': file_id})
            elif response['results']['response_code'] == 0:
                # Not present in data set prompt to uploads
                return render(request, 'file_details_vt.html', {'state': 'missing',
                                                                'vt_results': vt_fields,
                                                                'file_id': file_id})

    if command == 'yara-string':
        session_id = request.POST['session_id']

        yara_string = request.POST['yara-string'] if request.POST['yara-string'] != '' else False
        yara_pid = request.POST['yara-pid'] if request.POST['yara-pid'] != '' else None

        # BUGFIX: yara_file was left undefined when no file name was posted,
        # raising NameError at the 'elif yara_file' test below.
        yara_file = False
        if request.POST['yara-file'] != '':
            yara_file = os.path.join('yararules', request.POST['yara-file'])

        yara_hex = request.POST['yara-hex']
        yara_hex = int(yara_hex) if yara_hex != '' else 256

        yara_reverse = request.POST['yara-reverse']
        yara_reverse = int(yara_reverse) if yara_reverse != '' else 0

        yara_case = True if request.POST['yara-case'] == 'true' else None
        yara_kernel = True if request.POST['yara-kernel'] == 'true' else None
        yara_wide = True if request.POST['yara-wide'] == 'true' else None

        logger.debug('Yara String Scanner')
        try:
            session = db.get_session(ObjectId(session_id))
            vol_int = RunVol(session['session_profile'], session['session_path'])

            if yara_string:
                results = vol_int.run_plugin('yarascan', output_style='json', pid=yara_pid,
                                             plugin_options={'YARA_RULES': yara_string,
                                                             'CASE': yara_case,
                                                             'ALL': yara_kernel,
                                                             'WIDE': yara_wide,
                                                             'SIZE': yara_hex,
                                                             'REVERSE': yara_reverse})
            elif yara_file:
                results = vol_int.run_plugin('yarascan', output_style='json', pid=yara_pid,
                                             plugin_options={'YARA_FILE': yara_file,
                                                             'CASE': yara_case,
                                                             'ALL': yara_kernel,
                                                             'WIDE': yara_wide,
                                                             'SIZE': yara_hex,
                                                             'REVERSE': yara_reverse})
            else:
                return

            # Convert the hex-encoded 'Data' column to printable strings.
            if 'Data' in results['columns']:
                row_loc = results['columns'].index('Data')
                for row in results['rows']:
                    try:
                        row[row_loc] = string_clean_hex(row[row_loc].decode('hex'))
                    except Exception as e:
                        logger.warning('Error converting hex to str: {0}'.format(e))

            return render(request, 'file_details_yara.html', {'yara': results, 'error': None})
        except Exception as error:
            logger.error(error)

    if command == 'yara':
        file_id = rule_file = False
        if 'file_id' in request.POST:
            file_id = request.POST['file_id']
        if 'rule_file' in request.POST:
            rule_file = request.POST['rule_file']

        if rule_file and file_id and YARA:
            file_object = db.get_filebyid(ObjectId(file_id))
            file_data = file_object.read()
            rule_file = os.path.join('yararules', rule_file)

            if not os.path.exists(rule_file):
                return render(request, 'file_details_yara.html',
                              {'yara': None, 'error': 'Could not find Rule File'})

            rules = yara.compile(rule_file)
            matches = rules.match(data=file_data)
            results = {'rows': [], 'columns': ['Rule', 'process', 'Offset', 'Data']}
            for match in matches:
                for item in match.strings:
                    results['rows'].append([match.rule, file_object.filename,
                                            item[0], string_clean_hex(item[2])])

            if len(results) > 0:
                # Store the results in datastore
                db.create_datastore({'file_id': ObjectId(file_id), 'yara': results})

            return render(request, 'file_details_yara.html', {'yara': results, 'error': None})
        else:
            return HttpResponse('Either No file ID or No Yara Rule was provided')

    if command == 'strings':
        if 'file_id' in request.POST:
            file_id = request.POST['file_id']
            file_object = db.get_filebyid(ObjectId(file_id))
            file_data = file_object.read()

            # Extract printable runs of at least `shortest_run` characters.
            chars = r"A-Za-z0-9/\-:.,_$%'()[\]<>@=+ "
            shortest_run = 4
            pattern = re.compile('[%s]{%d,}' % (chars, shortest_run))
            string_list = pattern.findall(file_data)
            logger.debug('Joining Strings')
            string_list = '\n'.join(string_list)

            # String lists can get larger than the 16Mb bson limit,
            # so the result is stored in GridFS rather than the datastore.
            logger.debug('Store Strings in DB')
            string_id = db.create_file(string_list, 'session_id', 'sha256',
                                       '{0}_strings.txt'.format(file_id))
            return HttpResponse('<td><a class="btn btn-success" role="button" href="/download/file/{0}">Download</a></td>'.format(string_id))

    if command == 'dropsession':
        if 'session_id' in request.POST:
            session_id = ObjectId(request.POST['session_id'])
            db.drop_session(session_id)
            return HttpResponse('OK')

    if command == 'memhex':
        if 'session_id' in request.POST:
            session_id = ObjectId(request.POST['session_id'])
            session = db.get_session(session_id)
            mem_path = session['session_path']
            # BUGFIX: "'start_offset' and 'end_offset' in ..." only tested one key.
            if 'start_offset' in request.POST and 'end_offset' in request.POST:
                try:
                    # base 0 accepts both hex (0x...) and decimal offsets.
                    start_offset = int(request.POST['start_offset'], 0)
                    end_offset = int(request.POST['end_offset'], 0)
                    hex_cmd = 'hexdump -C -s {0} -n {1} {2}'.format(start_offset,
                                                                    end_offset - start_offset,
                                                                    mem_path)
                    hex_output = hex_dump(hex_cmd)
                    return HttpResponse(hex_output)
                except Exception as e:
                    return HttpResponse(e)

    if command == 'memhexdump':
        if 'session_id' in request.POST:
            session_id = ObjectId(request.POST['session_id'])
            session = db.get_session(session_id)
            mem_path = session['session_path']
            # BUGFIX: "'start_offset' and 'end_offset' in ..." only tested one key.
            if 'start_offset' in request.POST and 'end_offset' in request.POST:
                try:
                    start_offset = int(request.POST['start_offset'], 0)
                    end_offset = int(request.POST['end_offset'], 0)
                    # BUGFIX: use a context manager so the image handle is closed.
                    with open(mem_path, 'rb') as mem_file:
                        mem_file.seek(start_offset)
                        file_data = mem_file.read(end_offset - start_offset)
                    response = HttpResponse(file_data, content_type='application/octet-stream')
                    response['Content-Disposition'] = 'attachment; filename="{0}-{1}.bin"'.format(start_offset, end_offset)
                    return response
                except Exception as e:
                    logger.error('Error Getting hex dump: {0}'.format(e))

    if command == 'addcomment':
        html_resp = ''
        # BUGFIX: "'session_id' and 'comment_text' in ..." only tested one key.
        if 'session_id' in request.POST and 'comment_text' in request.POST:
            session_id = request.POST['session_id']
            comment_text = request.POST['comment_text']
            comment_data = {'session_id': ObjectId(session_id),
                            'comment_text': comment_text,
                            'date_added': datetime.now()}
            db.create_comment(comment_data)

            # now return all the comments for the ajax update
            for comment in db.get_commentbysession(ObjectId(session_id)):
                html_resp += '<pre>{0}</pre>'.format(comment['comment_text'])

            return HttpResponse(html_resp)

    if command == 'searchbar':
        # BUGFIX: "'a' and 'b' and 'c' in request.POST" only tested the last key.
        if ('search_type' in request.POST and 'search_text' in request.POST
                and 'session_id' in request.POST):
            search_type = request.POST['search_type']
            search_text = request.POST['search_text']
            session_id = request.POST['session_id']
            logger.debug('{0} search for {1}'.format(search_type, search_text))

            if search_type == 'plugin':
                results = {'rows': [], 'columns': ['Plugin Name', 'View Results']}
                rows = db.search_plugins(search_text, session_id=ObjectId(session_id))
                for row in rows:
                    results['rows'].append([row['plugin_name'],
                                            '<a href="#" onclick="ajaxHandler(\'pluginresults\', {{\'plugin_id\':\'{0}\'}}, false ); return false">View Output</a>'.format(row['_id'])])
                return render(request, 'plugin_output.html', {'plugin_results': results})

            if search_type == 'hash':
                # Hash search not implemented.
                pass

            if search_type == 'string':
                logger.debug('yarascan for string')
                # If search string ends with .yar/.yara assume it names a rule file.
                if any(ext in search_text for ext in ['.yar', '.yara']):
                    if os.path.exists(search_text):
                        try:
                            session = db.get_session(ObjectId(session_id))
                            vol_int = RunVol(session['session_profile'], session['session_path'])
                            results = vol_int.run_plugin('yarascan', output_style='json',
                                                         plugin_options={'YARA_FILE': search_text})
                            return render(request, 'plugin_output_nohtml.html',
                                          {'plugin_results': results})
                        except Exception as error:
                            logger.error(error)
                    else:
                        logger.error('No Yara Rule Found')
                else:
                    try:
                        session = db.get_session(ObjectId(session_id))
                        vol_int = RunVol(session['session_profile'], session['session_path'])
                        results = vol_int.run_plugin('yarascan', output_style='json',
                                                     plugin_options={'YARA_RULES': search_text})
                        return render(request, 'plugin_output_nohtml.html',
                                      {'plugin_results': results})
                    except Exception as error:
                        logger.error(error)

            if search_type == 'registry':
                logger.debug('Registry Search')
                try:
                    session = db.get_session(ObjectId(session_id))
                    vol_int = RunVol(session['session_profile'], session['session_path'])
                    results = vol_int.run_plugin('printkey', output_style='json',
                                                 plugin_options={'KEY': search_text})
                    return render(request, 'plugin_output.html', {'plugin_results': results})
                except Exception as error:
                    logger.error(error)

            if search_type == 'vol':
                # Run a vol command and get the output.
                # SECURITY NOTE: search_text is interpolated into a shell command
                # via getoutput(); expose this only to trusted operators.
                session = db.get_session(ObjectId(session_id))
                search_text = search_text.replace('%profile%', '--profile={0}'.format(session['session_profile']))
                search_text = search_text.replace('%path%', '-f {0}'.format(session['session_path']))
                vol_output = getoutput('vol.py {0}'.format(search_text))
                results = {'rows': [['<pre>{0}</pre>'.format(vol_output)]],
                           'columns': ['Volatility Raw Output']}
                # Consider storing the output here as well.
                return render(request, 'plugin_output.html',
                              {'plugin_results': results, 'bookmarks': []})

        return HttpResponse('No valid search query found.')

    if command == 'pluginresults':
        if 'plugin_id' in request.POST:
            # BUGFIX: dropped redundant double ObjectId() wrapping and the
            # duplicate db.get_pluginbyid() call for the bookmarks lookup.
            plugin_id = ObjectId(request.POST['plugin_id'])
            plugin_results = db.get_pluginbyid(plugin_id)
            try:
                bookmarks = plugin_results['bookmarks']
            except (KeyError, TypeError):
                bookmarks = []
            return render(request, 'plugin_output.html',
                          {'plugin_results': plugin_results['plugin_output'],
                           'plugin_id': plugin_id,
                           'bookmarks': bookmarks,
                           'plugin_name': plugin_results['plugin_name']})

    if command == 'bookmark':
        if 'row_id' in request.POST:
            plugin_id, row_id = request.POST['row_id'].split('_')
            plugin_id = ObjectId(plugin_id)
            row_id = int(row_id)

            # Get Bookmarks for plugin
            try:
                bookmarks = db.get_pluginbyid(plugin_id)['bookmarks']
            except (KeyError, TypeError):
                bookmarks = []

            # Toggle the bookmark for this row.
            if row_id in bookmarks:
                bookmarks.remove(row_id)
                bookmarked = 'remove'
            else:
                bookmarks.append(row_id)
                bookmarked = 'add'

            # BUGFIX: plugin_id is already an ObjectId; no need to re-wrap.
            db.update_plugin(plugin_id, {'bookmarks': bookmarks})
            return HttpResponse(bookmarked)

    if command == 'procmem':
        if 'row_id' in request.POST and 'session_id' in request.POST:
            plugin_id, row_id = request.POST['row_id'].split('_')
            session_id = request.POST['session_id']
            plugin_id = ObjectId(plugin_id)
            row_id = int(row_id)
            plugin_data = db.get_pluginbyid(plugin_id)['plugin_output']
            # Rows are 1-indexed in the UI.
            row = plugin_data['rows'][row_id - 1]
            pid = row[2]
            plugin_row = db.get_plugin_byname('memdump', ObjectId(session_id))
            logger.debug('Running Plugin: memdump with pid {0}'.format(pid))
            res = run_plugin(session_id, plugin_row['_id'], pid=pid)
            return HttpResponse(res)

    if command == 'filedump':
        if 'row_id' in request.POST and 'session_id' in request.POST:
            plugin_id, row_id = request.POST['row_id'].split('_')
            session_id = request.POST['session_id']
            plugin_id = ObjectId(plugin_id)
            row_id = int(row_id)
            plugin_data = db.get_pluginbyid(plugin_id)['plugin_output']
            # Rows are 1-indexed in the UI.
            row = plugin_data['rows'][row_id - 1]
            offset = row[0]
            plugin_row = db.get_plugin_byname('dumpfiles', ObjectId(session_id))
            logger.debug('Running Plugin: dumpfiles with offset {0}'.format(offset))
            res = run_plugin(session_id, plugin_row['_id'],
                             plugin_options={'PHYSOFFSET': str(offset),
                                             'NAME': True,
                                             'REGEX': None})
            return HttpResponse(res)

    return HttpResponse('No valid search query found.')