def setup(self):
    """Initialise the VirusTotal client from the plugin options.

    Reads ``api_key`` and ``key_type`` from ``self.options`` and stores a
    ready-to-use client on ``self.vt``.
    """
    self.vt_key = self.options.get("api_key")
    self.vt_type = self.options.get("key_type", "")
    if self.vt_type == "private":
        self.vt = PrivateApi(key=self.vt_key)
    else:
        # "public", "" (unset) and any unrecognised value all get the public
        # API.  Previously "public" and "" were two duplicate branches and an
        # unknown key_type left self.vt undefined, causing an AttributeError
        # on first use.
        self.vt = PublicApi(key=self.vt_key)
def main():
    """Fetch new VT Intelligence notification samples into the malware repo.

    Reads the API key from ``~/.pyti``, downloads every sample that is not
    already present in the local repository, then deletes the notifications
    for all samples that are now accounted for (failures are kept so they
    can be retried on the next run).
    """
    config = configparser.ConfigParser()
    if os.path.isfile(os.path.expanduser('~/.pyti')):
        config.read(os.path.expanduser('~/.pyti'))
    intelApi = IntelApi(config['VTi']['apikey'])
    privApi = PrivateApi(config['VTi']['apikey'])
    allNotes = get_all_notifications(intelApi)

    def keyFunc(f):
        # groupby/sorted key: cluster notifications by their ruleset name.
        return f['ruleset_name']

    # Deduplicate notifications on sha1, keeping the first occurrence.
    # A set gives O(1) membership tests (was a list: O(n) per lookup).
    dedup = []
    seenHashes = set()
    for s in allNotes:
        if s['sha1'] not in seenHashes:
            dedup.append(s)
            seenHashes.add(s['sha1'])

    mwr = mwrepo.mwrepo(config['MalwareRepo']['basePath'])
    toDownload = [s for s in dedup if not mwr.sha1exists(s['sha1'])]
    # groupby requires its input sorted by the same key.
    printNoteSummary('To Download',
                     groupby(sorted(toDownload, key=keyFunc), key=keyFunc))
    print('Downloading {} files...'.format(len(toDownload)))
    errors = set()
    for ind, s in enumerate(toDownload):
        print('... ({}/{}): {} '.format(ind + 1, len(toDownload), s['sha1']),
              end='')
        ss = privApi.get_file(s['sha1'])
        # get_file returns a dict on error and raw file content on success.
        if isinstance(ss, dict):
            print('= Error: {}'.format(ss['error']), end='')
            errors.add(s['sha1'])
        else:
            mwr.savefile(ss)
        print()
    # Remove notifications for every sample that is not in the error set
    # (downloaded now, or already present in the repo).
    idsToRemove = list(
        set([s['id'] for s in allNotes if s['sha1'] not in errors]))
    delete_intel_notifications(intelApi, idsToRemove)
def get_VT_name(hashes):
    """Compute a ClamAV-style signature name from VirusTotal file reports.

    Queries the VT private API for every hash in *hashes*; when at least two
    reports agree on a single unique name, returns it formatted as
    ``platform.category.unique_name``.  Falls back to the generic name when
    the API key is missing, the reports disagree, the name is a PUA, or any
    other error occurs.
    """
    try:
        vt = PrivateApi(api_key=os.environ["VIRUSTOTAL_API_KEY"])
        generator = ComputeVtUniqueName()
        names = [generator.build_unique_name(vt.get_file_report(hash_) or "")
                 for hash_ in hashes]
        # Require at least two agreeing reports before trusting the name.
        if len(names) >= 2 and all(names[0] == name for name in names[1:]):
            name = names[0]
            if name["pup"]:
                # Fixed typo in the log message ("Excpected" -> "Expected").
                log.error("PUA signatures are not implemented yet. "
                          "Expected name was: %s", str(name))
            else:
                return "{}.{}.{}".format(name["platform"], name["category"],
                                         name["unique_name"])
    except KeyError:
        # VIRUSTOTAL_API_KEY not in the environment.  log.warn is a
        # deprecated alias of log.warning.
        log.warning("No VIRUSTOTAL_API_KEY specified. Falling back to generic name.")
    except Exception:
        # Fixed typo ("White" -> "While"); logs the full traceback.
        log.exception("While trying to compute VT name. Falling back to generic name.")
    return GENERIC_CLAMAV_MALWARE_NAME
def __init__(self):
    """Set up the analyzer: read its configuration parameters and create the
    VirusTotal API clients (the private client only when samples may need to
    be downloaded)."""
    Analyzer.__init__(self)
    self.service = self.get_param("config.service", None,
                                  "Service parameter is missing")
    self.virustotal_key = self.get_param("config.key", None,
                                         "Missing VirusTotal API key")
    self.polling_interval = self.get_param("config.polling_interval", 60)
    self.rescan_hash_older_than_days = self.get_param(
        "config.rescan_hash_older_than_days", None)
    self.highlighted_antivirus = self.get_param(
        "config.highlighted_antivirus", None)
    self.download_sample = self.get_param("config.download_sample", False)
    self.download_sample_if_highlighted = self.get_param(
        "config.download_sample_if_highlighted", False)
    self.obs_path = None
    self.proxies = self.get_param("config.proxy", None)
    # The paid (private) API client is only required when a sample download
    # can happen.
    needs_private_api = (self.download_sample
                         or self.download_sample_if_highlighted
                         or self.service == "download")
    if needs_private_api:
        self.vt_pay = PrivateApi(self.virustotal_key, self.proxies)
    self.vt = PublicApi(self.virustotal_key, self.proxies)
def vt(self, domain, conf, verbose):
    """Print every subdomain VirusTotal reports for *domain*."""
    print('## Searching subdomains in Virus Total')
    vt_conf = conf["VirusTotal"]
    # Pick the client class matching the configured account type.
    api_cls = PublicApi if vt_conf["type"] == "public" else PrivateApi
    client = api_cls(vt_conf["key"])
    report = client.get_domain_report(domain)
    try:
        for subdomain in report['results']['subdomains']:
            print(subdomain)
    except KeyError:
        # Report contains no subdomain data — nothing to print.
        pass
def get_private_vt(self):
    """Build a PrivateApi client from the VT section of the configuration.

    Raises Exception when the section is missing, or when it is not marked
    as a private-API account.
    """
    section = self.my_config.get(self.VTAPI, None)
    if section is None:
        raise Exception("Missing %s config" % self.VTAPI)
    # .get with a False default covers both "key absent" and "key falsy",
    # exactly like the original `in` + `.get` pair.
    if section.get(self.PRIVATE, False):
        return PrivateApi(section.get(self.API_KEY))
    raise Exception("Unable to instantiate PrivateApi for VT")
def __init__(self):
    """
    The function is a constructor of the Virus Scanner object.

    Tries to create a public VirusTotal client first and falls back to a
    private client; terminates the process when neither can be created.
    """
    self._status = None
    try:
        self._virus_total_service = PublicApi(
            Virus_Total_Service_Secret_API)
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
    # not swallowed.
    except Exception:
        try:
            self._virus_total_service = PrivateApi(
                Virus_Total_Service_Secret_API)
        except ApiError as e:
            print(f'Could not active Virus Total Virus Scanner Service.'
                  f'The error {e} occured.')
            # Exit with a failure status; exiting with 0 here reported
            # success to the calling shell despite the fatal error.
            sys.exit(1)  # Stopping the function
def run(self, args):
    """Entry point of the `pe vt` command.

    Subcommands:
      * check   — query VT for the report of a PE file (by sha256).
      * similar — search VT Intelligence for samples sharing imphash,
                  ssdeep, authentihash, vhash, .NET GUIDs or debug info
                  (requires a private account).
      * config  — write the API key/account type to ~/.vtapi.
    """
    config_path = os.path.join(os.path.expanduser("~"), ".vtapi")
    if hasattr(args, 'subcommand'):
        if args.subcommand in ('check', 'similar'):
            # Both subcommands need a configured API key.
            if not os.path.isfile(config_path):
                print(
                    "Invalid configuration file, please use pe vt config to configure your VT account"
                )
                sys.exit(1)
            cf = self.read_config(config_path)
            if cf['type'] == 'private':
                vt = PrivateApi(cf['apikey'])
            else:
                vt = PublicApi(cf['apikey'])
            if args.subcommand == 'check':
                with open(args.PEFILE, 'rb') as f:
                    data = f.read()
                # Identify the file by its sha256.
                m = hashlib.sha256()
                m.update(data)
                sha256 = m.hexdigest()
                response = vt.get_file_report(sha256)
                if args.raw:
                    print(json.dumps(response, sort_keys=False, indent=4))
                else:
                    if response["response_code"] != 200:
                        print("Error with the request (reponse code %i)" %
                              response["response_code"])
                        sys.exit(1)
                    if response["results"]["response_code"] == 0:
                        print("File not found")
                    else:
                        print("[+] Detection: %i / %i" %
                              (response["results"]["positives"],
                               response["results"]["total"]))
                        print("[+] MD5: %s" % response["results"]["md5"])
                        print("[+] SHA1: %s" % response["results"]["sha1"])
                        print("[+] SHA256: %s" % response["results"]["sha256"])
                        # first_seen / last_seen only exist on private-API reports.
                        if "first_seen" in response['results']:
                            print("[+] First Seen: %s" %
                                  response["results"]["first_seen"])
                        if "last_seen" in response['results']:
                            print("[+] Last Seen: %s" %
                                  response["results"]["last_seen"])
                        print("[+] Link: %s" % response["results"]["permalink"])
            elif args.subcommand == 'similar':
                if cf['type'] != 'private':
                    print(
                        'I am sorry, you need a private VT access to do that'
                    )
                    sys.exit(1)
                with open(args.PEFILE, 'rb') as f:
                    data = f.read()
                m = hashlib.sha256()
                m.update(data)
                sha256 = m.hexdigest()
                # Check if this PE file is in VT first
                response = vt.get_file_report(sha256)
                if response["results"]["response_code"] == 0:
                    # Unknown to VT: compute what we can locally from the PE.
                    print("File not in VT, computing imphash, ssdeep only")
                    pe = pefile.PE(data=data)
                    imphash = pe.get_imphash()
                    ssd = ssdeep.hash(data)
                    vhash = None
                    authentihash = None
                    dbg_filename = debug_filename(pe)
                    dbg_guid = debug_guid(pe)
                    if is_dot_net_assembly(pe):
                        res = get_guid(pe, data)
                        dotnet_mvid = res["mvid"]
                        dotnet_typelib = res["typelib_id"]
                    else:
                        dotnet_mvid = None
                        dotnet_typelib = None
                else:
                    # Known to VT: take the hashes from the report instead.
                    print("File identified in VT: {}".format(
                        response['results']['permalink']))
                    vhash = response['results']['vhash']
                    ssd = response['results']['ssdeep']
                    authentihash = response['results']['authentihash']
                    imphash = response['results']['additional_info'][
                        "pe-imphash"]
                    dbg_guid = None
                    dbg_filename = None
                    # Debug directory info (codeview GUID / pdb name) if present.
                    if "pe-debug" in response['results']['additional_info']:
                        if "codeview" in response['results'][
                                'additional_info']["pe-debug"][0]:
                            if "guid" in response['results'][
                                    'additional_info']["pe-debug"][0][
                                        "codeview"]:
                                dbg_guid = response['results'][
                                    'additional_info']["pe-debug"][0][
                                        "codeview"]["guid"]
                            if "name" in response['results'][
                                    'additional_info']["pe-debug"][0][
                                        "codeview"]:
                                dbg_filename = response['results'][
                                    'additional_info']['pe-debug'][0][
                                        'codeview']['name']
                    if "netguids" in response['results']['additional_info']:
                        dotnet_mvid = response['results'][
                            'additional_info']['netguids']['mvid']
                        dotnet_typelib = response['results'][
                            'additional_info']['netguids']['typelib_id']
                    else:
                        dotnet_mvid = None
                        dotnet_typelib = None
                # Start with imphash
                print("# Searching for imphash: {}".format(imphash))
                res = vt.file_search('imphash:"{}"'.format(imphash))
                self.print_results(res, sha256)
                # ssdeep
                print("# Searching for ssdeep: {}".format(ssd))
                res = vt.file_search('ssdeep:"{}"'.format(ssd))
                self.print_results(res, sha256)
                # authentihash
                if authentihash:
                    print("# Searching for authentihash: {}".format(
                        authentihash))
                    res = vt.file_search(
                        'authentihash:"{}"'.format(authentihash))
                    self.print_results(res, sha256)
                # vhash
                if vhash:
                    print("# Searching for vhash: {}".format(vhash))
                    res = vt.file_search('vhash:"{}"'.format(vhash))
                    self.print_results(res, sha256)
                # .NET GUIDs
                if dotnet_mvid:
                    print("# Searching for .NET Module Version id: {}".format(dotnet_mvid))
                    res = vt.file_search(
                        'netguid:"{}"'.format(dotnet_mvid))
                    self.print_results(res, sha256)
                if dotnet_typelib:
                    print("# Searching for .NET TypeLib id: {}".format(
                        dotnet_typelib))
                    res = vt.file_search(
                        'netguid:"{}"'.format(dotnet_typelib))
                    self.print_results(res, sha256)
                # Debug
                if dbg_filename:
                    print("# Searching for Debug Filename: {}".format(
                        dbg_filename))
                    res = vt.file_search('"{}"'.format(dbg_filename))
                    self.print_results(res, sha256)
                if dbg_guid:
                    print(
                        "# Searching for Debug GUID: {}".format(dbg_guid))
                    res = vt.file_search('"{}"'.format(dbg_guid))
                    self.print_results(res, sha256)
        elif args.subcommand == 'config':
            # Write (or overwrite) ~/.vtapi with the account type and key.
            config = configparser.ConfigParser()
            if args.type == 'public':
                config['vt'] = {
                    'intelligence': False,
                    'engines': '',
                    'timeout': 60,
                    'apikey': args.APIKEY,
                    'type': 'public'
                }
            else:
                config['vt'] = {
                    'intelligence': True,
                    'engines': '',
                    'timeout': 60,
                    'apikey': args.APIKEY,
                    'type': 'private'
                }
            with open(config_path, 'w') as configfile:
                config.write(configfile)
            print("Config file {} updated".format(config_path))
        else:
            self.parser.print_help()
    else:
        self.parser.print_help()
def run(self, conf, args, plugins):
    """`intel` subcommand: aggregate intelligence on a domain.

    Queries every configured source (MISP, OTX, UrlScan, UrlHaus, CIRCL,
    BinaryEdge, Robtex, PassiveTotal, VirusTotal, ThreatGrid, ThreatMiner),
    collects passive-DNS entries, URLs, malware and file hashes, then prints
    a consolidated report.  All timestamps are normalised to UTC.
    """
    if 'subcommand' in args:
        if args.subcommand == "intel":
            # Start with MISP and OTX to get Intelligence Reports
            print('###################### %s ###################' %
                  args.DOMAIN)
            # Accumulators filled by each source below.
            passive_dns = []
            urls = []
            malware = []
            files = []
            # MISP
            misp_e = plugins['misp'].test_config(conf)
            if misp_e:
                print('[+] Downloading MISP information...')
                server = ExpandedPyMISP(conf['Misp']['url'],
                                        conf['Misp']['key'])
                misp_results = server.search('attributes',
                                             value=unbracket(args.DOMAIN))
            # OTX
            otx_e = plugins['otx'].test_config(conf)
            if otx_e:
                print('[+] Downloading OTX information....')
                try:
                    otx = OTXv2(conf["AlienVaultOtx"]["key"])
                    res = otx.get_indicator_details_full(
                        IndicatorTypes.DOMAIN, unbracket(args.DOMAIN))
                    otx_pulses = res["general"]["pulse_info"]["pulses"]
                    # Get Passive DNS
                    if "passive_dns" in res:
                        for r in res["passive_dns"]["passive_dns"]:
                            passive_dns.append({
                                "ip": r['hostname'],
                                "first": parse(r["first"]).astimezone(pytz.utc),
                                "last": parse(r["last"]).astimezone(pytz.utc),
                                "source" : "OTX"
                            })
                    if "url_list" in res:
                        for r in res["url_list"]["url_list"]:
                            if "result" in r:
                                urls.append({
                                    "date": parse(r["date"]).astimezone(pytz.utc),
                                    "url": r["url"],
                                    "ip": r["result"]["urlworker"]["ip"] if "ip" in r["result"]["urlworker"] else "" ,
                                    "source": "OTX"
                                })
                            else:
                                urls.append({
                                    "date": parse(r["date"]).astimezone(pytz.utc),
                                    "url": r["url"],
                                    "ip": "",
                                    "source": "OTX"
                                })
                except AttributeError:
                    print('OTX crashed ¯\_(ツ)_/¯')
            # UrlScan
            us = UrlScan()
            print('[+] Downloading UrlScan information....')
            res = us.search(args.DOMAIN)
            for r in res['results']:
                urls.append({
                    "date": parse(r["task"]["time"]).astimezone(pytz.utc),
                    "url": r["page"]["url"],
                    "ip": r["page"]["ip"] if "ip" in r["page"] else "",
                    "source": "UrlScan"
                })
            # UrlHaus
            uh_e = plugins['urlhaus'].test_config(conf)
            if uh_e:
                print("[+] Checking urlhaus...")
                try:
                    urlhaus = UrlHaus(conf["UrlHaus"]["key"])
                    res = urlhaus.get_host(unbracket(args.DOMAIN))
                except UrlHausError:
                    print("Error with the query")
                else:
                    if "urls" in res:
                        for r in res['urls']:
                            urls.append({
                                "date": parse(r["date_added"]).astimezone(pytz.utc),
                                "url": r["url"],
                                "ip":"",
                                "source": "UrlHaus"
                            })
            # CIRCL
            circl_e = plugins['circl'].test_config(conf)
            if circl_e:
                print('[+] Downloading CIRCL passive DNS information....')
                x = pypdns.PyPDNS(
                    basic_auth=(
                        conf['Circl']['user'],
                        conf['Circl']['pass']
                    )
                )
                res = x.query(unbracket(args.DOMAIN))
                for answer in res:
                    passive_dns.append({
                        "ip": answer['rdata'],
                        "first": answer['time_first'].astimezone(pytz.utc),
                        "last": answer['time_last'].astimezone(pytz.utc),
                        "source" : "CIRCL"
                    })
            # BinaryEdge
            be_e = plugins['binaryedge'].test_config(conf)
            if be_e:
                print('[+] Downloading BinaryEdge information....')
                try:
                    be = BinaryEdge(conf['BinaryEdge']['key'])
                    res = be.domain_dns(unbracket(args.DOMAIN))
                    for d in res['events']:
                        if "A" in d:
                            for a in d['A']:
                                passive_dns.append({
                                    "ip": a,
                                    "first": parse(d['updated_at']).astimezone(pytz.utc),
                                    "last": parse(d['updated_at']).astimezone(pytz.utc),
                                    "source" : "BinaryEdge"
                                })
                except BinaryEdgeException:
                    print('You need a paid BinaryEdge subscription for this request')
            # RobTex
            print('[+] Downloading Robtex information....')
            try:
                rob = Robtex()
                res = rob.get_pdns_domain(args.DOMAIN)
                for d in res:
                    if d['rrtype'] in ['A', 'AAAA']:
                        passive_dns.append({
                            'first': d['time_first_o'].astimezone(pytz.utc),
                            'last': d['time_last_o'].astimezone(pytz.utc),
                            'ip': d['rrdata'],
                            'source': 'Robtex'
                        })
            except RobtexError:
                print("Robtex query failed")
            # PT
            pt_e = plugins['pt'].test_config(conf)
            if pt_e:
                try:
                    pt_osint = {}
                    ptout = False
                    print('[+] Downloading Passive Total information....')
                    client = DnsRequest(conf['PassiveTotal']['username'],
                                        conf['PassiveTotal']['key'])
                    raw_results = client.get_passive_dns(
                        query=unbracket(args.DOMAIN))
                    if "results" in raw_results:
                        for res in raw_results["results"]:
                            passive_dns.append({
                                "first": parse(res["firstSeen"]).astimezone(pytz.utc),
                                "last": parse(res["lastSeen"]).astimezone(pytz.utc),
                                "ip": res["resolve"],
                                "source": "PT"
                            })
                    if "message" in raw_results:
                        if "quota_exceeded" in raw_results["message"]:
                            print("PT quota exceeded")
                            # Skip the enrichment queries below when out of quota.
                            ptout = True
                    if not ptout:
                        client2 = EnrichmentRequest(
                            conf["PassiveTotal"]["username"],
                            conf["PassiveTotal"]['key'])
                        # Get OSINT
                        # TODO: add PT projects here
                        pt_osint = client2.get_osint(
                            query=unbracket(args.DOMAIN))
                        # Get malware
                        raw_results = client2.get_malware(
                            query=unbracket(args.DOMAIN))
                        if "results" in raw_results:
                            for r in raw_results["results"]:
                                malware.append({
                                    'hash': r["sample"],
                                    'date': parse(r['collectionDate']).astimezone(pytz.utc),
                                    'source' : 'PT (%s)' % r["source"]
                                })
                except requests.exceptions.ReadTimeout:
                    print("PT: Time Out")
            # VT
            vt_e = plugins['vt'].test_config(conf)
            if vt_e:
                # The domain report endpoint used here needs a private key.
                if conf["VirusTotal"]["type"] != "public":
                    print('[+] Downloading VT information....')
                    vt = PrivateApi(conf["VirusTotal"]["key"])
                    res = vt.get_domain_report(unbracket(args.DOMAIN))
                    if "results" in res:
                        if "resolutions" in res['results']:
                            for r in res["results"]["resolutions"]:
                                passive_dns.append({
                                    "first": parse(r["last_resolved"]).astimezone(pytz.utc),
                                    "last": parse(r["last_resolved"]).astimezone(pytz.utc),
                                    "ip": r["ip_address"],
                                    "source": "VT"
                                })
                        if "undetected_downloaded_samples" in res['results']:
                            for r in res['results']['undetected_downloaded_samples']:
                                files.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']).astimezone(pytz.utc) if 'date' in r else '',
                                    'source' : 'VT'
                                })
                        if "undetected_referrer_samples" in res['results']:
                            for r in res['results']['undetected_referrer_samples']:
                                files.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']).astimezone(pytz.utc) if 'date' in r else '',
                                    'source' : 'VT'
                                })
                        if "detected_downloaded_samples" in res['results']:
                            for r in res['results']['detected_downloaded_samples']:
                                malware.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']).astimezone(pytz.utc),
                                    'source' : 'VT'
                                })
                        if "detected_referrer_samples" in res['results']:
                            for r in res['results']['detected_referrer_samples']:
                                if "date" in r:
                                    malware.append({
                                        'hash': r['sha256'],
                                        'date': parse(r['date']).astimezone(pytz.utc),
                                        'source' : 'VT'
                                    })
                        if "detected_urls" in res['results']:
                            for r in res['results']['detected_urls']:
                                urls.append({
                                    'date': parse(r['scan_date']).astimezone(pytz.utc),
                                    'url': r['url'],
                                    'ip': '',
                                    'source': 'VT'
                                })
                else:
                    vt_e = False
            tg_e = plugins['threatgrid'].test_config(conf)
            if tg_e:
                try:
                    print('[+] Downloading Threat Grid....')
                    tg = ThreatGrid(conf['ThreatGrid']['key'])
                    res = tg.search_samples(unbracket(args.DOMAIN),
                                            type='domain')
                    # Deduplicate samples on sha256.
                    already = []
                    if 'items' in res:
                        for r in res['items']:
                            if r['sample_sha256'] not in already:
                                d = parse(r['ts']).astimezone(pytz.utc)
                                malware.append({
                                    'hash': r["sample_sha256"],
                                    'date': d,
                                    'source' : 'ThreatGrid'
                                })
                                already.append(r['sample_sha256'])
                except ThreatGridError as e:
                    print("Failed to connect to Threat Grid: %s" % e.message)
            print('[+] Downloading ThreatMiner....')
            tm = ThreatMiner()
            response = tm.get_report(unbracket(args.DOMAIN))
            if response['status_code'] == '200':
                tmm = response['results']
            else:
                tmm = []
                if response['status_code'] == '404':
                    print("Request to ThreatMiner failed: {}".format(
                        response['status_message']))
            response = tm.get_related_samples(unbracket(args.DOMAIN))
            if response['status_code'] == '200':
                for r in response['results']:
                    malware.append({
                        'hash': r,
                        'date': None,
                        'source': 'ThreatMiner'
                    })
            # ------- report printing starts here -------
            print('----------------- Intelligence Report')
            if misp_e:
                if len(misp_results['Attribute']) > 0:
                    print('MISP:')
                    for event in misp_results['Attribute']:
                        print("- {} - {}".format(
                            event['Event']['id'],
                            event['Event']['info']
                        ))
            if otx_e:
                if len(otx_pulses):
                    print('OTX:')
                    for p in otx_pulses:
                        print('- %s (%s - %s)' % (
                            p['name'],
                            p['created'][:10],
                            "https://otx.alienvault.com/pulse/" + p['id']
                        ))
                else:
                    print('OTX: Not found in any pulse')
            if pt_e:
                if "results" in pt_osint:
                    if len(pt_osint["results"]):
                        if len(pt_osint["results"]) == 1:
                            if "name" in pt_osint["results"][0]:
                                print("PT: %s %s" % (pt_osint["results"][0]["name"],
                                                     pt_osint["results"][0]["sourceUrl"]))
                            else:
                                print("PT: %s" % (pt_osint["results"][0]["sourceUrl"]))
                        else:
                            print("PT:")
                            for r in pt_osint["results"]:
                                if "name" in r:
                                    print("- %s %s" % (r["name"], r["sourceUrl"]))
                                else:
                                    print("- %s" % (r["sourceUrl"]))
                    else:
                        print("PT: Nothing found!")
                else:
                    print("PT: Nothing found!")
            # ThreatMiner
            if len(tmm) > 0:
                print("ThreatMiner:")
                for r in tmm:
                    print("- {} {} - {}".format(
                        r['year'],
                        r['filename'],
                        r['URL']
                    ))
            if len(malware) > 0:
                print('----------------- Malware')
                for r in malware:
                    print("[%s] %s %s" % (
                        r["source"],
                        r["hash"],
                        r["date"].strftime("%Y-%m-%d") if r["date"] else ""
                    ))
            if len(files) > 0:
                print('----------------- Files')
                for r in files:
                    if r['date'] != '':
                        print("[%s] %s (%s)" % (
                            r["source"],
                            r["hash"],
                            r["date"].strftime("%Y-%m-%d")
                        ))
                    else:
                        print("[%s] %s" % (
                            r["source"],
                            r["hash"],
                        ))
            if len(urls) > 0:
                print('----------------- Urls')
                for r in sorted(urls, key=lambda x: x["date"], reverse=True):
                    print("[%s] %s - %s %s" % (
                        r["source"],
                        r["url"],
                        r["ip"],
                        r["date"].strftime("%Y-%m-%d")
                    ))
            # TODO: add ASN + location info here
            if len(passive_dns) > 0:
                print('----------------- Passive DNS')
                for r in sorted(passive_dns, key=lambda x: x["first"],
                                reverse=True):
                    print("[+] %-40s (%s -> %s)(%s)" % (
                        r["ip"],
                        r["first"].strftime("%Y-%m-%d"),
                        r["last"].strftime("%Y-%m-%d"),
                        r["source"]
                    ))
        else:
            self.parser.print_help()
    else:
        self.parser.print_help()
def intel(self, type, query, data, conf):
    """Enrich the shared *data* dict with VirusTotal intelligence.

    *type* selects the query kind ("domain", "ip" or "hash"); results are
    appended in place to data["passive_dns"], data["files"],
    data["malware"], data["urls"], data["samples"] or data["network"].
    Only runs when a non-public (private) VT key is configured.
    """
    if type == "domain":
        if conf["VirusTotal"]["type"] != "public":
            print("[+] Checking VirusTotal....")
            vt = PrivateApi(conf["VirusTotal"]["key"])
            res = vt.get_domain_report(query)
            if "results" in res:
                if "resolutions" in res["results"]:
                    for r in res["results"]["resolutions"]:
                        try:
                            data["passive_dns"].append({
                                "first":
                                parse(r["last_resolved"]).astimezone(
                                    pytz.utc),
                                "last":
                                parse(r["last_resolved"]).astimezone(
                                    pytz.utc),
                                "ip":
                                r["ip_address"],
                                "source":
                                "VT",
                            })
                        except TypeError:
                            # Error with the date
                            pass
                if "undetected_downloaded_samples" in res["results"]:
                    for r in res["results"][
                            "undetected_downloaded_samples"]:
                        data["files"].append({
                            "hash": r["sha256"],
                            "date": parse(r["date"]).astimezone(pytz.utc)
                            if "date" in r else "",
                            "source": "VT",
                        })
                if "undetected_referrer_samples" in res["results"]:
                    for r in res["results"]["undetected_referrer_samples"]:
                        data["files"].append({
                            "hash": r["sha256"],
                            "date": parse(r["date"]).astimezone(pytz.utc)
                            if "date" in r else "",
                            "source": "VT",
                        })
                if "undetected_communicating_samples" in res["results"]:
                    for r in res["results"][
                            "undetected_communicating_samples"]:
                        data["malware"].append({
                            "hash": r["sha256"],
                            "date": parse(r["date"]).astimezone(pytz.utc),
                            "source": "VT"
                        })
                if "detected_communicating_samples" in res["results"]:
                    for r in res["results"][
                            "detected_communicating_samples"]:
                        data["malware"].append({
                            "hash": r["sha256"],
                            "date": parse(r["date"]).astimezone(pytz.utc),
                            "source": "VT"
                        })
                if "detected_downloaded_samples" in res["results"]:
                    for r in res["results"]["detected_downloaded_samples"]:
                        data["malware"].append({
                            "hash": r["sha256"],
                            "date": parse(r["date"]).astimezone(pytz.utc),
                            "source": "VT",
                        })
                if "detected_referrer_samples" in res["results"]:
                    for r in res["results"]["detected_referrer_samples"]:
                        # Only referrer samples carrying a date are kept.
                        if "date" in r:
                            data["malware"].append({
                                "hash": r["sha256"],
                                "date": parse(r["date"]).astimezone(pytz.utc),
                                "source": "VT",
                            })
                if "detected_urls" in res["results"]:
                    for r in res["results"]["detected_urls"]:
                        data["urls"].append({
                            "date":
                            parse(r["scan_date"]).astimezone(pytz.utc),
                            "url": r["url"],
                            "ip": "",
                            "source": "VT",
                        })
    elif type == "ip":
        if conf["VirusTotal"]["type"] != "public":
            print("[+] Checking VirusTotal...")
            vt = PrivateApi(conf["VirusTotal"]["key"])
            res = vt.get_ip_report(query)
            if "results" in res:
                if "resolutions" in res["results"]:
                    for r in res["results"]["resolutions"]:
                        try:
                            # For an IP query the resolution carries the
                            # hostname rather than an address.
                            data["passive_dns"].append({
                                "first":
                                parse(r["last_resolved"]).astimezone(
                                    pytz.utc),
                                "last":
                                parse(r["last_resolved"]).astimezone(
                                    pytz.utc),
                                "domain":
                                r["hostname"],
                                "source":
                                "VT",
                            })
                        except TypeError:
                            # Error with the date
                            pass
                if "undetected_downloaded_samples" in res["results"]:
                    for r in res["results"][
                            "undetected_downloaded_samples"]:
                        data["files"].append({
                            "hash": r["sha256"],
                            "date": parse(r["date"]).astimezone(pytz.utc)
                            if "date" in r else "",
                            "source": "VT",
                        })
                if "undetected_referrer_samples" in res["results"]:
                    for r in res["results"]["undetected_referrer_samples"]:
                        data["files"].append({
                            "hash": r["sha256"],
                            "date": parse(r["date"]).astimezone(pytz.utc)
                            if "date" in r else "",
                            "source": "VT",
                        })
                if "undetected_communicating_samples" in res["results"]:
                    for r in res["results"][
                            "undetected_communicating_samples"]:
                        data["malware"].append({
                            "hash": r["sha256"],
                            "date": parse(r["date"]).astimezone(pytz.utc),
                            "source": "VT",
                        })
                if "detected_communicating_samples" in res["results"]:
                    for r in res["results"][
                            "detected_communicating_samples"]:
                        data["malware"].append({
                            "hash": r["sha256"],
                            "date": parse(r["date"]).astimezone(pytz.utc),
                            "source": "VT",
                        })
                if "detected_downloaded_samples" in res["results"]:
                    for r in res["results"]["detected_downloaded_samples"]:
                        data["malware"].append({
                            "hash": r["sha256"],
                            "date": parse(r["date"]).astimezone(pytz.utc),
                            "source": "VT"
                        })
                if "detected_urls" in res["results"]:
                    for r in res["results"]["detected_urls"]:
                        data["urls"].append({
                            "date":
                            parse(r["scan_date"]).astimezone(pytz.utc),
                            "url": r["url"],
                            "ip": "",
                            "source": "VT",
                        })
    elif type == "hash":
        if conf["VirusTotal"]["type"] != "public":
            print("[+] Checking VirusTotal...")
            vt = PrivateApi(conf["VirusTotal"]["key"])
            res = vt.get_file_report(query)
            if res["results"]["response_code"] == 1:
                # Found
                data["samples"].append({
                    "date": parse(res['results']['scan_date']).astimezone(
                        pytz.utc),
                    "source": "VT",
                    "url": res['results']['permalink'],
                    "infos": {
                        "AV Result":
                        "{} / {}".format(res['results']['positives'],
                                         res['results']['total']),
                        "First Seen": res['results']["first_seen"],
                        "File Names":
                        ", ".join(res['results']["submission_names"][:5])
                    }
                })
                # In-the-wild URLs where the sample was observed.
                if "ITW_urls" in res["results"]:
                    for url in res['results']["ITW_urls"]:
                        data["urls"].append({
                            "url": url,
                            "source": "VT",
                            "link": res['results']['permalink']
                        })
                # DNS requests recorded by the VT sandbox run, if any.
                if "additional_info" in res["results"]:
                    if "behaviour-v1" in res["results"]["additional_info"]:
                        if "network" in res['results']['additional_info'][
                                'behaviour-v1']:
                            for d in res['results']['additional_info'][
                                    'behaviour-v1']["network"]["dns"]:
                                data["network"].append({
                                    "source": "VT",
                                    "url": res['results']['permalink'],
                                    "host": d["hostname"],
                                    "ip": d["ip"]
                                })
def run(self, conf, args, plugins):
    """VirusTotal plugin entry point.

    Dispatches on args.subcommand (hash, dl, file, hashlist, domainlist,
    iplist, domain, ip, url).  A private key enables every subcommand;
    with a public key the download / bulk-domain features are unavailable.
    """
    if 'subcommand' in args:
        if conf["VirusTotal"]["type"] != "public":
            # --- private-API branch ---
            vt = PrivateApi(conf["VirusTotal"]["key"])
            if args.subcommand == "hash":
                response = vt.get_file_report(args.HASH)
                if args.raw:
                    print(json.dumps(response, sort_keys=False, indent=4))
                    # --extended also dumps traffic and behaviour reports.
                    if args.extended:
                        response = vt.get_network_traffic(args.HASH)
                        print(
                            json.dumps(response, sort_keys=False, indent=4))
                        response = vt.get_file_behaviour(args.HASH)
                        print(
                            json.dumps(response, sort_keys=False, indent=4))
                else:
                    self.print_file(response)
            elif args.subcommand == "dl":
                # Download the sample into the current directory, named by
                # its hash; skip when the file is already there.
                if os.path.isfile(args.HASH):
                    print("File %s already exists" % args.HASH)
                    sys.exit(0)
                data = vt.get_file(args.HASH)
                # get_file may wrap the content in a {'results': ...} dict.
                if isinstance(data, dict):
                    if 'results' in data:
                        with open(args.HASH, "wb") as f:
                            f.write(data['results'])
                        print("File downloaded as %s" % args.HASH)
                    else:
                        print('Invalid answer format')
                        sys.exit(1)
                else:
                    with open(args.HASH, "wb") as f:
                        f.write(data)
                    print("File downloaded as %s" % args.HASH)
            elif args.subcommand == "file":
                with open(args.FILE, "rb") as f:
                    # FIXME : could be more efficient
                    data = f.read()
                m = hashlib.sha256()
                m.update(data)
                h = m.hexdigest()
                response = vt.get_file_report(h)
                if args.raw:
                    print(json.dumps(response, sort_keys=False, indent=4))
                else:
                    self.print_file(response)
            elif args.subcommand == "hashlist":
                # One report per unique hash in the file, printed as CSV.
                with open(args.FILE, 'r') as infile:
                    data = infile.read().split()
                hash_list = list(set([a.strip() for a in data]))
                print(
                    "Hash;Found;Detection;Total AV;First Seen;Last Seen;Link"
                )
                for h in hash_list:
                    response = vt.get_file_report(h)
                    if response["response_code"] != 200:
                        print("Error with the request (reponse code %i)" %
                              response["response_code"])
                        print(
                            json.dumps(response, sort_keys=False, indent=4))
                        print("Quitting...")
                        sys.exit(1)
                    if "response_code" in response["results"]:
                        if response["results"]["response_code"] == 0:
                            print("%s;Not found;;;;;" % h)
                        else:
                            print("%s;Found;%i;%i;%s;%s;%s" %
                                  (h, response["results"]["positives"],
                                   response["results"]["total"],
                                   response["results"]["first_seen"],
                                   response["results"]["last_seen"],
                                   response["results"]["permalink"]))
                    else:
                        print("%s;Not found;;;;;" % h)
            elif args.subcommand == "domainlist":
                with open(args.FILE, 'r') as infile:
                    data = infile.read().split()
                for d in data:
                    print("################ Domain %s" % d.strip())
                    res = vt.get_domain_report(d.strip())
                    self.print_domaininfo(res)
            elif args.subcommand == "iplist":
                with open(args.FILE, 'r') as infile:
                    data = infile.read().split()
                for d in data:
                    print("################ IP %s" % d.strip())
                    res = vt.get_ip_report(unbracket(d.strip()))
                    print(json.dumps(res, sort_keys=False, indent=4))
            elif args.subcommand == "domain":
                res = vt.get_domain_report(unbracket(args.DOMAIN))
                if args.json:
                    print(json.dumps(res, sort_keys=False, indent=4))
                else:
                    self.print_domaininfo(res)
            elif args.subcommand == "ip":
                res = vt.get_ip_report(unbracket(args.IP))
                print(json.dumps(res, sort_keys=False, indent=4))
            elif args.subcommand == "url":
                res = vt.get_url_report(args.URL)
                print(json.dumps(res, sort_keys=False, indent=4))
            else:
                self.parser.print_help()
        else:
            # --- public-API branch ---
            vt = PublicApi(conf["VirusTotal"]["key"])
            if args.subcommand == "hash":
                response = vt.get_file_report(args.HASH)
                if args.raw:
                    print(json.dumps(response, sort_keys=False, indent=4))
                else:
                    self.print_file(response)
            elif args.subcommand == "file":
                with open(args.FILE, "rb") as f:
                    # FIXME : could be more efficient
                    data = f.read()
                m = hashlib.sha256()
                m.update(data)
                response = vt.get_file_report(m.hexdigest())
                if args.raw:
                    print(json.dumps(response, sort_keys=False, indent=4))
                else:
                    self.print_file(response)
            elif args.subcommand == "hashlist":
                # Same CSV output as the private branch, but without the
                # first/last-seen columns (public reports lack them).
                with open(args.FILE, 'r') as infile:
                    data = infile.read().split()
                hash_list = list(set([a.strip() for a in data]))
                print("Hash;Found;Detection;Total AV;Link")
                for h in hash_list:
                    response = vt.get_file_report(h)
                    if response["response_code"] != 200:
                        print("Error with the request (reponse code %i)" %
                              response["response_code"])
                        print(
                            json.dumps(response,
                                       sort_keys=False, indent=4))
                        print("Quitting...")
                        sys.exit(1)
                    if "response_code" in response["results"]:
                        if response["results"]["response_code"] == 0:
                            print("%s;Not found;;;" % h)
                        else:
                            print("%s;Found;%i;%i;%s" %
                                  (h, response["results"]["positives"],
                                   response["results"]["total"],
                                   response["results"]["permalink"]))
                    else:
                        print("%s;Not found;;;" % h)
            elif args.subcommand == "domain":
                res = vt.get_domain_report(unbracket(args.DOMAIN))
                if args.json:
                    print(json.dumps(res, sort_keys=False, indent=4))
                else:
                    self.print_domaininfo(res)
            elif args.subcommand == "ip":
                res = vt.get_ip_report(unbracket(args.IP))
                print(json.dumps(res, sort_keys=False, indent=4))
            elif args.subcommand == "url":
                res = vt.get_url_report(args.URL)
                print(json.dumps(res, sort_keys=False, indent=4))
            elif args.subcommand == "domainlist":
                print(
                    "Not implemented yet with public access, please propose PR if you need it"
                )
            elif args.subcommand == "dl":
                print(
                    "VirusTotal does not allow downloading files with a public feed, sorry"
                )
                sys.exit(0)
            else:
                self.parser.print_help()
    else:
        self.parser.print_help()
# ---------------------------------------------------------------------------
# harpoon-style "domain" command handler.  For a given domain it aggregates
# threat intelligence from several providers and prints a consolidated report:
#   * OTX (pulses, passive DNS, URL list)      * CIRCL passive DNS
#   * BinaryEdge DNS history                   * Robtex passive DNS
#   * PassiveTotal (passive DNS, OSINT, malware samples)
#   * VirusTotal private API (resolutions, referrer/downloaded samples, URLs)
#   * ThreatGrid (samples, de-duplicated by sha256)
# Results are accumulated into the passive_dns / urls / malware / files lists
# and printed in dated, per-section summaries at the end.
# NOTE(review): PassiveTotal results are only fetched while the quota is not
# exceeded (ptout flag); VT is only queried when the configured key type is
# not "public".  Presumably all "first"/"last" values are naive datetimes
# from dateutil.parse — confirm before mixing with tz-aware values.
# NOTE(review): the source lines below are whitespace-mangled (many
# statements collapsed per physical line); they are left byte-identical here
# rather than risk a mis-indented reconstruction.
def run(self, conf, args, plugins): if 'subcommand' in args: if args.subcommand == 'info': print("Not implemented yet") elif args.subcommand == "intel": # Start with MISP and OTX to get Intelligence Reports print('###################### %s ###################' % args.DOMAIN) passive_dns = [] urls = [] malware = [] files = [] # OTX otx_e = plugins['otx'].test_config(conf) if otx_e: print('[+] Downloading OTX information....') otx = OTXv2(conf["AlienVaultOtx"]["key"]) res = otx.get_indicator_details_full(IndicatorTypes.DOMAIN, unbracket(args.DOMAIN)) otx_pulses = res["general"]["pulse_info"]["pulses"] # Get Passive DNS if "passive_dns" in res: for r in res["passive_dns"]["passive_dns"]: passive_dns.append({ "ip": r['hostname'], "first": parse(r["first"]), "last": parse(r["last"]), "source" : "OTX" }) if "url_list" in res: for r in res["url_list"]["url_list"]: if "result" in r: urls.append({ "date": parse(r["date"]), "url": r["url"], "ip": r["result"]["urlworker"]["ip"] if "ip" in r["result"]["urlworker"] else "" , "source": "OTX" }) else: urls.append({ "date": parse(r["date"]), "url": r["url"], "ip": "", "source": "OTX" }) # CIRCL circl_e = plugins['circl'].test_config(conf) if circl_e: print('[+] Downloading CIRCL passive DNS information....') x = pypdns.PyPDNS( basic_auth=( conf['Circl']['user'], conf['Circl']['pass'] ) ) res = x.query(unbracket(args.DOMAIN)) for answer in res: passive_dns.append({ "ip": answer['rdata'], "first": answer['time_first'], "last": answer['time_last'], "source" : "CIRCL" }) # BinaryEdge be_e = plugins['binaryedge'].test_config(conf) if be_e: print('[+] Downloading BinaryEdge information....') be = BinaryEdge(conf['BinaryEdge']['key']) res = be.domain_dns(unbracket(args.DOMAIN)) for d in res['events']: if "A" in d: for a in d['A']: passive_dns.append({ "ip": a, "first": parse(d['updated_at']), "last": parse(d['updated_at']), "source" : "BinaryEdge" }) # RobTex print('[+] Downloading Robtex information....') rob = Robtex() res = 
rob.get_pdns_domain(args.DOMAIN) for d in res: if d['rrtype'] in ['A', 'AAAA']: passive_dns.append({ 'first': d['time_first_o'], 'last': d['time_last_o'], 'ip': d['rrdata'], 'source': 'Robtex' }) # PT pt_e = plugins['pt'].test_config(conf) if pt_e: try: pt_osint = {} ptout = False print('[+] Downloading Passive Total information....') client = DnsRequest(conf['PassiveTotal']['username'], conf['PassiveTotal']['key']) raw_results = client.get_passive_dns(query=unbracket(args.DOMAIN)) if "results" in raw_results: for res in raw_results["results"]: passive_dns.append({ "first": parse(res["firstSeen"]), "last": parse(res["lastSeen"]), "ip": res["resolve"], "source": "PT" }) if "message" in raw_results: if "quota_exceeded" in raw_results["message"]: print("PT quota exceeded") ptout = True if not ptout: client2 = EnrichmentRequest(conf["PassiveTotal"]["username"], conf["PassiveTotal"]['key']) # Get OSINT # TODO: add PT projects here pt_osint = client2.get_osint(query=unbracket(args.DOMAIN)) # Get malware raw_results = client2.get_malware(query=unbracket(args.DOMAIN)) if "results" in raw_results: for r in raw_results["results"]: malware.append({ 'hash': r["sample"], 'date': parse(r['collectionDate']), 'source' : 'PT (%s)' % r["source"] }) except requests.exceptions.ReadTimeout: print("PT: Time Out") # VT vt_e = plugins['vt'].test_config(conf) if vt_e: if conf["VirusTotal"]["type"] != "public": print('[+] Downloading VT information....') vt = PrivateApi(conf["VirusTotal"]["key"]) res = vt.get_domain_report(unbracket(args.DOMAIN)) if "results" in res: if "resolutions" in res['results']: for r in res["results"]["resolutions"]: passive_dns.append({ "first": parse(r["last_resolved"]), "last": parse(r["last_resolved"]), "ip": r["ip_address"], "source": "VT" }) if "undetected_downloaded_samples" in res['results']: for r in res['results']['undetected_downloaded_samples']: files.append({ 'hash': r['sha256'], 'date': parse(r['date']) if 'date' in r else '', 'source' : 'VT' }) if 
"undetected_referrer_samples" in res['results']: for r in res['results']['undetected_referrer_samples']: files.append({ 'hash': r['sha256'], 'date': parse(r['date']) if 'date' in r else '', 'source' : 'VT' }) if "detected_downloaded_samples" in res['results']: for r in res['results']['detected_downloaded_samples']: malware.append({ 'hash': r['sha256'], 'date': parse(r['date']), 'source' : 'VT' }) if "detected_referrer_samples" in res['results']: for r in res['results']['detected_referrer_samples']: if "date" in r: malware.append({ 'hash': r['sha256'], 'date': parse(r['date']), 'source' : 'VT' }) if "detected_urls" in res['results']: for r in res['results']['detected_urls']: urls.append({ 'date': parse(r['scan_date']), 'url': r['url'], 'ip': '', 'source': 'VT' }) else: vt_e = False tg_e = plugins['threatgrid'].test_config(conf) if tg_e: try: print('[+] Downloading Threat Grid....') tg = ThreatGrid(conf['ThreatGrid']['key']) res = tg.search_samples(unbracket(args.DOMAIN), type='domain') already = [] if 'items' in res: for r in res['items']: if r['sample_sha256'] not in already: d = parse(r['ts']) d = d.replace(tzinfo=None) malware.append({ 'hash': r["sample_sha256"], 'date': d, 'source' : 'ThreatGrid' }) already.append(r['sample_sha256']) except ThreatGridError as e: print("Failed to connect to Threat Grid: %s" % e.message) # TODO: Add MISP print('----------------- Intelligence Report') if otx_e: if len(otx_pulses): print('OTX:') for p in otx_pulses: print(' -%s (%s - %s)' % ( p['name'], p['created'][:10], "https://otx.alienvault.com/pulse/" + p['id'] ) ) else: print('OTX: Not found in any pulse') if pt_e: if "results" in pt_osint: if len(pt_osint["results"]): if len(pt_osint["results"]) == 1: if "name" in pt_osint["results"][0]: print("PT: %s %s" % (pt_osint["results"][0]["name"], pt_osint["results"][0]["sourceUrl"])) else: print("PT: %s" % (pt_osint["results"][0]["sourceUrl"])) else: print("PT:") for r in pt_osint["results"]: if "name" in r: print("-%s %s" % 
(r["name"], r["sourceUrl"])) else: print("-%s" % (r["sourceUrl"])) else: print("PT: Nothing found!") else: print("PT: Nothing found!") if len(malware) > 0: print('----------------- Malware') for r in sorted(malware, key=lambda x: x["date"]): print("[%s] %s %s" % ( r["source"], r["hash"], r["date"].strftime("%Y-%m-%d") ) ) if len(files) > 0: print('----------------- Files') for r in files: if r['date'] != '': print("[%s] %s (%s)" % ( r["source"], r["hash"], r["date"].strftime("%Y-%m-%d") ) ) else: print("[%s] %s" % ( r["source"], r["hash"], ) ) if len(urls) > 0: print('----------------- Urls') for r in sorted(urls, key=lambda x: x["date"], reverse=True): print("[%s] %s - %s %s" % ( r["source"], r["url"], r["ip"], r["date"].strftime("%Y-%m-%d") ) ) # TODO: add ASN + location info here if len(passive_dns) > 0: print('----------------- Passive DNS') for r in sorted(passive_dns, key=lambda x: x["first"], reverse=True): print("[+] %-40s (%s -> %s)(%s)" % ( r["ip"], r["first"].strftime("%Y-%m-%d"), r["last"].strftime("%Y-%m-%d"), r["source"] ) ) else: self.parser.print_help() else: self.parser.print_help()
def __init__(self):
    """Initialize the VirusTotal client, request-rate bookkeeping and cache."""
    # Payload validator and VT API client (project-provided types).
    self.vd = Validator()
    self.vt = VirusTotal(apikey.APIKEY_1)
    # Rate-limit bookkeeping: request count in the current window plus the
    # wall-clock time the window started.
    self.vt_req_counter = 0
    self.vt_req_timer = time.time()
    # In-memory lookup cache, empty until populated.
    self.cache = {}
class VirusTotal(Processor):
    """Processor that looks up (and optionally submits) samples on VirusTotal
    and converts the scan report into tags, IOCs and a detection summary."""

    name = "virustotal"
    category = "sandbox"
    default_options = {
        "report_sleep": 60,   # seconds between report polls after a submit
        "retry_count": 10,    # polls before giving up on a pending report
        "send_files": True,   # upload samples VT does not already know
        "enabled": False,
    }
    required_options = ["api_key"]
    filetypes_exclude = FILETYPES_ARCHIVE + ["text/url"]

    def setup(self):
        """Create the VT API client from the configured key and key type."""
        self.vt_key = self.options.get("api_key")
        self.vt_type = self.options.get("key_type", "")
        if self.vt_type == "private":
            self.vt = PrivateApi(key=self.vt_key)
        elif self.vt_type in ("", "public"):
            # An unset key_type is treated as a public key.
            self.vt = PublicApi(key=self.vt_key)
        else:
            # BUG FIX: an unrecognized key_type previously left self.vt
            # unset, surfacing later as an opaque AttributeError.
            raise ValueError("Unknown VirusTotal key_type: %r" % self.vt_type)

    def get(self, file_hash):
        """Return the VT report dict for file_hash, or None if unavailable.

        BUG FIX: response codes were compared with `is not`, which relies on
        CPython small-int interning; value equality (!=) is the correct test.
        """
        result = self.vt.get_file_report(file_hash)
        if result["response_code"] != VT_RESPONSE_OK:
            return None
        report = result["results"]
        if report["response_code"] != VT_SCAN_OK:
            return None
        return report

    def scan(self, sample_data):
        """Submit sample_data to VT and poll until the report is ready.

        Raises:
            RuntimeError: when no report appears after retry_count polls of
                report_sleep seconds each.
        """
        retry_count = 0
        scan_request = self.vt.scan_file(sample_data, from_disk=False)
        file_hash = scan_request["results"]["sha256"]
        while retry_count <= self.options.get("retry_count"):
            sleep(self.options.get("report_sleep"))
            report = self.get(file_hash)
            if report:
                return report
            retry_count += 1
        raise RuntimeError("Maximum retries waiting for scan result for %s" % file_hash)

    def process(self, sample):
        """Look up (or submit) a sample and summarize its AV detections."""
        file_hash = hash_data(sample["data"])
        report = self.get(file_hash)
        if not report:
            self.logger.info("Sample %s not found on VirusTotal" % file_hash)
            if not self.options.get("send_files"):
                return {"scan_id": "not found"}
            self.logger.info("Sending %s to VirusTotal" % file_hash)
            report = self.scan(sample["data"])
        detections = []
        for av, res in report["scans"].items():
            if res["detected"]:
                self.parse_av_tags(res["result"])
                detections.append({
                    "av": av,
                    "version": res["version"],
                    "result": res["result"],
                })
        if report["positives"] > 0:
            self.add_tag("malware")
        # BUG FIX: this previously read self.key_type, which is never set
        # anywhere; setup() stores the configured type in self.vt_type, so
        # the private-key-only enrichment below was unreachable (AttributeError).
        if self.vt_type == "private":
            if len(report["tags"]) > 1:
                self.parse_tags(report["tags"])
            if len(report["ITW_urls"]):
                # add ITW urls as URL indicator
                for url in report["ITW_urls"]:
                    self.add_ioc("urls", url)
        return {
            "scan_id": report["scan_id"],
            "positives": report["positives"],
            "scan_date": report["scan_date"],
            "detections": sorted(detections, key=itemgetter("av")),
        }

    def parse_av_tags(self, malware_name):
        """Map substrings of an AV detection name onto generic malware tags."""
        if in_string(["banker", "banload"], malware_name):
            self.add_tag("malware-banker")
        if in_string(["trojan"], malware_name):
            self.add_tag("malware-trojan")
        if in_string(["bot"], malware_name):
            self.add_tag("malware-botnet")
        if in_string(["rat"], malware_name):
            self.add_tag("malware-rat")

    def parse_tags_malware(self, tag):
        # Known packer names imply a packed sample.
        if tag in ["upx", "asprox", "themida"]:
            self.add_tag("malware-packed")

    def parse_tags_documents(self, tag):
        if tag == "macros":
            self.add_tag("document-contains-macros")
        if tag.startswith("auto-"):
            self.add_tag("document-contains-{0}".format(tag))
        if tag.endswith("-file"):
            self.add_tag("document-contains-{0}".format(tag))
        if tag == "powershell":
            self.add_tag("document-contains-powershell")

    def parse_tags_pdf(self, tag):
        if tag == "js-embedded":
            self.add_tag("pdf-contains-javascript")
        if tag == "flash-embedded":
            self.add_tag("pdf-contains-flash")
        if tag == "autoaction":
            self.add_tag("pdf-contains-autoaction")
        if tag == "acroform":
            self.add_tag("pdf-contains-acroform")
        if tag == "launch-action":
            self.add_tag("pdf-contains-launchaction")
        if tag == "file-embedded":
            self.add_tag("pdf-contains-embeddedfiles")

    def parse_tags_flash(self, tag):
        if tag == "obfuscated":
            self.add_tag("flash-obfuscated")
        if tag == "javascript":
            self.add_tag("flash-contains-javascript")
        ignore_tags = ["flash-embedded", "js-embedded", "file-embedded"]
        if tag.endswith("-embedded") and tag not in ignore_tags:
            # swap $type-embedded. i.e., converts to 'flash-contains-embedded-exe', etc.
            parts = tag.split("-")
            # BUG FIX: the original format string was "flash-contains-{0}-"
            # with a single placeholder, silently dropping the second
            # argument and emitting e.g. "flash-contains-embedded-".
            self.add_tag("flash-contains-{0}-{1}".format(parts[1], parts[0]))

    def parse_tags(self, sample_tags):
        """Dispatch each VT sample tag to the category-specific parsers."""
        for tag in sample_tags:
            # malware packer specific
            self.parse_tags_malware(tag)
            # osx specific
            if tag == "dropper":
                self.add_tag("malware-dropper")
            # Tags to be captured as-is
            as_is_tags = ["encrypted", "exploit"]
            if tag in as_is_tags:
                self.add_tag(tag)
            # cves
            if "cve" in tag:
                self.add_tag(tag.lower())
            # document specific tags
            self.parse_tags_documents(tag)
            # pdf specific tags
            self.parse_tags_pdf(tag)
            # flash specific tags
            self.parse_tags_flash(tag)
def run(self):
    """Watch the Windows process list (via `wmic`) and check every new
    executable's MD5 against VirusTotal, logging whether it is known.

    Builds a baseline of currently running executables first; afterwards any
    path not in the baseline is hashed, looked up once, and added to the
    baseline so it is not re-queried.
    """
    vt_logger = logging.getLogger('vtmonitor')
    self.banner()
    # Perform some sanity checks
    try:
        self.checkenv()
    except Exception as error:
        vt_logger.critical(error)
        return
    self.check_updates()
    vt_logger.debug('==================================================')
    vt_logger.debug('Application restart')
    vt_logger.debug('==================================================')
    # Pick the API flavor matching the configured key.
    if self.private_api:
        vt = VirusTotalPrivateApi(self.api_key)
    else:
        vt = VirusTotalPublicApi(self.api_key)
    # Create baseline of processes
    # NOTE(review): assumes Python 2 where check_output returns str; under
    # Python 3 this is bytes and .replace('\r', '\n') would need bytes
    # arguments — confirm target interpreter.
    vt_logger.info('Creating base list of allowed process')
    wmic = subprocess.check_output("wmic process get ExecutablePath", shell=True)
    wmic = wmic.replace('\r', '\n')
    base_list = set([])
    for process in wmic.split('\n'):
        process = process.strip()
        if not process:
            continue
        base_list.add(process)
    vt_logger.info("Starting main loop to watch for new processes")
    # Poll once per second forever; only paths absent from the baseline are
    # hashed and sent to VT.
    while True:
        sleep(1)
        wmic = subprocess.check_output("wmic process get ExecutablePath", shell=True)
        wmic = wmic.replace('\r', '\n')
        for process in wmic.split('\n'):
            process = process.strip()
            if not process:
                continue
            if process in base_list:
                continue
            vt_logger.debug("Unknown process %s, checking the hash on Virus Total" % process)
            # New process, let's submit to VT for details
            # (MD5 is only used as a lookup key for the VT report here.)
            with open(process, 'rb') as handle:
                data = handle.read()
                signature = hashlib.md5(data).hexdigest()
                response = vt.get_file_report(signature)
                if response['results'].get('response_code') == 0:
                    vt_logger.warn("Process %s is unknown on Virus Total" % process)
                else:
                    vt_logger.info("Process %s has a known signature on Virus Total" % process)
                # and add it to the base list, otherwise it will keep pinging VT all the time
                base_list.add(process)
with open(filename, 'rb') as f: m = hashlib.sha256() while True: data = f.read(8192) if not data: break m.update(data) return m.hexdigest() API_KEY = '0d8a7ee919ee20042cc31d7565986da4b995703d4089f2cee967ae10a5462b91' file = "/srv/jathushan/data/apk/top1000/upload_manual/za.co.vodacom.android.app.apk" EICAR_MD5 = "671793b66a4d6130e22c9fd101b122ea2b55ecb423bf061ddc91e65be51ffc0d" vt = VirusTotalPrivateApi(API_KEY) response = vt.get_file_report(EICAR_MD5) # print(json.dumps(response, sort_keys=False, indent=4)) s = int(sys.argv[1]) # aa = np.load("/home/jathushan/2018/app_analysis/data/uploaded.npy") # c = 0 aa = os.listdir("/srv/jathushan/data/apk/top1000/main/top1000/") c = 0 if ((s + 1) * 100 > 950): aa = aa[s * 100:-1] else: aa = aa[s * 100:(s + 1) * 100]
from __future__ import print_function import json import hashlib import pymysql from virus_total_apis import PrivateApi as VirusTotalPrivateApi from datetime import datetime API_KEY = 'APIKEY' vt = VirusTotalPrivateApi(API_KEY) resobj = [] with open ('sha.txt', 'r', encoding='utf-8') as istr: for line in istr: mysha = line print(line) response = vt.get_file_report(mysha) resobj.append(response) timestamp=datetime.now() variable="C:\\VT_Query_Automation\\data_" cur_day_format = timestamp.strftime("%Y-%m-%d_%H") with open(variable+cur_day_format+".json", "w", encoding='utf-8') as write_file: json.dump(resobj, write_file, ensure_ascii=False)
class VirusTotalAnalyzer(Analyzer):
    """Cortex analyzer wrapping the VirusTotal v2 API.

    Services: scan (file/url submission), rescan (by hash), get (report for
    domain/fqdn/ip/file/hash/url) and download (fetch a sample, private API).
    """

    def __init__(self):
        Analyzer.__init__(self)
        self.service = self.get_param("config.service", None, "Service parameter is missing")
        self.virustotal_key = self.get_param("config.key", None, "Missing VirusTotal API key")
        self.polling_interval = self.get_param("config.polling_interval", 60)
        self.rescan_hash_older_than_days = self.get_param(
            "config.rescan_hash_older_than_days", None)
        self.highlighted_antivirus = self.get_param(
            "config.highlighted_antivirus", None)
        self.download_sample = self.get_param("config.download_sample", False)
        self.download_sample_if_highlighted = self.get_param(
            "config.download_sample_if_highlighted", False)
        self.obs_path = None
        self.proxies = self.get_param("config.proxy", None)
        # Downloading samples requires the private API.
        if (self.download_sample or self.download_sample_if_highlighted
                or self.service == "download"):
            self.vt_pay = PrivateApi(self.virustotal_key, self.proxies)
        self.vt = PublicApi(self.virustotal_key, self.proxies)

    def get_file(self, hash):
        """Download the sample for `hash` into a temp file, guessing its
        extension; the final path is recorded in self.obs_path."""
        self.obs_path = "{}/{}".format(tempfile.gettempdir(), hash)
        response = self.vt_pay.get_file(hash)
        if response.get("response_code", None) == 200:
            with open(self.obs_path, "wb") as f:
                f.write(response["results"])
            kind = filetype.guess(self.obs_path)
            if kind and kind.extension is not None:
                os.rename(self.obs_path, "{}.{}".format(self.obs_path, kind.extension))
                self.obs_path = "{}.{}".format(self.obs_path, kind.extension)

    def wait_file_report(self, id):
        """Poll until the file report for scan `id` is ready, optionally
        download the sample, then report the results."""
        results = self.check_response(self.vt.get_file_report(id))
        code = results.get("response_code", None)
        if code == 1:
            if self.data_type == "hash" and (
                    self.download_sample
                    or (self.download_sample_if_highlighted
                        and self.highlighted_antivirus
                        and any([
                            results.get("scans", {}).get(av, {}).get("detected", None) == False
                            for av in self.highlighted_antivirus
                        ]))):
                self.get_file(self.get_param("data", None, "Data is missing"))
            self.report(results)
        else:
            time.sleep(self.polling_interval)
            self.wait_file_report(id)

    def wait_url_report(self, id):
        """Poll until the URL report for scan `id` is ready, then report it."""
        results = self.check_response(self.vt.get_url_report(id))
        code = results.get("response_code", None)
        if code == 1 and (results.get("scan_id") == id):
            self.report(results)
        else:
            time.sleep(self.polling_interval)
            self.wait_url_report(id)

    def check_response(self, response):
        """Validate a raw API envelope and return its 'results' payload,
        calling self.error() (which terminates) on transport failures."""
        if type(response) is not dict:
            self.error("Bad response : " + str(response))
        status = response.get("response_code", -1)
        if status == 204:
            self.error("VirusTotal api rate limit exceeded (Status 204).")
        if status != 200:
            self.error("Bad status : " + str(status))
        results = response.get("results", {})
        if "Missing IP address" in results.get("verbose_msg", ""):
            results["verbose_msg"] = "IP address not available in VirusTotal"
        return results

    # response_code inside results: 0 => not found, -2 => in queue, 1 => ready
    def read_scan_response(self, response, func):
        """Extract the scan_id from a scan submission and hand it to the
        matching wait_* poller."""
        results = self.check_response(response)
        code = results.get("response_code", None)
        scan_id = results.get("scan_id", None)
        if code == 1 and scan_id is not None:
            func(scan_id)
        else:
            self.error("Scan not found")

    def artifacts(self, raw):
        """Expose the downloaded sample (if any) as a file artifact."""
        artifacts = []
        if self.obs_path:
            tags = []
            # This will work only in scan/rescan workflow, not in download only
            if self.highlighted_antivirus:
                for av in self.highlighted_antivirus:
                    detected = raw.get("scans", {}).get(av, {}).get("detected", None)
                    if detected == False:
                        tags.append("to_{}".format(av))
            artifacts.append(
                self.build_artifact("file", self.obs_path, tags=tags))
        return artifacts

    def summary(self, raw):
        """Build the short taxonomy summary shown in TheHive."""
        taxonomies = []
        level = "info"
        namespace = "VT"
        predicate = "GetReport"
        value = "0"
        if self.service == "scan":
            predicate = "Scan"
        elif self.service == "rescan":
            predicate = "Rescan"
        elif self.service == "download":
            # Download-only runs carry no verdict.
            return {"taxonomies": taxonomies}
        result = {"has_result": True}
        if raw["response_code"] != 1:
            result["has_result"] = False
        result["positives"] = raw.get("positives", 0)
        result["total"] = raw.get("total", 0)
        if "scan_date" in raw:
            result["scan_date"] = raw["scan_date"]
        if self.service == "get":
            if "scans" in raw:
                result["scans"] = len(raw["scans"])
                value = "{}/{}".format(result["positives"], result["total"])
                if result["positives"] == 0:
                    level = "safe"
                elif result["positives"] < 5:
                    level = "suspicious"
                else:
                    level = "malicious"
            if "resolutions" in raw:
                result["resolutions"] = len(raw["resolutions"])
                value = "{} resolution(s)".format(result["resolutions"])
                if result["resolutions"] == 0:
                    level = "safe"
                elif result["resolutions"] < 5:
                    level = "suspicious"
                else:
                    level = "malicious"
            if "detected_urls" in raw:
                result["detected_urls"] = len(raw["detected_urls"])
                value = "{} detected_url(s)".format(result["detected_urls"])
                if result["detected_urls"] == 0:
                    level = "safe"
                elif result["detected_urls"] < 5:
                    level = "suspicious"
                else:
                    level = "malicious"
            if "detected_downloaded_samples" in raw:
                result["detected_downloaded_samples"] = len(
                    raw["detected_downloaded_samples"])
        if self.service in ["scan", "rescan"]:
            if "scans" in raw:
                result["scans"] = len(raw["scans"])
                value = "{}/{}".format(result["positives"], result["total"])
                if result["positives"] == 0:
                    level = "safe"
                elif result["positives"] < 5:
                    level = "suspicious"
                else:
                    level = "malicious"
        taxonomies.append(
            self.build_taxonomy(level, namespace, predicate, value))
        if self.highlighted_antivirus:
            for av in self.highlighted_antivirus:
                detected = raw.get("scans", {}).get(av, {}).get("detected", None)
                if detected == False:
                    taxonomies.append(
                        self.build_taxonomy("info", namespace, av, "Not detected!"))
        return {"taxonomies": taxonomies}

    def run(self):
        """Dispatch on the configured service and data type."""
        if self.service == "scan":
            if self.data_type == "file":
                filename = self.get_param("filename", "noname.ext")
                filepath = self.get_param("file", None, "File is missing")
                self.read_scan_response(
                    self.vt.scan_file(filepath, from_disk=True, filename=filename),
                    self.wait_file_report,
                )
            elif self.data_type == "url":
                data = self.get_param("data", None, "Data is missing")
                self.read_scan_response(self.vt.scan_url(data), self.wait_url_report)
            else:
                self.error("Invalid data type")
        elif self.service == "rescan":
            if self.data_type == "hash":
                data = self.get_param("data", None, "Data is missing")
                self.read_scan_response(self.vt.rescan_file(data), self.wait_file_report)
            else:
                self.error("Invalid data type")
        elif self.service == "download":
            if self.data_type == "hash":
                data = self.get_param("data", None, "Data is missing")
                self.get_file(data)
                self.report({"message": "file downloaded"})
        elif self.service == "get":
            if self.data_type == "domain":
                data = self.get_param("data", None, "Data is missing")
                results = self.check_response(self.vt.get_domain_report(data))
            elif self.data_type == "fqdn":
                data = self.get_param("data", None, "Data is missing")
                results = self.check_response(self.vt.get_domain_report(data))
            elif self.data_type == "ip":
                data = self.get_param("data", None, "Data is missing")
                results = self.check_response(self.vt.get_ip_report(data))
            elif self.data_type == "file":
                hashes = self.get_param("attachment.hashes", None)
                if hashes is None:
                    filepath = self.get_param("file", None, "File is missing")
                    # FIX: close the file handle (was a leaked open().read()).
                    with open(filepath, "rb") as f:
                        file_hash = hashlib.sha256(f.read()).hexdigest()
                else:
                    file_hash = next(h for h in hashes if len(h) == 64)
                results = self.check_response(self.vt.get_file_report(file_hash))
            elif self.data_type == "hash":
                data = self.get_param("data", None, "Data is missing")
                results = self.check_response(self.vt.get_file_report(data))
            elif self.data_type == "url":
                data = self.get_param("data", None, "Data is missing")
                results = self.check_response(self.vt.get_url_report(data))
            else:
                self.error("Invalid data type")
            # if aged and enabled rescan
            if self.data_type == "hash" and self.rescan_hash_older_than_days:
                # BUG FIX: the original computed scan_date - now, which is
                # negative for any past scan, so the age-based rescan could
                # never trigger; the age is now - scan_date.
                scan_age = datetime.now() - datetime.strptime(
                    results["scan_date"], "%Y-%m-%d %H:%M:%S")
                if scan_age.days > self.rescan_hash_older_than_days:
                    self.read_scan_response(self.vt.rescan_file(data), self.wait_file_report)
            # download if hash, dangerous and not seen by av
            if (self.data_type == "hash"
                    and (results.get("response_code", None) == 1)
                    and (results.get("positives", 0) >= 5)
                    and (self.download_sample
                         or (self.download_sample_if_highlighted
                             and self.highlighted_antivirus
                             and any([
                                 results.get("scans", {}).get(av, {}).get("detected", None) == False
                                 for av in self.highlighted_antivirus
                             ])))):
                self.get_file(data)
            self.report(results)
        else:
            self.error("Invalid service")
class RuleEngine(object):
    """Generates Snort rules for URLs that VirusTotal flags as malicious.

    NOTE: Python 2 code (dict.iteritems, str.decode) — kept as-is.
    """

    def __init__(self):
        # VirusTotal public-API rate limiting: 4 requests per minute.
        self.vt_req_counter = 0
        self.vt_req_timer = time.time()
        self.vd = Validator()
        self.vt = VirusTotal(apikey.APIKEY_1)
        # url-sha1 -> [url, positives]; persisted through pickle_tool.
        self.cache = {}

    def iterpcap(self, path):
        """Yield every .pcap file found under `path` (recursive)."""
        for dirPath, dirNames, fileNames in os.walk(path):
            for f in fileNames:
                # BUG FIX: the original tested f.split('.')[1] == 'pcap',
                # which raises IndexError for dot-less names and misses
                # names like 'capture.1.pcap'.
                if f.endswith('.pcap'):
                    yield os.path.join(dirPath, f)

    def _iterpayload(self, path):
        """Yield (payload, 5-tuple) for every non-empty TCP segment."""
        connection = utils.follow_tcp_stream(path)
        for conn, frame in connection.iteritems():
            for seq, content in frame.iteritems():
                # Packets without payload are skipped.
                if content:
                    # Generate the content and 5-tuple
                    yield content, conn

    def _check_timer_counter(self):
        """After every 4th VT request, sleep out the rest of the minute."""
        if self.vt_req_counter == 4:
            self.vt_req_counter = 0
            period = time.time() - self.vt_req_timer
            waiting = 60 - period + 1
            if waiting > 0:
                logger.info("Waiting %s seconds", (str(waiting)))
                time.sleep(waiting)
            self.vt_req_timer = time.time()

    def _make_rule(self, content, uricontent, dst_port, sid=0):
        """Build a SnortRule matching the given Host header and URI."""
        rule = SnortRule()
        pattern = dict()
        pattern['msg'] = '"Trojan.Gen.uricontent"'
        pattern['content'] = ['"{host}"'.format(host=content), 'nocase']
        pattern['uricontent'] = ['"{uri}"'.format(uri=uricontent), 'nocase']
        # pattern['sid'] = sid
        pattern['dst_port'] = dst_port
        rule.set_malicious_pattern(**pattern)
        return rule

    def _get_url_positive(self, resource):
        """Return the cached/queried VT positives count for `resource`,
        or None when no report exists yet (the URL is then submitted)."""
        urlkey = hashlib.sha1(resource).hexdigest()
        if urlkey in self.cache:  # direct membership test, not .keys()
            return self.cache.get(urlkey)[1]
        self.vt_req_counter += 1
        logger.info("Search on VirusTotal counter: %s", str(self.vt_req_counter))
        response = self.vt.get_url_report(resource)
        if response.get('error') is not None:
            logger.info("Error: {e}".format(e=response.get('error')))
            sys.exit(0)
        results = response.get('results')
        positives = results.get('positives')
        self._check_timer_counter()
        # BUG FIX: test None before the numeric comparison; `None >= 0`
        # raises TypeError on Python 3 and was only accidentally False
        # on Python 2.
        if positives is None:
            logger.info('''No report. 
Submmit the URL to VirusTotal countert: %s''', str(self.vt_req_counter))
            self.vt.scan_url(resource)
            self._check_timer_counter()
            return None
        elif positives >= 0:
            self.cache[urlkey] = [resource, positives]
            return positives
        else:
            logger.debug("Get reports failed.")
            return None

    def gen_rule(self, pcap_path):
        """Yield Snort rules for malicious URLs observed in `pcap_path`."""
        self.cache = pickle_tool.check_json()
        for content, conn in self._iterpayload('%s' % (pcap_path)):
            get_method = self.vd.is_get_method(content)
            host = self.vd.is_hsot(content)
            if not (host and get_method):
                continue
            if get_method.group(1) == '/':
                url = self.vd.is_valid_url(host.group(1).rstrip())
            else:
                url = self.vd.is_valid_url(
                    host.group(1).rstrip() + get_method.group(1))
            if url is not None:
                # Discard URLs that are not valid UTF-8, logging them aside.
                try:
                    url.group(0).decode('utf-8')
                except UnicodeDecodeError:
                    with open('invalid_utf8', 'a') as fp:
                        fp.write('{u}\n'.format(u=url.group(0)))
                    url = None
            if url is not None:
                host_content = host.group(0).rstrip()
                uricontent = get_method.group(1)
                pos = self._get_url_positive(url.group(0))
                # BUG FIX: `pos > 0` with pos=None raises TypeError on
                # Python 3; truthiness keeps the same semantics (skip
                # None and 0) on both interpreters.
                if pos:
                    if uricontent == '/':
                        uricontent = None
                    rule = self._make_rule(host_content, uricontent, conn[3], 0)
                    with open('uricontent.rules', 'a') as fp:
                        fp.write('{r}\n'.format(r=str(rule)))
                    yield rule
                # positives == 0 or positives is None: nothing to emit
        pickle_tool.update_json(self.cache)
class RuleEngine(object):
    """Generates Snort rules for URLs that VirusTotal flags as malicious.

    NOTE: Python 2 code (dict.iteritems, str.decode) — kept as-is.
    """

    def __init__(self):
        # VirusTotal public-API rate limiting: 4 requests per minute.
        self.vt_req_counter = 0
        self.vt_req_timer = time.time()
        self.vd = Validator()
        self.vt = VirusTotal(apikey.APIKEY_1)
        # url-sha1 -> [url, positives]; persisted through pickle_tool.
        self.cache = {}

    def iterpcap(self, path):
        """Yield every .pcap file found under `path` (recursive)."""
        for dirPath, dirNames, fileNames in os.walk(path):
            for f in fileNames:
                # BUG FIX: the original tested f.split('.')[1] == 'pcap',
                # which raises IndexError for dot-less names and misses
                # names like 'capture.1.pcap'.
                if f.endswith('.pcap'):
                    yield os.path.join(dirPath, f)

    def _iterpayload(self, path):
        """Yield (payload, 5-tuple) for every non-empty TCP segment."""
        connection = utils.follow_tcp_stream(path)
        for conn, frame in connection.iteritems():
            for seq, content in frame.iteritems():
                # Packets without payload are skipped.
                if content:
                    # Generate the content and 5-tuple
                    yield content, conn

    def _check_timer_counter(self):
        """After every 4th VT request, sleep out the rest of the minute."""
        if self.vt_req_counter == 4:
            self.vt_req_counter = 0
            period = time.time() - self.vt_req_timer
            waiting = 60 - period + 1
            if waiting > 0:
                logger.info("Waiting %s seconds", (str(waiting)))
                time.sleep(waiting)
            self.vt_req_timer = time.time()

    def _make_rule(self, content, uricontent, dst_port, sid=0):
        """Build a SnortRule matching the given Host header and URI."""
        rule = SnortRule()
        pattern = dict()
        pattern['msg'] = '"Trojan.Gen.uricontent"'
        pattern['content'] = ['"{host}"'.format(host=content), 'nocase']
        pattern['uricontent'] = ['"{uri}"'.format(uri=uricontent), 'nocase']
        # pattern['sid'] = sid
        pattern['dst_port'] = dst_port
        rule.set_malicious_pattern(**pattern)
        return rule

    def _get_url_positive(self, resource):
        """Return the cached/queried VT positives count for `resource`,
        or None when no report exists yet (the URL is then submitted)."""
        urlkey = hashlib.sha1(resource).hexdigest()
        if urlkey in self.cache:  # direct membership test, not .keys()
            return self.cache.get(urlkey)[1]
        self.vt_req_counter += 1
        logger.info("Search on VirusTotal counter: %s", str(self.vt_req_counter))
        response = self.vt.get_url_report(resource)
        if response.get('error') is not None:
            logger.info("Error: {e}".format(e=response.get('error')))
            sys.exit(0)
        results = response.get('results')
        positives = results.get('positives')
        self._check_timer_counter()
        # BUG FIX: test None before the numeric comparison; `None >= 0`
        # raises TypeError on Python 3 and was only accidentally False
        # on Python 2.
        if positives is None:
            logger.info('''No report. 
Submmit the URL to VirusTotal countert: %s''', str(self.vt_req_counter))
            self.vt.scan_url(resource)
            self._check_timer_counter()
            return None
        elif positives >= 0:
            self.cache[urlkey] = [resource, positives]
            return positives
        else:
            logger.debug("Get reports failed.")
            return None

    def gen_rule(self, pcap_path):
        """Yield Snort rules for malicious URLs observed in `pcap_path`."""
        self.cache = pickle_tool.check_json()
        for content, conn in self._iterpayload('%s' % (pcap_path)):
            get_method = self.vd.is_get_method(content)
            host = self.vd.is_hsot(content)
            if not (host and get_method):
                continue
            if get_method.group(1) == '/':
                url = self.vd.is_valid_url(host.group(1).rstrip())
            else:
                url = self.vd.is_valid_url(
                    host.group(1).rstrip() + get_method.group(1))
            if url is not None:
                # Discard URLs that are not valid UTF-8, logging them aside.
                try:
                    url.group(0).decode('utf-8')
                except UnicodeDecodeError:
                    with open('invalid_utf8', 'a') as fp:
                        fp.write('{u}\n'.format(u=url.group(0)))
                    url = None
            if url is not None:
                host_content = host.group(0).rstrip()
                uricontent = get_method.group(1)
                pos = self._get_url_positive(url.group(0))
                # BUG FIX: `pos > 0` with pos=None raises TypeError on
                # Python 3; truthiness keeps the same semantics (skip
                # None and 0) on both interpreters.
                if pos:
                    if uricontent == '/':
                        uricontent = None
                    rule = self._make_rule(host_content, uricontent, conn[3], 0)
                    with open('uricontent.rules', 'a') as fp:
                        fp.write('{r}\n'.format(r=str(rule)))
                    yield rule
                # positives == 0 or positives is None: nothing to emit
        pickle_tool.update_json(self.cache)
def run(self, conf, args, plugins):
    """Dispatch the IP command's subcommands.

    'info'  : geo/ASN lookup (MaxMind + pyasn) plus pivot links.
    'intel' : aggregate passive DNS, URLs, malware and file samples for
              the IP from MISP, BinaryEdge, OTX, Robtex, PassiveTotal,
              UrlHaus, VirusTotal (private API only), GreyNoise,
              ThreatGrid and ThreatMiner, then print a report.

    Each third-party source is gated on plugins[...].test_config(conf),
    so missing credentials simply skip that source.
    """
    if 'subcommand' in args:
        if args.subcommand == 'info':
            if not is_ip(unbracket(args.IP)):
                print("Invalid IP address")
                sys.exit(1)
            # FIXME: move code here in a library
            ip = unbracket(args.IP)
            try:
                ipy = IP(ip)
            except ValueError:
                print('Invalid IP format, quitting...')
                return
            ipinfo = self.ipinfo(ip)
            print('MaxMind: Located in %s, %s' % (ipinfo['city'], ipinfo['country']))
            if ipinfo['asn'] == 0:
                print("MaxMind: IP not found in the ASN database")
            else:
                print('MaxMind: ASN%i, %s' % (ipinfo['asn'], ipinfo['asn_name']))
                print('CAIDA Type: %s' % ipinfo['asn_type'])
            try:
                asndb2 = pyasn.pyasn(self.asncidr)
                res = asndb2.lookup(ip)
            except OSError:
                # pyasn database files are downloaded by 'harpoon update'.
                print("Configuration files are not available")
                print("Please run harpoon update before using harpoon")
                sys.exit(1)
            if res[1] is None:
                print("IP not found in ASN database")
            else:
                # Search for name: linear scan of the pipe-delimited
                # ASN-name file for the matching ASN number.
                f = open(self.asnname, 'r')
                found = False
                line = f.readline()
                name = ''
                while not found and line != '':
                    s = line.split('|')
                    if s[0] == str(res[0]):
                        name = s[1].strip()
                        found = True
                    line = f.readline()
                print('ASN %i - %s (range %s)' % (res[0], name, res[1]))
            if ipinfo['hostname'] != '':
                print('Hostname: %s' % ipinfo['hostname'])
            if ipinfo['specific'] != '':
                print("Specific: %s" % ipinfo['specific'])
            if ipy.iptype() == "PRIVATE":
                # NOTE(review): bare string is a no-op — presumably a
                # missing print("Private IP"); confirm intent.
                "Private IP"
            print("")
            if ipy.version() == 4:
                # Pivot links for manual investigation (IPv4 only).
                print("Censys:\t\thttps://censys.io/ipv4/%s" % ip)
                print("Shodan:\t\thttps://www.shodan.io/host/%s" % ip)
                print("IP Info:\thttp://ipinfo.io/%s" % ip)
                print("BGP HE:\t\thttps://bgp.he.net/ip/%s" % ip)
                print(
                    "IP Location:\thttps://www.iplocation.net/?query=%s" % ip)
        elif args.subcommand == "intel":
            if not is_ip(unbracket(args.IP)):
                print("Invalid IP address")
                sys.exit(1)
            # Start with MISP and OTX to get Intelligence Reports
            print('###################### %s ###################' % unbracket(args.IP))
            # Accumulators filled by every source below, printed at the end.
            passive_dns = []
            urls = []
            malware = []
            files = []
            # MISP
            misp_e = plugins['misp'].test_config(conf)
            if misp_e:
                print('[+] Downloading MISP information...')
                server = ExpandedPyMISP(conf['Misp']['url'], conf['Misp']['key'])
                misp_results = server.search('attributes', value=unbracket(args.IP))
            # Binary Edge
            be_e = plugins['binaryedge'].test_config(conf)
            if be_e:
                try:
                    print('[+] Downloading BinaryEdge information...')
                    be = BinaryEdge(conf['BinaryEdge']['key'])
                    # FIXME: this only get the first page
                    res = be.domain_ip(unbracket(args.IP))
                    for d in res["events"]:
                        # BinaryEdge only exposes updated_at, so first == last.
                        passive_dns.append({
                            "domain": d['domain'],
                            "first": parse(d['updated_at']).astimezone(pytz.utc),
                            "last": parse(d['updated_at']).astimezone(pytz.utc),
                            "source": "BinaryEdge"
                        })
                except BinaryEdgeException:
                    print(
                        'BinaryEdge request failed, you need a paid subscription'
                    )
            # OTX
            otx_e = plugins['otx'].test_config(conf)
            if otx_e:
                print('[+] Downloading OTX information....')
                otx = OTXv2(conf["AlienVaultOtx"]["key"])
                res = otx.get_indicator_details_full(
                    IndicatorTypes.IPv4, unbracket(args.IP))
                otx_pulses = res["general"]["pulse_info"]["pulses"]
                # Get Passive DNS
                if "passive_dns" in res:
                    for r in res["passive_dns"]["passive_dns"]:
                        passive_dns.append({
                            "domain": r['hostname'],
                            "first": parse(r["first"]).astimezone(pytz.utc),
                            "last": parse(r["last"]).astimezone(pytz.utc),
                            "source": "OTX"
                        })
                if "url_list" in res:
                    for r in res["url_list"]["url_list"]:
                        # "result" may be absent; fall back to an empty ip.
                        if "result" in r:
                            urls.append({
                                "date": parse(r["date"]).astimezone(pytz.utc),
                                "url": r["url"],
                                "ip": r["result"]["urlworker"]["ip"] if "ip" in r["result"]["urlworker"] else "",
                                "source": "OTX"
                            })
                        else:
                            urls.append({
                                "date": parse(r["date"]).astimezone(pytz.utc),
                                "url": r["url"],
                                "ip": "",
                                "source": "OTX"
                            })
            # RobTex
            print('[+] Downloading Robtex information....')
            rob = Robtex()
            try:
                res = rob.get_ip_info(unbracket(args.IP))
            except RobtexError:
                print("Error with Robtex")
            else:
                # pas/pash/act/acth are Robtex's forward/reverse DNS buckets.
                for d in ["pas", "pash", "act", "acth"]:
                    if d in res:
                        for a in res[d]:
                            passive_dns.append({
                                'first': a['date'].astimezone(pytz.utc),
                                'last': a['date'].astimezone(pytz.utc),
                                'domain': a['o'],
                                'source': 'Robtex'
                            })
            # PT
            pt_e = plugins['pt'].test_config(conf)
            if pt_e:
                out_pt = False
                print('[+] Downloading Passive Total information....')
                client = DnsRequest(conf['PassiveTotal']['username'],
                    conf['PassiveTotal']['key'])
                try:
                    raw_results = client.get_passive_dns(
                        query=unbracket(args.IP))
                    if "results" in raw_results:
                        for res in raw_results["results"]:
                            passive_dns.append({
                                "first": parse(res["firstSeen"]).astimezone(
                                    pytz.utc),
                                "last": parse(res["lastSeen"]).astimezone(
                                    pytz.utc),
                                "domain": res["resolve"],
                                "source": "PT"
                            })
                    if "message" in raw_results:
                        if "quota_exceeded" in raw_results["message"]:
                            # Quota exhausted: skip the enrichment calls below.
                            print("Quota exceeded for Passive Total")
                            out_pt = True
                    pt_osint = {}
                except requests.exceptions.ReadTimeout:
                    print("Timeout on Passive Total requests")
                if not out_pt:
                    try:
                        client2 = EnrichmentRequest(
                            conf["PassiveTotal"]["username"],
                            conf["PassiveTotal"]['key'])
                        # Get OSINT
                        # TODO: add PT projects here
                        pt_osint = client2.get_osint(
                            query=unbracket(args.IP))
                        # Get malware
                        raw_results = client2.get_malware(
                            query=unbracket(args.IP))
                        if "results" in raw_results:
                            for r in raw_results["results"]:
                                malware.append({
                                    'hash': r["sample"],
                                    'date': parse(r['collectionDate']),
                                    'source': 'PT (%s)' % r["source"]
                                })
                    except requests.exceptions.ReadTimeout:
                        print("Timeout on Passive Total requests")
            # Urlhaus
            uh_e = plugins['urlhaus'].test_config(conf)
            if uh_e:
                print("[+] Checking urlhaus data...")
                try:
                    urlhaus = UrlHaus(conf["UrlHaus"]["key"])
                    res = urlhaus.get_host(unbracket(args.IP))
                except UrlHausError:
                    print("Error with the query")
                else:
                    if "urls" in res:
                        for r in res['urls']:
                            urls.append({
                                "date": parse(r["date_added"]).astimezone(
                                    pytz.utc),
                                "url": r["url"],
                                "source": "UrlHaus"
                            })
            # VT
            vt_e = plugins['vt'].test_config(conf)
            if vt_e:
                # Only the private API exposes the IP-report endpoint used here.
                if conf["VirusTotal"]["type"] != "public":
                    print('[+] Downloading VT information....')
                    vt = PrivateApi(conf["VirusTotal"]["key"])
                    res = vt.get_ip_report(unbracket(args.IP))
                    if "results" in res:
                        if "resolutions" in res['results']:
                            for r in res["results"]["resolutions"]:
                                # VT only gives last_resolved; use it for both.
                                passive_dns.append({
                                    "first": parse(r["last_resolved"]).astimezone(
                                        pytz.utc),
                                    "last": parse(r["last_resolved"]).astimezone(
                                        pytz.utc),
                                    "domain": r["hostname"],
                                    "source": "VT"
                                })
                        if "undetected_downloaded_samples" in res[
                                'results']:
                            for r in res['results'][
                                    'undetected_downloaded_samples']:
                                files.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']),
                                    'source': 'VT'
                                })
                        if "undetected_referrer_samples" in res['results']:
                            for r in res['results'][
                                    'undetected_referrer_samples']:
                                if 'date' in r:
                                    files.append({
                                        'hash': r['sha256'],
                                        'date': parse(r['date']),
                                        'source': 'VT'
                                    })
                                else:
                                    #FIXME : should consider data without dates
                                    files.append({
                                        'hash': r['sha256'],
                                        'date': datetime.datetime(1970, 1, 1),
                                        'source': 'VT'
                                    })
                        if "detected_downloaded_samples" in res['results']:
                            for r in res['results'][
                                    'detected_downloaded_samples']:
                                malware.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']),
                                    'source': 'VT'
                                })
                        if "detected_referrer_samples" in res['results']:
                            for r in res['results'][
                                    'detected_referrer_samples']:
                                if "date" in r:
                                    malware.append({
                                        'hash': r['sha256'],
                                        'date': parse(r['date']),
                                        'source': 'VT'
                                    })
                else:
                    # Public key: treat VT as unconfigured for the report.
                    vt_e = False
            print('[+] Downloading GreyNoise information....')
            gn = GreyNoise()
            try:
                greynoise = gn.query_ip(unbracket(args.IP))
            except GreyNoiseError:
                greynoise = []
            tg_e = plugins['threatgrid'].test_config(conf)
            if tg_e:
                print('[+] Downloading Threat Grid....')
                try:
                    tg = ThreatGrid(conf['ThreatGrid']['key'])
                    res = tg.search_samples(unbracket(args.IP), type='ip')
                    # Deduplicate samples by sha256.
                    already = []
                    if 'items' in res:
                        for r in res['items']:
                            if r['sample_sha256'] not in already:
                                d = parse(r['ts'])
                                # Strip tz so dates sort with naive ones below.
                                d = d.replace(tzinfo=None)
                                malware.append({
                                    'hash': r["sample_sha256"],
                                    'date': d,
                                    'source': 'TG'
                                })
                                already.append(r['sample_sha256'])
                except ThreatGridError as e:
                    print("Error with threat grid: {}".format(e.message))
            # ThreatMiner
            print('[+] Downloading ThreatMiner....')
            tm = ThreatMiner()
            response = tm.get_report(unbracket(args.IP))
            if response['status_code'] == '200':
                tmm = response['results']
            else:
                tmm = []
                # 404 just means "nothing known"; anything else is an error.
                if response['status_code'] != '404':
                    print("Request to ThreatMiner failed: {}".format(
                        response['status_message']))
            response = tm.get_related_samples(unbracket(args.IP))
            if response['status_code'] == '200':
                for r in response['results']:
                    malware.append({
                        'hash': r,
                        'date': None,
                        'source': 'ThreatMiner'
                    })
            # ---- Reporting: everything below only prints what was gathered.
            print('----------------- Intelligence Report')
            ctor = CommandTor()
            tor_list = ctor.get_list()
            if tor_list:
                if unbracket(args.IP) in tor_list:
                    print("{} is a Tor Exit node".format(unbracket(
                        args.IP)))
            else:
                print("Impossible to reach the Tor Exit Node list")
            if otx_e:
                if len(otx_pulses):
                    print('OTX:')
                    for p in otx_pulses:
                        print('- %s (%s - %s)' % (
                            p['name'],
                            p['created'][:10],
                            "https://otx.alienvault.com/pulse/" + p['id']))
                else:
                    print('OTX: Not found in any pulse')
            if misp_e:
                if len(misp_results['Attribute']) > 0:
                    print('MISP:')
                    for event in misp_results['Attribute']:
                        print("- {} - {}".format(
                            event['Event']['id'],
                            event['Event']['info']))
            if len(greynoise) > 0:
                print("GreyNoise: IP identified as")
                for r in greynoise:
                    print("\t%s (%s -> %s)" % (
                        r["name"],
                        r["first_seen"],
                        r["last_updated"]))
            else:
                print("GreyNoise: Not found")
            if pt_e:
                if "results" in pt_osint:
                    if len(pt_osint["results"]):
                        if len(pt_osint["results"]) == 1:
                            if "name" in pt_osint["results"][0]:
                                print(
                                    "PT: %s %s" % (
                                        pt_osint["results"][0]["name"],
                                        pt_osint["results"][0]["sourceUrl"]))
                            else:
                                print("PT: %s" % pt_osint["results"][0]["sourceUrl"])
                        else:
                            print("PT:")
                            for r in pt_osint["results"]:
                                if "name" in r:
                                    print("-%s %s" % (r["name"], r["sourceUrl"]))
                                else:
                                    print("-%s" % r["sourceUrl"])
                    else:
                        print("PT: Nothing found!")
                else:
                    print("PT: Nothing found!")
            # ThreatMiner
            if len(tmm) > 0:
                print("ThreatMiner:")
                for r in tmm:
                    print("- {} {} - {}".format(
                        r['year'],
                        r['filename'],
                        r['URL']))
            if len(malware) > 0:
                print('----------------- Malware')
                for r in malware:
                    # ThreatMiner entries have date None; print empty instead.
                    print("[%s] %s %s" % (
                        r["source"],
                        r["hash"],
                        r["date"].strftime("%Y-%m-%d") if r["date"] else ""))
            if len(files) > 0:
                print('----------------- Files')
                for r in sorted(files, key=lambda x: x["date"]):
                    print("[%s] %s %s" % (
                        r["source"],
                        r["hash"],
                        r["date"].strftime("%Y-%m-%d")))
            if len(passive_dns) > 0:
                print('----------------- Passive DNS')
                for r in sorted(passive_dns, key=lambda x: x["first"], reverse=True):
                    print("[+] %-40s (%s -> %s)(%s)" % (
                        r["domain"],
                        r["first"].strftime("%Y-%m-%d"),
                        r["last"].strftime("%Y-%m-%d"),
                        r["source"]))
            if len(urls) > 0:
                print('----------------- Urls')
                for r in sorted(urls, key=lambda x: x["date"], reverse=True):
                    print("[%s] %s - %s" % (
                        r["source"],
                        r["url"],
                        r["date"].strftime("%Y-%m-%d")))
        else:
            self.parser.print_help()
    else:
        self.parser.print_help()
def run(self, conf, args, plugins):
    """Dispatch the IP command's subcommands (earlier variant of `run`).

    'info'  : geo/ASN lookup (MaxMind + pyasn) plus pivot links.
    'intel' : aggregate passive DNS, URLs, malware and file samples from
              OTX, Robtex, PassiveTotal, VirusTotal (private API only),
              GreyNoise and ThreatGrid, then print a report.

    NOTE(review): this duplicates the other `run` in this file minus
    MISP/BinaryEdge/UrlHaus/ThreatMiner/Tor, timezone normalisation and
    most network error handling — presumably an older revision kept
    alongside the new one; confirm whether it should be removed.
    """
    if 'subcommand' in args:
        if args.subcommand == 'info':
            if not is_ip(unbracket(args.IP)):
                print("Invalid IP address")
                sys.exit(1)
            # FIXME: move code here in a library
            ip = unbracket(args.IP)
            try:
                ipy = IP(ip)
            except ValueError:
                print('Invalid IP format, quitting...')
                return
            ipinfo = self.ipinfo(ip)
            print('MaxMind: Located in %s, %s' % (ipinfo['city'], ipinfo['country']))
            if ipinfo['asn'] == 0:
                print("MaxMind: IP not found in the ASN database")
            else:
                print('MaxMind: ASN%i, %s' % (ipinfo['asn'], ipinfo['asn_name']))
            asndb2 = pyasn.pyasn(self.asncidr)
            res = asndb2.lookup(ip)
            if res[1] is None:
                print("IP not found in ASN database")
            else:
                # Search for name: linear scan of the pipe-delimited
                # ASN-name file for the matching ASN number.
                f = open(self.asnname, 'r')
                found = False
                line = f.readline()
                name = ''
                while not found and line != '':
                    s = line.split('|')
                    if s[0] == str(res[0]):
                        name = s[1].strip()
                        found = True
                    line = f.readline()
                print('ASN %i - %s (range %s)' % (res[0], name, res[1]))
            if ipinfo['hostname'] != '':
                print('Hostname: %s' % ipinfo['hostname'])
            if ipinfo['specific'] != '':
                print("Specific: %s" % ipinfo['specific'])
            if ipy.iptype() == "PRIVATE":
                # NOTE(review): bare string is a no-op — presumably a
                # missing print("Private IP"); confirm intent.
                "Private IP"
            print("")
            if ipy.version() == 4:
                # Pivot links for manual investigation (IPv4 only).
                print("Censys:\t\thttps://censys.io/ipv4/%s" % ip)
                print("Shodan:\t\thttps://www.shodan.io/host/%s" % ip)
                print("IP Info:\thttp://ipinfo.io/%s" % ip)
                print("BGP HE:\t\thttps://bgp.he.net/ip/%s" % ip)
                print(
                    "IP Location:\thttps://www.iplocation.net/?query=%s" % ip)
        elif args.subcommand == "intel":
            if not is_ip(unbracket(args.IP)):
                print("Invalid IP address")
                sys.exit(1)
            # Start with MISP and OTX to get Intelligence Reports
            print('###################### %s ###################' % unbracket(args.IP))
            # Accumulators filled by every source below, printed at the end.
            passive_dns = []
            urls = []
            malware = []
            files = []
            # OTX
            otx_e = plugins['otx'].test_config(conf)
            if otx_e:
                print('[+] Downloading OTX information....')
                otx = OTXv2(conf["AlienVaultOtx"]["key"])
                res = otx.get_indicator_details_full(
                    IndicatorTypes.IPv4, unbracket(args.IP))
                otx_pulses = res["general"]["pulse_info"]["pulses"]
                # Get Passive DNS
                if "passive_dns" in res:
                    for r in res["passive_dns"]["passive_dns"]:
                        passive_dns.append({
                            "domain": r['hostname'],
                            "first": parse(r["first"]),
                            "last": parse(r["last"]),
                            "source": "OTX"
                        })
                if "url_list" in res:
                    for r in res["url_list"]["url_list"]:
                        # Raw OTX url records appended as-is (no reshaping).
                        urls.append(r)
            # RobTex
            print('[+] Downloading Robtex information....')
            rob = Robtex()
            res = rob.get_ip_info(unbracket(args.IP))
            # pas/pash/act/acth are Robtex's forward/reverse DNS buckets.
            for d in ["pas", "pash", "act", "acth"]:
                if d in res:
                    for a in res[d]:
                        passive_dns.append({
                            'first': a['date'],
                            'last': a['date'],
                            'domain': a['o'],
                            'source': 'Robtex'
                        })
            # PT
            pt_e = plugins['pt'].test_config(conf)
            if pt_e:
                out_pt = False
                print('[+] Downloading Passive Total information....')
                client = DnsRequest(conf['PassiveTotal']['username'],
                    conf['PassiveTotal']['key'])
                raw_results = client.get_passive_dns(
                    query=unbracket(args.IP))
                if "results" in raw_results:
                    for res in raw_results["results"]:
                        passive_dns.append({
                            "first": parse(res["firstSeen"]),
                            "last": parse(res["lastSeen"]),
                            "domain": res["resolve"],
                            "source": "PT"
                        })
                if "message" in raw_results:
                    if "quota_exceeded" in raw_results["message"]:
                        # Quota exhausted: skip the enrichment calls below.
                        print("Quota exceeded for Passive Total")
                        out_pt = True
                pt_osint = {}
                if not out_pt:
                    client2 = EnrichmentRequest(
                        conf["PassiveTotal"]["username"],
                        conf["PassiveTotal"]['key'])
                    # Get OSINT
                    # TODO: add PT projects here
                    pt_osint = client2.get_osint(query=unbracket(args.IP))
                    # Get malware
                    raw_results = client2.get_malware(
                        query=unbracket(args.IP))
                    if "results" in raw_results:
                        for r in raw_results["results"]:
                            malware.append({
                                'hash': r["sample"],
                                'date': parse(r['collectionDate']),
                                'source': 'PT (%s)' % r["source"]
                            })
            # VT
            vt_e = plugins['vt'].test_config(conf)
            if vt_e:
                # Only the private API exposes the IP-report endpoint used here.
                if conf["VirusTotal"]["type"] != "public":
                    print('[+] Downloading VT information....')
                    vt = PrivateApi(conf["VirusTotal"]["key"])
                    res = vt.get_ip_report(unbracket(args.IP))
                    if "results" in res:
                        if "resolutions" in res['results']:
                            for r in res["results"]["resolutions"]:
                                # VT only gives last_resolved; use it for both.
                                passive_dns.append({
                                    "first": parse(r["last_resolved"]),
                                    "last": parse(r["last_resolved"]),
                                    "domain": r["hostname"],
                                    "source": "VT"
                                })
                        if "undetected_downloaded_samples" in res[
                                'results']:
                            for r in res['results'][
                                    'undetected_downloaded_samples']:
                                files.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']),
                                    'source': 'VT'
                                })
                        if "undetected_referrer_samples" in res['results']:
                            for r in res['results'][
                                    'undetected_referrer_samples']:
                                files.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']),
                                    'source': 'VT'
                                })
                        if "detected_downloaded_samples" in res['results']:
                            for r in res['results'][
                                    'detected_downloaded_samples']:
                                malware.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']),
                                    'source': 'VT'
                                })
                        if "detected_referrer_samples" in res['results']:
                            for r in res['results'][
                                    'detected_referrer_samples']:
                                if "date" in r:
                                    malware.append({
                                        'hash': r['sha256'],
                                        'date': parse(r['date']),
                                        'source': 'VT'
                                    })
                else:
                    # Public key: treat VT as unconfigured for the report.
                    vt_e = False
            print('[+] Downloading GreyNoise information....')
            gn = GreyNoise()
            try:
                greynoise = gn.query_ip(unbracket(args.IP))
            except GreyNoiseError:
                greynoise = []
            tg_e = plugins['threatgrid'].test_config(conf)
            if tg_e:
                print('[+] Downloading Threat Grid....')
                tg = ThreatGrid(conf['ThreatGrid']['key'])
                res = tg.search_samples(unbracket(args.IP), type='ip')
                # Deduplicate samples by sha256.
                already = []
                if 'items' in res:
                    for r in res['items']:
                        if r['sample_sha256'] not in already:
                            d = parse(r['ts'])
                            # Strip tz so dates sort with naive ones below.
                            d = d.replace(tzinfo=None)
                            malware.append({
                                'hash': r["sample_sha256"],
                                'date': d,
                                'source': 'TG'
                            })
                            already.append(r['sample_sha256'])
            # TODO: Add MISP
            # ---- Reporting: everything below only prints what was gathered.
            print('----------------- Intelligence Report')
            if otx_e:
                if len(otx_pulses):
                    print('OTX:')
                    for p in otx_pulses:
                        print(' -%s (%s - %s)' % (
                            p['name'],
                            p['created'][:10],
                            "https://otx.alienvault.com/pulse/" + p['id']))
                else:
                    print('OTX: Not found in any pulse')
            if len(greynoise) > 0:
                print("GreyNoise: IP identified as")
                for r in greynoise:
                    print("\t%s (%s -> %s)" % (
                        r["name"],
                        r["first_seen"],
                        r["last_updated"]))
            else:
                print("GreyNoise: Not found")
            if pt_e:
                if "results" in pt_osint:
                    if len(pt_osint["results"]):
                        if len(pt_osint["results"]) == 1:
                            if "name" in pt_osint["results"][0]:
                                print(
                                    "PT: %s %s" % (
                                        pt_osint["results"][0]["name"],
                                        pt_osint["results"][0]["sourceUrl"]))
                            else:
                                print("PT: %s" % pt_osint["results"][0]["sourceUrl"])
                        else:
                            print("PT:")
                            for r in pt_osint["results"]:
                                if "name" in r:
                                    print("-%s %s" % (r["name"], r["sourceUrl"]))
                                else:
                                    print("-%s" % r["sourceUrl"])
                    else:
                        print("PT: Nothing found!")
                else:
                    print("PT: Nothing found!")
            if len(malware) > 0:
                print('----------------- Malware')
                for r in sorted(malware, key=lambda x: x["date"]):
                    print("[%s] %s %s" % (
                        r["source"],
                        r["hash"],
                        r["date"].strftime("%Y-%m-%d")))
            if len(files) > 0:
                print('----------------- Files')
                for r in sorted(files, key=lambda x: x["date"]):
                    print("[%s] %s %s" % (
                        r["source"],
                        r["hash"],
                        r["date"].strftime("%Y-%m-%d")))
            if len(passive_dns) > 0:
                print('----------------- Passive DNS')
                for r in sorted(passive_dns, key=lambda x: x["first"], reverse=True):
                    print("[+] %-40s (%s -> %s)(%s)" % (
                        r["domain"],
                        r["first"].strftime("%Y-%m-%d"),
                        r["last"].strftime("%Y-%m-%d"),
                        r["source"]))
        else:
            self.parser.print_help()
    else:
        self.parser.print_help()
# Search VirusTotal Intelligence for today's LNK samples, persist the raw
# search result, then fetch and persist a full file report for every hash.
# Relies on `val1` (output path prefix) and `cur_day_format` (date string)
# defined earlier in the file.
val2 = 'fs:' + cur_day_format
url = 'https://www.virustotal.com/vtapi/v2/file/search'
params = {
    'apikey': 'APIKEY',
    # NOTE(review): the two adjacent literals concatenate, so the query is
    # "<fs:date>+ engines:lnk tag:lnk avira:clean" — confirm that the '+'
    # followed by a space is really the intended VT search syntax.
    'query': val2 + '+' ' engines:lnk tag:lnk avira:clean'
}
response = requests.get(url, params=params)
data = response.json()

### file write ###
# Persist the raw search result for this day.
with open(val1 + cur_day_format + '.json', 'w', encoding='utf-8') as outfile:
    json.dump(data, outfile, ensure_ascii=False)

# Fix: use the in-memory response directly instead of re-reading and
# re-parsing the file we just wrote — the round-trip was pure extra I/O.
obj = data

### full report generate ###
API_KEY = 'APIKEY'
vt = VirusTotalPrivateApi(API_KEY)
resobj = []
if 'hashes' in obj:
    # One private-API report request per sha256 returned by the search.
    for sha256_hash in obj['hashes']:
        resobj.append(vt.get_file_report(sha256_hash))

with open("data_" + cur_day_format + ".json", "w", encoding='utf-8') as write_file:
    json.dump(resobj, write_file, ensure_ascii=False)