def run(self, conf, args, plugins):
    """Archive the given URL (args.URL) in several caching platforms.

    Tries archive.is and the Web Archive unconditionally, and Perma.cc
    only when an API key is configured. Failures are reported but never
    abort the run, so the remaining platforms are still attempted.
    """
    print("Saving in cache platforms:")
    # Archive.is
    try:
        ai_url = ArchiveIs.capture(unbracket(args.URL))
    # FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # Exception is broad on purpose (best-effort archiving), but no broader.
    except Exception:
        print("Impossible to save in archive.is, weird")
    else:
        print("Archive.is: %s" % ai_url)
    # Web Archive
    try:
        ao_url = ArchiveOrg.capture(unbracket(args.URL))
    except Exception:
        print("Impossible to save in web archive, weird.")
    else:
        print("Web Archive: %s" % ao_url)
    # Perma.cc — only if an API key is present in the configuration
    if 'Permacc' in conf and 'key' in conf['Permacc']:
        pc = Permacc(conf['Permacc']['key'])
        try:
            saved = pc.archive_create(unbracket(args.URL))
        except PermaccError:
            print("Impossible to save in Permacc")
        else:
            print("Permacc: https://perma.cc/%s" % saved["guid"])
def run(self, conf, args, plugins):
    """Tell whether the given IP address is a known Tor exit node."""
    ip = unbracket(args.IP)
    if not is_ip(ip):
        print("Invalid IP address")
        sys.exit(-1)
    exit_nodes = self.get_list()
    # Empty / falsy list means the download of the public list failed
    if not exit_nodes:
        print("Impossible to reach the Tor Exit node list")
        return
    if ip in exit_nodes:
        print("{} is a Tor Exit node".format(ip))
    else:
        print("{} is not listed in the Tor Exit node public list".format(ip))
def run(self, conf, args, plugins):
    """Query the URLhaus API for URLs, hosts, payloads, tags or signatures.

    `get-sample` downloads the raw sample and writes it to a file named
    after its hash; every other subcommand pretty-prints the JSON answer.
    """
    urlhaus = UrlHaus(conf["UrlHaus"]["key"])
    if "subcommand" in args:
        try:
            if args.subcommand == "get-url":
                res = urlhaus.get_url(args.url)
                print(json.dumps(res, sort_keys=False, indent=4))
            elif args.subcommand == "get-host":
                res = urlhaus.get_host(unbracket(args.host))
                print(json.dumps(res, sort_keys=False, indent=4))
            elif args.subcommand == "get-payload":
                res = urlhaus.get_payload(args.payload)
                print(json.dumps(res, sort_keys=False, indent=4))
            elif args.subcommand == "get-tag":
                res = urlhaus.get_tag(args.tag)
                print(json.dumps(res, sort_keys=False, indent=4))
            elif args.subcommand == "get-signature":
                res = urlhaus.get_signature(args.signature)
                print(json.dumps(res, sort_keys=False, indent=4))
            elif args.subcommand == "get-sample":
                data = urlhaus.get_sample(args.hash)
                if data:
                    with open(args.hash, "wb") as f:
                        f.write(data)
                    print("Sample saved as {}".format(args.hash))
                else:
                    print("Sample not found")
            else:
                self.parser.print_help()
        except UrlHausError:
            # FIX: raw string — "\_" was an invalid escape sequence
            # (DeprecationWarning, SyntaxError in future Python versions);
            # the printed text is unchanged.
            print(r"UrlHaus : query failed ¯\_(ツ)_/¯")
    else:
        self.parser.print_help()
def run(self, conf, args, plugins):
    """Check one domain, or a file of domains, against the Umbrella top list.

    `domain` looks up a single domain's rank; `list` reads domains from
    args.FILE and prints those present in the top list with their position.
    """
    if 'subcommand' in args:
        if args.subcommand == 'domain':
            rank = self.check(unbracket(args.DOMAIN))
            if rank:
                print("Found ranked {}".format(rank))
            else:
                print("Not found")
        elif args.subcommand == 'list':
            # Load the top list: CSV lines "rank,domain" -> {domain: rank}
            umbrella = {}
            with open(self.topfile) as f:
                for l in f.read().split('\n'):
                    if l.strip() == '':
                        continue
                    ll = l.strip().split(',')
                    umbrella[ll[1]] = ll[0]
            with open(args.FILE) as f:
                data = f.read().split('\n')
            # FIX: unconditional data.remove('') raised ValueError when the
            # input file did not end with a newline (no '' element to remove)
            if '' in data:
                data.remove('')
            for d in data:
                if d.strip() == "":
                    continue
                if d.strip() in umbrella.keys():
                    print("{} in the umbrella list at {} position".format(
                        d.strip(), umbrella[d.strip()]))
        else:
            self.parser.print_help()
    else:
        self.parser.print_help()
def run(self, conf, args, plugins):
    """Look up the first indicator provided (ip/email/domain/antivirus/file)
    on ThreatCrowd and pretty-print the result."""
    client = ThreatCrowd()
    try:
        # Compute the result for the first matching indicator, then print
        # it once at the end instead of in every branch.
        if args.ip:
            result = client.ip(unbracket(args.ip))
        elif args.email:
            result = client.email(args.email)
        elif args.domain:
            result = client.domain(unbracket(args.domain))
        elif args.antivirus:
            result = client.antivirus(args.antivirus)
        elif args.file:
            result = client.file(args.file)
        else:
            self.parser.print_help()
            return
        self.pretty_print(result)
    except ThreatCrowdError as e:
        print("Query failed: {}".format(e.message))
def asncount():
    """
    Take a list of IP addresses as an IP and count them by ASN
    """
    parser = argparse.ArgumentParser(description='Count IP addresses by ASN')
    parser.add_argument('IP', type=str, nargs='*', default=[],
                        help="IP addresses")
    args = parser.parse_args()
    # Fall back to stdin when no address was passed on the command line
    if args.IP:
        ips = args.IP
    else:
        with open("/dev/stdin") as f:
            ips = f.read().split()
    ipc = CommandIp()
    asnc = CommandAsn()
    counts = {}
    bad_input = False
    for ip in ips:
        if not is_ip(unbracket(ip)):
            print('%s is not a valid IP address' % ip)
            bad_input = True
            continue
        asn = ipc.ip_get_asn(unbracket(ip))['asn']
        counts[asn] = counts.get(asn, 0) + 1
    # Separate the error lines from the summary with a blank line
    if bad_input:
        print('')
    for asnn, nb in sorted(counts.items(), key=lambda x: x[1], reverse=True):
        name = "Unknown" if asnn == 0 else asnc.asnname(asnn)
        print("%i\tASN%-6i\t%s" % (nb, asnn, name))
def run(self, conf, args, plugins):
    """Query CIRCL Passive DNS for args.DOMAIN and print the JSON answer."""
    credentials = (conf['Circl']['user'], conf['Circl']['pass'])
    pdns = pypdns.PyPDNS(basic_auth=credentials)
    results = pdns.query(unbracket(args.DOMAIN))
    print(json.dumps(results, sort_keys=True, indent=4,
                     separators=(',', ': '), default=json_serial))
def get_subdomains(self, domain):
    """Return subdomains of *domain* seen in crt.sh certificates.

    Collects the certificate common name plus any SAN entries, de-duplicated.
    """
    crt = Crtsh()
    names = set()
    for entry in crt.search(unbracket(domain)):
        cert = crt.get(entry["id"], type="id")
        names.add(cert["subject"]["commonName"])
        # Subject Alternative Names also count as subdomains
        if "alternative_names" in cert["extensions"]:
            names.update(cert["extensions"]["alternative_names"])
    return list(names)
def run(self, conf, args, plugins):
    """Enumerate subdomains via Censys certificates, PassiveTotal and/or
    VirusTotal, depending on args.source."""
    domain = unbracket(args.DOMAIN)

    def censys_search():
        # Search subdomains through a search in Censys certificates
        try:
            self.censys_certs(domain, conf, args.verbose)
        except CensysRateLimitExceededException:
            print('Quota exceeded!')

    if args.source == 'all':
        if plugins['censys'].test_config(conf):
            censys_search()
        # Get subdomains through passive total
        if plugins['pt'].test_config(conf):
            self.pt(domain, conf, args.verbose)
        if plugins['vt'].test_config(conf):
            self.vt(domain, conf, args.verbose)
    elif args.source == 'censys':
        if plugins['censys'].test_config(conf):
            censys_search()
        else:
            print('Please configure your Censys credentials')
    elif args.source == 'pt':
        if plugins['pt'].test_config(conf):
            self.pt(domain, conf, args.verbose)
        else:
            print('Please configure your Passive Total credentials')
    elif args.source == 'vt':
        if plugins['vt'].test_config(conf):
            self.vt(domain, conf, args.verbose)
        else:
            print('Please configure your VirusTotal credentials')
def run(self, conf, args, plugins):
    """Query the CertSpotter certificate-transparency API.

    `search` lists certificates for a domain (expired certs require an API
    key); `cert` fetches one certificate by SHA256.
    """
    try:
        cs = CertSpotter(conf['CertSpotter']['key'])
    except KeyError:
        # No key configured: fall back to unauthenticated access
        cs = CertSpotter()
    if 'subcommand' in args:
        if args.subcommand == 'search':
            if args.all:
                if cs.authenticated:
                    try:
                        res = cs.list(unbracket(args.DOMAIN), expired=True)
                    except CertSpotterError:
                        print(
                            "Error with the API, likely because you need a paid plan to search expired certs. Check censys or crtsh plugins instead"
                        )
                        sys.exit(1)
                else:
                    # FIX: typo "certificated" -> "certificates"
                    print("API key needed for expired certificates")
                    sys.exit(1)
            else:
                res = cs.list(unbracket(args.DOMAIN))
            print(
                json.dumps(res,
                           sort_keys=True,
                           indent=4,
                           separators=(',', ': '),
                           default=json_serial))
        elif args.subcommand == "cert":
            res = cs.get_cert(args.SHA256)
            print(
                json.dumps(res,
                           sort_keys=True,
                           indent=4,
                           separators=(',', ': '),
                           default=json_serial))
        else:
            self.parser.print_help()
    else:
        self.parser.print_help()
def run(self, conf, args, plugins):
    """Query the IBM X-Force Exchange API and pretty-print the JSON answer."""
    if 'subcommand' not in args:
        self.parser.print_help()
        return
    xe = XforceExchange(conf['Xforce']['key'], conf['Xforce']['password'])

    def show(result):
        # Every subcommand shares the same JSON pretty-printing
        print(json.dumps(result, sort_keys=False, indent=4))

    if args.subcommand == "ip_reputation":
        show(xe.ip_reputation(unbracket(args.IP)))
    elif args.subcommand == 'ip':
        show(xe.ip(unbracket(args.IP)))
    elif args.subcommand == 'ip_malware':
        show(xe.ip_malware(unbracket(args.IP)))
    elif args.subcommand == 'search':
        show(xe.search(args.QUERY))
    elif args.subcommand == 'dns':
        show(xe.dns(args.INPUT))
    elif args.subcommand == 'casefile':
        show(xe.casefile(args.ID))
    elif args.subcommand == 'malware':
        try:
            show(xe.malware(args.HASH))
        except XforceExchangeNotFound:
            print("Not found")
    elif args.subcommand == 'url':
        show(xe.url(args.URL))
    elif args.subcommand == 'usage':
        show(xe.usage())
    elif args.subcommand == 'whois':
        show(xe.whois(unbracket(args.DOMAIN)))
    else:
        self.parser.print_help()
def run(self, conf, args, plugins):
    """Query cybercure.ai for one IP address, or for a file of IPs (CSV out)."""
    cybercure = CyberCure(token='reserved_for_future')
    if 'subcommand' not in args:
        self.parser.print_help()
        return
    if args.subcommand == 'ip':
        try:
            infos = cybercure.get_infos(unbracket(args.IP))
        except CyberCureError:
            print("Invalid request")
        else:
            print(json.dumps(infos, sort_keys=True, indent=4,
                             separators=(',', ': ')))
    elif args.subcommand == 'file':
        if not os.path.isfile(args.FILE):
            print("This file does not exist")
            sys.exit(1)
        with open(args.FILE) as f:
            lines = f.read().split("\n")
            print("IP;Exists;Details")
            for line in lines:
                if line.strip() == '':
                    continue
                ip = unbracket(line.strip())
                try:
                    infos = cybercure.get_infos(ip)
                except CyberCureError:
                    # Query failed: keep the CSV shape with empty columns
                    print("%s;;" % ip)
                else:
                    print("%s;%s;%s" % (ip, infos['exists'],
                                        infos['visual'] if 'visual' in infos else ''))
    else:
        self.parser.print_help()
def countrycount():
    """
    Count country from which IPs are
    """
    parser = argparse.ArgumentParser(
        description='Count IP addresses by Country')
    parser.add_argument('IP', type=str, nargs='*', default=[],
                        help="IP addresses")
    args = parser.parse_args()
    # Fall back to stdin when no address was passed on the command line
    if args.IP:
        ips = args.IP
    else:
        with open("/dev/stdin") as f:
            ips = f.read().split()
    ipc = CommandIp()
    counts = {}
    bad_input = False
    for ip in ips:
        if not is_ip(unbracket(ip)):
            print('%s is not a valid IP address' % ip)
            bad_input = True
            continue
        country = ipc.ipinfo(unbracket(ip), dns=False)['country']
        counts[country] = counts.get(country, 0) + 1
    # Separate the error lines from the summary with a blank line
    if bad_input:
        print('')
    for cnn, nb in sorted(counts.items(), key=lambda x: x[1], reverse=True):
        print("%i\t%s" % (nb, cnn))
def run(self, conf, args, plugins):
    """Print geolocation, ASN and reference links for an IP address."""
    ip = unbracket(args.IP)
    if not is_ip(ip):
        print("Invalid IP address")
        sys.exit(1)
    # FIXME: move code here in a library
    ipinfo = self.ipinfo(ip)
    print("MaxMind: Located in %s, %s" % (ipinfo["city"], ipinfo["country"]))
    if ipinfo["asn"] == 0:
        print("MaxMind: IP not found in the ASN database")
    else:
        print("MaxMind: ASN%i, %s" % (ipinfo["asn"], ipinfo["asn_name"]))
        print("CAIDA Type: %s" % ipinfo["asn_type"])
    try:
        asndb2 = pyasn.pyasn(self.asncidr)
        res = asndb2.lookup(ip)
    except OSError:
        print("Configuration files are not available")
        print("Please run harpoon update before using harpoon")
        sys.exit(1)
    if res[1] is None:
        print("IP not found in ASN database")
    else:
        # Search the "AS|NAME" flat file for the ASN's name
        # FIX: file handle was never closed; use a context manager
        found = False
        name = ""
        with open(self.asnname, "r") as f:
            line = f.readline()
            while not found and line != "":
                s = line.split("|")
                if s[0] == str(res[0]):
                    name = s[1].strip()
                    found = True
                line = f.readline()
        print("ASN %i - %s (range %s)" % (res[0], name, res[1]))
    if ipinfo["hostname"] != "":
        print("Hostname: %s" % ipinfo["hostname"])
    if ipinfo["specific"] != "":
        print("Specific: %s" % ipinfo["specific"])
    ipy = IP(ip)
    if ipy.iptype() == "PRIVATE":
        # FIX: was a bare string statement (a no-op); actually report it
        print("Private IP")
    print("")
    if ipy.version() == 4:
        print("Censys:\t\thttps://censys.io/ipv4/%s" % ip)
        print("Shodan:\t\thttps://www.shodan.io/host/%s" % ip)
        print("IP Info:\thttp://ipinfo.io/%s" % ip)
        print("BGP HE:\t\thttps://bgp.he.net/ip/%s" % ip)
        print("IP Location:\thttps://www.iplocation.net/?query=%s" % ip)
def run(self, conf, args, plugins):
    """Query the PulseDive API (indicator, links, properties or threat)."""
    client = PulseDive(conf['PulseDive']['key'])
    if 'subcommand' not in args:
        self.parser.print_help()
        return
    # Pick the query for the subcommand, then print once at the end
    if args.subcommand == "indicator":
        data = client.indicators_by_value(unbracket(args.IOC),
                                          historical=args.history)
    elif args.subcommand == "links":
        data = client.indicators_by_value_links(args.IOC)
    elif args.subcommand == "properties":
        data = client.indicators_by_value_properties(args.IOC)
    elif args.subcommand == "threat":
        data = client.threat(args.THREAT)
    else:
        self.parser.print_help()
        return
    print(json.dumps(data, sort_keys=True, indent=4))
def run(self, conf, args, plugins):
    """Query the BinaryEdge API (host, search, dataleaks, domain)."""
    be = BinaryEdge(conf['BinaryEdge']['key'])
    try:
        if hasattr(args, 'which'):
            if args.which == 'ip':
                # Flags are mutually prioritized in this order
                if args.score:
                    res = be.host_score(unbracket(args.IP))
                elif args.image:
                    res = be.image_ip(unbracket(args.IP))
                elif args.torrent:
                    if args.historical:
                        res = be.torrent_historical_ip(unbracket(args.IP))
                    else:
                        res = be.torrent_ip(unbracket(args.IP))
                elif args.historical:
                    res = be.host_historical(unbracket(args.IP))
                elif args.dns:
                    res = be.domain_ip(args.IP, page=args.page)
                else:
                    res = be.host(unbracket(args.IP))
                print(json.dumps(res, sort_keys=True, indent=4))
            elif args.which == 'search':
                if args.image:
                    res = be.image_search(args.SEARCH, page=args.page)
                else:
                    res = be.host_search(args.SEARCH, page=args.page)
                print(json.dumps(res, sort_keys=True, indent=4))
            elif args.which == 'dataleaks':
                if args.domain:
                    res = be.dataleaks_organization(args.EMAIL)
                else:
                    res = be.dataleaks_email(args.EMAIL)
                print(json.dumps(res, sort_keys=True, indent=4))
            elif args.which == 'domain':
                if args.subdomains:
                    res = be.domain_subdomains(args.DOMAIN, page=args.page)
                else:
                    res = be.domain_dns(args.DOMAIN, page=args.page)
                print(json.dumps(res, sort_keys=True, indent=4))
            else:
                self.parser.print_help()
        else:
            self.parser.print_help()
    except ValueError as e:
        # FIX: stdlib ValueError has no .message attribute in Python 3;
        # formatting the exception itself prints its message
        print('Invalid Value: %s' % e)
    except BinaryEdgeNotFound:
        print('Search term not found')
    except BinaryEdgeException as e:
        print('Error: %s' % e.message)
def run(self, conf, args, plugins):
    """Query ipinfo.io for one IP (JSON output) or for a file of IPs (CSV).

    The CSV branch emits a fixed 14-column line per IP; columns that the
    API answer does not contain are left empty so the header always matches.
    """
    ipinfo = IPInfo(token=conf['IPInfo']['token'])
    if 'subcommand' in args:
        if args.subcommand == 'ip':
            try:
                infos = ipinfo.get_infos(unbracket(args.IP))
            except IPInfoError:
                print("Invalid request")
            else:
                print(json.dumps(infos, sort_keys=True, indent=4,
                                 separators=(',', ': ')))
        elif args.subcommand == 'file':
            if os.path.isfile(args.FILE):
                with open(args.FILE) as f:
                    data = f.read().split("\n")
                    # 14-column CSV header
                    print("IP;Hostname;City;Region;Country;Location;Company Name;Company Domain;Company Type;ASN;AS Name;AS Domain;AS Route;AS Type")
                    for d in data:
                        if d.strip() == '':
                            continue
                        ip = unbracket(d.strip())
                        try:
                            infos = ipinfo.get_infos(ip)
                        except IPInfoError:
                            # Query failed: keep the CSV shape with empty columns
                            print("%s;;;;;;;;;;;;;" % ip)
                        else:
                            # Four cases depending on which optional sections
                            # ("company" / "asn") the API answer contains
                            if "company" in infos and "asn" in infos:
                                print("%s;%s;%s;%s;%s;%s;%s;%s;%s;%s;%s;%s;%s;%s" % (
                                        ip,
                                        infos['hostname'] if 'hostname' in infos else '',
                                        infos['city'],
                                        infos['region'],
                                        infos['country'],
                                        infos['loc'],
                                        infos['company']['name'],
                                        infos['company']['domain'],
                                        infos['company']['type'],
                                        infos['asn']['asn'] if 'asn' in infos['asn'] else '',
                                        infos['asn']['name'] if 'name' in infos['asn'] else '',
                                        infos['asn']['domain'] if 'domain' in infos['asn'] else '',
                                        infos['asn']['route'] if 'route' in infos['asn'] else '',
                                        infos['asn']['type'] if 'type' in infos['asn'] else ''
                                    )
                                )
                            elif "company" in infos and "asn" not in infos:
                                print("%s;%s;%s;%s;%s;%s;%s;%s;%s;;;;;" % (
                                        ip,
                                        infos['hostname'] if 'hostname' in infos else '',
                                        infos['city'],
                                        infos['region'],
                                        infos['country'],
                                        infos['loc'],
                                        infos['company']['name'],
                                        infos['company']['domain'],
                                        infos['company']['type']
                                    )
                                )
                            elif "asn" in infos and "company" not in infos:
                                print("%s;%s;%s;%s;%s;%s;;;;%s;%s;%s;%s;%s" % (
                                        ip,
                                        infos['hostname'] if 'hostname' in infos else '',
                                        infos['city'],
                                        infos['region'],
                                        infos['country'],
                                        infos['loc'],
                                        infos['asn']['asn'],
                                        infos['asn']['name'],
                                        infos['asn']['domain'],
                                        infos['asn']['route'],
                                        infos['asn']['type']
                                    )
                                )
                            else:
                                # No structured company/asn data: split the free
                                # text "org" field ("ASxxx NAME") into two columns
                                print("%s;%s;%s;%s;%s;%s;;;;%s;%s;;;" % (
                                        ip,
                                        infos['hostname'] if 'hostname' in infos else '',
                                        infos['city'],
                                        infos['region'],
                                        infos['country'],
                                        infos['loc'],
                                        infos['org'].split(' ')[0],
                                        ' '.join(infos['org'].split(" ")[1:])
                                    )
                                )
            else:
                print("This file does not exist")
                sys.exit(1)
        else:
            self.parser.print_help()
    else:
        self.parser.print_help()
def run(self, conf, args, plugins):
    """IP command: `info` prints geolocation/ASN details for an IP;
    `intel` aggregates threat intelligence (OTX, Robtex, PassiveTotal,
    VirusTotal, GreyNoise, ThreatGrid) and prints a combined report.
    """
    if 'subcommand' in args:
        if args.subcommand == 'info':
            if not is_ip(unbracket(args.IP)):
                print("Invalid IP address")
                sys.exit(1)
            # FIXME: move code here in a library
            ip = unbracket(args.IP)
            try:
                ipy = IP(ip)
            except ValueError:
                print('Invalid IP format, quitting...')
                return
            ipinfo = self.ipinfo(ip)
            print('MaxMind: Located in %s, %s' % (ipinfo['city'],
                                                  ipinfo['country']))
            if ipinfo['asn'] == 0:
                print("MaxMind: IP not found in the ASN database")
            else:
                print('MaxMind: ASN%i, %s' % (ipinfo['asn'],
                                              ipinfo['asn_name']))
            asndb2 = pyasn.pyasn(self.asncidr)
            res = asndb2.lookup(ip)
            if res[1] is None:
                print("IP not found in ASN database")
            else:
                # Search for name in the "AS|NAME" flat file
                # NOTE(review): file handle is never closed here
                f = open(self.asnname, 'r')
                found = False
                line = f.readline()
                name = ''
                while not found and line != '':
                    s = line.split('|')
                    if s[0] == str(res[0]):
                        name = s[1].strip()
                        found = True
                    line = f.readline()
                print('ASN %i - %s (range %s)' % (res[0], name, res[1]))
            if ipinfo['hostname'] != '':
                print('Hostname: %s' % ipinfo['hostname'])
            if ipinfo['specific'] != '':
                print("Specific: %s" % ipinfo['specific'])
            if ipy.iptype() == "PRIVATE":
                # NOTE(review): bare string is a no-op — probably meant
                # print("Private IP"); confirm intent before changing
                "Private IP"
            print("")
            if ipy.version() == 4:
                # Reference links for further manual investigation
                print("Censys:\t\thttps://censys.io/ipv4/%s" % ip)
                print("Shodan:\t\thttps://www.shodan.io/host/%s" % ip)
                print("IP Info:\thttp://ipinfo.io/%s" % ip)
                print("BGP HE:\t\thttps://bgp.he.net/ip/%s" % ip)
                print(
                    "IP Location:\thttps://www.iplocation.net/?query=%s" % ip)
        elif args.subcommand == "intel":
            if not is_ip(unbracket(args.IP)):
                print("Invalid IP address")
                sys.exit(1)
            # Start with MISP and OTX to get Intelligence Reports
            print('###################### %s ###################' %
                  unbracket(args.IP))
            # Accumulators filled by each enabled source, printed at the end
            passive_dns = []
            urls = []
            malware = []
            files = []
            # OTX
            otx_e = plugins['otx'].test_config(conf)
            if otx_e:
                print('[+] Downloading OTX information....')
                otx = OTXv2(conf["AlienVaultOtx"]["key"])
                res = otx.get_indicator_details_full(
                    IndicatorTypes.IPv4, unbracket(args.IP))
                otx_pulses = res["general"]["pulse_info"]["pulses"]
                # Get Passive DNS
                if "passive_dns" in res:
                    for r in res["passive_dns"]["passive_dns"]:
                        passive_dns.append({
                            "domain": r['hostname'],
                            "first": parse(r["first"]),
                            "last": parse(r["last"]),
                            "source": "OTX"
                        })
                if "url_list" in res:
                    for r in res["url_list"]["url_list"]:
                        urls.append(r)
            # RobTex
            print('[+] Downloading Robtex information....')
            rob = Robtex()
            res = rob.get_ip_info(unbracket(args.IP))
            # pas/pash/act/acth = passive & active DNS, current & history
            for d in ["pas", "pash", "act", "acth"]:
                if d in res:
                    for a in res[d]:
                        passive_dns.append({
                            'first': a['date'],
                            'last': a['date'],
                            'domain': a['o'],
                            'source': 'Robtex'
                        })
            # PT
            pt_e = plugins['pt'].test_config(conf)
            if pt_e:
                out_pt = False
                print('[+] Downloading Passive Total information....')
                client = DnsRequest(conf['PassiveTotal']['username'],
                                    conf['PassiveTotal']['key'])
                raw_results = client.get_passive_dns(
                    query=unbracket(args.IP))
                if "results" in raw_results:
                    for res in raw_results["results"]:
                        passive_dns.append({
                            "first": parse(res["firstSeen"]),
                            "last": parse(res["lastSeen"]),
                            "domain": res["resolve"],
                            "source": "PT"
                        })
                if "message" in raw_results:
                    if "quota_exceeded" in raw_results["message"]:
                        print("Quota exceeded for Passive Total")
                        # Skip the enrichment queries below once out of quota
                        out_pt = True
                pt_osint = {}
                if not out_pt:
                    client2 = EnrichmentRequest(
                        conf["PassiveTotal"]["username"],
                        conf["PassiveTotal"]['key'])
                    # Get OSINT
                    # TODO: add PT projects here
                    pt_osint = client2.get_osint(query=unbracket(args.IP))
                    # Get malware
                    raw_results = client2.get_malware(
                        query=unbracket(args.IP))
                    if "results" in raw_results:
                        for r in raw_results["results"]:
                            malware.append({
                                'hash': r["sample"],
                                'date': parse(r['collectionDate']),
                                'source': 'PT (%s)' % r["source"]
                            })
            # VT
            vt_e = plugins['vt'].test_config(conf)
            if vt_e:
                # The private API is required for these endpoints
                if conf["VirusTotal"]["type"] != "public":
                    print('[+] Downloading VT information....')
                    vt = PrivateApi(conf["VirusTotal"]["key"])
                    res = vt.get_ip_report(unbracket(args.IP))
                    if "results" in res:
                        if "resolutions" in res['results']:
                            for r in res["results"]["resolutions"]:
                                passive_dns.append({
                                    "first": parse(r["last_resolved"]),
                                    "last": parse(r["last_resolved"]),
                                    "domain": r["hostname"],
                                    "source": "VT"
                                })
                        if "undetected_downloaded_samples" in res[
                                'results']:
                            for r in res['results'][
                                    'undetected_downloaded_samples']:
                                files.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']),
                                    'source': 'VT'
                                })
                        if "undetected_referrer_samples" in res['results']:
                            for r in res['results'][
                                    'undetected_referrer_samples']:
                                files.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']),
                                    'source': 'VT'
                                })
                        if "detected_downloaded_samples" in res['results']:
                            for r in res['results'][
                                    'detected_downloaded_samples']:
                                malware.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']),
                                    'source': 'VT'
                                })
                        if "detected_referrer_samples" in res['results']:
                            for r in res['results'][
                                    'detected_referrer_samples']:
                                if "date" in r:
                                    malware.append({
                                        'hash': r['sha256'],
                                        'date': parse(r['date']),
                                        'source': 'VT'
                                    })
                else:
                    vt_e = False
            print('[+] Downloading GreyNoise information....')
            gn = GreyNoise()
            try:
                greynoise = gn.query_ip(unbracket(args.IP))
            except GreyNoiseError:
                greynoise = []
            tg_e = plugins['threatgrid'].test_config(conf)
            if tg_e:
                print('[+] Downloading Threat Grid....')
                tg = ThreatGrid(conf['ThreatGrid']['key'])
                res = tg.search_samples(unbracket(args.IP), type='ip')
                # De-duplicate samples by sha256
                already = []
                if 'items' in res:
                    for r in res['items']:
                        if r['sample_sha256'] not in already:
                            d = parse(r['ts'])
                            # Drop tzinfo so dates sort with the other sources
                            d = d.replace(tzinfo=None)
                            malware.append({
                                'hash': r["sample_sha256"],
                                'date': d,
                                'source': 'TG'
                            })
                            already.append(r['sample_sha256'])
            # TODO: Add MISP
            # ---- Reporting phase: print everything gathered above ----
            print('----------------- Intelligence Report')
            if otx_e:
                if len(otx_pulses):
                    print('OTX:')
                    for p in otx_pulses:
                        print(' -%s (%s - %s)' %
                              (p['name'], p['created'][:10],
                               "https://otx.alienvault.com/pulse/" + p['id']))
                else:
                    print('OTX: Not found in any pulse')
            if len(greynoise) > 0:
                print("GreyNoise: IP identified as")
                for r in greynoise:
                    print("\t%s (%s -> %s)" %
                          (r["name"], r["first_seen"], r["last_updated"]))
            else:
                print("GreyNoise: Not found")
            if pt_e:
                if "results" in pt_osint:
                    if len(pt_osint["results"]):
                        if len(pt_osint["results"]) == 1:
                            if "name" in pt_osint["results"][0]:
                                print("PT: %s %s" %
                                      (pt_osint["results"][0]["name"],
                                       pt_osint["results"][0]["sourceUrl"]))
                            else:
                                print("PT: %s" %
                                      pt_osint["results"][0]["sourceUrl"])
                        else:
                            print("PT:")
                            for r in pt_osint["results"]:
                                if "name" in r:
                                    print("-%s %s" %
                                          (r["name"], r["sourceUrl"]))
                                else:
                                    print("-%s" % r["sourceUrl"])
                    else:
                        print("PT: Nothing found!")
                else:
                    print("PT: Nothing found!")
            if len(malware) > 0:
                print('----------------- Malware')
                for r in sorted(malware, key=lambda x: x["date"]):
                    print("[%s] %s %s" %
                          (r["source"], r["hash"],
                           r["date"].strftime("%Y-%m-%d")))
            if len(files) > 0:
                print('----------------- Files')
                for r in sorted(files, key=lambda x: x["date"]):
                    print("[%s] %s %s" %
                          (r["source"], r["hash"],
                           r["date"].strftime("%Y-%m-%d")))
            if len(passive_dns) > 0:
                print('----------------- Passive DNS')
                for r in sorted(passive_dns,
                                key=lambda x: x["first"],
                                reverse=True):
                    print("[+] %-40s (%s -> %s)(%s)" %
                          (r["domain"], r["first"].strftime("%Y-%m-%d"),
                           r["last"].strftime("%Y-%m-%d"), r["source"]))
        else:
            self.parser.print_help()
    else:
        self.parser.print_help()
def run(self, conf, args, plugins):
    """DNS command: reverse-resolve an IP (PTR), or enumerate the common
    record types (A, AAAA, NS, MX, SOA, TXT) for a domain, enriching every
    resolved address with ASN/geo data from the ip plugin.
    """
    if is_ip(unbracket(args.TARGET)):
        # That's an IP address
        ptr_n = str(reversename.from_address(unbracket(args.TARGET)))
        try:
            answer = [entry for entry in resolver.query(ptr_n, "PTR")][0]
            print("%s - %s" % (ptr_n, str(answer)))
        except (resolver.NXDOMAIN, resolver.NoAnswer):
            print("%s - %s" % (ptr_n, "NXDOMAIN"))
    else:
        # cip gives access to CommandIp.ipinfo() for ASN/geo enrichment
        cip = plugins['ip']
        if args.extended:
            # Query every record type the command knows about, best-effort
            for a in self.all_types:
                try:
                    answers = resolver.query(unbracket(args.TARGET), a)
                    for rdata in answers:
                        print(a, ':', rdata.to_text())
                except Exception as e:
                    # Best-effort: skip types that fail to resolve
                    pass
        else:
            target = unbracket(args.TARGET)
            # A
            print("# A")
            try:
                answers = resolver.query(target, 'A')
            except (resolver.NoAnswer, resolver.NXDOMAIN):
                print("No A entry")
            else:
                for rdata in answers:
                    info = cip.ipinfo(rdata.address)
                    print("%s: ASN%i %s - %s %s" %
                          (rdata.address, info['asn'], info['asn_name'],
                           info['city'], info['country']))
            # AA
            print("")
            print("# AAAA")
            try:
                answers = resolver.query(target, 'AAAA')
                for rdata in answers:
                    print(rdata.address)
            except (resolver.NoAnswer, resolver.NXDOMAIN):
                print("No AAAA entry configured")
            # DNS Servers
            print("\n# NS")
            try:
                answers = resolver.query(target, 'NS')
            except (resolver.NoAnswer, resolver.NXDOMAIN,
                    resolver.NoNameservers):
                # That's pretty unlikely
                print("No NS entry configured")
            else:
                for entry in answers:
                    ttarget = str(entry.target)
                    if is_ip(ttarget):
                        # Pretty unlikely
                        info = cip.ipinfo(ttarget)
                        print("%s - ASN%i %s - %s %s" %
                              (ttarget, info['asn'], info['asn_name'],
                               info['city'], info['country']))
                    else:
                        try:
                            ip = [
                                b.address
                                for b in resolver.query(ttarget, 'A')
                            ][0]
                        except resolver.NXDOMAIN:
                            # Hostname without IPv4
                            print(ttarget)
                        else:
                            # Hostname
                            info = cip.ipinfo(ip)
                            print("%s - %s - ASN%i %s - %s %s" %
                                  (ttarget, ip, info['asn'],
                                   info['asn_name'], info['city'],
                                   info['country']))
            # MX
            print("\n# MX:")
            try:
                answers = resolver.query(target, 'MX')
            except (resolver.NoAnswer, resolver.NXDOMAIN):
                print("No MX entry configured")
            else:
                for rdata in answers:
                    # NOTE(review): rdata.exchange is a dns name object, not a
                    # str — confirm is_ip() accepts it as expected
                    if is_ip(rdata.exchange):
                        # IP directly
                        info = cip.ipinfo(rdata.exchange)
                        print("%i %s - ASN%i %s - %s %s" %
                              (rdata.preference, rdata.exchange,
                               info['asn'], info['asn_name'], info['city'],
                               info['country']))
                    else:
                        try:
                            ip = [
                                b.address for b in resolver.query(
                                    rdata.exchange, 'A')
                            ][0]
                        except resolver.NoAnswer:
                            # Hostname without IPv4
                            print(rdata.exchange)
                        else:
                            # Hostname
                            info = cip.ipinfo(ip)
                            print("%i %s - %s - ASN%i %s - %s %s" %
                                  (rdata.preference, rdata.exchange, ip,
                                   info['asn'], info['asn_name'],
                                   info['city'], info['country']))
            # SOA
            print("\n# SOA")
            try:
                answers = resolver.query(target, 'SOA')
            except (resolver.NoAnswer, resolver.NXDOMAIN):
                print("No SOA entry configured")
            else:
                entry = [b for b in answers][0]
                print("NS: %s" % str(entry.mname))
                # rname encodes the admin mailbox as a domain name
                print("Owner: %s" % self.owner_to_email(str(entry.rname)))
            # TXT
            print("\n# TXT:")
            try:
                answers = resolver.query(target, 'TXT')
            except (resolver.NoAnswer, resolver.NXDOMAIN):
                print("No TXT entry configured")
            else:
                for a in answers:
                    print(a.to_text())
def run(self, conf, args, plugins):
    """Query the ThreatMiner API.

    Each subcommand calls one endpoint and renders the answer; the API
    encodes its outcome in response['status_code'] as a *string*
    ('200' success, '404' not found, anything else is an error).
    """
    if 'subcommand' in args:
        tm = ThreatMiner()
        if args.subcommand == 'report':
            response = tm.get_report(unbracket(args.INDICATOR))
            if response['status_code'] == '200':
                if len(response['results']) > 0:
                    print("Reports found:")
                    for r in response['results']:
                        print("{} {} - {}".format(
                            r['year'], r['filename'], r['URL']
                        ))
                else:
                    print("No report found for this indicator")
            elif response['status_code'] == '404':
                print("No report found for this indicator")
            else:
                print("Request failed: {}".format(response['status_message']))
        elif args.subcommand == 'sample':
            response = tm.get_related_samples(unbracket(args.INDICATOR))
            if response['status_code'] == '200':
                if len(response['results']) > 0:
                    print("Samples found:")
                    for r in response['results']:
                        print(r)
                else:
                    print("No report found for this indicator")
            elif response['status_code'] == '404':
                print("No report found for this indicator")
            else:
                print("Request failed: {}".format(response['status_message']))
        elif args.subcommand == 'metadata':
            response = tm.get_metadata(args.HASH)
            if response['status_code'] == '200':
                for r in response['results']:
                    # Print each metadata dict as "key - value" lines
                    for d in r:
                        print("{} - {}".format(d, r[d]))
                    print("")
            elif response['status_code'] == '404':
                print("No report found for this indicator")
            else:
                print("Request failed: {}".format(response['status_message']))
        elif args.subcommand == 'traffic':
            response = tm.get_http_traffic(args.HASH)
            if response['status_code'] == '200':
                for t in response['results'][0]['http_traffic']:
                    for d in t.keys():
                        # 'raw' is the full request body; too noisy to print
                        if d != 'raw':
                            print("{} - {}".format(d, t[d]))
                    print("")
            elif response['status_code'] == '404':
                print("No traffic found for this file")
            else:
                print("Request failed: {}".format(response['status_message']))
        elif args.subcommand == 'host':
            response = tm.get_hosts(args.HASH)
            if response['status_code'] == '200':
                print("domains:")
                for d in response["results"][0]["domains"]:
                    print("{} - {}".format(d["domain"], d["ip"]))
                print("\nHosts:")
                for h in response["results"][0]["hosts"]:
                    print(h)
            elif response['status_code'] == '404':
                print("File not found")
            else:
                print("Request failed: {}".format(response['status_message']))
        elif args.subcommand == 'mutant':
            response = tm.get_mutants(args.HASH)
            if response['status_code'] == '200':
                for m in response["results"][0]['mutants']:
                    print(m)
            elif response['status_code'] == '404':
                print("File not found")
            else:
                print("Request failed: {}".format(response['status_message']))
        elif args.subcommand == 'av':
            response = tm.get_av_detections(args.HASH)
            if response['status_code'] == '200':
                for m in response["results"][0]['av_detections']:
                    print(m)
            elif response['status_code'] == '404':
                print("File not found")
            else:
                print("Request failed: {}".format(response['status_message']))
        elif args.subcommand == 'whois':
            response = tm.who_is(args.DOMAIN)
            if response['status_code'] == '200':
                print(json.dumps(response['results'][0]['whois'], indent=4))
            elif response['status_code'] == '404':
                print("Domain not found")
            else:
                print("Request failed: {}".format(response['status_message']))
        elif args.subcommand == 'dns':
            response = tm.passive_dns(args.DOMAIN)
            if response['status_code'] == '200':
                for r in response['results']:
                    print("{} - {} - {}".format(
                        r["ip"] if "ip" in r else r["domain"],
                        r["first_seen"],
                        r["last_seen"]
                    ))
            elif response['status_code'] == '404':
                print("Domain not found")
            else:
                print("Request failed: {}".format(response['status_message']))
        elif args.subcommand == 'uri':
            response = tm.get_uris(args.DOMAIN)
            if response['status_code'] == '200':
                # NOTE(review): prints results[0]['whois'] — looks copy-pasted
                # from the whois branch; probably meant to print the URIs.
                # Confirm against the ThreatMiner API before changing.
                print(json.dumps(response['results'][0]['whois'], indent=4))
            elif response['status_code'] == '404':
                print("Domain not found")
            else:
                print("Request failed: {}".format(response['status_message']))
        elif args.subcommand == 'subdomain':
            response = tm.get_subdomains(args.DOMAIN)
            if response['status_code'] == '200':
                for s in response['results']:
                    print(s)
            elif response['status_code'] == '404':
                print("Domain not found")
            else:
                print("Request failed: {}".format(response['status_message']))
        else:
            self.parser.print_help()
    else:
        self.parser.print_help()
def run(self, conf, args, plugins):
    """Query the Robtex free API for an IP, an ASN or a domain.

    With --json the raw answer is dumped; otherwise a human-readable
    summary is printed (including passive/active DNS history for IPs).
    """
    if 'subcommand' in args:
        if args.subcommand == "ip":
            r = Robtex()
            res = r.get_ip_info(args.IP)
            if args.json:
                print(json.dumps(res, sort_keys=True, indent=4,
                                 default=json_serial))
            else:
                print("AS %i: %s" % (res["as"], res["asname"]))
                print("Location: %s, %s" % (res["city"], res["country"]))
                print("BGP Route: %s, %s" % (res["bgproute"],
                                             res["routedesc"]))
                print("Whois: %s" % res["whoisdesc"])
                print("https://www.robtex.com/ip-lookup/%s" % args.IP)
                # pas/pash/act/acth = passive & active DNS, current & history
                if "pas" in res:
                    if len(res["pas"]):
                        print("Passive DNS:")
                        for d in res["pas"]:
                            print("\t%s %s" % (d["date"].isoformat(), d["o"]))
                if "pash" in res:
                    if len(res["pash"]):
                        print("Passive DNS History:")
                        for d in res["pash"]:
                            print("\t%s %s" % (d["date"].isoformat(), d["o"]))
                if "act" in res:
                    if len(res["act"]):
                        print("Active DNS:")
                        for d in res["act"]:
                            print("\t%s %s" % (d["date"].isoformat(), d["o"]))
                if "acth" in res:
                    if len(res["acth"]):
                        # FIX: typo "ACtive" -> "Active" in the output
                        print("Active DNS History:")
                        for d in res["acth"]:
                            print("\t%s %s" % (d["date"].isoformat(), d["o"]))
        elif args.subcommand == "asn":
            r = Robtex()
            res = r.get_asn_info(args.ASN)
            if args.json:
                print(json.dumps(res, sort_keys=True, indent=4,
                                 default=json_serial))
            else:
                print("ASN Routes:")
                for n in res["nets"]:
                    print("[+] %s" % n["n"])
        elif args.subcommand == "domain":
            r = Robtex()
            res = r.get_pdns_domain(unbracket(args.DOMAIN))
            if args.json:
                print(json.dumps(res, sort_keys=True, indent=4,
                                 default=json_serial))
            else:
                if len(res) == 0:
                    print("No information on this domain")
                else:
                    print("Passive DNS info:")
                    for r in res:
                        print("[+] %s\t%s\t(%s -> %s)" % (
                            r["rrtype"],
                            r["rrdata"],
                            r["time_first_o"].isoformat(),
                            r["time_last_o"].isoformat()
                        ))
        else:
            self.parser.print_help()
    else:
        self.parser.print_help()
def ipinfo():
    """Console entry point: print ASN / location (and optionally reverse DNS)
    information for one or more IP addresses.

    IPs are taken from the command line, or from stdin when none are given.
    Output format is txt (default), csv or json; a single IP gets a verbose
    txt report, multiple IPs get one line per IP.
    """
    parser = argparse.ArgumentParser(description='Give information on an IP')
    parser.add_argument('IP', type=str, nargs='*', default=[], help="IP addresses")
    parser.add_argument('--format', '-f', help='Output format',
                        choices=["json", "csv", "txt"], default="txt")
    parser.add_argument('--no-dns', '-n', help='No reverse DNS query',
                        action='store_true')
    args = parser.parse_args()
    if len(args.IP):
        ips = args.IP
    else:
        # No positional arguments: read whitespace-separated IPs from stdin
        with open("/dev/stdin") as f:
            ips = f.read().split()
    command = CommandIp()
    if len(ips) == 1:
        if is_ip(unbracket(ips[0])):
            r = command.ipinfo(unbracket(ips[0]), dns=not args.no_dns)
            if args.format == "txt":
                # Empty 'asn' means the lookup found nothing
                if r['asn'] == "":
                    print("IP not found")
                else:
                    print("Information on IP %s" % unbracket(ips[0]))
                    print("ASN: AS%i - %s - %s" % (r['asn'], r['asn_name'], r['asn_type']))
                    print("Location: %s - %s" % (r['city'], r['country']))
                    if not args.no_dns:
                        if r['hostname'] != '':
                            print('Hostname: %s' % r['hostname'])
                    if r['specific'] != '':
                        print("Specific: %s" % r['specific'])
            elif args.format == "csv":
                if r['asn'] == "":
                    print('%s;;;;;' % unbracket(ips[0]))
                else:
                    if args.no_dns:
                        print('%s;AS%i;%s;%s;%s;%s;%s' % (
                            unbracket(ips[0]), r['asn'], r['asn_name'],
                            r['asn_type'], r['city'], r['country'],
                            r['specific']))
                    else:
                        # FIX: the format string had only 7 placeholders for
                        # 8 values (hostname was passed but never formatted),
                        # raising "not all arguments converted during string
                        # formatting". Added the missing ";%s".
                        print('%s;AS%i;%s;%s;%s;%s;%s;%s' % (
                            unbracket(ips[0]), r['asn'], r['asn_name'],
                            r['asn_type'], r['city'], r['country'],
                            r['hostname'], r['specific']))
            else:
                print(json.dumps(r, sort_keys=True, indent=4))
        else:
            print("Invalid IP address")
    else:
        # Several IPs: one line per IP (txt and csv share the same layout here)
        for ip in ips:
            if is_ip(unbracket(ip)):
                r = command.ipinfo(unbracket(ip), dns=not args.no_dns)
                if args.format in ["txt", "csv"]:
                    if r['asn'] == "":
                        print('%s ; ; ; ; ; ;' % unbracket(ip))
                    else:
                        if args.no_dns:
                            print('%s ; AS%i ; %s ; %s ; %s ; %s ; %s ' % (
                                unbracket(ip), r['asn'], r['asn_name'],
                                r['asn_type'], r['city'], r['country'],
                                r['specific']))
                        else:
                            print('%s ; AS%i ; %s ; %s ; %s ; %s ; %s ; %s' % (
                                unbracket(ip), r['asn'], r['asn_name'],
                                r['asn_type'], r['city'], r['country'],
                                r['hostname'], r['specific']))
                else:
                    # JSON
                    print(
                        json.dumps({unbracket(ip): r}, sort_keys=True, indent=4))
            else:
                print("%s ; ; ; ; ; ; Invalid IP" % unbracket(ip))
def intel(self, type, query, data, conf):
    """Enrich `data` with PulseDive information for a domain or an IP.

    type  -- "domain" or "ip" (any other value is a no-op).
             NOTE(review): parameter name shadows the builtin `type`.
    query -- indicator value; passed through unbracket() before the API call.
    data  -- mutable dict of result lists updated in place; this method
             appends to data["passive_dns"], data["reports"], data["urls"]
             and (for IPs) data["ports"].
    conf  -- configuration dict; reads conf['PulseDive']['key'].

    Any PulseDiveError is swallowed: enrichment is best-effort.
    """
    if type == "domain":
        pd = PulseDive(conf['PulseDive']['key'])
        print("[+] Checking PulseDive...")
        try:
            res = pd.indicators_by_value(unbracket(query), historical=True)
            # Only A records are kept as passive DNS entries
            if "properties" in res:
                if "dns" in res["properties"]:
                    for ip in res["properties"]["dns"]:
                        if ip["name"] == "A":
                            data["passive_dns"].append({
                                "ip": ip["value"],
                                "first": parse(ip["stamp_seen"]).astimezone(pytz.utc),
                                "last": None,
                                "source": "PulseDive"
                            })
            # Threat names are collapsed into a single synthetic "report"
            if "threats" in res:
                if len(res["threats"]) > 0:
                    data["reports"].append({
                        "date": None,
                        "source": "PulseDive",
                        "title": "Identified as related to {}".format(", ".join(
                            [r["name"] for r in res["threats"]])),
                        "url": ""
                    })
            # Second query: linked indicators (related URLs)
            res = pd.indicators_by_value_links(unbracket(query))
            if "Related URLs" in res:
                for url in res["Related URLs"]:
                    data["urls"].append({
                        "source": "PulseDive",
                        "url": url["indicator"],
                        "ip": "",
                        "date": parse(url["stamp_linked"]).astimezone(pytz.utc)
                    })
        except PulseDiveError:
            # Best-effort enrichment: ignore API failures silently
            pass
    elif type == "ip":
        pd = PulseDive(conf['PulseDive']['key'])
        print("[+] Checking PulseDive...")
        try:
            res = pd.indicators_by_value(unbracket(query), historical=True)
            # Open ports reported for the IP
            if "attributes" in res:
                if "port" in res["attributes"]:
                    for p in res["attributes"]["port"]:
                        data["ports"].append({
                            "port": p,
                            "source": "PulseDive",
                            "info": ""
                        })
            # For an IP the A-record value is the domain that resolved to it
            if "properties" in res:
                if "dns" in res["properties"]:
                    for ip in res["properties"]["dns"]:
                        if ip["name"] == "A":
                            data["passive_dns"].append({
                                "domain": ip["value"],
                                "first": parse(ip["stamp_seen"]).astimezone(pytz.utc),
                                "last": None,
                                "source": "PulseDive"
                            })
            if "threats" in res:
                if len(res["threats"]) > 0:
                    data["reports"].append({
                        "date": None,
                        "source": "PulseDive",
                        "title": "Identified as related to {}".format(", ".join(
                            [r["name"] for r in res["threats"]])),
                        "url": ""
                    })
            res = pd.indicators_by_value_links(unbracket(query))
            if "Related URLs" in res:
                for url in res["Related URLs"]:
                    data["urls"].append({
                        "source": "PulseDive",
                        "url": url["indicator"],
                        "ip": "",
                        "date": parse(url["stamp_linked"]).astimezone(pytz.utc)
                    })
        except PulseDiveError:
            # Best-effort enrichment: ignore API failures silently
            pass
def run(self, conf, args, plugins):
    """Handle the domain command.

    The "intel" subcommand aggregates intelligence on args.DOMAIN from every
    configured plugin (MISP, OTX, UrlScan, UrlHaus, CIRCL, BinaryEdge,
    Robtex, PassiveTotal, VirusTotal, ThreatGrid, ThreatMiner), collecting
    passive DNS entries, URLs, malware samples and files, then prints a
    consolidated report. All timestamps are normalized to UTC so the final
    sorts can compare them.
    """
    if 'subcommand' in args:
        if args.subcommand == "intel":
            # Start with MISP and OTX to get Intelligence Reports
            print('###################### %s ###################' % args.DOMAIN)
            # Accumulators filled by each source below, rendered at the end
            passive_dns = []
            urls = []
            malware = []
            files = []
            # MISP
            misp_e = plugins['misp'].test_config(conf)
            if misp_e:
                print('[+] Downloading MISP information...')
                server = ExpandedPyMISP(conf['Misp']['url'], conf['Misp']['key'])
                misp_results = server.search('attributes', value=unbracket(args.DOMAIN))
            # OTX
            otx_e = plugins['otx'].test_config(conf)
            if otx_e:
                print('[+] Downloading OTX information....')
                try:
                    otx = OTXv2(conf["AlienVaultOtx"]["key"])
                    res = otx.get_indicator_details_full(IndicatorTypes.DOMAIN,
                            unbracket(args.DOMAIN))
                    otx_pulses = res["general"]["pulse_info"]["pulses"]
                    # Get Passive DNS
                    if "passive_dns" in res:
                        for r in res["passive_dns"]["passive_dns"]:
                            passive_dns.append({
                                "ip": r['hostname'],
                                "first": parse(r["first"]).astimezone(pytz.utc),
                                "last": parse(r["last"]).astimezone(pytz.utc),
                                "source" : "OTX"
                            })
                    if "url_list" in res:
                        for r in res["url_list"]["url_list"]:
                            if "result" in r:
                                urls.append({
                                    "date": parse(r["date"]).astimezone(pytz.utc),
                                    "url": r["url"],
                                    "ip": r["result"]["urlworker"]["ip"] if "ip" in r["result"]["urlworker"] else "" ,
                                    "source": "OTX"
                                })
                            else:
                                urls.append({
                                    "date": parse(r["date"]).astimezone(pytz.utc),
                                    "url": r["url"],
                                    "ip": "",
                                    "source": "OTX"
                                })
                except AttributeError:
                    # NOTE(review): if this fires, otx_pulses is never bound
                    # but otx_e stays True, so the reporting section below
                    # would raise NameError — confirm and guard.
                    print('OTX crashed ¯\_(ツ)_/¯')
            # UrlScan (no API key needed, always queried)
            us = UrlScan()
            print('[+] Downloading UrlScan information....')
            res = us.search(args.DOMAIN)
            for r in res['results']:
                urls.append({
                    "date": parse(r["task"]["time"]).astimezone(pytz.utc),
                    "url": r["page"]["url"],
                    "ip": r["page"]["ip"] if "ip" in r["page"] else "",
                    "source": "UrlScan"
                })
            # UrlHaus
            uh_e = plugins['urlhaus'].test_config(conf)
            if uh_e:
                print("[+] Checking urlhaus...")
                try:
                    urlhaus = UrlHaus(conf["UrlHaus"]["key"])
                    res = urlhaus.get_host(unbracket(args.DOMAIN))
                except UrlHausError:
                    print("Error with the query")
                else:
                    if "urls" in res:
                        for r in res['urls']:
                            urls.append({
                                "date": parse(r["date_added"]).astimezone(pytz.utc),
                                "url": r["url"],
                                "ip":"",
                                "source": "UrlHaus"
                            })
            # CIRCL
            circl_e = plugins['circl'].test_config(conf)
            if circl_e:
                print('[+] Downloading CIRCL passive DNS information....')
                x = pypdns.PyPDNS(
                    basic_auth=(
                        conf['Circl']['user'],
                        conf['Circl']['pass']
                    )
                )
                res = x.query(unbracket(args.DOMAIN))
                for answer in res:
                    passive_dns.append({
                        "ip": answer['rdata'],
                        "first": answer['time_first'].astimezone(pytz.utc),
                        "last": answer['time_last'].astimezone(pytz.utc),
                        "source" : "CIRCL"
                    })
            # BinaryEdge
            be_e = plugins['binaryedge'].test_config(conf)
            if be_e:
                print('[+] Downloading BinaryEdge information....')
                try:
                    be = BinaryEdge(conf['BinaryEdge']['key'])
                    res = be.domain_dns(unbracket(args.DOMAIN))
                    for d in res['events']:
                        if "A" in d:
                            for a in d['A']:
                                # BinaryEdge only gives a single timestamp,
                                # used for both first and last seen
                                passive_dns.append({
                                    "ip": a,
                                    "first": parse(d['updated_at']).astimezone(pytz.utc),
                                    "last": parse(d['updated_at']).astimezone(pytz.utc),
                                    "source" : "BinaryEdge"
                                })
                except BinaryEdgeException:
                    print('You need a paid BinaryEdge subscription for this request')
            # RobTex (no API key needed, always queried)
            print('[+] Downloading Robtex information....')
            try:
                rob = Robtex()
                res = rob.get_pdns_domain(args.DOMAIN)
                for d in res:
                    if d['rrtype'] in ['A', 'AAAA']:
                        passive_dns.append({
                            'first': d['time_first_o'].astimezone(pytz.utc),
                            'last': d['time_last_o'].astimezone(pytz.utc),
                            'ip': d['rrdata'],
                            'source': 'Robtex'
                        })
            except RobtexError:
                print("Robtex query failed")
            # PT
            pt_e = plugins['pt'].test_config(conf)
            if pt_e:
                try:
                    pt_osint = {}
                    # ptout is set when the PT quota is exhausted, to skip
                    # the enrichment queries below
                    ptout = False
                    print('[+] Downloading Passive Total information....')
                    client = DnsRequest(conf['PassiveTotal']['username'],
                            conf['PassiveTotal']['key'])
                    raw_results = client.get_passive_dns(query=unbracket(args.DOMAIN))
                    if "results" in raw_results:
                        for res in raw_results["results"]:
                            passive_dns.append({
                                "first": parse(res["firstSeen"]).astimezone(pytz.utc),
                                "last": parse(res["lastSeen"]).astimezone(pytz.utc),
                                "ip": res["resolve"],
                                "source": "PT"
                            })
                    if "message" in raw_results:
                        if "quota_exceeded" in raw_results["message"]:
                            print("PT quota exceeded")
                            ptout = True
                    if not ptout:
                        client2 = EnrichmentRequest(conf["PassiveTotal"]["username"],
                                conf["PassiveTotal"]['key'])
                        # Get OSINT
                        # TODO: add PT projects here
                        pt_osint = client2.get_osint(query=unbracket(args.DOMAIN))
                        # Get malware
                        raw_results = client2.get_malware(query=unbracket(args.DOMAIN))
                        if "results" in raw_results:
                            for r in raw_results["results"]:
                                malware.append({
                                    'hash': r["sample"],
                                    'date': parse(r['collectionDate']).astimezone(pytz.utc),
                                    'source' : 'PT (%s)' % r["source"]
                                })
                except requests.exceptions.ReadTimeout:
                    print("PT: Time Out")
            # VT
            vt_e = plugins['vt'].test_config(conf)
            if vt_e:
                # Domain reports require a private (non-public) VT API key
                if conf["VirusTotal"]["type"] != "public":
                    print('[+] Downloading VT information....')
                    vt = PrivateApi(conf["VirusTotal"]["key"])
                    res = vt.get_domain_report(unbracket(args.DOMAIN))
                    if "results" in res:
                        if "resolutions" in res['results']:
                            for r in res["results"]["resolutions"]:
                                # VT only exposes last_resolved, reused as
                                # both first and last seen
                                passive_dns.append({
                                    "first": parse(r["last_resolved"]).astimezone(pytz.utc),
                                    "last": parse(r["last_resolved"]).astimezone(pytz.utc),
                                    "ip": r["ip_address"],
                                    "source": "VT"
                                })
                        if "undetected_downloaded_samples" in res['results']:
                            for r in res['results']['undetected_downloaded_samples']:
                                files.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']).astimezone(pytz.utc) if 'date' in r else '',
                                    'source' : 'VT'
                                })
                        if "undetected_referrer_samples" in res['results']:
                            for r in res['results']['undetected_referrer_samples']:
                                files.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']).astimezone(pytz.utc) if 'date' in r else '',
                                    'source' : 'VT'
                                })
                        if "detected_downloaded_samples" in res['results']:
                            for r in res['results']['detected_downloaded_samples']:
                                malware.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']).astimezone(pytz.utc),
                                    'source' : 'VT'
                                })
                        if "detected_referrer_samples" in res['results']:
                            for r in res['results']['detected_referrer_samples']:
                                if "date" in r:
                                    malware.append({
                                        'hash': r['sha256'],
                                        'date': parse(r['date']).astimezone(pytz.utc),
                                        'source' : 'VT'
                                    })
                        if "detected_urls" in res['results']:
                            for r in res['results']['detected_urls']:
                                urls.append({
                                    'date': parse(r['scan_date']).astimezone(pytz.utc),
                                    'url': r['url'],
                                    'ip': '',
                                    'source': 'VT'
                                })
                else:
                    # Public key: mark VT as unavailable for the report below
                    vt_e = False
            # ThreatGrid
            tg_e = plugins['threatgrid'].test_config(conf)
            if tg_e:
                try:
                    print('[+] Downloading Threat Grid....')
                    tg = ThreatGrid(conf['ThreatGrid']['key'])
                    res = tg.search_samples(unbracket(args.DOMAIN), type='domain')
                    # Deduplicate samples by sha256
                    already = []
                    if 'items' in res:
                        for r in res['items']:
                            if r['sample_sha256'] not in already:
                                d = parse(r['ts']).astimezone(pytz.utc)
                                malware.append({
                                    'hash': r["sample_sha256"],
                                    'date': d,
                                    'source' : 'ThreatGrid'
                                })
                                already.append(r['sample_sha256'])
                except ThreatGridError as e:
                    print("Failed to connect to Threat Grid: %s" % e.message)
            # ThreatMiner (no API key needed, always queried)
            print('[+] Downloading ThreatMiner....')
            tm = ThreatMiner()
            response = tm.get_report(unbracket(args.DOMAIN))
            if response['status_code'] == '200':
                tmm = response['results']
            else:
                tmm = []
                if response['status_code'] == '404':
                    print("Request to ThreatMiner failed: {}".format(response['status_message']))
            response = tm.get_related_samples(unbracket(args.DOMAIN))
            if response['status_code'] == '200':
                for r in response['results']:
                    malware.append({
                        'hash': r,
                        'date': None,
                        'source': 'ThreatMiner'
                    })
            # ----- Reporting: render everything gathered above -----
            print('----------------- Intelligence Report')
            if misp_e:
                if len(misp_results['Attribute']) > 0:
                    print('MISP:')
                    for event in misp_results['Attribute']:
                        print("- {} - {}".format(
                            event['Event']['id'],
                            event['Event']['info']
                        ))
            if otx_e:
                if len(otx_pulses):
                    print('OTX:')
                    for p in otx_pulses:
                        print('- %s (%s - %s)' % (
                                p['name'],
                                p['created'][:10],
                                "https://otx.alienvault.com/pulse/" + p['id']
                            )
                        )
                else:
                    print('OTX: Not found in any pulse')
            if pt_e:
                if "results" in pt_osint:
                    if len(pt_osint["results"]):
                        # Single result is printed inline, several as a list
                        if len(pt_osint["results"]) == 1:
                            if "name" in pt_osint["results"][0]:
                                print("PT: %s %s" % (pt_osint["results"][0]["name"],
                                    pt_osint["results"][0]["sourceUrl"]))
                            else:
                                print("PT: %s" % (pt_osint["results"][0]["sourceUrl"]))
                        else:
                            print("PT:")
                            for r in pt_osint["results"]:
                                if "name" in r:
                                    print("- %s %s" % (r["name"], r["sourceUrl"]))
                                else:
                                    print("- %s" % (r["sourceUrl"]))
                    else:
                        print("PT: Nothing found!")
                else:
                    print("PT: Nothing found!")
            # ThreatMiner
            if len(tmm) > 0:
                print("ThreatMiner:")
                for r in tmm:
                    print("- {} {} - {}".format(
                        r['year'],
                        r['filename'],
                        r['URL']
                    ))
            if len(malware) > 0:
                print('----------------- Malware')
                for r in malware:
                    print("[%s] %s %s" % (
                            r["source"],
                            r["hash"],
                            r["date"].strftime("%Y-%m-%d") if r["date"] else ""
                        )
                    )
            if len(files) > 0:
                print('----------------- Files')
                for r in files:
                    if r['date'] != '':
                        print("[%s] %s (%s)" % (
                                r["source"],
                                r["hash"],
                                r["date"].strftime("%Y-%m-%d")
                            )
                        )
                    else:
                        print("[%s] %s" % (
                                r["source"],
                                r["hash"],
                            )
                        )
            if len(urls) > 0:
                print('----------------- Urls')
                # Newest first
                for r in sorted(urls, key=lambda x: x["date"], reverse=True):
                    print("[%s] %s - %s %s" % (
                            r["source"],
                            r["url"],
                            r["ip"],
                            r["date"].strftime("%Y-%m-%d")
                        )
                    )
            # TODO: add ASN + location info here
            if len(passive_dns) > 0:
                print('----------------- Passive DNS')
                # Most recently first-seen entries first
                for r in sorted(passive_dns, key=lambda x: x["first"], reverse=True):
                    print("[+] %-40s (%s -> %s)(%s)" % (
                            r["ip"],
                            r["first"].strftime("%Y-%m-%d"),
                            r["last"].strftime("%Y-%m-%d"),
                            r["source"]
                        )
                    )
        else:
            self.parser.print_help()
    else:
        self.parser.print_help()
def run(self, conf, args, plugins):
    """Handle the Zetalytics command: dispatch each subcommand to the
    matching Zetalytics API endpoint.

    Most subcommands dump the raw JSON answer. Subcommands that support
    --json additionally offer a tabular rendering of `results` when the
    flag is absent. hash2malwaredns/hash2malwarehttp only accept MD5
    hashes (32 hex chars), validated locally before querying.
    """
    if 'subcommand' in args:
        zeta = Zetalytics(token=conf['Zetalytics']['token'])
        if args.subcommand == "cname2qname":
            res = zeta.cname2qname(q=unbracket(args.CNAME))
            if args.json:
                print(json.dumps(res, sort_keys=False, indent=4))
            else:
                if res['total'] == 0:
                    print("Not found")
                else:
                    for r in res['results']:
                        print("{:12}{:35} {:20} {}".format(
                            r['last_seen'], r['qname'], r['domain'], r['value']))
        elif args.subcommand == "domain2aaaa":
            res = zeta.domain2aaaa(q=unbracket(args.DOMAIN))
            if args.json:
                print(json.dumps(res, sort_keys=False, indent=4))
            else:
                if res['total'] == 0:
                    print("Nothing found")
                else:
                    for r in res['results']:
                        print("{:12} {:30} {}".format(
                            r['last_seen'], r['qname'], r['value']))
        elif args.subcommand == "domain2cname":
            res = zeta.domain2cname(q=unbracket(args.DOMAIN))
            if args.json:
                print(json.dumps(res, sort_keys=False, indent=4))
            else:
                if res['total'] == 0:
                    print("Nothing found")
                else:
                    for r in res['results']:
                        print("{:12} {:30} {}".format(
                            r['last_seen'], r['qname'], r['value']))
        elif args.subcommand == "domain2d8s":
            res = zeta.domain2d8s(q=unbracket(args.DOMAIN))
            print(json.dumps(res, sort_keys=False, indent=4))
        elif args.subcommand == "domain2ip":
            res = zeta.domain2ip(q=unbracket(args.DOMAIN))
            if args.json:
                print(json.dumps(res, sort_keys=False, indent=4))
            else:
                if res['total'] == 0:
                    print("Nothing found")
                else:
                    for r in res['results']:
                        print("{:12} {:30} {}".format(
                            r['last_seen'], r['qname'], r['value']))
        elif args.subcommand == "domain2malwaredns":
            res = zeta.domain2malwaredns(q=unbracket(args.DOMAIN))
            print(json.dumps(res, sort_keys=False, indent=4))
        elif args.subcommand == "domain2malwarehttp":
            res = zeta.domain2malwarehttp(q=unbracket(args.DOMAIN))
            print(json.dumps(res, sort_keys=False, indent=4))
        elif args.subcommand == "domain2mx":
            res = zeta.domain2mx(q=unbracket(args.DOMAIN))
            if args.json:
                print(json.dumps(res, sort_keys=False, indent=4))
            else:
                if res['total'] == 0:
                    print("Nothing found")
                else:
                    # NOTE(review): this endpoint's rows use r['date'] while
                    # domain2aaaa/cname/ip use r['last_seen'] — presumably
                    # the API differs per endpoint; verify against the docs.
                    for r in res['results']:
                        print("{:12} {:30} {}".format(
                            r['date'], r['qname'], r['value']))
        elif args.subcommand == "domain2ns":
            res = zeta.domain2ns(q=unbracket(args.DOMAIN))
            if args.json:
                print(json.dumps(res, sort_keys=False, indent=4))
            else:
                if res['total'] == 0:
                    print("Nothing found")
                else:
                    for r in res['results']:
                        print("{:12} {:30} {}".format(
                            r['date'], r['qname'], r['value']))
        elif args.subcommand == "domain2nsglue":
            res = zeta.domain2nsglue(q=unbracket(args.DOMAIN))
            print(json.dumps(res, sort_keys=False, indent=4))
        elif args.subcommand == "domain2ptr":
            res = zeta.domain2ptr(q=unbracket(args.DOMAIN))
            if args.json:
                print(json.dumps(res, sort_keys=False, indent=4))
            else:
                if res['total'] == 0:
                    print("Nothing found")
                else:
                    for r in res['results']:
                        print("{:12} {:30} {}".format(
                            r['date'], r['qname'], r['value']))
        elif args.subcommand == "domain2txt":
            res = zeta.domain2txt(q=unbracket(args.DOMAIN))
            if args.json:
                print(json.dumps(res, sort_keys=False, indent=4))
            else:
                if res['total'] == 0:
                    print("Nothing found")
                else:
                    for r in res['results']:
                        print("{:12} {:30} {}".format(
                            r['date'], r['qname'], r['value']))
        elif args.subcommand == "domain2whois":
            res = zeta.domain2whois(q=unbracket(args.DOMAIN))
            print(json.dumps(res, sort_keys=False, indent=4))
        elif args.subcommand == "email_address":
            res = zeta.email_address(q=unbracket(args.EMAIL))
            if args.json:
                print(json.dumps(res, sort_keys=False, indent=4))
            else:
                if res['total'] == 0:
                    print("Nothing found")
                else:
                    # Sorted chronologically by last-seen timestamp
                    for r in sorted(res['results'], key=lambda x: x['last_ts']):
                        print("{} {} - {:30} - {}".format(
                            r["first_ts"][:10],
                            r["last_ts"][:10],
                            r['d'],
                            r["emails"][0]["addr"]))
        elif args.subcommand == "email_domain":
            res = zeta.email_domain(q=unbracket(args.EMAIL))
            print(json.dumps(res, sort_keys=False, indent=4))
        elif args.subcommand == "email_user":
            res = zeta.email_user(q=unbracket(args.EMAIL))
            print(json.dumps(res, sort_keys=False, indent=4))
        elif args.subcommand == "hash2malwaredns":
            # Local MD5 validation (32 hex chars) before querying
            # NOTE(review): regex should be a raw string r"[a-fA-F\d]{32}"
            if not re.fullmatch("[a-fA-F\d]{32}", args.HASH):
                print("Zetalytics only accept md5 hashes")
            else:
                res = zeta.hash2malwaredns(q=args.HASH)
                print(json.dumps(res, sort_keys=False, indent=4))
        elif args.subcommand == "hash2malwarehttp":
            if not re.fullmatch("[a-fA-F\d]{32}", args.HASH):
                print("Zetalytics only accept md5 hashes")
            else:
                res = zeta.hash2malwarehttp(q=args.HASH)
                print(json.dumps(res, sort_keys=False, indent=4))
        elif args.subcommand == "hostname":
            res = zeta.hostname(q=unbracket(args.DOMAIN))
            print(json.dumps(res, sort_keys=False, indent=4))
        elif args.subcommand == "ip":
            res = zeta.ip(q=unbracket(args.IP))
            if args.json:
                print(json.dumps(res, sort_keys=False, indent=4))
            else:
                if res['total'] == 0:
                    print("Not found")
                else:
                    for r in res['results']:
                        print("{} - {} - {}".format(
                            r['date'], r['last_seen'], r['qname']))
        elif args.subcommand == "ip2malwaredns":
            res = zeta.ip2malwaredns(q=unbracket(args.IP))
            print(json.dumps(res, sort_keys=False, indent=4))
        elif args.subcommand == "ip2malwarehttp":
            res = zeta.ip2malwarehttp(q=unbracket(args.IP))
            print(json.dumps(res, sort_keys=False, indent=4))
        elif args.subcommand == "ip2nsglue":
            res = zeta.ip2nsglue(q=unbracket(args.IP))
            print(json.dumps(res, sort_keys=False, indent=4))
        elif args.subcommand == "mx2domain":
            res = zeta.mx2domain(q=unbracket(args.MX))
            print(json.dumps(res, sort_keys=False, indent=4))
        elif args.subcommand == "ns2domain":
            res = zeta.ns2domain(q=unbracket(args.NS))
            print(json.dumps(res, sort_keys=False, indent=4))
        elif args.subcommand == "subdomains":
            res = zeta.subdomains(q=unbracket(args.DOMAIN))
            print(json.dumps(res, sort_keys=False, indent=4))
        else:
            self.parser.print_help()
    else:
        self.parser.print_help()
def run(self, conf, args, plugins):
    """Handle the PassiveTotal command.

    Subcommands:
        whois   -- whois lookup by domain (--domain), bulk file of domains
                   (--file, pipe-separated table output) or email (--email)
        dns     -- passive DNS for args.DOMAIN, raw JSON
        malware -- malware samples for one domain or a file of domains;
                   bulk queries are chunked by 50 (API bulk limit)
        osint   -- OSINT reports, same single/bulk structure as malware
    Bulk failures print the raw answer and exit(1).
    """
    if 'subcommand' in args:
        if args.subcommand == 'whois':
            client = WhoisRequest(conf['PassiveTotal']['username'],
                    conf['PassiveTotal']['key'])
            if args.domain:
                raw_results = client.search_whois_by_field(query=unbracket(
                    args.domain.strip()),
                    field="domain")
                print(
                    json.dumps(raw_results,
                        sort_keys=True,
                        indent=4,
                        separators=(',', ': ')))
            elif args.file:
                # Bulk mode: one whois query per domain in the file,
                # printed as a pipe-separated table
                with open(args.file, 'r') as infile:
                    data = infile.read().split()
                print(
                    "Domain|Date|Registrar|name|email|Phone|organization|Street|City|Postal Code|State|Country"
                )
                for d in data:
                    do = unbracket(d.strip())
                    # FIXME: bulk request here
                    raw_results = client.search_whois_by_field(
                        query=do, field="domain")
                    if "results" not in raw_results:
                        print("%s|||||||||||" % bracket(do))
                    else:
                        if len(raw_results["results"]) == 0:
                            print("%s|||||||||||" % bracket(do))
                        else:
                            # Only the first whois result is rendered
                            r = raw_results["results"][0]
                            if "registered" in r:
                                dd = datetime.datetime.strptime(
                                    r["registered"],
                                    "%Y-%m-%dT%H:%M:%S.%f%z")
                                ddo = dd.strftime("%m/%d/%Y %H:%M:%S")
                            else:
                                ddo = ""
                            # Every registrant field is optional in the API
                            # answer, hence the conditional expressions
                            print(
                                "%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s" %
                                (bracket(do), ddo,
                                    r["registrar"]
                                    if "registrar" in r else "",
                                    r["registrant"]["name"]
                                    if "name" in r["registrant"] else "",
                                    r["registrant"]["email"]
                                    if "email" in r["registrant"] else "",
                                    r["registrant"]["telephone"]
                                    if "telephone" in r["registrant"] else "",
                                    r["registrant"]["organization"]
                                    if "organization" in r["registrant"] else "",
                                    r["registrant"]["street"]
                                    if "street" in r["registrant"] else "",
                                    r["registrant"]["city"]
                                    if "city" in r["registrant"] else "",
                                    r["registrant"]["postalCode"]
                                    if "postalCode" in r["registrant"] else "",
                                    r["registrant"]["state"]
                                    if "state" in r["registrant"] else "",
                                    r["registrant"]["country"]
                                    if "country" in r["registrant"] else ""))
            elif args.email:
                raw_results = client.search_whois_by_field(
                    query=args.email.strip(), field="email")
                print(
                    json.dumps(raw_results,
                        sort_keys=True,
                        indent=4,
                        separators=(',', ': ')))
            else:
                self.parser.print_help()
        elif args.subcommand == "dns":
            client = DnsRequest(conf['PassiveTotal']['username'],
                    conf['PassiveTotal']['key'])
            raw_results = client.get_passive_dns(query=unbracket(
                args.DOMAIN), )
            print(
                json.dumps(raw_results,
                    sort_keys=True,
                    indent=4,
                    separators=(',', ': ')))
        elif args.subcommand == "malware":
            client = EnrichmentRequest(conf["PassiveTotal"]["username"],
                    conf["PassiveTotal"]['key'])
            if args.domain:
                raw_results = client.get_malware(query=args.domain)
                print(
                    json.dumps(raw_results,
                        sort_keys=True,
                        indent=4,
                        separators=(',', ': ')))
            elif args.file:
                with open(args.file, 'r') as infile:
                    data = infile.read().split()
                # Deduplicate input domains
                domain_list = list(set([a.strip() for a in data]))
                # Up to 50 domains fit in a single bulk query
                if len(domain_list) < 51:
                    raw_results = client.get_bulk_malware(
                        query=domain_list)
                    if "results" not in raw_results or not raw_results[
                            "success"]:
                        print("Request failed")
                        print(
                            json.dumps(raw_results,
                                sort_keys=True,
                                indent=4,
                                separators=(',', ': ')))
                        sys.exit(1)
                    else:
                        results = raw_results["results"]
                else:
                    # More than 50: page through in chunks of bulk_size
                    results = {}
                    bulk_size = 50
                    i = 0
                    while i * bulk_size < len(domain_list):
                        raw_results = client.get_bulk_malware(
                            query=domain_list[i * bulk_size:(i + 1) *
                                bulk_size])
                        if "results" not in raw_results or not raw_results[
                                "success"]:
                            print("Request failed")
                            print(
                                json.dumps(raw_results,
                                    sort_keys=True,
                                    indent=4,
                                    separators=(',', ': ')))
                            sys.exit(1)
                        else:
                            results.update(raw_results["results"])
                        i += 1
                if args.raw:
                    print(
                        json.dumps(results,
                            sort_keys=True,
                            indent=4,
                            separators=(',', ': ')))
                else:
                    print("Domain|Date|Sample|Source|Source URL")
                    for domain in results:
                        if "results" in results[domain]:
                            for sample in results[domain]["results"]:
                                print("%s|%s|%s|%s|%s" %
                                    (domain, sample["collectionDate"],
                                        sample["sample"], sample["source"],
                                        sample["sourceUrl"]))
            else:
                self.parser.print_help()
        elif args.subcommand == "osint":
            # FIXME: add research of projects
            client = EnrichmentRequest(conf["PassiveTotal"]["username"],
                    conf["PassiveTotal"]['key'])
            if args.domain:
                raw_results = client.get_osint(query=args.domain)
                print(
                    json.dumps(raw_results,
                        sort_keys=True,
                        indent=4,
                        separators=(',', ': ')))
            elif args.file:
                with open(args.file, 'r') as infile:
                    data = infile.read().split()
                # Same dedup + chunked-bulk structure as the malware branch
                domain_list = list(set([a.strip() for a in data]))
                if len(domain_list) < 51:
                    raw_results = client.get_bulk_osint(query=domain_list)
                    if "results" not in raw_results or not raw_results[
                            "success"]:
                        print("Request failed")
                        print(
                            json.dumps(raw_results,
                                sort_keys=True,
                                indent=4,
                                separators=(',', ': ')))
                        sys.exit(1)
                    else:
                        results = raw_results["results"]
                else:
                    results = {}
                    bulk_size = 50
                    i = 0
                    while i * bulk_size < len(domain_list):
                        raw_results = client.get_bulk_osint(
                            query=domain_list[i * bulk_size:(i + 1) *
                                bulk_size])
                        if "results" not in raw_results or not raw_results[
                                "success"]:
                            print("Request failed")
                            print(
                                json.dumps(raw_results,
                                    sort_keys=True,
                                    indent=4,
                                    separators=(',', ': ')))
                            sys.exit(1)
                        else:
                            results.update(raw_results["results"])
                        i += 1
                if args.raw:
                    print(
                        json.dumps(results,
                            sort_keys=True,
                            indent=4,
                            separators=(',', ': ')))
                else:
                    print("Domain|Source|URL|Tags")
                    for domain in results:
                        if "results" in results[domain]:
                            for report in results[domain]["results"]:
                                print("%s|%s|%s|%s" %
                                    (domain, report["source"],
                                        report["source_url"], " / ".join(
                                            report["tags"])))
            else:
                self.parser.print_help()
        else:
            self.parser.print_help()
    else:
        self.parser.print_help()
def run(self, conf, args, plugins):
    """Handle the VirusTotal command.

    The available subcommands depend on the configured key type: a
    non-"public" key uses PrivateApi (adds dl, domainlist, iplist and
    --extended hash data), a public key uses PublicApi with a reduced
    feature set. Both paths share hash/file/hashlist/domain/ip/url.
    """
    if 'subcommand' in args:
        if conf["VirusTotal"]["type"] != "public":
            # ----- Private API key -----
            vt = PrivateApi(conf["VirusTotal"]["key"])
            if args.subcommand == "hash":
                response = vt.get_file_report(args.HASH)
                if args.raw:
                    print(json.dumps(response, sort_keys=False, indent=4))
                    if args.extended:
                        # Extra private-API data: network traffic + behaviour
                        response = vt.get_network_traffic(args.HASH)
                        print(
                            json.dumps(response, sort_keys=False,
                                indent=4))
                        response = vt.get_file_behaviour(args.HASH)
                        print(
                            json.dumps(response, sort_keys=False,
                                indent=4))
                else:
                    self.print_file(response)
            elif args.subcommand == "dl":
                # Download the sample, saved under its hash as filename
                if os.path.isfile(args.HASH):
                    print("File %s already exists" % args.HASH)
                    sys.exit(0)
                data = vt.get_file(args.HASH)
                # The library may return either raw bytes or a dict wrapper
                if isinstance(data, dict):
                    if 'results' in data:
                        with open(args.HASH, "wb") as f:
                            f.write(data['results'])
                        print("File downloaded as %s" % args.HASH)
                    else:
                        print('Invalid answer format')
                        sys.exit(1)
                else:
                    with open(args.HASH, "wb") as f:
                        f.write(data)
                    print("File downloaded as %s" % args.HASH)
            elif args.subcommand == "file":
                with open(args.FILE, "rb") as f:
                    # FIXME : could be more efficient
                    data = f.read()
                # Hash the local file, then query by sha256
                m = hashlib.sha256()
                m.update(data)
                h = m.hexdigest()
                response = vt.get_file_report(h)
                if args.raw:
                    print(json.dumps(response, sort_keys=False, indent=4))
                else:
                    self.print_file(response)
            elif args.subcommand == "hashlist":
                # Bulk lookup: one CSV line per unique hash in the file
                with open(args.FILE, 'r') as infile:
                    data = infile.read().split()
                hash_list = list(set([a.strip() for a in data]))
                print(
                    "Hash;Found;Detection;Total AV;First Seen;Last Seen;Link"
                )
                for h in hash_list:
                    response = vt.get_file_report(h)
                    if response["response_code"] != 200:
                        print("Error with the request (reponse code %i)" %
                                response["response_code"])
                        print(
                            json.dumps(response, sort_keys=False,
                                indent=4))
                        print("Quitting...")
                        sys.exit(1)
                    if "response_code" in response["results"]:
                        # Inner response_code 0 means hash unknown to VT
                        if response["results"]["response_code"] == 0:
                            print("%s;Not found;;;;;" % h)
                        else:
                            print("%s;Found;%i;%i;%s;%s;%s" %
                                    (h, response["results"]["positives"],
                                        response["results"]["total"],
                                        response["results"]["first_seen"],
                                        response["results"]["last_seen"],
                                        response["results"]["permalink"]))
                    else:
                        print("%s;Not found;;;;;" % h)
            elif args.subcommand == "domainlist":
                with open(args.FILE, 'r') as infile:
                    data = infile.read().split()
                for d in data:
                    print("################ Domain %s" % d.strip())
                    res = vt.get_domain_report(d.strip())
                    self.print_domaininfo(res)
            elif args.subcommand == "iplist":
                with open(args.FILE, 'r') as infile:
                    data = infile.read().split()
                for d in data:
                    print("################ IP %s" % d.strip())
                    res = vt.get_ip_report(unbracket(d.strip()))
                    print(json.dumps(res, sort_keys=False, indent=4))
            elif args.subcommand == "domain":
                res = vt.get_domain_report(unbracket(args.DOMAIN))
                if args.json:
                    print(json.dumps(res, sort_keys=False, indent=4))
                else:
                    self.print_domaininfo(res)
            elif args.subcommand == "ip":
                res = vt.get_ip_report(unbracket(args.IP))
                print(json.dumps(res, sort_keys=False, indent=4))
            elif args.subcommand == "url":
                res = vt.get_url_report(args.URL)
                print(json.dumps(res, sort_keys=False, indent=4))
            else:
                self.parser.print_help()
        else:
            # ----- Public API key: reduced feature set -----
            vt = PublicApi(conf["VirusTotal"]["key"])
            if args.subcommand == "hash":
                response = vt.get_file_report(args.HASH)
                if args.raw:
                    print(json.dumps(response, sort_keys=False, indent=4))
                else:
                    self.print_file(response)
            elif args.subcommand == "file":
                with open(args.FILE, "rb") as f:
                    # FIXME : could be more efficient
                    data = f.read()
                m = hashlib.sha256()
                m.update(data)
                response = vt.get_file_report(m.hexdigest())
                if args.raw:
                    print(json.dumps(response, sort_keys=False, indent=4))
                else:
                    self.print_file(response)
            elif args.subcommand == "hashlist":
                # Same CSV bulk lookup as private, minus first/last seen
                with open(args.FILE, 'r') as infile:
                    data = infile.read().split()
                hash_list = list(set([a.strip() for a in data]))
                print("Hash;Found;Detection;Total AV;Link")
                for h in hash_list:
                    response = vt.get_file_report(h)
                    if response["response_code"] != 200:
                        print("Error with the request (reponse code %i)" %
                                response["response_code"])
                        print(
                            json.dumps(response,
                                sort_keys=False,
                                indent=4))
                        print("Quitting...")
                        sys.exit(1)
                    if "response_code" in response["results"]:
                        if response["results"]["response_code"] == 0:
                            print("%s;Not found;;;" % h)
                        else:
                            print("%s;Found;%i;%i;%s" %
                                    (h, response["results"]["positives"],
                                        response["results"]["total"],
                                        response["results"]["permalink"]))
                    else:
                        print("%s;Not found;;;" % h)
            elif args.subcommand == "domain":
                res = vt.get_domain_report(unbracket(args.DOMAIN))
                if args.json:
                    print(json.dumps(res, sort_keys=False, indent=4))
                else:
                    self.print_domaininfo(res)
            elif args.subcommand == "ip":
                res = vt.get_ip_report(unbracket(args.IP))
                print(json.dumps(res, sort_keys=False, indent=4))
            elif args.subcommand == "url":
                res = vt.get_url_report(args.URL)
                print(json.dumps(res, sort_keys=False, indent=4))
            elif args.subcommand == "domainlist":
                print(
                    "Not implemented yet with public access, please propose PR if you need it"
                )
            elif args.subcommand == "dl":
                print(
                    "VirusTotal does not allow downloading files with a public feed, sorry"
                )
                sys.exit(0)
            else:
                self.parser.print_help()
    else:
        self.parser.print_help()
def run(self, conf, args, plugins):
    """Entry point of the domain command.

    Subcommands:
      - info: not implemented yet.
      - intel: query every configured plugin (OTX, CIRCL, BinaryEdge,
        Robtex, PassiveTotal, VirusTotal, ThreatGrid) about args.DOMAIN,
        accumulate passive DNS / URLs / malware / file hashes, then print
        a consolidated report to stdout.
    """
    if 'subcommand' in args:
        if args.subcommand == 'info':
            print("Not implemented yet")
        elif args.subcommand == "intel":
            # Start with MISP and OTX to get Intelligence Reports
            print('###################### %s ###################' % args.DOMAIN)
            # Accumulators filled by each enabled plugin below, then
            # rendered at the end of the subcommand.
            passive_dns = []
            urls = []
            malware = []
            files = []
            # OTX
            otx_e = plugins['otx'].test_config(conf)
            if otx_e:
                print('[+] Downloading OTX information....')
                otx = OTXv2(conf["AlienVaultOtx"]["key"])
                res = otx.get_indicator_details_full(IndicatorTypes.DOMAIN, unbracket(args.DOMAIN))
                # Pulses are reported in the "Intelligence Report" section later.
                otx_pulses = res["general"]["pulse_info"]["pulses"]
                # Get Passive DNS
                if "passive_dns" in res:
                    for r in res["passive_dns"]["passive_dns"]:
                        passive_dns.append({
                            # NOTE(review): OTX "hostname" is stored under the
                            # "ip" key here — the report prints it as the IP column.
                            "ip": r['hostname'],
                            "first": parse(r["first"]),
                            "last": parse(r["last"]),
                            "source": "OTX"
                        })
                if "url_list" in res:
                    for r in res["url_list"]["url_list"]:
                        if "result" in r:
                            urls.append({
                                "date": parse(r["date"]),
                                "url": r["url"],
                                # "urlworker" may lack an "ip" entry.
                                "ip": r["result"]["urlworker"]["ip"] if "ip" in r["result"]["urlworker"] else "",
                                "source": "OTX"
                            })
                        else:
                            urls.append({
                                "date": parse(r["date"]),
                                "url": r["url"],
                                "ip": "",
                                "source": "OTX"
                            })
            # CIRCL
            circl_e = plugins['circl'].test_config(conf)
            if circl_e:
                print('[+] Downloading CIRCL passive DNS information....')
                x = pypdns.PyPDNS(
                    basic_auth=(
                        conf['Circl']['user'],
                        conf['Circl']['pass']
                    )
                )
                res = x.query(unbracket(args.DOMAIN))
                for answer in res:
                    passive_dns.append({
                        "ip": answer['rdata'],
                        # time_first/time_last already come back as datetimes.
                        "first": answer['time_first'],
                        "last": answer['time_last'],
                        "source": "CIRCL"
                    })
            # BinaryEdge
            be_e = plugins['binaryedge'].test_config(conf)
            if be_e:
                print('[+] Downloading BinaryEdge information....')
                be = BinaryEdge(conf['BinaryEdge']['key'])
                res = be.domain_dns(unbracket(args.DOMAIN))
                for d in res['events']:
                    # Only A records carry resolved IPs here.
                    if "A" in d:
                        for a in d['A']:
                            passive_dns.append({
                                "ip": a,
                                # BinaryEdge only exposes a single timestamp.
                                "first": parse(d['updated_at']),
                                "last": parse(d['updated_at']),
                                "source": "BinaryEdge"
                            })
            # RobTex
            # NOTE(review): unlike the other sources, this call is not
            # wrapped in try/except and does not unbracket() the domain —
            # confirm whether that is intentional.
            print('[+] Downloading Robtex information....')
            rob = Robtex()
            res = rob.get_pdns_domain(args.DOMAIN)
            for d in res:
                if d['rrtype'] in ['A', 'AAAA']:
                    passive_dns.append({
                        'first': d['time_first_o'],
                        'last': d['time_last_o'],
                        'ip': d['rrdata'],
                        'source': 'Robtex'
                    })
            # PT
            pt_e = plugins['pt'].test_config(conf)
            if pt_e:
                try:
                    # Defaults in case PT is rate-limited or times out.
                    pt_osint = {}
                    ptout = False
                    print('[+] Downloading Passive Total information....')
                    client = DnsRequest(conf['PassiveTotal']['username'], conf['PassiveTotal']['key'])
                    raw_results = client.get_passive_dns(query=unbracket(args.DOMAIN))
                    if "results" in raw_results:
                        for res in raw_results["results"]:
                            passive_dns.append({
                                "first": parse(res["firstSeen"]),
                                "last": parse(res["lastSeen"]),
                                "ip": res["resolve"],
                                "source": "PT"
                            })
                    if "message" in raw_results:
                        if "quota_exceeded" in raw_results["message"]:
                            print("PT quota exceeded")
                            ptout = True
                    # Skip enrichment calls once the quota is exhausted.
                    if not ptout:
                        client2 = EnrichmentRequest(conf["PassiveTotal"]["username"], conf["PassiveTotal"]['key'])
                        # Get OSINT
                        # TODO: add PT projects here
                        pt_osint = client2.get_osint(query=unbracket(args.DOMAIN))
                        # Get malware
                        raw_results = client2.get_malware(query=unbracket(args.DOMAIN))
                        if "results" in raw_results:
                            for r in raw_results["results"]:
                                malware.append({
                                    'hash': r["sample"],
                                    'date': parse(r['collectionDate']),
                                    'source': 'PT (%s)' % r["source"]
                                })
                except requests.exceptions.ReadTimeout:
                    print("PT: Time Out")
            # VT
            vt_e = plugins['vt'].test_config(conf)
            if vt_e:
                # The private API exposes resolutions/samples; the public
                # feed does not, so vt_e is cleared below for public keys.
                if conf["VirusTotal"]["type"] != "public":
                    print('[+] Downloading VT information....')
                    vt = PrivateApi(conf["VirusTotal"]["key"])
                    res = vt.get_domain_report(unbracket(args.DOMAIN))
                    if "results" in res:
                        if "resolutions" in res['results']:
                            for r in res["results"]["resolutions"]:
                                # VT only gives the last resolution time.
                                passive_dns.append({
                                    "first": parse(r["last_resolved"]),
                                    "last": parse(r["last_resolved"]),
                                    "ip": r["ip_address"],
                                    "source": "VT"
                                })
                        if "undetected_downloaded_samples" in res['results']:
                            for r in res['results']['undetected_downloaded_samples']:
                                files.append({
                                    'hash': r['sha256'],
                                    # '' marks a missing date; the report
                                    # branches on it when printing files.
                                    'date': parse(r['date']) if 'date' in r else '',
                                    'source': 'VT'
                                })
                        if "undetected_referrer_samples" in res['results']:
                            for r in res['results']['undetected_referrer_samples']:
                                files.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']) if 'date' in r else '',
                                    'source': 'VT'
                                })
                        if "detected_downloaded_samples" in res['results']:
                            for r in res['results']['detected_downloaded_samples']:
                                malware.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']),
                                    'source': 'VT'
                                })
                        if "detected_referrer_samples" in res['results']:
                            for r in res['results']['detected_referrer_samples']:
                                # Entries without a date are skipped here.
                                if "date" in r:
                                    malware.append({
                                        'hash': r['sha256'],
                                        'date': parse(r['date']),
                                        'source': 'VT'
                                    })
                        if "detected_urls" in res['results']:
                            for r in res['results']['detected_urls']:
                                urls.append({
                                    'date': parse(r['scan_date']),
                                    'url': r['url'],
                                    'ip': '',
                                    'source': 'VT'
                                })
                else:
                    vt_e = False
            # ThreatGrid
            tg_e = plugins['threatgrid'].test_config(conf)
            if tg_e:
                try:
                    print('[+] Downloading Threat Grid....')
                    tg = ThreatGrid(conf['ThreatGrid']['key'])
                    res = tg.search_samples(unbracket(args.DOMAIN), type='domain')
                    # Deduplicate samples by sha256.
                    already = []
                    if 'items' in res:
                        for r in res['items']:
                            if r['sample_sha256'] not in already:
                                d = parse(r['ts'])
                                # Strip tzinfo so dates sort with the
                                # naive datetimes from other sources.
                                d = d.replace(tzinfo=None)
                                malware.append({
                                    'hash': r["sample_sha256"],
                                    'date': d,
                                    'source': 'ThreatGrid'
                                })
                                already.append(r['sample_sha256'])
                except ThreatGridError as e:
                    print("Failed to connect to Threat Grid: %s" % e.message)
            # TODO: Add MISP
            # ---- Report rendering: everything below only prints ----
            print('----------------- Intelligence Report')
            if otx_e:
                if len(otx_pulses):
                    print('OTX:')
                    for p in otx_pulses:
                        print(' -%s (%s - %s)' % (
                            p['name'],
                            p['created'][:10],
                            "https://otx.alienvault.com/pulse/" + p['id']
                            )
                        )
                else:
                    print('OTX: Not found in any pulse')
            if pt_e:
                if "results" in pt_osint:
                    if len(pt_osint["results"]):
                        # Single result prints inline; several print as a list.
                        if len(pt_osint["results"]) == 1:
                            if "name" in pt_osint["results"][0]:
                                print("PT: %s %s" % (pt_osint["results"][0]["name"], pt_osint["results"][0]["sourceUrl"]))
                            else:
                                print("PT: %s" % (pt_osint["results"][0]["sourceUrl"]))
                    else:
                            print("PT:")
                            for r in pt_osint["results"]:
                                if "name" in r:
                                    print("-%s %s" % (r["name"], r["sourceUrl"]))
                                else:
                                    print("-%s" % (r["sourceUrl"]))
                else:
                    print("PT: Nothing found!")
            if len(malware) > 0:
                print('----------------- Malware')
                for r in sorted(malware, key=lambda x: x["date"]):
                    print("[%s] %s %s" % (
                        r["source"], r["hash"],
                        r["date"].strftime("%Y-%m-%d")
                        )
                    )
            if len(files) > 0:
                print('----------------- Files')
                for r in files:
                    # '' date means VT gave no timestamp for the sample.
                    if r['date'] != '':
                        print("[%s] %s (%s)" % (
                            r["source"], r["hash"],
                            r["date"].strftime("%Y-%m-%d")
                            )
                        )
                    else:
                        print("[%s] %s" % (
                            r["source"], r["hash"],
                            )
                        )
            if len(urls) > 0:
                print('----------------- Urls')
                for r in sorted(urls, key=lambda x: x["date"], reverse=True):
                    print("[%s] %s - %s %s" % (
                        r["source"], r["url"], r["ip"],
                        r["date"].strftime("%Y-%m-%d")
                        )
                    )
            # TODO: add ASN + location info here
            if len(passive_dns) > 0:
                print('----------------- Passive DNS')
                for r in sorted(passive_dns, key=lambda x: x["first"], reverse=True):
                    print("[+] %-40s (%s -> %s)(%s)" % (
                        r["ip"],
                        r["first"].strftime("%Y-%m-%d"),
                        r["last"].strftime("%Y-%m-%d"),
                        r["source"]
                        )
                    )
        else:
            self.parser.print_help()
    else:
        self.parser.print_help()
def run(self, conf, args, plugins):
    """Entry point of the IP command.

    Subcommands:
      - info: print local (MaxMind / pyasn) geolocation and ASN data for
        args.IP plus handy lookup links.
      - intel: query every configured plugin (MISP, BinaryEdge, OTX,
        Robtex, PassiveTotal, UrlHaus, VirusTotal, GreyNoise, ThreatGrid,
        ThreatMiner) about args.IP and print a consolidated report.

    Fixes applied in this revision:
      * the PRIVATE-IP branch was a bare string expression (a no-op) —
        it now actually prints "Private IP";
      * the ASN-name file is opened with a context manager so the handle
        is always closed;
      * pt_osint is initialised before the PassiveTotal requests so a
        double timeout no longer raises NameError in the report section.
    """
    if 'subcommand' in args:
        if args.subcommand == 'info':
            if not is_ip(unbracket(args.IP)):
                print("Invalid IP address")
                sys.exit(1)
            # FIXME: move code here in a library
            ip = unbracket(args.IP)
            try:
                ipy = IP(ip)
            except ValueError:
                print('Invalid IP format, quitting...')
                return
            ipinfo = self.ipinfo(ip)
            print('MaxMind: Located in %s, %s' % (ipinfo['city'], ipinfo['country']))
            if ipinfo['asn'] == 0:
                print("MaxMind: IP not found in the ASN database")
            else:
                print('MaxMind: ASN%i, %s' % (ipinfo['asn'], ipinfo['asn_name']))
                print('CAIDA Type: %s' % ipinfo['asn_type'])
            try:
                asndb2 = pyasn.pyasn(self.asncidr)
                res = asndb2.lookup(ip)
            except OSError:
                print("Configuration files are not available")
                print("Please run harpoon update before using harpoon")
                sys.exit(1)
            if res[1] is None:
                print("IP not found in ASN database")
            else:
                # Search for name
                # BUGFIX: use a context manager so the ASN-name file is
                # closed even if an error occurs (was a leaked handle).
                with open(self.asnname, 'r') as f:
                    found = False
                    line = f.readline()
                    name = ''
                    while not found and line != '':
                        s = line.split('|')
                        if s[0] == str(res[0]):
                            name = s[1].strip()
                            found = True
                        line = f.readline()
                print('ASN %i - %s (range %s)' % (res[0], name, res[1]))
            if ipinfo['hostname'] != '':
                print('Hostname: %s' % ipinfo['hostname'])
            if ipinfo['specific'] != '':
                print("Specific: %s" % ipinfo['specific'])
            if ipy.iptype() == "PRIVATE":
                # BUGFIX: this branch was a bare string expression (no-op).
                print("Private IP")
            print("")
            if ipy.version() == 4:
                print("Censys:\t\thttps://censys.io/ipv4/%s" % ip)
                print("Shodan:\t\thttps://www.shodan.io/host/%s" % ip)
                print("IP Info:\thttp://ipinfo.io/%s" % ip)
                print("BGP HE:\t\thttps://bgp.he.net/ip/%s" % ip)
                print("IP Location:\thttps://www.iplocation.net/?query=%s" % ip)
        elif args.subcommand == "intel":
            if not is_ip(unbracket(args.IP)):
                print("Invalid IP address")
                sys.exit(1)
            # Start with MISP and OTX to get Intelligence Reports
            print('###################### %s ###################' % unbracket(args.IP))
            # Accumulators filled by each enabled plugin, rendered below.
            passive_dns = []
            urls = []
            malware = []
            files = []
            # MISP
            misp_e = plugins['misp'].test_config(conf)
            if misp_e:
                print('[+] Downloading MISP information...')
                server = ExpandedPyMISP(conf['Misp']['url'], conf['Misp']['key'])
                misp_results = server.search('attributes', value=unbracket(args.IP))
            # Binary Edge
            be_e = plugins['binaryedge'].test_config(conf)
            if be_e:
                try:
                    print('[+] Downloading BinaryEdge information...')
                    be = BinaryEdge(conf['BinaryEdge']['key'])
                    # FIXME: this only get the first page
                    res = be.domain_ip(unbracket(args.IP))
                    for d in res["events"]:
                        passive_dns.append({
                            "domain": d['domain'],
                            # BinaryEdge only exposes a single timestamp.
                            "first": parse(d['updated_at']).astimezone(pytz.utc),
                            "last": parse(d['updated_at']).astimezone(pytz.utc),
                            "source": "BinaryEdge"
                        })
                except BinaryEdgeException:
                    print(
                        'BinaryEdge request failed, you need a paid subscription'
                    )
            # OTX
            otx_e = plugins['otx'].test_config(conf)
            if otx_e:
                print('[+] Downloading OTX information....')
                otx = OTXv2(conf["AlienVaultOtx"]["key"])
                res = otx.get_indicator_details_full(
                    IndicatorTypes.IPv4, unbracket(args.IP))
                otx_pulses = res["general"]["pulse_info"]["pulses"]
                # Get Passive DNS
                if "passive_dns" in res:
                    for r in res["passive_dns"]["passive_dns"]:
                        passive_dns.append({
                            "domain": r['hostname'],
                            "first": parse(r["first"]).astimezone(pytz.utc),
                            "last": parse(r["last"]).astimezone(pytz.utc),
                            "source": "OTX"
                        })
                if "url_list" in res:
                    for r in res["url_list"]["url_list"]:
                        if "result" in r:
                            urls.append({
                                "date": parse(r["date"]).astimezone(pytz.utc),
                                "url": r["url"],
                                # "urlworker" may lack an "ip" entry.
                                "ip": r["result"]["urlworker"]["ip"] if "ip" in r["result"]["urlworker"] else "",
                                "source": "OTX"
                            })
                        else:
                            urls.append({
                                "date": parse(r["date"]).astimezone(pytz.utc),
                                "url": r["url"],
                                "ip": "",
                                "source": "OTX"
                            })
            # RobTex
            print('[+] Downloading Robtex information....')
            rob = Robtex()
            try:
                res = rob.get_ip_info(unbracket(args.IP))
            except RobtexError:
                print("Error with Robtex")
            else:
                # pas/pash = passive DNS, act/acth = active DNS sections.
                for d in ["pas", "pash", "act", "acth"]:
                    if d in res:
                        for a in res[d]:
                            passive_dns.append({
                                'first': a['date'].astimezone(pytz.utc),
                                'last': a['date'].astimezone(pytz.utc),
                                'domain': a['o'],
                                'source': 'Robtex'
                            })
            # PT
            pt_e = plugins['pt'].test_config(conf)
            if pt_e:
                out_pt = False
                # BUGFIX: initialise before the requests so that a timeout
                # in both try blocks below cannot leave pt_osint unbound
                # (the report section reads it unconditionally when pt_e).
                pt_osint = {}
                print('[+] Downloading Passive Total information....')
                client = DnsRequest(conf['PassiveTotal']['username'], conf['PassiveTotal']['key'])
                try:
                    raw_results = client.get_passive_dns(
                        query=unbracket(args.IP))
                    if "results" in raw_results:
                        for res in raw_results["results"]:
                            passive_dns.append({
                                "first": parse(res["firstSeen"]).astimezone(
                                    pytz.utc),
                                "last": parse(res["lastSeen"]).astimezone(
                                    pytz.utc),
                                "domain": res["resolve"],
                                "source": "PT"
                            })
                    if "message" in raw_results:
                        if "quota_exceeded" in raw_results["message"]:
                            print("Quota exceeded for Passive Total")
                            out_pt = True
                            pt_osint = {}
                except requests.exceptions.ReadTimeout:
                    print("Timeout on Passive Total requests")
                # Skip enrichment calls once the quota is exhausted.
                if not out_pt:
                    try:
                        client2 = EnrichmentRequest(
                            conf["PassiveTotal"]["username"],
                            conf["PassiveTotal"]['key'])
                        # Get OSINT
                        # TODO: add PT projects here
                        pt_osint = client2.get_osint(
                            query=unbracket(args.IP))
                        # Get malware
                        raw_results = client2.get_malware(
                            query=unbracket(args.IP))
                        if "results" in raw_results:
                            for r in raw_results["results"]:
                                malware.append({
                                    'hash': r["sample"],
                                    'date': parse(r['collectionDate']),
                                    'source': 'PT (%s)' % r["source"]
                                })
                    except requests.exceptions.ReadTimeout:
                        print("Timeout on Passive Total requests")
            # Urlhaus
            uh_e = plugins['urlhaus'].test_config(conf)
            if uh_e:
                print("[+] Checking urlhaus data...")
                try:
                    urlhaus = UrlHaus(conf["UrlHaus"]["key"])
                    res = urlhaus.get_host(unbracket(args.IP))
                except UrlHausError:
                    print("Error with the query")
                else:
                    if "urls" in res:
                        for r in res['urls']:
                            urls.append({
                                "date": parse(r["date_added"]).astimezone(
                                    pytz.utc),
                                "url": r["url"],
                                "source": "UrlHaus"
                            })
            # VT
            vt_e = plugins['vt'].test_config(conf)
            if vt_e:
                # Resolutions/samples need the private API; vt_e is
                # cleared below for public keys.
                if conf["VirusTotal"]["type"] != "public":
                    print('[+] Downloading VT information....')
                    vt = PrivateApi(conf["VirusTotal"]["key"])
                    res = vt.get_ip_report(unbracket(args.IP))
                    if "results" in res:
                        if "resolutions" in res['results']:
                            for r in res["results"]["resolutions"]:
                                # VT only gives the last resolution time.
                                passive_dns.append({
                                    "first": parse(r["last_resolved"]).astimezone(
                                        pytz.utc),
                                    "last": parse(r["last_resolved"]).astimezone(
                                        pytz.utc),
                                    "domain": r["hostname"],
                                    "source": "VT"
                                })
                        if "undetected_downloaded_samples" in res['results']:
                            for r in res['results']['undetected_downloaded_samples']:
                                files.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']),
                                    'source': 'VT'
                                })
                        if "undetected_referrer_samples" in res['results']:
                            for r in res['results']['undetected_referrer_samples']:
                                if 'date' in r:
                                    files.append({
                                        'hash': r['sha256'],
                                        'date': parse(r['date']),
                                        'source': 'VT'
                                    })
                                else:
                                    #FIXME : should consider data without dates
                                    files.append({
                                        'hash': r['sha256'],
                                        # epoch placeholder keeps sorting valid
                                        'date': datetime.datetime(1970, 1, 1),
                                        'source': 'VT'
                                    })
                        if "detected_downloaded_samples" in res['results']:
                            for r in res['results']['detected_downloaded_samples']:
                                malware.append({
                                    'hash': r['sha256'],
                                    'date': parse(r['date']),
                                    'source': 'VT'
                                })
                        if "detected_referrer_samples" in res['results']:
                            for r in res['results']['detected_referrer_samples']:
                                # Entries without a date are skipped here.
                                if "date" in r:
                                    malware.append({
                                        'hash': r['sha256'],
                                        'date': parse(r['date']),
                                        'source': 'VT'
                                    })
                else:
                    vt_e = False
            # GreyNoise (no API key needed)
            print('[+] Downloading GreyNoise information....')
            gn = GreyNoise()
            try:
                greynoise = gn.query_ip(unbracket(args.IP))
            except GreyNoiseError:
                greynoise = []
            # ThreatGrid
            tg_e = plugins['threatgrid'].test_config(conf)
            if tg_e:
                print('[+] Downloading Threat Grid....')
                try:
                    tg = ThreatGrid(conf['ThreatGrid']['key'])
                    res = tg.search_samples(unbracket(args.IP), type='ip')
                    # Deduplicate samples by sha256.
                    already = []
                    if 'items' in res:
                        for r in res['items']:
                            if r['sample_sha256'] not in already:
                                d = parse(r['ts'])
                                # Strip tzinfo so dates compare with the
                                # naive datetimes from other sources.
                                d = d.replace(tzinfo=None)
                                malware.append({
                                    'hash': r["sample_sha256"],
                                    'date': d,
                                    'source': 'TG'
                                })
                                already.append(r['sample_sha256'])
                except ThreatGridError as e:
                    print("Error with threat grid: {}".format(e.message))
            # ThreatMiner
            print('[+] Downloading ThreatMiner....')
            tm = ThreatMiner()
            response = tm.get_report(unbracket(args.IP))
            if response['status_code'] == '200':
                tmm = response['results']
            else:
                tmm = []
                # 404 just means "no data"; anything else is a real error.
                if response['status_code'] != '404':
                    print("Request to ThreatMiner failed: {}".format(
                        response['status_message']))
            response = tm.get_related_samples(unbracket(args.IP))
            if response['status_code'] == '200':
                for r in response['results']:
                    malware.append({
                        'hash': r,
                        # ThreatMiner does not provide a date.
                        'date': None,
                        'source': 'ThreatMiner'
                    })
            # ---- Report rendering: everything below only prints ----
            print('----------------- Intelligence Report')
            ctor = CommandTor()
            tor_list = ctor.get_list()
            if tor_list:
                if unbracket(args.IP) in tor_list:
                    print("{} is a Tor Exit node".format(unbracket(
                        args.IP)))
            else:
                print("Impossible to reach the Tor Exit Node list")
            if otx_e:
                if len(otx_pulses):
                    print('OTX:')
                    for p in otx_pulses:
                        print('- %s (%s - %s)' %
                              (p['name'], p['created'][:10],
                               "https://otx.alienvault.com/pulse/" + p['id']))
                else:
                    print('OTX: Not found in any pulse')
            if misp_e:
                if len(misp_results['Attribute']) > 0:
                    print('MISP:')
                    for event in misp_results['Attribute']:
                        print("- {} - {}".format(event['Event']['id'],
                                                 event['Event']['info']))
            if len(greynoise) > 0:
                print("GreyNoise: IP identified as")
                for r in greynoise:
                    print("\t%s (%s -> %s)" % (r["name"], r["first_seen"],
                                               r["last_updated"]))
            else:
                print("GreyNoise: Not found")
            if pt_e:
                if "results" in pt_osint:
                    if len(pt_osint["results"]):
                        # Single result prints inline; several as a list.
                        if len(pt_osint["results"]) == 1:
                            if "name" in pt_osint["results"][0]:
                                print(
                                    "PT: %s %s" %
                                    (pt_osint["results"][0]["name"],
                                     pt_osint["results"][0]["sourceUrl"]))
                            else:
                                print("PT: %s" %
                                      pt_osint["results"][0]["sourceUrl"])
                        else:
                            print("PT:")
                            for r in pt_osint["results"]:
                                if "name" in r:
                                    print("-%s %s" % (r["name"],
                                                      r["sourceUrl"]))
                                else:
                                    print("-%s" % r["sourceUrl"])
                    else:
                        print("PT: Nothing found!")
                else:
                    print("PT: Nothing found!")
            # ThreatMiner
            if len(tmm) > 0:
                print("ThreatMiner:")
                for r in tmm:
                    print("- {} {} - {}".format(r['year'], r['filename'],
                                                r['URL']))
            if len(malware) > 0:
                print('----------------- Malware')
                for r in malware:
                    # date may be None (ThreatMiner) — print it empty then.
                    print("[%s] %s %s" % (r["source"], r["hash"],
                                          r["date"].strftime("%Y-%m-%d")
                                          if r["date"] else ""))
            if len(files) > 0:
                print('----------------- Files')
                for r in sorted(files, key=lambda x: x["date"]):
                    print("[%s] %s %s" % (r["source"], r["hash"],
                                          r["date"].strftime("%Y-%m-%d")))
            if len(passive_dns) > 0:
                print('----------------- Passive DNS')
                for r in sorted(passive_dns,
                                key=lambda x: x["first"],
                                reverse=True):
                    print("[+] %-40s (%s -> %s)(%s)" % (
                        r["domain"],
                        r["first"].strftime("%Y-%m-%d"),
                        r["last"].strftime("%Y-%m-%d"),
                        r["source"]))
            if len(urls) > 0:
                print('----------------- Urls')
                for r in sorted(urls, key=lambda x: x["date"],
                                reverse=True):
                    print("[%s] %s - %s" % (r["source"], r["url"],
                                            r["date"].strftime("%Y-%m-%d")))
        else:
            self.parser.print_help()
    else:
        self.parser.print_help()
def run(self, conf, args, plugins):
    """Entry point of the ThreatGrid command.

    Subcommands:
      - search: look for samples matching args.QUERY of type args.TYPE
        (JSON dump with --json, otherwise a deduplicated listing).
      - hash: fetch a sample by md5/sha1/sha256 and list its threats.
      - networklist: bulk-search indicators read one per line from FILE.

    Fixes applied in this revision:
      * search: a ThreatGridError used to print "Invalid type" but then
        fall through and read the undefined `res` (NameError); it now
        returns immediately;
      * hash: an unexpected hash length used to raise a raw KeyError; it
        is now reported cleanly.
    """
    tg = ThreatGrid(conf['ThreatGrid']['key'])
    if 'subcommand' in args:
        if args.subcommand == "search":
            try:
                res = tg.search_samples(unbracket(args.QUERY), type=args.TYPE)
            except ThreatGridError:
                # BUGFIX: bail out — `res` is undefined past this point.
                print("Invalid type")
                return
            if args.json:
                print(json.dumps(res, sort_keys=True, indent=4))
            else:
                if len(res['items']) == 0:
                    print('Not found')
                else:
                    # Deduplicate samples by sha256.
                    already = []
                    for item in res['items']:
                        if item['sample_sha256'] not in already:
                            print("%s - %s - %s" % (
                                item['ts'],
                                item['sample_sha256'],
                                "https://panacea.threatgrid.com/mask/samples/" + item['sample']))
                            already.append(item['sample_sha256'])
        elif args.subcommand == 'hash':
            # Infer the hash algorithm from the hex-digest length.
            hash_type = {32: 'md5', 40: 'sha1', 64: 'sha256'}
            if len(args.HASH) not in hash_type:
                # BUGFIX: previously raised KeyError on odd lengths.
                print("Invalid hash length: expected md5, sha1 or sha256")
                return
            res = tg.get_sample(args.HASH, type=hash_type[len(args.HASH)])
            if len(res['items']) > 0:
                item = res['items'][0]
                print(
                    "Sample submitted the %s: https://panacea.threatgrid.com/mask/samples/%s"
                    % (item['submitted_at'], item['id']))
                idd = item['id']
                res = tg.get_sample_threats(idd)
                print('\nThreats:')
                for t in res['bis']:
                    print("-%s" % t)
            else:
                print('Hash not found')
        elif args.subcommand == 'networklist':
            with open(args.FILE, 'r') as f:
                data = f.read().split('\n')
            for d in data:
                target = unbracket(d.strip())
                # Guess indicator type (ip/domain/...) per line.
                gtype = typeguess(target)
                print(target)
                res = tg.search_samples(target, type=gtype)
                if len(res['items']) > 0:
                    # Deduplicate samples by sha256.
                    already = []
                    for item in res['items']:
                        if item['sample_sha256'] not in already:
                            print(
                                "-%s: https://panacea.threatgrid.com/mask/samples/%s %s"
                                % (item['ts'][:10], item['sample'],
                                   item['sample_sha256']))
                            already.append(item['sample_sha256'])
                else:
                    print('-Nothing found')
                print('')
        else:
            self.parser.print_help()
    else:
        self.parser.print_help()