def hello(request):
    """AJAX view: resolve ``input_text`` via Google DNS and via Timeweb's NS.

    Expects GET params ``input_text`` (domain) and ``record_type``.
    Returns a JsonResponse with both resolutions; errors are reported in
    the payload (status 200) so the frontend can display them, while a
    non-AJAX request gets status 500.
    """
    if request.is_ajax():
        try:
            domain = request.GET['input_text']
            record_type = request.GET['record_type']
            # AAAA / PTR go straight through dig; other types use the
            # Google DNS-over-HTTPS helper.
            if record_type == 'AAAA' or record_type == 'PTR':
                resp_from_googledns = pydig.query(str(domain), str(record_type))
            else:
                resp_from_googledns = request_to_googledns(request_data={
                    "name_": domain,
                    "type_": record_type
                })
            resolver = pydig.Resolver(
                executable='/usr/bin/dig',
                nameservers=[
                    '92.53.116.26'  # ns timeweb
                ])
            # BUG FIX: query through the Timeweb resolver. The original
            # called module-level pydig.query() again, so the Resolver
            # built above was never used and both answers came from the
            # system resolver.
            response_from_timeweb_dns = resolver.query(str(domain), str(record_type))
            return JsonResponse(
                {'response': resp_from_googledns,
                 'response_from_timeweb_dns': response_from_timeweb_dns},
                status=200)
        except Exception as err:
            html = f"Error {err}."
            print(html)
            return JsonResponse({'response': html, }, status=200)
    else:
        html = "Error."
        return JsonResponse({'response': html, }, status=500)
def schedule_reduce_server(framework, offers, dest_ip, dest_port, task_id):
    """Ensure a ReduceServer task is running and return its (ip, port).

    If a ReduceServer task is already running, resolve its agent's domain
    and return that address. Otherwise pick an agent from *offers* with
    enough resources, launch the Docker task on it, and return its address.
    Exits the whole process when no suitable agent exists.

    NOTE(review): dest_ip, dest_port and task_id are currently unused —
    confirm whether they are kept for interface compatibility.
    """
    agent = framework.getAgentInfoForRunningTask("ReduceServer")
    if agent:
        print("Reduce server running.")
        domain = framework.getAgentProperty(agent, 'domain')
        # Assumes the domain resolves to at least one A record.
        agent_ip = pydig.query(domain, 'A')[0]
        return agent_ip, 3004
    # reduce task needs to be scheduled
    server_agents = framework.findAgents(offers, {
        'domain': None,
        'cpus': 0.5,
        'mem': 100000000
    })
    if len(server_agents) == 0:
        print(
            "No available reduce agents.\n\nA reduce agent is required. Please ensure that the cluster has enough resources.\n\nExiting."
        )
        sys.exit(1)
    # issue task and return IP / Port
    framework.runTask("ReduceServer",
                      server_agents[0],
                      docker_image='jnoor/sensor-reduce:v1',
                      docker_port_mappings={3004: 3004},
                      environment={'SERVER_PORT': '3004'})
    print("Started reduce task on agent: {}".format(server_agents[0].agent_id))
    domain = framework.getAgentProperty(server_agents[0].agent_id, 'domain')
    agent_ip = pydig.query(domain, 'A')[0]
    return agent_ip, 3004
def testForWildcardDomains(domainSet):
    """Return the subset of *domainSet* that appears to sit under a
    wildcard DNS record.

    For each domain, each subdomain label is replaced in turn with '*'
    and the probe name is queried; when a probe resolves to exactly the
    same A-record set as the domain itself, the domain is flagged.
    """
    wildcardDomains = set([])
    for domain in domainSet:
        try:
            topLevelDomain = get_tld("https://" + domain,
                                     fail_silently=True,
                                     as_object=True)
            baseDomain = topLevelDomain.fld
            topLevelDomain = topLevelDomain.tld
            # Everything left of the registered domain = subdomain labels.
            subDomains = domain.split("." + baseDomain)
            if len(subDomains) <= 1:
                continue  # no subdomain part — nothing to probe
            subDomains = subDomains[0]
            subDomains = subDomains.split(".")
            referenceResponse = set(pydig.query(domain, 'A'))
            if len(referenceResponse) >= 1:
                for subDomain in subDomains:
                    try:
                        # Substitute this label with '*' to build the probe.
                        probeDomain = '*.' + domain
                        probeDomain = probeDomain.replace(
                            '*.' + subDomain + '.', '*.', 1)
                        probeResponse = set(pydig.query(probeDomain, 'A'))
                        if len(probeResponse) >= 1:
                            # Symmetric difference empty => identical A sets.
                            if (len(referenceResponse - probeResponse) +
                                    len(probeResponse - referenceResponse)) == 0:
                                wildcardDomains.add(domain)
                                # NOTE(review): this `continue` only moves to
                                # the next label; a `break` may be intended
                                # once the domain is already flagged.
                                continue
                    except Exception as e:
                        print('Error in wildcard domain check: ' + str(e))
                        pass
        except Exception as e:
            print('Error in wildcard domain check: ' + str(e))
    return wildcardDomains
def _dns_resolve(self, url):
    """
    Runs the dns resolver against a url
    :param url: the url to visit
    :return: dict with the 'A', 'CNAME' and 'NS' records of *url*
    """
    # Lazy %-args so the string is only built when DEBUG is enabled.
    logging.debug("dns resolve against %s\n", url)
    a_records = pydig.query(url, 'A')
    cnames = pydig.query(url, 'CNAME')
    ns_record = pydig.query(url, 'NS')
    logging.info("A records:%s, CNames: %s, NS Records:%s\n",
                 a_records, cnames, ns_record)
    # BUG FIX: the docstring promised "the dig result" but nothing was
    # returned. Returning the records is backward compatible for callers
    # that ignored the previous implicit None.
    return {'A': a_records, 'CNAME': cnames, 'NS': ns_record}
def test_query(monkeypatch):
    """
    Tests that the query returns the values we expect
    """
    cases = (
        ('dig example.com A +short', '127.0.0.1', ['127.0.0.1']),
        ('dig example.com TXT +short', '"1"\n"2"', ['"1"', '"2"']),  # noqa
        ('dig example.com CNAME +short', '', []),  # noqa
    )
    for command, raw_output, expected in cases:
        record_type = command.split()[2]
        with _resolver(monkeypatch, command, raw_output):
            assert pydig.query('example.com', record_type) == expected
def domains(self, d_conf):
    """Build VPN routing rules from the domain list file *d_conf*.

    Each non-comment line is a domain to resolve; a line prefixed with
    '@' is expanded to its announced subnet(s) via whois instead of the
    individual host addresses. Rules are appended to self.vpn_list.
    """
    if self.local_ip == "0.0.0.0":
        # 0.0.0.0 means "unspecified" — leave the field empty in rules.
        self.local_ip = ""
    # NOTE(review): file handle is never closed — consider `with open(...)`.
    lines = filter(None, open(d_conf, "r").read().splitlines())
    for line in lines:
        if not line.startswith('#'):  # '#' marks a comment line
            d = line.replace('@', '').strip()
            results = pydig.query(d, 'A')
            results = [item.replace('\r', '') for item in results]
            # Keep only answers that look like IPv4 addresses.
            results = [x for x in results if re.match(self.ip_regex, x)]
            print(f"{d} IP: {str(results)}")
            if line.startswith('@'):
                # '@' prefix: route the whole announced subnet(s).
                subnets = []
                for ip in results:
                    # Grab route/CIDR lines from the whois output; `ip` has
                    # already been regex-validated above.
                    net = os.popen(
                        f"whois {ip} | grep 'route\|CIDR'").read()
                    result = re.findall(self.cidr_regex, net)
                    subnets.extend(result)
                # De-duplicate while preserving discovery order.
                unique_subnets = []
                [
                    unique_subnets.append(n) for n in subnets
                    if n not in unique_subnets
                ]
                print(f"{d} Subnets: {unique_subnets}")
                for s in unique_subnets:
                    subnet = self.format_rule.format(
                        1, d[:self.name_length], self.local_ip, s,
                        self.interface)
                    self.vpn_list.append(subnet)
            else:
                for r in results:
                    ip_address = self.format_rule.format(
                        1, d[:self.name_length], self.local_ip, r,
                        self.interface)
                    self.vpn_list.append(ip_address)
def dig_using_wordlist(domain, already_found_domains, objectified_domains):
    """Brute-force subdomains of *domain* using a fixed wordlist.

    For each candidate <word>.<domain> with a single non-blacklisted A
    answer, records the name in *already_found_domains* and appends a
    Domain object (name + cleaned record list) to *objectified_domains*.
    Both list arguments are mutated in place.
    """
    try:
        wordlist = open("/data/default_domains.txt", "r")
    except Exception as e:
        print(e)
        exit(1)
    for word in wordlist:
        full_domain = word.rstrip() + "." + domain
        returned = pydig.query(full_domain, 'A')
        if (len(returned) == 0):
            # No answer at all — candidate does not exist.
            pass
        elif (len(returned) == 1):
            # NOTE(review): `returned == ""` compares a list to a string and
            # is always False. 92.242.132.24 looks like an ISP catch-all IP.
            if (returned[0] == "92.242.132.24"
                    or returned[0] == "NXDOMAIN" or returned == ""):
                pass
            else:
                if (full_domain not in already_found_domains):
                    already_found_domains.append(full_domain)
                    # NOTE(review): answers with more than one record fall
                    # through silently above — confirm that is intended.
                    to_remove_from_ip_list = []
                    for suspected_ip in returned:
                        # if what should be an ip contains any alpha letters
                        if (suspected_ip.lower().islower() == True):
                            to_remove_from_ip_list.append(suspected_ip)
                    for item in to_remove_from_ip_list:
                        returned.remove(item)
                    domain_object = Domain(full_domain, returned)
                    objectified_domains.append(domain_object)
                else:
                    print("i already know about", full_domain)
def ip_updater():
    """Daemon loop keeping ALLOWED_HOSTS in sync with the load balancer.

    Adds the EC2 instance's private IP (so AWS ELB health checks pass)
    and the public A records of WEBSITE_DOMAIN. Runs forever; intended
    to be started in a background thread. No-op when '*' is allowed.
    """
    global ALLOWED_HOSTS
    if '*' in ALLOWED_HOSTS:
        return
    # dynamically update loadbalancer ips
    while True:
        # load balancer internal ip - aws elb checks ip
        EC2_PRIVATE_IP = None
        try:
            # EC2 instance-metadata endpoint; tiny timeout so non-EC2
            # environments fail fast instead of blocking the loop.
            EC2_PRIVATE_IP = requests.get(
                'http://169.254.169.254/latest/meta-data/local-ipv4',
                timeout=0.01).text
        except requests.exceptions.RequestException:
            pass
        if EC2_PRIVATE_IP and EC2_PRIVATE_IP not in ALLOWED_HOSTS:
            ALLOWED_HOSTS.append(EC2_PRIVATE_IP)
        # load balancer external ips
        for ip in pydig.query(WEBSITE_DOMAIN, 'A'):
            if ip not in ALLOWED_HOSTS and type(ip) is str:
                ALLOWED_HOSTS.append(ip)
        # Randomized sleep avoids synchronized DNS bursts across workers.
        time.sleep(randint(1, 9))
def route53_info(self, zone_names):
    """Print Route53 hosted-zone details and sanity-check NS records.

    For each zone id in *zone_names*: prints the zone name, visibility
    (public/private) and zone id, then compares the zone's authoritative
    NS record set against a live dig lookup to flag stale zones.

    :param zone_names: iterable of hosted-zone ids accepted by
        ``get_hosted_zone``.
    """
    print('-----------------------------------------------------------------------')
    print(colored('[SYSTEM] Getting Hosted Zones Information...', 'yellow'))
    print('-----------------------------------------------------------------------')
    client = self.session.client('route53')
    ns_names = []
    for i in zone_names:
        r = client.get_hosted_zone(
            Id=i
        )
        ns_names.append(r['HostedZone']['Name'])
        pprint.pprint(f" [*] {r['HostedZone']['Name']}")
        # BUG FIX: str.strip('/hostedzone/') strips any of those
        # *characters* from both ends (mangling ids such as 'Z2...ed');
        # remove the literal path prefix instead.
        zone_id = r['HostedZone']['Id'].replace('/hostedzone/', '')
        if r['HostedZone']['Config']['PrivateZone']:
            print(colored(f" [*] Private Zone", "green"))
        else:
            print(colored(f" [*] Public Zone", "yellow"))
        print(f" [*] Zone ID: {zone_id}")
        response = client.list_resource_record_sets(
            HostedZoneId=i,
            StartRecordName=r['HostedZone']['Name'],
            StartRecordType='NS',
            MaxItems='1'
        )
        ns_list = []
        for record in response['ResourceRecordSets'][0]['ResourceRecords']:
            ns_list.append(record['Value'])
        # Any overlap between live NS and configured NS counts as valid.
        dig_list = pydig.query(r['HostedZone']['Name'], 'NS')
        if set(dig_list) & set(ns_list):
            print(colored(f" [*] Valid Zone - DIG Check returned matched NS", "green"))
            continue
        else:
            print(colored(f" [*] Invalid Zone - DIG Check Failed", "red"))
def dig_ns(self):
    """Report whether the URL's host has NS records.

    Returns (True/False, None) on success, or (None, exception) when
    the lookup fails; the exception is also printed.
    """
    try:
        records = pydig.query(urlparse(self.url).netloc, 'NS')
    except Exception as exc:
        print(exc)
        return None, exc
    return (bool(records), None)
def ns_ip_address(self, ns_list):
    """Resolve non-Cloudflare nameserver hostnames and register their IPs.

    For each name in *ns_list*, resolves an A record and appends the
    first address to ``self.resolver.nameservers`` when it is not
    already present. Cloudflare nameservers are skipped.
    """
    for ns_host in ns_list:
        # Guard clauses replace the original dead `else: pass` pyramid;
        # behavior is unchanged.
        if "cloudflare" in ns_host:
            continue
        ip_address = pydig.query(ns_host, "A")
        if not ip_address:  # pydig returns [] when resolution fails
            continue
        if ip_address[0] not in self.resolver.nameservers:
            self.resolver.nameservers.append(ip_address[0])
def DnsResolve(urls):
    """Resolve NS records for each target in *urls*, appending them to ns.txt.

    Prints a colored found/not-found status line per target; any lookup
    error is reported as "not found" rather than raised.
    """
    for target in urls:
        try:
            dns_resolver = pydig.query(target, 'NS')
            with open('ns.txt', 'a+') as outfile:
                # BUG FIX: the original rebound the `file` handle to a
                # throwaway list of write() results inside its own `with`
                # block; write plainly and test the record list instead.
                for dns in dns_resolver:
                    outfile.write(str(dns) + '\n')
            if dns_resolver:
                print(
                    f"[{minus}] %s NameServer Found for {red}{target}{end}" %
                    (green))
            else:
                print(
                    f"[{minus}] %s No NameServer Found for {yellow}{target}{end}"
                    % (red))
        except Exception:
            # pydig raises subprocess.CalledProcessError when dig fails;
            # the original's extra bare-except arm duplicated this branch.
            print(
                f"[{minus}] %s No NameServer Found for {yellow}{target}{end}"
                % (red))
def _find_diff(a_record):
    """Find the difference between existing & new A records."""
    current = set(CURRENT_LIST)
    latest = set(pydig.query(a_record, 'A'))
    removed = current - latest
    if removed:
        return removed, 'IPs removed'
    added = latest - current
    if added:
        return added, 'IPs added'
    return None, None
def parse_digtxt(querystr):
    """Dig the TXT (SPF) record of *querystr* and print its CIDR blocks,
    following ``include:`` directives recursively.

    :param querystr: domain whose SPF record should be expanded.
    """
    response = pydig.query(querystr, 'txt')
    # Robustness: no TXT record — nothing to parse. The original
    # indexed response[0] unconditionally and raised IndexError.
    if not response:
        return
    # Only the first TXT record is inspected — assumed to be the SPF one.
    for elem in response[0].split():
        if 'include:' in elem:
            parse_digtxt(elem[8:])  # len('include:') == 8
        else:
            # 'ip4:x.x.x.x/nn' / 'ip6:...' — slice off the 4-char prefix.
            if 'ip4' in elem:
                print(elem[4:])
            if 'ip6' in elem:
                print(elem[4:])
def check_hash(self):
    """Checks file hash reputation

    Checks Team Cymru's Malware Hash Registry for time last
    seen and detection percentage of a given file hash.
    """
    # BUG FIX: the original condition was
    # `== "MD5" or "SHA-1"`, which is always truthy because the bare
    # string "SHA-1" is its own operand; test membership instead.
    if get_hash_type(self.file_hash) in ("MD5", "SHA-1"):
        dig = pydig.query(f"{self.file_hash}.malware.hash.cymru.com", "TXT")
        if dig:
            return_list = self._to_list(dig)
            return self._to_dict(return_list)
def parse_digtxt(querystr, resultset):
    """Dig the TXT (SPF) record of *querystr* and collect its CIDR blocks
    into *resultset*, following ``include:`` directives recursively.

    :param querystr: domain whose SPF record should be expanded.
    :param resultset: dict mapping CIDR -> provider label ("GCP").
    :return: the (mutated) *resultset*.
    """
    response = pydig.query(querystr, 'txt')
    # Robustness: no TXT record — return unchanged. The original indexed
    # response[0] unconditionally and raised IndexError.
    if not response:
        return resultset
    # Only the first TXT record is inspected — assumed to be the SPF one.
    for elem in response[0].split():
        if 'include:' in elem:
            resultset = parse_digtxt(elem[8:], resultset)  # len('include:') == 8
        else:
            # 'ip4:x.x.x.x/nn' / 'ip6:...' — slice off the 4-char prefix.
            if 'ip4' in elem:
                if elem[4:] not in resultset:
                    resultset[elem[4:]] = "GCP"
            if 'ip6' in elem:
                if elem[4:] not in resultset:
                    resultset[elem[4:]] = "GCP"
    return resultset
def analyzer(record, type='ANY'):
    """Query DNS records for *record*.

    With type 'ANY', queries a fixed set of common record types and
    returns a dict keyed by type. Otherwise returns the list of answers
    (TXT values are unquoted). Returns None for an empty/None record;
    query failures are returned as an 'Exception: ...' string instead of
    being raised.
    """
    if record is None or len(record) == 0:
        return None
    type = type.upper()
    if type == 'ANY':
        types = ['A', 'AAAA', 'CNAME', 'MX', 'NS', 'TXT']
        data = {}
        for t in types:
            data[t] = PhishingTrackerDig.analyzer(record, t)
        return data
    try:
        response = pydig.query(record, type)
    except Exception as e:
        # BUG FIX: return immediately. Previously a failed TXT query fell
        # through to the unquoting comprehension below, which iterated the
        # error *string* and shredded it into a list of characters.
        return 'Exception: {}'.format(str(e))
    if type == 'TXT':
        response = [s.strip('"') for s in response]
    return response
def netflow_check(self, dir):
    """
    Enrich and check the netflow from the conn.log against whitelist and IOCs.

    Reads Zeek's conn.log under *dir*, deduplicates the connection
    records, resolves destination IPs, then — depending on the enabled
    analysis flags — drops whitelisted hosts and raises heuristic
    (PROTO-*), indicator-of-compromise (IOC-*) and active-lookup
    (ACT-*) alerts.

    :param dir: directory containing the Zeek capture logs.
    :return: nothing - all stuff appended to self.alerts
    """
    max_ports = get_config(("analysis", "max_ports"))
    http_default_port = get_config(("analysis", "http_default_port"))
    # Get the netflow from conn.log.
    if os.path.isfile(os.path.join(dir, "conn.log")):
        for record in ParseZeekLogs(os.path.join(dir, "conn.log"),
                                    output_format="json",
                                    safe_headers=False):
            if record is not None:
                # NOTE: "alert_tiggered" (sic) is the key used throughout
                # this module — do not "fix" the spelling in isolation.
                c = {
                    "ip_dst": record["id.resp_h"],
                    "proto": record["proto"],
                    "port_dst": record["id.resp_p"],
                    "service": record["service"],
                    "alert_tiggered": False
                }
                if c not in self.conns:
                    self.conns.append(c)
    # Let's add some dns resolutions.
    for c in self.conns:
        c["resolution"] = self.resolve(c["ip_dst"])
    # Order the conns list by the resolution field.
    self.conns = sorted(self.conns, key=lambda c: c["resolution"])
    # Check for whitelisted assets, if any, delete the record.
    if self.whitelist_analysis:
        wl_cidrs = [IPNetwork(cidr) for cidr in get_whitelist("cidr")]
        wl_hosts = get_whitelist("ip4addr") + get_whitelist("ip6addr")
        wl_domains = get_whitelist("domain")
        for i, c in enumerate(self.conns):
            # Whitelist match by exact IP, exact domain, parent domain
            # suffix, or CIDR membership — matched entries are moved to
            # self.whitelist and tombstoned with False.
            if c["ip_dst"] in [ip for ip in wl_hosts]:
                self.whitelist.append(self.conns[i])
                self.conns[i] = False
            elif c["resolution"] in wl_domains:
                self.whitelist.append(self.conns[i])
                self.conns[i] = False
            elif True in [
                    c["resolution"].endswith("." + dom) for dom in wl_domains
            ]:
                self.whitelist.append(self.conns[i])
                self.conns[i] = False
            elif True in [
                    IPAddress(c["ip_dst"]) in cidr for cidr in wl_cidrs
            ]:
                self.whitelist.append(self.conns[i])
                self.conns[i] = False
        # Let's delete whitelisted connections.
        self.conns = list(filter(lambda c: c != False, self.conns))
    if self.heuristics_analysis:
        for c in self.conns:
            # Check for UDP / ICMP (strange from a smartphone.)
            if c["proto"] in ["UDP", "ICMP"]:
                c["alert_tiggered"] = True
                self.alerts.append({
                    "title":
                    self.template["PROTO-01"]["title"].format(
                        c["proto"].upper(), c["resolution"]),
                    "description":
                    self.template["PROTO-01"]["description"].format(
                        c["proto"].upper(), c["resolution"]),
                    "host": c["resolution"],
                    "level": "Moderate",
                    "id": "PROTO-01"
                })
            # Check for use of ports over 1024.
            if c["port_dst"] >= max_ports:
                c["alert_tiggered"] = True
                self.alerts.append({
                    "title":
                    self.template["PROTO-02"]["title"].format(
                        c["proto"].upper(), c["resolution"], max_ports),
                    "description":
                    self.template["PROTO-02"]["description"].format(
                        c["proto"].upper(), c["resolution"], c["port_dst"]),
                    "host": c["resolution"],
                    "level": "Low",
                    "id": "PROTO-02"
                })
            # Check for use of HTTP.
            if c["service"] == "http" and c[
                    "port_dst"] == http_default_port:
                c["alert_tiggered"] = True
                self.alerts.append({
                    "title":
                    self.template["PROTO-03"]["title"].format(
                        c["resolution"]),
                    "description":
                    self.template["PROTO-03"]["description"].format(
                        c["resolution"]),
                    "host": c["resolution"],
                    "level": "Low",
                    "id": "PROTO-03"
                })
            # Check for use of HTTP on a non standard port.
            if c["service"] == "http" and c[
                    "port_dst"] != http_default_port:
                c["alert_tiggered"] = True
                self.alerts.append({
                    "title":
                    self.template["PROTO-04"]["title"].format(
                        c["resolution"], c["port_dst"]),
                    "description":
                    self.template["PROTO-04"]["description"].format(
                        c["resolution"], c["port_dst"]),
                    "host": c["resolution"],
                    "level": "Moderate",
                    "id": "PROTO-04"
                })
            # Check for non-resolved IP address.
            if c["ip_dst"] == c["resolution"]:
                c["alert_tiggered"] = True
                self.alerts.append({
                    "title":
                    self.template["PROTO-05"]["title"].format(c["ip_dst"]),
                    "description":
                    self.template["PROTO-05"]["description"].format(
                        c["ip_dst"]),
                    "host": c["ip_dst"],
                    "level": "Low",
                    "id": "PROTO-05"
                })
    if self.iocs_analysis:
        # IOC feeds: each entry is (value, tag) pairs from the database.
        bl_cidrs = [[IPNetwork(cidr[0]), cidr[1]]
                    for cidr in get_iocs("cidr")]
        bl_hosts = get_iocs("ip4addr") + get_iocs("ip6addr")
        bl_domains = get_iocs("domain")
        bl_freedns = get_iocs("freedns")
        bl_nameservers = get_iocs("ns")
        bl_tlds = get_iocs("tld")
        for c in self.conns:
            # Check for blacklisted IP address.
            for host in bl_hosts:
                if c["ip_dst"] == host[0]:
                    c["alert_tiggered"] = True
                    self.alerts.append({
                        "title":
                        self.template["IOC-01"]["title"].format(
                            c["resolution"], c["ip_dst"], host[1].upper()),
                        "description":
                        self.template["IOC-01"]["description"].format(
                            c["ip_dst"]),
                        "host": c["resolution"],
                        "level": "High",
                        "id": "IOC-01"
                    })
                    break
            # Check for blacklisted CIDR.
            for cidr in bl_cidrs:
                if IPAddress(c["ip_dst"]) in cidr[0]:
                    c["alert_tiggered"] = True
                    self.alerts.append({
                        "title":
                        self.template["IOC-02"]["title"].format(
                            c["resolution"], cidr[0], cidr[1].upper()),
                        "description":
                        self.template["IOC-02"]["description"].format(
                            c["resolution"]),
                        "host": c["resolution"],
                        "level": "Moderate",
                        "id": "IOC-02"
                    })
            # Check for blacklisted domain.
            for domain in bl_domains:
                if c["resolution"].endswith(domain[0]):
                    # "tracker"-tagged domains get the softer IOC-04 alert.
                    if domain[1] != "tracker":
                        c["alert_tiggered"] = True
                        self.alerts.append({
                            "title":
                            self.template["IOC-03"]["title"].format(
                                c["resolution"], domain[1].upper()),
                            "description":
                            self.template["IOC-03"]["description"].format(
                                c["resolution"]),
                            "host": c["resolution"],
                            "level": "High",
                            "id": "IOC-03"
                        })
                    else:
                        c["alert_tiggered"] = True
                        self.alerts.append({
                            "title":
                            self.template["IOC-04"]["title"].format(
                                c["resolution"], domain[1].upper()),
                            "description":
                            self.template["IOC-04"]["description"].format(
                                c["resolution"]),
                            "host": c["resolution"],
                            "level": "Moderate",
                            "id": "IOC-04"
                        })
            # Check for blacklisted FreeDNS.
            for domain in bl_freedns:
                if c["resolution"].endswith("." + domain[0]):
                    c["alert_tiggered"] = True
                    self.alerts.append({
                        "title":
                        self.template["IOC-05"]["title"].format(
                            c["resolution"]),
                        "description":
                        self.template["IOC-05"]["description"].format(
                            c["resolution"]),
                        "host": c["resolution"],
                        "level": "Moderate",
                        "id": "IOC-05"
                    })
            # Check for suspect tlds.
            for tld in bl_tlds:
                if c["resolution"].endswith(tld[0]):
                    c["alert_tiggered"] = True
                    self.alerts.append({
                        "title":
                        self.template["IOC-06"]["title"].format(
                            c["resolution"]),
                        "description":
                        self.template["IOC-06"]["description"].format(
                            c["resolution"], tld[0]),
                        "host": c["resolution"],
                        "level": "Low",
                        "id": "IOC-06"
                    })
    if self.active_analysis:
        # NOTE(review): bl_nameservers is only defined when iocs_analysis
        # ran above — active_analysis without iocs_analysis would raise
        # NameError here; confirm the flags are always enabled together.
        for c in self.conns:
            try:
                # Domain nameservers check.
                name_servers = pydig.query(c["resolution"], "NS")
                if len(name_servers):
                    for ns in bl_nameservers:
                        # Only the first NS answer is checked.
                        if name_servers[0].endswith(".{}.".format(ns[0])):
                            c["alert_tiggered"] = True
                            self.alerts.append({
                                "title":
                                self.template["ACT-01"]["title"].format(
                                    c["resolution"], name_servers[0]),
                                "description":
                                self.template["ACT-01"]
                                ["description"].format(c["resolution"]),
                                "host": c["resolution"],
                                "level": "Moderate",
                                "id": "ACT-01"
                            })
            except:
                pass
            try:
                # Domain history check.
                whois_record = whois.whois(c["resolution"])
                # creation_date may be a single datetime or a list of them.
                creation_date = whois_record.creation_date if type(
                    whois_record.creation_date
                ) is not list else whois_record.creation_date[0]
                creation_days = abs((datetime.now() - creation_date).days)
                # Domains registered less than a year ago are suspicious.
                if creation_days < 365:
                    c["alert_tiggered"] = True
                    self.alerts.append({
                        "title":
                        self.template["ACT-02"]["title"].format(
                            c["resolution"], creation_days),
                        "description":
                        self.template["ACT-02"]["description"].format(
                            c["resolution"]),
                        "host": c["resolution"],
                        "level": "Moderate",
                        "id": "ACT-02"
                    })
            except:
                pass
def __init__(self):
    """Collect targets (stdin or --list file), build the resolver and
    blacklist, then resolve every subdomain on a thread pool."""
    init(autoreset=True)  # colorama
    self.target_list = list()
    self.lock = threading.Lock()
    self.domain_list = list()
    # Targets come either from stdin (--stdin) or from a file (--list),
    # filtered through self.control().
    if args.stdin and not args.list:
        [
            self.target_list.append(str(x))
            for x in sys.stdin.read().split("\n") if x and self.control(x)
        ]
        if not self.target_list:
            print(Fore.RED + "Subdomains Not Found In Stdin")
            sys.exit()
    elif args.list and not args.stdin:
        if not os.path.exists(args.list):
            print(Fore.RED + "File Not Found:", args.list)
            sys.exit()
        with open(args.list, "r", encoding="utf-8") as f:
            [
                self.target_list.append(x)
                for x in f.read().split("\n") if x and self.control(x)
            ]
    else:
        print(
            Fore.RED + """
\rYou Used The Wrong Parameter""", Fore.MAGENTA + """
\rUsage:
\r------
\rpython3 DnsR.py --list subdomains.txt --output resolved.txt
\rcat subdomains.txt | python3 DnsR.py --stdin --output resolved.txt
\rpython3 DnsR.py --list subdomains.txt --blacklist 198,55,44,77
\rcat subdomains.txt | python3 DnsR.py --stdin --blacklist 198,55,44,77
\rpython3 DnsR.py --list subdomains.txt --thread 50 --blacklist 198,55,44,77,xx.example.com
\rcat subdomains.txt | python3 DnsR.py --stdin --blacklist 198,55,44,77,xx.example.com
""")
        sys.exit()
    # De-duplicate and sort for a stable processing order.
    self.target_list = list(set(self.target_list))
    self.target_list.sort()
    # Pool of public resolvers used for the actual lookups.
    self.resolver = pydig.Resolver(nameservers=[
        '1.1.1.1', '1.0.0.1', '8.8.8.8', '8.8.4.4', '77.88.8.8',
        '77.88.8.1', '64.6.64.6', '64.6.65.6', '8.26.56.26', '8.20.247.20',
        '9.9.9.9', '149.112.112.112', '185.228.168.9', '185.228.169.9',
        '198.101.242.72', '23.253.163.53', '208.67.222.222',
        '208.67.220.220', '176.103.130.130', '176.103.130.131'
    ])
    # --blacklist: comma-separated IPs/hosts turned into a single regex.
    if args.blacklist:
        if not "," in args.blacklist:
            x = args.blacklist
            # NOTE(review): the trailing '*.' looks like it was meant to
            # be '.*' — confirm against the matching code.
            x = ".*" + x.replace(".", r"\.") + "*."
            self.BlackList = re.compile(x)
        else:
            x = args.blacklist.split(",")
            y = []
            for i in x:
                if i:
                    i = ".*" + i.replace(".", r"\.") + "*."
                    y.append(i)
            req = ("|").join(y)
            self.BlackList = re.compile(req)
    # Seed the NS/IP cache with the first target's registered domain.
    xyz = self.target_list[0]
    tld = tldextract.extract(xyz).registered_domain
    query_ns = pydig.query(tld, "NS")
    if query_ns:
        self.ns_ip_address(query_ns)
    self.domain_list.append(tld)
    self.domain_list = tuple(self.domain_list)
    with ThreadPoolExecutor(max_workers=args.thread) as executor:
        for x in self.target_list:
            # New registered domain? resolve its NS once and remember it
            # (tuple form so str.endswith can take it directly).
            if not x.endswith(self.domain_list):
                r = tldextract.extract(x).registered_domain
                self.domain_list = list(self.domain_list)
                self.domain_list.append(r)
                self.domain_list = tuple(self.domain_list)
                query_ns = pydig.query(r, "NS")
                if query_ns:
                    self.ns_ip_address(query_ns)
            executor.submit(self.resolve_subs, x)
"""Populate the dnsmap table with an A record per cdn netloc."""
from urllib.parse import urlparse
import sys

import psycopg2  # BUG FIX: used below but was never imported
import pydig

conn = None
try:
    conn = psycopg2.connect(" user=acd dbname=acd ")
except psycopg2.Error:
    print("I am unable to connect to the database ", sys.exc_info()[0])
    # Robustness: the original fell through and crashed on conn.cursor()
    # (conn is still None here); bail out explicitly instead.
    sys.exit(1)

cur = conn.cursor()
# One row per distinct network location seen in the cdn table.
cur.execute("""SELECT netloc from cdn group by netloc""")
rows = cur.fetchall()

for r in rows:
    netloc = r[0]
    try:
        res = pydig.query(netloc, 'A')
    except Exception:
        print("Dig bailed out ", sys.exc_info()[0])
        # BUG FIX: previously execution continued with `res` unbound on
        # the first failure (NameError) or stale from the previous
        # iteration, inserting wrong records for this netloc.
        continue
    for a_record in res:
        data = {}
        data['netloc'] = netloc
        data['a_record'] = a_record
        cur.execute(
            """INSERT INTO dnsmap(netloc, a_record) VALUES (%(netloc)s, %(a_record)s)""",
            data)

conn.commit()
def __init__(self, url, db):
    """Prepare typosquatting-analysis state for *url*.

    Fetches the page, records its MX records, loads the keyboard-layout
    and homoglyph confusion tables plus word/TLD dictionaries, and reads
    module settings from the *db* database handle.
    """
    self.url = url
    url_extract = tldextract.extract(url)
    self.subdomain = url_extract.subdomain
    self.domain = url_extract.domain
    self.tld = url_extract.suffix
    self.db = db
    self.typosquatting_result = []
    # MX records for the exact host part of the URL.
    self.mx = pydig.query(urlparse(self.url).netloc, 'MX')
    try:
        self.HEADERS = {
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) '
            'AppleWebKit/537.36 (KHTML, like Gecko) '
            'Chrome/33.0.1750.152 Safari/537.36'
        }
        self.response = requests.get(self.url, headers=self.HEADERS)
        self.response.raise_for_status()
        self.soup = BeautifulSoup(self.response.text, 'html.parser')
    except Exception as e:
        # Page unreachable: keep the analysis running with empty content.
        self.response = ""
        self.soup = None
        print('ERROR: HTTP-GET request for ' + self.url + ' failed: ', e)
    # URLs the user has registered as their own domains.
    self.user_urls = [
        x['url'] for x in list(self.db['analyzed-domains'].find(
            {'user_domain': True})).copy()
    ]
    # Adjacent-key tables per keyboard layout:
    # char -> string of plausible fat-finger replacements.
    self.qwerty = {
        '1': '2q', '2': '3wq1', '3': '4ew2', '4': '5re3', '5': '6tr4',
        '6': '7yt5', '7': '8uy6', '8': '9iu7', '9': '0oi8', '0': 'po9',
        'q': '12wa', 'w': '3esaq2', 'e': '4rdsw3', 'r': '5tfde4',
        't': '6ygfr5', 'y': '7uhgt6', 'u': '8ijhy7', 'i': '9okju8',
        'o': '0plki9', 'p': 'lo0', 'a': 'qwsz', 's': 'edxzaw',
        'd': 'rfcxse', 'f': 'tgvcdr', 'g': 'yhbvft', 'h': 'ujnbgy',
        'j': 'ikmnhu', 'k': 'olmji', 'l': 'kop', 'z': 'asx', 'x': 'zsdc',
        'c': 'xdfv', 'v': 'cfgb', 'b': 'vghn', 'n': 'bhjm', 'm': 'njk'
    }
    self.qwertz = {
        '1': '2q', '2': '3wq1', '3': '4ew2', '4': '5re3', '5': '6tr4',
        '6': '7zt5', '7': '8uz6', '8': '9iu7', '9': '0oi8', '0': 'po9',
        'q': '12wa', 'w': '3esaq2', 'e': '4rdsw3', 'r': '5tfde4',
        't': '6zgfr5', 'z': '7uhgt6', 'u': '8ijhz7', 'i': '9okju8',
        'o': '0plki9', 'p': 'lo0', 'a': 'qwsy', 's': 'edxyaw',
        'd': 'rfcxse', 'f': 'tgvcdr', 'g': 'zhbvft', 'h': 'ujnbgz',
        'j': 'ikmnhu', 'k': 'olmji', 'l': 'kop', 'y': 'asx', 'x': 'ysdc',
        'c': 'xdfv', 'v': 'cfgb', 'b': 'vghn', 'n': 'bhjm', 'm': 'njk'
    }
    self.azerty = {
        '1': '2a', '2': '3za1', '3': '4ez2', '4': '5re3', '5': '6tr4',
        '6': '7yt5', '7': '8uy6', '8': '9iu7', '9': '0oi8', '0': 'po9',
        'a': '2zq1', 'z': '3esqa2', 'e': '4rdsz3', 'r': '5tfde4',
        't': '6ygfr5', 'y': '7uhgt6', 'u': '8ijhy7', 'i': '9okju8',
        'o': '0plki9', 'p': 'lo0m', 'q': 'zswa', 's': 'edxwqz',
        'd': 'rfcxse', 'f': 'tgvcdr', 'g': 'yhbvft', 'h': 'ujnbgy',
        'j': 'iknhu', 'k': 'olji', 'l': 'kopm', 'm': 'lp', 'w': 'sxq',
        'x': 'wsdc', 'c': 'xdfv', 'v': 'cfgb', 'b': 'vghn', 'n': 'bhj'
    }
    # Visually confusable (homoglyph) substitutions: char -> lookalikes.
    self.glyphs = {
        '2': ['ƻ'], '5': ['ƽ'], 'a': ['à'], 'b': ['d', 'lb', 'ʙ'],
        'c': ['e'], 'd': ['b', 'cl', 'dl'], 'e': ['c'], 'f': ['ƒ'],
        'g': ['q', 'ɡ'], 'h': ['lh', 'b'], 'i': ['1', 'l'], 'j': ['ʝ'],
        'k': ['lk', 'ik', 'lc'], 'l': ['1', 'i'],
        'm': ['n', 'nn', 'rn', 'rr'], 'n': ['m', 'r'], 'o': ['0'],
        'p': ['ƿ'], 'q': ['g'], 'r': ['ʀ', 'ɼ'], 's': ['ꜱ'], 't': ['ţ'],
        'u': ['ᴜ'], 'v': ['ᴠ'], 'w': ['vv', 'ᴡ'], 'x': ['ẋ', 'ẍ'],
        'y': ['ʏ'], 'z': ['ʐ']
    }
    self.keyboards = [self.qwerty, self.qwertz, self.azerty]
    # Word and TLD dictionaries shipped next to this module.
    script_dir = os.path.dirname(__file__)
    with open(os.path.join(script_dir, 'dictionaries/dictionary')) as f:
        dictionary = set(f.read().splitlines())
    self.dictionary = [x for x in dictionary if x.isalnum()]
    with open(os.path.join(script_dir, 'dictionaries/tld')) as f:
        tld_dictionary = set(f.read().splitlines())
    self.tld_dictionary = [x for x in tld_dictionary if x.isalnum()]
    # Imported here, presumably to avoid a circular import at module
    # load time — TODO confirm.
    from core import modules_list_collection
    self.allowed_zones = \
        modules_list_collection.find_one({'module': 'First-level subdomain is allowed'})['settings'][
            'ALLOWED_FIRST_LEVEL_DOMAINS']
    self.GOOGLE_SAFE_BROWSING_API_KEY = \
        modules_list_collection.find_one({'module': 'Google Safe Browsing'})['settings'][
            'GOOGLE_SAFE_BROWSING_API_KEY']
def get_4a_record(host):
    """Return the AAAA (IPv6) records of *host* as a list of strings."""
    record_type = 'AAAA'
    return pydig.query(host, record_type)
def get_ns(host):
    """Return the NS (nameserver) records of *host* as a list of strings."""
    record_type = 'NS'
    return pydig.query(host, record_type)
def main(host, port, client, sensor, period, func, val):  # pragma: no cover
    """Launch the sensor server (if needed) and schedule a WASM sampling task.

    :param host: framework master host.
    :param port: framework master port.
    :param client: client name, used to namespace the CoAP path/task.
    :param sensor: 'temperature', 'pressure' or 'humidity'.
    :param period: sampling period for the WASM task.
    :param func: optional filter function, 'G' (greater) or 'L' (less).
    :param val: threshold value for the filter function.
    """
    # declare a framework
    framework = Framework("Sensor Framework", host, port)
    # First we need to launch the server task if it's not already running
    agent = framework.getAgentInfoForRunningTask('SensorServer')
    # Get offers
    offers = framework.getOffers()
    domain = None
    if agent is None:
        # launch the task
        server_agents = framework.findAgents(offers, {'domain': None, 'cpus': 0.5, 'mem': 100000000})
        if len(server_agents) == 0:
            print("No available server agents.", file=sys.stderr)
            return
        framework.runTask("SensorServer", server_agents[0],
                          docker_image='jnoor/coapserver:v1',
                          docker_port_mappings={3002: 3002},
                          environment={'SERVER_PORT': '3002'})
        print("Started server task on agent: {}".format(server_agents[0].agent_id))
        domain = framework.getAgentProperty(server_agents[0].agent_id, 'domain')
    else:
        print("Sensor server task already running.")
        domain = framework.getAgentProperty(agent, 'domain')
    # okay now do a dns lookup for this domain
    ip = pydig.query(domain, 'A')[0]
    # construct the wasm environment
    # BUG FIX: use the function parameters instead of the module-level
    # `args` namespace — period/func/val/sensor/client were accepted by
    # the signature but silently ignored before.
    env = {}
    env['IP'] = ip
    env['PORT'] = 3002
    env['PERIOD'] = int(period)
    # Filters
    if func == "G":
        env['FILT_FUNC'] = 'G'
        env['FILT_VAL'] = int(val)
    elif func == "L":
        env['FILT_FUNC'] = 'L'
        env['FILT_VAL'] = int(val)
    # Sensor
    sensor_name = None
    if sensor == "temperature":
        env['PATH'] = client + '-t'
        env['SENSOR'] = 'temp'
        sensor_name = "temperature_sensor"
    elif sensor == "pressure":
        env['PATH'] = client + '-p'
        env['SENSOR'] = 'press'
        sensor_name = "pressure_sensor"
    elif sensor == "humidity":
        env['PATH'] = client + '-h'
        env['SENSOR'] = 'humidity'
        sensor_name = "humidity_sensor"
    else:
        print("Not a valid sensor.", file=sys.stderr)
        return
    # Find a wasm agent
    wasm_agents = framework.findAgents(offers, {'executors': 'WASM', 'cpus': 0.1, sensor_name: None})
    if (len(wasm_agents) == 0):
        print("No available sensor agents.", file=sys.stderr)
        return
    wasm_index = random.randrange(0, len(wasm_agents))
    print("Using WASM agent " + wasm_agents[wasm_index].agent_id)
    # Run the WASM task
    __location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
    # Resource fix: close the binary after reading (was left open before).
    with open(os.path.join(__location__, './wasm/wasm-send/out.wasm'), 'rb') as wasm_file:
        wasm_binary = wasm_file.read()
    print("Running task with environment:")
    print(env)
    framework.runTask(client + ':' + "SensorSample", wasm_agents[wasm_index],
                      wasm_binary=wasm_binary, environment=env)
    print("Started sensor task on agent: {}".format(wasm_agents[wasm_index].agent_id))
def main(host, port):  # pragma: no cover
    """Launch the profiler server (if needed) and run a profiler task on
    every agent, choosing the Docker image by agent architecture.

    :param host: framework master host.
    :param port: framework master port.
    """
    # declare a framework
    framework = Framework("Profiler Framework", host, port)
    # First we need to launch the server task if it's not already running
    agent = framework.getAgentInfoForRunningTask('ProfilerServer')
    # Get offers
    offers = framework.getOffers()
    # find all domains
    allDomains = []
    for agentWithDomain in offers:
        for att in agentWithDomain.attributes:
            if att.name == 'domain':
                allDomains.append(att.text.value)
    print(allDomains)
    domain = None
    if agent is None:
        # launch the task
        server_agents = framework.findAgents(offers, {
            'domain': None,
            'cpus': 0.5,
            'mem': 100000000
        })
        if len(server_agents) == 0:
            print("No available server agents.")
            return
        framework.runTask("ProfilerServer",
                          server_agents[0],
                          docker_image='jnoor/profilerserver:v1',
                          docker_port_mappings={3001: 3001},
                          environment={'SERVER_PORT': '3001'})
        print("Started server task on agent: {}".format(
            server_agents[0].agent_id))
        domain = framework.getAgentProperty(server_agents[0].agent_id,
                                            'domain')
    else:
        print("Sensor server task already running.")
        domain = framework.getAgentProperty(agent, 'domain')
    # okay now do a dns lookup for this domain
    print("Lookup: " + domain)
    # Assumes the domain resolves to at least one A record.
    ip = pydig.query(domain, 'A')[0]
    # construct the profile task environment
    env = {}
    env['HOST'] = domain
    env['PORT'] = 3001
    env['TS'] = int(time.time())
    env['ENDPOINT'] = "http://" + str(ip) + ":3001/profile"
    # Expose every known agent domain as DOMAIN0..DOMAINn.
    for i in range(len(allDomains)):
        env['DOMAIN' + str(i)] = allDomains[i]
    agents = framework.findAgents(offers, {'cpus': 0.5, "mem": 10000000})
    for agent in agents:
        # Pick the profiler image matching the agent's architecture.
        imageToUse = None
        for attribute in agent.attributes:
            if attribute.name == 'OS':
                # print(agent.agent_id, attribute.name, attribute.text.value)
                if "x86" in attribute.text.value:
                    imageToUse = 'jnoor/profiler-x86:v1'
                elif 'arm' in attribute.text.value:
                    imageToUse = 'jnoor/profiler-arm:v1'
        if not imageToUse:
            print("No profiler image for ")
            print(agent)
            continue
        # if agent.agent_id != '14038003386728':
        #     continue
        env['AGENT'] = agent.agent_id
        print("Starting profiler task on agent: {}".format(agent.agent_id))
        framework.runTask("profiler-" + str(agent.agent_id),
                          agent,
                          docker_image=imageToUse,
                          environment=env)
def netflow_check(self, dir):
    """
    Enrich and check the netflow from the conn.log against whitelist and IOCs.

    :param dir: directory containing the Zeek logs (expects a conn.log file).
    :return: nothing - all stuff appended to self.alerts
    """
    max_ports = get_config(("analysis", "max_ports"))
    http_default_port = get_config(("analysis", "http_default_port"))

    # Get the netflow from conn.log.
    if os.path.isfile(os.path.join(dir, "conn.log")):
        for record in ParseZeekLogs(os.path.join(dir, "conn.log"),
                                    output_format="json",
                                    safe_headers=False):
            if record is not None:
                c = {
                    "ip_dst": record["id.resp_h"],
                    "proto": record["proto"],
                    "port_dst": record["id.resp_p"],
                    "service": record["service"],
                    # NOTE: historical key name (sic) kept for compatibility
                    # with consumers of self.conns / self.alerts.
                    "alert_tiggered": False
                }
                if c not in self.conns:
                    self.conns.append(c)

    # Let's add some dns resolutions.
    # resolve() falls back to the raw IP when no DNS answer was captured,
    # which PROTO-05 below relies on.
    for c in self.conns:
        c["resolution"] = self.resolve(c["ip_dst"])

    # Order the conns list by the resolution field.
    self.conns = sorted(self.conns, key=lambda c: c["resolution"])

    # Check for whitelisted assets, if any, delete the record.
    if self.whitelist_analysis:
        wl_cidrs = [IPNetwork(cidr) for cidr in get_whitelist("cidr")]
        wl_hosts = get_whitelist("ip4addr") + get_whitelist("ip6addr")
        wl_domains = get_whitelist("domain")

        for i, c in enumerate(self.conns):
            if c["ip_dst"] in wl_hosts:
                self.whitelist.append(self.conns[i])
                self.conns[i] = False
            elif c["resolution"] in wl_domains:
                self.whitelist.append(self.conns[i])
                self.conns[i] = False
            elif any(c["resolution"].endswith("." + dom) for dom in wl_domains):
                self.whitelist.append(self.conns[i])
                self.conns[i] = False
            elif any(IPAddress(c["ip_dst"]) in cidr for cidr in wl_cidrs):
                self.whitelist.append(self.conns[i])
                self.conns[i] = False

        # Let's delete whitelisted connections.
        self.conns = list(filter(lambda c: c != False, self.conns))

    if self.heuristics_analysis:
        for c in self.conns:
            # Check for UDP / ICMP (strange from a smartphone.)
            if c["proto"] in ["UDP", "ICMP"]:
                c["alert_tiggered"] = True
                self.alerts.append({
                    "title":
                    "{} communication going outside the local network to {}."
                    .format(c["proto"].upper(), c["resolution"]),
                    "description":
                    "The {} protocol is commonly used in internal networks. Please, verify if the host {} leveraged other alerts which may "
                    .format(c["proto"].upper(), c["resolution"]) +
                    "indicates a possible malicious behavior.",
                    "host": c["resolution"],
                    "level": "Moderate",
                    "id": "PROTO-01"
                })
            # Check for use of ports over 1024.
            if c["port_dst"] >= max_ports:
                c["alert_tiggered"] = True
                self.alerts.append({
                    "title":
                    "{} connection to {} to a port over or equal to {}.".
                    format(c["proto"].upper(), c["resolution"], max_ports),
                    "description":
                    "{} connections have been seen to {} by using the port {}. The use of non-standard port can be sometimes associated to malicious activities. "
                    .format(c["proto"].upper(), c["resolution"], c["port_dst"]) +
                    "We recommend to check if this host has a good reputation by looking on other alerts and search it on the internet.",
                    "host": c["resolution"],
                    "level": "Low",
                    "id": "PROTO-02"
                })
            # Check for use of HTTP.
            if c["service"] == "http" and c["port_dst"] == http_default_port:
                c["alert_tiggered"] = True
                self.alerts.append({
                    "title":
                    "HTTP communications been done to the host {}".format(
                        c["resolution"]),
                    "description":
                    "Your device exchanged with the host {} by using HTTP, an unencrypted protocol. "
                    .format(c["resolution"]) +
                    "Even if this behavior is not malicious by itself, it is unusual to see HTTP communications issued from smartphone applications "
                    +
                    "running in the background. Please check the host reputation by searching it on the internet.",
                    "host": c["resolution"],
                    "level": "Low",
                    "id": "PROTO-03"
                })
            # Check for use of HTTP on a non standard port.
            if c["service"] == "http" and c["port_dst"] != http_default_port:
                c["alert_tiggered"] = True
                self.alerts.append({
                    "title":
                    "HTTP communications have been seen to the host {} on a non standard port ({})."
                    .format(c["resolution"], c["port_dst"]),
                    "description":
                    "Your device exchanged with the host {} by using HTTP, an unencrypted protocol on the port {}. "
                    .format(c["resolution"], c["port_dst"]) +
                    "This behavior is quite unusual. Please check the host reputation by searching it on the internet.",
                    "host": c["resolution"],
                    "level": "Moderate",
                    "id": "PROTO-04"
                })
            # Check for non-resolved IP address.
            # BUGFIX: was `c["service"] == c["resolution"]`, which compared the
            # Zeek service name ("http", "dns", ...) to the resolution and
            # could never detect an unresolved destination. resolve() returns
            # the IP itself when no DNS answer exists, so compare against
            # ip_dst (matching the alert text below).
            if c["ip_dst"] == c["resolution"]:
                c["alert_tiggered"] = True
                self.alerts.append({
                    "title":
                    "The server {} hasn't been resolved by any DNS query during the session"
                    .format(c["ip_dst"]),
                    "description":
                    "It means that the server {} is likely not resolved by any domain name or the resolution has already been cached by "
                    .format(c["ip_dst"]) +
                    "the device. If the host appears in other alerts, please check it.",
                    "host": c["ip_dst"],
                    "level": "Low",
                    "id": "PROTO-05"
                })

    if self.iocs_analysis:
        bl_cidrs = [[IPNetwork(cidr[0]), cidr[1]]
                    for cidr in get_iocs("cidr")]
        bl_hosts = get_iocs("ip4addr") + get_iocs("ip6addr")
        bl_domains = get_iocs("domain")
        bl_freedns = get_iocs("freedns")
        bl_nameservers = get_iocs("ns")
        bl_tlds = get_iocs("tld")

        for c in self.conns:
            # Check for blacklisted IP address.
            for host in bl_hosts:
                if c["ip_dst"] == host[0]:
                    c["alert_tiggered"] = True
                    self.alerts.append({
                        "title":
                        "A connection has been made to {} ({}) which is tagged as {}."
                        .format(c["resolution"], c["ip_dst"],
                                host[1].upper()),
                        "description":
                        "The host {} has been explicitly blacklisted for malicious activities. Your device is likely compromised "
                        .format(c["ip_dst"]) +
                        "and needs to be investigated more deeply by IT security professionals.",
                        "host": c["resolution"],
                        "level": "High",
                        "id": "IOC-01"
                    })
                    break
            # Check for blacklisted CIDR.
            for cidr in bl_cidrs:
                if IPAddress(c["ip_dst"]) in cidr[0]:
                    c["alert_tiggered"] = True
                    self.alerts.append({
                        "title":
                        "Communication to {} under the CIDR {} which is tagged as {}."
                        .format(c["resolution"], cidr[0], cidr[1].upper()),
                        "description":
                        "The server {} is hosted under a network which is known to host malicious activities. Even if this behavior is not malicious by itself, "
                        .format(c["resolution"]) +
                        "you need to check if other alerts are also mentioning this host. If you have some doubts, please "
                        +
                        "search this host on the internet to see if its legit or not.",
                        "host": c["resolution"],
                        "level": "Moderate",
                        "id": "IOC-02"
                    })
            # Check for blacklisted domain.
            for domain in bl_domains:
                if c["resolution"].endswith(domain[0]):
                    if domain[1] != "tracker":
                        c["alert_tiggered"] = True
                        self.alerts.append({
                            "title":
                            "A DNS request have been done to {} which is tagged as {}."
                            .format(c["resolution"], domain[1].upper()),
                            "description":
                            "The domain name {} seen in the capture has been explicitly tagged as malicious. This indicates that "
                            .format(c["resolution"]) +
                            "your device is likely compromised and needs to be investigated deeply.",
                            "host": c["resolution"],
                            "level": "High",
                            "id": "IOC-03"
                        })
                    else:
                        c["alert_tiggered"] = True
                        self.alerts.append({
                            "title":
                            "A DNS request have been done to {} which is tagged as {}."
                            .format(c["resolution"], domain[1].upper()),
                            "description":
                            "The domain name {} seen in the capture has been explicitly tagged as a Tracker. This "
                            .format(c["resolution"]) +
                            "indicates that one of the active apps is geo-tracking your moves.",
                            "host": c["resolution"],
                            "level": "Moderate",
                            "id": "IOC-03"
                        })
            # Check for blacklisted FreeDNS.
            for domain in bl_freedns:
                if c["resolution"].endswith("." + domain[0]):
                    c["alert_tiggered"] = True
                    self.alerts.append({
                        "title":
                        "A DNS request have been done to the domain {} which is a Free DNS."
                        .format(c["resolution"]),
                        "description":
                        "The domain name {} is using a Free DNS service. This kind of service is commonly used by cybercriminals "
                        .format(c["resolution"]) +
                        "or state-sponsored threat actors during their operations.",
                        "host": c["resolution"],
                        "level": "Moderate",
                        "id": "IOC-04"
                    })
            # Check for suspect tlds.
            for tld in bl_tlds:
                if c["resolution"].endswith(tld[0]):
                    c["alert_tiggered"] = True
                    self.alerts.append({
                        "title":
                        "A DNS request have been done to the domain {} which contains a suspect TLD."
                        .format(c["resolution"]),
                        "description":
                        "The domain name {} is using a suspect Top Level Domain ({}). Even not malicious, this non-generic TLD is used regularly by cybercrime "
                        .format(c["resolution"], tld[0]) +
                        "or state-sponsored operations. Please check this domain by searching it on an internet search engine. If other alerts are related to this "
                        +
                        "host, please consider it as very suspicious.",
                        "host": c["resolution"],
                        "level": "Low",
                        "id": "IOC-05"
                    })
            # Check for use of suspect nameservers.
            # BUGFIX: was a bare `except:` which would also swallow
            # KeyboardInterrupt/SystemExit; narrowed to Exception while
            # keeping the best-effort "no nameservers" fallback.
            try:
                name_servers = pydig.query(c["resolution"], "NS")
            except Exception:
                name_servers = []
            if name_servers:
                for ns in bl_nameservers:
                    if name_servers[0].endswith(".{}.".format(ns[0])):
                        c["alert_tiggered"] = True
                        self.alerts.append({
                            "title":
                            "The domain {} is using a suspect nameserver ({})."
                            .format(c["resolution"], name_servers[0]),
                            "description":
                            "The domain name {} is using a nameserver that has been explicitly tagged to be associated to malicious activities. "
                            .format(c["resolution"]) +
                            "Many cybercriminals and state-sponsored threat actors are using this kind of registrars because they allow cryptocurrencies and anonymous payments.",
                            "host": c["resolution"],
                            "level": "Moderate",
                            "id": "IOC-06"
                        })