def router_ip(self):
    """
    DESCR: Get IP address of default gateway. (int)
    TOOLS: python3
    """
    with timeout(self.opts['timeout']):
        if self.opts['rhost']:
            # a user-supplied router host wins over autodetection
            self._log('router_ip', self.opts['rhost'])
        else:
            # first address of the first 'default' gateway entry
            gateway = list(netifaces.gateways()['default'].values())[0][0]
            self._log('router_ip', gateway)
    return
def dnsrecords(self):
    """
    DESCR: Find available DNS records. (int)
    TOOLS: python3
    """
    # these record types are queried elsewhere, so skip them here
    skipped = ('a', 'aaaa', 'ptr', 'mx')
    with timeout(self.opts['timeout']):
        if self.target['privip']:
            return
        with cf.ThreadPoolExecutor(5) as executor:
            for record_type in self.dns_record_types:
                if record_type not in skipped:
                    executor.submit(self._dns_query, record_type,
                                    'dnsrecords')
    return
def src_mac(self):
    """
    DESCR: Get source MAC address of local device. (int)
    TOOLS: python3
    """
    with timeout(self.opts['timeout']):
        if self.opts['smac']:
            # user-supplied source MAC wins over autodetection
            self._log('src_mac', self.opts['smac'])
        else:
            # NOTE(review): self.target is used as an interface name here;
            # other methods treat it as a dict -- confirm against caller
            link = netifaces.ifaddresses(self.target)[netifaces.AF_LINK]
            self._log('src_mac', link[0]['addr'])
    return
def src_ip(self):
    """
    DESCR: Get source IP address of local device. (int)
    TOOLS: python3
    """
    with timeout(self.opts['timeout']):
        if self.opts['shost']:
            # user-supplied source host wins over autodetection
            self._log('src_ip', self.opts['shost'])
        else:
            # NOTE(review): self.target is used as an interface name here;
            # other methods treat it as a dict -- confirm against caller
            inet = netifaces.ifaddresses(self.target)[netifaces.AF_INET]
            self._log('src_ip', inet[0]['addr'])
    return
def router_mac(self):
    """
    DESCR: Get MAC address of default gateway. (int)
    TOOLS: python3
    """
    with timeout(self.opts['timeout']):
        if self.opts['rmac']:
            # user-supplied router MAC wins over autodetection
            self._log('router_mac', self.opts['rmac'])
        else:
            # resolve the MAC of the previously logged gateway address
            gw_ip = self._read_log('router_ip')[0]
            self._log('router_mac', getmac.get_mac_address(ip=gw_ip))
    return
def mails_extract(self):
    """
    DESCR: Extract mail-addresses from webpage. (int)
    TOOLS: python
    """
    # loose mail pattern; hoisted out of the loop
    mail_re = re.compile(r'[\w\.-]+@[\w\.-]+')
    with timeout(self.opts['timeout']):
        text = bs4.BeautifulSoup(self.req.text, 'html.parser').text
        for token in text.split(' '):
            hit = mail_re.search(token)
            if hit:
                self._log('mails_extract', hit.group(0))
    return
def all_subdomains(self):
    """
    DESCR: Merge all found subdomains from various tools into a single
           list. (int)
    TOOLS: python3
    """
    with timeout(self.opts['timeout']):
        if not self.target['privip']:
            merged = self._read_log('dnsspider') + \
                self._read_log('findomain')
            # dedupe, sort and drop empty entries
            uniq = [entry for entry in sorted(set(merged)) if entry]
            self._log('all_subdomains', uniq, data_end='\n')
    return
def crack_http_auth_web(self):
    """
    DESCR: Check HTTP auth type (basic, realm, etc.) and crack login. (int)
    TOOLS: python3
    """
    # precedence: login_url > attack_url > target
    url = self.opts['login_url'] or self.opts['attack_url'] or self.target
    with timeout(self.opts['timeout']):
        self._crack_http_auth(url, 'crack_http_auth_web')
    return
def ipv4addr(self):
    """
    DESCR: Get IPv4 address. (int)
    TOOLS: python3
    """
    with timeout(self.opts['timeout']):
        if self._is_ipaddr(self.target['host']) == 'ipv4':
            self._log('ipv4addr', self.target['host'])
        else:
            try:
                self._log('ipv4addr',
                          socket.gethostbyname(self.target['host']))
            except (OSError, UnicodeError):
                # resolution failed (socket.gaierror is an OSError) ->
                # best effort, nothing to log. Was a bare `except:`, which
                # also swallowed KeyboardInterrupt and the timeout alarm.
                pass
    return
def cidr_range(self):
    """
    DESCR: Get IPv4 network in CIDR range format. (int)
    TOOLS: python3
    """
    with timeout(self.opts['timeout']):
        # user-supplied source host wins; otherwise use the logged one
        src = self.opts['shost'] or self._read_log('src_ip')[0]
        mask = self._read_log('netmask')[0]
        self._log('cidr_range', str(netaddr.IPNetwork(f'{src}/{mask}')))
    return
def hostname(self):
    """
    DESCR: Get hostname. (int)
    TOOLS: python3
    """
    with timeout(self.opts['timeout']):
        if self._is_ipaddr(self.target['host']):
            try:
                self._log('hostname',
                          socket.gethostbyaddr(self.target['host'])[0])
            except (OSError, UnicodeError):
                # reverse lookup failed (socket.herror/gaierror are
                # OSError) -> nothing to log. Was a bare `except:`, which
                # also swallowed KeyboardInterrupt and the timeout alarm.
                pass
        else:
            # target already is a hostname
            self._log('hostname', self.target['host'])
    return
def comments_extract(self):
    """
    DESCR: Extract comments from webpage. (int)
    TOOLS: python
    """
    with timeout(self.opts['timeout']):
        tree = bs4.BeautifulSoup(self.req.text, 'html.parser')
        # collect only Comment nodes from the parse tree
        found = tree.find_all(text=lambda t: isinstance(t, bs4.Comment))
        self._log('comments_extract',
                  [f'<!-- {c} -->\n' for c in found])
    return
def host_range(self):
    """
    DESCR: Get IPv4 network in host range format. (int)
    TOOLS: python3
    """
    with timeout(self.opts['timeout']):
        cidr = self._read_log('cidr_range')[0]
        if cidr.endswith('/32'):
            # single host: the range collapses to the address itself.
            # BUGFIX: was `cidr.rstrip('/32')` -- rstrip strips the
            # character SET {'/', '3', '2'}, so '10.0.0.3/32' became
            # '10.0.0.'. Split off the prefix length instead.
            start = end = cidr.split('/')[0]
        else:
            # skip network (net[0]) and broadcast (net[-1]) addresses
            net = netaddr.IPNetwork(cidr)
            start, end = str(net[1]), str(net[-2])
        self._log('host_range', f'{start}-{end}')
    return
def ipv4range(self):
    """
    DESCR: Get IPv4 address range in host range and cidr format. (int)
    TOOLS: python3
    """
    with timeout(self.opts['timeout']):
        if self.target['privip']:
            return
        try:
            addr = self._read_log('ipv4addr')[0]
            if addr:
                net = ipwhois.IPWhois(addr).lookup_whois()['nets'][0]
                iprange = net['range'].replace(' ', '')
                cidr = net['cidr'].replace(' ', '')
                self._log('ipv4range', iprange)
                self._log('ipv4cidr', cidr)
        except:  # noqa: E722 -- deliberate best effort: whois may fail in many ways
            pass
    return
def crack_tomcat_web(self):
    """
    DESCR: Check for tomcat and crack logins using tomcat's default
           creds. (int)
    TOOLS: python3
    """
    # tomcat's well-known default credentials
    users = deque(('tomcat', 'both', 'role1', 'admin', 'manager', 'root'))
    pws = deque(('tomcat', 'both', 'role1', 'admin', 'manager', 'root', ''))
    with timeout(self.opts['timeout']):
        url = self._is_tomcat(self.host, self.port)
        if not url:
            return
        # one worker per username; try the full cartesian product
        with cf.ThreadPoolExecutor(len(users)) as executor:
            for username in users:
                for password in pws:
                    executor.submit(self._crack_tomcat, url, username,
                                    password, 'crack_tomcat_web')
    return
def domainname(self):
    """
    DESCR: Get domain names. (int)
    TOOLS: python3
    """
    with timeout(self.opts['timeout']):
        if not self.target['privip']:
            if self._is_ipaddr(self.target['host']):
                try:
                    fqdn = socket.gethostbyaddr(self.target['host'])[0]
                    # keep the last two labels only (drop subdomains)
                    self._log('domainname',
                              '.'.join(fqdn.split('.')[-2:]))
                except (OSError, UnicodeError):
                    # reverse lookup failed (socket.herror is an OSError)
                    # -> nothing to log. Was a bare `except:`, which also
                    # swallowed KeyboardInterrupt and the timeout alarm.
                    pass
            else:
                # just in case that we have a subdomain we need to get
                # domain only
                domain = '.'.join(self.target['host'].split('.')[-2:])
                self._log('domainname', domain)
    return
def shodan(self):
    """
    DESCR: Perform shodan host search to gather information. (int)
    TOOLS: python3
    """
    url = (f"https://api.shodan.io/shodan/host/{self.host}"
           f"?key={self.opts['shodan_key']}")
    headers = {'User-Agent': self.useragent}
    with timeout(self.opts['timeout']):
        if self.target['privip']:
            return
        resp = requests.get(url, verify=False, headers=headers,
                            timeout=300)
        if resp.content:
            # re-serialize pretty-printed and key-sorted for the log
            data = json.loads(resp.content.decode('utf-8'))
            self._log('shodan', json.dumps(data, indent=2, sort_keys=True))
    return
def whois_cidr(self):
    """
    DESCR: Get CIDR ranges and ASN descriptions belonging and related to
           target by doing whois lookup on all found subdomains' IPv4
           addresses. (int)
    TOOLS: python3
    """
    last_ip = '127.0.0.1'
    with timeout(self.opts['timeout']):
        if self.target['privip']:
            return
        results = []
        fut2sub = {}  # future -> subdomain it was submitted for
        with cf.ThreadPoolExecutor(30) as executor:
            for line in self._read_log('all_subdomains_ips'):
                fields = line.split()
                if len(fields) < 2:
                    continue
                subdomain = fields[0]
                first_ip = fields[1].split(',')[0]  # first of ip list
                if first_ip != last_ip:  # skip duplicates
                    fut2sub[executor.submit(self._whois, 'ipv4addr',
                                            first_ip)] = subdomain
                    last_ip = first_ip
            for fut in cf.as_completed(fut2sub):
                subdomain = fut2sub[fut]
                for entry in fut.result():
                    results.append(
                        f"{subdomain} | {entry['query']} | "
                        f"{entry['network']['cidr']} | "
                        f"{entry['asn_description']}")
        self._log('whois_cidr', sorted(set(results)), data_end='\n')
    return
def ipv6addr(self):
    """
    DESCR: Get IPv6 address. (int)
    TOOLS: python3
    """
    with timeout(self.opts['timeout']):
        if self._is_ipaddr(self.target['host']) == 'ipv6':
            self._log('ipv6addr', self.target['host'])
        else:
            try:
                infos = socket.getaddrinfo(self.target['host'], None)
                # first AF_INET6 entry, if any; sockaddr[0] is the address.
                # Was: `if ip6:` on a filter object (always truthy) and
                # list(...)[0], relying on IndexError + bare `except:` when
                # no IPv6 entry existed.
                ip6 = next((info[4][0] for info in infos
                            if info[0] == socket.AF_INET6), None)
                if ip6:
                    self._log('ipv6addr', ip6)
            except (OSError, UnicodeError):
                # resolution failed (socket.gaierror is an OSError) ->
                # best effort, nothing to log
                pass
    return
def links_extract(self):
    """
    DESCR: Extract links from webpage. (int)
    TOOLS: python
    """
    links = []
    # (tag, attribute) pairs that carry link targets
    attrs = (('a', 'href'), ('img', 'src'), ('script', 'src'))
    with timeout(self.opts['timeout']):
        soup = bs4.BeautifulSoup(self.req.text, 'html.parser')
        for tag, attr in attrs:
            for node in soup.find_all(tag):
                for value in node.get_attribute_list(attr):
                    if value is None:
                        # BUGFIX: tags missing the attribute (e.g.
                        # <a name=...>) yield [None], and the old
                        # `'://' not in j` check raised TypeError
                        continue
                    if '://' in value:
                        links.append(f'{value}\n')
                    else:
                        # relative link -> prefix with the target base
                        links.append(f'{self.target}{value}\n')
        self._log('links_extract', sorted(set(links)))
    return