def SshSingleUserBrute(self):
    """Run patator with the seclists probable top 1575 wordlist against a
    single user specified as a command line argument."""
    cmd_info = "[" + fg.green + "+" + fg.rs + "]"
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    cl = helper_lists.Cewl(self.target)
    if not os.path.exists(c.getPath("wordlists", "CewlPlus")):
        cl.CewlWordlist()
    green = fg.li_green
    teal = fg.li_cyan
    reset = fg.rs
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    if os.path.exists(c.getPath("wordlists", "CewlPlus")):
        if os.path.getsize(c.getPath("wordlists", "CewlPlus")) > 0:
            print(f"""{teal}Beginning Password Brute Force for User: {reset} {green}{self.user}{reset}""")
            patator_cmd = c.getCmd("ssh", "patator_ssh_cewl_auto", port=self.port, user=self.user)
            print(f"""{cmd_info} {patator_cmd}""")
            call(patator_cmd, shell=True)
        else:
            print(f"""{teal}Beginning Password Brute Force for User: {reset} {green}{self.user}{reset}""")
            patator_cmd = c.getCmd("ssh", "patator_ssh_auto", port=self.port, user=self.user)
            print(f"""{cmd_info} {patator_cmd}""")
            call(patator_cmd, shell=True)
def proxyScan(self):
    """This is the Web Proxy scan function that is called by lib/enumProxy.py.
    It will attempt to run dirsearch, whatweb, and nikto through the proxy."""
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    npp = nmapParser.NmapParserFunk(self.target)
    npp.openProxyPorts()
    proxy_http_ports = npp.proxy_http_ports
    proxy_ports = np.proxy_ports
    if len(proxy_http_ports) == 0:
        pass
    else:
        c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
        if not os.path.exists(c.getPath("proxy", "proxyDir")):
            os.makedirs(c.getPath("proxy", "proxyDir"))
        if not os.path.exists(c.getPath("proxy", "proxyWeb")):
            os.makedirs(c.getPath("proxy", "proxyWeb"))
        proxy_commands = []
        for proxy in proxy_ports:
            print(f"""{fg.li_cyan} Enumerating HTTP Ports Through Port: {proxy}, Running the following commands: {fg.rs}""")
            proxy_commands.append(c.getCmd("proxy", "whatwebProxyServer", proxy=proxy))
            if len(proxy_http_ports) != 0:
                for proxy_http_port in proxy_http_ports:
                    proxy_commands.append(c.getCmd("proxy", "whatwebProxyHttpPorts", proxy=proxy, httpProxy=proxy_http_port))
                    proxy_commands.append(c.getCmd("proxy", "dirsearchHttpProxyPortsDict", proxy=proxy, httpProxy=proxy_http_port))
                    proxy_commands.append(c.getCmd("proxy", "niktoProxyHttpPort", proxy=proxy, httpProxy=proxy_http_port))
        self.proxy_processes = tuple(proxy_commands)
def genDirsearchUrlList(self):
    c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    awkprint = "{print $3}"
    dirsearch_files = []
    dir_list = [
        d for d in glob.iglob(c.getPath("report", "reportGlob"), recursive=True)
        if os.path.isdir(d)
    ]
    for d in dir_list:
        reportFile_list = [
            fname for fname in glob.iglob(f"{d}/*", recursive=True)
            if os.path.isfile(fname)
        ]
        for rf in reportFile_list:
            if "nmap" not in rf:
                if "dirsearch" in rf:
                    if not os.path.exists(c.getPath("web", "aquatoneDir")):
                        os.makedirs(c.getPath("web", "aquatoneDir"))
                    dirsearch_files.append(rf)
                if "nikto" in rf:
                    check_nikto_lines = f"""wc -l {rf} | cut -d ' ' -f 1"""
                    num_lines_nikto = check_output(check_nikto_lines, stderr=STDOUT, shell=True).rstrip()
                    if int(num_lines_nikto) < 100:
                        call(f"cat {rf}", shell=True)
    if len(dirsearch_files) != 0:
        all_dirsearch_files_on_one_line = " ".join(map(str, dirsearch_files))
        url_list_cmd = f"""cat {all_dirsearch_files_on_one_line} | grep -Ev '400|403' | awk '{awkprint}' | sort -u >> {c.getPath("web", "aquatoneDirUrls")}"""
        call(url_list_cmd, shell=True)
def check_links(self, hostnames, ports):
    import urllib.request
    import urllib.error
    from bs4 import BeautifulSoup
    import ssl
    found_links = []
    for host in hostnames:
        for port in ports:
            try:
                ctx = ssl.create_default_context()
                ctx.check_hostname = False
                ctx.verify_mode = ssl.CERT_NONE
                url = urllib.request.urlopen(f'http://{host}:{port}/', context=ctx)
                soup = BeautifulSoup(url, 'html.parser')
                for _link in soup.findAll('a'):
                    # anchors without an href attribute return None; guard
                    # against that before the substring test
                    href = _link.get('href')
                    if href and host in href:
                        found_links.append(href)
            except urllib.error.HTTPError as http_err:
                print("HTTPError on http://{}:{}/ : {}".format(host, port, http_err))
            except urllib.error.ContentTooShortError as content_err:
                print("ContentTooShortError on http://{}:{}/ : {}".format(host, port, content_err))
            except urllib.error.URLError as url_err:
                print("URLError on http://{}:{}/ : {}".format(host, port, url_err))
    c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    if not os.path.exists(c.getPath("web", "aquatoneDir")):
        os.makedirs(c.getPath("web", "aquatoneDir"))
    with open(c.getPath("web", "aquatoneDirUrls"), 'a') as weblinks:
        if found_links:
            for l in found_links:
                weblinks.write(l + '\n')
def Scan(self):
    """The Scan() function will run the initial nmap top TCP ports scan,
    enumerating versions and running nmap's default safe scripts via the
    -sC and -sV flags. -Pn skips the ping scan, and the script timeout is
    set to 5 minutes since https scripts can sometimes get stuck and output
    hundreds of lines of unnecessary output, which slows the scan down.
    5 minutes is a good timeout setting."""
    rc = run_commands.RunCommands(self.target)
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    if not os.path.exists(c.getPath("report", "reportDir")):
        os.makedirs(c.getPath("report", "reportDir"))
    if not os.path.exists(c.getPath("report", "nmapDir")):
        os.makedirs(c.getPath("report", "nmapDir"))
    print(fg.cyan + "Running Nmap Top Open Ports" + fg.rs)
    hpl = helper_lists.topPortsToScan()
    topTCP = hpl.topTCP
    topTcpPortsString = ",".join(map(str, topTCP))
    nmap_command = c.getCmd("nmap", "nmapTopTcpPorts", topTcpPorts=topTcpPortsString)
    cmd_info = "[" + fg.li_green + "+" + fg.rs + "]"
    print(f"""{cmd_info} {fg.li_green}{nmap_command}{fg.rs}""")
    rc.loginator(nmap_command)
    call(nmap_command, shell=True)
def Scan(self):
    """This Scan() function will run various Oracle scanning tools and
    attempt to find valid SIDs along with other useful information. The
    following tools will be used: nmap, tnscmd10g, oscanner, and ODAT."""
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    oracle_tns_ports = np.oracle_tns_ports
    if len(oracle_tns_ports) == 0:
        pass
    else:
        c = config_parser.CommandParser(
            f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
        if not os.path.exists(c.getPath("oracle", "oracleDir")):
            os.makedirs(c.getPath("oracle", "oracleDir"))
        print(fg.cyan + "Enumerating ORACLE, Running the following commands:" + fg.rs)
        # string_oracle_ports = ",".join(map(str, oracle_tns_ports))
        commands = []
        commands.append(c.getCmd("oracle", "nmapOracle"))
        commands.append(c.getCmd("oracle", "tnscmd10g", mode="ping"))
        commands.append(c.getCmd("oracle", "tnscmd10g", mode="version"))
        commands.append(c.getCmd("oracle", "oscanner"))
        commands.append(c.getCmd("oracle", "odatTNS", mode="ping"))
        commands.append(c.getCmd("oracle", "odatTNS", mode="version"))
        commands.append(c.getCmd("oracle", "odatTNS", mode="status"))
        self.processes = tuple(commands)
def genProxyDirsearchUrlList(self):
    """genProxyDirsearchUrlList is responsible for sorting all the URLs
    found in dirsearch's report output and combining them into one unique
    list that will be fed to Aquatone to generate a nice HTML report,
    which will then be opened in the firefox web browser."""
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    current_month = datetime.datetime.now().strftime("%b")
    if os.path.exists(c.getPath("proxy", "proxyDir")):
        awkprint = "{print $3}"
        dirsearch_files = []
        dir_list = [
            d for d in glob.iglob(c.getPath("proxy", "proxyGlob"), recursive=True)
            if os.path.isdir(d)
        ]
        for d in dir_list:
            reportFile_list = [
                fname for fname in glob.iglob(f"{d}/*", recursive=True)
                if os.path.isfile(fname)
            ]
            for rf in reportFile_list:
                if "nmap" not in rf:
                    if "dirsearch" in rf:
                        if not os.path.exists(c.getPath("web", "aquatoneDir")):
                            os.makedirs(c.getPath("web", "aquatoneDir"))
                        dirsearch_files.append(rf)
        if len(dirsearch_files) != 0:
            all_dirsearch_files_on_one_line = " ".join(map(str, dirsearch_files))
            url_list_cmd = f"""cat {all_dirsearch_files_on_one_line} | grep -Ev '400|403|401|{current_month}' | awk '{awkprint}' | sort -u > {c.getPath("proxy", "aquatoneDirProxyUrls")}"""
            call(url_list_cmd, shell=True)
def Scan(self):
    """This Scan() function will run the following tools: smbclient,
    nmblookup, nmap, nbtscan, smbmap, and enum4linux."""
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    smb_ports = np.smb_ports
    if len(smb_ports) == 0:
        pass
    else:
        c = config_parser.CommandParser(
            f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
        if not os.path.exists(c.getPath("smb", "smbDir")):
            os.makedirs(c.getPath("smb", "smbDir"))
        print(fg.cyan + "Enumerating NetBios SMB Samba Ports, Running the following commands:" + fg.rs)
        commands = []
        commands.append(c.getCmd("smb", "smbclient"))
        commands.append(c.getCmd("smb", "nmblookup"))
        commands.append(c.getCmd("smb", "nmapSmb"))
        commands.append(c.getCmd("smb", "nbtscan"))
        commands.append(c.getCmd("smb", "smbmapH"))
        commands.append(c.getCmd("smb", "smbmapHR"))
        commands.append(c.getCmd("smb", "smbmapNull"))
        commands.append(c.getCmd("smb", "smbmapNullR"))
        commands.append(c.getCmd("smb", "enum4linux"))
        self.processes = tuple(commands)
def loginator(self, executed_command):
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    logging.basicConfig(filename=c.getPath("report", "commandLog"),
                        format='%(asctime)s %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p',
                        level=logging.INFO)
    logging.info(f"[+] {executed_command} \n")
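
# A minimal usage sketch for loginator, mirroring the pattern Scan() uses
# elsewhere in this codebase: log the exact command line first, then execute
# it. The RunCommands instantiation and the command string below are
# illustrative assumptions, not part of this module.
#
#   rc = run_commands.RunCommands("10.10.10.10")
#   nmap_cmd = "nmap -sC -sV -Pn 10.10.10.10"
#   rc.loginator(nmap_cmd)  # appends a timestamped "[+] nmap ..." entry to the command log
#   call(nmap_cmd, shell=True)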
def Scan(self):
    """Create an Aquatone report based off of the dirsearch results. If the
    length of urls.txt is greater than 150, aquatone won't be run, as this
    might be an indication of too many false positives."""
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    npp = nmapParser.NmapParserFunk(self.target)
    npp.openProxyPorts()
    c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    cmd_info = "[" + fg.li_green + "+" + fg.rs + "]"
    ssl_ports = np.ssl_ports
    http_ports = np.http_ports
    proxy_http_ports = npp.proxy_http_ports
    proxy_ssl_ports = npp.proxy_ssl_ports
    proxy_ports = np.proxy_ports
    all_web_ports = []
    all_web_proxy_ports = []
    for x in ssl_ports:
        all_web_ports.append(x)
    for x in http_ports:
        all_web_ports.append(x)
    for x in proxy_http_ports:
        all_web_proxy_ports.append(x)
    for x in proxy_ssl_ports:
        all_web_proxy_ports.append(x)
    all_web_ports_comma_list = ",".join(map(str, all_web_ports))
    all_web_proxy_ports_comma_list = ",".join(map(str, all_web_proxy_ports))
    if not os.path.exists(c.getPath("web", "aquatoneDir")):
        os.makedirs(c.getPath("web", "aquatoneDir"))
    if os.path.exists(c.getPath("web", "aquatoneDirUrls")):
        check_lines = f"""wc -l {c.getPath("web","aquatoneDirUrls")} | cut -d ' ' -f 1"""
        num_urls = check_output(check_lines, stderr=STDOUT, shell=True).rstrip()
        # ToDo: open urls.txt and sort urls by occurrence of response codes.
        if int(num_urls) < 150 and (int(num_urls) != 0):
            aquatone_cmd = c.getCmd("web", "aquatone", allWebPorts=all_web_ports_comma_list)
            print(cmd_info, aquatone_cmd)
            call(aquatone_cmd, shell=True)
            if which("firefox"):
                if os.path.exists(c.getPath("web", "aquatoneReport")):
                    print(f"""{fg.cyan}Opening Aquatone Report {fg.rs}""")
                    open_in_ff_cmd = f"""firefox {c.getPath("web","aquatoneReport")} &"""
                    call(open_in_ff_cmd, shell=True)
    if os.path.exists(c.getPath("proxy", "aquatoneDirProxyUrls")):
        check_lines = f"""wc -l {c.getPath("proxy","aquatoneDirProxyUrls")} | cut -d ' ' -f 1"""
        num_urls = check_output(check_lines, stderr=STDOUT, shell=True).rstrip()
        if int(num_urls) < 150 and (int(num_urls) != 0):
            aquatone_cmd = c.getCmd("proxy", "aquatoneProxy", allWebProxyPorts=all_web_proxy_ports_comma_list, proxyPorts=proxy_ports[0])
            print(cmd_info, aquatone_cmd)
            call(aquatone_cmd, shell=True)
            if which("firefox"):
                if os.path.exists(c.getPath("proxy", "aquatoneProxyReport")):
                    open_in_ff_proxy_cmd = f"""firefox {c.getPath("proxy", "aquatoneProxyReport")} &"""
                    call(open_in_ff_proxy_cmd, shell=True)
def listFilesProxy(self):
    """This function will list all files in the report output folder and
    remove ANSI color codes from them using sed. It will also display
    nikto's output if the latter was run."""
    def removeColor(self, filename):
        sedCMD = rf'sed "s,\x1B\[[0-9;]*[a-zA-Z],,g" -i {filename}'
        return call(sedCMD, shell=True)

    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    if os.path.exists(c.getPath("proxy", "proxyDir")):
        dir_list = [
            d for d in glob.iglob(c.getPath("proxy", "proxyGlob"), recursive=True)
            if os.path.isdir(d)
        ]
        for d in dir_list:
            reportFile_list = [
                fname for fname in glob.iglob(f"{d}/*", recursive=True)
                if os.path.isfile(fname)
            ]
            for rf in reportFile_list:
                if "nmap" not in rf and "aquatone" not in rf and "eyewitness" not in rf:
                    # tool reports that may contain ANSI color escape sequences
                    color_reports = ("wafw00f", "whatweb", "wpscan", "sslscan",
                                     "dnsenum", "drupal", "joomlavs", "oracle")
                    if any(tool in rf for tool in color_reports):
                        removeColor(self, rf)
                    if "nikto" in rf:
                        check_nikto_lines = f"""wc -l {rf} | cut -d ' ' -f 1"""
                        num_lines_nikto = check_output(check_nikto_lines, stderr=STDOUT, shell=True).rstrip()
                        if int(num_lines_nikto) < 80:
                            call(f"cat {rf}", shell=True)
                    if "vulns" in rf and fnmatch(rf, "*.log"):
                        removeColor(self, rf)
def Scan(self):
    """If nmap's results include an open http-proxy port, try to add the
    server IP to your proxychains config file and then proceed to scan the
    target again through the proxy port using proxychains and nmap. If more
    open ports are discovered, proceed to enumerate all found open ports
    through the http-proxy port."""
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    proxyPorts = np.proxy_ports
    hpl = helper_lists.topPortsToScan()
    topTCP = hpl.topTCP
    topTcpPortsString = ",".join(map(str, topTCP))
    cmd_info = "[" + fg.li_green + "+" + fg.rs + "]"
    if len(proxyPorts) == 0:
        pass
    else:
        c = config_parser.CommandParser(
            f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
        duplicate_cmds = []
        add_line_cmd = rf"""sed -e "\$ahttp {self.target} {proxyPorts[0]}" -i /etc/proxychains.conf"""
        comment_out_line_cmd = f"""sed -e '/socks5/ s/^#*/#/' -i /etc/proxychains.conf"""
        proxy_config_file = "/etc/proxychains.conf"
        try:
            pcCF = open(proxy_config_file, "r")
            for line in pcCF:
                parsed_lines = line.rstrip()
                if not parsed_lines.startswith("#"):
                    tor_match = re.findall("socks5", parsed_lines)
                    sorted_tor_matches = sorted(set(tor_match), reverse=True)
                    if "socks5" in sorted_tor_matches:
                        duplicate_cmds.append(comment_out_line_cmd)
                matches = re.findall(f"http {self.target}", parsed_lines)
                sorted_matches = sorted(set(matches), reverse=True)
                if f"http {self.target}" not in sorted_matches:
                    duplicate_cmds.append(add_line_cmd)
            pcCF.close()
            sorted_cmds = sorted(set(duplicate_cmds))
            if len(sorted_cmds) != 0:
                for cmd in sorted_cmds:
                    call(cmd, shell=True)
        except FileNotFoundError as fnf_error:
            print(fnf_error)
            exit()
        if not os.path.exists(c.getPath("proxy", "proxyDir")):
            os.makedirs(c.getPath("proxy", "proxyDir"))
        proxychains_nmap_top_ports_cmd = c.getCmd(
            "proxy", "proxychainsNmapTopPorts", topTcpPorts=topTcpPortsString)
        print(cmd_info, proxychains_nmap_top_ports_cmd)
        call(proxychains_nmap_top_ports_cmd, shell=True)
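
# For reference, the two sed edits above aim to leave /etc/proxychains.conf
# with any tor socks5 entry commented out and the target's http proxy
# appended on the last line, roughly like this (IP and port below are
# illustrative; the rest of the file varies per system):
#
#   # socks5 127.0.0.1 9050
#   http 10.10.10.10 3128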
def __init__(self, target, port, tls=False, althost=None):
    self.target = target
    self.port = port
    self.processes = ""
    self.cms_processes = ""
    self.proxy_processes = ""
    self.tls = tls
    self.althost = althost
    self.conf = config_parser.CommandParser(
        f"{path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
def openUdpPorts(self):
    """The openUdpPorts function will parse all found ports from the UDP
    nmap xml file fed to the report variable. All ports will be appended
    to the lists in __init__ and will then be accessible from the
    NmapParserFunk class."""
    def parsefile(xmlfile):
        parser = make_parser()
        parser.setContentHandler(ContentHandler())
        parser.parse(xmlfile)

    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    if os.path.exists(c.getPath("nmap", "nmap_top_udp_ports_xml")):
        try:
            parsefile(c.getPath("nmap", "nmap_top_udp_ports_xml"))
            report = NmapParser.parse_fromfile(
                c.getPath("nmap", "nmap_top_udp_ports_xml"))
            self.udp_nmap_services += report.hosts[0].services
            self.udp_nmap_services = sorted(self.udp_nmap_services, key=lambda s: s.port)
            for service in self.udp_nmap_services:
                if "open" not in service.state:
                    continue
                if "open|filtered" in service.state:
                    continue
                self.udp_services.append((
                    service.port,
                    service.service,
                    service.tunnel,
                    service.cpelist,
                    service.banner,
                ))
            for service in self.udp_services:
                if service[0] not in self.udp_ports:
                    self.udp_ports.append(service[0])
                if "snmp" in service[1]:
                    if service[0] not in self.snmp_ports:
                        self.snmp_ports.append(service[0])
                if "sip" in service[1]:
                    if service[0] not in self.sip_udp_ports:
                        self.sip_udp_ports.append(service[0])
                if "isakmp?" in service[1] or ("isakmp" in service[1]):
                    if service[0] not in self.ike_ports:
                        self.ike_ports.append(service[0])
            # print("SNMP PORTS", self.snmp_ports)
            # print("UDP SERVICES", self.udp_services)
            # print("UDP OPEN PORTS", self.udp_ports)
        except Exception as e:
            print(f"""{c.getPath("nmap", "nmap_top_udp_ports_xml")} Cannot Parse UDP nmap xml file. {e}""")
            return
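
# A short sketch of consuming the parsed UDP results; each entry appended to
# udp_services above is a (port, service, tunnel, cpelist, banner) tuple.
# The target IP below is an illustrative assumption.
#
#   np = nmapParser.NmapParserFunk("10.10.10.10")
#   np.openUdpPorts()
#   for port, service, tunnel, cpelist, banner in np.udp_services:
#       print(f"{port}/udp {service} {banner}")
#   print("SNMP on:", np.snmp_ports)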
def OraclePwn(self):
    """OraclePwn will run the helper lib/oracle.sh bash script, which will
    attempt to brute force Oracle if any valid SIDs were found by the
    Scan() function."""
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    oracle_tns_ports = np.oracle_tns_ports
    if len(oracle_tns_ports) == 0:
        pass
    else:
        c = config_parser.CommandParser(
            f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
        oracle_pwn = f"""bash {c.getPath("oracle","oracleBrute")} {self.target}"""
        call(oracle_pwn, shell=True)
def ScanWebOption(self):
    """Enumerate web server ports based on nmap's output. This function
    will run the following tools: whatweb, wafw00f, dirsearch, nikto, and
    curl robots.txt. This is almost identical to the normal web scan,
    except it uses much larger wordlists."""
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    http_ports = np.http_ports
    dn = domainFinder.DomainFinder(self.target)
    dn.getRedirect()
    hostnames = dn.redirect_hostname
    if len(http_ports) == 0:
        pass
    else:
        reset = fg.rs
        print(f"""{fg.li_cyan}Enumerating HTTP Ports, Running the following commands: {reset}""")
        c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
        commands = []
        if not os.path.exists(c.getPath("web", "webDir")):
            os.makedirs(c.getPath("web", "webDir"))
        if not os.path.exists(c.getPath("web", "aquatoneDir")):
            os.makedirs(c.getPath("web", "aquatoneDir"))
        if hostnames:
            sorted_hostnames = sorted(set(hostnames))
            for hostname in sorted_hostnames:
                for port in http_ports:
                    commands.append(c.getCmd("web", "whatwebHttpHost", host=hostname, port=port))
                    # commands.append(c.getCmd("web", "eyewitnessHost", host=hostname, port=port))
                    # commands.append(c.getCmd("web", "wafw00fHost", host=hostname, port=port))
                    # commands.append(c.getCmd("web", "curlRobotsHost", host=hostname, port=port))
                    commands.append(c.getCmd("web", "dirsearchHttpHostDListMed", host=hostname, port=port, url=self.web))
                    commands.append(c.getCmd("web", "dirsearchHttpHostRaftLargeFiles", host=hostname, port=port, url=self.web))
                    commands.append(c.getCmd("web", "dirsearchHttpHostRaftLargeDirs", host=hostname, port=port, url=self.web))
                    commands.append(c.getCmd("web", "dirsearchHttpHostForeign", host=hostname, port=port, url=self.web))
                    # commands.append(c.getCmd("web", "niktoHost", host=hostname, port=port))
        else:
            for port in http_ports:
                commands.append(c.getCmd("web", "whatwebHttpTarget", port=port))
                # commands.append(c.getCmd("web", "eyewitnessTarget", port=port))
                # commands.append(c.getCmd("web", "wafw00fTarget", port=port))
                # commands.append(c.getCmd("web", "curlRobotsTarget", port=port))
                commands.append(c.getCmd("web", "dirsearchHttpTargetDListMed", port=port, url=self.web))
                commands.append(c.getCmd("web", "dirsearchHttpTargetRaftLargeFiles", port=port, url=self.web))
                commands.append(c.getCmd("web", "dirsearchHttpTargetRaftLargeDirs", port=port, url=self.web))
                commands.append(c.getCmd("web", "dirsearchHttpTargetForeign", port=port, url=self.web))
                # commands.append(c.getCmd("web", "niktoHost", port=port))
        self.processes = tuple(commands)
def topUdpAllTcp(self):
    """topUdpAllTcp will run a full nmap TCP port scan and a top UDP ports scan."""
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    hpl = helper_lists.topPortsToScan()
    topUDP = hpl.topUDP
    topUdpPortsString = ",".join(map(str, topUDP))
    commands = []
    commands.append(c.getCmd("nmap", "nmapFullTcpScan"))
    commands.append(c.getCmd("nmap", "nmapTopUdpScan", topUdpPorts=topUdpPortsString))
    self.processes = tuple(commands)
def listfiles(self):
    """This function will list all files in the report output folder and
    remove ANSI color codes from them using sed."""
    def removeColor(self, filename):
        sedCMD = rf'sed "s,\x1B\[[0-9;]*[a-zA-Z],,g" -i {filename}'
        return call(sedCMD, shell=True)

    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    dir_list = [
        d for d in glob.iglob(c.getPath("report", "reportGlob"), recursive=True)
        if os.path.isdir(d)
    ]
    for d in dir_list:
        reportFile_list = [
            fname for fname in glob.iglob(f"{d}/*", recursive=True)
            if os.path.isfile(fname)
        ]
        for rf in reportFile_list:
            if "nmap" not in rf and "aquatone" not in rf and "eyewitness" not in rf:
                # tool reports that may contain ANSI color escape sequences
                color_reports = ("wafw00f", "whatweb", "sslscan", "dnsenum",
                                 "drupal", "joomlavs", "oracle", "wpscan")
                if any(tool in rf for tool in color_reports):
                    removeColor(self, rf)
                if "vulns" in rf and fnmatch(rf, "*.log"):
                    removeColor(self, rf)
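
# The removeColor helpers above shell out to sed to strip ANSI color codes.
# Below is a dependency-free sketch of the same cleanup in pure Python, using
# the same escape-sequence pattern as the sed expression; strip_ansi_colors
# is a hypothetical helper, not part of this codebase:


def strip_ansi_colors(filename):
    """Rewrite filename in place with ANSI color escape sequences removed."""
    import re
    ansi_escape = re.compile(r"\x1B\[[0-9;]*[a-zA-Z]")
    with open(filename, "r", errors="replace") as f:
        cleaned = ansi_escape.sub("", f.read())
    with open(filename, "w") as f:
        f.write(cleaned)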
def Scan(self):
    """If LDAP ports are open, run nmap ldap scripts and enum4linux; the
    results will be fed to the ldap.sh bash script."""
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    ldap_ports = np.ldap_ports
    if len(ldap_ports) == 0:
        pass
    else:
        c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
        if not os.path.exists(c.getPath("ldap", "ldapDir")):
            os.makedirs(c.getPath("ldap", "ldapDir"))
        print(fg.cyan + "Enumerating LDAP: Lightweight Directory Access Protocol, Running the following commands:" + fg.rs)
        string_ldap_ports = ",".join(map(str, ldap_ports))
        commands = []
        commands.append(c.getCmd("ldap", "nmapLdap", ldapPorts=string_ldap_ports))
        commands.append(c.getCmd("ldap", "enum4linuxLdap"))
        self.processes = tuple(commands)
def sslProxyScan(self):
    """This function is called by lib/enumProxy.py and will enumerate
    HTTPS/SSL web servers. It will run whatweb, dirsearch, and nikto."""
    npp = nmapParser.NmapParserFunk(self.target)
    npp.openProxyPorts()
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    proxy_ssl_ports = npp.proxy_ssl_ports
    proxy_ports = np.proxy_ports
    if len(proxy_ssl_ports) == 0:
        pass
    else:
        c = config_parser.CommandParser(
            f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
        if not os.path.exists(c.getPath("proxy", "proxyDir")):
            os.makedirs(c.getPath("proxy", "proxyDir"))
        if not os.path.exists(c.getPath("proxy", "proxyWebSSL")):
            os.makedirs(c.getPath("proxy", "proxyWebSSL"))
        proxy_commands = []
        for proxy in proxy_ports:
            print(f"""{fg.li_cyan} Enumerating HTTPS Ports Through {proxy}, Running the following commands: {fg.rs}""")
            for proxy_ssl_port in proxy_ssl_ports:
                proxy_commands.append(
                    c.getCmd("proxySSL", "whatwebSSLProxy",
                             proxy=proxy, proxySSLPort=proxy_ssl_port))
                proxy_commands.append(
                    c.getCmd("proxySSL", "dirsearchProxySSLDict",
                             proxy=proxy, proxySslPort=proxy_ssl_port))
                proxy_commands.append(
                    c.getCmd("proxySSL", "niktoProxySSL",
                             proxy=proxy, proxySSLPort=proxy_ssl_port))
        self.proxy_processes = tuple(proxy_commands)
def SshMultipleUsersBruteCustom(self):
    """Run patator with a custom wordlist against multiple users specified
    as a command line argument."""
    cmd_info = "[" + fg.green + "+" + fg.rs + "]"
    green = fg.li_green
    teal = fg.li_cyan
    reset = fg.rs
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    print(f"""{teal}Beginning Password Brute Force for Users: {reset} {green}{self.users}{reset}""")
    patator_cmd = c.getCmd("ssh", "patator_ssh_multiple_users_custom", port=self.port, users=self.users, wordlist=self.passList)
    print(f"""{cmd_info} {patator_cmd}""")
    call(patator_cmd, shell=True)
def ftpDownloadAll(self, port):
    try:
        c = config_parser.CommandParser(
            f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
        if not os.path.exists(c.getPath("ftp", "ftpDir")):
            os.makedirs(c.getPath("ftp", "ftpDir"))
        if not os.path.exists(c.getPath("ftp", "anonDownloadPath")):
            os.makedirs(c.getPath("ftp", "anonDownloadPath"))
        cwd = os.getcwd()
        os.chdir(c.getPath("ftp", "anonDownloadPath"))
        wget_cmd = f"""wget -m --no-passive -c --read-timeout=5 --tries=5 ftp://anonymous:anonymous@{self.target}:{port}"""
        print(f"{fg.li_magenta}Downloading All Files from FTP Server on Port: {fg.rs}{port}")
        print(f"[{fg.li_green}+{fg.rs}] {wget_cmd}")
        print(f"{fg.li_yellow}")
        call(wget_cmd, shell=True)
        print(f"{fg.rs}")
        os.chdir(cwd)
    except IOError as e:
        print(e)
        return
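
# Before mirroring an entire FTP server with wget as above, it can help to
# check what an anonymous login actually exposes. A minimal standard-library
# sketch; list_anon_ftp is a hypothetical helper, not part of this codebase:


def list_anon_ftp(target, port=21):
    """Return the top-level directory listing for an anonymous FTP login."""
    from ftplib import FTP, error_perm
    listing = []
    try:
        ftp = FTP()
        ftp.connect(target, port, timeout=10)
        ftp.login("anonymous", "anonymous")
        ftp.retrlines("LIST", listing.append)  # collect one line per entry
        ftp.quit()
    except (OSError, error_perm) as e:
        print(e)
    return listing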
def Scan(self):
    """Enumerate DNS server if any hostnames are found from
    autorecon lib/domainFinder.py and if port 53 is open."""
    print(fg.cyan + "Checking For Virtual Host Routing and DNS" + fg.rs)
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    dnsPorts = np.dns_ports
    dn = domainFinder.DomainFinder(self.target)
    dn.Scan()
    redirect_hostname = dn.redirect_hostname
    fqdn_hostname = dn.fqdn_hostname
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    commands = []
    if len(redirect_hostname) != 0:
        for d in redirect_hostname:
            self.hostnames.append(d)
    if len(fqdn_hostname) != 0:
        for d in fqdn_hostname:
            self.hostnames.append(d)
    if len(self.hostnames) != 0 and (len(dnsPorts) != 0):
        if not os.path.exists(c.getPath("dns", "dnsDir")):
            os.makedirs(c.getPath("dns", "dnsDir"))
        if not os.path.exists(c.getPath("web", "aquatoneDir")):
            os.makedirs(c.getPath("web", "aquatoneDir"))
        # string_hosts = " ".join(map(str, self.hostnames))
        basename = []
        for host in self.hostnames:
            basename.append(".".join(host.split('.')[-2:]))
        unique_hosts = sorted(set(basename))
        for host in unique_hosts:
            commands.append(c.getCmd("dns", "dnsenum", hosts=host))
            # commands.append(c.getCmd("dns", "vhost", hosts=host))
    self.processes = tuple(commands)
def vulnCheck(self):
    """Vuln Check will check whether OpenSSH is vulnerable to username
    enumeration. If it is, a message will be printed to the user. This
    feature could be enabled to always brute force SSH automatically when
    the instance is a vulnerable version; however, it no longer runs
    automatically, as that decision should be left up to the user, among
    various other reasons."""
    cmd_info = "[" + fg.green + "+" + fg.rs + "]"
    manual_cmd_info = "[" + fg.li_yellow + "+" + fg.rs + "]"
    blue = fg.li_blue
    red = fg.red
    green = fg.li_green
    reset = fg.rs
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    ssh_product = np.ssh_product
    ssh_version = np.ssh_version
    c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    # Check which version of OpenSSH is running.
    # If the OpenSSH version is less than 7.7, enumerate users.
    # If a valid unique user is found, brute force passwords.
    if len(ssh_product) == 1:
        if ssh_version is not None:
            string_ssh_version = " ".join(map(str, ssh_version))
            if len(string_ssh_version) >= 2:
                lowercase_ssh_version = str(string_ssh_version).lower()
                first_two_nums = lowercase_ssh_version[0:3]  # e.g. "7.4" from "7.4p1"
                int_first_two_nums = float(first_two_nums)
                if ssh_product[0] == "OpenSSH":
                    if int_first_two_nums < 7.7:
                        ssh_port = np.ssh_ports
                        print(f"""{cmd_info} {blue}{ssh_product[0]} {ssh_version[0]}{reset} is {red}VULNERABLE to Username Enumeration{reset}""")
                        print(f"""{green}Consider running:{reset}""")
                        print(f"""{manual_cmd_info} {c.getCmd("ssh", "ssh_user_enum", port=ssh_port[0])}""")
                        # sb = brute.Brute(self.target, "ssh", ssh_port)
                        # sb.SshUsersBrute()
                    else:
                        print(f"""{cmd_info} {blue}{ssh_product[0]} {ssh_version[0]}{reset} is {red}NOT{reset} Vulnerable to Username Enumeration""")
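
# vulnCheck derives the major.minor version by slicing the first three
# characters of the banner, which would misread a hypothetical "10.0" release.
# A more tolerant sketch using a regex; parse_ssh_version is a hypothetical
# helper, not part of this codebase:


def parse_ssh_version(version_string):
    """Extract major.minor as a float from a banner like '7.2p2 Ubuntu 4ubuntu2.8'."""
    import re
    m = re.match(r"(\d+)\.(\d+)", version_string.strip())
    return float(f"{m.group(1)}.{m.group(2)}") if m else None

# e.g. parse_ssh_version("7.2p2 Ubuntu") -> 7.2, and 7.2 < 7.7 would trigger
# the username-enumeration notice above.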
def Enum(self):
    """This is a helper function that will run all the enumeration commands
    based on nmap's original proxychains scan output if new ports are
    discovered."""
    npp = nmapParser.NmapParserFunk(self.target)
    npp.openProxyPorts()
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    open_proxy_ports = np.proxy_ports
    if len(open_proxy_ports) == 0:
        pass
    else:
        c = config_parser.CommandParser(
            f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
        pweb = enumWeb.EnumWeb(self.target)
        pweb.proxyScan()
        http_proxy_commands = pweb.proxy_processes
        psslweb = enumWebSSL.EnumWebSSL(self.target)
        psslweb.sslProxyScan()
        ssl_proxy_commands = psslweb.proxy_processes
        all_commands = []
        proxy_tcp_ports = npp.proxy_tcp_ports
        tcp_proxy_ports = ",".join(map(str, proxy_tcp_ports))
        default_command = c.getCmd("proxy", "proxychainsDiscoveredPorts", openTcpProxyPorts=tcp_proxy_ports)
        all_commands.append(default_command)
        for cmd in http_proxy_commands:
            all_commands.append(cmd)
        for cmd in ssl_proxy_commands:
            all_commands.append(cmd)
        sorted_commands = sorted(set(all_commands), reverse=True)
        commands_to_run = []
        for i in sorted_commands:
            commands_to_run.append(i)
        allCmds = tuple(commands_to_run)
        self.all_processes = allCmds
def PwnWinRM(self):
    c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    if not os.path.exists(c.getPath("kerberos", "kerbDir")):
        os.makedirs(c.getPath("kerberos", "kerbDir"))
    # print(fg.cyan + "Checking for valid usernames. Kerbrute! Running the following commands:" + fg.rs)

    def flatten(lis):
        for item in lis:
            if isinstance(item, Iterable) and not isinstance(item, str):
                for x in flatten(item):
                    yield x
            else:
                yield item

    def parse_users():
        """Returns a list of users."""
        if os.path.exists(c.getPath("kerberos", "kerbUsers")):
            with open(c.getPath("kerberos", "kerbUsers"), 'r') as kbu:
                users = [u.split()[6].split('@')[0] for u in kbu.readlines() if 'VALID USERNAME:' in u]
                return users

    def parse_ad_domain():
        """Returns a domain as a list."""
        ad_domainName = []
        ig = helper_lists.ignoreDomains()
        ignore = ig.ignore
        try:
            with open(c.getPath("nmap", "nmap_top_ports_nmap"), "r") as nm:
                for line in nm:
                    new = (
                        line.replace("=", " ")
                        .replace("/", " ")
                        .replace("commonName=", "")
                        .replace("/organizationName=", " ")
                        .replace(",", " ")
                        .replace("_", " ")
                    )
                    matches = re.findall(r"(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{3,6}", new)
                    for x in matches:
                        if not any(s in x for s in ignore):
                            ad_domainName.append(x)
                            _ips_ignore = re.findall(r"[0-9]+(?:\.[0-9]+){3}", x)
                            if len(_ips_ignore) > 0:
                                ad_domainName.remove(x)
            sorted_ad_domains = sorted(set(a.lower() for a in ad_domainName))
            # print(sorted_ad_domains)
            return sorted_ad_domains
        except FileNotFoundError as fnf_error:
            print(fnf_error)

    def KerbBrute():
        domain = parse_ad_domain()
        if domain:
            dope_cmd = f"""{c.getCmd("kerberos", "kerbrute", domain=str(domain[0]))}"""
            print(f"[{fg.li_magenta}+{fg.rs}] {dope_cmd}")
            call(dope_cmd, shell=True)
            users = parse_users()
            if users:
                print(users)
                print("Todo: finish this module...")

    KerbBrute()
def getLinks(self):
    """Grab all links from the web server homepage, i.e. http://IP:PORT/,
    and look for .htb domain names. If a .htb domain is found, add the
    hostname to the /etc/hosts file and then proceed to fuzz the hostname
    for virtual hostname routing using wfuzz. If a valid sub-hostname is
    found, add that domain to the /etc/hosts file as well, using the
    python_hosts library's merge_names parameter. (Thanks for adding this
    feature! @jonhadfield)"""
    def cmdline(command):
        process = Popen(args=command, stdout=PIPE, shell=True)
        return process.communicate()[0]

    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    http_ports = np.http_ports
    cmd_info = "[" + fg.li_green + "+" + fg.rs + "]"
    cmd_info_orange = "[" + fg.li_yellow + "+" + fg.rs + "]"
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    if len(http_ports) != 0:
        if not os.path.exists(c.getPath("web", "webDir")):
            os.makedirs(c.getPath("web", "webDir"))
        for hp in http_ports:
            try:
                url = f"""http://{self.target}:{hp}"""
                wfuzzReport = c.getPath("web", "wfuzzReport", port=hp)
                page = requests.get(url, verify=False, timeout=(5, 30))
                data = page.text
                soup = BeautifulSoup(data, "html.parser")
                # links = []
                htb = [".htb"]
                source_domain_name = []
                for link in soup.find_all(text=lambda x: ".htb" in x):
                    matches = re.findall(r"(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{3}", link)
                    for x in matches:
                        if any(s in x for s in htb):
                            source_domain_name.append(x)
                for link in soup.find_all('img'):
                    src_matches = link.get('src')
                    matches = re.findall(r"(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{3}", src_matches)
                    for x in matches:
                        if any(s in x for s in htb):
                            source_domain_name.append(x)
            except requests.exceptions.ConnectionError as ce_error:
                print("Connection Error: ", ce_error)
                continue
            except requests.exceptions.Timeout as t_error:
                print("Connection Timeout Error: ", t_error)
                continue
            except requests.exceptions.RequestException as req_err:
                print("Some Ambiguous Exception:", req_err)
                continue
            all_hostnames = []
            if source_domain_name and self.hostnames:
                all_hostnames = list(set(source_domain_name).union(set(self.hostnames)))
            if source_domain_name and not self.hostnames:
                all_hostnames = source_domain_name
            if self.hostnames and not source_domain_name:
                all_hostnames = self.hostnames
            if all_hostnames:
                vhostnames = [i.lower() for i in sorted(set(all_hostnames))]
                vhost_log = open(c.getPath("web", "vhostnames"), "a+")
                for vh in vhostnames:
                    vhost_log.write(vh + "\n")
                vhost_log.close()
                print(f"""{cmd_info_orange} {fg.li_magenta}Found{fg.rs} {fg.cyan}{vhostnames}{fg.rs} in {fg.li_red}The Source!{fg.rs} http://{self.target}:{hp}""")
                print(f"""{cmd_info} {fg.li_magenta}Adding{fg.rs} {fg.li_cyan} {vhostnames}{fg.rs} to /etc/hosts file""")
                hosts = Hosts(path="/etc/hosts")
                new_entry = HostsEntry(entry_type="ipv4", address=self.target, names=vhostnames)
                hosts.add([new_entry], merge_names=True)
                hosts.write()
                base_domain_name = []
                for d in vhostnames:
                    self.htb_source_domains.append(d)
                    if d.count('.') == 1:
                        base_domain_name.append(d)
                try:
                    import wfuzz
                    from tqdm import tqdm
                    tk5 = c.getPath("wordlists", "top5Ksubs")
                    print(f"""{cmd_info} wfuzz -z file,{tk5} -u {base_domain_name[0]}:{hp} -H 'Host: FUZZ.{base_domain_name[0]}:{hp}'""")
                    print(f"{fg.li_yellow}Wfuzz's STDOUT is Hidden to prevent filling up Terminal. Desired Response Codes are unpredictable during initial fuzz session.{fg.rs} STDOUT will be written to {fg.li_magenta}{wfuzzReport}{fg.rs}")
                    str_domain = f"""{base_domain_name[0]}:{hp}"""
                    fuzz_domain = f"""FUZZ.{base_domain_name[0]}:{hp}"""
                    wordlist_lines = 4997
                    with tqdm(total=wordlist_lines) as pbar:
                        for r in wfuzz.fuzz(
                                url=str_domain,
                                hc=[404, 400],
                                payloads=[("file", dict(fn=tk5))],
                                headers=[("Host", fuzz_domain)],
                                printer=(wfuzzReport, "raw"),
                        ):
                            # print(r)
                            pbar.update()
                            pbar.set_description_str(desc=f"{fg.li_yellow}wfuzz{fg.rs}")
                            # pass
                except Exception as e:
                    print(e)
                if os.path.exists(wfuzzReport):
                    awk_print = "awk '{print $6}'"
                    check_occurances = f"""sed -n -e 's/^.*C=//p' {wfuzzReport} | grep -v "Warning:" | {awk_print} | sort | uniq -c"""
                    response_num = [
                        i.strip()
                        for i in cmdline(check_occurances).decode("utf-8").split("\n")
                    ]
                    res_filt = [i.split() for i in sorted(set(response_num))]
                    filt2arr = [c for c in res_filt if len(c) != 0 and int(c[0]) < 5]
                    status_code = []
                    if len(filt2arr) != 0 and (len(filt2arr) < 5):
                        # print(filt2arr)
                        for htprc in filt2arr:
                            status_code.append(htprc[1])
                    if len(status_code) != 0 and len(status_code) <= 5:
                        for _ in status_code:
                            # print(status_code)
                            awk_print = "awk '{print $9}'"
                            get_domain_cmd = f"""grep '{_} Ch' {wfuzzReport} | {awk_print}"""
                            get_domains = (check_output(get_domain_cmd, shell=True, stderr=STDOUT).rstrip().decode("utf-8").replace('"', ""))
                            subdomains = []
                            if get_domains is not None:
                                subdomains.append(get_domains)
                                sub_d = "{}.{}".format(subdomains[0], base_domain_name[0])
                                print(f"""{cmd_info_orange}{fg.li_blue} Found Subdomain!{fg.rs} {fg.li_green}{sub_d}{fg.rs}""")
                                print(f"""{cmd_info}{fg.li_magenta} Adding{fg.rs} {fg.li_cyan}{sub_d}{fg.rs} to /etc/hosts file""")
                                hosts = Hosts(path="/etc/hosts")
                                new_entry = HostsEntry(
                                    entry_type="ipv4",
                                    address=self.target,
                                    names=[sub_d],
                                )
                                hosts.add([new_entry], merge_names=True)
                                hosts.write()
                                self.htb_source_domains.append(sub_d)
def extract_source_comments(self):
    """Search the home page for comments in the HTML source code. If any
    comments are found, write them to a file in the report/web directory."""
    cmd_info = "[" + fg.li_magenta + "*" + fg.rs + "]"
    cmd_info_orange = "[" + fg.li_green + "!" + fg.rs + "]"
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    if os.path.exists(c.getPath("web", "aquatoneDirUrls")):
        url_list = []
        try:
            with open(f"""{c.getPath("web", "aquatoneDirUrls")}""", "r") as urls:
                for line in urls:
                    _url = line.rstrip()
                    url_list.append(_url)
        except FileNotFoundError as fnf_error:
            print(fnf_error)
            pass
        print(f"{cmd_info}{fg.li_yellow} Checking for comments in the source from found URL's...{fg.rs}")
        for link in url_list:
            if "https://" in link:
                if not os.path.exists(c.getPath("webSSL", "webSSLDir")):
                    os.makedirs(c.getPath("webSSL", "webSSLDir"))
                with self.no_ssl_verification():
                    try:
                        page = requests.get(link)
                        data = page.text
                        soup = BeautifulSoup(data, "html.parser")
                        comments = soup.find_all(string=lambda text: isinstance(text, Comment))
                        comments_arr = [c.extract() for c in comments]
                        if len(comments_arr) != 0:
                            print(f" {cmd_info_orange}{fg.li_red} Found Comments in the Source!{fg.rs} URL: {fg.li_blue}{link}{fg.rs}")
                            try:
                                with open(c.getPath("webSSL", "sourceComments"), "a+") as com:
                                    com.write(f"[+] URL: {link}\n")
                                    for cm in comments_arr:
                                        com_str = cm.rstrip("\n")
                                        com.write(f"{com_str}\n")
                            except FileNotFoundError as fnf:
                                print(fnf)
                    except requests.exceptions.ConnectionError as ce_error:
                        print("Connection Error: ", ce_error)
                        break
                    except requests.exceptions.Timeout as t_error:
                        print("Connection Timeout Error: ", t_error)
                        break
                    except requests.exceptions.RequestException as req_err:
                        print("Some Ambiguous Exception:", req_err)
                        break
            else:
                if not os.path.exists(c.getPath("web", "webDir")):
                    os.makedirs(c.getPath("web", "webDir"))
                try:
                    page = requests.get(link)
                    data = page.text
                    soup = BeautifulSoup(data, "html.parser")
                    comments = soup.find_all(string=lambda text: isinstance(text, Comment))
                    comments_arr = [c.extract() for c in comments]
                    if len(comments_arr) != 0:
                        print(f" {cmd_info_orange}{fg.li_red} Found Comments in the Source!{fg.rs} URL: {fg.li_blue}{link}{fg.rs}")
                        try:
                            with open(c.getPath("web", "sourceComments"), "a+") as com:
                                com.write(f"[+] URL: {link}\n")
                                for cm in comments_arr:
                                    com_str = cm.rstrip("\n")
                                    com.write(f"{com_str}\n")
                        except FileNotFoundError as fnf:
                            print(fnf)
                except requests.exceptions.ConnectionError as ce_error:
                    print("Connection Error: ", ce_error)
                    break
                except requests.exceptions.Timeout as t_error:
                    print("Connection Timeout Error: ", t_error)
                    break
                except requests.exceptions.RequestException as req_err:
                    print("Some Ambiguous Exception:", req_err)
                    break
        if os.path.exists(f"""{c.getPath("web", "sourceComments")}"""):
            print(f"""{cmd_info} Writing Comments to {c.getPath("web","sourceComments")}""")
        if os.path.exists(f"""{c.getPath("webSSL", "sourceComments")}"""):
            print(f"""{cmd_info} Writing Comments to {c.getPath("webSSL","sourceComments")}""")
def CMS(self):
    """If a valid CMS is found from initial web enumeration, more
    specifically, WhatWeb's results, then proceed to enumerate the CMS
    further using wpscan, magescan, nmap, droopescan, joomscan, joomlavs,
    davtest, and hydra. For WordPress, a brute force bash script is also
    created using cewl, which can then be used by wpscan to try to brute
    force users and passwords."""
    c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    http_ports = np.http_ports
    dn = domainFinder.DomainFinder(self.target)
    dn.getRedirect()
    hostnames = dn.redirect_hostname
    another_array_of_hostnames = []
    if os.path.exists(c.getPath("web", "vhostnames")):
        with open(c.getPath("web", "vhostnames"), "r") as vhfile:
            lines = vhfile.readlines()
            for vh in lines:
                another_array_of_hostnames.append(vh.strip())
    if len(hostnames) != 0:
        for d in hostnames:
            another_array_of_hostnames.append(d)
    cms_commands = []
    if len(http_ports) == 0:
        pass
    else:
        for http_port in http_ports:
            whatweb_files = []
            whatweb_hostnames = []
            dir_list = [
                d for d in glob.iglob(c.getPath("report", "reportGlob"), recursive=True)
                if os.path.isdir(d)
            ]
            for d in dir_list:
                reportFile_list = [
                    fname for fname in glob.iglob(f"""{d}/*""", recursive=True)
                    if os.path.isfile(fname)
                ]
                for rf in reportFile_list:
                    if "nmap" not in rf:
                        if "whatweb" in rf:
                            if str(http_port) in rf:
                                whatweb_files.append(rf)
                                if len(another_array_of_hostnames) != 0:
                                    for host in another_array_of_hostnames:
                                        if host in rf:
                                            whatweb_hostnames.append(host)
            if len(whatweb_files) != 0:
                for i in whatweb_files:
                    cms_strings = [
                        "WordPress",
                        "Magento",
                        "tomcat",
                        "WebDAV",
                        "Microsoft-IIS 6.0",
                        "Drupal",
                        "Joomla",
                        "Webmin",
                    ]
                    try:
                        with open(i, "r") as wwf:
                            for word in wwf:
                                fword = (
                                    word.replace("[", " ")
                                    .replace("]", " ")
                                    .replace(",", " ")
                                )
                                for cms in cms_strings:
                                    if cms in fword:
                                        if len(whatweb_hostnames) != 0:
                                            for hn in whatweb_hostnames:
                                                if hn in i:
                                                    if "WordPress" in cms:
                                                        wpscan_cmd = c.getCmd("web", "wpscanHttpHost", host=hn, httpPort=http_port)
                                                        cms_commands.append(wpscan_cmd)
                                                    if "Drupal" in cms:
                                                        if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                            os.makedirs(c.getPath("vuln", "vulnDir"))
                                                        cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="Drupal"))
                                                        cms_commands.append(c.getCmd("web", "droopescanHost", host=hn, httpPort=http_port))
                                                    if "Joomla" in cms:
                                                        if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                            os.makedirs(c.getPath("vuln", "vulnDir"))
                                                        cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="Joomla"))
                                                        cms_commands.append(c.getCmd("web", "joomscanHost", host=hn, httpPort=http_port))
                                                        cms_commands.append(c.getCmd("web", "joomlavsHost", host=hn, httpPort=http_port))
                                                    if "Magento" in cms:
                                                        if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                            os.makedirs(c.getPath("vuln", "vulnDir"))
                                                        cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="Magento"))
                                                        cms_commands.append(c.getCmd("web", "magescanHost", host=hn, httpPort=http_port))
                                                    if "WebDAV" in cms or ("Microsoft-IIS 6.0" in cms):
                                                        if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                            os.makedirs(c.getPath("vuln", "vulnDir"))
                                                        cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="WebDAV"))
                                                        webdav_cmd = c.getCmd("web", "davtestHost", host=hn)
                                                        webdav_cmd2 = c.getCmd("web", "webDavNmap", httpPort=http_port)
                                                        cms_commands.append(webdav_cmd)
                                                        cms_commands.append(webdav_cmd2)
                                                    if "tomcat" in cms:
                                                        if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                            os.makedirs(c.getPath("vuln", "vulnDir"))
                                                        cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="tomcat"))
                                                        cms_commands.append(c.getCmd("web", "tomcatHydraHost", host=hn, httpPort=http_port))
                                                    if "Webmin" in cms:
                                                        if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                            os.makedirs(c.getPath("vuln", "vulnDir"))
                                                        cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="Webmin"))
                                        else:
                                            if "WordPress" in cms:
                                                wpscan_cmd = c.getCmd("web", "wpscanHttpTarget", httpPort=http_port)
                                                cms_commands.append(wpscan_cmd)
                                                manual_brute_force_script = f"""#!/bin/bash
if [[ -n $(grep -i "User(s) Identified" {c.getPath("web", "wpscanHttpTarget", httpPort=http_port)}) ]]; then
    grep -w -A 100 "User(s)" {c.getPath("web", "wpscanHttpTarget", httpPort=http_port)} | grep -w "[+]" | grep -v "WPVulnDB" | cut -d " " -f 2 | head -n -7 >{c.getPath("web", "wordpressUsers")}
    {c.getCmd("web", "CewlWeb", httpPort=http_port)}
    sleep 10
    echo "Adding John Rules to Cewl Wordlist!"
    {c.getCmd("web", "cewl2John")}
    sleep 3
    # brute force again with wpscan
    {c.getCmd("web", "wpscanCewlBrute", httpPort=http_port)}
    sleep 1
    if grep -i "No Valid Passwords Found" {c.getPath("web", "wpscanCewlBrute")}; then
        if [[ -s {c.getPath("web", "johnCewlWordlist")} ]]; then
            {c.getCmd("web", "wpscanCewlJohnBrute", httpPort=http_port)}
        else
            echo "John wordlist is empty :("
        fi
        sleep 1
        if grep -i "No Valid Passwords Found" {c.getPath("web", "wordpressJohnCewlBrute")}; then
            {c.getCmd("web", "wpscanFastTrackBrute", httpPort=http_port)}
        fi
    fi
fi
"""
                                                try:
                                                    with open(c.getPath("web", "wpscanBashBruteScript"), "w") as wpb:
                                                        print("Creating wordpress Brute Force Script...")
                                                        wpb.write(manual_brute_force_script)
                                                    call(f"""chmod +x {c.getPath("web", "wpscanBashBruteScript")}""", shell=True)
                                                except FileNotFoundError as fnf_error:
                                                    print(fnf_error)
                                            if "Drupal" in cms:
                                                if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                    os.makedirs(c.getPath("vuln", "vulnDir"))
                                                cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="Drupal"))
                                                cms_commands.append(c.getCmd("web", "droopescan", httpPort=http_port))
                                            if "Joomla" in cms:
                                                if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                    os.makedirs(c.getPath("vuln", "vulnDir"))
                                                cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="Joomla"))
                                                cms_commands.append(c.getCmd("web", "joomscan", httpPort=http_port))
                                                cms_commands.append(c.getCmd("web", "joomlavsTarget", httpPort=http_port))
                                            if "Magento" in cms:
                                                if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                    os.makedirs(c.getPath("vuln", "vulnDir"))
                                                cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="Magento"))
                                                cms_commands.append(c.getCmd("web", "magescan", httpPort=http_port))
                                            if "WebDAV" in cms or ("Microsoft-IIS 6.0" in cms):
                                                if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                    os.makedirs(c.getPath("vuln", "vulnDir"))
                                                cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="WebDAV"))
                                                webdav_cmd = c.getCmd("web", "davtest")
                                                webdav_cmd2 = c.getCmd("web", "webDavNmap", httpPort=http_port)
                                                cms_commands.append(webdav_cmd)
                                                cms_commands.append(webdav_cmd2)
                                            if "tomcat" in cms:
                                                if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                    os.makedirs(c.getPath("vuln", "vulnDir"))
                                                cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="tomcat"))
                                                cms_commands.append(c.getCmd("web", "tomcatHydra", httpPort=http_port))
                                            if "Webmin" in cms:
                                                if not os.path.exists(c.getPath("vuln", "vulnDir")):
                                                    os.makedirs(c.getPath("vuln", "vulnDir"))
                                                cms_commands.append(c.getCmd("vuln", "searchsploit", strang=str(cms), name="Webmin"))
                    except FileNotFoundError as fnf_error:
                        print(fnf_error)
                        continue
    sorted_commands = sorted(set(cms_commands))
    commands_to_run = [i for i in sorted_commands]
    self.cms_processes = tuple(commands_to_run)
def Scan(self):
    """Enumerate web server ports based on nmap's output. This function
    will run the following tools: whatweb, wafw00f, dirsearch, nikto, and
    curl robots.txt."""
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    http_ports = np.http_ports
    system_type = np.os_system_type
    if len(http_ports) == 0:
        pass
    else:
        hl = helper_lists.IgnoreHttpPorts()
        _http_ports = [x for x in http_ports if x not in hl.ignore_http_ports]
        print(f"""{fg.li_cyan}Enumerating HTTP Ports! {fg.rs}""")
        c = config_parser.CommandParser(f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
        dn = domainFinder.DomainFinder(self.target)
        dn.getRedirect()
        hostnames = sorted(set(a.lower() for a in dn.redirect_hostname))
        if not os.path.exists(c.getPath("web", "webDir")):
            os.makedirs(c.getPath("web", "webDir"))
        if not os.path.exists(c.getPath("web", "aquatoneDir")):
            os.makedirs(c.getPath("web", "aquatoneDir"))
        commands = []
        another_array_of_hostnames = []
        if hostnames:
            for d in hostnames:
                another_array_of_hostnames.append(d)
        if another_array_of_hostnames:
            vhc = vhostCrawl.checkSource(self.target, hostnames=another_array_of_hostnames)
            vhc.getLinks()
            htb_source_domains = vhc.htb_source_domains
            if htb_source_domains:
                for d in htb_source_domains:
                    another_array_of_hostnames.append(d)
            sorted_hostnames = sorted(set(a.lower() for a in another_array_of_hostnames))
            self.check_links(sorted_hostnames, http_ports)
            for hostname in sorted_hostnames:
                for port in _http_ports:
                    commands.append(c.getCmd("web", "niktoHost", host=hostname, port=port))
                    commands.append(c.getCmd("web", "whatwebHttpHost", host=hostname, port=port))
                    commands.append(c.getCmd("web", "wafw00fHost", host=hostname, port=port))
                    commands.append(c.getCmd("web", "curlRobotsHost", host=hostname, port=port))
                    if system_type:
                        if system_type[0] == "Windows":
                            commands.append(c.getCmd("web", "dirsearchHttpHostDictWindows", host=hostname, port=port))
                            robots_check = check_robots.ParseRobots(self.target, port, althost=hostname)
                            disallowed_dirs = robots_check.interesting_dirs()
                            if disallowed_dirs:
                                for _dir in disallowed_dirs:
                                    commands.append(c.getCmd("web", "dirsearchHostDisallowedWindows", host=hostname, port=port, dirname=_dir))
                        if system_type[0] == "Linux":
                            commands.append(c.getCmd("web", "dirsearchHttpHostDict", host=hostname, port=port))
                            robots_check = check_robots.ParseRobots(self.target, port, althost=hostname)
                            disallowed_dirs = robots_check.interesting_dirs()
                            if disallowed_dirs:
                                for _dir in disallowed_dirs:
                                    commands.append(c.getCmd("web", "dirsearchHostDisallowed", host=hostname, port=port, dirname=_dir))
                    else:
                        commands.append(c.getCmd("web", "dirsearchHttpHostDict", host=hostname, port=port))
                        robots_check = check_robots.ParseRobots(self.target, port, althost=hostname)
                        disallowed_dirs = robots_check.interesting_dirs()
                        if disallowed_dirs:
                            for _dir in disallowed_dirs:
                                commands.append(c.getCmd("web", "dirsearchHostDisallowed", host=hostname, port=port, dirname=_dir))
        else:
            for port in _http_ports:
                commands.append(c.getCmd("web", "niktoTarget", port=port))
                commands.append(c.getCmd("web", "whatwebHttpTarget", port=port))
                commands.append(c.getCmd("web", "wafw00fTarget", port=port))
                commands.append(c.getCmd("web", "curlRobotsTarget", port=port))
                if system_type:
                    if system_type[0] == "Windows":
                        commands.append(c.getCmd("web", "dirsearchHttpTargetDictWindows", port=port))
                        robots_check = check_robots.ParseRobots(self.target, port)
                        disallowed_dirs = robots_check.interesting_dirs()
                        if disallowed_dirs:
                            for _dir in disallowed_dirs:
                                commands.append(c.getCmd("web", "dirsearchDisallowedWindows", port=port, dirname=_dir))
                    if system_type[0] == "Linux":
                        commands.append(c.getCmd("web", "dirsearchHttpTargetDict", port=port))
                        robots_check = check_robots.ParseRobots(self.target, port)
                        disallowed_dirs = robots_check.interesting_dirs()
                        if disallowed_dirs:
                            for _dir in disallowed_dirs:
                                commands.append(c.getCmd("web", "dirsearchDisallowed", port=port, dirname=_dir))
                else:
                    commands.append(c.getCmd("web", "dirsearchHttpTargetDict", port=port))
                    robots_check = check_robots.ParseRobots(self.target, port)
                    disallowed_dirs = robots_check.interesting_dirs()
                    if disallowed_dirs:
                        for _dir in disallowed_dirs:
                            commands.append(c.getCmd("web", "dirsearchDisallowed", port=port, dirname=_dir))
        # sorted_cmds = sorted(set(commands), reverse=True)
        # commands_to_run = [i for i in sorted_cmds]
        self.processes = tuple(commands)