def nmapHTTP(host, port, output, p=""):
    """Run nmap's discovery/vuln/version NSE scripts against host:port.

    Results are written via -oA using *output* as the base name; when *p*
    is given it is forwarded to nmap as --proxies.
    """
    whine("nMap HTTP Modules : " + host + " Port: " + port, "debug")
    proxy_part = "--proxies " + p + " " if p else ""
    cmd = ("nmap -Pn --script discovery,vuln,version " + proxy_part +
           host + " -p " + port + " -oA " + output)
    muxERquiet(cmd)
def nikto(url, f, p=""):
    """Run nikto against *url* (1h cap, all CGI dirs), text output to *f*.

    When *p* is set it is forwarded to nikto as --useproxy.
    """
    whine("Running nikto : " + url, "debug")
    proxy_part = "--useproxy " + p + " " if p else ""
    cmd = ("nikto -Cgidirs all -nointeractive -ask no -maxtime 1h " +
           proxy_part + "-host " + url + " -Format txt -output " + f)
    muxERquiet(cmd)
def portLandia(file):
    """Parse a grep-able nmap (.gnmap) file: collect open ports and URLs.

    Every open port found is queued as an INSERT into the results table;
    any port whose service string mentions http/https also yields a URL.

    Returns:
        tuple: (set of open port strings, set of http(s) URL strings)
    """
    # get all open ports per host
    cmd = "cat " + file + " | grep Ports: "
    CBH = muxER(cmd)
    allPort = set()
    httpList = set()
    # BUGFIX: use finditer so every "Host:" line is processed;
    # re.search() only ever handled the first host in the file.
    for p in re.finditer(r'Host: (\d+\.\d+\.\d+\.\d+).*Ports: (.*)', CBH):
        H = p.group(1)
        for o in p.group(2).split(','):
            # 8834/open/tcp//ssl|http//Nessus vulnerability scanner http UI/
            mo = re.search(r'(\d+)/open/(\w+)/\w*/(\w+)', o)
            if mo:
                allPort.add(mo.group(1))
                DBcommit = 'INSERT INTO results VALUES (?,?,?,?)', [
                    H, mo.group(1), mo.group(2), mo.group(3)
                ]
                dbQueue.workDB.put(DBcommit)
                # BUGFIX: r'(http|https)' always captured "http" (regex
                # alternation is leftmost-first), so https services were
                # reported as http:// URLs; r'(https?)' keeps the "s".
                http = re.search(r'(https?)', o, re.IGNORECASE)
                if http:
                    url = http.group(1).lower() + "://" + H + ":" + mo.group(1)
                    httpList.add(url)
                    helper.whine("portLandia: Adding URL: " + url, "debug")
    return allPort, httpList
def confirmIP(matchWork, cidr):
    """Validate *matchWork* as an IP address and queue it for scanning.

    On success the host string (ip + cidr) is queued as an INSERT into
    the Hosts table and pushed onto the scanning work queue.

    Returns:
        bool: True when the address is valid and queued, False otherwise.
    """
    # Lets see if this is a real IP
    try:
        ipaddress.ip_address(matchWork)
        # Add hosts to DB
        h = str(matchWork) + cidr
        # Add the DB task to the Queue (a worker owns the actual writes)
        DBcommit = 'INSERT INTO Hosts VALUES (?,?,?)', [
            h, "Waiting", "No open ports"
        ]
        dbQueue.workDB.put(DBcommit)
        # Add the ip work to the Queue
        dbQueue.work.put(h)
        return True
    except ValueError:
        helper.printR("Address/Netmask is invalid: " + '\033[0m' + matchWork + cidr)
        return False
    except Exception as e:
        # BUGFIX: log tag previously said [validateHost]; this is confirmIP
        helper.whine('[confirmIP] ' + str(e) + " " + str(matchWork), "error")
        return False
def validateHost(network):
    """Validate a target spec (single IP or CIDR) and queue host work.

    "/32" specs and bare IPs are queued as single hosts; any wider CIDR
    is expanded and every member address queued individually.
    """
    helper.whine('\033[94m' + "Validating Host : " + '\033[0m' + network, "status")
    cidr = ""
    # Single IP or a network
    # BUGFIX: dots escaped -- an unescaped "." matched any character,
    # so strings like "1a2b3c4/24" could slip through as IPs.
    match = re.search(r'(\d+\.\d+\.\d+\.\d+)(/\d+)', network)
    if match:
        matchWork = match.group(1)
        helper.whine('\033[94m' + "IP Addr : " + '\033[0m' + matchWork, "status")
        helper.whine('\033[94m' + "Subnet : " + '\033[0m' + match.group(2), "status")
        cidr = match.group(2)
        if match.group(2) == '/32':
            confirmIP(matchWork, cidr)
        else:
            helper.whine("Expanding network : " + '\033[0m' + network, "status")
            expandedIPList = ipaddress.ip_network(network)
            # each expanded member is queued as an individual /32 host
            cidr = "/32"
            for ip in expandedIPList:
                confirmIP(ip, cidr)
    else:
        helper.whine('\033[94m' + "Single IP : " + '\033[0m' + network, "status")
        matchWork = network
        cidr = "/32"
        confirmIP(matchWork, cidr)
def portLandia(file):
    """Parse a grep-able nmap (.gnmap) file: collect open ports and URLs.

    Discovery-mode variant: no DB writes, and each generated URL gets a
    trailing "/".

    Returns:
        tuple: (set of open port strings, set of http(s) URL strings)
    """
    # get all open ports per host
    cmd = "cat " + file + " | grep Ports: "
    CBH = muxER(cmd)
    allPort = set()
    httpList = set()
    # BUGFIX: use finditer so every "Host:" line is processed;
    # re.search() only ever handled the first host in the file.
    for p in re.finditer(r'Host: (\d+\.\d+\.\d+\.\d+).*Ports: (.*)', CBH):
        H = p.group(1)
        for o in p.group(2).split(','):
            # 8834/open/tcp//ssl|http//Nessus vulnerability scanner http UI/
            mo = re.search(r'(\d+)/open/(\w+)/\w*/(\w+)', o)
            if mo:
                allPort.add(mo.group(1))
                # BUGFIX: r'(http|https)' always captured "http" (leftmost
                # alternative wins), so https services produced http://
                # URLs; r'(https?)' keeps the trailing "s".
                http = re.search(r'(https?)', o, re.IGNORECASE)
                if http:
                    url = http.group(1).lower() + "://" + H + ":" + mo.group(1) + "/"
                    httpList.add(url)
                    helper.whine("Discovery - Identified URL : " + url, "debug")
    return allPort, httpList
def dirb(url, f, p=""):
    """Brute-force directories on *url* with dirb, writing output to *f*.

    Uses the bundled master-dirb.txt wordlist; *p*, when set, is passed
    to dirb as the proxy (-p).
    """
    whine("Running dirb : " + url, "debug")
    wList = os.path.abspath(
        os.path.dirname(__file__)) + "/wordlists/master-dirb.txt"
    proxy_part = " -p " + p if p else ""
    cmd = "dirb " + url + " " + wList + " -l -r" + proxy_part + " -o " + f
    muxERquiet(cmd)
def udpScan(network, out):
    """Stage 5: UDP-scan *network* with unicornscan, output to out + '.udp'."""
    status = 'UPDATE Hosts SET status=? WHERE host=?', [
        "Stage5 - Running udp unicornscan", network
    ]
    dbQueue.workDB.put(status)
    helper.whine("UDP scanning: " + network, "debug")
    outfile = out + ".udp"
    muxER("unicornscan -mU " + network + " > " + outfile)
def chromeShot(url, f, p=""):
    """Capture a headless-Chrome screenshot of *url* into file *f*.

    A copy is also written to a dedicated ScreenShots/ directory two
    levels above *f*. When *p* is set it is used as both the HTTP and
    SSL proxy for the browser.
    """
    whine("Taking Screenshot : " + url, "debug")
    prox = Proxy()
    prox.proxy_type = ProxyType.MANUAL
    if p:
        prox.http_proxy = p
        prox.ssl_proxy = p
    capabilities = webdriver.DesiredCapabilities.CHROME
    prox.add_to_capabilities(capabilities)
    chrome_options = Options()
    chrome_options.add_argument("--headless")
    chrome_options.add_argument("--disable-logging")
    chrome_options.add_argument("--log-level=3")
    chrome_options.add_argument("--window-size=1920x1080")
    chrome_options.add_argument("--no-sandbox")
    # NOTE(review): likely intended "--user-data-dir=/tmp"; left unchanged
    chrome_options.add_argument("--user-data-dir /tmp")
    chrome_options.add_argument('--ignore-certificate-errors')
    chrome_driver = "/usr/bin/chromedriver"
    # Copy to dedicated screenshot directory
    sDir = os.path.dirname(f)
    sDir = os.path.dirname(sDir)
    sDir += "/ScreenShots/"
    if not os.path.exists(sDir):
        os.makedirs(sDir)
    sf = sDir + urllib.parse.quote(url, safe='') + ".png"
    driver = None
    try:
        driver = webdriver.Chrome(options=chrome_options,
                                  executable_path=chrome_driver,
                                  desired_capabilities=capabilities)
        driver.set_page_load_timeout(10)
        driver.get(url)
        driver.get_screenshot_as_file(f)
        # Copy to dedicated screenshot directory
        copy2(f, sf)
    except Exception as e:
        whine("screenshot Error:" + str(e), "debug")
    finally:
        # BUGFIX: quit() was only reached on success, leaking a browser
        # process whenever driver.get()/screenshot raised.
        if driver is not None:
            driver.quit()
def showResult(selection):
    """Locate result files matching *selection* and dump them to stdout.

    *selection* may be the literal 'ALL', a find(1) "-name" expression,
    or a DB result row sequence whose first element names a host file.
    Also lists any PNG screenshots found under the dump directory.
    """
    cmd = "date"
    # BUGFIX: "is" compared object identity with a string literal (works
    # only by interning accident); equality is what is meant here.
    if selection == 'ALL':
        helper.whine("Search key: " + '\033[95m' + selection + '\033[0m', "debug")
        cmd = "find " + dbQueue.dumpDir + " \\( -name \"*.out\" -o -name \"*.udp\" -o -name \"*.dirb\" -o -name \"*.nikto\" \\)"
    elif "name" in selection:
        helper.whine("Search key: " + '\033[95m' + selection + '\033[0m', "debug")
        cmd = "find " + dbQueue.dumpDir + " " + selection
    else:
        r = selection
        if len(r) > 0:
            print(r[0])
            # first row element looks like "('host/..." -- extract the host part
            f = str(r[0]).split("/")[0]
            f = f.split("\'")[1]
            cmd = "find " + dbQueue.dumpDir + "* \\( -name \"*.out\" -o -name \"*.udp\" -o -name \"*.dirb\" -o -name \"*.nikto\" \\) | grep " + f
        else:
            helper.printR("This entry does not exist: " + str(selection))
            return
    results = muxER(cmd)
    if (len(results)) < 1:
        return
    fList = results.split('\n')
    for f in fList:
        if not os.path.isfile(f):
            continue
        helper.printP(f)
        # strip nmap banner/noise lines before displaying
        cmd = "cat -s " + f + " | egrep -v \"Nmap done|Starting Nmap|Warning|Note\""
        out = muxER(cmd)
        print(out)
    # Find and list screenshots
    cmd = "find " + dbQueue.dumpDir + " -name *.png"
    results = muxER(cmd)
    if (len(results)) < 1:
        return
    fList = results.split('\n')
    helper.printP("HTTP Screenshots:")
    for f in fList:
        #if not os.path.isfile(f): continue
        helper.printW("file://" + f)
        print("")
def msfHTTPAuxilary(host, port, output, p=""):
    """Run all "safe" Metasploit HTTP auxiliary modules against host:port.

    The module list comes from utils/msf.ini ([MSF-SAFE] msfLIST); only
    modules whose path matches ".*http" are run, one output file per
    module, rooted at *output*.

    Args:
        p: BUGFIX -- accepted so callers that pass a proxy (main() does
           when args.proxy is set) no longer raise TypeError; msfconsole
           proxying is not wired up here, so the value is ignored.
    """
    import ast
    import configparser
    config = configparser.ConfigParser()
    msfCFG = os.path.abspath(os.path.dirname(__file__)) + "/../utils/msf.ini"
    whine("Loading Safe Checks from: " + msfCFG, "debug")
    config.read(msfCFG)
    # msfLIST is stored as a Python list literal inside the ini file
    MSF = ast.literal_eval(config.get("MSF-SAFE", "msfLIST"))
    r = re.compile(".*http")
    msfLIST = list(filter(r.match, MSF))
    if p:
        whine("Proxy requested but not supported for Metasploit modules; ignoring", "debug")
    for module in msfLIST:
        m = module.rsplit('/', 1)[-1]
        whine("Running Metasploit Module: " + module, "debug")
        f = output + "_Metasploit_" + m + ".txt"
        cmd = "msfconsole -x \"use " + module + ";set rhosts " + host + ";set rport " + port + "; run; exit\" > " + f
        muxER(cmd)
def MBaku(taskDB):
    """DB writer worker: drain dbQueue.workDB and execute each SQL task.

    Tasks arrive as ('SQL with ? placeholders', [args]) tuples; a falsy
    item acts as the poison pill that terminates the worker.
    """
    workerName = (multiprocessing.current_process()).name
    while True:
        # Check to see if there is work to do (blocks until a task arrives)
        taskDB = dbQueue.workDB.get()
        # BUGFIX: a falsy task is the poison pill -- previously the loop
        # never exited and the death message below was unreachable.
        if not taskDB:
            break
        # need to do some magic here to pass the right data:
        # re-split the stringified (sql, [args]) tuple
        s = str(taskDB).strip('()')
        helper.whine("dbQueue: " + s, "INFO")
        pattern = r"'(.*)', (\[.*\])"
        match = re.match(pattern, s)
        if not match:
            # BUGFIX: previously fell through and crashed on match.group()
            print("No Match error " + pattern + " " + s)
            continue
        sql = match.group(1)
        s = match.group(2).strip("[]")
        s = re.sub(r'\'', '', s)
        args = tuple(item.strip() for item in s.split(','))
        db_runner(conn, sql, args)
    helper.printR("[MBaku] Got the poison pill ... DEAD.")
def chromeShot(url, f):
    """Capture a headless-Chrome screenshot of *url* into file *f*,
    copying the result into the shared screenshot directory."""
    chrome_options = Options()
    chrome_options.add_argument("--headless")
    chrome_options.add_argument("--window-size=1920x1080")
    chrome_options.add_argument("--no-sandbox")
    # NOTE(review): likely intended "--user-data-dir=/tmp"; left unchanged
    chrome_options.add_argument("--user-data-dir /tmp")
    chrome_options.add_argument('--ignore-certificate-errors')
    chrome_driver = "/usr/bin/chromedriver"
    driver = None
    try:
        driver = webdriver.Chrome(chrome_options=chrome_options,
                                  executable_path=chrome_driver)
        driver.set_page_load_timeout(3)
        driver.get(url)
        driver.get_screenshot_as_file(f)
        copy2(f, dbQueue.screenDir)
    except Exception as e:
        whine("screenshot Error:" + str(e), "debug")
    finally:
        # BUGFIX: quit() was only reached on success, leaking a browser
        # process whenever the page load or screenshot raised.
        if driver is not None:
            driver.quit()
def goBuster(url, f, p=""):
    """Run gobuster against *url*, adapting flags to the installed version.

    gobuster 2.x and 3.x have different CLIs, so the apt-reported version
    decides which command line is built. Output goes to *f*; the "vv"
    placeholder in the filename is rewritten to v2/v3 accordingly.
    """
    whine("Running gobuster : " + url, "debug")
    wList = os.path.abspath(
        os.path.dirname(__file__)) + "/../web/wordlists/master-gobuster.txt"
    # check Verion installed
    installed = muxER("apt show -a gobuster 2>/dev/null | grep Version")
    proxy_part = "-p " + p + " " if p else ""
    if re.search(r'Version: 2.*', installed):
        f = f.replace("vv", "v2")
        cmd = ("gobuster -q -l -f -k -e " + proxy_part + "-u " + url +
               " -w " + wList + " -o " + f)
    else:
        f = f.replace("vv", "v3")
        cmd = ("gobuster dir -q -l -k -e " + proxy_part + "-u " + url +
               " --wordlist " + wList + " -o " + f)
    muxERquiet(cmd)
def fin(network, out, s0, workerName):
    """Mark *network* completed in the DB and close out the tmux session."""
    helper.whine("Done with: " + '\033[0m' + network, "info")
    helper.whine("Files located at: " + '\033[95m' + out + "*" + '\033[0m', "debug")
    done = 'UPDATE Hosts SET status=? WHERE host=?', ["Completed", network]
    dbQueue.workDB.put(done)
    helper.whine(
        '\033[92m' + "[" + workerName + "] Session Closed: " + '\033[0m' + s0,
        "status")
    # reset terminal state after heavy tmux/tool output
    muxER('tput rs1')
def webTests(network, urls, out, workerName):
    """Stage 4: run the web test battery against every URL for *network*.

    Per URL (only those with an explicit :port): Metasploit HTTP aux
    modules, Chrome screenshot, nikto, dirb, gobuster. A Hosts status
    UPDATE is queued before each tool run so progress is visible.
    NOTE(review): workerName is accepted but unused in this function.
    """
    DBcommit = 'UPDATE Hosts SET status=? WHERE host=?', [
        "Stage4 - Running Web Tests (screenshot Nikto dirb)", network
    ]
    dbQueue.workDB.put(DBcommit)
    whine("Running Web Tests on " + str(len(urls)) + " URL(s)", "info")
    for u in urls:
        whine("URL : " + u, "debug")
        # extract the port from the URL; URLs without ":<port>" are skipped
        match = re.search(r'.*:(\d+)', u)
        if match:
            whine(
                "Running Metasploit Modules: " + network + ":" + match.group(1),
                "debug")
            DBcommit = 'UPDATE Hosts SET status=? WHERE host=?', [
                "Stage4 - Running Web Tests (Metasplot Auxilary)", network
            ]
            dbQueue.workDB.put(DBcommit)
            f = out + "_" + match.group(1) + "_"
            msfHTTPAuxilary(network, match.group(1), f)
            whine("Taking Screenshot: " + u + " " + match.group(1), "debug")
            DBcommit = 'UPDATE Hosts SET status=? WHERE host=?', [
                "Stage4 - Running Web Tests (Taking Screenshot)", network
            ]
            dbQueue.workDB.put(DBcommit)
            f = out + "_Port_" + match.group(1) + ".png"
            chromeShot(u, f)
            whine("Running Nikto on: " + u + " " + match.group(1), "debug")
            DBcommit = 'UPDATE Hosts SET status=? WHERE host=?', [
                "Stage4 - Running Web Tests (Nikto)", network
            ]
            dbQueue.workDB.put(DBcommit)
            f = out + "_nikto_" + match.group(1) + ".txt"
            cmd = "nikto -Cgidirs all -host " + u + " -Format txt -output " + f
            muxER(cmd)
            whine("Running dirb on: " + u + " " + match.group(1), "debug")
            DBcommit = 'UPDATE Hosts SET status=? WHERE host=?', [
                "Stage4 - Running Web Tests (dirb)", network
            ]
            dbQueue.workDB.put(DBcommit)
            f = out + "_dirb_" + match.group(1) + ".txt"
            cmd = "dirb " + u + " -o " + f
            muxER(cmd)
            whine("Running gobuster on: " + u + " " + match.group(1), "debug")
            DBcommit = 'UPDATE Hosts SET status=? WHERE host=?', [
                "Stage4 - Running Web Tests (gobuster)", network
            ]
            dbQueue.workDB.put(DBcommit)
            f = out + "_gobuster_" + "_" + match.group(1) + ".txt"
            wList = os.path.abspath(
                os.path.dirname(__file__)) + "/../web/big.txt"
            cmd = "gobuster dir -q -l -k -e -u " + u + " --wordlist " + wList + " -o " + f
            muxER(cmd)
def sweepER(network, workerName):
    """Full per-host scanning pipeline.

    Stage 1: nmap port sweep. Stage 2: extract open ports from the gnmap
    output. Stage 3: nmap service identification (and URL re-discovery).
    Stage 4: web tests. Stage 5: unicornscan UDP. Stage 6: Metasploit
    safe checks. Finally fin() marks the host completed.
    """
    # create a unique identifier date + master
    ts = time.strftime("%m%d%Y_%H_%M_%S", time.gmtime())
    #s0 = network.replace(".","-").replace("/","_") + "_" + ts
    hostIP = network.split('/', 1)[0]
    s0 = network.replace("/", "_") + "_" + ts
    # Add the work to the DB
    DBcommit = 'INSERT INTO stages VALUES (?,?)', [s0, 'init']
    dbQueue.workDB.put(DBcommit)
    # NOTE(review): s1-s6 and sd are never used below, and s5/s6 carry
    # mismatched STAGE_6/STAGE_7 labels -- confirm before removing.
    s1 = "STAGE_1_" + s0
    s2 = "STAGE_2_" + s0
    s3 = "STAGE_3_" + s0
    s4 = "STAGE_4_" + s0
    s5 = "STAGE_6_" + s0
    s6 = "STAGE_7_" + s0
    sd = "ALLDONE_" + s0
    # create a muxer for the session
    helper.whine(
        '\033[92m' + "[" + workerName + "] Session created: " + '\033[0m' + s0,
        "status")
    DBcommit = 'UPDATE Hosts SET status=? WHERE host=?', [
        "Stage1 - Running initial nmap sweep", network
    ]
    dbQueue.workDB.put(DBcommit)
    #
    # stage 1 - nMap : check for open ports
    out = dbQueue.dumpDir + hostIP
    os.makedirs(out)
    out = out + "/" + s0
    cmd = pickWeapon("nmap", network, out)
    muxER(cmd)
    #
    # Stage 2 - nMap : get open ports from the gnmap file
    DBcommit = 'UPDATE Hosts SET status=? WHERE host=?', [
        "Stage2 - Creating list of open ports", network
    ]
    dbQueue.workDB.put(DBcommit)
    f = out + ".gnmap"
    initPORTs, initURLs = portLandia(f)
    iPORTstr = "|".join(initPORTs)
    netOut = network + " " + str(iPORTstr)
    helper.whine(netOut, "debug")
    # If a host has 0 in set move on ... #########################
    if len(initPORTs) < 1:
        fin(network, out, s0, workerName)
        return
    #
    # Stage 3 - nMap : get service description
    DBcommit = 'UPDATE Hosts SET status=?, ports=? WHERE host=?', [
        "Stage3 - Running nMap service description", iPORTstr, network
    ]
    dbQueue.workDB.put(DBcommit)
    f = dbQueue.serviceDir + s0 + "_ServiceID"
    cmd = servicABLE(network, initPORTs, f)
    muxER(cmd)
    f = f + ".gnmap"
    # check for updates to the url list
    PORTs, URLs = portLandia(f)
    # merge stage-1 and stage-3 findings; ports sorted numerically
    finalPortList = list(set().union(initPORTs, PORTs))
    finalPortList = sorted(finalPortList, key=int)
    finalPORTstr = "|".join(finalPortList)
    finalUrlList = list(set().union(initURLs, URLs))
    DBcommit = 'UPDATE Hosts SET ports=? WHERE host=?', [finalPORTstr, network]
    dbQueue.workDB.put(DBcommit)
    #
    # Stage 4 - Web Tests: ScreenShot, Nikto , dirbuster
    webTests(network, finalUrlList, out, workerName)
    #
    # Stage 5 - unicornscan: UDP
    udpScan(network, out)
    #
    # Stage 6 - MSF
    hostWork.msfSafeChecks(network, out)
    #
    # Clean-up ... we are done
    fin(network, out, s0, workerName)
def main():
    """Drive the web-check tool chain over the URL list in args.urlFile.

    Loads and normalizes URLs (one trailing "/"), optionally discovers
    additional HTTP ports (--discover), then runs the selected checks
    (screenshot, gobuster, dirb, MSF aux, nikto, nmap HTTP) per URL.
    """
    # Open File and check for URLs
    urlFile = args.urlFile
    uriList = list()
    try:
        urlSet = set(line.strip() for line in open(urlFile))
        for i in urlSet:
            if i:
                # normalize to exactly one trailing slash
                i = i.rstrip('/')
                i += "/"
                uriList.append(i)
        if len(uriList) < 1:
            print("No Hosts loaded ... Check File:" + urlFile)
            exit()
        helper.whine('\033[94m' + "[*] Loaded " + '\033[95m' +
                     str(len(uriList)) + '\033[94m' + " URL(s)" + '\033[0m')
        '''
        if args.validate:
            # validate the hosts then queue them up
            for uri in uriList:
                # Metasploit needs the host and the port seperate
                match = re.search(r'(http|https)\:\/\/([a-zA-Z0-9.]+)([/]*)', uri)
                if match:
                    network = match.group(2)
                    # should we validate and only use IP addresses?
                    validateHost(network)
                else:
                    print("Error identifying network ... " + uri)
        '''
    except IOError:
        print("Could not read file:" + urlFile)
    # Rdiscover additional open HTTP ports
    if args.discover:
        uL = list()
        hList = idHosts(uriList)
        for u in hList:
            hstDIR = oDir + "/" + u + "/"
            if not os.path.exists(hstDIR):
                os.makedirs(hstDIR)
            f = hstDIR + "serviceDisc_"
            uL = portScanner(u, f)
            # merge newly discovered URLs, de-duplicated
            uriList = list(set(uriList + uL))
    # run tests on the final List of URLs
    cURL = 0
    tURL = len(uriList)
    helper.whine('\033[94m' + "[*] Testing " + '\033[95m' + str(len(uriList)) +
                 '\033[94m' + " URL(s)" + '\033[0m')
    for u in uriList:
        cURL += 1
        port = ""
        helper.whine(
            "\033[94m" + "[*] URL " + '\033[94m' + '\033[95m' + str(cURL) +
            " of " + str(tURL) + '\033[0m' + " : " + '\033[95m' + u +
            '\033[0m', "debug")
        # Get the port from the current URL
        match = re.search(r'(http|https)\:\/\/([a-zA-Z0-9.]+):*([0-9]*)(/*.*)', u)
        if match:
            host = match.group(2)
            # scheme default, overridden by an explicit :port if present
            if match.group(1) == "http":
                port = "80"
            if match.group(1) == "https":
                port = "443"
            if match.group(3):
                port = match.group(3)
            # We need to make a folder specific for IP
            hstDIR = oDir + "/" + host + "/"
            if not os.path.exists(hstDIR):
                os.makedirs(hstDIR)
            # URL path percent-encoded so it is filesystem-safe
            uri = urllib.parse.quote(match.group(4), safe='')
            OUTFile = hstDIR + uri
            # Run ChromShot
            if args.ScreenShot or args.allChecks:
                f = OUTFile + "_screenShot_" + port + ".png"
                if args.proxy:
                    chromeShot(u, f, args.proxy)
                else:
                    chromeShot(u, f)
            # Run Gobuster
            if args.goBuster or args.allChecks:
                f = OUTFile + "_gobuster_vv_" + port + ".txt"
                if args.proxy:
                    goBuster(u, f, args.proxy)
                else:
                    goBuster(u, f)
            # Run dirbuster
            if args.dirb or args.allChecks:
                f = OUTFile + "_dirb_" + port + ".txt"
                if args.proxy:
                    dirb(u, f, args.proxy)
                else:
                    dirb(u, f)
            # Metesploit Safe Checks
            # NOTE(review): the proxy branch passes 4 args but
            # msfHTTPAuxilary is defined with 3 parameters -- this raises
            # TypeError unless the definition gains a proxy parameter.
            if args.msfHTTPAuxilary or args.allChecks:
                f = hstDIR + "_" + port + "_"
                if args.proxy:
                    msfHTTPAuxilary(host, port, f, args.proxy)
                else:
                    msfHTTPAuxilary(host, port, f)
            # Run nikto
            if args.nikto or args.allChecks:
                f = OUTFile + "_nikto_" + port + ".txt"
                if args.proxy:
                    nikto(u, f, args.proxy)
                else:
                    nikto(u, f)
            # Run nMap HTTP
            # NOTE(review): unlike the other checks, no args.proxy branch
            # here even though nmapHTTP accepts one -- confirm intent.
            if args.nmapHTTP or args.allChecks:
                f = hstDIR + "nMap-HTTP_" + port
                nmapHTTP(host, port, f)
def discoverHosts(network):
    """Entry point for host discovery on *network* (currently a stub
    that only announces itself)."""
    whine("Welcome to discoverHosts: " + network, "info")
def msfSafeChecks(network, output):
    """Run non-HTTP "safe" Metasploit modules matching each found service.

    Looks up the (host, port, serviceID) rows previously stored for the
    host of *network*, and for each distinct service runs every
    [MSF-SAFE] module whose path matches the service name. HTTP modules
    are skipped (already handled by the web-test stage). One output file
    per module is written, rooted at *output*.
    """
    whine("Welcome to MSF Safe Checks: " + '\033[95m' + network + '\033[0m', "info")
    config = configparser.ConfigParser()
    msfCFG = os.path.abspath(os.path.dirname(__file__)) + "/utils/msf.ini"
    whine("Loading Safe Checks from: " + msfCFG, "debug")
    config.read(msfCFG)
    MSF = ast.literal_eval(config.get("MSF-SAFE", "msfLIST"))
    conn = dbQueue.conn
    host = network.split('/', 1)[0]
    # SECURITY/BUGFIX: parameterized query instead of concatenating the
    # host into the SQL string.
    DBselect = "SELECT host, port, serviceID FROM results WHERE host=?"
    whine("Gathering ports : " + host, "debug")
    r = db_runner(conn, DBselect, (host,))
    if not r:
        return
    serviceSET = set(r)
    for i in serviceSET:
        port = i[1]
        service = i[2]
        whine(
            "Identifying MSF Safe Checks for Port: " + port + " Service: " +
            service, "debug")
        # NOTE(review): service strings may contain regex metacharacters
        # (e.g. "ssl|http"); the alternation appears intended, so the
        # value is deliberately not re.escape()d.
        regEX = ".*" + service
        r = re.compile(regEX)
        msfLIST = list(filter(r.match, MSF))
        for module in msfLIST:
            m = module.rsplit('/', 1)[-1]
            # At this point we already did HTTP so lets skip them. That might change tho
            if "http" in module:
                continue
            whine("Running Metasploit Module: " + module, "debug")
            f = output + "_Metasploit_" + m + ".txt"
            cmd = "msfconsole -x \"use " + module + ";set rhosts " + host + ";set rport " + port + "; run; exit\" > " + f
            muxER(cmd)
    whine("Done with MSF Safe Checks: " + '\033[95m' + network + '\033[0m', "info")
def servicABLE(host, ports, file):
    """Build (and return) the nmap service-identification command line.

    Runs -sV with the discovery/vuln scripts across the given open
    *ports*, writing -oA output to *file* and stdout to file + '.out'.
    """
    helper.whine("Sevice Identification: " + host, "debug")
    port_list = ','.join(ports)
    stdout_file = file + ".out"
    return ("nmap -sV -n --randomize-hosts --script discovery,vuln "
            "--max-retries 1 -Pn -A -p " + port_list + " -T3 --open " +
            host + " -oA " + file + " > " + stdout_file)