def _download_database(self, chunk_size=8192):
     """
     download the database if it is available
     """
     info("discovered publicly available database for query {}".format(
         self.query))
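     # flatten the nested link markup into plain strings; each download URL is pulled from the quoted attribute below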
     flatten = lambda l: [str(item) for sublist in l for item in sublist]
     database_links = flatten(self.database_links)
     to_download = []
     for db in database_links:
         try:
             to_download.append(db.split('"')[3])
         except Exception:
             pass
     if not os.path.exists(self.downloads_directory):
         os.makedirs(self.downloads_directory)
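     # stream each file to disk in chunks, skipping anything that has already been downloaded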
     for link in to_download:
         local_filename = link.split("/")[-1]
         local_file_path = "{}/{}".format(self.downloads_directory,
                                          local_filename)
         if not os.path.exists(local_file_path):
             with requests.get(link,
                               stream=True,
                               proxies=self.proxies,
                               headers=self.headers) as downloader:
                 downloader.raise_for_status()
                 with open(local_file_path, "wb") as path:
                     for chunk in downloader.iter_content(
                             chunk_size=chunk_size):
                         if chunk:
                             path.write(chunk)
             self.downloaded_databases.append(local_file_path)
     return self.downloaded_databases
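The streaming pattern used above, shown in isolation as a minimal sketch; the function name, default directory and the idea of a bare URL are assumptions for illustration, not part of the hook class:

import os
import requests

def stream_download(url, dest_dir="downloads", chunk_size=8192):
    # create the destination directory once, then skip files that are already on disk
    os.makedirs(dest_dir, exist_ok=True)
    local_path = os.path.join(dest_dir, url.split("/")[-1])
    if os.path.exists(local_path):
        return local_path
    with requests.get(url, stream=True) as resp:
        resp.raise_for_status()
        with open(local_path, "wb") as fh:
            for chunk in resp.iter_content(chunk_size=chunk_size):
                if chunk:  # filter out keep-alive chunks
                    fh.write(chunk)
    return local_path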
Example #2
 def account_hooker(self):
     """
     hookers accounting gonna hook
     """
     try:
         req = requests.get(HIBP_URL.format(self.email),
                            headers=self.headers,
                            proxies=self.proxies)
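          # HIBP signals rate limiting with a 429 and a Retry-After header; wait it out and retry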
         if req.status_code == 429:
             wait_time = int(req.headers["Retry-After"])
             human = arrow.now().shift(seconds=wait_time).humanize()
             warn("HIBP Rate Limit Exceeded, trying again in {}".format(
                 human))
             sleep(wait_time)
             info("here we go!")
             self.account_hooker()
         elif req.status_code == 403:
             error(
                 "you have been blocked from HIBP, try again later or change your IP address"
             )
             exit(1)
         else:
             self.content = req.json()
          if self.content is not None and self.content != "":
             return self._get_breach_names()
     except ValueError:
         # this means something went wrong
         return None
Example #3
 def account_hooker(self):
     """
     hookers accounting gonna hook
     """
     try:
         req = requests.get(
             HIBP_URL.format(self.email),
             headers=self.headers,
             proxies=self.proxies
         )
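          # self.status_codes maps "throttled" and "blocked" to their HTTP status codes (429 and 403 in the hard-coded example above)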
         if req.status_code == self.status_codes["throttled"]:
             wait_time = int(req.headers["Retry-After"])
             # we'll keep this in case we need it later
             # human = arrow.now().shift(seconds=wait_time).humanize()
             warn(
                 "you've reached HIBP's request limit, adding {}s to throttle time".format(wait_time)
             )
             self.opt.throttleRequests += wait_time
             sleep(wait_time)
             info("here we go!")
             self.account_hooker()
         elif req.status_code == self.status_codes["blocked"]:
             if self.blocked != self.max_attempts:
                 if self.retry:
                     warn(
                         "you have been blocked from HIBP, WhatBreach will try {} more time(s)".format(
                             self.max_attempts - self.blocked
                         )
                     )
                     sleep(10)
                     BeenPwnedHook(
                         self.email, self.headers["hibp-api-key"], self.opt, headers=self.headers,
                         proxies=self.proxies, blocked=self.blocked + 1
                     ).account_hooker()
                 else:
                     error(
                         "you have been blocked from HIBP, skipping and continuing, pass the `--do-retry` flag to "
                         "retry the requests on failure (max of 3 retries will be attempted)"
                     )
             else:
                 error(
                     "you have been blocked, {} attempts have failed, change your IP address and try again".format(
                         self.max_attempts
                     )
                 )
         else:
             self.content = req.json()
          if self.content is not None and self.content != "":
             return self._get_breach_names()
         else:
             return None
     except ValueError:
         # this means something went wrong
         return None
Example #4
 def hooker(self):
     """
     hookers gonna hook
     """
      set_to_list_phone_numbers = []
     discovered_phone_numbers = set()
     other_discovered_emails = set()
     discovered_external_links = set()
     processed = json.loads(self.make_request())
     domain_name = self.__get_domain_from_email()
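      # hunter.io reports failures (e.g. an exceeded API limit) through an "errors" array; treat such a response as unusable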
     try:
         processed['errors'][0]['id']
         processed = None
      except (KeyError, IndexError, TypeError):
         pass
     if processed is not None:
         try:
             email_pattern_identification = "{}@{}".format(
                 processed["data"]["pattern"], domain_name)
          except (KeyError, TypeError):
             email_pattern_identification = None
         for i, _ in enumerate(processed["data"]["emails"]):
             discovered_phone_numbers.add(
                 processed["data"]["emails"][i]["phone_number"])
             other_discovered_emails.add(
                 str(processed["data"]["emails"][i]["value"]))
             for y, _ in enumerate(
                     processed["data"]["emails"][i]["sources"]):
                 discovered_external_links.add(
                     str(processed["data"]["emails"][i]["sources"][y]
                         ["uri"]))
         for item in discovered_phone_numbers:
             if item is not None:
                  set_to_list_phone_numbers.append(item)
         other_discovered_emails.add(self.email)
         info("discovered a total of {} email(s)".format(
             len(other_discovered_emails)))
         if self.verify:
             self.__verify_emails_alive(other_discovered_emails)
          file_path = process_discovered(set_to_list_phone_numbers,
                                        discovered_external_links,
                                        other_discovered_emails,
                                        email_pattern_identification,
                                        domain_name,
                                        do_write=True)
         return file_path
     else:
         error(
             "error while processing domain: {} (have you exceeded your API limit?)"
             .format(domain_name))
         return None
Example #5
 def hooker(self):
     """
     temporary hookers gonna hook harder than normal hookers
     """
     content = self._gather_database_urls()
     links = self._get_links(content)
     matched_databases = self._check_if_matched(links)
     if len(matched_databases) != 0:
         info(
             'found a total of {} database(s) that matched the query, dumping URL list'
             .format(len(matched_databases)))
         for db in matched_databases:
             print("\t~~> {}".format(db))
     return []
Example #6
 def account_hooker(self):
     """
     hookers accounting gonna hook
     """
     try:
         req = requests.get(
             HIBP_URL.format(self.email),
             headers={"User-Agent": grab_random_user_agent(RANDOM_USER_AGENT_PATH)},
             proxies=self.proxies
         )
         if req.status_code == self.status_codes["throttled"]:
             wait_time = int(req.headers["Retry-After"])
             human = arrow.now().shift(seconds=wait_time).humanize()
             warn("HIBP Rate Limit Exceeded, trying again in {}".format(human))
             sleep(wait_time)
             info("here we go!")
             self.account_hooker()
         elif req.status_code == self.status_codes["blocked"]:
             if self.blocked != self.max_attempts:
                 if self.retry:
                     warn(
                         "you have been blocked from HIBP, WhatBreach will try {} more time(s)".format(
                             self.max_attempts - self.blocked
                         )
                     )
                     sleep(10)
                     BeenPwnedHook(
                         self.email, headers=self.headers,
                         proxies=self.proxies, blocked=self.blocked + 1
                     ).account_hooker()
                 else:
                     error(
                         "you have been blocked from HIBP, skipping and continuing, pass the `--do-retry` flag to "
                         "retry the requests on failure (max of 3 retries will be attempted)"
                     )
             else:
                 error(
                     "you have been blocked, {} attempts have failed, change your IP address and try again".format(
                         self.max_attempts
                     )
                 )
         else:
             self.content = req.json()
          if self.content is not None and self.content != "":
             return self._get_breach_names()
         else:
             return None
     except ValueError:
         # this means something went wrong
         return None
Example #7
 def threaded_response_helper(self):
     info("queuing everything")
     for target in self.targets:
         self.queue.put("{}{}".format(self.url, target))
     info("starting directory bruteforcing with {} thread(s)".format(
         self.threads))
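      # daemon worker threads pull URLs off the queue; queue.join() blocks until every queued path has been processed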
     try:
         for _ in range(self.threads):
             t = threading.Thread(target=self.threader)
             t.daemon = True
             t.start()
         self.queue.join()
     except (KeyboardInterrupt, SystemExit):
          # Queue.all_tasks_done is a Condition object, not a method; the daemon
          # worker threads are torn down with the main thread, so just fall through
          pass
     return self.good_response_retval
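The same queue/worker pattern in isolation; a minimal sketch where `worker` is a hypothetical stand-in for the class's `threader` logic:

import queue
import threading

def run_threaded(targets, worker, thread_count=4):
    # queue every target, then drain the queue with daemon worker threads
    q = queue.Queue()
    for target in targets:
        q.put(target)

    def threader():
        while True:
            item = q.get()
            try:
                worker(item)
            finally:
                q.task_done()

    for _ in range(thread_count):
        t = threading.Thread(target=threader)
        t.daemon = True
        t.start()
    # join() returns once task_done() has been called for every queued item
    q.join()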
Example #8
 def __verify_emails_alive(self, email_list):
     """
     verify if the email address is deliverable or not
     """
     for email in email_list:
         info("verifying that {} is alive".format(email))
         try:
             req = requests.get(
                 HUNTER_IO_VERIFY_URL.format(email=email, api_key=self.api_key),
                 proxies=self.proxies, headers=self.headers
             )
             results = json.loads(req.text)["data"]["result"]
             info("result of verification: {}".format(str(results)))
          except Exception:
             error("error verifying email: {}".format(email))
Example #9
 def _parse_results(self, content):
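      """
      pull the discovered profile names out of an emailrep.io response and cache the raw JSON to disk
      """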
     results = []
     for item in content["details"]["profiles"]:
         results.append(str(item))
     if len(results) != 0:
         if not os.path.exists(JSON_DATA_DUMPS):
             os.makedirs(JSON_DATA_DUMPS)
         file_path = "{}/{}_emailrep.json".format(JSON_DATA_DUMPS,
                                                  self.email.split("@")[0])
         if not os.path.exists(file_path):
             with open(file_path, 'a+') as data:
                 json.dump(content, data, sort_keys=True, indent=4)
             info(
                 "all data dumped to file for future processing: {}".format(
                     file_path))
     return results
Example #10
def get_string_log_level(ip, spec, strict):
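    # spec is expected to end with an 'N/M' fraction: N blacklists flagged the IP out of M total lists checked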
    blacklists = int(spec.split("/")[0].split(" ")[-1])
    total_lists = spec.split("/")[-1]
    output_string = "{} is blacklisted on {} out of {} lists".format(
        ip, blacklists, total_lists)
    if strict == 1:
        if blacklists != 0:
            fatal(output_string)
        else:
            info(output_string)
        return 1
    if blacklists <= strict:
        info(output_string)
        return 2
    elif strict <= blacklists <= 4:
        error(output_string)
        return 0
    else:
        fatal(output_string)
        return 1
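A quick worked illustration of the spec parsing above (the spec text itself is made up):

spec = "listed on 3/45"
blacklists = int(spec.split("/")[0].split(" ")[-1])  # -> 3
total_lists = spec.split("/")[-1]                    # -> "45"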
Example #11
def main():
    opt = WhatWafParser().cmd_parser()

    if not len(sys.argv) > 1:
        error("you failed to provide an option, redirecting to help menu")
        time.sleep(2)
        cmd = "python whatwaf.py --help"
        subprocess.call(shlex.split(cmd))
        exit(0)

    if opt.encodePayload:
        spacer = "-" * 30
        info("encoding '{}' using '{}'".format(opt.encodePayload[0],
                                               opt.encodePayload[1]))
        encoded = encode(opt.encodePayload[0], opt.encodePayload[1])
        print("{}\n{}\n{}".format(spacer, encoded, spacer))
        exit(0)

    if opt.updateWhatWaf:
        info("update in progress")
        cmd = shlex.split("git pull origin master")
        subprocess.call(cmd)
        exit(0)

    if not opt.hideBanner:
        print(BANNER)

    proxy, agent = configure_request_headers(random_agent=opt.useRandomAgent,
                                             agent=opt.usePersonalAgent,
                                             proxy=opt.runBehindProxy,
                                             tor=opt.runBehindTor)

    if opt.providedPayloads is not None:
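        # inline payloads given on the command line take priority over a payload file or the built-in defaults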
        payload_list = [
            p.strip() if p[0] == " " else p
            for p in str(opt.providedPayloads).split(",")
        ]
        info("using provided payloads")
    elif opt.payloadList is not None:
        payload_list = [
            p.strip("\n") for p in open(opt.payloadList).readlines()
        ]
        info("using provided payload file '{}'".format(opt.payloadList))
    else:
        payload_list = WAF_REQUEST_DETECTION_PAYLOADS
        info("using default payloads")

    try:
        if opt.runSingleWebsite:
            info("running single web application '{}'".format(
                opt.runSingleWebsite))
            detection_main(opt.runSingleWebsite,
                           payload_list,
                           agent=agent,
                           proxy=proxy,
                           verbose=opt.runInVerbose)

        elif opt.runMultipleWebsites:
            info("reading from '{}'".format(opt.runMultipleWebsites))
            with open(opt.runMultipleWebsites) as urls:
                for i, url in enumerate(urls, start=1):
                    url = url.strip()
                    info("currently running on site #{} ('{}')".format(i, url))
                    detection_main(url,
                                   payload_list,
                                   agent=agent,
                                   proxy=proxy,
                                   verbose=opt.runInVerbose)
                    print("\n\b")
                    time.sleep(0.5)
    except KeyboardInterrupt:
        fatal("user aborted scanning")
Example #12
def main():
    try:
        opt = Parser().optparse()
        print(BANNER)
        res = Parser().check_opts(opt)
        if res is not None:
            to_search = res
        else:
            to_search = []
        do_not_search = []

        if len(to_search) == 0:
            if opt.singleEmail is None and opt.emailFile is None:
                warn("you have not provided an email to scan, redirecting to the help menu")
                subprocess.call(["python", "whatbreach.py", "--help"])
                exit(1)
            api_tokens = grab_api_tokens()
            if opt.searchHunterIo and opt.singleEmail is not None:
                info("starting search on hunter.io using {}".format(opt.singleEmail))
                file_results = HunterIoHook(
                    opt.singleEmail, api_tokens["hunter.io"], verify_emails=opt.verifyEmailsThroughHunterIo
                ).hooker()
                with open(file_results) as data:
                    emails = json.loads(data.read())["discovered_emails"]
                for email in emails:
                    to_search.append(email)
            elif opt.searchHunterIo and opt.emailFile is not None:
                if not test_file(opt.emailFile):
                    error("unable to open filename, does it exist?")
                    exit(1)
                api_tokens = grab_api_tokens()
                with open(opt.emailFile) as data:
                    for email in data.readlines():
                        email = email.strip()
                        file_results = HunterIoHook(
                            email, api_tokens["hunter.io"], verify_emails=opt.verifyEmailsThroughHunterIo
                        ).hooker()
                        with open(file_results) as results:
                            discovered_emails = json.loads(results.read())["discovered_emails"]
                        for discovered in discovered_emails:
                            to_search.append(discovered)
            elif opt.singleEmail is not None:
                info("starting search on single email address: {}".format(opt.singleEmail))
                to_search = [opt.singleEmail]
            elif opt.emailFile is not None:
                if not test_file(opt.emailFile):
                    error("unable to open filename, does it exist?")
                    exit(1)
                with open(opt.emailFile) as emails:
                    info("parsing email file: {}".format(opt.emailFile))
                    to_search = emails.readlines()
                info("starting search on a total of {} email(s)".format(len(to_search)))

        for email in to_search:
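            # each email optionally runs through the ten-minute-email and profile checks before the breach and paste lookups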
            email = email.strip()

            if opt.checkTenMinuteEmail:
                if check_ten_minute_email(email, TEN_MINUTE_EMAIL_EXTENSION_LIST):
                    warn("email: {} appears to be a ten minute email".format(email))
                    answer = prompt("would you like to process the email[y/N]")
                    if answer.startswith("n"):
                        do_not_search.append(email)

            if opt.checkEmailAccounts:
                info("searching for possible profiles related to {}".format(email))
                searcher = EmailRepHook(email)
                results = searcher.hooker()
                if results is not None and len(results) != 0:
                    info(
                        "found a total of {} possible profiles associated with {} on the following domains:".format(
                            len(results), email
                        )
                    )
                    for domain in results:
                        print("\t-> {}".format(domain.title()))
                else:
                    warn("no possible profiles discovered for email: {}".format(email))

            if email not in do_not_search:
                if opt.throttleRequests != 0:
                    time.sleep(opt.throttleRequests)
                info("searching breached accounts on HIBP related to: {}".format(email))
                account_dumps = BeenPwnedHook(email, retry=opt.retryOnFail).account_hooker()
                info("searching for paste dumps on HIBP related to: {}".format(email))

                if opt.searchPastebin:
                    paste_dumps = BeenPwnedHook(email, retry=opt.retryOnFail).paste_hooker()
                else:
                    warn("suppressing discovered pastes")
                    paste_dumps = []

                if opt.searchWeLeakInfo:
                    info("searching weleakinfo.com for breaches related to: {}".format(email))
                    searcher = WeLeakInfoHook(email, api_tokens["weleakinfo.com"])
                    tmp = set()
                    results = searcher.hooker()
                    if results is not None:
                        if account_dumps is not None:
                            original_length = len(account_dumps)
                        else:
                            original_length = 0
                        if account_dumps is not None:
                            for item in account_dumps:
                                tmp.add(item)
                        if results is not None:
                            for item in results:
                                tmp.add(item)
                        if len(tmp) != 0:
                            account_dumps = list(tmp)
                            new_length = len(account_dumps)
                            amount_discovered = new_length - original_length
                            if amount_discovered != 0:
                                info(
                                    "discovered a total of {} more breaches from weleakinfo.com".format(
                                        new_length - original_length
                                    )
                                )
                            else:
                                warn("did not discover any breaches")
                        else:
                            warn("did not discover any new databases from weleakinfo.com")
                    else:
                        warn("unable to search weleakinfo.com is your API key correct?")

                if account_dumps is not None and paste_dumps is not None:
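                    # both breaches and pastes were found; the elif branches below cover breaches only, pastes only (should not happen), and nothing at all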
                    info(
                        "found a total of {} database breach(es) and a total of {} paste(s) pertaining to: {}".format(
                            len(account_dumps), len(paste_dumps), email
                        )
                    )
                    if opt.searchDehashed:
                        if len(account_dumps) > 20:
                            warn(
                                "large amount of database breaches, obtaining links from "
                                "dehashed (this may take a minute)"
                            )
                        found_databases = DehashedHook(account_dumps).hooker()
                    else:
                        warn("suppressing discovered databases")
                        found_databases = {}
                    for i, dump in enumerate(paste_dumps, start=1):
                        found_databases["Paste#{}".format(i)] = str(dump)
                    display_found_databases(found_databases, download_pastes=opt.downloadPastes)
                    if opt.downloadDatabase:
                        for item in found_databases.keys():
                            if "Paste" not in item:
                                info("searching for downloadable databases using query: {}".format(item.lower()))
                                downloaded = DatabasesTodayHook(
                                    str(item), downloads_directory=opt.saveDirectory
                                ).hooker()
                                if len(downloaded) != 0:
                                    info(
                                        "downloaded a total of {} database(s) pertaining to query: {}".format(
                                            len(downloaded), item
                                        )
                                    )
                                    display_found_databases(
                                        downloaded, is_downloaded=True, download_pastes=opt.downloadPastes
                                    )
                                else:
                                    warn(
                                        "no databases appeared to be present and downloadable related to query: {}".format(
                                            str(item)
                                        )
                                    )

                elif account_dumps is not None and paste_dumps is None:
                    info("found a total of {} database breach(es) pertaining to: {}".format(len(account_dumps), email))
                    if opt.searchDehashed:
                        if len(account_dumps) > 20:
                            warn(
                                "large amount of database breaches, obtaining links from "
                                "dehashed (this may take a minute)"
                            )
                        found_databases = DehashedHook(account_dumps).hooker()
                    else:
                        warn("suppressing discovered databases")
                        found_databases = {}
                    if len(found_databases) != 0:
                        display_found_databases(found_databases, download_pastes=opt.downloadPastes)
                        if opt.downloadDatabase:
                            for item in found_databases.keys():
                                if "Paste" not in item:
                                    info("searching for downloadable databases using query: {}".format(item.lower()))
                                    downloaded = DatabasesTodayHook(
                                        str(item), downloads_directory=opt.saveDirectory
                                    ).hooker()
                                    if len(downloaded) != 0:
                                        info(
                                            "downloaded a total of {} database(s) pertaining to query: {}".format(
                                                len(downloaded), item
                                            )
                                        )
                                        display_found_databases(
                                            downloaded, is_downloaded=True, download_pastes=opt.downloadPastes
                                        )
                                    else:
                                        warn(
                                            "no databases appeared to be present and downloadable related to query: {}".format(
                                                str(item)
                                            )
                                        )
                    else:
                        warn("no output to show, most likely due to output suppression or dehashed")
                elif account_dumps is None and paste_dumps is not None:
                    # this should never happen
                    error("no database dumps found nor any pastes found for: {}".format(email))
                else:
                    error("email {} was not found in any breach".format(email))

        if opt.staySalty:
            # i know that you think that you know shit
            # all the shade that's coming at me I wonder who throws it
            # you can't see the vision boy, you must be outta focus
            # that's a real hot program homie, I wonder who wrote it? oh shit
            # (lyrics ripped from iSpy by Kyle, all I do is steal bruh)
            warn("all this code was stolen with <3 by Eku")
    except KeyboardInterrupt:
        error("user quit the session")
Example #13
def main():
    opt = WhatWafParser().cmd_parser()

    if not len(sys.argv) > 1:
        error("you failed to provide an option, redirecting to help menu")
        time.sleep(2)
        cmd = "python whatwaf.py --help"
        subprocess.call(shlex.split(cmd))
        exit(0)

    if opt.encodePayload:
        spacer = "-" * 30
        payload, load_path = opt.encodePayload
        info("encoding '{}' using '{}'".format(payload, load_path))
        try:
            encoded = encode(payload, load_path)
            success("encoded successfully:")
            print("{}\n{}\n{}".format(spacer, encoded, spacer))
        except (AttributeError, ImportError):
            fatal("invalid load path given, check the load path and try again")
        exit(0)

    if opt.encodePayloadList:
        spacer = "-" * 30
        try:
            file_path, load_path = opt.encodePayloadList
            info(
                "encoding payloads from given file '{}' using given tamper '{}'"
                .format(file_path, load_path))
            with open(file_path) as payloads:
                encoded = [
                    encode(p.strip(), load_path) for p in payloads.readlines()
                ]
                if opt.saveEncodedPayloads is not None:
                    with open(opt.saveEncodedPayloads, "a+") as save:
                        for item in encoded:
                            save.write(item + "\n")
                    success("saved encoded payloads to file '{}' successfully".
                            format(opt.saveEncodedPayloads))
                else:
                    success("payloads encoded successfully:")
                    print(spacer)
                    for i, item in enumerate(encoded, start=1):
                        print("#{} {}".format(i, item))
                    print(spacer)
        except IOError:
            fatal(
                "provided file '{}' appears to not exist, check the path and try again"
                .format(file_path))
        except (AttributeError, ImportError):
            fatal("invalid load path given, check the load path and try again")
        exit(0)

    if opt.updateWhatWaf:
        info("update in progress")
        cmd = shlex.split("git pull origin master")
        subprocess.call(cmd)
        exit(0)

    if not opt.hideBanner:
        print(BANNER)

    proxy, agent = configure_request_headers(random_agent=opt.useRandomAgent,
                                             agent=opt.usePersonalAgent,
                                             proxy=opt.runBehindProxy,
                                             tor=opt.runBehindTor)

    if opt.providedPayloads is not None:
        payload_list = [
            p.strip() if p[0] == " " else p
            for p in str(opt.providedPayloads).split(",")
        ]
        info("using provided payloads")
    elif opt.payloadList is not None:
        payload_list = [
            p.strip("\n") for p in open(opt.payloadList).readlines()
        ]
        info("using provided payload file '{}'".format(opt.payloadList))
    else:
        payload_list = WAF_REQUEST_DETECTION_PAYLOADS
        info("using default payloads")

    try:
        if opt.runSingleWebsite:
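            # auto_assign appears to fill in a missing URL scheme, preferring https:// when forceSSL is set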
            url_to_use = auto_assign(opt.runSingleWebsite, ssl=opt.forceSSL)
            info("running single web application '{}'".format(url_to_use))
            detection_main(url_to_use,
                           payload_list,
                           agent=agent,
                           proxy=proxy,
                           verbose=opt.runInVerbose)

        elif opt.runMultipleWebsites:
            info("reading from '{}'".format(opt.runMultipleWebsites))
            with open(opt.runMultipleWebsites) as urls:
                for i, url in enumerate(urls, start=1):
                    url = auto_assign(url, ssl=opt.forceSSL)
                    info("currently running on site #{} ('{}')".format(i, url))
                    detection_main(url,
                                   payload_list,
                                   agent=agent,
                                   proxy=proxy,
                                   verbose=opt.runInVerbose)
                    print("\n\b")
                    time.sleep(0.5)
    except KeyboardInterrupt:
        fatal("user aborted scanning")
Example #14
def main():
    opt = WhatWafParser().cmd_parser()

    if not len(sys.argv) > 1:
        error("you failed to provide an option, redirecting to help menu")
        time.sleep(2)
        cmd = "python whatwaf.py --help"
        subprocess.call(shlex.split(cmd))
        exit(0)

    if opt.encodePayload:
        spacer = "-" * 30
        payload, load_path = opt.encodePayload
        info("encoding '{}' using '{}'".format(payload, load_path))
        try:
            encoded = encode(payload, load_path)
            success("encoded successfully:")
            print("{}\n{}\n{}".format(spacer, encoded, spacer))
        except (AttributeError, ImportError):
            fatal("invalid load path given, check the load path and try again")
        exit(0)

    if opt.encodePayloadList:
        spacer = "-" * 30
        try:
            file_path, load_path = opt.encodePayloadList
            info(
                "encoding payloads from given file '{}' using given tamper '{}'"
                .format(file_path, load_path))
            with open(file_path) as payloads:
                encoded = [
                    encode(p.strip(), load_path) for p in payloads.readlines()
                ]
                if opt.saveEncodedPayloads is not None:
                    with open(opt.saveEncodedPayloads, "a+") as save:
                        for item in encoded:
                            save.write(item + "\n")
                    success("saved encoded payloads to file '{}' successfully".
                            format(opt.saveEncodedPayloads))
                else:
                    success("payloads encoded successfully:")
                    print(spacer)
                    for i, item in enumerate(encoded, start=1):
                        print("#{} {}".format(i, item))
                    print(spacer)
        except IOError:
            fatal(
                "provided file '{}' appears to not exist, check the path and try again"
                .format(file_path))
        except (AttributeError, ImportError):
            fatal("invalid load path given, check the load path and try again")
        exit(0)

    if opt.updateWhatWaf:
        info("update in progress")
        cmd = shlex.split("git pull origin master")
        subprocess.call(cmd)
        exit(0)

    if not opt.hideBanner:
        print(BANNER)

    if opt.skipBypassChecks and opt.amountOfTampersToDisplay is not None:
        warn(
            "you've chosen to skip bypass checks and chosen an amount of tamper to display, tampers will be skipped",
            minor=True)

    # there is an extra dependency that you need in order
    # for requests to run behind socks proxies, we'll just
    # do a little check to make sure you have it installed
    if opt.runBehindTor or opt.runBehindProxy is not None and "socks" in opt.runBehindProxy:
        try:
            import socks
        except ImportError:
            # if you don't we will go ahead and exit the system with an error message
            error(
                "to run behind socks proxies (like Tor) you need to install pysocks `pip install pysocks`, "
                "otherwise use a different proxy protocol")
            sys.exit(1)

    proxy, agent = configure_request_headers(random_agent=opt.useRandomAgent,
                                             agent=opt.usePersonalAgent,
                                             proxy=opt.runBehindProxy,
                                             tor=opt.runBehindTor)

    if opt.providedPayloads is not None:
        payload_list = [
            p.strip() if p[0] == " " else p
            for p in str(opt.providedPayloads).split(",")
        ]
        info("using provided payloads")
    elif opt.payloadList is not None:
        payload_list = [
            p.strip("\n") for p in open(opt.payloadList).readlines()
        ]
        info("using provided payload file '{}'".format(opt.payloadList))
    else:
        payload_list = WAF_REQUEST_DETECTION_PAYLOADS
        info("using default payloads")

    try:
        if opt.runSingleWebsite:
            url_to_use = auto_assign(opt.runSingleWebsite, ssl=opt.forceSSL)
            info("running single web application '{}'".format(url_to_use))
            detection_main(url_to_use,
                           payload_list,
                           agent=agent,
                           proxy=proxy,
                           verbose=opt.runInVerbose,
                           skip_bypass_check=opt.skipBypassChecks,
                           verification_number=opt.verifyNumber,
                           formatted=opt.sendToJSON,
                           tamper_int=opt.amountOfTampersToDisplay)

        elif opt.runMultipleWebsites:
            info("reading from '{}'".format(opt.runMultipleWebsites))
            with open(opt.runMultipleWebsites) as urls:
                for i, url in enumerate(urls, start=1):
                    url = auto_assign(url.strip(), ssl=opt.forceSSL)
                    info("currently running on site #{} ('{}')".format(i, url))
                    detection_main(url,
                                   payload_list,
                                   agent=agent,
                                   proxy=proxy,
                                   verbose=opt.runInVerbose,
                                   skip_bypass_check=opt.skipBypassChecks,
                                   verification_number=opt.verifyNumber,
                                   formatted=opt.sendToJSON,
                                   tamper_int=opt.amountOfTampersToDisplay)
                    print("\n\b")
                    time.sleep(0.5)
    except KeyboardInterrupt:
        fatal("user aborted scanning")
    except Exception as e:
        fatal(
            "WhatWaf has caught an unhandled exception with the error message: '{}'. "
            "You can create an issue here: '{}'".format(str(e), ISSUES_LINK))
Example #15
def main():
    print(BANNER)
    placement_marker = False

    try:
        opts = OptParser.opts()
        retval = []
        if opts.urlToUse is None:
            error("must provide a URL to test")
            exit(1)
        else:
            url_validation = heuristics(opts.urlToUse)
            if not url_validation["validated"]:
                error(
                    "the provided URL could not be validated as a URL, check the URL and try again. a valid URL "
                    "looks something like this: 'http://somesite.com/some/path.php?someid=param'"
                )
                exit(1)
            else:
                if url_validation["query"] == "nogo":
                    warning(
                        "heuristic testing has detected that the provided URL lacks a GET parameter "
                        "this may interfere with testing results")
            try:
                if url_validation["marker"] == "yes":
                    info("marker for attack placement found, prioritizing")
                    placement_marker = True
                if url_validation["multi_marker"]:
                    warning(
                        "multiple markers are not supported, only the first one will be used"
                    )
            except KeyError:
                pass
        if opts.extraHeaders is not None:
            info("using extra headers")
            headers = opts.extraHeaders
        else:
            headers = HEADERS
        if opts.amountToFind is not None:
            amount_of_payloads = opts.amountToFind
        else:
            amount_of_payloads = 25
        if opts.verificationAmount is not None:
            verification_amount = opts.verificationAmount
        else:
            verification_amount = 5
        if opts.providedPayloads is not None:
            info("using provided payloads")
            payloads = []
            for payload in opts.providedPayloads:
                payloads.append(payload)
        elif opts.payloadFile is not None:
            info("using payloads from a file")
            payloads = []
            with open(opts.payloadFile) as f:
                for payload in f.readlines():
                    payloads.append(payload.strip())
        else:
            info("using default payloads")
            payloads = PAYLOADS
        if len(payloads) < 5:
            error("must provide at least 5 payloads")
            exit(1)
        if opts.testTime is not None:
            test_time = opts.testTime
        else:
            test_time = 10
        info("generating payloads")
        generator = PayloadGeneration(payloads, amount_of_payloads)
        info("running payloads through tampering procedures")
        tampers = generator.create_tampers()
        info("payloads tampered successfully")
        payloads = generator.obfuscate_tampers(tampers)
        if opts.genPolyglot:
            info("generating polyglot script")
            polyglot = generate_polyglot()
            info("script generated: {}".format(polyglot))
        info("running payloads")
        times_ran = 0
        for payload in payloads:
            if opts.genPolyglot and times_ran != 1:
                warning("running polyglot first")
                times_ran += 1
                payload = polyglot
            if opts.runVerbose:
                debug("running payload '{}{}{}'".format(
                    opts.usePrefix, payload, opts.useSuffix))
            requester = Requester(opts.urlToUse,
                                  payload,
                                  headers=headers,
                                  proxy=opts.proxyToUse,
                                  throttle=opts.throttleTime)
            soup = requester.make_request(marker=placement_marker,
                                          prefix=opts.usePrefix,
                                          suffix=opts.useSuffix)
            retval.append(soup)
        info("running checks")
        working_payloads = Requester.check_for_script(
            retval,
            verification_amount=verification_amount,
            test_time=test_time)
        if opts.useSuffix != "" or opts.usePrefix != "":
            working_payloads = [
                opts.usePrefix + p + opts.useSuffix for p in working_payloads
            ]
        if len(working_payloads) == 0:
            warning("no working payloads found for requested site")
            info("checking if scripts are being sanitized")
            requester = Requester(opts.urlToUse,
                                  None,
                                  headers=headers,
                                  proxy=opts.proxyToUse,
                                  throttle=opts.throttleTime)
            results = requester.check_for_sanitize()
            if results:
                warning(
                    "it seems that the scripts are being sanitized properly")
            elif results is None:
                warning("hit an error in request, possible WAF?")
            else:
                info(
                    "it appears that the scripts are not being sanitized, try manually?"
                )
            exit(1)
        else:
            info("working payloads:")
            prettify(working_payloads)
            info("found a total of {} working payloads".format(
                len(working_payloads)))
    except Exception as e:
        import sys
        import traceback

        error(
            "something bad happened, failed with error: {}, traceback:".format(
                str(e)))
        print("Traceback (most recent call):\n    {}".format("".join(
            traceback.format_tb(sys.exc_info()[2])).strip()))
    except KeyboardInterrupt:
        error("user quit")
Example #16
def main():
    try:
        opt = Parser().optparse()
        print(BANNER)
        res = Parser().check_opts(opt)
        if res is not None:
            to_search = res
        else:
            to_search = []
        do_not_search = []

        if len(to_search) == 0:
            if opt.singleEmail is None and opt.emailFile is None:
                warn(
                    "you have not provided an email to scan, redirecting to the help menu"
                )
                subprocess.call(["python", "whatbreach.py", "--help"])
                exit(1)
            if opt.singleEmail is not None:
                info("starting search on single email address: {}".format(
                    opt.singleEmail))
                to_search = [opt.singleEmail]
            elif opt.emailFile is not None:
                try:
                    open(opt.emailFile).close()
                except IOError:
                    error("unable to open file, does it exist?")
                    exit(1)
                with open(opt.emailFile) as emails:
                    info("parsing email file: {}".format(opt.emailFile))
                    to_search = emails.readlines()
                info("starting search on a total of {} email(s)".format(
                    len(to_search)))

        for email in to_search:
            email = email.strip()

            if opt.checkTenMinuteEmail:
                if check_ten_minute_email(email,
                                          TEN_MINUTE_EMAIL_EXTENSION_LIST):
                    warn("email: {} appears to be a ten minute email".format(
                        email))
                    answer = prompt("would you like to process the email[y/N]")
                    if answer.startswith("n"):
                        do_not_search.append(email)

            if email not in do_not_search:
                info("searching breached accounts on HIBP related to: {}".
                     format(email))
                account_dumps = BeenPwnedHook(email).account_hooker()
                info("searching for paste dumps on HIBP related to: {}".format(
                    email))

                if opt.searchPastebin:
                    paste_dumps = BeenPwnedHook(email).paste_hooker()
                else:
                    warn("suppressing discovered pastes")
                    paste_dumps = []

                if account_dumps is not None and paste_dumps is not None:
                    info(
                        "found a total of {} database breach(es) and a total of {} paste(s) pertaining to: {}"
                        .format(len(account_dumps), len(paste_dumps), email))
                    if opt.searchDehashed:
                        found_databases = DehashedHook(account_dumps).hooker()
                    else:
                        warn("suppressing discovered databases")
                        found_databases = {}
                    for i, dump in enumerate(paste_dumps, start=1):
                        found_databases["Paste#{}".format(i)] = str(dump)
                    display_found_databases(found_databases)
                    if opt.downloadDatabase:
                        for item in found_databases.keys():
                            if "Paste" not in item:
                                info(
                                    "searching for downloadable databases using query: {}"
                                    .format(item.lower()))
                                downloaded = DatabasesTodayHook(
                                    str(item),
                                    downloads_directory=opt.saveDirectory
                                ).hooker()
                                if len(downloaded) != 0:
                                    info(
                                        "downloaded a total of {} database(s) pertaining to query: {}"
                                        .format(len(downloaded), item))
                                    display_found_databases(downloaded,
                                                            is_downloaded=True)
                                else:
                                    warn(
                                        "no databases appeared to be preset and downloadable related to query: {}"
                                        .format(str(item)))

                elif account_dumps is not None and paste_dumps is None:
                    info(
                        "found a total of {} database breach(es) pertaining to: {}"
                        .format(len(account_dumps), email))
                    if opt.searchDehashed:
                        found_databases = DehashedHook(account_dumps).hooker()
                    else:
                        warn("suppressing discovered databases")
                        found_databases = {}
                    if len(found_databases) != 0:
                        display_found_databases(found_databases)
                        if opt.downloadDatabase:
                            for item in found_databases.keys():
                                if "Paste" not in item:
                                    info(
                                        "searching for downloadable databases using query: {}"
                                        .format(item.lower()))
                                    downloaded = DatabasesTodayHook(
                                        str(item),
                                        downloads_directory=opt.saveDirectory
                                    ).hooker()
                                    if len(downloaded) != 0:
                                        info(
                                            "downloaded a total of {} database(s) pertaining to query: {}"
                                            .format(len(downloaded), item))
                                        display_found_databases(
                                            downloaded, is_downloaded=True)
                                    else:
                                        warn(
                                            "no databases appeared to be preset and downloadable related to query: {}"
                                            .format(str(item)))
                    else:
                        warn(
                            "no output to show, most likely due to output suppression"
                        )
                elif account_dumps is None and paste_dumps is not None:
                    # this should never happen
                    error(
                        "no database dumps found nor any pastes found for: {}".
                        format(email))
                else:
                    error("email {} was not found in any breach".format(email))

        if opt.staySalty:
            # i know that you think that you know shit
            # all the shade that's coming at me I wonder who throws it
            # you can't see the vision boy, you must be outta focus
            # that's a real hot program homie, I wonder who wrote it? oh shit
            # (lyrics ripped from iSpy by Kyle, all I do is steal bruh)
            warn("all this code was stolen with <3 by Eku")
    except KeyboardInterrupt:
        error("user quit the session")
Example #17
def main():
    opt = WhatWafParser().cmd_parser()

    if not len(sys.argv) > 1:
        error("you failed to provide an option, redirecting to help menu")
        time.sleep(2)
        cmd = "python whatwaf.py --help"
        subprocess.call(shlex.split(cmd))
        exit(0)

    # if you feel that you have too many folders or files in the whatwaf home folder
    # we'll give you an option to clean it free of charge
    if opt.cleanHomeFolder:
        import shutil

        try:
            warn(
                "cleaning the home folder: {home}, if you have installed with setup.sh, "
                "this will erase the executable script along with everything inside "
                "of the {home} directory (fingerprints, scripts, copies of whatwaf, etc) "
                "if you are sure you want to do this press ENTER now. If you changed "
                "your mind press CNTRL-C now".format(home=HOME))
            # wait for the user to explicitly confirm before deleting anything
            raw_input("")
            info("attempting to clean home folder")
            shutil.rmtree(HOME)
            info("home folder removed")
        except KeyboardInterrupt:
            fatal("cleaning aborted")
        except OSError:
            fatal("no home folder detected, already cleaned?")
        exit(0)

    if opt.encodePayload:
        spacer = "-" * 30
        payload, load_path = opt.encodePayload
        info("encoding '{}' using '{}'".format(payload, load_path))
        try:
            encoded = encode(payload, load_path)
            success("encoded successfully:")
            print("{}\n{}\n{}".format(spacer, encoded, spacer))
        except (AttributeError, ImportError):
            fatal("invalid load path given, check the load path and try again")
        exit(0)

    if opt.encodePayloadList:
        spacer = "-" * 30
        try:
            file_path, load_path = opt.encodePayloadList
            info(
                "encoding payloads from given file '{}' using given tamper '{}'"
                .format(file_path, load_path))
            with open(file_path) as payloads:
                encoded = [
                    encode(p.strip(), load_path) for p in payloads.readlines()
                ]
                if opt.saveEncodedPayloads is not None:
                    with open(opt.saveEncodedPayloads, "a+") as save:
                        for item in encoded:
                            save.write(item + "\n")
                    success("saved encoded payloads to file '{}' successfully".
                            format(opt.saveEncodedPayloads))
                else:
                    success("payloads encoded successfully:")
                    print(spacer)
                    for i, item in enumerate(encoded, start=1):
                        print("#{} {}".format(i, item))
                    print(spacer)
        except IOError:
            fatal(
                "provided file '{}' appears to not exist, check the path and try again"
                .format(file_path))
        except (AttributeError, ImportError):
            fatal("invalid load path given, check the load path and try again")
        exit(0)

    if opt.updateWhatWaf:
        info("update in progress")
        cmd = shlex.split("git pull origin master")
        subprocess.call(cmd)
        exit(0)

    if not opt.hideBanner:
        print(BANNER)
    check_version()

    format_opts = [opt.sendToYAML, opt.sendToCSV, opt.sendToJSON]
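    # exactly one of the YAML/CSV/JSON flags should accompany the format option; warn on zero or on more than one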
    if opt.formatOutput:
        amount_used = 0
        for item in format_opts:
            if item is True:
                amount_used += 1
        if amount_used > 1:
            warn(
                "multiple file formats have been detected, there is a high probability that this will cause "
                "issues while saving file information. please use only one format at a time"
            )
        elif amount_used == 0:
            warn(
                "output will not be saved to a file as no file format was provided. to save output to file "
                "pass one of the file format flags (IE `-J` for JSON format)",
                minor=True)
    elif any(format_opts) and not opt.formatOutput:
        warn(
            "you've chosen to send the output to a file, but have not formatted the output, no file will be saved "
            "do so by passing the format flag (IE `-F -J` for JSON format)")

    if opt.skipBypassChecks and opt.amountOfTampersToDisplay is not None:
        warn(
            "you've chosen to skip bypass checks and chosen an amount of tamper to display, tampers will be skipped",
            minor=True)

    # there is an extra dependency that you need in order
    # for requests to run behind socks proxies, we'll just
    # do a little check to make sure you have it installed
    if opt.runBehindTor or opt.runBehindProxy is not None and "socks" in opt.runBehindProxy:
        try:
            import socks
        except ImportError:
            # if you don't we will go ahead and exit the system with an error message
            error(
                "to run behind socks proxies (like Tor) you need to install pysocks `pip install pysocks`, "
                "otherwise use a different proxy protocol")
            sys.exit(1)

    proxy, agent = configure_request_headers(random_agent=opt.useRandomAgent,
                                             agent=opt.usePersonalAgent,
                                             proxy=opt.runBehindProxy,
                                             tor=opt.runBehindTor)

    if opt.checkTorConnection:
        import re

        info("checking Tor connection")
        check_url = "https://check.torproject.org/"
        check_regex = re.compile("This browser is configured to use Tor.",
                                 re.I)
        _, _, content, _ = get_page(check_url, proxy=proxy, agent=agent)
        if check_regex.search(str(content)) is not None:
            success("it appears that Tor is working properly")
        else:
            warn("it appears Tor is not configured properly")

    if opt.providedPayloads is not None:
        payload_list = [
            p.strip() if p[0] == " " else p
            for p in str(opt.providedPayloads).split(",")
        ]
        info("using provided payloads")
    elif opt.payloadList is not None:
        try:
            open(opt.payloadList).close()
        except Exception:
            fatal(
                "provided file '{}' does not exists, check the path and try again"
                .format(opt.payloadList))
            exit(1)
        payload_list = [
            p.strip("\n") for p in open(opt.payloadList).readlines()
        ]
        info("using provided payload file '{}'".format(opt.payloadList))
    else:
        payload_list = WAF_REQUEST_DETECTION_PAYLOADS
        info("using default payloads")

    if opt.saveFingerprints:
        warn(
            "fingerprinting is enabled, all fingerprints (WAF related or not) will be saved for further analysis "
            "if the fingerprint already exists it will be skipped",
            minor=True)

    if opt.trafficFile is not None:
        info("saving HTTP traffic to '{}'".format(opt.trafficFile))
    if opt.sleepTimeThrottle != 0:
        info("sleep throttle has been set to {}s".format(
            opt.sleepTimeThrottle))

    try:
        if opt.postRequest:
            request_type = "POST"
        else:
            request_type = "GET"

        request_count = 0

        if opt.runSingleWebsite:
            url_to_use = auto_assign(opt.runSingleWebsite, ssl=opt.forceSSL)
            info("running single web application '{}'".format(url_to_use))
            requests = detection_main(url_to_use,
                                      payload_list,
                                      agent=agent,
                                      proxy=proxy,
                                      verbose=opt.runInVerbose,
                                      skip_bypass_check=opt.skipBypassChecks,
                                      verification_number=opt.verifyNumber,
                                      formatted=opt.formatOutput,
                                      tamper_int=opt.amountOfTampersToDisplay,
                                      use_json=opt.sendToJSON,
                                      use_yaml=opt.sendToYAML,
                                      use_csv=opt.sendToCSV,
                                      fingerprint_waf=opt.saveFingerprints,
                                      provided_headers=opt.extraHeaders,
                                      traffic_file=opt.trafficFile,
                                      throttle=opt.sleepTimeThrottle,
                                      req_timeout=opt.requestTimeout,
                                      post_data=opt.postRequestData,
                                      request_type=request_type,
                                      check_server=opt.determineWebServer,
                                      threaded=opt.threaded)
            request_count = request_count + requests if requests is not None else request_count
        elif opt.runMultipleWebsites:
            info("reading from '{}'".format(opt.runMultipleWebsites))
            try:
                open(opt.runMultipleWebsites)
            except IOError:
                fatal("file: '{}' did not open, does it exist?".format(
                    opt.runMultipleWebsites))
                exit(-1)
            with open(opt.runMultipleWebsites) as urls:
                for i, url in enumerate(urls, start=1):
                    url = auto_assign(url.strip(), ssl=opt.forceSSL)
                    info("currently running on site #{} ('{}')".format(i, url))
                    requests = detection_main(
                        url,
                        payload_list,
                        agent=agent,
                        proxy=proxy,
                        verbose=opt.runInVerbose,
                        skip_bypass_check=opt.skipBypassChecks,
                        verification_number=opt.verifyNumber,
                        formatted=opt.formatOutput,
                        tamper_int=opt.amountOfTampersToDisplay,
                        use_json=opt.sendToJSON,
                        use_yaml=opt.sendToYAML,
                        use_csv=opt.sendToCSV,
                        fingerprint_waf=opt.saveFingerprints,
                        provided_headers=opt.extraHeaders,
                        traffic_file=opt.trafficFile,
                        throttle=opt.sleepTimeThrottle,
                        req_timeout=opt.requestTimeout,
                        post_data=opt.postRequestData,
                        request_type=request_type,
                        check_server=opt.determineWebServer,
                        threaded=opt.threaded)
                    request_count = request_count + requests if requests is not None else request_count
                    print("\n\b")
                    time.sleep(0.5)

        elif opt.burpRequestFile:
            request_data = parse_burp_request(opt.burpRequestFile)
            info("URL parsed from request file: '{}'".format(
                request_data["base_url"]))
            requests = detection_main(
                request_data["base_url"],
                payload_list,
                verbose=opt.runInVerbose,
                skip_bypass_check=opt.skipBypassChecks,
                verification_number=opt.verifyNumber,
                formatted=opt.formatOutput,
                tamper_int=opt.amountOfTampersToDisplay,
                use_json=opt.sendToJSON,
                use_yaml=opt.sendToYAML,
                use_csv=opt.sendToCSV,
                fingerprint_waf=opt.saveFingerprints,
                provided_headers=request_data["request_headers"],
                traffic_file=opt.trafficFile,
                throttle=opt.sleepTimeThrottle,
                req_timeout=opt.requestTimeout,
                post_data=request_data["post_data"],
                request_type=request_data["request_method"],
                check_server=opt.determineWebServer,
                threaded=opt.threaded)
            request_count = request_count + requests if requests is not None else request_count

        elif opt.googlerFile is not None:
            urls = parse_googler_file(opt.googlerFile)
            if urls is not None:
                info("parsed a total of {} URLS from Googler JSON file".format(
                    len(urls)))
                for i, url in enumerate(urls, start=1):
                    info("currently running on '{}' (site #{})".format(url, i))
                    requests = detection_main(
                        url,
                        payload_list,
                        agent=agent,
                        proxy=proxy,
                        verbose=opt.runInVerbose,
                        skip_bypass_check=opt.skipBypassChecks,
                        verification_number=opt.verifyNumber,
                        formatted=opt.formatOutput,
                        tamper_int=opt.amountOfTampersToDisplay,
                        use_json=opt.sendToJSON,
                        use_yaml=opt.sendToYAML,
                        use_csv=opt.sendToCSV,
                        fingerprint_waf=opt.saveFingerprints,
                        provided_headers=opt.extraHeaders,
                        traffic_file=opt.trafficFile,
                        throttle=opt.sleepTimeThrottle,
                        req_timeout=opt.requestTimeout,
                        post_data=opt.postRequestData,
                        request_type=request_type,
                        check_server=opt.determineWebServer,
                        threaded=opt.threaded)
                    request_count = request_count + requests if requests is not None else request_count
                    print("\n\b")
                    time.sleep(0.5)
            else:
                fatal("file failed to load, does it exist?")

        if request_count != 0:
            info("total requests sent: {}".format(request_count))
        else:
            warn("request counter failed to count correctly, deactivating",
                 minor=True)

    except KeyboardInterrupt:
        fatal("user aborted scanning")
    except InvalidURLProvided:
        fatal(
            "the provided URL is unable to be validated, check the URL and try again (you may need to unquote the "
            "HTML entities)")
    except Exception as e:
        import traceback

        sep = "-" * 30
        fatal(
            "WhatWaf has caught an unhandled exception with the error message: '{}'. "
            "You can create an issue here: '{}'".format(str(e), ISSUES_LINK))
        warn("you will need the following information to create an issue:")
        print("{}\nTraceback:\n```\n{}```\nCMD line: `{}`\nVersion: `{}`\n{}".
              format(sep, "".join(traceback.format_tb(sys.exc_info()[2])),
                     hide_sensitive(sys.argv, "-u"), VERSION, sep))
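
# A minimal sketch (an assumption, not WhatWaf's actual implementation) of what a helper
# like hide_sensitive(sys.argv, "-u") above presumably does: mask the value that follows a
# given flag so the target URL is not leaked into the pasted issue body.
def _hide_sensitive_sketch(argv, flag, mask_char="*"):
    argv = list(argv)
    for idx, token in enumerate(argv):
        # mask whatever argument directly follows the sensitive flag
        if token == flag and idx + 1 < len(argv):
            argv[idx + 1] = mask_char * len(argv[idx + 1])
    return " ".join(argv)

# usage: _hide_sensitive_sketch(["whatwaf.py", "-u", "http://target.tld"], "-u") returns the
# command line with the URL replaced by asterisks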
Example #18
def main():
    opt = WhatWafParser().cmd_parser()

    if not len(sys.argv) > 1:
        error("you failed to provide an option, redirecting to help menu")
        time.sleep(2)
        cmd = "python whatwaf.py --help"
        subprocess.call(shlex.split(cmd))
        exit(0)

    # if you feel that you have too many folders or files in the whatwaf home folder
    # we'll give you an option to clean it free of charge
    if opt.cleanHomeFolder:
        import shutil

        try:
            warn(
                "cleaning home folder, all information will be deleted, if you changed your mind press CNTRL-C now"
            )
            # you have three seconds to change your mind
            time.sleep(3)
            info("attempting to clean home folder")
            shutil.rmtree(HOME)
            info("home folder removed")
        except KeyboardInterrupt:
            fatal("cleaning aborted")
        except OSError:
            fatal("no home folder detected, already cleaned?")
        exit(0)

    if opt.encodePayload:
        spacer = "-" * 30
        payload, load_path = opt.encodePayload
        info("encoding '{}' using '{}'".format(payload, load_path))
        try:
            encoded = encode(payload, load_path)
            success("encoded successfully:")
            print("{}\n{}\n{}".format(spacer, encoded, spacer))
        except (AttributeError, ImportError):
            fatal("invalid load path given, check the load path and try again")
        exit(0)

    if opt.encodePayloadList:
        spacer = "-" * 30
        try:
            file_path, load_path = opt.encodePayloadList
            info(
                "encoding payloads from given file '{}' using given tamper '{}'"
                .format(file_path, load_path))
            with open(file_path) as payloads:
                encoded = [
                    encode(p.strip(), load_path) for p in payloads.readlines()
                ]
                if opt.saveEncodedPayloads is not None:
                    with open(opt.saveEncodedPayloads, "a+") as save:
                        for item in encoded:
                            save.write(item + "\n")
                    success("saved encoded payloads to file '{}' successfully".
                            format(opt.saveEncodedPayloads))
                else:
                    success("payloads encoded successfully:")
                    print(spacer)
                    for i, item in enumerate(encoded, start=1):
                        print("#{} {}".format(i, item))
                    print(spacer)
        except IOError:
            fatal(
                "provided file '{}' appears to not exist, check the path and try again"
                .format(file_path))
        except (AttributeError, ImportError):
            fatal("invalid load path given, check the load path and try again")
        exit(0)

    if opt.updateWhatWaf:
        info("update in progress")
        cmd = shlex.split("git pull origin master")
        subprocess.call(cmd)
        exit(0)

    if not opt.hideBanner:
        print(BANNER)

    format_opts = [opt.sendToYAML, opt.sendToCSV, opt.sendToJSON]
    if opt.formatOutput:
        amount_used = 0
        for item in format_opts:
            if item is True:
                amount_used += 1
        if amount_used > 1:
            warn(
                "multiple file formats have been detected, there is a high probability that this will cause "
                "issues while saving file information. please use only one format at a time"
            )
        elif amount_used == 0:
            warn(
                "output will not be saved to a file as no file format was provided. to save output to file "
                "pass one of the file format flags (IE `-J` for JSON format)",
                minor=True)
    elif any(format_opts) and not opt.formatOutput:
        warn(
            "you've chosen to send the output to a file, but have not formatted the output, no file will be saved "
            "do so by passing the format flag (IE `-F -J` for JSON format)")

    if opt.skipBypassChecks and opt.amountOfTampersToDisplay is not None:
        warn(
            "you've chosen to skip bypass checks and chosen an amount of tamper to display, tampers will be skipped",
            minor=True)

    # there is an extra dependency that you need in order
    # for requests to run behind socks proxies, we'll just
    # do a little check to make sure you have it installed
    if opt.runBehindTor or (opt.runBehindProxy is not None and "socks" in opt.runBehindProxy):
        try:
            import socks
        except ImportError:
            # if you don't we will go ahead and exit the system with an error message
            error(
                "to run behind socks proxies (like Tor) you need to install pysocks `pip install pysocks`, "
                "otherwise use a different proxy protocol")
            sys.exit(1)

    proxy, agent = configure_request_headers(random_agent=opt.useRandomAgent,
                                             agent=opt.usePersonalAgent,
                                             proxy=opt.runBehindProxy,
                                             tor=opt.runBehindTor)

    if opt.checkTorConnection:
        import re

        info("checking Tor connection")
        check_url = "https://check.torproject.org/"
        check_regex = re.compile("This browser is configured to use Tor.",
                                 re.I)
        _, content, _ = get_page(check_url, proxy=proxy, agent=agent)
        if check_regex.search(str(content)) is not None:
            success("it appears that Tor is working properly")
        else:
            warn("it appears Tor is not configured properly")

    if opt.providedPayloads is not None:
        payload_list = [
            p.strip() if p[0] == " " else p
            for p in str(opt.providedPayloads).split(",")
        ]
        info("using provided payloads")
    elif opt.payloadList is not None:
        payload_list = [
            p.strip("\n") for p in open(opt.payloadList).readlines()
        ]
        info("using provided payload file '{}'".format(opt.payloadList))
    else:
        payload_list = WAF_REQUEST_DETECTION_PAYLOADS
        info("using default payloads")

    if opt.saveFingerprints:
        warn(
            "fingerprinting is enabled, all fingerprints (WAF related or not) will be saved for further analysis",
            minor=True)

    try:
        if opt.runSingleWebsite:
            url_to_use = auto_assign(opt.runSingleWebsite, ssl=opt.forceSSL)
            info("running single web application '{}'".format(url_to_use))
            detection_main(url_to_use,
                           payload_list,
                           agent=agent,
                           proxy=proxy,
                           verbose=opt.runInVerbose,
                           skip_bypass_check=opt.skipBypassChecks,
                           verification_number=opt.verifyNumber,
                           formatted=opt.formatOutput,
                           tamper_int=opt.amountOfTampersToDisplay,
                           use_json=opt.sendToJSON,
                           use_yaml=opt.sendToYAML,
                           use_csv=opt.sendToCSV,
                           fingerprint_waf=opt.saveFingerprints)

        elif opt.runMultipleWebsites:
            info("reading from '{}'".format(opt.runMultipleWebsites))
            with open(opt.runMultipleWebsites) as urls:
                for i, url in enumerate(urls, start=1):
                    url = auto_assign(url.strip(), ssl=opt.forceSSL)
                    info("currently running on site #{} ('{}')".format(i, url))
                    detection_main(url,
                                   payload_list,
                                   agent=agent,
                                   proxy=proxy,
                                   verbose=opt.runInVerbose,
                                   skip_bypass_check=opt.skipBypassChecks,
                                   verification_number=opt.verifyNumber,
                                   formatted=opt.formatOutput,
                                   tamper_int=opt.amountOfTampersToDisplay,
                                   use_json=opt.sendToJSON,
                                   use_yaml=opt.sendToYAML,
                                   use_csv=opt.sendToCSV,
                                   fingerprint_waf=opt.saveFingerprints)
                    print("\n\b")
                    time.sleep(0.5)
    except KeyboardInterrupt:
        fatal("user aborted scanning")
Example #19
def main():
    try:
        cursor = initialize()
        full_program_start_time = time.time()
        print(BANNER)
        opts = WhatDirParser().optparse()
        if opts.viewDbCache:
            cache = fetch_stored_data(cursor)
            display_database(cache)
            exit(1)
        if opts.urlToUse is not None:
            if opts.wordListToUse is not None:
                try:
                    if opts.runVerbose:
                        debug("checking file")
                    open(opts.wordListToUse)
                except Exception:
                    error("wordlist did not open, does it exist?")
                    exit(1)
                if opts.runVerbose:
                    debug(
                        "file appears to exist, continuing and testing URL: {}"
                        .format(opts.urlToUse))
                test, usable_url = heuristics(opts.urlToUse)
                if not test:
                    fatal(
                        "heuristics have determined that the URL provided is not a valid URL, validate and try again, "
                        "does it have 'http(s)://' in it?")
                    exit(1)
                if opts.runVerbose:
                    debug("URL passed heuristic vailidation, continuing")
                info("processing your file")
                process_start_time = time.time()
                if opts.runVerbose:
                    debug("file processing start time: {}".format(
                        process_start_time))
                target_data = process_file(opts.wordListToUse)
                process_stop_time = time.time()
                if opts.runVerbose:
                    debug(
                        "file process end time: {}".format(process_stop_time))
                info(
                    "file processed in {}(s), total of {} unique string(s) to be used"
                    .format(round(process_stop_time - process_start_time),
                            len(target_data)))
                if opts.runVerbose:
                    debug("configuring headers and proxies")
                proxy, headers = create_request_headers(
                    proxy=opts.requestProxy,
                    headers=opts.extraHeaders,
                    user_agent=opts.userAgentRandomize)
                if opts.runVerbose:
                    debug(
                        "proxy configuration: {}, header configuration: {}, starting attacks"
                        .format(proxy, headers))
                results = RequestMaker(
                    usable_url,
                    target_data,
                    threads=opts.amountOfThreads,
                    quiet=opts.runInQuiet,
                    proxy=proxy,
                    headers=headers,
                    save_all=opts.saveAllAttempts,
                    verbose=opts.runVerbose,
                    timeout=opts.setTimeout).threaded_response_helper()
                info("a total of {} possible result(s) found".format(
                    len(results)))
                if len(results) != 0:
                    was_inserted = insert_website_info(cursor, usable_url,
                                                       results)
                    if was_inserted:
                        info("results saved to database")
                if opts.outputFile:
                    if len(results) != 0:
                        info("saving connections to file")
                        file_path = save_successful_connection(
                            results, usable_url)
                        info("connections saved to CSV file under {}".format(
                            file_path))
                    else:
                        warn("no results found, skipping file creation",
                             minor=True)
            else:
                warn("must provide a wordlist using the `-w/--words` flag")
                exit(1)
        else:
            warn("must provide a target URL using the `-u/--url` flag")
            exit(1)
        full_program_end_time = time.time()
        info("{} took {}(s) to complete with a total of {} requests".format(
            __name__.split(".")[0],
            round(full_program_end_time - full_program_start_time),
            len(target_data)))
    except KeyboardInterrupt:
        fatal("user quit")
Example #20
def main():
    opt = WhatWafParser().cmd_parser()

    if not len(sys.argv) > 1:
        error("you failed to provide an option, redirecting to help menu")
        time.sleep(2)
        cmd = "python whatwaf.py --help"
        subprocess.call(shlex.split(cmd))
        exit(0)

    # if you feel that you have too many folders or files in the whatwaf home folder
    # we'll give you an option to clean it free of charge
    if opt.cleanHomeFolder:
        import shutil

        try:
            warn(
                "cleaning the home folder: {home}, if you have installed with setup.sh, "
                "this will erase the executable script along with everything inside "
                "of the {home} directory (fingerprints, scripts, copies of whatwaf, etc) "
                "if you are sure you want to do this press ENTER now. If you changed "
                "your mind press CNTRL-C now".format(home=HOME))
            # wait for the user to confirm with ENTER (CTRL-C aborts)
            raw_input("")
            info("attempting to clean home folder")
            shutil.rmtree(HOME)
            info("home folder removed")
        except KeyboardInterrupt:
            fatal("cleaning aborted")
        except OSError:
            fatal("no home folder detected, already cleaned?")
        exit(0)

    cursor = initialize()

    if opt.exportEncodedToFile is not None:
        payloads = fetch_data(cursor)
        if len(payloads) != 0:
            exported_payloads_path = export_payloads(payloads,
                                                     opt.exportEncodedToFile)
            info("payloads exported to: {}".format(exported_payloads_path))
        else:
            warn(
                "there appears to be no payloads stored in the database, to create payloads use the following options:"
            )
            proc = subprocess.check_output(["python", "whatwaf.py", "--help"])
            parsed_help = parse_help_menu(str(proc), "encoding options:",
                                          "output options:")
            print(parsed_help)
        exit(1)

    if opt.viewAllCache:
        cached_payloads = fetch_data(cursor)
        cached_urls = fetch_data(cursor, is_payload=False)
        display_cached(cached_urls, cached_payloads)
        exit(0)

    if opt.viewCachedPayloads:
        payloads = fetch_data(cursor)
        if len(payloads) != 0:
            display_cached(None, payloads)
        else:
            warn(
                "there appears to be no payloads stored in the database, to create payloads use the following options:"
            )
            proc = subprocess.check_output(["python", "whatwaf.py", "--help"])
            parsed_help = parse_help_menu(str(proc), "encoding options:",
                                          "output options:")
            print(parsed_help)
        exit(0)

    if opt.viewUrlCache:
        cached_urls = fetch_data(cursor, is_payload=False)
        display_cached(cached_urls, None)
        exit(0)

    if opt.encodePayload is not None:
        spacer = "-" * 30
        payload = opt.encodePayload[0]
        load_path = opt.encodePayload[1:]
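        # each remaining argument is a tamper load path; the loop below feeds the output of one
        # tamper into the next, so the scripts are applied (chained) left to right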
        for load in load_path:
            try:
                payload = encode(payload, load)
            except (AttributeError, ImportError):
                warn(
                    "invalid load path given: '{}', skipping it and continuing"
                    .format(load))
        success("encoded successfully:")
        print("{}\n{}\n{}".format(spacer, payload, spacer))
        insert_payload(payload, cursor)
        info("payload has been cached for future use")
        exit(0)

    if opt.encodePayloadList is not None:
        spacer = "-" * 30
        try:
            file_path, load_path = opt.encodePayloadList
            info(
                "encoding payloads from given file '{}' using given tamper '{}'"
                .format(file_path, load_path))
            with open(file_path) as payloads:
                encoded = [
                    encode(p.strip(), load_path) for p in payloads.readlines()
                ]
                if opt.saveEncodedPayloads is not None:
                    with open(opt.saveEncodedPayloads, "a+") as save:
                        for item in encoded:
                            save.write(item + "\n")
                    success("saved encoded payloads to file '{}' successfully".
                            format(opt.saveEncodedPayloads))
                else:
                    success("payloads encoded successfully:")
                    print(spacer)
                    for i, item in enumerate(encoded, start=1):
                        insert_payload(item, cursor)
                        print("#{} {}".format(i, item))
                    print(spacer)
            info("payloads have been cached for future use")
        except IOError:
            fatal(
                "provided file '{}' appears to not exist, check the path and try again"
                .format(file_path))
        except (AttributeError, ImportError):
            fatal("invalid load path given, check the load path and try again")
        exit(0)

    if opt.updateWhatWaf:
        info("update in progress")
        cmd = shlex.split("git pull origin master")
        subprocess.call(cmd)
        exit(0)

    if not opt.hideBanner:
        print(BANNER)

    if opt.listEncodingTechniques:
        info("gathering available tamper script load paths")
        tamper_list = get_encoding_list(TAMPERS_DIRECTORY,
                                        is_tampers=True,
                                        is_wafs=False)
        for tamper in sorted(tamper_list):
            print(tamper)
        exit(0)

    if opt.viewPossibleWafs:
        import importlib

        info("gathering a list of possible detectable wafs")
        wafs_list = get_encoding_list(PLUGINS_DIRECTORY,
                                      is_tampers=False,
                                      is_wafs=True)
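        # each detection plugin evidently exposes a __product__ string naming the firewall it
        # identifies, which is what gets printed below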
        for i, waf in enumerate(sorted(wafs_list), start=1):
            try:
                imported = importlib.import_module(waf)
                print("{}".format(imported.__product__))
            except ImportError:
                pass
        exit(0)

    # TODO: find a better way to check for updates; the version check is disabled for now
    #info("checking for updates")
    #check_version()

    format_opts = [opt.sendToYAML, opt.sendToCSV, opt.sendToJSON]
    if opt.formatOutput:
        amount_used = 0
        for item in format_opts:
            if item is True:
                amount_used += 1
        if amount_used > 1:
            warn(
                "multiple file formats have been detected, there is a high probability that this will cause "
                "issues while saving file information. please use only one format at a time"
            )
        elif amount_used == 0:
            warn(
                "output will not be saved to a file as no file format was provided. to save output to file "
                "pass one of the file format flags (IE `-J` for JSON format)",
                minor=True)
    elif any(format_opts) and not opt.formatOutput:
        warn(
            "you've chosen to send the output to a file, but have not formatted the output, no file will be saved "
            "do so by passing the format flag (IE `-F -J` for JSON format)")

    if opt.skipBypassChecks and opt.amountOfTampersToDisplay is not None:
        warn(
            "you've chosen to skip bypass checks and chosen an amount of tamper to display, tampers will be skipped",
            minor=True)

    # there is an extra dependency that you need in order
    # for requests to run behind socks proxies, we'll just
    # do a little check to make sure you have it installed
    if opt.runBehindTor or (opt.runBehindProxy is not None and "socks" in opt.runBehindProxy):
        try:
            import socks
        except ImportError:
            # if you don't we will go ahead and exit the system with an error message
            error(
                "to run behind socks proxies (like Tor) you need to install pysocks `pip install pysocks`, "
                "otherwise use a different proxy protocol")
            sys.exit(1)

    proxy, agent = configure_request_headers(random_agent=opt.useRandomAgent,
                                             agent=opt.usePersonalAgent,
                                             proxy=opt.runBehindProxy,
                                             tor=opt.runBehindTor,
                                             tor_port=opt.configTorPort)

    if opt.checkTorConnection:
        import re

        info("checking Tor connection")
        check_url = "https://check.torproject.org/"
        check_regex = re.compile("This browser is configured to use Tor.",
                                 re.I)
        _, _, content, _ = get_page(check_url, proxy=proxy, agent=agent)
        if check_regex.search(str(content)) is not None:
            success("it appears that Tor is working properly")
        else:
            warn("it appears Tor is not configured properly")

    if opt.providedPayloads is not None:
        payload_list = [
            p.strip() if p[0] == " " else p
            for p in str(opt.providedPayloads).split(",")
        ]
        info("using provided payloads")
    elif opt.payloadList is not None:
        try:
            open(opt.payloadList).close()
        except Exception:
            fatal(
                "provided file '{}' does not exists, check the path and try again"
                .format(opt.payloadList))
            exit(1)
        payload_list = [
            p.strip("\n") for p in open(opt.payloadList).readlines()
        ]
        info("using provided payload file '{}'".format(opt.payloadList))
    else:
        payload_list = WAF_REQUEST_DETECTION_PAYLOADS
        info("using default payloads")

    if opt.saveFingerprints:
        warn(
            "fingerprinting is enabled, all fingerprints (WAF related or not) will be saved for further analysis "
            "if the fingerprint already exists it will be skipped",
            minor=True)

    if opt.trafficFile is not None:
        info("saving HTTP traffic to '{}'".format(opt.trafficFile))
    if opt.sleepTimeThrottle != 0:
        info("sleep throttle has been set to {}s".format(
            opt.sleepTimeThrottle))

    try:
        if opt.postRequest:
            request_type = "POST"
        else:
            request_type = "GET"

        request_count = 0

        if opt.runSingleWebsite:
            url_to_use = auto_assign(opt.runSingleWebsite, ssl=opt.forceSSL)
            if opt.checkCachedUrls:
                checked_results = check_url_against_cached(url_to_use, cursor)
                if checked_results is not None:
                    print(
                        RESULTS_TEMPLATE.format("-" * 20,
                                                str(checked_results[1]),
                                                str(checked_results[2]),
                                                str(checked_results[3]),
                                                str(checked_results[4]),
                                                "-" * 20))
                    exit(0)

            if opt.testTargetConnection:
                info(
                    "testing connection to target URL before starting attack {}"
                    .format(
                        "\033[1m\033[33m(Tor is initialized which may increase latency)"
                        if opt.runBehindTor else ""))
                results = test_target_connection(url_to_use,
                                                 proxy=proxy,
                                                 agent=agent,
                                                 headers=opt.extraHeaders)
                if results == "nogo":
                    fatal(
                        "connection to target URL failed multiple times, check connection and try again"
                    )
                    exit(1)
                elif results == "acceptable":
                    warn(
                        "there appears to be some latency on the connection, this may interfere with results",
                        minor=False)
                else:
                    success("connection succeeded, continuing")

            info("running single web application '{}'".format(url_to_use))
            requests = detection_main(
                url_to_use,
                payload_list,
                cursor,
                agent=agent,
                proxy=proxy,
                verbose=opt.runInVerbose,
                skip_bypass_check=opt.skipBypassChecks,
                verification_number=opt.verifyNumber,
                formatted=opt.formatOutput,
                tamper_int=opt.amountOfTampersToDisplay,
                use_json=opt.sendToJSON,
                use_yaml=opt.sendToYAML,
                use_csv=opt.sendToCSV,
                fingerprint_waf=opt.saveFingerprints,
                provided_headers=opt.extraHeaders,
                traffic_file=opt.trafficFile,
                throttle=opt.sleepTimeThrottle,
                req_timeout=opt.requestTimeout,
                post_data=opt.postRequestData,
                request_type=request_type,
                check_server=opt.determineWebServer,
                threaded=opt.threaded,
                force_file_creation=opt.forceFileCreation,
                save_copy_of_file=opt.outputDirectory)
        elif any(o is not None
                 for o in [opt.runMultipleWebsites, opt.burpRequestFile]):
            info("reading from '{}'".format(opt.runMultipleWebsites
                                            or opt.burpRequestFile))
            try:
                open(opt.runMultipleWebsites or opt.burpRequestFile)
            except IOError:
                fatal("file: '{}' did not open, does it exist?".format(
                    opt.runMultipleWebsites or opt.burpRequestFile))
                exit(-1)
            if opt.runMultipleWebsites is not None:
                site_runners = []
                with open(opt.runMultipleWebsites) as urls:
                    for url in urls:
                        possible_url = auto_assign(url.strip(),
                                                   ssl=opt.forceSSL)
                        if opt.checkCachedUrls:
                            url_is_cached = check_url_against_cached(
                                possible_url, cursor)
                            if url_is_cached is not None:
                                print(
                                    RESULTS_TEMPLATE.format(
                                        "-" * 20, str(url_is_cached[1]),
                                        str(url_is_cached[2]),
                                        str(url_is_cached[3]),
                                        str(url_is_cached[4]), "-" * 20))
                            else:
                                site_runners.append(possible_url)
                        else:
                            site_runners.append(possible_url)
            elif opt.burpRequestFile is not None:
                site_runners = parse_burp_request(opt.burpRequestFile)
            else:
                site_runners = []

            if len(site_runners) == 0:
                fatal("no targets parsed from file, exiting")
                exit(1)
            else:
                info("parsed a total of {} target(s) from file".format(
                    len(site_runners)))

            for i, url in enumerate(site_runners, start=1):
                if opt.testTargetConnection:
                    info(
                        "testing connection to target URL before starting attack"
                    )
                    results = test_target_connection(url,
                                                     proxy=proxy,
                                                     agent=agent,
                                                     headers=opt.extraHeaders)
                    if results == "nogo":
                        fatal(
                            "connection to target URL failed multiple times, check connection and try again, skipping"
                        )
                        continue
                    elif results == "acceptable":
                        warn(
                            "there appears to be some latency on the connection, this may interfere with results",
                            minor=False)
                    else:
                        success("connection succeeded, continuing")

                info("currently running on site #{} ('{}')".format(i, url))
                detection_main(url,
                               payload_list,
                               cursor,
                               agent=agent,
                               proxy=proxy,
                               verbose=opt.runInVerbose,
                               skip_bypass_check=opt.skipBypassChecks,
                               verification_number=opt.verifyNumber,
                               formatted=opt.formatOutput,
                               tamper_int=opt.amountOfTampersToDisplay,
                               use_json=opt.sendToJSON,
                               use_yaml=opt.sendToYAML,
                               use_csv=opt.sendToCSV,
                               fingerprint_waf=opt.saveFingerprints,
                               provided_headers=opt.extraHeaders,
                               traffic_file=opt.trafficFile,
                               throttle=opt.sleepTimeThrottle,
                               req_timeout=opt.requestTimeout,
                               post_data=opt.postRequestData,
                               request_type=request_type,
                               check_server=opt.determineWebServer,
                               threaded=opt.threaded,
                               force_file_creation=opt.forceFileCreation,
                               save_copy_of_file=opt.outputDirectory)
                print("\n\b")
                time.sleep(0.5)

        elif opt.googlerFile is not None:
            urls = parse_googler_file(opt.googlerFile)
            if urls is not None:
                info("parsed a total of {} URLS from Googler JSON file".format(
                    len(urls)))
                for i, url in enumerate(urls, start=1):
                    do_url_run = True
                    if opt.checkCachedUrls:
                        url_is_cached = check_url_against_cached(url, cursor)
                        if url_is_cached is not None:
                            print(
                                RESULTS_TEMPLATE.format(
                                    "-" * 20, str(url_is_cached[1]),
                                    str(url_is_cached[2]),
                                    str(url_is_cached[3]),
                                    str(url_is_cached[4]), "-" * 20))
                            do_url_run = False

                    if do_url_run:
                        if opt.testTargetConnection:
                            info(
                                "testing connection to target URL before starting attack"
                            )
                            results = test_target_connection(
                                url,
                                proxy=proxy,
                                agent=agent,
                                headers=opt.extraHeaders)
                            if results == "nogo":
                                fatal(
                                    "connection to target URL failed multiple times, check connection and try again"
                                )
                                continue
                            elif results == "acceptable":
                                warn(
                                    "there appears to be some latency on the connection, this may interfere with "
                                    "results",
                                    minor=False)
                            else:
                                success("connection succeeded, continuing")

                        info("currently running on '{}' (site #{})".format(
                            url, i))
                        requests = detection_main(
                            url,
                            payload_list,
                            cursor,
                            agent=agent,
                            proxy=proxy,
                            verbose=opt.runInVerbose,
                            skip_bypass_check=opt.skipBypassChecks,
                            verification_number=opt.verifyNumber,
                            formatted=opt.formatOutput,
                            tamper_int=opt.amountOfTampersToDisplay,
                            use_json=opt.sendToJSON,
                            use_yaml=opt.sendToYAML,
                            use_csv=opt.sendToCSV,
                            fingerprint_waf=opt.saveFingerprints,
                            provided_headers=opt.extraHeaders,
                            traffic_file=opt.trafficFile,
                            throttle=opt.sleepTimeThrottle,
                            req_timeout=opt.requestTimeout,
                            post_data=opt.postRequestData,
                            request_type=request_type,
                            check_server=opt.determineWebServer,
                            threaded=opt.threaded,
                            force_file_creation=opt.forceFileCreation,
                            save_copy_of_file=opt.outputDirectory)
                        request_count = request_count + requests if requests is not None else request_count
                        print("\n\b")
                        time.sleep(0.5)
            else:
                fatal("file failed to load, does it exist?")

    except KeyboardInterrupt:
        fatal("user aborted scanning")
    except InvalidURLProvided:
        fatal(
            "the provided URL is unable to be validated, check the URL and try again (you may need to unquote the "
            "HTML entities)")
    except Exception as e:
        import traceback

        sep = "-" * 30
        fatal(
            "WhatWaf has caught an unhandled exception with the error message: '{}'."
            .format(str(e)))
        exception_data = "Traceback (most recent call):\n{}{}".format(
            "".join(traceback.format_tb(sys.exc_info()[2])), str(e))
        error("\n{}\n{}\n{}".format(sep, exception_data, sep))
        request_issue_creation(exception_data)
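
# A small sketch (an assumption about parse_googler_file() used above, not the project's own
# code): googler's --json output is a list of result objects, so collecting targets mostly
# amounts to pulling each entry's "url" field and returning None if the file cannot be parsed.
import json

def _parse_googler_sketch(path):
    try:
        with open(path) as results:
            return [entry["url"] for entry in json.load(results) if entry.get("url")]
    except (IOError, ValueError, AttributeError):
        return None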
Example #21
def main():
    opts = Parser().optparse()
    config = initialize()
    cursor = init_sql()
    Parser().check_args(opts, cursor)
    ip_addresses_to_check = set()
    info("sorting by unique IP addresses")
    if opts.singleIp is not None:
        for item in opts.singleIp.split(","):
            if check_real_ip(item.strip()):
                ip_addresses_to_check.add(item)
    if opts.ipAddressList is not None:
        with open(opts.ipAddressList) as data:
            for line in data.readlines():
                line = line.strip()
                if check_real_ip(line):
                    ip_addresses_to_check.add(line)
    if len(ip_addresses_to_check) == 0:
        error(
            "there aren't any IP addresses to check, did you pass valid IP addresses?"
        )
        exit(1)
    ip_addresses_to_check = list(ip_addresses_to_check)
    total_ip_addresses = len(ip_addresses_to_check)
    total_credits_left = int(
        round(
            IpVoidApiConnection(
                config["api_key"], None,
                TOKEN_VALID_CHECK_URL).make_request()["credits_remained"]))
    info(
        "testing if you have enough credits to run a total of {} unique IP address(es)"
        .format(total_ip_addresses))
    amount_to_be_run = test_if_enough_credits(total_ip_addresses,
                                              total_credits_left)
    if opts.hideResults is not None:
        to_hide = configure_hide_list(opts.hideResults.split(","))
    else:
        to_hide = None
    output_results = {"results": []}
    for i, ip in enumerate(ip_addresses_to_check, start=1):
        try:
            res = IpVoidApiConnection(config["api_key"],
                                      ip,
                                      IP_REP_REQUEST_URL,
                                      to_exclude=to_hide).make_request()
            detection_percentage = res["data"]["report"]["blacklists"][
                "detection_rate"]
            total_engines = res["data"]["report"]["blacklists"][
                "engines_count"]
            decimal_percent = float("." + detection_percentage.split("%")[0])
            detection_rate = total_engines * decimal_percent
            output_results["results"].append(
                (ip, detection_rate, total_engines, detection_percentage))
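            # e.g. a reported "7.5%" detection rate across 90 engines works out to
            # 0.075 * 90 = 6.75 engines flagging the address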
        except KeyError:
            warn("IP address {} caused an issue, skipping".format(ip))
            i -= 1
        if i == amount_to_be_run:
            break
    build_output(output_results["results"],
                 cursor,
                 is_json=opts.jsonOutput,
                 is_csv=opts.csvOutput,
                 is_yaml=opts.yamlOutput)
    credits_left = IpVoidApiConnection(
        config["api_key"], None,
        TOKEN_VALID_CHECK_URL).make_request()["credits_remained"]
    warn("you have a total of {} credits left".format(credits_left))