Example #1
    def parse(self):
        """
        Try to extract domain (full, naked, sub-domain), IP and port.
        """
        if self.target.endswith("/"):
            self.target = self.target[:-1]

        if self._is_proto(self.target):
            try:
                self.protocol, self.target = self.target.split("://")
                self.logger.info("{} Protocol detected: {}".format(
                    COLORED_COMBOS.NOTIFY, self.protocol))
                if self.protocol.lower() == "https" and self.port == 80:
                    self.port = 443
            except ValueError:
                raise HostHandlerException(
                    "Could not make domain and protocol from host")

        if ":" in self.target:
            self._extract_port(self.target)

        if self.validate_ip(self.target):
            self.logger.info("{} Detected {} as an IP address.".format(
                COLORED_COMBOS.NOTIFY, self.target))
            self.is_ip = True
        else:
            domains = []
            if self.target.startswith("www."):
                # Obviously an FQDN
                domains.extend((self.target, self.target.split("www.")[1]))
                self.fqdn = self.target
                self.naked = ".".join(self.fqdn.split('.')[1:])
            else:
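                # Could be an FQDN or just a naked domain; decide from the label count below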
                domains.append(self.target)
                domain_levels = self.target.split(".")
                if len(domain_levels) == 2 or (len(domain_levels) == 3
                                               and domain_levels[1] == "co"):
                    self.logger.info("{} Found {} to be a naked domain".format(
                        COLORED_COMBOS.NOTIFY, self.target))
                    self.naked = self.target

            try:
                self.dns_results = DNSHandler.query_dns(
                    domains, self.dns_records)
            except Timeout:
                raise HostHandlerException(
                    "DNS Query timed out. Maybe target has DNS protection ?")

            if self.dns_results.get("CNAME"):
                # A naked (apex) domain shouldn't hold a CNAME record per the RFCs, so one implies an FQDN
                self.logger.info(
                    "{} Found {} to be an FQDN by CNAME presence in DNS records"
                    .format(COLORED_COMBOS.NOTIFY, self.target))

                self.fqdn = self.target
                self.naked = ".".join(self.fqdn.split('.')[1:])
        self.create_host_dir_and_set_file_logger()
        self.write_up()
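
The else branch above classifies the target as a naked domain purely from its label count: two labels, or three when the middle label is "co". A minimal standalone sketch of that heuristic, with a hypothetical function name:

# Standalone sketch of the naked-domain check used above: two labels
# ("example.com"), or three labels where the middle one is "co"
# ("example.co.uk"). Anything longer is treated as an FQDN / sub-domain.
def looks_like_naked_domain(target):
    labels = target.split(".")
    return len(labels) == 2 or (len(labels) == 3 and labels[1] == "co")

assert looks_like_naked_domain("example.com")
assert looks_like_naked_domain("example.co.uk")
assert not looks_like_naked_domain("www.example.com")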
Example #2
    def parse(self):
        """
        Try to extract domain (full, naked, sub-domain), IP and port.
        """
        if self.target.endswith("/"):
            self.target = self.target[:-1]

        if self._is_proto(self.target):
            try:
                self.protocol, self.target = self.target.split("://")
                self.logger.info("{} Protocol detected: {}".format(COLORED_COMBOS.NOTIFY, self.protocol))
                if self.protocol.lower() == "https" and self.port == 80:
                    self.port = 443
            except ValueError:
                raise HostHandlerException("Could not make domain and protocol from host")

        if ":" in self.target:
            self._extract_port(self.target)

        if self.validate_ip(self.target):
            self.logger.info("{} Detected {} as an IP address.".format(COLORED_COMBOS.NOTIFY, self.target))
            self.is_ip = True
        else:
            domains = []
            if self.target.startswith("www."):
                # Obviously an FQDN
                domains.extend((self.target, self.target.split("www.")[1]))
                self.fqdn = self.target
                self.naked = ".".join(self.fqdn.split('.')[1:])
            else:
                # Can't be sure if FQDN or just naked domain
                domains.append(self.target)

            try:
                self.dns_results = DNSHandler.query_dns(domains, self.dns_records)
            except Timeout:
                raise HostHandlerException("DNS Query timed out. Maybe target has DNS protection ?")

            if self.dns_results.get("CNAME"):
                # A naked (apex) domain shouldn't hold a CNAME record per the RFCs, so one implies an FQDN
                self.logger.info("{} Found {} to be an FQDN by CNAME presence in DNS records".format(
                    COLORED_COMBOS.NOTIFY, self.target))

                self.fqdn = self.target
                self.naked = ".".join(self.fqdn.split('.')[1:])
        self.create_host_dir_and_set_file_logger()
        self.write_up()
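
This variant leaves the FQDN/naked decision to DNS: only a CNAME answer marks the target as an FQDN. A hedged usage sketch, modeled on how Example #3 drives the method with Host(target=..., dns_records=...) followed by host.parse(); the import location, the record names and the default port are assumptions, and parse() performs live DNS lookups and creates the host's output directory:

# Hedged usage sketch; the import path and record names below are assumptions.
# from raccoon_src.lib.host import Host, HostHandlerException  # assumed location

host = Host(target="https://www.example.com", dns_records=("A", "CNAME"))
try:
    host.parse()  # performs real DNS queries and sets up the output directory
except HostHandlerException as e:
    print("Could not parse target:", e)
else:
    # After parse(): protocol == "https", fqdn == "www.example.com",
    # naked == "example.com", is_ip is False; assuming port starts at 80,
    # it is bumped to 443 for HTTPS targets.
    print(host.protocol, host.fqdn, host.naked, host.port, host.is_ip)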
Example #3
def main(target,
         tor_routing,
         proxy_list,
         proxy,
         cookies,
         dns_records,
         wordlist,
         threads,
         ignored_response_codes,
         subdomain_list,
         full_scan,
         scripts,
         services,
         port,
         tls_port,
         skip_health_check,
         follow_redirects,
         no_url_fuzzing,
         no_sub_enum,
         skip_nmap_scan,
         # delay,
         outdir,
         quiet):
    try:
        # ------ Arg validation ------
        # Set logging level and Logger instance
        log_level = HelpUtilities.determine_verbosity(quiet)
        logger = SystemOutLogger(log_level)
        intro(logger)

        target = target.lower()
        try:
            HelpUtilities.validate_executables()
        except RaccoonException as e:
            logger.critical(str(e))
            exit(9)
        HelpUtilities.validate_wordlist_args(proxy_list, wordlist, subdomain_list)
        HelpUtilities.validate_proxy_args(tor_routing, proxy, proxy_list)
        HelpUtilities.create_output_directory(outdir)

        if tor_routing:
            logger.info("{} Testing that Tor service is up...".format(COLORED_COMBOS.NOTIFY))
        elif proxy_list:
            if proxy_list and not os.path.isfile(proxy_list):
                raise FileNotFoundError("Not a valid file path, {}".format(proxy_list))
            else:
                logger.info("{} Routing traffic using proxies from list {}\n".format(
                    COLORED_COMBOS.NOTIFY, proxy_list))
        elif proxy:
            logger.info("{} Routing traffic through proxy {}\n".format(COLORED_COMBOS.NOTIFY, proxy))

        # TODO: Sanitize delay argument

        dns_records = tuple(dns_records.split(","))
        ignored_response_codes = tuple(int(code) for code in ignored_response_codes.split(","))

        if port:
            HelpUtilities.validate_port_range(port)

        # ------ /Arg validation ------

        if cookies:
            try:
                cookies = HelpUtilities.parse_cookie_arg(cookies)
            except RaccoonException as e:
                logger.critical("{}{}{}".format(COLOR.RED, str(e), COLOR.RESET))
                exit(2)

        # Set Request Handler instance
        request_handler = RequestHandler(
            proxy_list=proxy_list,
            tor_routing=tor_routing,
            single_proxy=proxy,
            cookies=cookies
        )

        if tor_routing:
            try:
                HelpUtilities.confirm_traffic_routs_through_tor()
                logger.info("{} Validated Tor service is up. Routing traffic anonymously\n".format(
                    COLORED_COMBOS.NOTIFY))
            except RaccoonException as err:
                print("{}{}{}".format(COLOR.RED, str(err), COLOR.RESET))
                exit(3)

        main_loop = asyncio.get_event_loop()

        logger.info("{}### Raccoon Scan Started ###{}\n".format(COLOR.GRAY, COLOR.RESET))
        logger.info("{} Trying to gather information about host: {}".format(COLORED_COMBOS.INFO, target))

        # TODO: Populate array when multiple targets are supported
        # hosts = []
        try:
            host = Host(target=target, dns_records=dns_records)
            host.parse()
        except HostHandlerException as e:
            logger.critical("{}{}{}".format(COLOR.RED, str(e), COLOR.RESET))
            exit(11)

        if not skip_health_check:
            try:
                HelpUtilities.validate_target_is_up(host)
            except RaccoonException as err:
                logger.critical("{}{}{}".format(COLOR.RED, str(err), COLOR.RESET))
                exit(42)

        if not skip_nmap_scan:
            logger.info("\n{} Setting Nmap scan to run in the background".format(COLORED_COMBOS.INFO))
            nmap_scan = NmapScan(host, full_scan, scripts, services, port)
            # # # TODO: Populate array when multiple targets are supported
            # nmap_threads = []
            nmap_thread = threading.Thread(target=Scanner.run, args=(nmap_scan,))
            # Run Nmap scan in the background. Can take some time
            nmap_thread.start()

        # Run first set of checks - TLS, Web/WAF Data, DNS data
        waf = WAF(host)
        tls_info_scanner = TLSHandler(host, tls_port)
        web_app_scanner = WebApplicationScanner(host)
        tasks = (
            asyncio.ensure_future(tls_info_scanner.run()),
            asyncio.ensure_future(waf.detect()),
            asyncio.ensure_future(DNSHandler.grab_whois(host)),
            asyncio.ensure_future(web_app_scanner.run_scan()),
            asyncio.ensure_future(DNSHandler.generate_dns_dumpster_mapping(host, logger))
        )

        main_loop.run_until_complete(asyncio.wait(tasks))

        # Second set of checks - URL fuzzing, Subdomain enumeration
        if not no_url_fuzzing:
            fuzzer = URLFuzzer(host, ignored_response_codes, threads, wordlist, follow_redirects)
            main_loop.run_until_complete(fuzzer.fuzz_all())

        if not host.is_ip:
            sans = tls_info_scanner.sni_data.get("SANs")
            subdomain_enumerator = SubDomainEnumerator(
                host,
                domain_list=subdomain_list,
                sans=sans,
                ignored_response_codes=ignored_response_codes,
                num_threads=threads,
                follow_redirects=follow_redirects,
                no_sub_enum=no_sub_enum
            )
            main_loop.run_until_complete(subdomain_enumerator.run())

        if not skip_nmap_scan:
            if nmap_thread.is_alive():
                logger.info("{} All scans done. Waiting for Nmap scan to wrap up. "
                            "Time left may vary depending on scan type and port range".format(COLORED_COMBOS.INFO))

                while nmap_thread.is_alive():
                    time.sleep(15)

        logger.info("\n{}### Raccoon scan finished ###{}\n".format(COLOR.GRAY, COLOR.RESET))
        os.system("stty sane")

    except KeyboardInterrupt:
        print("{}Keyboard Interrupt detected. Exiting{}".format(COLOR.RED, COLOR.RESET))
        # Fix F'd up terminal after CTRL+C
        os.system("stty sane")
        exit(42)
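
main() combines two concurrency styles: the blocking Nmap scan runs on a background thread, while the TLS, WAF, whois, web application and DNS dumpster checks are scheduled as asyncio tasks and driven with run_until_complete; the thread is then polled until it finishes. A self-contained sketch of that pattern with stand-in names (the original obtains its loop via asyncio.get_event_loop()):

import asyncio
import threading
import time

def blocking_scan():
    # Stand-in for Scanner.run(nmap_scan): long, blocking work kept off the event loop
    time.sleep(2)

async def check(name):
    # Stand-in for the async scanners (TLS, WAF, whois, web application scan)
    await asyncio.sleep(0.5)
    return name

async def first_sweep():
    # Mirrors the tuple of ensure_future(...) calls awaited with asyncio.wait()
    tasks = (asyncio.ensure_future(check("tls")), asyncio.ensure_future(check("waf")))
    await asyncio.wait(tasks)

nmap_thread = threading.Thread(target=blocking_scan)
nmap_thread.start()  # scan runs in the background while the async checks proceed

loop = asyncio.new_event_loop()  # the original uses asyncio.get_event_loop()
loop.run_until_complete(first_sweep())
loop.close()

while nmap_thread.is_alive():  # same wrap-up polling as in main()
    time.sleep(1)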
Example #4
def main(target,
         tor_routing,
         proxy_list,
         proxy,
         dns_records,
         wordlist,
         threads,
         ignored_response_codes,
         subdomain_list,
         full_scan,
         scripts,
         services,
         port,
         tls_port,
         skip_health_check,
         no_redirects,
         no_url_fuzzing,
         no_sub_enum,
         # delay,
         outdir,
         quiet):
    try:
        # ------ Arg validation ------

        # Set logging level and Logger instance
        log_level = HelpUtilities.determine_verbosity(quiet)
        logger = SystemOutLogger(log_level)
        intro(logger)

        target = target.lower()
        try:
            HelpUtilities.validate_executables()
        except RaccoonException as e:
            logger.critical(str(e))
            exit(9)
        HelpUtilities.validate_wordlist_args(proxy_list, wordlist, subdomain_list)
        HelpUtilities.validate_proxy_args(tor_routing, proxy, proxy_list)
        HelpUtilities.create_output_directory(outdir)

        if tor_routing:
            logger.info("{} Testing that Tor service is up...".format(COLORED_COMBOS.NOTIFY))
        elif proxy_list:
            if proxy_list and not os.path.isfile(proxy_list):
                raise FileNotFoundError("Not a valid file path, {}".format(proxy_list))
            else:
                logger.info("{} Routing traffic using proxies from list {}\n".format(
                    COLORED_COMBOS.NOTIFY, proxy_list))
        elif proxy:
            logger.info("Routing traffic through proxy {}\n".format(COLORED_COMBOS.NOTIFY, proxy))

        # TODO: Sanitize delay argument

        dns_records = tuple(dns_records.split(","))
        ignored_response_codes = tuple(int(code) for code in ignored_response_codes.split(","))
        follow_redirects = not no_redirects

        if port:
            HelpUtilities.validate_port_range(port)

        # ------ /Arg validation ------

        # Set Request Handler instance
        request_handler = RequestHandler(proxy_list=proxy_list, tor_routing=tor_routing, single_proxy=proxy)

        if tor_routing:
            try:
                HelpUtilities.confirm_traffic_routs_through_tor()
                logger.info("{} Validated Tor service is up. Routing traffic anonymously\n".format(
                    COLORED_COMBOS.NOTIFY))
            except RaccoonException as err:
                print("{}{}{}".format(COLOR.RED, str(err), COLOR.RESET))
                exit(3)

        if not skip_health_check:
            HelpUtilities.validate_target_is_up(target)

        main_loop = asyncio.get_event_loop()

        logger.info("{}### Raccoon Scan Started ###{}\n".format(COLOR.GRAY, COLOR.RESET))
        logger.info("{} Trying to gather information about host: {}".format(COLORED_COMBOS.INFO, target))

        # TODO: Populate array when multiple targets are supported
        # hosts = []
        host = Host(target=target, dns_records=dns_records)
        host.parse()

        logger.info("\n{} Setting Nmap scan to run in the background".format(COLORED_COMBOS.INFO))
        nmap_scan = NmapScan(host, full_scan, scripts, services, port)
        # # # TODO: Populate array when multiple targets are supported
        # nmap_threads = []
        nmap_thread = threading.Thread(target=Scanner.run, args=(nmap_scan,))
        # Run Nmap scan in the background. Can take some time
        nmap_thread.start()

        # Run first set of checks - TLS, Web/WAF Data, DNS data
        waf = WAF(host)
        tls_info_scanner = TLSHandler(host, tls_port)
        web_app_scanner = WebApplicationScanner(host)
        tasks = (
            asyncio.ensure_future(tls_info_scanner.run()),
            asyncio.ensure_future(waf.detect()),
            asyncio.ensure_future(DNSHandler.grab_whois(host)),
            asyncio.ensure_future(web_app_scanner.run_scan())
        )

        main_loop.run_until_complete(asyncio.wait(tasks))

        # DNS dumpster visualization
        DNSHandler.generate_dns_dumpster_mapping(host, logger)

        # Second set of checks - URL fuzzing, Subdomain enumeration
        if not no_url_fuzzing:
            fuzzer = URLFuzzer(host, ignored_response_codes, threads, wordlist, follow_redirects)
            main_loop.run_until_complete(fuzzer.fuzz_all())

        if not host.is_ip:
            sans = tls_info_scanner.sni_data.get("SANs")
            subdomain_enumerator = SubDomainEnumerator(
                host,
                domain_list=subdomain_list,
                sans=sans,
                ignored_response_codes=ignored_response_codes,
                num_threads=threads,
                follow_redirects=follow_redirects,
                no_sub_enum=no_sub_enum
            )
            main_loop.run_until_complete(subdomain_enumerator.run())

        if nmap_thread.is_alive():
            logger.info("{} All scans done. Waiting for Nmap scan to wrap up. "
                        "Time left may vary depending on scan type and port range".format(COLORED_COMBOS.INFO))

            while nmap_thread.is_alive():
                time.sleep(15)

        logger.info("\n{}### Raccoon scan finished ###{}\n".format(COLOR.GRAY, COLOR.RESET))
        os.system("stty sane")

    except KeyboardInterrupt:
        print("{}Keyboard Interrupt detected. Exiting{}".format(COLOR.RED, COLOR.RESET))
        # Fix F'd up terminal after CTRL+C
        os.system("stty sane")
        exit(42)
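
Both main() variants format their console output with a COLOR namespace of ANSI escape sequences and COLORED_COMBOS message prefixes. A hedged sketch of what such helpers could look like; the real raccoon definitions (escape values and prefix glyphs) are not shown here, so the values below are assumptions:

# Hedged sketch only; the real COLOR / COLORED_COMBOS values are assumptions.
class COLOR:
    RED = "\033[91m"
    GRAY = "\033[90m"
    RESET = "\033[0m"

class COLORED_COMBOS:
    NOTIFY = "[!]"   # hypothetical prefix for "{} Protocol detected: {}"-style messages
    INFO = "[-]"     # hypothetical prefix for informational messages

# Same formatting idiom the code above uses for fatal errors:
print("{}{}{}".format(COLOR.RED, "Keyboard Interrupt detected. Exiting", COLOR.RESET))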