Ejemplo n.º 1
0
 def __init__(self, host):
     """Collect WAF-fingerprinting state for *host*."""
     # Target host and any CNAME records resolved for it; CNAMEs are the
     # first hint of a WAF/CDN fronting the target.
     self.host = host
     self.cnames = host.dns_results.get('CNAME')
     self.request_handler = RequestHandler()
     self.web_server_validator = WebServerValidator()
     self.waf_present = False
     # CNAME substring -> WAF/CDN vendor it indicates
     self.waf_cname_map = {
         "incapdns": "Incapsula",
         "edgekey": "Akamai",
         "akamai": "Akamai",
         "edgesuite": "Akamai",
         "distil": "Distil Networks",
         "cloudfront": "CloudFront",
         "netdna-cdn": "MaxCDN"
     }
     # Vendor -> application-level probe that confirms its presence
     self.waf_app_method_map = {
         "CloudFront": WAFApplicationMethods.detect_cloudfront,
         "Cloudflare": WAFApplicationMethods.detect_cloudflare,
         "Incapsula": WAFApplicationMethods.detect_incapsula,
         "MaxCDN": WAFApplicationMethods.detect_maxcdn,
         "Edgecast": WAFApplicationMethods.detect_edgecast,
         "Distil Networks": WAFApplicationMethods.detect_distil,
         "Sucuri": WAFApplicationMethods.detect_sucuri,
         "Reblaze": WAFApplicationMethods.detect_reblaze
     }
     waf_log = HelpUtilities.get_output_path("{}/WAF.txt".format(self.host.target))
     self.logger = Logger(waf_log)
Ejemplo n.º 2
0
 def __init__(self, host, port_range, vulners_path):
     """Prepare an nmap-vulners scan: resolve the log path and build the argv."""
     self.target = host.target
     self.port_range = port_range
     self.vulners_path = vulners_path
     self.path = HelpUtilities.get_output_path("{}/nmap_vulners_scan.txt".format(self.target))
     self.logger = Logger(self.path)
     # Script is assembled once, up front, from the state above
     self.script = self.build_script()
Ejemplo n.º 3
0
 def __init__(self, host, port_range, full_scan=None, scripts=None, services=None):
     """Store scan options and open a logger at <target>/nmap_scan.txt."""
     self.target = host.target
     self.port_range = port_range
     self.full_scan = full_scan
     self.scripts = scripts
     self.services = services
     self.path = HelpUtilities.get_output_path("{}/nmap_scan.txt".format(self.target))
     self.logger = Logger(self.path)
Ejemplo n.º 4
0
 def __init__(self, host):
     """Set up request plumbing and per-target logging for the web scan."""
     self.host = host
     self.request_handler = RequestHandler()
     self.web_server_validator = WebServerValidator()
     self.web_scan_results = []
     self.headers = None
     self.robots = None
     log_file = HelpUtilities.get_output_path("{}/web_scan.txt".format(self.host.target))
     # Everything before the file name is the per-target output directory
     self.target_dir = "/".join(log_file.split("/")[:-1])
     self.logger = Logger(log_file)
Ejemplo n.º 5
0
 def __init__(self, host, sans, domain_list, ignored_response_codes,
              num_threads, follow_redirects, no_sub_enum):
     """Collect enumeration options and open the subdomains report log."""
     self.host = host
     self.target = host.target
     self.sans = sans
     self.domain_list = domain_list
     # Stored under a slightly different name than the parameter
     self.ignored_error_codes = ignored_response_codes
     self.num_threads = num_threads
     self.follow_redirects = follow_redirects
     self.no_sub_enum = no_sub_enum
     self.request_handler = RequestHandler()
     self.logger = Logger(
         HelpUtilities.get_output_path("{}/subdomains.txt".format(self.target)))
Ejemplo n.º 6
0
 def __init__(self, host, port):
     """Initialize TLS probing state for *host* on *port*."""
     super().__init__(host)
     self.target = host.target
     self.port = port
     # TLS protocol versions probed via openssl s_client
     self._versions = ("tls1", "tls1_1", "tls1_2")
     # OpenSSL likes to hang, Linux timeout to the rescue
     self._base_script = "timeout 10 openssl s_client -connect {}:{} ".format(self.target, self.port)
     # PEM certificate delimiters used when parsing s_client output
     self.begin = "-----BEGIN CERTIFICATE-----"
     self.end = "-----END CERTIFICATE-----"
     self.sni_data = {}
     self.non_sni_data = {}
     self.ciphers = ""
     report = HelpUtilities.get_output_path("{}/tls_report.txt".format(self.target))
     self.logger = Logger(report)
Ejemplo n.º 7
0
 def __init__(self, host):
     """Initialize web-application scan state and supporting services."""
     self.host = host
     self.request_handler = RequestHandler()
     self.web_server_validator = WebServerValidator()
     self.headers = None
     self.robots = None
     self.forms = None
     # Accumulators filled during the crawl
     self.fuzzable_urls = set()
     self.emails = set()
     log_file = HelpUtilities.get_output_path("{}/web_scan.txt".format(self.host.target))
     # Directory that holds all output artifacts for this target
     self.target_dir = "/".join(log_file.split("/")[:-1])
     self.logger = Logger(log_file)
     self.storage_explorer = StorageExplorer(host, self.logger)
Ejemplo n.º 8
0
    def get_log_file_path(self, path):
        """Return a Logger writing to *path*, or to <target>/url_fuzz.txt by default."""
        log_file = path if path else "{}/url_fuzz.txt".format(self.target)
        return Logger(HelpUtilities.get_output_path(log_file))
Ejemplo n.º 9
0
 def __init__(self, host):
     """Gather WAF fingerprinting state for *host*."""
     self.host = host
     self.cnames = host.dns_results.get('CNAME')
     self.request_handler = RequestHandler()
     self.web_server_validator = WebServerValidator()
     # Flipped to True once any detection method fires
     self.waf_present = False
     # Known CNAME fragments and the WAF/CDN vendors they belong to
     self.waf_cname_map = {
         "incapdns": "Incapsula",
         "edgekey": "Akamai",
         "akamai": "Akamai",
         "edgesuite": "Akamai",
         "distil": "Distil Networks",
         "cloudfront": "CloudFront",
         "netdna-cdn": "MaxCDN"
     }
     # Application-level detection callables, keyed by vendor
     self.waf_app_method_map = {
         "CloudFront": WAFApplicationMethods.detect_cloudfront,
         "Cloudflare": WAFApplicationMethods.detect_cloudflare,
         "Incapsula": WAFApplicationMethods.detect_incapsula,
         "MaxCDN": WAFApplicationMethods.detect_maxcdn,
         "Edgecast": WAFApplicationMethods.detect_edgecast,
         "Distil Networks": WAFApplicationMethods.detect_distil,
         "Sucuri": WAFApplicationMethods.detect_sucuri,
         "Reblaze": WAFApplicationMethods.detect_reblaze
     }
     self.logger = Logger(
         HelpUtilities.get_output_path("{}/WAF.txt".format(self.host.target)))
Ejemplo n.º 10
0
 def __init__(self, host, full_scan, scripts, services, port_range):
     """Record scan flags, open the nmap log, and precompute the argv."""
     self.target = host.target
     self.full_scan = full_scan
     self.scripts = scripts
     self.services = services
     self.port_range = port_range
     self.path = HelpUtilities.get_output_path("{}/nmap_scan.txt".format(self.target))
     self.logger = Logger(self.path)
     # Built once here so callers can inspect it before running
     self.script = self.build_script()
Ejemplo n.º 11
0
 def __init__(self, target, dns_records):
     """Normalize the raw target string and seed default connection state."""
     self.target = target.strip()
     self.dns_records = dns_records
     # Default to plain HTTP until probing proves otherwise
     self.port = 80
     self.protocol = "http"
     self.is_ip = False
     # Resolved later during DNS analysis
     self.fqdn = None
     self.naked = None
     self.dns_results = {}
     self.logger = SystemOutLogger()
Ejemplo n.º 12
0
 def __init__(self, target, dns_records):
     """Build a Host record from a raw target string and its DNS records."""
     # Trim stray whitespace from user-supplied input
     self.target = target.strip()
     self.dns_records = dns_records
     self.port = 80
     self.protocol = "http"
     self.is_ip = False
     self.fqdn = None
     self.naked = None
     self.dns_results = {}
     # Host logs straight to stdout rather than to a file
     self.logger = SystemOutLogger()
Ejemplo n.º 13
0
    async def grab_whois(cls, host):
        """Run the system whois client for the naked domain and log key:value lines."""
        # WHOIS only makes sense for a registrable (naked) domain
        if not host.naked:
            return

        script = "whois {}".format(host.naked).split()
        logger = Logger(HelpUtilities.get_output_path("{}/whois.txt".format(host.target)))

        process = await create_subprocess_exec(*script, stdout=PIPE, stderr=PIPE)
        result, err = await process.communicate()

        if process.returncode == 0:
            logger.info("{} {} WHOIS information retrieved".format(COLORED_COMBOS.GOOD, host))
            # Keep only "field: value" lines; skip banners and blank output
            for line in result.decode().strip().split("\n"):
                if ":" in line:
                    logger.debug(line)
Ejemplo n.º 14
0
class NmapVulnersScan(NmapScan):
    """
    NmapVulners scan class (NmapScan subclass)
    """

    def __init__(self, host, port_range, vulners_path):
        super().__init__(host=host, port_range=port_range)
        self.vulners_path = vulners_path
        # Vulners output goes to its own report file, not the base nmap log
        self.path = HelpUtilities.get_output_path("{}/nmap_vulners_scan.txt".format(self.target))
        self.logger = Logger(self.path)

    def build_script(self):
        """Assemble the nmap argv: version detection plus the vulners NSE script."""
        script = ["nmap", "-Pn", "-sV", "--script", self.vulners_path, self.target]
        if self.port_range:
            HelpUtilities.validate_port_range(self.port_range)
            script += ["-p", self.port_range]
            self.logger.info("{} Added port range {} to Nmap script".format(COLORED_COMBOS.NOTIFY, self.port_range))
        return script
Ejemplo n.º 15
0
class NmapScan:
    """
    Nmap scan class
    Will run SYN/TCP scan according to privileges.
    Start Raccoon with sudo for -sS else will run -sT
    """

    def __init__(self, host, port_range, full_scan=None, scripts=None, services=None):
        """Store scan options and open a per-target nmap log file."""
        self.target = host.target
        self.full_scan = full_scan
        self.scripts = scripts
        self.services = services
        self.port_range = port_range
        self.path = HelpUtilities.get_output_path("{}/nmap_scan.txt".format(
            self.target))
        self.logger = Logger(self.path)

    def build_script(self):
        """Translate the stored flags into an nmap argv list."""
        script = ["nmap", "-Pn", self.target]

        if self.port_range:
            HelpUtilities.validate_port_range(self.port_range)
            script += ["-p", self.port_range]
            self.logger.info("{} Added port range {} to Nmap script".format(
                COLORED_COMBOS.NOTIFY, self.port_range))

        if self.full_scan:
            # Full scan implies both service detection and default scripts
            script += ["-sV", "-sC"]
            self.logger.info(
                "{} Added scripts and services to Nmap script".format(
                    COLORED_COMBOS.NOTIFY))
            return script

        if self.scripts:
            self.logger.info(
                "{} Added safe-scripts scan to Nmap script".format(
                    COLORED_COMBOS.NOTIFY))
            script.append("-sC")
        if self.services:
            self.logger.info("{} Added service scan to Nmap script".format(
                COLORED_COMBOS.NOTIFY))
            script.append("-sV")
        return script
Ejemplo n.º 16
0
 def __init__(self, host, port=443):
     """Initialize TLS probing state; defaults to the standard HTTPS port."""
     super().__init__(host)
     self.target = host.target
     self.port = port
     self._versions = ("tls1", "tls1_1", "tls1_2")
     # OpenSSL likes to hang, Linux timeout to the rescue
     self._base_script = "timeout 10 openssl s_client -connect {}:{} ".format(self.target, self.port)
     # Delimiters of a PEM certificate inside s_client output
     self.begin = "-----BEGIN CERTIFICATE-----"
     self.end = "-----END CERTIFICATE-----"
     self.sni_data = {}
     self.non_sni_data = {}
     self.ciphers = ""
     self.logger = Logger(
         HelpUtilities.get_output_path("{}/tls_report.txt".format(self.target)))
Ejemplo n.º 17
0
class NmapVulnersScan:
    """
    Nmap vulners scan class.
    Runs nmap with service/version detection and the vulners NSE script
    to look up known CVEs for detected service versions.
    """

    def __init__(self, host, port_range, vulners_path):
        """Store scan inputs, open the report log, and build the argv."""
        self.target = host.target
        self.port_range = port_range
        self.vulners_path = vulners_path
        self.path = HelpUtilities.get_output_path("{}/nmap_vulners_scan.txt".format(self.target))
        self.logger = Logger(self.path)
        self.script = self.build_script()

    def build_script(self):
        """Build the nmap argv, validating the port range when one is given."""
        script = ["nmap", "-Pn", "-sV", "--script", self.vulners_path, self.target]
        if self.port_range:
            HelpUtilities.validate_port_range(self.port_range)
            script += ["-p", self.port_range]
            self.logger.info("{} Added port range {} to Nmap script".format(COLORED_COMBOS.NOTIFY, self.port_range))
        return script
Ejemplo n.º 18
0
class NmapScan:
    """
    Nmap scan class
    Will run SYN/TCP scan according to privileges.
    Start Raccoon with sudo for -sS else will run -sT
    """

    def __init__(self, host, full_scan, scripts, services, port_range):
        """Record scan flags, open the nmap log, and precompute the argv."""
        self.target = host.target
        self.full_scan = full_scan
        self.scripts = scripts
        self.services = services
        self.port_range = port_range
        self.path = HelpUtilities.get_output_path("{}/nmap_scan.txt".format(self.target))
        self.logger = Logger(self.path)
        # Script is assembled once, up front, from the flags above
        self.script = self.build_script()

    def build_script(self):
        """Compose the nmap argv list according to the configured flags."""
        script = ["nmap", "-Pn", self.target]

        if self.port_range:
            HelpUtilities.validate_port_range(self.port_range)
            script.extend(("-p", self.port_range))
            self.logger.info("{} Added port range {} to Nmap script".format(COLORED_COMBOS.NOTIFY, self.port_range))

        if self.full_scan:
            # Full scan implies both service detection and default scripts
            script.extend(("-sV", "-sC"))
            self.logger.info("{} Added scripts and services to Nmap script".format(COLORED_COMBOS.NOTIFY))
            return script

        if self.scripts:
            self.logger.info("{} Added safe-scripts scan to Nmap script".format(COLORED_COMBOS.NOTIFY))
            script.append("-sC")
        if self.services:
            self.logger.info("{} Added service scan to Nmap script".format(COLORED_COMBOS.NOTIFY))
            script.append("-sV")
        return script
Ejemplo n.º 19
0
 def __init__(self, host, sans, domain_list, ignored_response_codes,
              num_threads, follow_redirects, no_sub_enum):
     """Store enumeration settings and prepare the subdomains report log."""
     self.host = host
     self.target = host.target
     self.sans = sans
     self.domain_list = domain_list
     self.ignored_error_codes = ignored_response_codes
     self.num_threads = num_threads
     self.follow_redirects = follow_redirects
     self.no_sub_enum = no_sub_enum
     self.request_handler = RequestHandler()
     # Accumulates every subdomain discovered across all techniques
     self.sub_domains = set()
     log_file = HelpUtilities.get_output_path(
         "{}/subdomains.txt".format(self.target))
     self.logger = Logger(log_file)
Ejemplo n.º 20
0
    async def grab_whois(cls, host):
        """Fetch WHOIS data for the target's naked domain, if one exists."""
        if not host.naked:
            # Nothing to query: WHOIS needs a registrable domain name
            return

        command = "whois {}".format(host.naked).split()
        log_file = HelpUtilities.get_output_path("{}/whois.txt".format(host.target))
        logger = Logger(log_file)

        process = await create_subprocess_exec(
            *command,
            stdout=PIPE,
            stderr=PIPE
        )
        result, err = await process.communicate()

        if process.returncode == 0:
            logger.info("{} {} WHOIS information retrieved".format(COLORED_COMBOS.GOOD, host))
            # Log only "field: value" lines from the whois output
            lines = result.decode().strip().split("\n")
            for line in lines:
                if ":" in line:
                    logger.debug(line)
Ejemplo n.º 21
0
class TLSHandler(TLSCipherSuiteChecker):
    """Collect TLS data for a target: supported protocol versions (with and
    without SNI), certificate details, SANs, ciphers, and a Heartbleed probe."""

    def __init__(self, host, port):
        super().__init__(host)
        self.target = host.target
        self.port = port
        self._versions = ("tls1", "tls1_1", "tls1_2")
        # OpenSSL likes to hang, hence the timeout call
        self._base_script = "{} 10 openssl s_client -connect {}:{} ".format(
            TIMEOUT, self.target, self.port)
        # PEM certificate delimiters used when parsing s_client output
        self.begin = "-----BEGIN CERTIFICATE-----"
        self.end = "-----END CERTIFICATE-----"
        self.sni_data = {}
        self.non_sni_data = {}
        self.ciphers = ""
        log_file = HelpUtilities.get_output_path("{}/tls_report.txt".format(
            self.target))
        self.logger = Logger(log_file)

    def _tls_results_exist(self):
        """Return True if any TLS data (ciphers, SNI or non-SNI) was collected."""
        if all(not x for x in (self.ciphers, *self.non_sni_data.values(),
                               *self.sni_data.values())):
            return
        return True

    def _are_certificates_identical(self):
        """
        Validate that both certificates exist.
        :returns: True if they are identical, False otherwise
        """
        sni_cert = self.sni_data.get("Certificate_details")
        non_sni_cert = self.non_sni_data.get("Certificate_details")
        if all(cert for cert in (sni_cert, non_sni_cert)
               if cert) and sni_cert == non_sni_cert:
            return True
        return

    def _is_certificate_exists(self, text):
        """Return True if *text* contains a complete PEM certificate block."""
        if self.begin in text and self.end in text:
            return True
        return

    async def _extract_certificate_details(self, data):
        """Run ``openssl x509 -text`` on PEM *data* and return the details
        section, with SAN (DNS:) lines stripped out (SANs are reported
        separately)."""
        process = await create_subprocess_exec(TIMEOUT,
                                               "5",
                                               "openssl",
                                               "x509",
                                               "-text",
                                               stdin=PIPE,
                                               stderr=PIPE,
                                               stdout=PIPE)
        result, err = await process.communicate(
            input=bytes(data, encoding='ascii'))
        result = result.decode().strip()
        cert_details = result.split(self.begin)[0].strip()

        result_lines = cert_details.split("\n")
        # Drop the "DNS:..." SAN line and its preceding header line
        for i, line in enumerate(result_lines):
            if "DNS:" in line:
                result_lines.pop(i)
                result_lines.pop(i - 1)

        cert_details = "\n".join(result_lines)
        return cert_details

    async def _is_heartbleed_vulnerable(self):
        """Probe for the heartbeat TLS extension and log if it is present."""
        script = self._base_script + "-tlsextdebug"
        process = await create_subprocess_exec(*script.split(),
                                               stdout=PIPE,
                                               stderr=PIPE)
        result, err = await process.communicate()
        try:
            if "server extension \"heartbeat\" (id=15)" in result.decode(
            ).strip():
                self.logger.info(
                    "{} Target seems to be vulnerable to Heartbleed - CVE-2014-016. "
                    "see http://heartbleed.com/ for more details.".format(
                        COLORED_COMBOS.GOOD))
        except TypeError:  # Type error means no result
            pass

    async def _execute_ssl_data_extraction(self, sni=False):
        """
        Test for version support (SNI/non-SNI), get all SANs, get certificate details
        :param sni: True will call cause _exec_openssl to call openssl with -servername flag
        """
        # Do for all responses
        responses = await self._run_openssl_sclient_cmd(self._base_script, sni)
        tls_dict = self._parse_openssl_sclient_output(responses)
        # Do for one successful SSL response
        for res in responses:
            if self._is_certificate_exists(res):
                tls_dict["SANs"] = await self._get_sans_from_openssl_cmd(res)
                tls_dict[
                    "Certificate_details"] = await self._extract_certificate_details(
                        res)
                break

        return tls_dict

    async def _run_openssl_sclient_cmd(self, script, sni=False):
        """Run s_client once per TLS version; return the decoded outputs."""
        processes = []
        outputs = []
        if sni:
            script += " -servername {}".format(self.target)
        for v in self._versions:
            curr = (script + ' -{}'.format(v)).split()
            processes.append(await create_subprocess_exec(*curr,
                                                          stdout=PIPE,
                                                          stderr=PIPE))
        for p in processes:
            result, err = await p.communicate()

            outputs.append(result.decode().strip())

        return outputs

    @staticmethod
    async def _get_sans_from_openssl_cmd(data):
        """Extract the set of SAN host names from a PEM certificate."""
        process = await create_subprocess_exec("openssl",
                                               "x509",
                                               "-noout",
                                               "-text",
                                               stdin=PIPE,
                                               stderr=PIPE,
                                               stdout=PIPE)
        result, err = await process.communicate(
            input=bytes(data, encoding='ascii'))
        sans = re.findall(r"DNS:\S*\b", result.decode().strip())
        return {san.replace("DNS:", '') for san in sans}

    def _parse_openssl_sclient_output(self, results):
        """Map each TLS version to whether a certificate handshake succeeded."""
        is_supported = {"TLSv1": False, "TLSv1.1": False, "TLSv1.2": False}
        for res in results:
            if not self._is_certificate_exists(res):
                continue
            for line in res.split('\n'):
                if "Protocol" in line:
                    ver = line.strip().split(':')[1].strip()
                    is_supported[ver] = True
        return is_supported

    def _dictionary_log_procedure(self, result_dict):
        """Pretty-print one result dictionary into the report log."""
        for k, v in result_dict.items():
            if k == "SANs":
                self.logger.debug("{0}:\n{1}\n {2}\n{1}\n".format(
                    k, "-" * 15, "\n".join(v)))
            elif k == "Certificate_details":
                self.logger.debug(v)
            else:
                self.logger.debug("{}: {}\n".format(k, v))

    def write_up(self):
        """Write ciphers plus SNI and non-SNI sections to the report."""
        self.logger.info("{} Supported Ciphers:".format(COLORED_COMBOS.GOOD))
        self.logger.info(self.ciphers + "\n")
        self.logger.debug("-" * 80 + "\n")
        self.logger.debug("SNI Data:\n")
        self._dictionary_log_procedure(self.sni_data)
        self.logger.debug("-" * 80 + "\n")
        self.logger.debug("non-SNI Data:\n")
        self._dictionary_log_procedure(self.non_sni_data)

    async def run(self):
        """Collect ciphers, SNI/non-SNI TLS data and Heartbleed status."""
        self.logger.info("{} Started collecting TLS data for {}".format(
            COLORED_COMBOS.INFO, self.target))
        self.ciphers = await self.scan_ciphers(self.port)
        self.non_sni_data = await self._execute_ssl_data_extraction()
        # Bug fix: the SNI pass must request -servername; previously this call
        # omitted sni=True, so both passes produced identical non-SNI results.
        self.sni_data = await self._execute_ssl_data_extraction(sni=True)
        await self._is_heartbleed_vulnerable()

        if self._tls_results_exist():
            self.logger.info("{} Done collecting TLS data".format(
                COLORED_COMBOS.INFO))
            if self._are_certificates_identical():
                self.non_sni_data[
                    "Certificate_details"] = "Same as SNI Certificate"
            self.write_up()
        else:
            self.logger.info(
                "{} Could not obtain any TLS data from target on port {}. "
                "Target may not support SSL/TLS or supports it on a different port."
                .format(COLORED_COMBOS.BAD, self.port))
Ejemplo n.º 22
0
class SubDomainEnumerator:
    """Discover subdomains of a target via SAN lists, Google dorking,
    DNS dumpster scraping and optional wordlist bruteforce."""

    def __init__(self,
                 host,
                 sans,
                 domain_list,
                 ignored_response_codes,
                 num_threads,
                 follow_redirects,
                 no_sub_enum):
        self.host = host
        self.target = host.target
        self.sans = sans
        self.domain_list = domain_list
        self.ignored_error_codes = ignored_response_codes
        self.num_threads = num_threads
        self.follow_redirects = follow_redirects
        self.no_sub_enum = no_sub_enum
        self.request_handler = RequestHandler()
        # Accumulates subdomains found by the passive techniques
        self.sub_domains = set()
        log_file = HelpUtilities.get_output_path("{}/subdomains.txt".format(self.target))
        self.logger = Logger(log_file)

    async def run(self):
        """Run every enabled enumeration technique in sequence."""
        self.logger.info("{} Enumerating Subdomains".format(COLORED_COMBOS.INFO))
        if self.sans:
            self._extract_from_sans()
        self._google_dork()
        self._extract_from_dns_dumpster()
        if not self.no_sub_enum:
            await self.bruteforce()
        self.logger.info("{} Done enumerating Subdomains".format(COLORED_COMBOS.INFO))

    def _extract_from_sans(self):
        """Looks for different TLDs as well as different sub-domains in SAN list"""
        self.logger.info("{} Trying to find Subdomains in SANs list".format(COLORED_COMBOS.NOTIFY))
        if self.host.naked:
            domain = self.host.naked
            tld_less = domain.split(".")[0]
        else:
            domain = self.host.target.split(".")
            tld_less = domain[1]
            domain = ".".join(domain[1:])

        for san in self.sans:
            if (tld_less in san or domain in san) and self.target != san and not san.startswith("*"):
                self.logger.info("{} Subdomain detected: {}".format(COLORED_COMBOS.GOOD, san))

    def _google_dork(self):
        """Search Google with a site: dork and report non-www subdomains."""
        self.logger.info("{} Trying to discover subdomains in Google".format(COLORED_COMBOS.NOTIFY))
        page = self.request_handler.send(
            "GET",
            url="https://www.google.com/search?q=site:{}&num=100".format(self.target)
        )
        soup = BeautifulSoup(page.text, "lxml")
        results = set(re.findall(r"\w+\.{}".format(self.target), soup.text))
        for subdomain in results:
            if "www." not in subdomain:
                self.logger.info("{} Detected subdomain through Google dorking: {}".format(
                    COLORED_COMBOS.GOOD, subdomain))

    def _extract_from_dns_dumpster(self):
        """Scrape DNS dumpster's hosts table and collect discovered subdomains."""
        self.logger.info("{} Trying to extract subdomains from DNS dumpster".format(COLORED_COMBOS.NOTIFY))
        try:
            page = HelpUtilities.query_dns_dumpster(host=self.host)
            soup = BeautifulSoup(page.text, "lxml")
            hosts_table = soup.select(".table")[-1]
            for row in hosts_table.find_all("tr"):
                tds = row.select("td")
                sub_domain = tds[0].text.split('\n')[0]  # Grab just the URL, truncate other information
                self.logger.info("{} Found subdomain in DNS dumpster: {}".format(COLORED_COMBOS.GOOD, sub_domain))
                self.sub_domains.add(sub_domain)
        except (RaccoonException, IndexError):
            # Bug fix: also catch IndexError — a response page with no result
            # tables (or empty rows) makes the [-1]/[0] lookups raise it.
            self.logger.info("{} Failed to query DNS dumpster for subdomains".format(COLORED_COMBOS.BAD))

    async def bruteforce(self):
        """Fuzz subdomains from the wordlist using the URL fuzzer."""
        path = "{}/subdomain_fuzz.txt".format(self.host.target)

        # If a naked domain exists, use it
        if self.host.naked:
            self.host.target = self.host.naked

        self.logger.info("{} Bruteforcing subdomains".format(COLORED_COMBOS.NOTIFY))
        sub_domain_fuzzer = URLFuzzer(
            host=self.host,
            wordlist=self.domain_list,
            num_threads=self.num_threads,
            ignored_response_codes=self.ignored_error_codes,
            follow_redirects=self.follow_redirects
            )
        await sub_domain_fuzzer.fuzz_all(sub_domain=True, log_file_path=path)
Ejemplo n.º 23
0
class SubDomainEnumerator:
    """Discover subdomains of a target via SAN lists, Google dorking,
    DNS dumpster scraping and optional wordlist bruteforce."""

    def __init__(self, host, sans, domain_list, ignored_response_codes,
                 num_threads, follow_redirects, no_sub_enum):
        """Store enumeration options and open the subdomains report log."""
        self.host = host
        self.target = host.target
        self.sans = sans
        self.domain_list = domain_list
        # Note: parameter is ignored_response_codes, attribute is ignored_error_codes
        self.ignored_error_codes = ignored_response_codes
        self.num_threads = num_threads
        self.follow_redirects = follow_redirects
        self.no_sub_enum = no_sub_enum
        self.request_handler = RequestHandler()
        log_file = HelpUtilities.get_output_path("{}/subdomains.txt".format(
            self.target))
        self.logger = Logger(log_file)

    async def run(self):
        """Run every enabled enumeration technique in sequence."""
        self.logger.info("{} Enumerating Subdomains".format(
            COLORED_COMBOS.INFO))
        if self.sans:
            self._extract_from_sans()
        self._google_dork()
        self._extract_from_dns_dumpster()
        # Bruteforce is opt-out via the no_sub_enum flag
        if not self.no_sub_enum:
            await self.bruteforce()
        self.logger.info("{} Done enumerating Subdomains".format(
            COLORED_COMBOS.INFO))

    def _extract_from_sans(self):
        """Looks for different TLDs as well as different sub-domains in SAN list"""
        self.logger.info("{} Trying to find Subdomains in SANs list".format(
            COLORED_COMBOS.NOTIFY))
        if self.host.naked:
            domain = self.host.naked
            tld_less = domain.split(".")[0]
        else:
            # No naked domain known: strip the leading label from the target
            domain = self.host.target.split(".")
            tld_less = domain[1]
            domain = ".".join(domain[1:])

        # Report SANs that look related to the target but are not the
        # target itself and are not wildcard entries
        for san in self.sans:
            if (tld_less in san or domain
                    in san) and self.target != san and not san.startswith("*"):
                self.logger.info("{} Subdomain detected: {}".format(
                    COLORED_COMBOS.GOOD, san))

    def _google_dork(self):
        """Search Google with a site: dork and report non-www subdomains."""
        self.logger.info("{} Trying to discover subdomains in Google".format(
            COLORED_COMBOS.NOTIFY))
        page = self.request_handler.send(
            "GET",
            url="https://www.google.com/search?q=site:{}&num=100".format(
                self.target))
        soup = BeautifulSoup(page.text, "lxml")
        # Any "<label>.<target>" string in the result page is a candidate
        results = set(re.findall(r"\w+\.{}".format(self.target), soup.text))
        for subdomain in results:
            if "www." not in subdomain:
                self.logger.info(
                    "{} Detected subdomain through Google dorking: {}".format(
                        COLORED_COMBOS.GOOD, subdomain))

    def _extract_from_dns_dumpster(self):
        """Scrape DNS dumpster's hosts table and log discovered subdomains.

        NOTE(review): unlike sibling implementations, results are only
        logged here, not collected into an attribute — confirm intended.
        """
        self.logger.info(
            "{} Trying to extract subdomains from DNS dumpster".format(
                COLORED_COMBOS.NOTIFY))
        try:
            page = HelpUtilities.query_dns_dumpster(host=self.host)
            soup = BeautifulSoup(page.text, "lxml")
            hosts_table = soup.select(".table")[-1]
            for row in hosts_table.find_all("tr"):
                tds = row.select("td")
                sub_domain = tds[0].text.split('\n')[
                    0]  # Grab just the URL, truncate other information
                self.logger.info(
                    "{} Found subdomain in DNS dumpster: {}".format(
                        COLORED_COMBOS.GOOD, sub_domain))
        except (RaccoonException, IndexError):
            # IndexError covers a response page with no result tables
            self.logger.info(
                "{} Failed to query DNS dumpster for subdomains".format(
                    COLORED_COMBOS.BAD))

    async def bruteforce(self):
        """Fuzz subdomains from the wordlist using the URL fuzzer."""
        path = "{}/subdomain_fuzz.txt".format(self.host.target)

        # If a naked domain exists, use it
        # NOTE(review): this mutates the shared host object's target in place
        if self.host.naked:
            self.host.target = self.host.naked

        self.logger.info("{} Bruteforcing subdomains".format(
            COLORED_COMBOS.NOTIFY))
        sub_domain_fuzzer = URLFuzzer(
            host=self.host,
            wordlist=self.domain_list,
            num_threads=self.num_threads,
            ignored_response_codes=self.ignored_error_codes,
            follow_redirects=self.follow_redirects)
        await sub_domain_fuzzer.fuzz_all(sub_domain=True, log_file_path=path)
Ejemplo n.º 24
0
class TLSHandler(TLSCipherSuiteChecker):
    """Collects TLS data for a target host.

    Probes TLSv1/1.1/1.2 support both with and without SNI, extracts the
    server certificate, its SANs and supported ciphers, and performs a
    Heartbleed indicator check. Findings are written to
    <target>/tls_report.txt.
    """

    def __init__(self, host, port=443):
        super().__init__(host)
        self.target = host.target
        self.port = port
        # openssl s_client protocol flags used to probe version support
        self._versions = ("tls1", "tls1_1", "tls1_2")
        # OpenSSL likes to hang, Linux timeout to the rescue
        self._base_script = "timeout 10 openssl s_client -connect {}:{} ".format(self.target, self.port)
        self.begin = "-----BEGIN CERTIFICATE-----"
        self.end = "-----END CERTIFICATE-----"
        self.sni_data = {}
        self.non_sni_data = {}
        self.ciphers = ""
        log_file = HelpUtilities.get_output_path("{}/tls_report.txt".format(self.target))
        self.logger = Logger(log_file)

    def _tls_results_exist(self):
        """Return True if any TLS data (ciphers, SNI or non-SNI results) was collected."""
        if all(not x for x in (self.ciphers, *self.non_sni_data.values(), *self.sni_data.values())):
            return
        return True

    def _are_certificates_identical(self):
        """
        Validate that both certificates exist and compare them.
        :returns: True if both exist and are identical, None otherwise
        """
        sni_cert = self.sni_data.get("Certificate_details")
        non_sni_cert = self.non_sni_data.get("Certificate_details")
        # BUGFIX: the previous "all(cert for ... if cert)" filtered out falsy
        # entries, so two *missing* certificates compared as identical.
        # Require both certificates to actually be present.
        if sni_cert and non_sni_cert and sni_cert == non_sni_cert:
            return True
        return

    def _is_certificate_exists(self, text):
        """Return True if *text* contains a full PEM certificate block."""
        if self.begin in text and self.end in text:
            return True
        return

    async def _extract_certificate_details(self, data):
        """Return human-readable x509 details for a PEM blob, with the SAN
        lines removed (SANs are extracted and logged separately).

        :param data: openssl s_client output containing a PEM certificate
        """
        process = await create_subprocess_exec(
            "timeout", "5", "openssl", "x509", "-text",
            stdin=PIPE,
            stderr=PIPE,
            stdout=PIPE
        )
        result, err = await process.communicate(input=bytes(data, encoding='ascii'))
        result = result.decode().strip()
        # Keep only the text that precedes the PEM block itself
        cert_details = result.split(self.begin)[0].strip()

        result_lines = cert_details.split("\n")
        for i, line in enumerate(result_lines):
            if "DNS:" in line:
                # BUGFIX: drop the SAN values line together with its header
                # line in one slice deletion instead of popping items out of
                # the list while it is being iterated (which skips/shifts
                # entries), then stop scanning.
                del result_lines[i - 1:i + 1]
                break

        cert_details = "\n".join(result_lines)
        return cert_details

    async def is_heartbleed_vulnerable(self):
        """Log a finding if the server advertises the heartbeat TLS extension
        (indicator for CVE-2014-0160)."""
        script = self._base_script + "-tlsextdebug"
        process = await create_subprocess_exec(
            *script.split(),
            stdout=PIPE,
            stderr=PIPE
        )
        result, err = await process.communicate()
        try:
            if "server extension \"heartbeat\" (id=15)" in result.decode().strip():
                self.logger.info("{} Target seems to be vulnerable to Heartbleed - CVE-2014-0160".format(
                    COLORED_COMBOS.GOOD))
        except TypeError:  # Type error means no result
            pass

    async def _execute_ssl_data_extraction(self, sni=False):
        """
        Test for version support (SNI/non-SNI), get all SANs, get certificate details
        :param sni: True will call cause _exec_openssl to call openssl with -servername flag
        :returns: dict of version-support flags plus SANs/Certificate_details
        """
        # Do for all responses
        responses = await self._run_openssl_sclient_cmd(self._base_script, sni)
        tls_dict = self._parse_openssl_sclient_output(responses)
        # Do for one successful SSL response
        for res in responses:
            if self._is_certificate_exists(res):
                tls_dict["SANs"] = await self._get_sans_from_openssl_cmd(res)
                tls_dict["Certificate_details"] = await self._extract_certificate_details(res)
                break

        return tls_dict

    async def _run_openssl_sclient_cmd(self, script, sni=False):
        """Run openssl s_client once per probed TLS version; return the
        decoded stdout of each run.

        :param sni: when True, append -servername so the probe uses SNI
        """
        processes = []
        outputs = []
        if sni:
            script += " -servername {}".format(self.target)
        for v in self._versions:
            curr = (script + ' -{}'.format(v)).split()
            processes.append(
                await create_subprocess_exec(
                    *curr,
                    stdout=PIPE,
                    stderr=PIPE
                )
            )
        for p in processes:
            result, err = await p.communicate()

            outputs.append(result.decode().strip())

        return outputs

    @staticmethod
    async def _get_sans_from_openssl_cmd(data):
        """Extract the set of SAN hostnames from an openssl x509 text dump."""
        process = await create_subprocess_exec(
            "openssl", "x509", "-noout", "-text",
            stdin=PIPE,
            stderr=PIPE,
            stdout=PIPE
        )
        result, err = await process.communicate(input=bytes(data, encoding='ascii'))
        sans = re.findall(r"DNS:\S*\b", result.decode().strip())
        return {san.replace("DNS:", '') for san in sans}

    def _parse_openssl_sclient_output(self, results):
        """Map each probed TLS version to True/False support, judged by whether
        a certificate came back and which Protocol line openssl reported."""
        is_supported = {"TLSv1": False, "TLSv1.1": False, "TLSv1.2": False}
        for res in results:
            if not self._is_certificate_exists(res):
                continue
            for line in res.split('\n'):
                if "Protocol" in line:
                    ver = line.strip().split(':')[1].strip()
                    is_supported[ver] = True
        return is_supported

    def _dictionary_log_procedure(self, result_dict):
        """Pretty-print one result dict (SNI or non-SNI) to the debug log."""
        for k, v in result_dict.items():
            if k == "SANs":
                self.logger.debug("{0}:\n{1}\n {2}\n{1}\n".format(k, "-"*15, "\n".join(v)))
            elif k == "Certificate_details":
                self.logger.debug(v)
            else:
                self.logger.debug("{}: {}\n".format(k, v))

    def write_up(self):
        """Write ciphers plus SNI and non-SNI findings to the report log."""
        self.logger.info("{} Supported Ciphers:".format(COLORED_COMBOS.GOOD))
        self.logger.info(self.ciphers+"\n")
        self.logger.debug("-"*80+"\n")
        self.logger.debug("SNI Data:\n")
        self._dictionary_log_procedure(self.sni_data)
        self.logger.debug("-"*80+"\n")
        self.logger.debug("non-SNI Data:\n")
        self._dictionary_log_procedure(self.non_sni_data)

    async def run(self):
        """Collect all TLS data for the target and write the report."""
        self.logger.info("{} Started collecting TLS data for {}".format(COLORED_COMBOS.INFO, self.target))
        self.ciphers = await self.scan_ciphers(self.port)
        self.non_sni_data = await self._execute_ssl_data_extraction()
        # BUGFIX: the SNI pass must actually enable SNI (-servername);
        # previously both passes ran identically without it.
        self.sni_data = await self._execute_ssl_data_extraction(sni=True)
        await self.is_heartbleed_vulnerable()

        if self._tls_results_exist():
            self.logger.info("{} Done collecting TLS data".format(COLORED_COMBOS.INFO))
            if self._are_certificates_identical():
                self.non_sni_data["Certificate_details"] = "Same as SNI Certificate"
            self.write_up()
        else:
            self.logger.info(
                "{} Could not obtain any TLS data from target on port {}. "
                "Target may not support SSL/TLS or supports it on a different port.".format(
                    COLORED_COMBOS.BAD, self.port)
            )
Ejemplo n.º 25
0
class Host:
    """
    Host parsing, IP to host resolution (and vice verse), etc
    Sets domain/IP, port, protocol. also tries to parse FQDN, naked domain, if possible.
    """
    def __init__(self, target, dns_records):
        self.target = target.strip()
        self.dns_records = dns_records  # record types to query (e.g. A, MX, CNAME)
        self.port = 80
        self.protocol = "http"
        self.is_ip = False
        self.fqdn = None
        self.naked = None
        self.dns_results = {}
        # Log to stdout until an output directory exists for this host
        self.logger = SystemOutLogger()

    def __str__(self):
        return self.target

    def __repr__(self):
        # BUGFIX: __repr__ must return a string; returning the dict itself
        # made every repr() call raise TypeError.
        return str(self.__dict__)

    @staticmethod
    def _create_host_dir(path):
        """Ensure the parent directory of *path* exists."""
        try:
            # exist_ok already suppresses FileExistsError; the except is kept
            # as a belt-and-braces guard against racing creations.
            os.makedirs("/".join(path.split("/")[:-1]), exist_ok=True)
        except FileExistsError:
            pass

    def validate_ip(self, addr=None):
        """Return True if *addr* (or self.target) is a valid IP address, else None."""
        if not addr:
            addr = self.target
        try:
            ip_address(addr.strip())
            return True
        except ValueError:
            return

    def _extract_port(self, addr):
        """Split "host:port", store both; fall back to port 80 on any parse failure."""
        try:
            # Parse into locals first so a failed int() cannot leave the
            # object with a reassigned target but a string port.
            target, port = addr.split(":")
            self.port = int(port)
            self.target = target
            self.logger.info("{} Port detected: {}".format(COLORED_COMBOS.NOTIFY, self.port))
        except ValueError:
            # BUGFIX: both a malformed split (wrong number of ":" parts) and a
            # non-numeric port raise ValueError; the previous IndexError
            # handler could never fire, so those inputs crashed the parser.
            self.logger.info("{} Did not detect port. Using default port 80".format(COLORED_COMBOS.NOTIFY))
            return
        return

    def _is_proto(self, domain=None):
        """Return True if *domain* (or self.target) carries a supported scheme.

        :raises HostHandlerException: for any scheme other than http/https
        """
        if not domain:
            domain = self.target
        if "://" in domain:
            if any(domain.startswith(proto) for proto in ("https", "http")):
                return True
            else:
                raise HostHandlerException("Unknown or unsupported protocol: {}".format(self.target.split("://")[0]))
        return

    def write_up(self):
        """Write the collected DNS query results to the active logger."""
        self.logger.info("{} Writing DNS query results".format(COLORED_COMBOS.GOOD))

        for record in self.dns_results:
            self.logger.debug(record+"\n")
            for value in self.dns_results.get(record):
                self.logger.debug("\t{}".format(value))

    def create_host_dir_and_set_file_logger(self):
        """Create the host's output directory and switch logging to a file."""
        log_file = HelpUtilities.get_output_path("{}/dns_records.txt".format(self.target))
        self._create_host_dir(log_file)
        self.logger = Logger(log_file)

    def parse(self):
        """
        Try to extract domain (full, naked, sub-domain), IP and port.
        """
        if self.target.endswith("/"):
            self.target = self.target[:-1]

        if self._is_proto(self.target):
            try:
                self.protocol, self.target = self.target.split("://")
                self.logger.info("{} Protocol detected: {}".format(COLORED_COMBOS.NOTIFY, self.protocol))
                if self.protocol.lower() == "https" and self.port == 80:
                    self.port = 443
            except ValueError:
                raise HostHandlerException("Could not make domain and protocol from host")

        if ":" in self.target:
            self._extract_port(self.target)

        if self.validate_ip(self.target):
            self.logger.info("{} Detected {} as an IP address.".format(COLORED_COMBOS.NOTIFY, self.target))
            self.is_ip = True
        else:
            domains = []
            if self.target.startswith("www."):
                # Obviously an FQDN
                domains.extend((self.target, self.target.split("www.")[1]))
                self.fqdn = self.target
                self.naked = ".".join(self.fqdn.split('.')[1:])
            else:
                # Can't be sure if FQDN or just naked domain
                domains.append(self.target)

            try:
                self.dns_results = DNSHandler.query_dns(domains, self.dns_records)
            except Timeout:
                raise HostHandlerException("DNS Query timed out. Maybe target has DNS protection ?")

            if self.dns_results.get("CNAME"):
                # Naked domains shouldn't hold CNAME records according to RFC regulations
                self.logger.info("{} Found {} to be an FQDN by CNAME presence in DNS records".format(
                    COLORED_COMBOS.NOTIFY, self.target))

                self.fqdn = self.target
                self.naked = ".".join(self.fqdn.split('.')[1:])
        self.create_host_dir_and_set_file_logger()
        self.write_up()
Ejemplo n.º 26
0
 def create_host_dir_and_set_file_logger(self):
     """Create the host's output directory and switch logging to a file."""
     dns_log = HelpUtilities.get_output_path(
         "{}/dns_records.txt".format(self.target))
     self._create_host_dir(dns_log)
     self.logger = Logger(dns_log)
Ejemplo n.º 27
0
class Host:
    """
    Host parsing, IP to host resolution (and vice verse), etc
    Sets domain/IP, port, protocol. also tries to parse FQDN, naked domain, if possible.
    """
    def __init__(self, target, dns_records):
        self.target = target.strip()
        self.dns_records = dns_records  # record types to query (e.g. A, MX, CNAME)
        self.port = 80
        self.protocol = "http"
        self.is_ip = False
        self.fqdn = None
        self.naked = None
        self.dns_results = {}
        # Log to stdout until an output directory exists for this host
        self.logger = SystemOutLogger()

    def __str__(self):
        return self.target

    def __repr__(self):
        # BUGFIX: __repr__ must return a string; returning the dict itself
        # made every repr() call raise TypeError.
        return str(self.__dict__)

    @staticmethod
    def _create_host_dir(path):
        """Ensure the parent directory of *path* exists."""
        try:
            # exist_ok already suppresses FileExistsError; the except is kept
            # as a belt-and-braces guard against racing creations.
            os.makedirs("/".join(path.split("/")[:-1]), exist_ok=True)
        except FileExistsError:
            pass

    def validate_ip(self, addr=None):
        """Return True if *addr* (or self.target) is a valid IP address, else None."""
        if not addr:
            addr = self.target
        try:
            ip_address(addr.strip())
            return True
        except ValueError:
            return

    def _extract_port(self, addr):
        """Split "host:port", store both; fall back to port 80 on any parse failure."""
        try:
            # Parse into locals first so a failed int() cannot leave the
            # object with a reassigned target but a string port.
            target, port = addr.split(":")
            self.port = int(port)
            self.target = target
            self.logger.info("{} Port detected: {}".format(
                COLORED_COMBOS.NOTIFY, self.port))
        except ValueError:
            # BUGFIX: both a malformed split (wrong number of ":" parts) and a
            # non-numeric port raise ValueError; the previous IndexError
            # handler could never fire, so those inputs crashed the parser.
            self.logger.info(
                "{} Did not detect port. Using default port 80".format(
                    COLORED_COMBOS.NOTIFY))
            return
        return

    def _is_proto(self, domain=None):
        """Return True if *domain* (or self.target) carries a supported scheme.

        :raises HostHandlerException: for any scheme other than http/https
        """
        if not domain:
            domain = self.target
        if "://" in domain:
            if any(domain.startswith(proto) for proto in ("https", "http")):
                return True
            else:
                raise HostHandlerException(
                    "Unknown or unsupported protocol: {}".format(
                        self.target.split("://")[0]))
        return

    def write_up(self):
        """Write the collected DNS query results to the active logger."""
        self.logger.info("{} Writing DNS query results".format(
            COLORED_COMBOS.GOOD))

        for record in self.dns_results:
            self.logger.debug(record + "\n")
            for value in self.dns_results.get(record):
                self.logger.debug("\t{}".format(value))

    def create_host_dir_and_set_file_logger(self):
        """Create the host's output directory and switch logging to a file."""
        log_file = HelpUtilities.get_output_path("{}/dns_records.txt".format(
            self.target))
        self._create_host_dir(log_file)
        self.logger = Logger(log_file)

    def parse(self):
        """
        Try to extract domain (full, naked, sub-domain), IP and port.
        """
        if self.target.endswith("/"):
            self.target = self.target[:-1]

        if self._is_proto(self.target):
            try:
                self.protocol, self.target = self.target.split("://")
                self.logger.info("{} Protocol detected: {}".format(
                    COLORED_COMBOS.NOTIFY, self.protocol))
                if self.protocol.lower() == "https" and self.port == 80:
                    self.port = 443
            except ValueError:
                raise HostHandlerException(
                    "Could not make domain and protocol from host")

        if ":" in self.target:
            self._extract_port(self.target)

        if self.validate_ip(self.target):
            self.logger.info("{} Detected {} as an IP address.".format(
                COLORED_COMBOS.NOTIFY, self.target))
            self.is_ip = True
        else:
            domains = []
            if self.target.startswith("www."):
                # Obviously an FQDN
                domains.extend((self.target, self.target.split("www.")[1]))
                self.fqdn = self.target
                self.naked = ".".join(self.fqdn.split('.')[1:])
            else:
                domains.append(self.target)
                # Two labels (e.g. example.com) is taken as a naked domain
                if len(self.target.split(".")) == 2:
                    self.logger.info("{} Found {} to be a naked domain".format(
                        COLORED_COMBOS.NOTIFY, self.target))
                    self.naked = self.target

            try:
                self.dns_results = DNSHandler.query_dns(
                    domains, self.dns_records)
            except Timeout:
                raise HostHandlerException(
                    "DNS Query timed out. Maybe target has DNS protection ?")

            if self.dns_results.get("CNAME"):
                # Naked domains shouldn't hold CNAME records according to RFC regulations
                self.logger.info(
                    "{} Found {} to be an FQDN by CNAME presence in DNS records"
                    .format(COLORED_COMBOS.NOTIFY, self.target))

                self.fqdn = self.target
                self.naked = ".".join(self.fqdn.split('.')[1:])
        self.create_host_dir_and_set_file_logger()
        self.write_up()
Ejemplo n.º 28
0
class SubDomainEnumerator:
    """Discovers subdomains of a target using the certificate SAN list,
    Google dorking and an optional wordlist bruteforce.
    Findings are logged to <target>/subdomains.txt."""

    def __init__(self,
                 host,
                 sans,
                 domain_list,
                 ignored_response_codes,
                 num_threads,
                 follow_redirects,
                 no_sub_enum):
        self.host = host
        self.target = host.target
        self.sans = sans
        self.domain_list = domain_list
        self.ignored_error_codes = ignored_response_codes
        self.num_threads = num_threads
        self.follow_redirects = follow_redirects
        self.no_sub_enum = no_sub_enum
        self.request_handler = RequestHandler()
        self.sub_domains = set()
        subdomains_log = HelpUtilities.get_output_path("{}/subdomains.txt".format(self.target))
        self.logger = Logger(subdomains_log)

    async def run(self):
        """Run all enabled enumeration techniques in sequence."""
        self.logger.info("\n{} Enumerating Subdomains".format(COLORED_COMBOS.INFO))
        if self.sans:
            self.find_subdomains_in_sans()
        self.google_dork()
        if not self.no_sub_enum:
            await self.bruteforce()
        self.logger.info("\n{} Done enumerating Subdomains".format(COLORED_COMBOS.INFO))

    def find_subdomains_in_sans(self):
        """Looks for different TLDs as well as different sub-domains in SAN list"""
        self.logger.info("{} Trying to find Subdomains in SANs list".format(COLORED_COMBOS.INFO))
        if self.host.naked:
            domain = self.host.naked
            tld_less = domain.split(".")[0]
        else:
            labels = self.host.target.split(".")
            tld_less = labels[1]
            domain = ".".join(labels[1:])

        for san in self.sans:
            related = tld_less in san or domain in san
            if related and san != self.target:
                self.logger.info("{} Subdomain detected: {}".format(COLORED_COMBOS.GOOD, san))

    def google_dork(self):
        """Scrape a site: Google search for hostnames under the target."""
        self.logger.info("{} Trying to discover subdomains in Google".format(COLORED_COMBOS.INFO))
        dork_url = "https://www.google.com/search?q=site:{}&num=100".format(self.target)
        page = self.request_handler.send("GET", url=dork_url)
        soup = BeautifulSoup(page.text, "lxml")
        pattern = r"\w+\.{}".format(self.target)
        for subdomain in set(re.findall(pattern, soup.text)):
            if "www." not in subdomain:
                self.logger.info("{} Detected subdomain through Google dorking: {}".format(
                    COLORED_COMBOS.GOOD, subdomain))

    async def bruteforce(self):
        """Fuzz subdomains from the wordlist; results go to subdomain_fuzz.txt."""
        # Build the log path before any target swap, so output lands under
        # the directory of the host exactly as it was originally provided
        fuzz_log_path = "{}/subdomain_fuzz.txt".format(self.host.target)

        # Prefer the naked domain as the fuzzing base when one exists
        if self.host.naked:
            self.host.target = self.host.naked

        self.logger.info("{} Bruteforcing subdomains".format(COLORED_COMBOS.INFO))
        fuzzer = URLFuzzer(
            host=self.host,
            wordlist=self.domain_list,
            num_threads=self.num_threads,
            ignored_response_codes=self.ignored_error_codes,
            follow_redirects=self.follow_redirects
            )
        await fuzzer.fuzz_all(sub_domain=True, log_file_path=fuzz_log_path)
Ejemplo n.º 29
0
 def __init__(self, host, port_range, vulners_path):
     """Extend the base nmap scan with a vulners script path and its own log file."""
     super().__init__(host=host, port_range=port_range)
     self.vulners_path = vulners_path
     output_file = HelpUtilities.get_output_path(
         "{}/nmap_vulners_scan.txt".format(self.target))
     self.path = output_file
     self.logger = Logger(output_file)
Ejemplo n.º 30
0
class WebApplicationScanner:
    """Gathers web application data from a target: CMS, robots.txt/sitemap.xml,
    security-relevant headers, cookie flags, fuzzable URLs and HTML forms.
    Findings are logged to <target>/web_scan.txt."""

    def __init__(self, host):
        self.host = host
        self.request_handler = RequestHandler()
        self.web_server_validator = WebServerValidator()
        self.headers = None  # populated from the initial GET response
        self.robots = None
        self.forms = None
        self.fuzzable_urls = set()
        log_file = HelpUtilities.get_output_path("{}/web_scan.txt".format(
            self.host.target))
        # Directory where auxiliary artifacts (robots.txt, sitemap.xml) are saved
        self.target_dir = "/".join(log_file.split("/")[:-1])
        self.logger = Logger(log_file)

    def _detect_cms(self, tries=0):
        """
        Detect CMS using whatcms.org.
        Has a re-try mechanism because false negatives may occur
        :param tries: Count of tries for CMS discovery
        """
        # WhatCMS is under CloudFlare which detects and blocks proxied/Tor traffic, hence normal request.
        page = requests.get(
            url="https://whatcms.org/?s={}".format(self.host.target))
        soup = BeautifulSoup(page.text, "lxml")
        found = soup.select(".panel.panel-success")
        if found:
            try:
                # BUGFIX: anchors without an href return None from get();
                # guard before the "in" test to avoid an uncaught TypeError.
                cms = [a for a in soup.select("a")
                       if a.get("href") and "/c/" in a.get("href")][0]
                self.logger.info(
                    "{} CMS detected: target is using {}{}{}".format(
                        COLORED_COMBOS.GOOD, COLOR.GREEN, cms.get("title"),
                        COLOR.RESET))
            except IndexError:
                if tries >= 4:
                    return
                else:
                    self._detect_cms(tries=tries + 1)
        else:
            if tries >= 4:
                return
            else:
                self._detect_cms(tries=tries + 1)

    def _cookie_info(self, jar):
        """Log cookies scoped to the target that are missing the Secure and/or
        HttpOnly flags.

        :param jar: a cookiejar from the session used against the target
        """
        for cookie in jar:
            # Use the Cookie object's public attributes rather than poking
            # its __dict__ (same values, supported API).
            key = cookie.name
            value = cookie.value
            domain = cookie.domain
            secure = cookie.secure
            http_only = cookie.has_nonstandard_attr("HttpOnly")
            try:
                if domain in self.host.target or self.host.target in domain:
                    if not secure or not http_only:
                        current = "%s Cookie: {%s: %s} -" % (
                            COLORED_COMBOS.GOOD, key, value)
                        if not secure and not http_only:
                            current += " both secure and HttpOnly flags are not set"
                        elif not secure:
                            current += " secure flag not set"
                        else:
                            current += " HttpOnly flag not set"
                        self.logger.info(current)

            except TypeError:
                # domain may be None; skip cookies we cannot scope
                continue

    def _server_info(self):
        """Log the Server header if the target exposes one."""
        if self.headers.get("server"):
            self.logger.info("{} Web server detected: {}{}{}".format(
                COLORED_COMBOS.GOOD, COLOR.GREEN, self.headers.get("server"),
                COLOR.RESET))

    def _x_powered_by(self):
        """Log the X-Powered-By header if present (technology disclosure)."""
        if self.headers.get("X-Powered-By"):
            self.logger.info("{} X-Powered-By header detected: {}{}{}".format(
                COLORED_COMBOS.GOOD, COLOR.GREEN,
                self.headers.get("X-Powered-By"), COLOR.RESET))

    def _anti_clickjacking(self):
        """Flag a missing X-Frame-Options header (possible clickjacking)."""
        if not self.headers.get("X-Frame-Options"):
            self.logger.info(
                "{} X-Frame-Options header not detected - target might be vulnerable to clickjacking"
                .format(COLORED_COMBOS.GOOD))

    def _xss_protection(self):
        """Log when the X-XSS-Protection header is present and enabled."""
        xss_header = self.headers.get("X-XSS-PROTECTION")
        if xss_header and "1" in xss_header:
            self.logger.info("{} Found X-XSS-PROTECTION header".format(
                COLORED_COMBOS.BAD))

    def _cors_wildcard(self):
        """Flag a wildcard Access-Control-Allow-Origin header."""
        if self.headers.get("Access-Control-Allow-Origin") == "*":
            self.logger.info("{} CORS wildcard detected".format(
                COLORED_COMBOS.GOOD))

    def _robots(self):
        """Fetch robots.txt and, if it exists, save it under the target dir."""
        res = self.request_handler.send("GET",
                                        url="{}://{}:{}/robots.txt".format(
                                            self.host.protocol,
                                            self.host.target, self.host.port))
        # The HTML check filters out soft-404 pages served with status 200
        if res.status_code != 404 and res.text and "<!DOCTYPE html>" not in res.text:
            self.logger.info("{} Found robots.txt".format(COLORED_COMBOS.GOOD))
            with open("{}/robots.txt".format(self.target_dir), "w") as file:
                file.write(res.text)

    def _sitemap(self):
        """Fetch sitemap.xml and, if it exists, save it under the target dir."""
        res = self.request_handler.send("GET",
                                        url="{}://{}:{}/sitemap.xml".format(
                                            self.host.protocol,
                                            self.host.target, self.host.port))
        # The HTML check filters out soft-404 pages served with status 200
        if res.status_code != 404 and res.text and "<!DOCTYPE html>" not in res.text:
            self.logger.info("{} Found sitemap.xml".format(
                COLORED_COMBOS.GOOD))
            with open("{}/sitemap.xml".format(self.target_dir), "w") as file:
                file.write(res.text)

    def _find_fuzzable_urls(self, soup):
        """Collect and log anchor hrefs containing query parameters."""
        urls = soup.select("a")
        if urls:
            for url in urls:
                href = url.get("href")
                if href and "?" in href and "=" in href:
                    self.fuzzable_urls.add(href)
            if self.fuzzable_urls:
                self.logger.info("{} {} fuzzable URLs discovered".format(
                    COLORED_COMBOS.NOTIFY, len(self.fuzzable_urls)))

                base_target = "{}://{}:{}".format(self.host.protocol,
                                                  self.host.target,
                                                  self.host.port)
                for url in self.fuzzable_urls:
                    if url.startswith("/"):
                        self.logger.debug("\t{}{}".format(base_target, url))
                    else:
                        self.logger.debug("\t{}".format(url))

    def _find_forms(self, soup):
        """Collect and log all HTML forms on the page."""
        self.forms = soup.select("form")
        if self.forms:
            self.logger.info("{} {} HTML forms discovered".format(
                COLORED_COMBOS.NOTIFY, len(self.forms)))
            for form in self.forms:
                form_id = form.get("id")
                form_class = form.get("class")
                form_method = form.get("method")
                form_action = form.get("action")
                self.logger.debug(
                    "Form details: ID: {}, Class: {}, Method: {}, action: {}".
                    format(form_id, form_class, form_method, form_action))

    def _find_emails(self, soup):
        # Placeholder - email harvesting not implemented yet
        pass

    def get_web_application_info(self):
        """Fetch the web root once and run every analysis step on the response.

        :raises WebAppScannerException: when the server cannot be reached
        """
        session = self.request_handler.get_new_session()
        try:
            with session:
                # Test if target is serving HTTP requests
                response = session.get(timeout=20,
                                       url="{}://{}:{}".format(
                                           self.host.protocol,
                                           self.host.target, self.host.port))
                self.headers = response.headers
                self._detect_cms()
                self._robots()
                self._sitemap()
                self._server_info()
                self._x_powered_by()
                self._cors_wildcard()
                self._xss_protection()
                self._anti_clickjacking()
                self._cookie_info(session.cookies)

                soup = BeautifulSoup(response.text, "lxml")
                self._find_fuzzable_urls(soup)
                self._find_forms(soup)

        except (ConnectionError, TooManyRedirects) as e:
            raise WebAppScannerException("Couldn't get response from server.\n"
                                         "Caused due to exception: {}".format(
                                             str(e)))

    async def run_scan(self):
        """Validate the web server is up, then gather web application data."""
        self.logger.info("{} Trying to collect {} web application data".format(
            COLORED_COMBOS.INFO, self.host))
        try:
            self.web_server_validator.validate_target_webserver(self.host)
            self.get_web_application_info()
        except WebServerValidatorException:
            self.logger.info(
                "{} Target does not seem to have an active web server on port: {}. "
                "No web application data will be gathered.".format(
                    COLORED_COMBOS.NOTIFY, self.host.port))
            return
Ejemplo n.º 31
0
 def create_host_dir_and_set_file_logger(self):
     """Create the host's output directory and switch logging from stdout
     to the per-host dns_records.txt file."""
     log_file = HelpUtilities.get_output_path("{}/dns_records.txt".format(self.target))
     self._create_host_dir(log_file)
     self.logger = Logger(log_file)
Ejemplo n.º 32
0
class WAF:
    """Detects WAF/CDN presence for a host, first by fingerprinting CNAME
    records and then by probing the live web application.
    Findings are logged to <target>/WAF.txt."""

    def __init__(self, host):
        self.host = host
        self.cnames = host.dns_results.get('CNAME')
        self.request_handler = RequestHandler()
        self.web_server_validator = WebServerValidator()
        self.waf_present = False
        # CNAME substring -> WAF/CDN vendor
        self.waf_cname_map = {
            "incapdns": "Incapsula",
            "edgekey": "Akamai",
            "akamai": "Akamai",
            "edgesuite": "Akamai",
            "distil": "Distil Networks",
            "cloudfront": "CloudFront",
            "netdna-cdn": "MaxCDN"
        }
        # Vendor -> response-based detection callable
        self.waf_app_method_map = {
            "CloudFront": WAFApplicationMethods.detect_cloudfront,
            "Cloudflare": WAFApplicationMethods.detect_cloudflare,
            "Incapsula": WAFApplicationMethods.detect_incapsula,
            "MaxCDN": WAFApplicationMethods.detect_maxcdn,
            "Edgecast": WAFApplicationMethods.detect_edgecast,
            "Distil Networks": WAFApplicationMethods.detect_distil,
            "Sucuri": WAFApplicationMethods.detect_sucuri,
            "Reblaze": WAFApplicationMethods.detect_reblaze
        }
        waf_log = HelpUtilities.get_output_path("{}/WAF.txt".format(
            self.host.target))
        self.logger = Logger(waf_log)

    def _waf_detected(self, name):
        """Log an application-level WAF hit and mark the WAF as present."""
        self.waf_present = True
        self.logger.info(
            "{} Detected WAF presence in web application: {}{}{}".format(
                COLORED_COMBOS.BAD, COLOR.RED, name, COLOR.RESET))

    def _detect_by_cname(self):
        """Match each known CNAME fingerprint against the host's CNAME records."""
        for fingerprint, vendor in self.waf_cname_map.items():
            if any(fingerprint in str(cname) for cname in self.cnames):
                self.logger.info(
                    "{} Detected WAF presence in CNAME: {}{}{}".format(
                        COLORED_COMBOS.BAD, COLOR.RED,
                        vendor, COLOR.RESET))
                self.waf_present = True

    def _detect_by_application(self):
        """GET the web root and run every vendor fingerprint on the response.

        :raises WAFException: when the server cannot be reached
        """
        try:
            session = self.request_handler.get_new_session()
            root_url = "{}://{}:{}".format(
                self.host.protocol, self.host.target, self.host.port)
            response = session.get(timeout=20,
                                   allow_redirects=True,
                                   url=root_url)
            for vendor, fingerprint_method in self.waf_app_method_map.items():
                if fingerprint_method(response):
                    self._waf_detected(vendor)

        except (ConnectionError, TooManyRedirects) as e:
            raise WAFException("Couldn't get response from server.\n"
                               "Caused due to exception: {}".format(str(e)))

    async def detect(self):
        """Run CNAME-based detection, then application-level detection."""
        self.logger.info("{} Trying to detect WAF presence in {}".format(
            COLORED_COMBOS.INFO, self.host))
        if self.cnames:
            self._detect_by_cname()
        try:
            self.web_server_validator.validate_target_webserver(self.host)
            self._detect_by_application()

            if not self.waf_present:
                self.logger.info(
                    "{} Did not detect WAF presence in target".format(
                        COLORED_COMBOS.GOOD))
        except WebServerValidatorException:
            self.logger.info(
                "{} Target does not seem to have an active web server on port {}. "
                "No WAF could be detected on an application level.".format(
                    COLORED_COMBOS.NOTIFY, self.host.port))
Ejemplo n.º 33
0
class WAF:
    """Detects Web Application Firewall / CDN presence for a target host.

    Two complementary strategies are used: scanning the host's DNS CNAME
    records for known vendor substrings, and fingerprinting the live web
    application's HTTP response.
    """

    def __init__(self, host):
        self.host = host
        self.cnames = host.dns_results.get('CNAME')
        self.request_handler = RequestHandler()
        self.web_server_validator = WebServerValidator()
        self.waf_present = False
        # Substring expected inside a CNAME record -> WAF/CDN vendor name.
        self.waf_cname_map = {
            "incapdns": "Incapsula",
            "edgekey": "Akamai",
            "akamai": "Akamai",
            "edgesuite": "Akamai",
            "distil": "Distil Networks",
            "cloudfront": "CloudFront",
            "netdna-cdn": "MaxCDN"
        }
        # Vendor name -> callable that fingerprints an HTTP response.
        self.waf_app_method_map = {
            "CloudFront": WAFApplicationMethods.detect_cloudfront,
            "Cloudflare": WAFApplicationMethods.detect_cloudflare,
            "Incapsula": WAFApplicationMethods.detect_incapsula,
            "MaxCDN": WAFApplicationMethods.detect_maxcdn,
            "Edgecast": WAFApplicationMethods.detect_edgecast,
            "Distil Networks": WAFApplicationMethods.detect_distil,
            "Sucuri": WAFApplicationMethods.detect_sucuri,
            "Reblaze": WAFApplicationMethods.detect_reblaze
        }
        self.logger = Logger(
            HelpUtilities.get_output_path("{}/WAF.txt".format(self.host.target)))

    def _waf_detected(self, name):
        # Record and announce an application-level WAF hit.
        self.logger.info(
            "{} Detected WAF presence in web application: {}{}{}".format(
                COLORED_COMBOS.BAD, COLOR.RED, name, COLOR.RESET))
        self.waf_present = True

    def _detect_by_cname(self):
        """Flag every vendor whose marker appears in any CNAME record."""
        cname_strings = [str(cname) for cname in self.cnames]
        # No early exit on purpose — several vendors may be present.
        for marker, vendor in self.waf_cname_map.items():
            if any(marker in record for record in cname_strings):
                self.logger.info(
                    "{} Detected WAF presence in CNAME: {}{}{}".format(
                        COLORED_COMBOS.BAD, COLOR.RED, vendor, COLOR.RESET))
                self.waf_present = True

    def _detect_by_application(self):
        """Fetch the target's landing page once and run every known WAF
        fingerprint against the response.

        Raises:
            WAFException: when the HTTP request fails outright.
        """
        target_url = "{}://{}:{}".format(
            self.host.protocol, self.host.target, self.host.port)
        try:
            session = self.request_handler.get_new_session()
            response = session.get(url=target_url,
                                   timeout=20,
                                   allow_redirects=True)
            for vendor, check in self.waf_app_method_map.items():
                if check(response):
                    self._waf_detected(vendor)
        except (ConnectionError, TooManyRedirects) as e:
            raise WAFException("Couldn't get response from server.\n"
                               "Caused due to exception: {}".format(str(e)))

    async def detect(self):
        """Run both detection strategies and log the combined outcome."""
        self.logger.info(
            "{} Trying to detect WAF presence in {}".format(
                COLORED_COMBOS.INFO, self.host))
        # CNAME-based detection only applies when records were resolved.
        if self.cnames:
            self._detect_by_cname()
        try:
            self.web_server_validator.validate_target_webserver(self.host)
            self._detect_by_application()
        except WebServerValidatorException:
            # No reachable web server — application-level detection is
            # impossible, so stop here.
            self.logger.info(
                "{} Target does not seem to have an active web server on port {}. "
                "No WAF could be detected on an application level.".format(
                    COLORED_COMBOS.NOTIFY, self.host.port))
            return
        if not self.waf_present:
            self.logger.info(
                "{} Did not detect WAF presence in target".format(
                    COLORED_COMBOS.GOOD))