Example no. 1
    async def worker(self, queue: asyncio.Queue, resolvers: Iterator[str],
                     root_domain: str, bad_responses: Set[str]):
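        """Consume domains from the queue until the "__exit__" sentinel is seen, resolve their CNAME
        records and report the ones that look vulnerable to subdomain takeover."""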
        while True:
            try:
                domain = queue.get_nowait().strip()
            except asyncio.QueueEmpty:
                await asyncio.sleep(.05)
            else:
                queue.task_done()
                if domain == "__exit__":
                    break

                try:
                    resolver = dns.asyncresolver.Resolver()
                    resolver.timeout = 10.
                    resolver.nameservers = [
                        next(resolvers) for __ in range(10)
                    ]
                    answers = await resolver.resolve(domain,
                                                     'CNAME',
                                                     raise_on_no_answer=False)
                except (socket.gaierror, UnicodeError):
                    continue
                except (dns.asyncresolver.NXDOMAIN, dns.exception.Timeout):
                    continue
                except (dns.name.EmptyLabel,
                        dns.resolver.NoNameservers) as exception:
                    logging.warning(f"{domain}: {exception}")
                    continue

                for answer in answers:
                    cname = answer.to_text().strip(".")

                    if cname in bad_responses:
                        continue

                    log_verbose(_(f"Record {domain} points to {cname}"))

                    try:
                        if get_root_domain(cname) == root_domain:
                            # If it is an internal CNAME (like www.target.tld to target.tld) just ignore it
                            continue
                    except (TldDomainNotFound, TldBadUrl):
                        logging.warning(f"{cname} is not a valid domain name")
                        continue

                    if await self.takeover.check(domain, cname):
                        log_red("---")
                        log_red(
                            _(f"CNAME {domain} to {cname} seems vulnerable to takeover"
                              ))
                        log_red("---")

                        await self.add_vuln_high(
                            category=NAME,
                            info=
                            _(f"CNAME {domain} to {cname} seems vulnerable to takeover"
                              ),
                            request=Request(f"https://{domain}/"),
                            wstg=WSTG_CODE)
Example no. 2
    async def _async_try_login_post(
            self, username: str, password: str,
            auth_url: str) -> Tuple[bool, dict, List[str]]:
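        """Fetch auth_url, fill the detected login form with the given credentials and submit it.

        Returns the login status, the names of the form fields used and the disconnect URLs found in the response.
        """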
        # Fetch the login page and try to extract the login form
        try:
            page = await self.async_get(web.Request(auth_url),
                                        follow_redirects=True)
            form = {}
            disconnect_urls = []

            login_form, username_field_idx, password_field_idx = page.find_login_form()
            if login_form:
                post_params = login_form.post_params
                get_params = login_form.get_params

                if login_form.method == "POST":
                    post_params[username_field_idx][1] = username
                    post_params[password_field_idx][1] = password
                    form["login_field"] = post_params[username_field_idx][0]
                    form["password_field"] = post_params[password_field_idx][0]
                else:
                    get_params[username_field_idx][1] = username
                    get_params[password_field_idx][1] = password
                    form["login_field"] = get_params[username_field_idx][0]
                    form["password_field"] = get_params[password_field_idx][0]

                login_request = web.Request(path=login_form.url,
                                            method=login_form.method,
                                            post_params=post_params,
                                            get_params=get_params,
                                            referer=login_form.referer,
                                            link_depth=login_form.link_depth)

                login_response = await self.async_send(login_request,
                                                       follow_redirects=True)

                # ensure logged in
                if login_response.soup.find_all(
                        text=re.compile(DISCONNECT_REGEX)):
                    self.is_logged_in = True
                    logging.success(_("Login success"))
                    disconnect_urls = self._extract_disconnect_urls(
                        login_response)
                else:
                    logging.warning(
                        _("Login failed") + " : " +
                        _("Credentials might be invalid"))
            else:
                logging.warning(
                    _("Login failed") + " : " + _("No login form detected"))
            return self.is_logged_in, form, disconnect_urls

        except ConnectionError:
            logging.error(_("[!] Connection error with URL") + f" {auth_url}")
        except httpx.RequestError as error:
            logging.error(_("[!] {} with url {}").format(error.__class__.__name__, auth_url))

        # Network error: report a failed login so the declared return type is still honored
        return False, {}, []
Example no. 3
    async def attack(self, request: Request):
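        """Load the Nikto database (downloading it if the local copy is unreadable) and test each of its
        entries against the target, processing up to the configured number of tasks concurrently."""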
        try:
            with open(os.path.join(self.user_config_dir, self.NIKTO_DB),
                      encoding='utf-8') as nikto_db_file:
                reader = csv.reader(nikto_db_file)
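                # Skip the first row of the CSV file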
                next(reader)
                self.nikto_db = [
                    line for line in reader
                    if line != [] and line[0].isdigit()
                ]

        except IOError:
            logging.warning(_("Problem with local nikto database."))
            logging.info(_("Downloading from the web..."))
            await self.update()

        self.finished = True
        root_url = request.url
        self.parts = urlparse(root_url)

        tasks = set()
        pending_count = 0

        with open(os.path.join(self.user_config_dir, self.NIKTO_DB),
                  encoding='utf-8') as nikto_db_file:
            reader = csv.reader(nikto_db_file)
            while True:
                if pending_count < self.options["tasks"] and not self._stop_event.is_set():
                    try:
                        line = next(reader)
                    except StopIteration:
                        pass
                    else:
                        if line == [] or not line[0].isdigit():
                            continue

                        task = asyncio.create_task(self.process_line(line))
                        tasks.add(task)

                if not tasks:
                    break

                done_tasks, pending_tasks = await asyncio.wait(
                    tasks, timeout=0.01, return_when=asyncio.FIRST_COMPLETED)
                pending_count = len(pending_tasks)
                for task in done_tasks:
                    await task
                    tasks.remove(task)

                if self._stop_event.is_set():
                    for task in pending_tasks:
                        task.cancel()
                        tasks.remove(task)
Example no. 4
    async def detect_plugin(self, url):
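        """Enumerate known WordPress plugins by fetching each plugin's readme.txt under /wp-content/plugins/
        and report the ones answering with a 200 (extracting the version when possible) or a 403."""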
        for plugin in self.get_plugin():
            if self._stop_event.is_set():
                break

            req = Request(f'{url}/wp-content/plugins/{plugin}/readme.txt')
            rep = await self.crawler.async_get(req)

            if rep.status == 200:
                version = re.search(r'tag:\s*([\d.]+)', rep.content)

                # This check was added to detect an invalid "Readme.txt" format which can cause a crash
                if version:
                    version = version.group(1)
                else:
                    logging.warning("Readme.txt is not in a valid format")
                    version = ""

                plugin_detected = {
                    "name": plugin,
                    "versions": [version],
                    "categories": ["WordPress plugins"]
                }

                log_blue(
                    MSG_TECHNO_VERSIONED,
                    plugin,
                    version
                )

                await self.add_addition(
                    category=TECHNO_DETECTED,
                    request=req,
                    info=json.dumps(plugin_detected),
                    wstg=TECHNO_DETECTED_WSTG_CODE
                )
            elif rep.status == 403:
                plugin_detected = {
                    "name": plugin,
                    "versions": [""],
                    "categories": ["WordPress plugins"]
                }
                log_blue(
                    MSG_TECHNO_VERSIONED,
                    plugin,
                    [""]
                )
                await self.add_addition(
                    category=TECHNO_DETECTED,
                    request=req,
                    info=json.dumps(plugin_detected),
                    wstg=TECHNO_DETECTED_WSTG_CODE
                )
Example no. 5
    async def _verify_wapp_database(self, categories_file_path: str,
                                    technologies_base_path: str,
                                    groups_file_path: str):
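        """Open and parse the local wapp database files (categories, technologies, groups);
        download them again if they cannot be read."""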
        try:
            with open(categories_file_path, encoding='utf-8') as categories_file, \
                    open(technologies_base_path, encoding='utf-8') as technologies_file, \
                    open(groups_file_path, encoding='utf-8') as groups_file:
                json.load(categories_file)
                json.load(technologies_file)
                json.load(groups_file)
        except IOError:
            logging.warning(_("Problem with local wapp database."))
            logging.info(_("Downloading from the web..."))
            await self.update()
Example no. 6
    async def _verify_htp_database(self, htp_database_path: str):
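        """Download the htp database again if the local file is missing."""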
        if not os.path.exists(htp_database_path):
            logging.warning(_("Problem with local htp database."))
            logging.info(_("Downloading from the web..."))
            await self.update()
Example no. 7
    async def check(self, origin: str, domain: str) -> bool:
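        """Tell whether the CNAME target `domain` pointed to by `origin` looks vulnerable to takeover.

        Known service fingerprints are checked first, then NXDOMAIN answers, and finally whether the
        root domain of the target appears to be unregistered.
        """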
        if "." not in domain or domain.endswith((".local", ".internal")):
            # Stuff like "localhost": internal CNAMEs we can't control
            return False

        # Check for known false positives first
        for regex in self.ignore:
            if regex.search(domain):
                return False

        if IPV4_REGEX.match(domain):
            # Obviously we can't take control over any IP on the Internet
            return False

        # Is the pointed domain part of some particular takeover case?
        for service_entry in self.services:
            for cname_regex in service_entry["cname"]:
                if re.search(cname_regex, domain):
                    # The pointed domain matches one of the rules, check the content on the website if necessary
                    result = await self.check_content(
                        origin, service_entry["fingerprint"])
                    if result:
                        search = GITHUB_IO_REGEX.search(domain)
                        if search:
                            # This is a github.io website, we need to check if the username/organization exists
                            username = search.group(1)
                            try:
                                async with httpx.AsyncClient() as client:
                                    response = await client.head(
                                        f"https://github.com/{username}",
                                        timeout=10.)
                                    if response.status_code == 404:
                                        return True
                            except httpx.RequestError:
                                logging.warning(
                                    f"HTTP request to https://github.com/{username} failed"
                                )
                            return False

                        search = MY_SHOPIFY_REGEX.search(domain)
                        if search:
                            # Check for myshopify false positives
                            shop_name = search.group(1)
                            try:
                                async with httpx.AsyncClient() as client:
                                    # Tip from https://github.com/buckhacker/SubDomainTakeoverTools
                                    response = await client.get(
                                        "https://app.shopify.com/services/signup/check_availability.json?"
                                        f"shop_name={shop_name}&[email protected]",
                                        timeout=10.
                                    )
                                    data = response.json()
                                    if data["status"] == "available":
                                        return True
                            except httpx.RequestError:
                                logging.warning(
                                    "HTTP request to Shopify API failed")

                            return False

                        return True

                    # Otherwise, the fact that the pointed domain doesn't exist may be enough
                    if service_entry["nxdomain"]:
                        try:
                            await dns.asyncresolver.resolve(domain)
                        except dns.asyncresolver.NXDOMAIN:
                            return True
                        except BaseException:
                            continue

        # What remains is a potentially unregistered domain.
        # First: get root domain of the pointed domain
        try:
            root_domain = get_root_domain(domain)
        except (TldDomainNotFound, TldBadUrl):
            # We can't register the pointed domain as it is invalid
            logging.warning(
                f"Pointed domain {domain} is not a valid domain name")
            return False

        try:
            # Second: using SOA on this root domain we check if it is available
            await dns.asyncresolver.resolve(root_domain,
                                            "SOA",
                                            raise_on_no_answer=False)
        except dns.resolver.NXDOMAIN:
            return True
        except BaseException as exception:
            logging.warning(f"ANY request for {root_domain}: {exception}")

        return False
Example no. 8
    async def process_line(self, line):
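        """Test a single entry of the Nikto database: build the request it describes, send it, evaluate
        the match/fail conditions and report a vulnerability (with references) when they hold."""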
        match = match_or = match_and = False
        fail = fail_or = False

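        # Relevant columns of the Nikto database entry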
        osv_id = line[1]
        path = line[3]
        method = line[4]
        vuln_desc = line[10]
        post_data = line[11]

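        # Expand the Nikto path macros and JUNK(n) placeholders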
        path = path.replace("@CGIDIRS", "/cgi-bin/")
        path = path.replace("@ADMIN", "/admin/")
        path = path.replace("@NUKE", "/modules/")
        path = path.replace("@PHPMYADMIN", "/phpMyAdmin/")
        path = path.replace("@POSTNUKE", "/postnuke/")
        path = re.sub(r"JUNK\((\d+)\)",
                      lambda x: self.junk_string[:int(x.group(1))], path)

        if path[0] == "@":
            return

        if not path.startswith("/"):
            path = "/" + path

        try:
            url = f"{self.parts.scheme}://{self.parts.netloc}{path}"
        except UnicodeDecodeError:
            return

        if method == "GET":
            evil_request = Request(url)
        elif method == "POST":
            evil_request = Request(url, post_params=post_data, method=method)
        else:
            evil_request = Request(url, post_params=post_data, method=method)

        if method == "GET":
            log_verbose(f"[¨] {evil_request.url}")
        else:
            log_verbose(f"[¨] {evil_request.http_repr()}")

        try:
            response = await self.crawler.async_send(evil_request)
            page = response.content
            code = response.status
        except (RequestError, ConnectionResetError):
            self.network_errors += 1
            return
        except Exception as exception:
            logging.warning(
                f"{exception} occurred with URL {evil_request.url}")
            return

        raw = " ".join([x + ": " + y for x, y in response.headers.items()])
        raw += page

        # See https://github.com/sullo/nikto/blob/master/program/plugins/nikto_tests.plugin for reference
        expected_status_codes = []
        # First condition (match)
        if len(line[5]) == 3 and line[5].isdigit():
            expected_status_code = int(line[5])
            expected_status_codes.append(expected_status_code)
            if code == expected_status_code:
                match = True
        else:
            if line[5] in raw:
                match = True

        # Second condition (or)
        if line[6] != "":
            if len(line[6]) == 3 and line[6].isdigit():
                expected_status_code = int(line[6])
                expected_status_codes.append(expected_status_code)
                if code == expected_status_code:
                    match_or = True
            else:
                if line[6] in raw:
                    match_or = True

        # Third condition (and)
        if line[7] != "":
            if len(line[7]) == 3 and line[7].isdigit():
                if code == int(line[7]):
                    match_and = True
            else:
                if line[7] in raw:
                    match_and = True
        else:
            match_and = True

        # Fourth condition (fail)
        if line[8] != "":
            if len(line[8]) == 3 and line[8].isdigit():
                if code == int(line[8]):
                    fail = True
            else:
                if line[8] in raw:
                    fail = True

        # Fifth condition (or)
        if line[9] != "":
            if len(line[9]) == 3 and line[9].isdigit():
                if code == int(line[9]):
                    fail_or = True
            else:
                if line[9] in raw:
                    fail_or = True

        if ((match or match_or) and match_and) and not (fail or fail_or):
            if expected_status_codes:
                if await self.is_false_positive(evil_request,
                                                expected_status_codes):
                    return

            log_red("---")
            log_red(vuln_desc)
            log_red(url)

            refs = []
            if osv_id != "0":
                refs.append("https://vulners.com/osvdb/OSVDB:" + osv_id)

            # CERT
            cert_advisory = re.search("(CA-[0-9]{4}-[0-9]{2})", vuln_desc)
            if cert_advisory is not None:
                refs.append("http://www.cert.org/advisories/" +
                            cert_advisory.group(0) + ".html")

            # SecurityFocus
            securityfocus_bid = re.search("BID-([0-9]{4})", vuln_desc)
            if securityfocus_bid is not None:
                refs.append("http://www.securityfocus.com/bid/" +
                            securityfocus_bid.group(1))

            # Mitre.org
            mitre_cve = re.search("((CVE|CAN)-[0-9]{4}-[0-9]{4,})", vuln_desc)
            if mitre_cve is not None:
                refs.append("http://cve.mitre.org/cgi-bin/cvename.cgi?name=" +
                            mitre_cve.group(0))

            # CERT Incidents
            cert_incident = re.search("(IN-[0-9]{4}-[0-9]{2})", vuln_desc)
            if cert_incident is not None:
                refs.append("http://www.cert.org/incident_notes/" +
                            cert_incident.group(0) + ".html")

            # Microsoft Technet
            ms_bulletin = re.search("(MS[0-9]{2}-[0-9]{3})", vuln_desc)
            if ms_bulletin is not None:
                refs.append(
                    "http://www.microsoft.com/technet/security/bulletin/" +
                    ms_bulletin.group(0) + ".asp")

            info = vuln_desc
            if refs:
                log_red(_("References:"))
                log_red("  " + "\n  ".join(refs))

                info += "\n" + _("References:") + "\n"
                info += "\n".join(refs)

            log_red("---")

            await self.add_vuln_high(category=NAME,
                                     request=evil_request,
                                     info=info,
                                     wstg=WSTG_CODE)