Example 1
 def import_results(self, input_file):
     results = NmapScanPlugin.parse_nmap_results(None, input_file)
     if results:
         Database.async_add_many(results)
         Logger.log("Loaded %d elements from file: %s" % (len(results), input_file))
     else:
         Logger.log_verbose("No data found in file: %s" % input_file)
Example 2
    def run(self, info):

        # Skip if the vulnerability doesn't have CVE or OSVDB tags.
        if not info.cve and not info.osvdb:
            Logger.log_more_verbose("No CVE or OSVDB tags found, skipped.")
            return

        # Search for public exploits in the exploit-db database.
        ids = self.query_exploitdb(info.cve, info.osvdb)

        # Add any previously existing IDs.
        ids.update(info.edb)

        # Are there any new IDs?
        new_ids = ids.difference(info.edb)
        if new_ids:

            # Log the new IDs.
            msg = "\n".join(
                "    " + x
                for x in sorted( convert_vuln_ids_to_references(new_ids) )
            )
            Logger.log("Public exploits found:\n" + msg)

            # Update the Vulnerability object.
            info.edb = ids

            # Return the updated object.
            return info
Example 3
    def run(self, info):

        m_return = []

        m_url = info.url
        Logger.log_verbose("Spidering URL: %s" % m_url)

        # Check whether we need to follow the first redirect, then download the link.
        p = None
        try:
            allow_redirects = Config.audit_config.follow_redirects or \
                (info.depth == 0 and Config.audit_config.follow_first_redirect)
            p = download(m_url,
                         self.check_download,
                         allow_redirects=allow_redirects)
        except NetworkException, e:
            Logger.log_error_verbose("Error while processing %r: %s" %
                                     (m_url, str(e)))
Example 4
    def recv_info(self, info):

        # Parse original URL
        m_url = info.url
        m_url_parts = info.parsed_url

        Logger.log_more_verbose("Bruteforcing URL: %s" % m_url)

        # If file is a javascript, css or image, do not run
        if info.parsed_url.extension[1:] in ('css', 'js', 'jpeg', 'jpg', 'png', 'gif', 'svg') or not m_url_parts.extension:
            Logger.log_more_verbose("Skipping URL: %s" % m_url)
            return

        #
        # Load wordlist for prefixes
        #
        # COMMON
        m_urls = make_url_with_prefixes(get_list_from_wordlist("common_prefixes"), m_url_parts)

        # Generates the error page
        m_error_response = get_error_page(m_url)

        # Create the matching analyzer
        try:
            m_store_info = MatchingAnalyzer(m_error_response, min_ratio=0.65)
        except ValueError:
            # There is no information to analyze
            return

        # Create the partial funs
        _f = partial(process_url,
                     severity_vectors['prefixes'],
                     get_http_method(m_url),
                     m_store_info,
                     self.update_status,
                     len(m_urls))

        # Process the URLs
        for i, l_url in enumerate(m_urls):
            _f((i, l_url))


        # Generate and return the results.
        return generate_results(m_store_info.unique_texts)
Example 5
    def test(self, hostname, port, starttls=False):
        """
        Test against the specified hostname and port.

        :param hostname: Hostname to test.
        :type hostname: str

        :param port: TCP port to test.
        :type port: int

        :param starttls: True to issue a STARTTLS command, False otherwise.
                         This is useful for SMTP only.
        :type starttls: bool

        :returns: True if the host is vulnerable, False otherwise.
        :rtype: bool
        """

        # Don't scan the same host and port twice.
        if self.state.put("%s:%d" % (hostname, port), True):
            Logger.log_more_verbose("Host %s:%d already scanned, skipped." %
                                    (hostname, port))
            return False

        # Request permission to connect to the host.
        with ConnectionSlot(hostname):

            # Test the host and port.
            success = self.__test(hostname,
                                  port,
                                  starttls=starttls,
                                  version="1.1")
            if not success:
                success = self.__test(hostname,
                                      port,
                                      starttls=starttls,
                                      version="1.2")
                if not success:
                    success = self.__test(hostname,
                                          port,
                                          starttls=starttls,
                                          version="1.0")
            return success
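
The nested calls above fall back from TLS 1.1 to 1.2 and finally 1.0, stopping at the first vulnerable version. Below is a minimal sketch of the same fallback written as a loop; the probe argument stands in for the private self.__test helper and is an assumption of this sketch:

def test_tls_versions(probe, hostname, port, starttls=False,
                      versions=("1.1", "1.2", "1.0")):
    # Sketch only: "probe" plays the role of self.__test in the method above.
    # Try each TLS version in order and stop at the first vulnerable one.
    for version in versions:
        if probe(hostname, port, starttls=starttls, version=version):
            return True
    return False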
Example 6
    def run(self, info):
        #if not info.has_url_params and not info.has_post_params:
        #    return

        m_url = info.url


        # If file is a javascript, css or image, do not run

        if info.parsed_url.extension[1:] in ('css', 'js', 'jpeg', 'jpg', 'png', 'gif', 'svg', 'txt') or ( not info.has_url_params and not info.has_post_params):
            Logger.log_verbose("Skipping URL: %s" % m_url)
            return

        m_return = []

        b_continue = True
        m_source_url = []
        target = None

        if info.has_url_params:
            print "GET"
            #param_dict = info.url_params

            for test_type in TEST_SQL_TYPE:
                #self.deal_param_payload(test_type, info.url, param_dict, method = info.method, referer = info.referer)
                if self.deal_param_payload(test_type, info, method = 'GET'):
                    return m_return

        if info.has_post_params:
            print 'POST'
            print info.post_params
            #print info.url, info.post_params
            #param_dict = info.post_params

            for test_type in TEST_SQL_TYPE:
                #self.deal_param_payload(test_type, info.url, param_dict, method = info.method, referer = info.referer)
                if self.deal_param_payload(test_type, info, method = 'POST'):
                    return m_return


        # Send the results
        return m_return
Example 7
    def ttl_platform_detection(self, main_url):
        """
        This function tries to recognize the remote platform by sending a ping
        and analyzing the TTL of the IP header in the response.

        :param main_url: Base url to test.
        :type main_url: str

        :return: Possible platforms.
        :rtype: list(tuple(OS, version))
        """

        # Do a ping
        try:
            m_ttl = do_ping_and_receive_ttl(ParsedURL(main_url).hostname, 2)

            # Load words for the wordlist
            l_wordlist_instance = WordListLoader.get_advanced_wordlist_as_dict(
                Config.plugin_extra_config["Wordlist_ttl"]["ttl"])
            # Looking for matches
            l_matches = l_wordlist_instance.matches_by_value(m_ttl)

            if l_matches:
                m_ret = {}
                for v in l_matches:
                    sp = v.split("|")
                    k = sp[0].strip()
                    v = sp[1].strip()
                    m_ret[k] = v

                return [(k, v) for k, v in m_ret.iteritems()]
            else:
                return []
        except EnvironmentError:
            Logger.log_error(
                "[!] You can't run the platform detection plugin if you're not root."
            )
            return []
        except Exception, e:
            Logger.log_error("[!] Platform detection failed, reason: %s" % e)
            return []
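
The wordlist maps observed TTL values to platform names. For reference, here is a self-contained sketch of the same idea using only the well-known default TTLs of common operating systems; the mapping below is general knowledge, not the plugin's wordlist:

# Sketch only: common default TTLs and the platforms that typically use them.
DEFAULT_TTLS = (
    (64,  ("Linux/Unix", "generic")),
    (128, ("Windows", "generic")),
    (255, ("Solaris / network device", "generic")),
)

def guess_platform_by_ttl(ttl):
    # Round the observed TTL up to the nearest default value, since every
    # hop in transit decrements it by one.
    for base, platform in DEFAULT_TTLS:
        if ttl <= base:
            return [platform]
    return []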
Example 8
    def run(self, info):

        # Make sure it's a CNAME record.
        # This is required because a plugin can't ask for a specific DNS
        # register type - all types are received together.
        if info.type != "CNAME":
            return

        # Get the root domain.
        root = info.target
        Logger.log_verbose("Looking for poisoned domains at: *.%s" % root)

        # Load the malware URLs list.
        wordlist_filename = Config.plugin_args["wordlist"]
        try:
            wordlist = WordListLoader.get_advanced_wordlist_as_list(
                wordlist_filename)
        except WordlistNotFound:
            Logger.log_error_verbose("Wordlist not found: " +
                                     wordlist_filename)
            return
        except TypeError:
            Logger.log_error_verbose("Wordlist is not a file: " +
                                     wordlist_filename)
            return

        results = []
        root_set = set([root])

        for x in root_set.intersection(set(wordlist)):
            results.append(DNSPoisoning(info, x))

        # Log how many results we got.
        if results:
            Logger.log_verbose("Discovered %s poisoned domains." %
                               len(results))
        else:
            Logger.log_verbose("No poisoned domains found.")

        # Return the results.
        return results
Example 9
def get_list_from_wordlist(wordlist):
    """
    Load the content of the wordlist and return a set with the content.

    :param wordlist: wordlist name.
    :type wordlist: str

    :return: a set with the results.
    :rtype: set
    """

    try:
        m_common_wordlists = set()

        for v in Config.plugin_extra_config[wordlist].itervalues():
            m_common_wordlists.update(WordListLoader.get_wordlist_as_list(v))

        return m_common_wordlists
    except KeyError, e:
        Logger.log_error_more_verbose(str(e))
        return set()
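
A hypothetical call, mirroring Example 4; the "common_prefixes" key is assumed to be defined in the plugin's extra configuration:

# Hypothetical usage (see Example 4): "common_prefixes" must exist in
# Config.plugin_extra_config for this to return anything.
prefixes = get_list_from_wordlist("common_prefixes")
Logger.log_verbose("Loaded %d prefix entries" % len(prefixes))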
Example 10
    def run(self, info):

        # Parse original URL
        m_url = info.url
        m_url_parts = info.parsed_url

        # If file is a javascript, css or image, do not run
        if info.parsed_url.extension[1:] in (
                'css', 'js', 'jpeg', 'jpg', 'png', 'gif',
                'svg') or not m_url_parts.extension:
            Logger.log_more_verbose("Skipping URL: %s" % m_url)
            return

        Logger.log_more_verbose("Bruteforcing URL: %s" % m_url)

        #
        # Load wordlist for changing directories
        #
        # COMMON
        m_urls = make_url_changing_folder_name(m_url_parts)

        # Generates the error page
        m_error_response = get_error_page(m_url)

        # Create the matching analyzer
        try:
            m_store_info = MatchingAnalyzer(m_error_response.raw_data,
                                            min_ratio=0.65)
        except ValueError, e:
            Logger.log_error(
                "There is no information to analyze when creating the matcher: '%s'"
                % e)
            return
Example 11
    def recv_info(self, info):

        m_return = []

        m_url = info.url
        m_depth = info.depth

        # Check depth
        if Config.audit_config.depth is not None and m_depth > Config.audit_config.depth:
            Logger.log_more_verbose("Spider depth level exceeded for URL: %s" %
                                    m_url)
            return m_return

        Logger.log_verbose("Spidering URL: %r" % m_url)

        # Check whether we need to follow the first redirect
        p = None
        try:
            allow_redirects = Config.audit_config.follow_redirects or \
                             (m_depth == 0 and Config.audit_config.follow_first_redirect)
            p = download(m_url,
                         self.check_download,
                         allow_redirects=allow_redirects)
        except NetworkException, e:
            Logger.log_more_verbose("Error while processing %r: %s" %
                                    (m_url, str(e)))
Example 12
    def run(self, info):

        # Get the search parameters.
        word = info.hostname
        limit = 100
        try:
            limit = int(Config.plugin_config.get("limit", str(limit)), 0)
        except ValueError:
            pass

        # Search every supported engine.
        total = float(len(self.SUPPORTED))
        all_emails, all_hosts = set(), set()
        for step, engine in enumerate(self.SUPPORTED):
            try:
                Logger.log_verbose("Searching keyword %r in %s" %
                                   (word, engine))
                self.update_status(progress=float(step * 80) / total)
                emails, hosts = self.search(engine, word, limit)
            except Exception, e:
                t = traceback.format_exc()
                Logger.log_error(str(e))
                Logger.log_error_more_verbose(t)
                continue
            all_emails.update(address.lower() for address in emails if address)
            all_hosts.update(name.lower() for name in hosts if name)
Example 13
    def __generate_report(self, output_file):
        Logger.log_verbose(
            "Writing LaTeX report to file: %s" % output_file)

        # Load docutils.
        with warnings.catch_warnings(record=True):
            from docutils.core import publish_file

        # Create a temporary file for the reStructured Text report.
        with tempfile(suffix=".rst") as filename:

            # Generate the report in reStructured Text format.
            Logger.log_more_verbose("Writing temporary file in rST format...")
            with open(filename, "w") as source:
                self.write_report_to_open_file(source)

            # Convert to LaTeX format.
            Logger.log_more_verbose("Converting to LaTeX format...")
            with open(filename, "rU") as source:
                with warnings.catch_warnings(record=True):
                    with open(output_file, "wb") as destination:
                        publish_file(
                            source = source,
                            destination = destination,
                            destination_path = output_file,
                            writer_name = "latex",
                        )
Example 14
    def generate_report(self, output_file):
        Logger.log_verbose("Writing OpenOffice report to file: %s" %
                           output_file)

        # Load docutils.
        with catch_warnings(record=True):
            from docutils.core import publish_file
            from docutils.writers.odf_odt import Writer, Reader

        # Create a temporary file for the reStructured Text report.
        with tempfile(suffix=".rst") as filename:

            # Generate the report in reStructured Text format.
            Logger.log_more_verbose("Writing temporary file in rST format...")
            with open(filename, "w") as source:
                self.write_report_to_open_file(source)

            # Convert to OpenOffice format.
            Logger.log_more_verbose("Converting to OpenOffice format...")
            with open(filename, "rU") as source:
                writer = Writer()
                reader = Reader()
                with catch_warnings(record=True):
                    with open(output_file, "wb") as destination:
                        publish_file(
                            source=source,
                            destination=destination,
                            destination_path=output_file,
                            reader=reader,
                            writer=writer,
                        )
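
Examples 13 and 14 share the same docutils pattern: write the report as reStructuredText, then convert it with publish_file and the appropriate writer. Here is a minimal standalone sketch of that conversion step; the file names are illustrative and the HTML writer is chosen only as an example:

# A minimal sketch of the same docutils conversion step; "report.rst" and
# "report.html" are illustrative file names, not taken from the plugins above.
from docutils.core import publish_file

with open("report.rst", "rU") as source:
    with open("report.html", "wb") as destination:
        publish_file(
            source=source,
            destination=destination,
            destination_path="report.html",
            writer_name="html",
        )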
Example 15
    def check_download(self, url, name, content_length, content_type):

        # Check the file type is text.
        if not content_type or not content_type.strip().lower().startswith(
                "text/"):
            Logger.log_more_verbose("Skipping URL, binary content: %s" % url)
            return False

        # Is the content length present?
        if content_length is not None:

            # Check the file doesn't have 0 bytes.
            if content_length <= 0:
                Logger.log_more_verbose("Skipping URL, empty content: %s" %
                                        url)
                return False

            # Check the file is not too big.
            if content_length > 100000:
                Logger.log_more_verbose(
                    "Skipping URL, content too large (%d bytes): %s" %
                    (content_length, url))
                return False

            # Approved!
            return True

        # Content length absent but likely points to a directory index.
        if not parse_url(url).filename:

            # Approved!
            return True

        # Content length absent: skip if the URL doesn't look like a regular webpage.
        if "download" in url or name[name.rfind(".") + 1:].lower() not in (
                "htm",
                "html",
                "php",
                "asp",
                "aspx",
                "jsp",
        ):
            Logger.log_more_verbose(
                "Skipping URL, content is likely not text: %s" % url)
            return False

        # Approved!
        return True
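
For reference, here is a condensed standalone sketch of the content-type and size checks above; the filename heuristics used when Content-Length is missing are left out of this sketch:

# Sketch only: the type/size policy of check_download, without the
# filename-based heuristics for responses lacking a Content-Length.
def looks_like_small_text(content_type, content_length, max_size=100000):
    if not content_type or not content_type.strip().lower().startswith("text/"):
        return False
    if content_length is not None:
        return 0 < content_length <= max_size
    return True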
Example 16
    def run(self, info):

        m_url = info.url

        Logger.log_more_verbose("Start to process URL: %r" % m_url)

        #
        # Get the remote web server fingerprint
        #
        m_webserver_finger = info.get_associated_informations_by_category(WebServerFingerprint.information_type)

        m_wordlist = set()

        # Common wordlists
        try:
            w = Config.plugin_extra_config["common"]
            m_wordlist.update([l_w for l_w in w.itervalues()])
        except KeyError:
            Logger.log_error("Can't load common wordlists")

        # Is there any fingerprinting information?
        if m_webserver_finger:

            m_webserver_finger = m_webserver_finger.pop()

            m_server_canonical_name = m_webserver_finger.canonical_name
            m_servers_related = m_webserver_finger.related # Set with related web servers

            #
            # Load wordlists
            #
            m_wordlist_update = m_wordlist.update

            # Wordlist of server name
            try:
                w = Config.plugin_extra_config["%s_predictables" % m_server_canonical_name]
                m_wordlist_update([l_w for l_w in w.itervalues()])
            except KeyError:
                Logger.log_error("Can't load predictables wordlists for server: '%s'." % m_server_canonical_name)

            # Wordlists for servers related to the one found
            try:
                for l_servers_related in m_servers_related:
                    w = Config.plugin_extra_config["%s_predictables" % l_servers_related]
                    m_wordlist_update([l_w for l_w in w.itervalues()])
            except KeyError, e:
                Logger.log_error("Can't load predictables wordlists for related webserver: '%s'" % e)
Example 17
    def recv_info(self, info):

        # Synchronization object to wait for completion.
        m_event = Event()

        # Get the config.
        m_user = Config.plugin_args["user"]
        m_password = Config.plugin_args["password"]
        m_host = Config.plugin_args["host"]
        m_port = Config.plugin_args["port"]
        m_timeout = Config.plugin_args["timeout"]
        m_profile = Config.plugin_args["profile"]

        # Sanitize the port and timeout.
        try:
            m_port = int(m_port)
        except Exception:
            m_port = 9390
        if m_timeout.lower().strip() in ("inf", "infinite", "none"):
            m_timeout = None
        else:
            try:
                m_timeout = int(m_timeout)
            except Exception:
                m_timeout = None

        # Connect to the scanner.
        try:
            m_scanner = VulnscanManager(m_host, m_user, m_password, m_port,
                                        m_timeout)
        except VulnscanException, e:
            t = format_exc()
            Logger.log_error("Error connecting to OpenVAS, aborting scan!")
            #Logger.log_error_verbose(str(e))
            Logger.log_error_more_verbose(t)
            return
Example 18
    def recv_info(self, info):
        """
        Main function for server fingerprinting. Gets a URL and returns the fingerprint results.

        :param info: Folder URL.
        :type info: FolderUrl

        :return: Fingerprint.
        :rtype: WebServerFingerprint
        """

        m_main_url = info.url

        Logger.log_more_verbose(
            "Starting webserver fingerprinting plugin for site: %s" %
            m_main_url)

        #
        # Analyze HTTP protocol
        #
        m_server_name, m_server_version, m_canonical_name, m_webserver_complete_desc, m_related_webservers, m_others = http_analyzers(
            m_main_url, self.update_status, 100)

        Logger.log_more_verbose("Fingerprint - Server: %s | Version: %s" %
                                (m_server_name, m_server_version))

        m_return = WebServerFingerprint(m_server_name, m_server_version,
                                        m_webserver_complete_desc,
                                        m_canonical_name, m_related_webservers,
                                        m_others)

        # Associate resource
        m_return.add_resource(info)

        # Return the fingerprint
        return m_return
Example 19
def process_url(risk_level, method, matcher, updater_func, total_urls, url):
    """
    Checks if a URL exists.

    :param risk_level: risk level of the tested URL, if discovered.
    :type risk_level: int

    :param method: string with HTTP method used.
    :type method: str

    :param matcher: instance of MatchingAnalyzer object.
    :type matcher: `MatchingAnalyzer`

    :param updater_func: update_status function to send updates
    :type updater_func: update_status

    :param total_urls: total number of URLs to process globally.
    :type total_urls: int

    :param url: a tuple with data: (index, the URL to process)
    :type url: tuple(int, str)
    """
    i, url = url

    updater_func((float(i) * 100.0) / float(total_urls))
    # Logger.log_more_verbose("Trying to discover URL %s" % url)

    # Get URL
    p = None
    try:
        p = HTTP.get_url(url, use_cache=False, method=method)
        if p:
            discard_data(p)
    except Exception, e:
        Logger.log_error_more_verbose("Error while processing: '%s': %s" %
                                      (url, str(e)))
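
process_url is meant to be partially applied and then fed (index, URL) tuples, which is exactly how Example 4 drives it; the annotated version below only labels which bound argument maps to which parameter:

# How Example 4 binds the parameters (annotation only, no new logic):
_f = partial(process_url,
             severity_vectors['prefixes'],  # risk_level
             get_http_method(m_url),        # method
             m_store_info,                  # matcher
             self.update_status,            # updater_func
             len(m_urls))                   # total_urls
for i, l_url in enumerate(m_urls):
    _f((i, l_url))                          # url = (index, URL to process)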
Example 20
 def import_results(self, input_file):
     try:
         results, vuln_count = NiktoPlugin.parse_nikto_results(
             None, input_file)
         if results:
             Database.async_add_many(results)
     except Exception, e:
         Logger.log_error(
             "Could not load Nikto results from file: %s" % input_file)
         Logger.log_error_verbose(str(e))
         Logger.log_error_more_verbose(format_exc())
Example 21
 def import_results(self, input_file):
     try:
         with open(input_file, "rU") as fd:
             results = SpiderFootParser().parse(fd)
         if results:
             Database.async_add_many(results)
     except Exception, e:
         fmt = format_exc()
         Logger.log_error("Could not load file: %s" % input_file)
         Logger.log_error_verbose(str(e))
         Logger.log_error_more_verbose(fmt)
Example 22
 def import_results(self, input_file):
     try:
         openvas_results = report_parser(input_file)
         golismero_results = OpenVASPlugin.parse_results(openvas_results)
         if golismero_results:
             Database.async_add_many(golismero_results)
     except Exception, e:
         fmt = format_exc()
         Logger.log_error("Could not load OpenVAS results from file: %s" %
                          input_file)
         Logger.log_error_verbose(str(e))
         Logger.log_error_more_verbose(fmt)
Example 23
 def import_results(self, input_file):
     try:
         xml_results = etree.parse(input_file)
         openvas_results = VulnscanManager.transform(xml_results.getroot())
         golismero_results = OpenVASPlugin.parse_results(openvas_results)
         if golismero_results:
             Database.async_add_many(golismero_results)
     except Exception, e:
         Logger.log_error("Could not load OpenVAS results from file: %s" %
                          input_file)
         Logger.log_error_verbose(str(e))
         Logger.log_error_more_verbose(format_exc())
Example 24
    def search(engine, word, limit=100):
        """
        Run a theHarvester search on the given engine.

        :param engine: Search engine.
        :type engine: str

        :param word: Word to search for.
        :type word: str

        :param limit: Maximum number of results.
            Its exact meaning may depend on the search engine.
        :type limit: int

        :returns: All email addresses and hostnames collected.
        :rtype: tuple(list(str), list(str))
        """

        Logger.log_more_verbose("Searching on: %s" % engine)

        # Get the search class.
        search_mod = getattr(discovery, "%ssearch" % engine)
        search_fn = getattr(search_mod, "search_%s" % engine)

        # Run the search, hiding all the prints.
        fd = StringIO.StringIO()
        old_out, old_err = sys.stdout, sys.stderr
        try:
            sys.stdout, sys.stderr = fd, fd

            class Options:
                pass

            options = Options()
            options.word = word
            options.limit = limit
            options.start = 0
            search = search_fn(word, options)
            search.process()
        finally:
            sys.stdout, sys.stderr = old_out, old_err

        # Extract the results.
        emails, hosts = [], []
        results = search.get_results()
        if hasattr(results, "emails"):
            try:
                emails = results.emails
            except Exception, e:
                t = traceback.format_exc()
                Logger.log_error(str(e))
                Logger.log_error_more_verbose(t)
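
A hypothetical call, mirroring how Example 12 unpacks the result; the engine name "google" is only assumed to be among the supported engines:

# Hypothetical usage (see Example 12); "google" is assumed to be listed
# in self.SUPPORTED and "example.com" is just a sample keyword.
emails, hosts = self.search("google", "example.com", limit=100)
Logger.log_verbose("Collected %d emails and %d hostnames" % (len(emails), len(hosts)))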
Example 25
 def query_google(latitude, longitude):
     coordinates = "%s, %s" % (latitude, longitude)
     Logger.log_more_verbose("Querying Google Geocoder for: %s" %
                             coordinates)
     try:
         g = geocoders.GoogleV3()
         r = g.reverse(coordinates)
         if r:
             return r[0][0].encode("UTF-8")
     except Exception, e:
         fmt = traceback.format_exc()
         Logger.log_error_verbose("Error: %s" % str(e))
         Logger.log_error_more_verbose(fmt)
Example 26
    def run(self, data):
        #
        #
        # PUT YOUR CODE HERE
        #
        #

        if data.is_instance(URL):
            Logger.log_verbose("Found a URL! %s" % data.url)
        elif data.is_instance(Relationship(Username, Password)):
            Logger.log("Found a valid password! User: %s, Pass: %s" %
                       (data[0].name, data[1].password))
        else:
            Logger.log_error("This should never happen...")
Example 27
 def query_freegeoip(ip):
     Logger.log_more_verbose("Querying freegeoip.net for: " + ip)
     try:
         resp = requests.get("http://freegeoip.net/json/" + ip)
         if resp.status_code == 200:
             return json_decode(resp.content)
         if resp.status_code == 404:
             Logger.log_more_verbose(
                 "No results from freegeoip.net for IP: " + ip)
         else:
             Logger.log_more_verbose(
                 "Response from freegeoip.net for %s: %s" %
                 (ip, resp.content))
     except Exception:
         raise RuntimeError("Freegeoip.net webservice is not available,"
                            " possible network error?")
Example 28
def recvmsg(s):
    hdr = recvall(s, 5)
    if hdr is None:
        Logger.log(
            'Unexpected EOF receiving record header - server closed connection'
        )
        return None, None, None
    typ, ver, ln = struct.unpack('>BHH', hdr)
    pay = recvall(s, ln, 10)
    if pay is None:
        Logger.log(
            'Unexpected EOF receiving record payload - server closed connection'
        )
        return None, None, None
    Logger.log(' ... received message: type = %d, ver = %04x, length = %d' %
               (typ, ver, len(pay)))
    return typ, ver, pay
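
recvmsg relies on a recvall helper that is not shown in this excerpt. The following is a sketch of what such a helper typically looks like; it is an assumption, not the plugin's actual implementation:

import socket
import time

def recvall(s, length, timeout=5):
    # Sketch only (the real helper is not shown above): read exactly
    # `length` bytes from the socket, or return None on EOF or timeout.
    endtime = time.time() + timeout
    data = ""
    while len(data) < length:
        remaining = endtime - time.time()
        if remaining <= 0:
            return None
        s.settimeout(remaining)
        try:
            chunk = s.recv(length - len(data))
        except socket.timeout:
            return None
        if not chunk:
            return None
        data += chunk
    return data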
Example 29
    def run(self, info):
        m_return = []

        m_url = info.url
        m_hostname = info.hostname
        m_url_robots_txt = urljoin(m_url, 'robots.txt')

        p = None
        try:
            msg = "Looking for robots.txt in: %s" % m_hostname
            Logger.log_more_verbose(msg)
            p = download(m_url_robots_txt, self.check_download)
        except NetworkOutOfScope:
            Logger.log_more_verbose("URL out of scope: %s" %
                                    (m_url_robots_txt))
            return
        except Exception, e:
            Logger.log_more_verbose("Error while processing %r: %s" %
                                    (m_url_robots_txt, str(e)))
            return
Example 30
    def recv_info(self, info):

        m_domain = info.root

        # Skips localhost
        if m_domain == "localhost":
            return

        m_return = None

        # Check if the hostname has already been processed
        if not self.state.check(m_domain):

            Logger.log_verbose("Starting DNS analyzer plugin")
            m_return = []

            m_reg_len = len(DnsRegister.DNS_TYPES)
            for l_step, l_type in enumerate(DnsRegister.DNS_TYPES):

                # Update status
                progress = (float(l_step) / float(m_reg_len)) * 100.0
                self.update_status(progress=progress)
                Logger.log_more_verbose("Making %r DNS query" % l_type)

                # Make the query
                m_return.extend(DNS.resolve(m_domain, l_type))

            # Set the domain parsed
            self.state.set(m_domain, True)

            # Add the information to the host
            map(info.add_information, m_return)

            Logger.log_verbose(
                "Ending DNS analyzer plugin, found %d registers" %
                len(m_return))

        return m_return
Example 31
    def parse_results(openvas_results, ip=None):
        """
        Convert the OpenVAS scan results to the GoLismero data model.

        :param openvas_results: OpenVAS scan results.
        :type openvas_results: list(OpenVASResult)

        :param ip: (Optional) IP address to link the vulnerabilities to.
        :type ip: IP | None

        :returns: Scan results converted to the GoLismero data model.
        :rtype: list(Data)
        """

        # This is where we'll store the results.
        results = []

        # Remember the hosts we've seen so we don't create them twice.
        hosts_seen = {}

        # Maps of OpenVAS levels to GoLismero levels.
        LEVELS = {
            'debug': 'informational',
            'log': 'informational',
            'low': "low",
            'medium': 'middle',
            'high': "high",
        }
        RISKS = {
            'none': 0,
            'debug': 0,
            'log': 0,
            'low': 1,
            'medium': 2,
            'high': 3,
            'critical': 4
        }

        # Do we have the OpenVAS plugin database?
        if not os.path.exists(openvas_db):
            Logger.log_error(
                "OpenVAS plugin not initialized, please run setup.py")
            return

        # Load the database.
        with open(openvas_db, "rb") as f:
            use_openvas_db = Pickler.load(f)

        # Get the configuration.
        import_log = Config.audit_config.boolean(
            Config.plugin_args.get("import_log", "no"))
        import_debug = Config.audit_config.boolean(
            Config.plugin_args.get("import_debug", "no"))

        # For each OpenVAS result...
        for opv in openvas_results:
            try:

                # Get the host.
                host = opv.host

                # Skip if we don't have a target host.
                if host is None:
                    continue

                # Get the threat level.
                threat = getattr(opv, "threat", "log").lower()

                # Discard log and debug entries, keep only the vulnerabilities.
                if threat == "log" and not import_log:
                    continue
                if threat == "debug" and not import_debug:
                    continue

                # Get or create the vulnerable resource.
                target = ip
                if host in hosts_seen:
                    target = hosts_seen[host]
                elif not ip or ip.address != host:
                    try:
                        target = IP(host)
                    except ValueError:
                        target = Domain(host)
                    hosts_seen[host] = target
                    results.append(target)

                # Extract the relevant information from the results.
                nvt = opv.nvt
                vid = opv.id
                oid = int(nvt.oid.split(".")[-1])
                name = getattr(nvt, "name", None)
                cvss_base = getattr(nvt, "cvss_base", None)
                level = LEVELS.get(threat, "informational")
                risk = RISKS.get(
                    getattr(opv.nvt, "risk_factor", "none").lower(), 0)

                # Get the vulnerability description.
                description = opv.raw_description
                if not description:
                    description = nvt.description
                    if not description:
                        description = nvt.summary
                        if not description:
                            description = None

                # Extract the CVEs and Bugtraq IDs.
                cve = nvt.cve.split(", ") if nvt.cve else []
                if "NOCVE" in cve:
                    cve.remove("NOCVE")
                bid = []
                if nvt.bid:
                    bid.extend("BID-" + x for x in nvt.bid.split(", "))
                if nvt.bugtraq:
                    bid.extend("BID-" + x for x in nvt.bugtraq.split(", "))
                if "NOBID" in bid:
                    bid.remove("NOBID")

                # Extract the notes and add them to the description text.
                if opv.notes and description is not None:
                    description += "\n" + "\n".join(" - " + note.text
                                                    for note in opv.notes)

                # Extract the reference URLs from the description text.
                references = []
                if description is not None:
                    p = description.find("URL:")
                    while p >= 0:
                        p += 4
                        q2 = description.find("\n", p)
                        q1 = description.find(",", p, q2)
                        if q1 > p:
                            q = q1
                        else:
                            q = q2
                        if q < p:
                            q = len(description)
                        url = description[p:q].strip()
                        try:
                            url = parse_url(url).url
                            references.append(url)
                        except Exception:
                            Logger.log_error(format_exc())
                            pass
                        p = description.find("URL:", q)

                # Prepare the vulnerability properties.
                kwargs = {
                    "title": name,
                    "description": description,
                    "references": references,
                    "level": level,
                    "risk": risk,
                    "severity": risk,
                    "impact": risk,
                    "cvss_base": cvss_base,
                    "cve": cve,
                    "bid": bid,
                    "tool_id": "openvas_plugin_%s" % oid,
                    "custom_id": vid,
                }

                # If we have the OpenVAS plugin database, look up the plugin ID
                # that reported this vulnerability and create the vulnerability
                # using a specific class. Otherwise use the vulnerability class
                # for uncategorized vulnerabilities.
                classname = "UncategorizedVulnerability"
                if oid in use_openvas_db:
                    classname = use_openvas_db[oid][0][0]

                # Create the Vulnerability object.
                try:
                    clazz = globals()[classname]
                    vuln = clazz(target, **kwargs)
                except Exception, e:
                    t = format_exc()
                    Logger.log_error_more_verbose(
                        "Could not load vulnerability of type: %s" % classname)
                    Logger.log_error_more_verbose(t)
                    vuln = UncategorizedVulnerability(target, **kwargs)
                results.append(vuln)

            # Skip this result on error.
            except Exception, e:
                t = format_exc()
                Logger.log_error_verbose("Error parsing OpenVAS results: %s" %
                                         str(e))
                Logger.log_error_more_verbose(t)
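
The reference-extraction loop above pulls URLs out of free text of the form "URL:http://..., URL:http://...". Here is a standalone sketch of that extraction, with the parse_url normalization left out:

# Sketch of the "URL:" extraction loop above (parse_url normalization omitted).
def extract_reference_urls(description):
    references = []
    p = description.find("URL:")
    while p >= 0:
        p += 4
        q2 = description.find("\n", p)
        q1 = description.find(",", p, q2)
        q = q1 if q1 > p else q2
        if q < p:
            q = len(description)
        references.append(description[p:q].strip())
        p = description.find("URL:", q)
    return references

# e.g. extract_reference_urls("See URL:http://example.com/a, URL:http://example.com/b")
#      returns ['http://example.com/a', 'http://example.com/b']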
Example 32
    def run(self, info):

        # Check that the connection was not already marked as down
        if not self.state.check("connection_down"):

            # Synchronization object to wait for completion.
            m_event = Event()

            # Get the config.
            m_user = Config.plugin_args["user"]
            m_password = Config.plugin_args["password"]
            m_host = Config.plugin_args["host"]
            m_port = Config.plugin_args["port"]
            m_timeout = Config.plugin_args["timeout"]
            m_profile = Config.plugin_args["profile"]

            # Sanitize the port and timeout.
            try:
                m_port = int(m_port)
            except Exception:
                m_port = 9390
            if m_timeout.lower().strip() in ("inf", "infinite", "none"):
                m_timeout = None
            else:
                try:
                    m_timeout = int(m_timeout)
                except Exception:
                    m_timeout = None

            # Connect to the scanner.
            try:
                Logger.log_more_verbose(
                    "Connecting to OpenVAS server at %s:%d" % (m_host, m_port))
                m_scanner = VulnscanManager(m_host, m_user, m_password, m_port,
                                            m_timeout)

            except VulnscanVersionError:
                Logger.log_error(
                    "Remote host is running an unsupported version of OpenVAS."
                    " Only OpenVAS 6 is currently supported.")

                # Set the openvas connection as down and remember it.
                self.state.put("connection_down", True)
                return

            except VulnscanException, e:
                t = format_exc()
                Logger.log_error("Error connecting to OpenVAS, aborting scan!")
                Logger.log_error_more_verbose(t)

                # Set the openvas connection as down and remember it.
                self.state.put("connection_down", True)
                return

            m_scan_id = None
            m_target_id = None
            try:
                # Launch the scanner.
                m_scan_id, m_target_id = m_scanner.launch_scan(
                    target=info.address,
                    profile=m_profile,
                    callback_end=partial(lambda x: x.set(), m_event),
                    callback_progress=OpenVASProgress(self.update_status))
                Logger.log_more_verbose("OpenVAS task ID: %s" % m_scan_id)

                # Wait for completion.
                m_event.wait()

                # Get the scan results.
                m_openvas_results = m_scanner.get_results(m_scan_id)

            except Exception, e:
                t = format_exc()
                Logger.log_error_verbose("Error parsing OpenVAS results: %s" %
                                         str(e))
                Logger.log_error_more_verbose(t)
                return