Example #1
def queue(command):
    try:
        return scanner.queue_scan_command(server_info, command)
    except OSError:
        text = ("OSError - likely too many processes and open files.")
        data['errors'].append(text)
        logging.exception("%s\n%s" % (text, utils.format_last_exception()))
        return None, None, None, None, None, None, None
    except Exception:
        text = ("Unknown exception queueing sslyze command.\n%s" % utils.format_last_exception())
        data['errors'].append(text)
        logging.exception(text)
        return None, None, None, None, None, None, None
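
Note: Example #1 pairs logging.exception with utils.format_last_exception(), but logging.exception already logs at ERROR level and attaches the active traceback on its own, so the manual formatting duplicates the traceback (Example #2 below sidesteps this by logging at WARNING level instead). A minimal stdlib-only illustration:

import logging

try:
    raise OSError("too many open files")
except OSError:
    # logging.exception logs at ERROR level and appends the current
    # traceback automatically; no manual formatting is needed.
    logging.exception("OSError while queueing scan command")
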
Example #2
def queue(command):
    try:
        return scanner.queue_scan_command(server_info, command)
    except OSError:
        text = ("OSError - likely too many processes and open files.")
        data['errors'].append(text)
        logging.warning("%s\n%s" % (text, utils.format_last_exception()))
        return None, None, None, None, None, None, None
    except Exception:
        text = ("Unknown exception queueing sslyze command.\n%s" % utils.format_last_exception())
        data['errors'].append(text)
        logging.warning(text)
        return None, None, None, None, None, None, None
Example #3
    def gather(self):
        # Defaults to --url, but can be overridden.
        name = self.extra.get("name", "url")
        url = self.options.get(name)

        if url is None:
            logging.warning("A --url is required. (Can be a local path.)")
            exit(1)

        # remote URL
        if url.startswith("http:") or url.startswith("https:"):
            # Though it's saved in cache/, it will be downloaded every time.
            remote_path = os.path.join(self.cache_dir, "url.csv")

            try:
                response = requests.get(url)
                utils.write(response.text, remote_path)
            except Exception:
                logging.error("Remote URL not downloaded successfully.")
                print(utils.format_last_exception())
                exit(1)

        # local path
        else:
            remote_path = url

        for domain in utils.load_domains(remote_path):
            yield domain
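
Note: requests.get has no default timeout, so a stalled server can hang this gatherer indefinitely, and an HTTP 404 still returns a body that would be cached as if it were the CSV. A sketch of a tightened download step (the 30-second timeout is an assumption, not part of the original):

try:
    # Fail fast on a stalled connection, and treat HTTP 4xx/5xx as errors.
    response = requests.get(url, timeout=30)
    response.raise_for_status()
    utils.write(response.text, remote_path)
except requests.RequestException:
    logging.error("Remote URL not downloaded successfully.")
    print(utils.format_last_exception())
    exit(1)
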
Example #4
def scan_serial(scanner, server_info, data, options):

    logging.debug("\tRunning scans in serial.")
    logging.debug("\t\tSSLv2 scan.")
    sslv2 = scanner.run_scan_command(server_info, Sslv20ScanCommand())
    logging.debug("\t\tSSLv3 scan.")
    sslv3 = scanner.run_scan_command(server_info, Sslv30ScanCommand())
    logging.debug("\t\tTLSv1.0 scan.")
    tlsv1 = scanner.run_scan_command(server_info, Tlsv10ScanCommand())
    logging.debug("\t\tTLSv1.1 scan.")
    tlsv1_1 = scanner.run_scan_command(server_info, Tlsv11ScanCommand())
    logging.debug("\t\tTLSv1.2 scan.")
    tlsv1_2 = scanner.run_scan_command(server_info, Tlsv12ScanCommand())
    logging.debug("\t\tTLSv1.3 scan.")
    tlsv1_3 = scanner.run_scan_command(server_info, Tlsv13ScanCommand())

    certs = None
    if options.get("sslyze_certs", True) is True:

        try:
            logging.debug("\t\tCertificate information scan.")
            certs = scanner.run_scan_command(server_info,
                                             CertificateInfoScanCommand())
        # Let generic exceptions bubble up.
        except idna.core.InvalidCodepoint:
            logging.warning(utils.format_last_exception())
            data['errors'].append("Invalid certificate/OCSP for this domain.")
            certs = None
    else:
        certs = None

    logging.debug("\tDone scanning.")

    return sslv2, sslv3, tlsv1, tlsv1_1, tlsv1_2, tlsv1_3, certs
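
Note: scan_serial returns one result per protocol in a fixed order, so callers unpack the tuple positionally. A sketch of the calling side; the accepted_cipher_list attribute is assumed from sslyze 1.x protocol scan results and should be verified against the installed version:

# Sketch: consume scan_serial's fixed-order result tuple.
sslv2, sslv3, tlsv1, tlsv1_1, tlsv1_2, tlsv1_3, certs = scan_serial(
    scanner, server_info, data, options)

# accepted_cipher_list is an assumption (sslyze 1.x naming).
if sslv3 is not None and sslv3.accepted_cipher_list:
    logging.warning("Server still accepts SSLv3 cipher suites.")
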
Example #5
    def gather(self):
        # Defaults to --url, but can be overridden.
        name = self.extra.get("name", "url")
        url = self.options.get(name)

        if url is None:
            logging.warning("A --url is required. (Can be a local path.)")
            exit(1)

        # remote URL
        if url.startswith("http:") or url.startswith("https:"):
            # Though it's saved in cache/, it will be downloaded every time.
            remote_path = os.path.join(self.cache_dir, "url.csv")

            try:
                response = requests.get(url)
                utils.write(response.text, remote_path)
            except Exception:
                logging.error("Remote URL not downloaded successfully.")
                print(utils.format_last_exception())
                exit(1)

        # local path
        else:
            remote_path = url

        for domain in utils.load_domains(remote_path):
            yield domain
Example #6
def init(environment, options):
    global analytics_domains

    analytics_file = options.get("analytics")
    if (not analytics_file) or (not analytics_file.endswith(".csv")):
        no_csv = "--analytics should point to the file path or URL to a CSV of participating domains."
        logging.error(no_csv)
        return False

    # It's a URL, download it first.
    if analytics_file.startswith("http:") or analytics_file.startswith(
            "https:"):

        analytics_path = os.path.join(utils.cache_dir(), "analytics.csv")

        try:
            utils.download(analytics_file, analytics_path)
        except Exception:
            logging.error(utils.format_last_exception())
            no_csv = "--analytics URL not downloaded successfully."
            logging.error(no_csv)
            return False

    # Otherwise, read it off the disk
    else:
        analytics_path = analytics_file

        if (not os.path.exists(analytics_path)):
            no_csv = "--analytics file not found."
            logging.error(no_csv)
            return False

    analytics_domains = utils.load_domains(analytics_path)

    return {'analytics_domains': analytics_domains}
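
Note: the init hook's contract is implicit here: returning False aborts the scan, while a returned dict is merged into the shared environment for later stages. A sketch of how a hypothetical orchestrator would consume it:

# Sketch (the orchestrator is hypothetical; the False-vs-dict contract
# is taken from the init() function above).
environment = {}
extra = init(environment, {"analytics": "https://example.com/domains.csv"})
if extra is False:
    raise SystemExit(1)
environment.update(extra)  # environment['analytics_domains'] is now set
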
Example #7
def scan_serial(scanner, server_info, data, options):
    errors = 0

    def run_scan(scan_type, command, errors):
        if (errors >= 2):
            return None, errors
        logging.debug("\t\t{} scan.".format(scan_type))
        result = None
        try:
            result = scanner.run_scan_command(server_info, command)
        except Exception as err:
            logging.warning("{}: Error during {} scan.".format(
                server_info.hostname, scan_type))
            logging.debug("{}: Exception during {} scan: {}".format(
                server_info.hostname, scan_type, err))
            errors = errors + 1
        return result, errors

    logging.debug("\tRunning scans in serial.")
    sslv2, errors = run_scan("SSLv2", Sslv20ScanCommand(), errors)
    sslv3, errors = run_scan("SSLv3", Sslv30ScanCommand(), errors)
    tlsv1, errors = run_scan("TLSv1.0", Tlsv10ScanCommand(), errors)
    tlsv1_1, errors = run_scan("TLSv1.1", Tlsv11ScanCommand(), errors)
    tlsv1_2, errors = run_scan("TLSv1.2", Tlsv12ScanCommand(), errors)
    tlsv1_3, errors = run_scan("TLSv1.3", Tlsv13ScanCommand(), errors)

    certs = None
    if errors < 2 and options.get("sslyze_certs", True) is True:
        try:
            logging.debug("\t\tCertificate information scan.")
            certs = scanner.run_scan_command(
                server_info, CertificateInfoScanCommand(ca_file=CA_FILE))
        except idna.core.InvalidCodepoint:
            logging.warning(utils.format_last_exception())
            data['errors'].append("Invalid certificate/OCSP for this domain.")
            certs = None
        except Exception as err:
            logging.warning(
                "{}: Error during certificate information scan.".format(
                    server_info.hostname))
            logging.debug(
                "{}: Exception during certificate information scan: {}".format(
                    server_info.hostname, err))
    else:
        certs = None

    reneg = None
    if options.get("sslyze_reneg", True) is True:
        reneg, errors = run_scan("Renegotiation",
                                 SessionRenegotiationScanCommand(), errors)
    else:
        reneg = None

    logging.debug("\tDone scanning.")

    return sslv2, sslv3, tlsv1, tlsv1_1, tlsv1_2, tlsv1_3, certs, reneg
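
Note: the key change in this revision is the two-strike error budget: once two protocol scans have failed, the remaining scans are skipped and yield None immediately rather than hammering an unresponsive host. The pattern in isolation, as a generic sketch (not domain-scan code):

# Generic sketch of the error-budget pattern: stop issuing expensive
# calls after max_errors failures and record None for the rest.
def run_with_budget(tasks, max_errors=2):
    errors = 0
    results = []
    for task in tasks:
        if errors >= max_errors:
            results.append(None)  # budget exhausted: skip without calling
            continue
        try:
            results.append(task())
        except Exception:
            errors += 1
            results.append(None)
    return results
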
Example #8
def handle_scanner_args(args, opts) -> Tuple[dict, list]:
    """
    --analytics: file path or URL to a CSV of participating domains.

    This function also handles checking for the existence of the file,
    downloading it successfully, and reading the file in order to populate the
    list of analytics domains.
    """
    parser = scan_utils.ArgumentParser(prefix_chars="--")
    parser.add_argument("--analytics", nargs=1, required=True)
    parsed, unknown = parser.parse_known_args(args)
    dicted = vars(parsed)
    should_be_single = ["analytics"]
    dicted = scan_utils.make_values_single(dicted, should_be_single)
    resource = dicted.get("analytics")
    if not resource.endswith(".csv"):
        no_csv = "".join([
            "--analytics should be the file path or URL to a CSV of participating",
            " domains and end with .csv, which '%s' does not" % resource
        ])
        logging.error(no_csv)
        raise argparse.ArgumentTypeError(no_csv)
    parsed_url = urlparse(resource)
    if parsed_url.scheme and parsed_url.scheme in ("http", "https"):
        analytics_path = Path(opts["_"]["cache_dir"],
                              "analytics.csv").resolve()
        try:
            utils.download(resource, str(analytics_path))
        except Exception:
            logging.error(utils.format_last_exception())
            no_csv = "--analytics URL %s not downloaded successfully." % resource
            logging.error(no_csv)
            raise argparse.ArgumentTypeError(no_csv)
    else:
        if not os.path.exists(resource):
            no_csv = "--analytics file %s not found." % resource
            logging.error(no_csv)
            raise FileNotFoundError(no_csv)
        else:
            analytics_path = resource

    analytics_domains = utils.load_domains(analytics_path)
    dicted["analytics_domains"] = analytics_domains
    del dicted["analytics"]

    return (dicted, unknown)
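
Note: a sketch of the calling convention, with hypothetical values; opts must carry the cache directory under opts["_"]["cache_dir"], and the returned dict replaces the raw --analytics value with the loaded domain list:

# Sketch (values are hypothetical).
opts = {"_": {"cache_dir": "./cache"}}
scanner_opts, leftover = handle_scanner_args(
    ["--analytics", "./analytics.csv"], opts)
# scanner_opts["analytics_domains"] now holds the loaded domains,
# and the raw "analytics" key has been removed.
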
Example #9
def scan_serial(scanner, server_info, data, options):
    errors = 0

    def run_scan(scan_type, command, errors):
        if (errors >= 2):
            return None, errors
        logging.debug("\t\t{} scan.".format(scan_type))
        result = None
        try:
            result = scanner.run_scan_command(server_info, command)
        except Exception as err:
            logging.warning("{}: Error during {} scan.".format(server_info.hostname, scan_type))
            logging.debug("{}: Exception during {} scan: {}".format(server_info.hostname, scan_type, err))
            errors = errors + 1
        return result, errors

    logging.debug("\tRunning scans in serial.")
    sslv2, errors = run_scan("SSLv2", Sslv20ScanCommand(), errors)
    sslv3, errors = run_scan("SSLv3", Sslv30ScanCommand(), errors)
    tlsv1, errors = run_scan("TLSv1.0", Tlsv10ScanCommand(), errors)
    tlsv1_1, errors = run_scan("TLSv1.1", Tlsv11ScanCommand(), errors)
    tlsv1_2, errors = run_scan("TLSv1.2", Tlsv12ScanCommand(), errors)
    tlsv1_3, errors = run_scan("TLSv1.3", Tlsv13ScanCommand(), errors)

    certs = None
    if errors < 2 and options.get("sslyze_certs", True) is True:
        try:
            logging.debug("\t\tCertificate information scan.")
            certs = scanner.run_scan_command(server_info, CertificateInfoScanCommand(ca_file=CA_FILE))
        except idna.core.InvalidCodepoint:
            logging.warning(utils.format_last_exception())
            data['errors'].append("Invalid certificate/OCSP for this domain.")
            certs = None
        except Exception as err:
            logging.warning("{}: Error during certificate information scan.".format(server_info.hostname))
            logging.debug("{}: Exception during certificate information scan: {}".format(server_info.hostname, err))
    else:
        certs = None

    reneg = None
    if options.get("sslyze_reneg", True) is True:
        reneg, errors = run_scan("Renegotiation", SessionRenegotiationScanCommand(), errors)
    else:
        reneg = None

    logging.debug("\tDone scanning.")

    return sslv2, sslv3, tlsv1, tlsv1_1, tlsv1_2, tlsv1_3, certs, reneg
Example #10
def handle_scanner_args(args, opts) -> Tuple[dict, list]:
    """
    --analytics: file path or URL to a CSV of participating domains.

    This function also handles checking for the existence of the file,
    downloading it successfully, and reading the file in order to populate the
    list of analytics domains.
    """
    parser = scan_utils.ArgumentParser(prefix_chars="--")
    parser.add_argument("--analytics", nargs=1, required=True)
    parsed, unknown = parser.parse_known_args(args)
    dicted = vars(parsed)
    should_be_single = ["analytics"]
    dicted = scan_utils.make_values_single(dicted, should_be_single)
    resource = dicted.get("analytics")
    if not resource.endswith(".csv"):
        no_csv = "".join([
            "--analytics should be the file path or URL to a CSV of participating",
            " domains and end with .csv, which '%s' does not" % resource
        ])
        logging.error(no_csv)
        raise argparse.ArgumentTypeError(no_csv)
    parsed_url = urlparse(resource)
    if parsed_url.scheme and parsed_url.scheme in ("http", "https"):
        analytics_path = Path(opts["_"]["cache_dir"], "analytics.csv").resolve()
        try:
            utils.download(resource, str(analytics_path))
        except Exception:
            logging.error(utils.format_last_exception())
            no_csv = "--analytics URL %s not downloaded successfully." % resource
            logging.error(no_csv)
            raise argparse.ArgumentTypeError(no_csv)
    else:
        if not os.path.exists(resource):
            no_csv = "--analytics file %s not found." % resource
            logging.error(no_csv)
            raise FileNotFoundError(no_csv)
        else:
            analytics_path = resource

    analytics_domains = utils.load_domains(analytics_path)
    dicted["analytics_domains"] = analytics_domains
    del dicted["analytics"]

    return (dicted, unknown)
Example #11
def gather(suffixes, options, extra={}):

    # Returns a parsed, processed Google service credentials object.
    credentials = load_credentials()

    if credentials is None:
        logging.warning("No BigQuery credentials provided.")
        logging.warning("Set BIGQUERY_CREDENTIALS or BIGQUERY_CREDENTIALS_PATH environment variables.")
        exit(1)

    # When using this form of instantiation, the client won't pull
    # the project_id out of the creds, has to be set explicitly.
    client = bigquery.Client(
        project=credentials.project_id,
        credentials=credentials
    )

    # Allow override of default timeout (in seconds).
    timeout = int(options.get("timeout", default_timeout))

    # Construct the query.
    query = query_for(suffixes)
    logging.debug("Censys query:\n%s\n" % query)

    # Plan to store in cache/censys/export.csv.
    download_path = utils.cache_path("export", "censys", ext="csv")

    # Reuse of cached data can be turned on with --cache.
    cache = options.get("cache", False)
    if (cache is True) and os.path.exists(download_path):
        logging.warning("Using cached download data.")

    # But by default, fetch new data from the BigQuery API,
    # and write it to the expected download location.
    else:
        logging.warning("Kicking off SQL query job.")

        rows = None

        # Actually execute the query.
        try:
            # Executes query and loads all results into memory.
            query_job = client.query(query)
            iterator = query_job.result(timeout=timeout)
            rows = list(iterator)
        except google.api_core.exceptions.Forbidden:
            logging.warning("Access denied to Censys' BigQuery tables.")
        except Exception:
            logging.warning(utils.format_last_exception())
            logging.warning("Error talking to BigQuery, aborting.")

        # If the query failed, rows is still None; abort rather than
        # crash while writing the CSV below.
        if rows is None:
            exit(1)

        # At this point, the job is complete; cache the resulting rows
        # as a local CSV.
        logging.warning("Caching results of SQL query.")

        download_file = open(download_path, 'w', newline='')
        download_writer = csv.writer(download_file)
        download_writer.writerow(["Domain"])  # will be skipped on read

        # Parse the rows and write them out as they were returned (dupes
        # and all), to be de-duped by the central gathering script.
        for row in rows:
            domains = row['common_name'] + row['dns_names']
            for domain in domains:
                download_writer.writerow([domain])

        # End CSV writing.
        download_file.close()

    # Whether we downloaded it fresh or not, read from the cached data.
    for domain in utils.load_domains(download_path):
        if domain:
            yield domain
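
Note: list(iterator) materializes the entire result set in memory; for large certificate tables, the same google-cloud-bigquery calls allow streaming rows straight into the CSV instead. A sketch using only names from the example above:

# Sketch: stream query rows to the CSV rather than materializing them.
query_job = client.query(query)
with open(download_path, 'w', newline='') as download_file:
    download_writer = csv.writer(download_file)
    download_writer.writerow(["Domain"])  # will be skipped on read
    for row in query_job.result(timeout=timeout):
        for domain in row['common_name'] + row['dns_names']:
            download_writer.writerow([domain])
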
Example #12
def scan_parallel(scanner, server_info, data, options):
    logging.debug("\tRunning scans in parallel.")

    def queue(command):
        try:
            return scanner.queue_scan_command(server_info, command)
        except OSError:
            text = ("OSError - likely too many processes and open files.")
            data['errors'].append(text)
            logging.warning("%s\n%s" % (text, utils.format_last_exception()))
            return None, None, None, None, None, None, None
        except Exception:
            text = ("Unknown exception queueing sslyze command.\n%s" %
                    utils.format_last_exception())
            data['errors'].append(text)
            logging.warning(text)
            return None, None, None, None, None, None, None

    # Initialize commands and result containers
    sslv2, sslv3, tlsv1, tlsv1_1, tlsv1_2, tlsv1_3, certs = None, None, None, None, None, None, None

    # Queue them all up
    queue(Sslv20ScanCommand())
    queue(Sslv30ScanCommand())
    queue(Tlsv10ScanCommand())
    queue(Tlsv11ScanCommand())
    queue(Tlsv12ScanCommand())
    queue(Tlsv13ScanCommand())

    if options.get("sslyze-certs", True) is True:
        queue(CertificateInfoScanCommand())

    # Reassign them back to predictable places after they're all done
    was_error = False
    for result in scanner.get_results():
        try:
            if isinstance(result, PluginRaisedExceptionScanResult):
                error = ("Scan command failed: %s" % result.as_text())
                logging.warning(error)
                data['errors'].append(error)
                return None, None, None, None, None, None, None

            if type(result.scan_command) == Sslv20ScanCommand:
                sslv2 = result
            elif type(result.scan_command) == Sslv30ScanCommand:
                sslv3 = result
            elif type(result.scan_command) == Tlsv10ScanCommand:
                tlsv1 = result
            elif type(result.scan_command) == Tlsv11ScanCommand:
                tlsv1_1 = result
            elif type(result.scan_command) == Tlsv12ScanCommand:
                tlsv1_2 = result
            elif type(result.scan_command) == Tlsv13ScanCommand:
                tlsv1_3 = result
            elif type(result.scan_command) == CertificateInfoScanCommand:
                certs = result
            else:
                error = "Couldn't match scan result with command! %s" % result
                logging.warning("\t%s" % error)
                data['errors'].append(error)
                was_error = True

        except Exception:
            was_error = True
            text = ("Exception inside async scanner result processing.\n%s" %
                    utils.format_last_exception())
            data['errors'].append(text)
            logging.warning("\t%s" % text)

    # There was an error during async processing.
    if was_error:
        return None, None, None, None, None, None, None

    logging.debug("\tDone scanning.")

    return sslv2, sslv3, tlsv1, tlsv1_1, tlsv1_2, tlsv1_3, certs
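
Note: the if/elif chain that routes each result to its slot is a lookup table in disguise; an equivalent dict-based dispatch, as a behavior-preserving sketch using the names from the example above:

# Sketch: map command classes to result slots instead of an if/elif chain.
slots = {
    Sslv20ScanCommand: 'sslv2',
    Sslv30ScanCommand: 'sslv3',
    Tlsv10ScanCommand: 'tlsv1',
    Tlsv11ScanCommand: 'tlsv1_1',
    Tlsv12ScanCommand: 'tlsv1_2',
    Tlsv13ScanCommand: 'tlsv1_3',
    CertificateInfoScanCommand: 'certs',
}
results = dict.fromkeys(slots.values())
for result in scanner.get_results():
    key = slots.get(type(result.scan_command))
    if key is not None:
        results[key] = result
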
Example #13
    def gather(self):

        # Returns a parsed, processed Google service credentials object.
        credentials = load_credentials()

        if credentials is None:
            logging.warning("No BigQuery credentials provided.")
            logging.warning("Set BIGQUERY_CREDENTIALS or BIGQUERY_CREDENTIALS_PATH environment variables.")
            exit(1)

        # When using this form of instantiation, the client won't pull
        # the project_id out of the creds, has to be set explicitly.
        client = bigquery.Client(
            project=credentials.project_id,
            credentials=credentials
        )

        # Allow override of default timeout (in seconds).
        timeout = int(self.options.get("timeout", default_timeout))

        # Construct the query.
        query = query_for(self.suffixes)
        logging.debug("Censys query:\n%s\n" % query)

        # Plan to store in cache/censys/export.csv.
        download_path = utils.cache_path(
            "export", "censys", ext="csv", cache_dir=self.cache_dir)

        # Reuse of cached data can be turned on with --cache.
        cache = self.options.get("cache", False)
        if (cache is True) and os.path.exists(download_path):
            logging.warning("Using cached download data.")

        # But by default, fetch new data from the BigQuery API,
        # and write it to the expected download location.
        else:
            # Ensure cache destination exists.
            utils.mkdir_p(os.path.dirname(download_path))

            logging.warning("Kicking off SQL query job.")

            rows = None

            # Actually execute the query.
            try:
                # Executes query and loads all results into memory.
                query_job = client.query(query)
                iterator = query_job.result(timeout=timeout)
                rows = list(iterator)
            except google.api_core.exceptions.Forbidden:
                logging.warning("Access denied to Censys' BigQuery tables.")
            except Exception:
                logging.warning(utils.format_last_exception())
                logging.warning("Error talking to BigQuery, aborting.")

            # If the query failed, rows is still None; abort rather than
            # crash while writing the CSV below.
            if rows is None:
                exit(1)

            # At this point, the job is complete; cache the resulting rows
            # as a local CSV.
            logging.warning("Caching results of SQL query.")

            download_file = open(download_path, 'w', newline='')
            download_writer = csv.writer(download_file)
            download_writer.writerow(["Domain"])  # will be skipped on read

            # Parse the rows and write them out as they were returned (dupes
            # and all), to be de-duped by the central gathering script.
            for row in rows:
                domains = row['common_name'] + row['dns_names']
                for domain in domains:
                    download_writer.writerow([domain])

            # End CSV writing.
            download_file.close()

        # Whether we downloaded it fresh or not, read from the cached data.
        for domain in utils.load_domains(download_path):
            if domain:
                yield domain
Example #14
def scan_parallel(scanner, server_info, data, options):
    logging.debug("\tRunning scans in parallel.")

    def queue(command):
        try:
            return scanner.queue_scan_command(server_info, command)
        except OSError:
            text = ("OSError - likely too many processes and open files.")
            data['errors'].append(text)
            logging.warning("%s\n%s" % (text, utils.format_last_exception()))
            return None, None, None, None, None, None, None, None
        except Exception:
            text = ("Unknown exception queueing sslyze command.\n%s" % utils.format_last_exception())
            data['errors'].append(text)
            logging.warning(text)
            return None, None, None, None, None, None, None, None

    # Initialize commands and result containers
    sslv2, sslv3, tlsv1, tlsv1_1, tlsv1_2, tlsv1_3, certs, reneg = None, None, None, None, None, None, None, None

    # Queue them all up
    queue(Sslv20ScanCommand())
    queue(Sslv30ScanCommand())
    queue(Tlsv10ScanCommand())
    queue(Tlsv11ScanCommand())
    queue(Tlsv12ScanCommand())
    queue(Tlsv13ScanCommand())

    if options.get("sslyze-certs", True) is True:
        queue(CertificateInfoScanCommand())

    if options.get("sslyze-reneg", True) is True:
        queue(SessionRenegotiationScanCommand())

    # Reassign them back to predictable places after they're all done
    was_error = False
    for result in scanner.get_results():
        try:
            if isinstance(result, PluginRaisedExceptionScanResult):
                error = ("Scan command failed: %s" % result.as_text())
                logging.warning(error)
                data['errors'].append(error)
                return None, None, None, None, None, None, None, None

            if type(result.scan_command) == Sslv20ScanCommand:
                sslv2 = result
            elif type(result.scan_command) == Sslv30ScanCommand:
                sslv3 = result
            elif type(result.scan_command) == Tlsv10ScanCommand:
                tlsv1 = result
            elif type(result.scan_command) == Tlsv11ScanCommand:
                tlsv1_1 = result
            elif type(result.scan_command) == Tlsv12ScanCommand:
                tlsv1_2 = result
            elif type(result.scan_command) == Tlsv13ScanCommand:
                tlsv1_3 = result
            elif type(result.scan_command) == CertificateInfoScanCommand:
                certs = result
            elif type(result.scan_command) == SessionRenegotiationScanCommand:
                reneg = result
            else:
                error = "Couldn't match scan result with command! %s" % result
                logging.warning("\t%s" % error)
                data['errors'].append(error)
                was_error = True

        except Exception:
            was_error = True
            text = ("Exception inside async scanner result processing.\n%s" % utils.format_last_exception())
            data['errors'].append(text)
            logging.warning("\t%s" % text)

    # There was an error during async processing.
    if was_error:
        return None, None, None, None, None, None, None, None

    logging.debug("\tDone scanning.")

    return sslv2, sslv3, tlsv1, tlsv1_1, tlsv1_2, tlsv1_3, certs, reneg
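
Note: both parallel variants match results with type(result.scan_command) == SomeCommand, which matches that exact class only; isinstance would also accept subclasses. The exact-class comparison keeps each result in exactly one slot. A minimal illustration:

class Base: pass
class Child(Base): pass

obj = Child()
print(type(obj) == Base)      # False: exact-class comparison only
print(isinstance(obj, Base))  # True: subclasses also match
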