Code example #1
 def __init__(self, api_id: str = "", api_secret: str = ""):
     """Initialize one client per Censys API surface.

     Credentials are stored only when an API id is supplied; otherwise
     both ``api_id`` and ``api_secret`` stay empty strings (the clients
     are still constructed, just unauthenticated).
     """
     self.api_id, self.api_secret = "", ""
     if api_id:
         self.api_id, self.api_secret = api_id, api_secret
     # Every client shares the same (possibly empty) credential pair.
     self.ipv4 = ipv4.CensysIPv4(self.api_id, self.api_secret)
     self.websites = websites.CensysWebsites(self.api_id, self.api_secret)
     self.certificates = certificates.CensysCertificates(
         self.api_id, self.api_secret)
     self.export = export.CensysExport(self.api_id, self.api_secret)
Code example #2
def export_mode(suffix, options, uid, api_key):
    """Gather hostnames for *suffix* via the Censys SQL export API.

    Kicks off a BigQuery export job over the certificates dataset, polls
    until it completes, downloads the resulting CSV (cached on disk unless
    options["force"] is set), and returns a de-duped map of sanitized
    hostnames.

    Args:
        suffix: Domain suffix to match against certificate names
            (common name and subject alt names).
        options: Dict of settings; honors "timeout" (seconds) and "force".
        uid: Censys API id.
        api_key: Censys API secret.

    Returns:
        Dict mapping each sanitized hostname to None (used as an ordered set).

    Exits the process on authentication failure, job error/expiry/timeout,
    or any other Censys API error.
    """
    # Cache hostnames in a dict for de-duping.
    hostnames_map = {}

    # Default timeout is 20 hours (60 * 60 * 20 seconds) — export jobs over
    # the full certificates dataset can run a very long time.
    # (The original comment claimed "20 minutes"; the code has always
    # defaulted to 20 hours.)
    timeout = int(options.get("timeout", (60 * 60 * 20)))

    # Wait 5 seconds between checking on the job.
    between_jobs = 5

    try:
        export_api = export.CensysExport(uid, api_key)
    except censys.base.CensysUnauthorizedException:
        logging.warning(
            "The Censys.io Export API rejected the provided Censys credentials. The credentials may be inaccurate, or you may need to request access from the Censys.io team."
        )
        exit(1)

    # Uses a FLATTEN command in order to work around a BigQuery
    # error around multiple "repeated" fields. *shrug*
    query = "SELECT parsed.subject.common_name, parsed.extensions.subject_alt_name.dns_names from FLATTEN([certificates.certificates], parsed.extensions.subject_alt_name.dns_names) where parsed.subject.common_name LIKE \"%%%s\" OR parsed.extensions.subject_alt_name.dns_names LIKE \"%%%s\";" % (
        suffix, suffix)
    logging.debug("Censys query:\n%s\n" % query)

    download_file = utils.cache_path("export", "censys", ext="csv")

    force = options.get("force", False)

    if (force is False) and os.path.exists(download_file):
        logging.warning("Using cached download data.")
    else:
        logging.warning("Kicking off SQL query job.")
        results_url = None

        try:
            job = export_api.new_job(query, format='csv', flatten=True)
            job_id = job['job_id']

            started = datetime.datetime.now()
            while True:
                elapsed = (datetime.datetime.now() - started).seconds

                status = export_api.check_job(job_id)
                if status['status'] == 'error':
                    logging.warning("Error from Censys: %s" % status['error'])
                    exit(1)

                # Not expected, but better to explicitly handle.
                elif status['status'] == 'expired':
                    logging.warning("Results are somehow expired, bailing.")
                    exit(1)

                elif status['status'] == 'pending':
                    logging.debug("[%is] Job still pending." % elapsed)
                    time.sleep(between_jobs)

                elif status['status'] == 'success':
                    logging.warning("[%is] Job complete!" % elapsed)
                    results_url = status['download_paths'][0]
                    break

                if (elapsed > timeout):
                    logging.warning("Timeout waiting for job to complete.")
                    exit(1)

        except censys.base.CensysException:
            logging.warning(utils.format_last_exception())
            logging.warning("Censys error, aborting.")
            # BUG FIX: previously this fell through and called
            # utils.download(None, ...) with no results URL. Actually abort.
            exit(1)

        # At this point, the job is complete and we need to download
        # the resulting CSV URL in results_url.
        logging.warning("Downloading results of SQL query.")
        utils.download(results_url, download_file)

    # Read in downloaded CSV file, run any hostnames in each line
    # through the sanitizer, and de-dupe using the map.
    with open(download_file, newline='') as csvfile:
        for row in csv.reader(csvfile):
            # Skip blank leading cells and the CSV header row.
            if (not row[0]) or (
                    row[0].lower().startswith("parsed_subject_common_name")):
                continue

            # Column 0: common name; column 1: subject alt names.
            names = [row[0].lower(), row[1].lower()]

            for name in names:
                if name:
                    hostnames_map[sanitize_name(name)] = None

    return hostnames_map