Example #1
0
    def get_subdomains(self, conf, domain, verbose, only_sub=False):
        """
        Collect host names for *domain* from Censys certificate search.

        Inspired from https://github.com/appsecco/the-art-of-subdomain-enumeration/blob/master/censys_subdomain_enum.py

        :param conf: configuration mapping holding Censys credentials under
            conf['Censys']['id'] and conf['Censys']['secret']
        :param domain: base domain whose certificates are searched
        :param verbose: if True, print each matching certificate's subject
            DN and SHA-256 fingerprint
        :param only_sub: if True, keep only names ending with *domain*
        :return: set of host names extracted from matching certificates
        """
        api = certificates.CensysCertificates(conf['Censys']['id'],
                                              conf['Censys']['secret'])
        subdomains = set()
        for cert in api.search(domain):
            subject_dn = cert['parsed.subject_dn']
            if not subject_dn.endswith(domain):
                continue
            if verbose:
                print('Certificate : %s - %s' %
                      (subject_dn,
                       cert['parsed.fingerprint_sha256']))
            # BUG FIX: a subject DN without a 'CN=' component used to raise
            # IndexError on split()[1]; skip the CN extraction in that case.
            cn_parts = subject_dn.split('CN=')
            if len(cn_parts) > 1:
                subdomains.add(cn_parts[1])
            c = api.view(cert['parsed.fingerprint_sha256'])
            try:
                for name in c['parsed']['names']:
                    if not only_sub or name.endswith(domain):
                        subdomains.add(name)
            except KeyError:
                # Some certificates carry no 'names' entry; best-effort skip.
                pass

        return subdomains
Example #2
0
 def __init__(self, api_id: str = "", api_secret: str = ""):
     """Build one client per Censys API family with the given credentials.

     When *api_id* is falsy, both credentials fall back to empty strings
     (anonymous / unauthenticated client construction).
     """
     if api_id:
         self.api_id, self.api_secret = api_id, api_secret
     else:
         self.api_id = self.api_secret = ""
     # One wrapper object per Censys API area.
     self.ipv4 = ipv4.CensysIPv4(self.api_id, self.api_secret)
     self.websites = websites.CensysWebsites(self.api_id, self.api_secret)
     self.certificates = certificates.CensysCertificates(
         self.api_id, self.api_secret)
     self.export = export.CensysExport(self.api_id, self.api_secret)
Example #3
0
 def run(self, conf, args, plugins):
     """Dispatch the sub-commands: ip / cert / subdomains / account.

     Falls back to printing the argparse help text when no (or an
     unknown) sub-command is given.
     """
     if 'subcommand' not in args:
         self.parser.print_help()
         return
     if args.subcommand == 'ip':
         api = ipv4.CensysIPv4(conf['Censys']['id'],
                               conf['Censys']['secret'])
         if args.search:
             for r in api.search(args.IP):
                 # Shorter addresses get an extra tab so columns align.
                 pad = "\t" if len(r['ip']) > 11 else "\t\t"
                 print("[+] %s%s[Location: %s] [Ports: %s]" %
                       (r['ip'], pad, r['location.country'],
                        " ".join(r['protocols'])))
         else:
             try:
                 ip = api.view(args.IP)
             except censys.base.CensysNotFoundException:
                 print('IP not found')
             else:
                 print(
                     json.dumps(ip,
                                sort_keys=True,
                                indent=4,
                                separators=(',', ': ')))
     elif args.subcommand == 'cert':
         try:
             c = certificates.CensysCertificates(
                 conf['Censys']['id'], conf['Censys']['secret'])
             res = c.view(args.ID)
         except censys.base.CensysNotFoundException:
             print("Certificate not found")
         else:
             print(
                 json.dumps(res,
                            sort_keys=True,
                            indent=4,
                            separators=(',', ': ')))
     elif args.subcommand == 'subdomains':
         for d in self.get_subdomains(conf, args.DOMAIN, args.verbose):
             print(d)
     elif args.subcommand == 'account':
         api = ipv4.CensysIPv4(conf['Censys']['id'],
                               conf['Censys']['secret'])
         # Dump account data as pretty-printed JSON.
         print(json.dumps(api.account(), sort_keys=True, indent=4))
     else:
         self.parser.print_help()
Example #4
0
    def __init__(self, domain=None, ip=None):
        """
        Initialize from a domain name and/or an IP address.

        At least one of *domain* and *ip* must be given; whichever is
        missing is derived from the other.

        :param domain: host name; resolved to an IP when *ip* is None
        :param ip: IP address; reverse-resolved to a domain when
            *domain* is None
        :raises ValueError: if both *domain* and *ip* are None
        """
        if domain is None and ip is None:
            # BUG FIX: was `raise Exception('TODO: error')`. ValueError is
            # still an Exception subclass, so callers catching Exception
            # keep working.
            raise ValueError('Either a domain or an IP address is required')

        self.ip = ip
        if ip is None:
            self.ip = gethostbyname(domain)

        self.domain = domain
        if self.domain is None:
            # NOTE(review): no call parentheses here — this binds the
            # attribute itself; only correct if ip_reverse_find_domain is a
            # property. TODO confirm against the class definition.
            self.domain = self.ip_reverse_find_domain

        # Lazily-populated caches.
        self.__lookup = None
        self.__sub_domain = None

        self._certificates = certificates.CensysCertificates(self.UID, self.SECRET)
Example #5
0
def paginated_mode(suffix, options, uid, api_key):
    """
    Page through Censys certificate search results for *suffix* and return
    a dict whose keys are the de-duplicated, sanitized host names found
    (values are always None; the dict is used purely for de-duping).

    :param suffix: domain suffix used to build the default query
    :param options: dict of knobs — 'query', 'delay', 'start', 'end', 'force'
    :param uid: Censys API UID
    :param api_key: Censys API secret
    """
    # Cache hostnames in a dict for de-duping.
    hostnames_map = {}

    certificate_api = certificates.CensysCertificates(uid, api_key)

    if 'query' in options and options['query']:
        query = options['query']
    else:
        query = "parsed.subject.common_name:\"%s\" or parsed.extensions.subject_alt_name.dns_names:\"%s\"" % (
            suffix, suffix)
    logging.debug("Censys query:\n%s\n" % query)

    # time to sleep between requests (defaults to 5s)
    delay = int(options.get("delay", 5))

    # Censys page size, fixed
    page_size = 100

    # Start page defaults to 1.
    start_page = int(options.get("start", 1))

    # End page defaults to whatever the API says is the last one.
    end_page = options.get("end", None)
    if end_page is None:
        end_page = get_end_page(query, certificate_api)
        if end_page is None:
            logging.warning("Error looking up number of pages.")
            exit(1)
    else:
        end_page = int(end_page)

    max_records = ((end_page - start_page) + 1) * page_size

    fields = [
        "parsed.subject.common_name",
        "parsed.extensions.subject_alt_name.dns_names"
    ]

    current_page = start_page

    logging.warning("Fetching up to %i records, starting at page %i." %
                    (max_records, start_page))
    last_cached = False
    force = options.get("force", False)

    while current_page <= end_page:
        # Sleep between real API hits; skip the delay right after a page
        # that came from the local cache.
        if (not last_cached) and (current_page > start_page):
            logging.debug("(Waiting %is before fetching page %i.)" %
                          (delay, current_page))
            time.sleep(delay)
        # BUG FIX: reset on every iteration. Previously this was only set
        # inside the branch above, so once a page was cached the delay was
        # skipped for every subsequent (un-cached) page as well.
        last_cached = False

        logging.debug("Fetching page %i." % current_page)

        cache_page = utils.cache_path(str(current_page), "censys")
        if (force is False) and (os.path.exists(cache_page)):
            logging.warning("\t[%i] Cached page." % current_page)
            last_cached = True

            with open(cache_page) as f:
                certs = json.loads(f.read())
            if (certs.__class__ is dict) and certs.get('invalid'):
                # BUG FIX: advance the page before skipping, otherwise this
                # `continue` re-reads the same invalid page forever.
                current_page += 1
                continue
        else:
            try:
                certs = list(
                    certificate_api.search(query,
                                           fields=fields,
                                           page=current_page,
                                           max_records=page_size))
                utils.write(utils.json_for(certs), cache_page)
            except censys.base.CensysException:
                logging.warning(utils.format_last_exception())
                logging.warning("Censys error, skipping page %i." % current_page)
                utils.write(utils.invalid({}), cache_page)
                # BUG FIX: advance the page before skipping, otherwise the
                # next iteration reads the invalid cache just written and
                # the loop never terminates.
                current_page += 1
                continue
            except Exception:
                logging.warning(utils.format_last_exception())
                logging.warning("Unexpected error, skipping page %i." %
                                current_page)
                utils.write(utils.invalid({}), cache_page)
                exit(1)

        for cert in certs:
            # Common name + SANs
            names = cert.get('parsed.subject.common_name', []) + cert.get(
                'parsed.extensions.subject_alt_name.dns_names', [])
            logging.debug(names)

            for name in names:
                hostnames_map[sanitize_name(name)] = None

        current_page += 1

    logging.debug("Done fetching from API.")

    return hostnames_map
Example #6
0
def gather(suffix, options):
    """
    Yield de-duplicated host names for *suffix* gathered from the Censys
    certificate search API.

    Credentials come from options['censys_id'] / options['censys_key'],
    falling back to the CENSYS_UID / CENSYS_API_KEY environment variables;
    the process exits with status 1 when neither is available.

    :param suffix: domain suffix used to build the certificate query
    :param options: dict of knobs — 'censys_id', 'censys_key', 'delay',
        'start', 'end', 'force'
    """
    # Register a (free) Censys.io account to get a UID and API key.
    uid = options.get("censys_id", None)
    api_key = options.get("censys_key", None)

    if (uid is None) or (api_key is None):
        uid = os.environ.get("CENSYS_UID", None)
        api_key = os.environ.get("CENSYS_API_KEY", None)

    if (uid is None) or (api_key is None):
        logging.warning(
            "No Censys credentials set. API key required to use the Censys API."
        )
        exit(1)

    certificate_api = certificates.CensysCertificates(uid, api_key)

    query = "parsed.subject.common_name:\"%s\" or parsed.extensions.subject_alt_name.dns_names:\"%s\"" % (
        suffix, suffix)
    logging.debug("Censys query:\n%s\n" % query)

    # Hostnames beginning with a wildcard prefix will have the prefix stripped.
    # Raw strings avoid the invalid-escape-sequence SyntaxWarning on
    # modern Python.
    wildcard_pattern = re.compile(r"^\*\.")
    redacted_pattern = re.compile(r"^(\?\.)+")

    # time to sleep between requests (defaults to 5s)
    delay = int(options.get("delay", 5))

    # Censys page size, fixed
    page_size = 100

    # Start page defaults to 1.
    start_page = int(options.get("start", 1))

    # End page defaults to whatever the API says is the last one.
    end_page = options.get("end", None)
    if end_page is None:
        end_page = get_end_page(query, certificate_api)
        if end_page is None:
            logging.warning("Error looking up number of pages.")
            exit(1)
    else:
        end_page = int(end_page)

    max_records = ((end_page - start_page) + 1) * page_size

    # Cache hostnames in a dict for de-duping.
    hostnames_map = {}

    fields = [
        "parsed.subject.common_name",
        "parsed.extensions.subject_alt_name.dns_names"
    ]

    current_page = start_page

    logging.warning("Fetching up to %i records, starting at page %i." %
                    (max_records, start_page))
    last_cached = False
    force = options.get("force", False)

    while current_page <= end_page:
        # Sleep between real API hits; skip the delay right after a page
        # that came from the local cache.
        if (not last_cached) and (current_page > start_page):
            logging.debug("(Waiting %is before fetching page %i.)" %
                          (delay, current_page))
            time.sleep(delay)
        # BUG FIX: reset on every iteration. Previously this was only set
        # inside the branch above, so once a page was cached the delay was
        # skipped for every subsequent (un-cached) page as well.
        last_cached = False

        logging.debug("Fetching page %i." % current_page)

        cache_page = utils.cache_path(str(current_page), "censys")
        if (force is False) and (os.path.exists(cache_page)):
            logging.warning("\t[%i] Cached page." % current_page)
            last_cached = True

            with open(cache_page) as f:
                certs = json.loads(f.read())
            if (certs.__class__ is dict) and certs.get('invalid'):
                # BUG FIX: advance the page before skipping, otherwise this
                # `continue` re-reads the same invalid page forever.
                current_page += 1
                continue
        else:
            try:
                certs = list(
                    certificate_api.search(query,
                                           fields=fields,
                                           page=current_page,
                                           max_records=page_size))
                utils.write(utils.json_for(certs), cache_page)
            except censys.base.CensysException:
                logging.warning(utils.format_last_exception())
                logging.warning("Censys error, skipping page %i." % current_page)
                utils.write(utils.invalid({}), cache_page)
                # BUG FIX: advance the page before skipping, otherwise the
                # next iteration reads the invalid cache just written and
                # the loop never terminates.
                current_page += 1
                continue
            except Exception:
                logging.warning(utils.format_last_exception())
                logging.warning("Unexpected error, skipping page %i." %
                                current_page)
                utils.write(utils.invalid({}), cache_page)
                exit(1)

        for cert in certs:
            # Common name + SANs
            names = cert.get('parsed.subject.common_name', []) + cert.get(
                'parsed.extensions.subject_alt_name.dns_names', [])
            logging.debug(names)

            for name in names:
                # Strip off any wildcard prefix.
                name = re.sub(wildcard_pattern, '', name).lower().strip()
                # Strip off any redacted ? prefixes. (Ugh.)
                name = re.sub(redacted_pattern, '', name).lower().strip()
                hostnames_map[name] = None

        current_page += 1

    logging.debug("Done fetching from API.")

    # Iterator doesn't buy much efficiency, since we paginated already.
    # Necessary evil to de-dupe before returning hostnames, though.
    for hostname in hostnames_map.keys():
        yield hostname