def scan(domain, environment, options):
    full_command = [
        command,
        domain,
        '--json',
        '--timeout', str(timeout),
        # Use Google DNS
        '--dns-hostnames', '8.8.8.8,8.8.4.4'
    ]

    if options.get("debug", False):
        full_command.append("--debug")

    raw = utils.scan(full_command)

    if not raw:
        logging.warn("\ttrustymail command failed, skipping.")
        return None

    data = json.loads(raw)

    # trustymail uses JSON arrays, even for single items.
    data = data[0]

    return data
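# A minimal sketch of the `utils.scan` helper these scanners all call,
# inferred only from its call sites in this collection: it takes an argv
# list, optionally an `env` mapping and a list of `allowed_return_codes`,
# and returns stdout as a string, or None on failure. The real helper in
# this codebase may differ.
import subprocess

def scan_sketch(scan_command, env=None, allowed_return_codes=None):
    allowed = allowed_return_codes or []
    try:
        result = subprocess.run(
            scan_command, env=env,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
    except OSError:
        # Command missing or not executable.
        return None
    if (result.returncode == 0) or (result.returncode in allowed):
        return result.stdout.decode("utf-8")
    return None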
def scan(domain, options): logging.debug("[%s][pageload]" % domain) inspection = utils.data_for(domain, "inspect") # If we have data from inspect, skip if it's not a live domain. if inspection and (not inspection.get("up")): logging.debug("\tSkipping, domain not reachable during inspection.") return None # If we have data from inspect, skip if it's just a redirector. if inspection and (inspection.get("redirect") is True): logging.debug( "\tSkipping, domain seen as just a redirector during inspection.") return None # phantomas needs a URL, not just a domain. if not (domain.startswith('http://') or domain.startswith('https://')): # If we have data from inspect, use the canonical endpoint. if inspection and inspection.get("canonical"): url = inspection.get("canonical") # Otherwise, well, whatever. else: url = 'http://' + domain else: url = domain # We'll cache prettified JSON from the output. cache = utils.cache_path(domain, "pageload") # If we've got it cached, use that. if (options.get("force", False) is False) and (os.path.exists(cache)): logging.debug("\tCached.") raw = open(cache).read() data = json.loads(raw) if data.get('invalid'): return None # If no cache, or we should run anyway, do the scan. else: logging.debug("\t %s %s --reporter=json --ignore-ssl-errors" % (command, url)) raw = utils.scan( [command, url, "--reporter=json", "--ignore-ssl-errors"]) if not raw: utils.write(utils.invalid({}), cache) return None # It had better be JSON, which we can cache in prettified form. data = json.loads(raw) utils.write(utils.json_for(data), cache) yield [data['metrics'][metric] for metric in interesting_metrics]
def scan(domain, options): logging.debug("[%s][pageload]" % domain) inspection = utils.data_for(domain, "inspect") # If we have data from inspect, skip if it's not a live domain. if inspection and (not inspection.get("up")): logging.debug("\tSkipping, domain not reachable during inspection.") return None # If we have data from inspect, skip if it's just a redirector. if inspection and (inspection.get("redirect") is True): logging.debug("\tSkipping, domain seen as just a redirector during inspection.") return None # phantomas needs a URL, not just a domain. if not (domain.startswith('http://') or domain.startswith('https://')): # If we have data from inspect, use the canonical endpoint. if inspection and inspection.get("canonical"): url = inspection.get("canonical") # Otherwise, well, whatever. else: url = 'http://' + domain else: url = domain # We'll cache prettified JSON from the output. cache = utils.cache_path(domain, "pageload") # If we've got it cached, use that. if (options.get("force", False) is False) and (os.path.exists(cache)): logging.debug("\tCached.") raw = open(cache).read() data = json.loads(raw) if data.get('invalid'): return None # If no cache, or we should run anyway, do the scan. else: logging.debug("\t %s %s --reporter=json --ignore-ssl-errors" % (command, url)) raw = utils.scan([command, url, "--reporter=json", "--ignore-ssl-errors"]) if not raw: utils.write(utils.invalid({}), cache) return None # It had better be JSON, which we can cache in prettified form. data = json.loads(raw) utils.write(utils.json_for(data), cache) yield [data['metrics'][metric] for metric in interesting_metrics]
def scan(domain, options): logging.debug("[%s][pshtt]" % domain) # cache output from pshtt cache_pshtt = utils.cache_path(domain, "pshtt", ext="json") force = options.get("force", False) data = None if (force is False) and (os.path.exists(cache_pshtt)): logging.debug("\tCached.") raw = open(cache_pshtt).read() data = json.loads(raw) if (data.__class__ is dict) and data.get('invalid'): return None else: logging.debug("\t %s %s" % (command, domain)) raw = utils.scan([ command, domain, '--json', '--user-agent', '\"%s\"' % user_agent, '--timeout', str(timeout), '--preload-cache', preload_cache ]) if not raw: utils.write(utils.invalid({}), cache_pshtt) logging.warn("\tBad news scanning, sorry!") return None data = json.loads(raw) utils.write(utils.json_for(data), utils.cache_path(domain, "pshtt")) # pshtt scanner uses JSON arrays, even for single items data = data[0] row = [] for field in headers: value = data[field] # TODO: Fix this upstream if (field != "HSTS Header") and (field != "HSTS Max Age") and (field != "Redirect To"): if value is None: value = False row.append(value) yield row
def scan(domain, options): logging.debug("[%s][trustymail]" % domain) # cache output from pshtt cache_trustymail = utils.cache_path(domain, "trustymail", ext="json") force = options.get("force", False) if (force is False) and (os.path.exists(cache_trustymail)): logging.debug("\tCached.") raw = open(cache_trustymail).read() data = json.loads(raw) if (data.__class__ is dict) and data.get('invalid'): return None else: logging.debug("\t %s %s" % (command, domain)) raw = utils.scan([ command, domain, '--json', '--timeout', str(timeout), ]) if not raw: utils.write(utils.invalid({}), cache_trustymail) logging.warn("\tBad news scanning, sorry!") return None data = json.loads(raw) utils.write(utils.json_for(data), utils.cache_path(domain, "trustymail")) # trustymail scanner follows pshtt in using JSON arrays, even for single items data = data[0] row = [] for field in headers: value = data[field] row.append(value) yield row
def scan(domain, options): logging.debug("[%s][inspect]" % domain) # cache JSON as it comes back from site-inspector cache = utils.cache_path(domain, "inspect") if (options.get("force", False) is False) and (os.path.exists(cache)): logging.debug("\tCached.") raw = open(cache).read() data = json.loads(raw) if data.get('invalid'): return None else: logging.debug("\t %s %s --http" % (command, domain)) raw = utils.scan([command, domain, "--http"]) if not raw: utils.write(utils.invalid({}), cache) return None utils.write(raw, cache) data = json.loads(raw) # TODO: get this from a site-inspector field directly canonical_https = data['endpoints']['https'][data['canonical_endpoint']] # TODO: guarantee these as present in site-inspector https_valid = canonical_https.get('https_valid', False) https_bad_chain = canonical_https.get('https_bad_chain', False) https_bad_name = canonical_https.get('https_bad_name', False) # TODO: site-inspector should float this up hsts_details = canonical_https.get('hsts_details', {}) max_age = hsts_details.get('max_age', None) yield [ data['canonical'], data['up'], data['redirect'], data['redirect_to'], https_valid, data['default_https'], data['downgrade_https'], data['enforce_https'], https_bad_chain, https_bad_name, data['hsts'], data['hsts_header'], max_age, data['hsts_entire_domain'], data['hsts_entire_domain_preload'], domain in chrome_preload_list, data['broken_root'], data['broken_www'] ]
def scan(domain, environment, options):
    timeout = int(options.get("timeout", default_timeout))

    url = environment["url"]

    raw = utils.scan([
        command, url,
        "--modules=domains",
        "--reporter=json",
        "--timeout=%i" % timeout,
        "--ignore-ssl-errors"
    ], allowed_return_codes=[252])

    if not raw:
        logging.warn("\tError with the phantomas command, skipping.")
        return None

    # Phantomas returns JSON.
    data = json.loads(raw)

    return services_for(url, data, domain, options)
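# A hypothetical invocation of the scanner above. The caller passes the
# resolved URL via the `environment` dict; exit code 252 from phantomas is
# whitelisted by the scan call, so a partial result can still come back.
result = scan(
    "example.com",
    {"url": "https://example.com"},
    {"timeout": 60},
)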
def run_a11y_scan(domain, cache):
    logging.debug("[%s][a11y]" % domain)
    pa11y = os.environ.get("PA11Y_PATH", "pa11y")
    command = [pa11y, domain, "--reporter", "json",
               "--config", "config/pa11y_config.json",
               "--level", "none",
               "--timeout", "300000"]
    raw = utils.scan(command)
    if raw:
        results = json.loads(raw)
    else:
        results = [{
            'typeCode': '',
            'code': '',
            'message': '',
            'context': '',
            'selector': '',
            'type': ''
        }]
    cache_errors(results, domain, cache)
    return results
def run_a11y_scan(domain, cache):
    global config
    logging.debug("[%s][a11y]" % domain)
    pa11y = os.environ.get("PA11Y_PATH", "pa11y")
    domain_to_scan = get_domain_to_scan(domain)
    command = [pa11y, domain_to_scan, "--reporter", "json",
               "--level", "none",
               "--timeout", "300000"]
    if config:
        command += ["--config", config]
    raw = utils.scan(command)
    if not raw or raw == '[]\n':
        results = [{
            'typeCode': '',
            'code': '',
            'message': '',
            'context': '',
            'selector': '',
            'type': ''
        }]
    else:
        results = json.loads(raw)
    cache_errors(results, domain, cache)
    return results
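# A minimal sketch of the `cache_errors` helper called above, assuming it
# simply persists the pa11y error list as prettified JSON at the given
# cache path; the real helper may reshape the errors first.
import json

def cache_errors_sketch(results, domain, cache):
    # `domain` kept for signature parity; unused in this sketch.
    with open(cache, "w") as f:
        json.dump(results, f, indent=2)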
def run_a11y_scan(domain):
    command = [
        pa11y, domain,
        "--reporter", "json",
        "--level", "none",
        "--timeout", "300000"
    ]
    if config:
        command += ["--config", config]
    raw = utils.scan(command)
    if not raw or raw == '[]\n':
        results = [{
            'typeCode': '',
            'code': '',
            'message': '',
            'context': '',
            'selector': '',
            'type': ''
        }]
    else:
        results = json.loads(raw)
    return results
def network_check(subdomain, endpoint, options):
    cache = utils.cache_path(subdomain, "subdomains")

    wildcard = wildcard_for(subdomain)

    if (options.get("force", False) is False) and (os.path.exists(cache)):
        logging.debug("\tDNS and content cached.")
        raw = open(cache).read()
        data = json.loads(raw)

    # Hit DNS and HTTP.
    else:
        # HTTP content: just use curl.
        #
        # Turn on --insecure because we want to see the content even at sites
        # where the certificate isn't right or proper.
        logging.debug("\t curl --silent --insecure %s" % endpoint)
        content = utils.scan(["curl", "--silent", "--insecure", endpoint])

        # DNS content: just use dig.
        #
        # Not awesome - uses an unsafe shell execution of `dig` to look up DNS,
        # as I couldn't figure out a way to get "+short" to play nice with
        # the more secure execution methods available to me. Since this system
        # isn't expected to process untrusted input, this should be okay.
        logging.debug("\t dig +short '%s'" % wildcard)
        raw_wild = utils.unsafe_execute("dig +short '%s'" % wildcard)

        if raw_wild == "":
            raw_wild = None
            raw_self = None
        else:
            logging.debug("\t dig +short '%s'" % subdomain)
            raw_self = utils.unsafe_execute("dig +short '%s'" % subdomain)

        if raw_wild:
            parsed_wild = raw_wild.split("\n")
            parsed_wild.sort()
        else:
            parsed_wild = None

        if raw_self:
            parsed_self = raw_self.split("\n")
            parsed_self.sort()
        else:
            parsed_self = None

        # Cache HTTP and DNS data to disk.
        data = {
            'response': {
                'content': content,
                'wildcard_dns': parsed_wild,
                'self_dns': parsed_self
            }
        }

        if (parsed_wild) and (parsed_wild == parsed_self):
            data['response']['matched_wild'] = True
        else:
            data['response']['matched_wild'] = False

        utils.write(utils.json_for(data), cache)

    return data['response']
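# A minimal sketch of `wildcard_for`, assuming it swaps the leftmost DNS
# label for "*" so the dig comparison above can detect wildcard records.
def wildcard_for_sketch(subdomain):
    # "foo.example.com" -> "*.example.com"
    return "*." + subdomain.split(".", 1)[1]

assert wildcard_for_sketch("foo.example.com") == "*.example.com"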
def scan(domain, options): logging.debug("[%s][sslyze]" % domain) # Optional: skip domains which don't support HTTPS in prior inspection if utils.domain_doesnt_support_https(domain): logging.debug("\tSkipping, HTTPS not supported in inspection.") return None # Optional: if pshtt data says canonical endpoint uses www and this domain # doesn't have it, add it. if utils.domain_uses_www(domain): scan_domain = "www.%s" % domain else: scan_domain = domain # cache XML from sslyze cache_xml = utils.cache_path(domain, "sslyze", ext="xml") # because sslyze manages its own output (can't yet print to stdout), # we have to mkdir_p the path ourselves utils.mkdir_p(os.path.dirname(cache_xml)) force = options.get("force", False) if (force is False) and (os.path.exists(cache_xml)): logging.debug("\tCached.") xml = open(cache_xml).read() else: logging.debug("\t %s %s" % (command, scan_domain)) # use scan_domain (possibly www-prefixed) to do actual scan # Give the Python shell environment a pyenv environment. pyenv_init = "eval \"$(pyenv init -)\" && pyenv shell %s" % pyenv_version # Really un-ideal, but calling out to Python2 from Python 3 is a nightmare. # I don't think this tool's threat model includes untrusted CSV, either. raw = utils.unsafe_execute( "%s && %s --regular --quiet %s --xml_out=%s" % (pyenv_init, command, scan_domain, cache_xml)) if raw is None: # TODO: save standard invalid XML data...? logging.warn("\tBad news scanning, sorry!") return None xml = utils.scan(["cat", cache_xml]) if not xml: logging.warn("\tBad news reading XML, sorry!") return None utils.write(xml, cache_xml) data = parse_sslyze(xml) if data is None: logging.warn("\tNo valid target for scanning, couldn't connect.") return None utils.write(utils.json_for(data), utils.cache_path(domain, "sslyze")) yield [ data['protocols']['sslv2'], data['protocols']['sslv3'], data['protocols']['tlsv1.0'], data['protocols']['tlsv1.1'], data['protocols']['tlsv1.2'], data['config'].get('any_dhe'), data['config'].get('all_dhe'), data['config'].get('weakest_dh'), data['config'].get('any_rc4'), data['config'].get('all_rc4'), data['config'].get('ocsp_stapling'), data['certs'].get('key_type'), data['certs'].get('key_length'), data['certs'].get('leaf_signature'), data['certs'].get('any_sha1'), data['certs'].get('not_before'), data['certs'].get('not_after'), data['certs'].get('served_issuer'), data.get('errors') ]
def scan(domain, options): logging.debug("[%s][tls]" % domain) # If inspection data exists, check to see if we can skip. inspection = utils.data_for(domain, "inspect") if inspection and (not inspection.get("support_https")): logging.debug("\tSkipping, HTTPS not supported in inspection.") return None else: # cache reformatted JSON from ssllabs cache = utils.cache_path(domain, "tls") force = options.get("force", False) if (force is False) and (os.path.exists(cache)): logging.debug("\tCached.") raw = open(cache).read() data = json.loads(raw) if data.get("invalid"): return None else: logging.debug("\t %s %s" % (command, domain)) usecache = str(not force).lower() if options.get("debug"): cmd = [command, "--usecache=%s" % usecache, "--verbosity=debug", domain] else: cmd = [command, "--usecache=%s" % usecache, "--quiet", domain] raw = utils.scan(cmd) if raw: data = json.loads(raw) # we only give ssllabs-scan one at a time, # so we can de-pluralize this data = data[0] # if SSL Labs had an error hitting the site, cache this # as an invalid entry. if data["status"] == "ERROR": utils.write(utils.invalid(data), cache) return None utils.write(utils.json_for(data), cache) else: return None # raise Exception("Invalid data from ssllabs-scan: %s" % raw) # can return multiple rows, one for each 'endpoint' for endpoint in data["endpoints"]: # this meant it couldn't connect to the endpoint if not endpoint.get("grade"): continue sslv3 = False tlsv12 = False for protocol in endpoint["details"]["protocols"]: if (protocol["name"] == "SSL") and (protocol["version"] == "3.0"): sslv3 = True if (protocol["name"] == "TLS") and (protocol["version"] == "1.2"): tlsv12 = True spdy = False h2 = False npn = endpoint["details"].get("npnProtocols", None) if npn: spdy = "spdy" in npn h2 = "h2-" in npn yield [ endpoint["grade"], endpoint["details"]["cert"]["sigAlg"], endpoint["details"]["key"]["alg"], endpoint["details"]["key"]["size"], endpoint["details"]["forwardSecrecy"], endpoint["details"]["ocspStapling"], endpoint["details"].get("fallbackScsv", "N/A"), endpoint["details"]["supportsRc4"], sslv3, tlsv12, spdy, endpoint["details"]["sniRequired"], h2, ]
def scan(domain, options): logging.debug("[%s][sslyze]" % domain) # Optional: skip domains which don't support HTTPS in prior inspection inspection = utils.data_for(domain, "inspect") if inspection and (not inspection.get("support_https")): logging.debug("\tSkipping, HTTPS not supported in inspection.") return None # Optional: if inspect data says canonical endpoint uses www and this domain # doesn't have it, add it. if inspection and (inspection.get("canonical_endpoint") == "www") and (not domain.startswith("www.")): scan_domain = "www.%s" % domain else: scan_domain = domain # cache XML from sslyze cache_xml = utils.cache_path(domain, "sslyze", ext="xml") # because sslyze manages its own output (can't yet print to stdout), # we have to mkdir_p the path ourselves utils.mkdir_p(os.path.dirname(cache_xml)) force = options.get("force", False) if (force is False) and (os.path.exists(cache_xml)): logging.debug("\tCached.") xml = open(cache_xml).read() else: logging.debug("\t %s %s" % (command, domain)) # use scan_domain (possibly www-prefixed) to do actual scan raw = utils.scan([command, "--regular", "--quiet", scan_domain, "--xml_out=%s" % cache_xml], env=command_env) if raw is None: # TODO: save standard invalid XML data...? logging.warn("\tBad news scanning, sorry!") return None xml = utils.scan(["cat", cache_xml]) if not xml: logging.warn("\tBad news reading XML, sorry!") return None utils.write(xml, cache_xml) data = parse_sslyze(xml) if data is None: logging.warn("\tNo valid target for scanning, couldn't connect.") return None utils.write(utils.json_for(data), utils.cache_path(domain, "sslyze")) yield [ data['protocols']['sslv2'], data['protocols']['sslv3'], data['protocols']['tlsv1.0'], data['protocols']['tlsv1.1'], data['protocols']['tlsv1.2'], data['config'].get('any_dhe'), data['config'].get('all_dhe'), data['config'].get('weakest_dh'), data['config'].get('any_rc4'), data['config'].get('all_rc4'), data['config'].get('ocsp_stapling'), data['certs'].get('key_type'), data['certs'].get('key_length'), data['certs'].get('leaf_signature'), data['certs'].get('any_sha1'), data['certs'].get('not_before'), data['certs'].get('not_after'), data['certs'].get('served_issuer'), data.get('errors') ]
def scan(domain, options): logging.debug("[%s][third_parties]" % domain) # Default timeout is 15s, too little. timeout = int(options.get("timeout", 60)) # If we have data from pshtt, skip if it's not a live domain. if utils.domain_not_live(domain): logging.debug("\tSkipping, domain not reachable during inspection.") return None # If we have data from pshtt, skip if it's just a redirector. if utils.domain_is_redirect(domain): logging.debug( "\tSkipping, domain seen as just an external redirector during inspection." ) return None # phantomas needs a URL, not just a domain. if not (domain.startswith('http://') or domain.startswith('https://')): # If we have data from pshtt, use the canonical endpoint. if utils.domain_canonical(domain): url = utils.domain_canonical(domain) # Otherwise, well, whatever. else: url = 'http://' + domain else: url = domain # calculated_domain = re.sub("https?:\/\/", "", url) # We'll cache prettified JSON from the output. cache = utils.cache_path(domain, "third_parties") # If we've got it cached, use that. if (options.get("force", False) is False) and (os.path.exists(cache)): logging.debug("\tCached.") raw = open(cache).read() data = json.loads(raw) if data.get('invalid'): return None # If no cache, or we should run anyway, do the scan. else: logging.debug( "\t %s %s --modules=domains --reporter=json --timeout=%i --ignore-ssl-errors" % (command, url, timeout)) raw = utils.scan([ command, url, "--modules=domains", "--reporter=json", "--timeout=%i" % timeout, "--ignore-ssl-errors" ], allowed_return_codes=[252]) if not raw: utils.write(utils.invalid({}), cache) return None # It had better be JSON, which we can cache in prettified form. data = json.loads(raw) utils.write(utils.json_for(data), cache) services = services_for(data, domain, options) # Convert to CSV row known_names = list(known_services.keys()) known_names.sort() known_matches = [ 'Yes' if host in services['known'] else 'No' for host in known_names ] yield [ len(services['external']), len(services['internal']), services['external_requests'], services['internal_requests'], serialize(services['external']), serialize(services['internal']), # services['affiliated'], # services['unknown'] ] + known_matches
def scan(domain, options): logging.debug("[%s][sslyze]" % domain) # Optional: skip domains which don't support HTTPS in prior inspection if utils.domain_doesnt_support_https(domain): logging.debug("\tSkipping, HTTPS not supported in inspection.") return None # Optional: if pshtt data says canonical endpoint uses www and this domain # doesn't have it, add it. if utils.domain_uses_www(domain): scan_domain = "www.%s" % domain else: scan_domain = domain # cache XML from sslyze cache_xml = utils.cache_path(domain, "sslyze", ext="xml") # because sslyze manages its own output (can't yet print to stdout), # we have to mkdir_p the path ourselves utils.mkdir_p(os.path.dirname(cache_xml)) force = options.get("force", False) if (force is False) and (os.path.exists(cache_xml)): logging.debug("\tCached.") xml = open(cache_xml).read() else: logging.debug("\t %s %s" % (command, scan_domain)) # use scan_domain (possibly www-prefixed) to do actual scan # Give the Python shell environment a pyenv environment. pyenv_init = "eval \"$(pyenv init -)\" && pyenv shell %s" % pyenv_version # Really un-ideal, but calling out to Python2 from Python 3 is a nightmare. # I don't think this tool's threat model includes untrusted CSV, either. raw = utils.unsafe_execute("%s && %s --regular --quiet %s --xml_out=%s" % (pyenv_init, command, scan_domain, cache_xml)) if raw is None: # TODO: save standard invalid XML data...? logging.warn("\tBad news scanning, sorry!") return None xml = utils.scan(["cat", cache_xml]) if not xml: logging.warn("\tBad news reading XML, sorry!") return None utils.write(xml, cache_xml) data = parse_sslyze(xml) if data is None: logging.warn("\tNo valid target for scanning, couldn't connect.") return None utils.write(utils.json_for(data), utils.cache_path(domain, "sslyze")) yield [ data['protocols']['sslv2'], data['protocols']['sslv3'], data['protocols']['tlsv1.0'], data['protocols']['tlsv1.1'], data['protocols']['tlsv1.2'], data['config'].get('any_dhe'), data['config'].get('all_dhe'), data['config'].get('weakest_dh'), data['config'].get('any_rc4'), data['config'].get('all_rc4'), data['config'].get('ocsp_stapling'), data['certs'].get('key_type'), data['certs'].get('key_length'), data['certs'].get('leaf_signature'), data['certs'].get('any_sha1'), data['certs'].get('not_before'), data['certs'].get('not_after'), data['certs'].get('served_issuer'), data.get('errors') ]
def scan(domain, options): logging.debug("[%s][tls]" % domain) # If inspection data exists, check to see if we can skip. if utils.domain_doesnt_support_https(domain): logging.debug("\tSkipping, HTTPS not supported in inspection.") return None # cache reformatted JSON from ssllabs cache = utils.cache_path(domain, "tls") # Optional: if pshtt data says canonical endpoint uses www and this domain # doesn't have it, add it. if utils.domain_uses_www(domain): scan_domain = "www.%s" % domain else: scan_domain = domain force = options.get("force", False) if (force is False) and (os.path.exists(cache)): logging.debug("\tCached.") raw = open(cache).read() data = json.loads(raw) if data.get('invalid'): return None else: logging.debug("\t %s %s" % (command, scan_domain)) usecache = str(not force).lower() if options.get("debug"): cmd = [command, "--usecache=%s" % usecache, "--verbosity=debug", scan_domain] else: cmd = [command, "--usecache=%s" % usecache, "--quiet", scan_domain] raw = utils.scan(cmd) if raw: data = json.loads(raw) # if SSL Labs gave us back an error response, cache this # as an invalid entry. if len(data) < 1: utils.write(utils.invalid({'response': data}), cache) return None # we only give ssllabs-scan one at a time, # so we can de-pluralize this data = data[0] # if SSL Labs had an error hitting the site, cache this # as an invalid entry. if data["status"] == "ERROR": utils.write(utils.invalid(data), cache) return None utils.write(utils.json_for(data), cache) else: return None # raise Exception("Invalid data from ssllabs-scan: %s" % raw) # can return multiple rows, one for each 'endpoint' for endpoint in data['endpoints']: # this meant it couldn't connect to the endpoint if not endpoint.get("grade"): continue sslv3 = False tlsv12 = False for protocol in endpoint['details']['protocols']: if ((protocol['name'] == "SSL") and (protocol['version'] == '3.0')): sslv3 = True if ((protocol['name'] == "TLS") and (protocol['version'] == '1.2')): tlsv12 = True spdy = False h2 = False npn = endpoint['details'].get('npnProtocols', None) if npn: spdy = ("spdy" in npn) h2 = ("h2" in npn) yield [ endpoint['grade'], endpoint['details']['cert']['sigAlg'], endpoint['details']['key']['alg'], endpoint['details']['key']['size'], endpoint['details']['forwardSecrecy'], endpoint['details']['ocspStapling'], endpoint['details'].get('fallbackScsv', "N/A"), endpoint['details']['supportsRc4'], sslv3, tlsv12, spdy, endpoint['details']['sniRequired'], h2 ]
def scan(domain, options): logging.debug("[%s][sslyze]" % domain) # Optional: skip domains which don't support HTTPS in pshtt scan. if utils.domain_doesnt_support_https(domain): logging.debug("\tSkipping, HTTPS not supported.") return None # Optional: if pshtt data says canonical endpoint uses www and this domain # doesn't have it, add it. if utils.domain_uses_www(domain): scan_domain = "www.%s" % domain else: scan_domain = domain # cache JSON from sslyze cache_json = utils.cache_path(domain, "sslyze") # because sslyze manages its own output (can't yet print to stdout), # we have to mkdir_p the path ourselves utils.mkdir_p(os.path.dirname(cache_json)) force = options.get("force", False) if (force is False) and (os.path.exists(cache_json)): logging.debug("\tCached.") raw_json = open(cache_json).read() try: data = json.loads(raw_json) if (data.__class__ is dict) and data.get('invalid'): return None except json.decoder.JSONDecodeError as err: logging.warn("Error decoding JSON. Cache probably corrupted.") return None else: # use scan_domain (possibly www-prefixed) to do actual scan logging.debug("\t %s %s" % (command, scan_domain)) # This is --regular minus --heartbleed # See: https://github.com/nabla-c0d3/sslyze/issues/217 raw_response = utils.scan([ command, "--sslv2", "--sslv3", "--tlsv1", "--tlsv1_1", "--tlsv1_2", "--reneg", "--resum", "--certinfo", "--http_get", "--hide_rejected_ciphers", "--compression", "--openssl_ccs", "--fallback", "--quiet", scan_domain, "--json_out=%s" % cache_json ]) if raw_response is None: # TODO: save standard invalid JSON data...? utils.write(utils.invalid({}), cache_json) logging.warn("\tBad news scanning, sorry!") return None raw_json = utils.scan(["cat", cache_json]) if not raw_json: logging.warn("\tBad news reading JSON, sorry!") return None utils.write(raw_json, cache_json) data = parse_sslyze(raw_json) if data is None: logging.warn("\tNo valid target for scanning, couldn't connect.") return None yield [ scan_domain, data['protocols']['sslv2'], data['protocols']['sslv3'], data['protocols']['tlsv1.0'], data['protocols']['tlsv1.1'], data['protocols']['tlsv1.2'], data['config'].get('any_dhe'), data['config'].get('all_dhe'), data['config'].get('weakest_dh'), data['config'].get('any_rc4'), data['config'].get('all_rc4'), data['certs'].get('key_type'), data['certs'].get('key_length'), data['certs'].get('leaf_signature'), data['certs'].get('any_sha1_served'), data['certs'].get('any_sha1_constructed'), data['certs'].get('not_before'), data['certs'].get('not_after'), data['certs'].get('served_issuer'), data['certs'].get('constructed_issuer'), data.get('errors') ]
def scan(domain, options): logging.debug("[%s][tls]" % domain) # If inspection data exists, check to see if we can skip. inspection = utils.data_for(domain, "inspect") if inspection and (not inspection.get("support_https")): logging.debug("\tSkipping, HTTPS not supported in inspection.") yield None else: # cache reformatted JSON from ssllabs cache = utils.cache_path(domain, "tls") force = options.get("force", False) if (force is False) and (os.path.exists(cache)): logging.debug("\tCached.") raw = open(cache).read() data = json.loads(raw) if data.get('invalid'): return None else: logging.debug("\t %s %s" % (command, domain)) usecache = str(not force).lower() if options.get("debug"): cmd = [command, "--usecache=%s" % usecache, "--verbosity=debug", domain] else: cmd = [command, "--usecache=%s" % usecache, "--quiet", domain] raw = utils.scan(cmd) if raw: data = json.loads(raw) # we only give ssllabs-scan one at a time, # so we can de-pluralize this data = data[0] # if SSL Labs had an error hitting the site, cache this # as an invalid entry. if data["status"] == "ERROR": utils.write(utils.invalid(data), cache) return None utils.write(utils.json_for(data), cache) else: return None # raise Exception("Invalid data from ssllabs-scan: %s" % raw) # can return multiple rows, one for each 'endpoint' for endpoint in data['endpoints']: # this meant it couldn't connect to the endpoint if not endpoint.get("grade"): continue sslv3 = False tlsv12 = False for protocol in endpoint['details']['protocols']: if ((protocol['name'] == "SSL") and (protocol['version'] == '3.0')): sslv3 = True if ((protocol['name'] == "TLS") and (protocol['version'] == '1.2')): tlsv12 = True spdy = False h2 = False npn = endpoint['details'].get('npnProtocols', None) if npn: spdy = ("spdy" in npn) h2 = ("h2-" in npn) def ccs_map(n): return { -1: "N/A (Error)", 0: "N/A (Unknown)", 1: "No (not vulnerable)", 2: "No (not exploitable)", 3: "Yes" }[n] def fs_map(n): return { 0: "0 - No", 1: "1 - Some", 2: "2 - Modern", 4: "3 - Robust" }[n] yield [ endpoint['grade'], endpoint['details']['cert']['sigAlg'], endpoint['details']['key']['alg'], endpoint['details']['key']['size'], fs_map(endpoint['details']['forwardSecrecy']), endpoint['details']['ocspStapling'], endpoint['details'].get('fallbackScsv', "N/A"), endpoint['details'].get('freak'), ccs_map(endpoint['details']['openSslCcs']), sslv3, tlsv12, spdy, endpoint['details']['sniRequired'], h2 ]
def scan(domain, options): logging.debug("[%s][tls]" % domain) # If pshtt data exists, check to see if we can skip. if utils.domain_doesnt_support_https(domain): logging.debug("\tSkipping, HTTPS not supported.") return None # cache reformatted JSON from ssllabs cache = utils.cache_path(domain, "tls") # Optional: if pshtt data says canonical endpoint uses www and this domain # doesn't have it, add it. if utils.domain_uses_www(domain): scan_domain = "www.%s" % domain else: scan_domain = domain force = options.get("force", False) if (force is False) and (os.path.exists(cache)): logging.debug("\tCached.") raw = open(cache).read() data = json.loads(raw) if data.get('invalid'): return None else: logging.debug("\t %s %s" % (command, scan_domain)) usecache = str(not force).lower() if options.get("debug"): cmd = [ command, "--usecache=%s" % usecache, "--verbosity=debug", scan_domain ] else: cmd = [command, "--usecache=%s" % usecache, "--quiet", scan_domain] raw = utils.scan(cmd) if raw: data = json.loads(raw) # if SSL Labs gave us back an error response, cache this # as an invalid entry. if len(data) < 1: utils.write(utils.invalid({'response': data}), cache) return None # we only give ssllabs-scan one at a time, # so we can de-pluralize this data = data[0] # if SSL Labs had an error hitting the site, cache this # as an invalid entry. if data["status"] == "ERROR": utils.write(utils.invalid(data), cache) return None utils.write(utils.json_for(data), cache) else: return None # raise Exception("Invalid data from ssllabs-scan: %s" % raw) # can return multiple rows, one for each 'endpoint' for endpoint in data['endpoints']: # this meant it couldn't connect to the endpoint if not endpoint.get("grade"): continue sslv3 = False tlsv12 = False for protocol in endpoint['details']['protocols']: if ((protocol['name'] == "SSL") and (protocol['version'] == '3.0')): sslv3 = True if ((protocol['name'] == "TLS") and (protocol['version'] == '1.2')): tlsv12 = True spdy = False h2 = False npn = endpoint['details'].get('npnProtocols', None) if npn: spdy = ("spdy" in npn) h2 = ("h2" in npn) yield [ endpoint['grade'], endpoint['details']['cert']['sigAlg'], endpoint['details']['key']['alg'], endpoint['details']['key']['size'], endpoint['details']['forwardSecrecy'], endpoint['details']['ocspStapling'], endpoint['details'].get('fallbackScsv', "N/A"), endpoint['details']['supportsRc4'], sslv3, tlsv12, spdy, endpoint['details']['sniRequired'], h2 ]