def __init__(self, host, full_scan, scripts, services, port_range):
    self.target = host.target
    self.full_scan = full_scan
    self.scripts = scripts
    self.services = services
    self.port_range = port_range
    self.path = HelpUtilities.get_output_path("{}/nmap_scan.txt".format(self.target))
    self.logger = Logger(self.path)
    self.script = self.build_script()
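# A minimal, hypothetical sketch of the kind of command build_script() might assemble.
# build_script itself is not shown in this excerpt, so the flags below are assumptions
# based on standard nmap options rather than the project's actual logic:
# -Pn skips host discovery, -sV probes service versions, -sC runs default NSE scripts,
# -p limits the port range and -p- covers all 65535 ports for a full scan.
def _example_nmap_command(target, full_scan, scripts, services, port_range):
    command = "nmap -vv -Pn {}".format(target)
    if port_range:
        command += " -p {}".format(port_range)
    if full_scan:
        command += " -p- -sV -sC"
    else:
        if services:
            command += " -sV"
        if scripts:
            command += " -sC"
    return command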
class Host:
    """
    Host parsing, IP to host resolution (and vice versa), etc.
    Sets domain/IP, port and protocol. Also tries to parse the FQDN
    and naked domain, if possible.
    """
    def __init__(self, target, dns_records):
        self.target = target.strip()
        self.dns_records = dns_records
        self.port = 80
        self.protocol = "http"
        self.is_ip = False
        self.fqdn = None
        self.naked = None
        self.dns_results = {}
        self.logger = SystemOutLogger()

    def __str__(self):
        return self.target

    def __repr__(self):
        return str(self.__dict__)

    @staticmethod
    def _create_host_dir(path):
        try:
            os.mkdir("/".join(path.split("/")[:-1]))
        except FileExistsError:
            pass

    def validate_ip(self, addr=None):
        if not addr:
            addr = self.target
        try:
            ip_address(addr.strip())
            return True
        except ValueError:
            return

    def _extract_port(self, addr):
        try:
            self.target, self.port = addr.split(":")
            self.port = int(self.port)
            self.logger.info("{} Port detected: {}".format(
                COLORED_COMBOS.GOOD, self.port))
        except ValueError:
            self.logger.info(
                "{} Did not detect port. Using default port 80".format(
                    COLORED_COMBOS.WARNING))
            return
        return

    def _is_proto(self, domain=None):
        if not domain:
            domain = self.target
        if "://" in domain:
            if any(domain.startswith(proto) for proto in ("https", "http")):
                return True
            else:
                raise HostHandlerException(
                    "Unknown or unsupported protocol: {}".format(
                        self.target.split("://")[0]))
        return

    def write_up(self):
        self.logger.info("{} Writing DNS query results".format(
            COLORED_COMBOS.GOOD))
        for record in self.dns_results:
            self.logger.debug(record + "\n")
            for value in self.dns_results.get(record):
                self.logger.debug("\t{}".format(value))

    def create_host_dir_and_set_file_logger(self):
        log_file = HelpUtilities.get_output_path("{}/dns_records.txt".format(
            self.target))
        self._create_host_dir(log_file)
        self.logger = Logger(log_file)

    def parse(self):
        """
        Try to extract the domain (full, naked, sub-domain), IP and port.
        """
        if self.target.endswith("/"):
            self.target = self.target[:-1]
        if self._is_proto(self.target):
            try:
                self.protocol, self.target = self.target.split("://")
                self.logger.info("{} Protocol detected: {}".format(
                    COLORED_COMBOS.GOOD, self.protocol))
                if self.protocol.lower() == "https" and self.port == 80:
                    self.port = 443
            except ValueError:
                raise HostHandlerException(
                    "Could not make domain and protocol from host")
        if ":" in self.target:
            self._extract_port(self.target)
        if self.validate_ip(self.target):
            self.logger.info("{} Detected {} as an IP address.".format(
                COLORED_COMBOS.GOOD, self.target))
            self.is_ip = True
        else:
            domains = []
            if self.target.startswith("www."):
                # Obviously an FQDN
                domains.extend((self.target, self.target.split("www.")[1]))
                self.fqdn = self.target
                self.naked = ".".join(self.fqdn.split('.')[1:])
            else:
                # Can't be sure if FQDN or just a naked domain
                domains.append(self.target)
            self.dns_results = DNSHandler.query_dns(domains, self.dns_records)
            if self.dns_results.get("CNAME"):
                # Naked domains shouldn't hold CNAME records according to RFC regulations
                self.logger.info(
                    "{} Found {} to be an FQDN by CNAME presence in DNS records".format(
                        COLORED_COMBOS.GOOD, self.target))
                self.fqdn = self.target
                self.naked = ".".join(self.fqdn.split('.')[1:])
        self.create_host_dir_and_set_file_logger()
        self.write_up()
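# Illustrative usage sketch (an assumption, not part of the module): it relies on the
# module's own imports (DNSHandler, HelpUtilities, Logger, os, ip_address) and needs
# network access for the DNS queries. The record tuple is a plausible example, not a
# documented default.
if __name__ == "__main__":
    example_host = Host(target="https://www.example.com:8443/",
                        dns_records=("A", "NS", "CNAME"))
    example_host.parse()                               # strips protocol, extracts port, queries DNS
    print(example_host.protocol, example_host.port)    # -> https 8443
    print(example_host.fqdn, example_host.naked)       # -> www.example.com example.com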
class WAF:

    def __init__(self, host):
        self.host = host
        self.cnames = host.dns_results.get('CNAME')
        self.request_handler = RequestHandler()
        self.web_server_validator = WebServerValidator()
        self.waf_present = False
        self.waf_cname_map = {
            "incapdns": "Incapsula",
            "edgekey": "Akamai",
            "akamai": "Akamai",
            "edgesuite": "Akamai",
            "distil": "Distil Networks",
            "cloudfront": "CloudFront",
            "netdna-cdn": "MaxCDN"
        }
        self.waf_app_method_map = {
            "CloudFront": WAFApplicationMethods.detect_cloudfront,
            "Cloudflare": WAFApplicationMethods.detect_cloudflare,
            "Incapsula": WAFApplicationMethods.detect_incapsula,
            "MaxCDN": WAFApplicationMethods.detect_maxcdn,
            "Edgecast": WAFApplicationMethods.detect_edgecast,
            "Distil Networks": WAFApplicationMethods.detect_distil
        }
        log_file = HelpUtilities.get_output_path("{}/WAF.txt".format(
            self.host.target))
        self.logger = Logger(log_file)

    def _waf_detected(self, name):
        self.logger.info(
            "{} Detected WAF presence in web application: {}{}{}".format(
                COLORED_COMBOS.BAD, COLOR.RED, name, COLOR.RESET))
        self.waf_present = True

    def _detect_by_cname(self):
        for waf in self.waf_cname_map:
            if any(waf in str(cname) for cname in self.cnames):
                self.logger.info(
                    "{} Detected WAF presence in CNAME: {}{}{}".format(
                        COLORED_COMBOS.BAD, COLOR.RED,
                        self.waf_cname_map.get(waf), COLOR.RESET))
                self.waf_present = True

    def _detect_by_application(self):
        try:
            response = self.request_handler.send(
                "HEAD",
                timeout=20,
                allow_redirects=True,
                url="{}://{}:{}".format(
                    self.host.protocol, self.host.target, self.host.port))
            for waf, method in self.waf_app_method_map.items():
                result = method(response.headers)
                if result:
                    self._waf_detected(waf)
        except (ConnectionError, TooManyRedirects) as e:
            raise WAFException("Couldn't get response from server.\n"
                               "Caused due to exception: {}".format(str(e)))

    async def detect(self):
        self.logger.info("{} Trying to detect WAF presence in {}".format(
            COLORED_COMBOS.INFO, self.host))
        if self.cnames:
            self._detect_by_cname()
        try:
            self.web_server_validator.validate_target_webserver(self.host)
            self._detect_by_application()
            if not self.waf_present:
                self.logger.info(
                    "{} Did not detect WAF presence in target".format(
                        COLORED_COMBOS.GOOD))
        except WebServerValidatorException:
            self.logger.info(
                "{} Target does not seem to have an active web server on port: {}\n"
                "No WAF could be detected on an application level.".format(
                    COLORED_COMBOS.WARNING, self.host.port))
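# Illustrative usage sketch (an assumption): detect() is a coroutine, so it has to be
# driven by an event loop, as the rest of the project does for its scanners.
# "parsed_host" stands for a Host instance that has already run parse().
#
# import asyncio
# waf = WAF(parsed_host)
# asyncio.get_event_loop().run_until_complete(waf.detect())
# print(waf.waf_present)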
class WebApplicationScanner:

    def __init__(self, host):
        self.host = host
        self.request_handler = RequestHandler()
        self.web_server_validator = WebServerValidator()
        self.web_scan_results = []
        self.headers = None
        self.robots = None
        log_file = HelpUtilities.get_output_path("{}/web_scan.txt".format(self.host.target))
        self.target_dir = "/".join(log_file.split("/")[:-1])
        self.logger = Logger(log_file)

    def _detect_cms(self, tries=0):
        """
        Detect CMS using whatcms.org.
        Has a re-try mechanism because false negatives may occur
        :param tries: Count of tries for CMS discovery
        """
        page = requests.get("https://whatcms.org/?s={}".format(self.host.target))
        soup = BeautifulSoup(page.text, "lxml")
        found = soup.select(".panel.panel-success")
        if found:
            try:
                cms = [a for a in soup.select("a") if "/c/" in a.get("href")][0]
                self.logger.info("{} CMS detected: target is using {}{}{}".format(
                    COLORED_COMBOS.GOOD, COLOR.GREEN, cms.get("title"), COLOR.RESET))
            except IndexError:
                if tries >= 4:
                    return
                else:
                    self._detect_cms(tries=tries + 1)
        else:
            if tries >= 4:
                return
            else:
                self._detect_cms(tries=tries + 1)

    def _cookie_info(self, jar):
        for cookie in jar:
            key = cookie.__dict__.get("name")
            value = cookie.__dict__.get("value")
            domain = cookie.__dict__.get("domain")
            secure = cookie.__dict__.get("secure")
            try:
                if domain in self.host.target or self.host.target in domain:
                    if not secure:
                        self.logger.info(
                            "%s Found cookie without secure flag: {%s: %s}" % (
                                COLORED_COMBOS.GOOD, key, value)
                        )
            except TypeError:
                continue

    def _server_info(self):
        if self.headers.get("server"):
            self.logger.info("{} Web server detected: {}{}{}".format(
                COLORED_COMBOS.GOOD, COLOR.GREEN,
                self.headers.get("server"), COLOR.RESET))

    def _x_powered_by(self):
        if self.headers.get("X-Powered-By"):
            self.logger.info("{} X-Powered-By header detected: {}{}{}".format(
                COLORED_COMBOS.GOOD, COLOR.GREEN,
                self.headers.get("X-Powered-By"), COLOR.RESET))

    def _anti_clickjacking(self):
        if not self.headers.get("X-Frame-Options"):
            self.logger.info(
                "{} X-Frame-Options header not detected - target might be vulnerable to clickjacking".format(
                    COLORED_COMBOS.GOOD)
            )

    def _xss_protection(self):
        xss_header = self.headers.get("X-XSS-PROTECTION")
        if xss_header and "1" in xss_header:
            self.logger.info("{} Found X-XSS-PROTECTION header".format(COLORED_COMBOS.BAD))

    def _cors_wildcard(self):
        if self.headers.get("Access-Control-Allow-Origin") == "*":
            self.logger.info("{} CORS wildcard detected".format(COLORED_COMBOS.GOOD))

    def _robots(self):
        res = self.request_handler.send(
            "GET",
            url="{}://{}:{}/robots.txt".format(
                self.host.protocol, self.host.target, self.host.port
            )
        )
        if res.status_code != 404 and res.text and "<!DOCTYPE html>" not in res.text:
            self.logger.info("{} Found robots.txt".format(COLORED_COMBOS.GOOD))
            with open("{}/robots.txt".format(self.target_dir), "w") as file:
                file.write(res.text)

    def _sitemap(self):
        res = self.request_handler.send(
            "GET",
            url="{}://{}:{}/sitemap.xml".format(
                self.host.protocol, self.host.target, self.host.port
            )
        )
        if res.status_code != 404 and res.text and "<!DOCTYPE html>" not in res.text:
            self.logger.info("{} Found sitemap.xml".format(COLORED_COMBOS.GOOD))
            with open("{}/sitemap.xml".format(self.target_dir), "w") as file:
                file.write(res.text)

    def get_web_application_info(self):
        session = self.request_handler.get_new_session()
        try:
            with session:
                # Test if target is serving HTTP requests
                response = session.get(
                    timeout=20,
                    url="{}://{}:{}".format(
                        self.host.protocol, self.host.target, self.host.port
                    )
                )
                self.headers = response.headers
                self._detect_cms()
                self._robots()
                self._sitemap()
                self._server_info()
                self._x_powered_by()
                self._cors_wildcard()
                self._xss_protection()
                self._anti_clickjacking()
                self._cookie_info(session.cookies)
        except (ConnectionError, TooManyRedirects) as e:
            raise WebAppScannerException("Couldn't get response from server.\n"
                                         "Caused due to exception: {}".format(str(e)))

    async def run_scan(self):
        self.logger.info("{} Trying to collect {} web application data".format(
            COLORED_COMBOS.INFO, self.host))
        try:
            self.web_server_validator.validate_target_webserver(self.host)
            self.get_web_application_info()
        except WebServerValidatorException:
            self.logger.info(
                "{} Target does not seem to have an active web server on port: {}. "
                "No web application data will be gathered.".format(
                    COLORED_COMBOS.WARNING, self.host.port))
            return
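# Illustrative usage sketch (an assumption): run_scan() is also a coroutine and can be
# awaited alongside the other scanners; "parsed_host" is a Host that already ran parse().
#
# scanner = WebApplicationScanner(parsed_host)
# asyncio.get_event_loop().run_until_complete(scanner.run_scan())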
class TLSHandler(TLSCipherSuiteChecker):

    def __init__(self, host, port=443):
        super().__init__(host)
        self.target = host.target
        self.port = port
        self._versions = ("tls1", "tls1_1", "tls1_2")
        # OpenSSL likes to hang, Linux timeout to the rescue
        self._base_script = "timeout 10 openssl s_client -connect {}:{} ".format(
            self.target, self.port)
        self.begin = "-----BEGIN CERTIFICATE-----"
        self.end = "-----END CERTIFICATE-----"
        self.sni_data = {}
        self.non_sni_data = {}
        self.ciphers = ""
        log_file = HelpUtilities.get_output_path("{}/tls_report.txt".format(
            self.target))
        self.logger = Logger(log_file)

    def _tls_results_exist(self):
        if all(not x for x in (self.ciphers,
                               *self.non_sni_data.values(),
                               *self.sni_data.values())):
            return
        return True

    def _are_certificates_identical(self):
        """
        Validate that both certificates exist and compare them.
        :returns: True if they are identical, None otherwise
        """
        sni_cert = self.sni_data.get("Certificate_details")
        non_sni_cert = self.non_sni_data.get("Certificate_details")
        if sni_cert and non_sni_cert and sni_cert == non_sni_cert:
            return True
        return

    def _is_certificate_exists(self, text):
        if self.begin in text and self.end in text:
            return True
        return

    async def _extract_certificate_details(self, data):
        process = await create_subprocess_exec(
            "timeout", "5", "openssl", "x509", "-text",
            stdin=PIPE, stderr=PIPE, stdout=PIPE)
        result, err = await process.communicate(input=bytes(data, encoding='ascii'))
        result = result.decode().strip()
        cert_details = result.split(self.begin)[0].strip()

        result_lines = cert_details.split("\n")
        for i, line in enumerate(result_lines):
            if "DNS:" in line:
                result_lines.pop(i)
                result_lines.pop(i - 1)

        cert_details = "\n".join(result_lines)
        return cert_details

    async def is_heartbleed_vulnerable(self):
        script = self._base_script + "-tlsextdebug"
        process = await create_subprocess_exec(*script.split(), stdout=PIPE, stderr=PIPE)
        result, err = await process.communicate()
        try:
            if "server extension \"heartbeat\" (id=15)" in result.decode().strip():
                self.logger.info(
                    "{} Target seems to be vulnerable to Heartbleed - CVE-2014-0160".format(
                        COLORED_COMBOS.GOOD))
        except TypeError:
            # TypeError means no result
            pass

    async def _execute_ssl_data_extraction(self, sni=False):
        """
        Test for version support (SNI/non-SNI), get all SANs, get certificate details
        :param sni: True will cause _run_openssl_sclient_cmd to call openssl with the -servername flag
        """
        # Do for all responses
        responses = await self._run_openssl_sclient_cmd(self._base_script, sni)
        tls_dict = self._parse_openssl_sclient_output(responses)
        # Do for one successful SSL response
        for res in responses:
            if self._is_certificate_exists(res):
                tls_dict["SANs"] = await self._get_sans_from_openssl_cmd(res)
                tls_dict["Certificate_details"] = await self._extract_certificate_details(res)
                break
        return tls_dict

    async def _run_openssl_sclient_cmd(self, script, sni=False):
        processes = []
        outputs = []
        if sni:
            script += " -servername {}".format(self.target)
        for v in self._versions:
            curr = (script + ' -{}'.format(v)).split()
            processes.append(await create_subprocess_exec(*curr, stdout=PIPE, stderr=PIPE))
        for p in processes:
            result, err = await p.communicate()
            outputs.append(result.decode().strip())
        return outputs

    @staticmethod
    async def _get_sans_from_openssl_cmd(data):
        process = await create_subprocess_exec(
            "openssl", "x509", "-noout", "-text",
            stdin=PIPE, stderr=PIPE, stdout=PIPE)
        result, err = await process.communicate(input=bytes(data, encoding='ascii'))
        sans = re.findall(r"DNS:\S*\b", result.decode().strip())
        return {san.replace("DNS:", '') for san in sans}

    def _parse_openssl_sclient_output(self, results):
        is_supported = {"TLSv1": False, "TLSv1.1": False, "TLSv1.2": False}
        for res in results:
            if not self._is_certificate_exists(res):
                continue
            for line in res.split('\n'):
                if "Protocol" in line:
                    ver = line.strip().split(':')[1].strip()
                    is_supported[ver] = True
        return is_supported

    def _dictionary_log_procedure(self, result_dict):
        for k, v in result_dict.items():
            if k == "SANs":
                self.logger.debug("{0}:\n{1}\n {2}\n{1}\n".format(
                    k, "-" * 15, "\n".join(v)))
            elif k == "Certificate_details":
                self.logger.debug(v)
            else:
                self.logger.debug("{}: {}\n".format(k, v))

    def write_up(self):
        self.logger.info("Supporting Ciphers:\n")
        self.logger.info(self.ciphers + "\n")
        self.logger.debug("-" * 80 + "\n")
        self.logger.debug("SNI Data:\n")
        self._dictionary_log_procedure(self.sni_data)
        self.logger.debug("-" * 80 + "\n")
        self.logger.debug("non-SNI Data:\n")
        self._dictionary_log_procedure(self.non_sni_data)

    async def run(self, sni=True):
        self.logger.info("{} Started collecting TLS data for {}".format(
            COLORED_COMBOS.INFO, self.target))
        self.ciphers = await self.scan_ciphers(self.port)
        self.non_sni_data = await self._execute_ssl_data_extraction()
        if sni:
            self.sni_data = await self._execute_ssl_data_extraction(sni=sni)
        await self.is_heartbleed_vulnerable()
        if self._tls_results_exist():
            self.logger.info("{} Done collecting TLS data".format(
                COLORED_COMBOS.GOOD))
            if self._are_certificates_identical():
                self.non_sni_data["Certificate_details"] = "Same as SNI Certificate"
            self.write_up()
        else:
            self.logger.info(
                "{} Could not obtain any TLS data from target on port {}. "
                "Target may not support SSL/TLS or supports it on a different port.".format(
                    COLORED_COMBOS.BAD, self.port))
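# Illustrative usage sketch (an assumption): it requires openssl and the GNU timeout
# utility on PATH (the class shells out to both) and a target that serves TLS on the
# given port; "parsed_host" is a Host that already ran parse().
#
# tls = TLSHandler(parsed_host, port=443)
# asyncio.get_event_loop().run_until_complete(tls.run(sni=True))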