def delete_all(self, filter_data, target_id=None): """Delete all plugin output .note:: Here keeping filter_data optional is very risky :param filter_data: Filter data :type filter_data: `dict` :param target_id: target ID :type target_id: `int` :return: None :rtype: None """ # for_delete = True: empty dict will match all results query = self.gen_query(filter_data, target_id, for_delete=True) # Delete the folders created for these plugins for plugin in query.all(): # First check if path exists in db if plugin.output_path: output_path = os.path.join(self.config.get_output_dir_target(), plugin.output_path) if os.path.exists(output_path): FileOperations.rm_tree(output_path) # When folders are removed delete the results from db results = query.delete() self.db.session.commit()
def __init__(self):
    """Initialize a Core instance.

    .. note::

        [*] Tightly coupled, cohesive framework components
        [*] Order is important

        + IO decorated so as to abort on any permission errors
        + Required folders created
        + All other components are attached to core: shell, db etc... (using ServiceLocator)

    :return: instance of :class:`owtf.core.Core`
    :rtype::class:`owtf.core.Core`
    """
    self.register_in_service_locator()
    # ------------------------ IO decoration ------------------------ #
    # Wrap FileHandler so any permission error aborts instead of crashing later.
    self.file_handler = catch_io_errors(logging.FileHandler)
    # -------------------- Component attachment -------------------- #
    # Components must already be registered in the service locator at this point.
    self.db = self.get_component("db")
    self.config = self.get_component("config")
    self.db_config = self.get_component("db_config")
    self.error_handler = self.get_component("error_handler")
    # ----------------------- Directory creation ----------------------- #
    # Logs dir must exist before enable_logging() attaches file handlers.
    FileOperations.create_missing_dirs(self.config.get_logs_dir())
    self.create_temp_storage_dirs()
    self.enable_logging()
    # The following attributes will be initialised later
    self.tor_process = None
def create_output_dir_target(self, target_url):
    """Ensure the output directory for ``target_url`` exists.

    :param target_url: The target URL
    :type target_url: `str`
    :return: None
    :rtype: None
    """
    target_dir = self.get_target_dir(target_url)
    FileOperations.create_missing_dirs(target_dir)
def create_temp_storage_dirs(self):
    """Create a temporary directory in /tmp with pid suffix.

    The scratch area is ``/tmp/owtf/<pid>`` so concurrent OWTF processes
    do not trample each other's temporary files.

    :return: None
    :rtype: None
    """
    # Bug fix: previously the pid sub-directory was only composed when
    # /tmp/owtf did NOT exist, so any leftover /tmp/owtf from an earlier
    # run meant no per-process directory was ever created.
    tmp_dir = os.path.join('/tmp', 'owtf', str(self.config.owtf_pid))
    if not os.path.exists(tmp_dir):
        FileOperations.make_dirs(tmp_dir)
def target_service(self, nmap_file, service):
    """Services for a target

    Scans a gnmap file for hosts exposing *service* and returns a
    ``host:port:protocol##``-separated string of matches.

    :param nmap_file: Path to nmap file
    :type nmap_file: `str`
    :param service: Service to get
    :type service: `str`
    :return: Response
    :rtype: `str`
    """
    ports_for_service = self.get_ports_for_service(service, "")
    response = ""
    # 'with' guarantees the handle is closed even if parsing raises
    # (the original leaked it on any exception before f.close()).
    with FileOperations.open(nmap_file.strip()) as f:
        for host_ports in re.findall('Host: (.*?)\tPorts: (.*?)[\t\n]', f.read()):
            host = host_ports[0].split(' ')[0]  # Remove junk at the end
            ports = host_ports[1].split(',')
            for port_info in ports:
                if len(port_info) < 1:
                    continue
                chunk = port_info.split('/')
                port = chunk[0].strip()
                port_state = chunk[1].strip()
                # No point in wasting time probing closed/filtered ports!!
                # (nmap sometimes adds these to the gnmap file for some reason ..)
                if port_state in ['closed', 'filtered']:
                    continue
                try:
                    prot = chunk[2].strip()
                except IndexError:
                    # Malformed entry without a protocol field; the original
                    # bare 'except:' silently swallowed every exception here.
                    continue
                if port in ports_for_service:
                    response += "%s:%s:%s##" % (host, port, prot)
    return response
def get_test_groups_config(self, file_path):
    """Reads the test groups from a config file

    .note:: This needs to be a list instead of a dictionary to preserve order in python < 2.7

    :param file_path: The path to the config file
    :type file_path: `str`
    :return: List of test groups
    :rtype: `list`
    """
    test_groups = []
    config_file = FileOperations.open(file_path, 'r').read().splitlines()
    for line in config_file:
        # Robustness fix: a blank line previously raised IndexError at line[0].
        if not line or line[0] == '#':
            continue  # Skip blanks and comments
        try:
            code, priority, descrip, hint, url = line.strip().split(' | ')
        except ValueError:
            self.error_handler.abort_framework(
                "Problem in Test Groups file: '%s' -> Cannot parse line: %s" % (file_path, line))
        # Fall back to the hint when the description is effectively empty.
        if len(descrip) < 2:
            descrip = hint
        if len(hint) < 2:
            hint = ""
        test_groups.append({'code': code, 'priority': priority, 'descrip': descrip, 'hint': hint, 'url': url})
    return test_groups
def load_config_from_file(self, config_path):
    """Load the configuration into a global dictionary.

    :param config_path: The configuration file path
    :type config_path: `str`
    :return: None
    :rtype: None
    """
    cprint("Loading config from: %s.." % config_path)
    config_file = FileOperations.open(config_path, 'r')
    self.set_val('FRAMEWORK_DIR', self.root_dir)  # Needed Later.
    for line in config_file:
        # Robustness fix: an empty line previously raised an uncaught
        # IndexError at key[0] (IndexError is not a ValueError).
        if not line.strip() or line.strip()[0] == '#':
            continue  # Ignore blank and comment lines.
        try:
            key = line.split(':')[0]
            value = line.replace("%s: " % key, "").strip()
            # Expand the placeholders supported in config values.
            self.set_val(
                key,
                self.multi_replace(value, {'FRAMEWORK_DIR': self.root_dir, 'OWTF_PID': str(self.owtf_pid)}))
        except ValueError:
            self.error_handler.abort_framework(
                "Problem in config file: %s -> Cannot parse line: %s" % (config_path, line))
def get_test_groups_config(self, file_path):
    """Reads the test groups from a config file

    .note:: This needs to be a list instead of a dictionary to preserve order in python < 2.7

    :param file_path: The path to the config file
    :type file_path: `str`
    :return: List of test groups
    :rtype: `list`
    """
    groups = []
    for raw_line in FileOperations.open(file_path, 'r').read().splitlines():
        if raw_line[0] == '#':  # Skip comments
            continue
        try:
            code, priority, descrip, hint, url = raw_line.strip().split(' | ')
        except ValueError:
            self.error_handler.abort_framework(
                "Problem in Test Groups file: '%s' -> Cannot parse line: %s" % (file_path, raw_line))
        # A too-short description falls back to the hint; a too-short hint is dropped.
        descrip = descrip if len(descrip) >= 2 else hint
        hint = hint if len(hint) >= 2 else ""
        groups.append(dict(code=code, priority=priority, descrip=descrip, hint=hint, url=url))
    return groups
def add_body(self, message, text):
    """Attach a plain-text body to *message*.

    If *text* is a path to an existing file, the file's contents become
    the body; otherwise *text* itself is used verbatim.

    :param message: MIME message object the body is attached to
    :param text: Body text, or path to a file containing it
    :return: None
    """
    # If a file has been specified as Body, then set Body to file contents.
    if os.path.isfile(text):
        body = FileOperations.open(text).read().strip()
    else:
        body = text
    # Bug fix: MIMEText's second argument is the MIME *subtype*; the
    # original mistakenly passed the message object itself there.
    message.attach(MIMEText.MIMEText(body, 'plain'))
def cleanup_target_dirs(self, target_url):
    """Cleanup the directories for the specific target

    :return: None
    :rtype: None
    """
    target_dir = self.get_target_dir(target_url)
    return FileOperations.rm_tree(target_dir)
def dns_sweep(self, file_with_ips, file_prefix):
    """Do a DNS sweep

    Finds live DNS servers among the given IPs, derives their domains and
    attempts zone transfers, logging the harvested records on success.

    :param file_with_ips: Path of file with IP addresses
    :type file_with_ips: `str`
    :param file_prefix: File name prefix
    :type file_prefix: `str`
    :return: None
    :rtype: None
    """
    logging.info("Finding misconfigured DNS servers that might allow zone transfers among live ips ..")
    self.shell.shell_exec("nmap -PN -n -sS -p 53 -iL %s -oA %s" % (file_with_ips, file_prefix))
    # Step 2 - Extract IPs
    dns_servers = "%s.dns_server.ips" % file_prefix
    self.shell.shell_exec('grep \"53/open/tcp\" %s.gnmap | cut -f 2 -d \" \" > %s' % (file_prefix, dns_servers))
    file = FileOperations.open(dns_servers)
    domain_names = "%s.domain_names" % file_prefix
    self.shell.shell_exec("rm -f %s" % domain_names)
    num_dns_servers = 0
    for line in file:
        if line.strip('\n'):
            dns_server = line.strip('\n')
            self.shell.shell_exec("host %s %s | grep 'domain name' | cut -f 5 -d' ' | cut -f 2,3,4,5,6,7 -d. "
                                  "| sed 's/\.$//' >> %s" % (dns_server, dns_server, domain_names))
            num_dns_servers += 1
    try:
        file = FileOperations.open(domain_names, owtf_clean=False)
    except IOError:
        return
    # NOTE(review): 'dns_server' below is whatever the LAST server in the
    # loop above was; if the server list was empty it is unbound — confirm.
    for line in file:
        domain = line.strip('\n')
        raw_axfr = "%s.%s.%s.axfr.raw" % (file_prefix, dns_server, domain)
        self.shell.shell_exec("host -l %s %s | grep %s > %s" % (domain, dns_server, domain, raw_axfr))
        success = self.shell.shell_exec("wc -l %s | cut -f 1 -d ' '" % raw_axfr)
        # Bug fix: shell output is a string; the original compared str > int,
        # which is always True on Python 2 and a TypeError on Python 3.
        try:
            axfr_lines = int(success)
        except (TypeError, ValueError):
            axfr_lines = 0
        if axfr_lines > 3:
            # Bug fix: interpolate the server ("$dns_server" was a literal shell-ism).
            logging.info("Attempting zone transfer on %s using domain %s.. Success!" % (dns_server, domain))
            axfr = "%s.%s.%s.axfr" % (file_prefix, dns_server, domain)
            self.shell.shell_exec("rm -f %s" % axfr)
            logging.info(self.shell.shell_exec("grep 'has address' %s | cut -f 1,4 -d ' ' | sort -k 2 -t ' ' "
                                               "| sed 's/ /#/g'" % raw_axfr))
        else:
            # Bug fix: this is the FAILURE branch — the original logged "Success!" here too.
            logging.info("Attempting zone transfer on %s using domain %s.. Failed." % (dns_server, domain))
            self.shell.shell_exec("rm -f %s" % raw_axfr)
    if num_dns_servers == 0:
        return
def add_attachment(self, message, attachment):
    """Attach a file to *message* as a base64-encoded octet-stream.

    :param message: MIME message the file is attached to
    :param attachment: Path of the file to attach; a falsy value is a no-op
    :return: True if an attachment was added, False otherwise
    :rtype: `bool`
    """
    if not attachment:
        return False
    binary_blob = MIMEBase.MIMEBase('application', 'octet-stream')
    # 'with' ensures the file handle is closed (the original leaked it).
    with FileOperations.open(attachment, 'rb') as f:
        binary_blob.set_payload(f.read())
    Encoders.encode_base64(binary_blob)  # base64 encode the Binary Blob.
    # Binary Blob headers.
    binary_blob.add_header('Content-Disposition', 'attachment; filename="%s"' % os.path.basename(attachment))
    message.attach(binary_blob)
    return True
def get_resources_from_file(self, resource_file):
    """Fetch resources for a file

    :param resource_file: Path to the resource file
    :type resource_file: `str`
    :return: Resources as a set
    :rtype: `set`
    """
    resources = set()
    config_file = FileOperations.open(resource_file, 'r').read().splitlines()  # To remove stupid '\n' at the end
    for line in config_file:
        # Robustness fix: a blank line previously raised IndexError at line[0].
        if not line or line[0] == '#':
            continue  # Skip blanks and comment lines
        try:
            # 'res_type' instead of 'type' so the builtin is not shadowed.
            res_type, name, resource = line.split('_____')
            resources.add((res_type, name, resource))
        except ValueError:
            cprint("ERROR: The delimiter is incorrect in this line at Resource File: %s" % str(line.split('_____')))
    return resources
def dump_output_file(self, filename, contents, plugin, relative_path=False):
    """Write a plugin output file and return its path.

    :param filename: Name of the file
    :type filename: `str`
    :param contents: Contents of the file
    :type contents: `str`
    :param plugin: Plugin
    :type plugin: `dict`
    :param relative_path: use relative path
    :type relative_path: `bool`
    :return: Absolute path to the file (relative when requested)
    :rtype: `str`
    """
    save_dir = self.get_plugin_output_dir(plugin)
    abs_path = FileOperations.dump_file(filename, contents, save_dir)
    if not relative_path:
        return abs_path
    return os.path.relpath(abs_path, self.config.get_output_dir_target())
def _get_db_settings(self):
    """Create DB settings according to the configuration file.

    :return: Settings dict
    :rtype: `dict`
    """
    config_path = os.path.expanduser(self.config.get_val('DATABASE_SETTINGS_FILE'))
    settings = {}
    with FileOperations.open(config_path, 'r') as f:
        for line in f:
            line = line.rstrip()
            # Ignore empty/comment lines.
            if not line or line.startswith('#'):
                continue
            try:
                # Split on the FIRST ':' only, so values that themselves
                # contain a colon (e.g. passwords) no longer abort the framework.
                key, value = line.split(':', 1)
                settings[key.strip()] = value.strip()
            except ValueError:
                self.error_handler.abort_framework(
                    "Problem in config file: '%s' -> Cannot parse line: %s" % (config_path, line))
    return settings
def get_resources_from_file(self, resource_file):
    """Fetch resources for a file

    :param resource_file: Path to the resource file
    :type resource_file: `str`
    :return: Resources as a set
    :rtype: `set`
    """
    found = set()
    # splitlines() drops the trailing newlines for us.
    entries = FileOperations.open(resource_file, 'r').read().splitlines()
    for entry in entries:
        if '#' == entry[0]:  # Skip comment lines
            continue
        fields = entry.split('_____')
        if len(fields) == 3:
            # (type, name, resource) triple
            found.add(tuple(fields))
        else:
            cprint("ERROR: The delimiter is incorrect in this line at Resource File: %s" % str(fields))
    return found
def get_ports_for_service(self, service, protocol):
    """Get ports for different services

    :param service: Service name
    :type service: `str`
    :param protocol: Protocol
    :type protocol: `str`
    :return: List of ports
    :rtype: `list`
    """
    # Raw string + compiled pattern. The original compiled the pattern but
    # discarded the result, re-parsing the regex on every findall() call.
    regexp = re.compile(r'(.*?)\t(.*?/.*?)\t(.*?)($|\t)(#.*){0,1}')
    ports = []  # renamed from 'list' to stop shadowing the builtin
    # 'with' guarantees the nmap-services file is closed even on error.
    with FileOperations.open(self.get_nmap_services_file()) as f:
        for line in f.readlines():
            if line.lower().find(service) >= 0:
                match = regexp.findall(line)
                if match:
                    port = match[0][1].split('/')[0]
                    prot = match[0][1].split('/')[1]
                    if (not protocol or protocol == prot) and port not in ports:
                        ports.append(port)
    return ports
def InitPluginOutputDir(self, PluginInfo):
    """Resolve the plugin's output directory and make sure it exists.

    :param PluginInfo: plugin descriptor used to derive the output path
    :return: path of the (now existing) plugin output directory
    """
    plugin_output_dir = self.SetConfigPluginOutputDir(PluginInfo)
    # Create output dir so that scripts can cd to it :)
    FileOperations.create_missing_dirs(plugin_output_dir)
    return plugin_output_dir
def check_mount_point_existence(self, options):
    """Create the SMB mount point directory if it is missing.

    :param options: dict holding the 'SMB_MOUNT_POINT' path
    :return: None
    """
    mount_point = options['SMB_MOUNT_POINT']
    if os.path.exists(mount_point):
        return
    FileOperations.make_dirs(mount_point)
def dns_sweep(self, file_with_ips, file_prefix):
    """Do a DNS sweep

    Finds live DNS servers among the given IPs, derives their domains and
    attempts zone transfers, logging the harvested records.

    :param file_with_ips: Path of file with IP addresses
    :type file_with_ips: `str`
    :param file_prefix: File name prefix
    :type file_prefix: `str`
    :return: None
    :rtype: None
    """
    logging.info("Finding misconfigured DNS servers that might allow zone transfers among live ips ..")
    self.shell.shell_exec("nmap -PN -n -sS -p 53 -iL %s -oA %s" % (file_with_ips, file_prefix))
    # Step 2 - Extract IPs
    dns_servers = "%s.dns_server.ips" % file_prefix
    self.shell.shell_exec('grep \"53/open/tcp\" %s.gnmap | cut -f 2 -d \" \" > %s' % (file_prefix, dns_servers))
    file = FileOperations.open(dns_servers)
    domain_names = "%s.domain_names" % file_prefix
    self.shell.shell_exec("rm -f %s" % domain_names)
    num_dns_servers = 0
    for line in file:
        if line.strip('\n'):
            dns_server = line.strip('\n')
            # Resolve each server's domain(s) and append them to domain_names.
            self.shell.shell_exec("host %s %s | grep 'domain name' | cut -f 5 -d' ' | cut -f 2,3,4,5,6,7 -d. "
                                  "| sed 's/\.$//' >> %s" % (dns_server, dns_server, domain_names))
            num_dns_servers += 1
    try:
        file = FileOperations.open(domain_names, owtf_clean=False)
    except IOError:
        # No domains discovered -> nothing to attempt.
        return
    # NOTE(review): 'dns_server' below is whatever the LAST server in the
    # loop above was; if the server list was empty it is unbound — confirm.
    for line in file:
        domain = line.strip('\n')
        raw_axfr = "%s.%s.%s.axfr.raw" % (file_prefix, dns_server, domain)
        self.shell.shell_exec("host -l %s %s | grep %s > %s" % (domain, dns_server, domain, raw_axfr))
        success = self.shell.shell_exec("wc -l %s | cut -f 1 -d ' '" % raw_axfr)
        # NOTE(review): shell_exec presumably returns a string; comparing
        # str > int is always True on Python 2 and a TypeError on Python 3 —
        # should likely be int(success) > 3. Confirm and fix.
        if success > 3:
            # NOTE(review): "$dns_server" is a literal — never interpolated.
            logging.info("Attempting zone transfer on $dns_server using domain %s.. Success!" % domain)
            axfr = "%s.%s.%s.axfr" % (file_prefix, dns_server, domain)
            self.shell.shell_exec("rm -f %s" % axfr)
            logging.info(self.shell.shell_exec("grep 'has address' %s | cut -f 1,4 -d ' ' | sort -k 2 -t ' ' "
                                               "| sed 's/ /#/g'" % raw_axfr))
        else:
            # NOTE(review): this is the failure branch, yet it also logs
            # "Success!" — the message looks wrong; confirm intent.
            logging.info("Attempting zone transfer on $dns_server using domain %s.. Success!" % domain)
            self.shell.shell_exec("rm -f %s" % raw_axfr)
    if num_dns_servers == 0:
        return
def initialize(self, outbound_options=[], outbound_auth=""):
    """Initialize the proxy process

    Order matters here: the tornado application must exist before
    attributes are attached to it, and cache/SSL folders are prepared
    before the HTTP server is created.

    NOTE(review): 'outbound_options=[]' is a mutable default argument;
    it is never mutated here so it is benign, but 'None' would be safer.

    :param outbound_options: Outbound proxy options
    :type outbound_options: `list`
    :param outbound_auth: Authentication string
    :type outbound_auth: `str`
    :return: None
    :rtype: None
    """
    # The tornado application, which is used to pass variables to request handler
    self.application = tornado.web.Application(handlers=[(r'.*', ProxyHandler)], debug=False, gzip=True,)
    self.config = self.get_component("config")
    self.db_config = self.get_component("db_config")
    # All required variables in request handler
    # Required variables are added as attributes to application, so that request handler can access these
    self.application.core = self.get_component("core")
    self.application.inbound_ip = self.db_config.get('INBOUND_PROXY_IP')
    self.application.inbound_port = int(self.db_config.get('INBOUND_PROXY_PORT'))
    self.instances = self.db_config.get("INBOUND_PROXY_PROCESSES")
    # Proxy CACHE
    # Cache related settings, including creating required folders according to cache folder structure
    self.application.cache_dir = self.db_config.get("INBOUND_PROXY_CACHE_DIR")
    # Clean possible older cache directory.
    if os.path.exists(self.application.cache_dir):
        FileOperations.rm_tree(self.application.cache_dir)
    FileOperations.make_dirs(self.application.cache_dir)
    # SSL MiTM
    # SSL certs, keys and other settings (os.path.expanduser because they are stored in users home directory
    # ~/.owtf/proxy)
    self.application.ca_cert = os.path.expanduser(self.db_config.get('CA_CERT'))
    self.application.ca_key = os.path.expanduser(self.db_config.get('CA_KEY'))
    # To stop OWTF from breaking for our beloved users :P
    try:
        self.application.ca_key_pass = FileOperations.open(os.path.expanduser(self.db_config.get('CA_PASS_FILE')),
                                                           'r', owtf_clean=False).read().strip()
    except IOError:
        self.application.ca_key_pass = "******"  # XXX: Legacy CA key pass for older versions.
    self.application.proxy_folder = os.path.dirname(self.application.ca_cert)
    self.application.certs_folder = os.path.expanduser(self.db_config.get('CERTS_FOLDER'))
    try:  # Ensure CA.crt and Key exist.
        assert os.path.exists(self.application.ca_cert)
        assert os.path.exists(self.application.ca_key)
    except AssertionError:
        self.get_component("error_handler").abort_framework("Files required for SSL MiTM are missing."
                                                            " Please run the install script")
    try:  # If certs folder missing, create that.
        assert os.path.exists(self.application.certs_folder)
    except AssertionError:
        FileOperations.make_dirs(self.application.certs_folder)
    # Blacklist (or) Whitelist Cookies
    # Building cookie regex to be used for cookie filtering for caching
    if self.db_config.get('WHITELIST_COOKIES') == 'None':
        cookies_list = self.db_config.get('BLACKLIST_COOKIES').split(',')
        self.application.cookie_blacklist = True
    else:
        cookies_list = self.db_config.get('WHITELIST_COOKIES').split(',')
        self.application.cookie_blacklist = False
    if self.application.cookie_blacklist:
        regex_cookies_list = [cookie + "=([^;]+;?)" for cookie in cookies_list]
    else:
        # NOTE(review): this iterates db_config.get('COOKIES_LIST') rather than
        # the 'cookies_list' built above; if get() returns a string this walks
        # it character by character — looks like a bug, confirm intent.
        regex_cookies_list = ["(" + cookie + "=[^;]+;?)" for cookie in self.db_config.get('COOKIES_LIST')]
    regex_string = '|'.join(regex_cookies_list)
    self.application.cookie_regex = re.compile(regex_string)
    # Outbound Proxy
    # Outbound proxy settings to be used inside request handler
    if outbound_options:
        if len(outbound_options) == 3:
            # [type, ip, port]
            self.application.outbound_proxy_type = outbound_options[0]
            self.application.outbound_ip = outbound_options[1]
            self.application.outbound_port = int(outbound_options[2])
        else:
            # [ip, port] -> assume plain http proxy
            self.application.outbound_proxy_type = "http"
            self.application.outbound_ip = outbound_options[0]
            self.application.outbound_port = int(outbound_options[1])
    else:
        self.application.outbound_ip = None
        self.application.outbound_port = None
        self.application.outbound_proxy_type = None
    if outbound_auth:
        self.application.outbound_username, self.application.outbound_password = outbound_auth.split(":")
    else:
        self.application.outbound_username = None
        self.application.outbound_password = None
    self.server = tornado.httpserver.HTTPServer(self.application)
    # server has to be a class variable, because it is used inside request handler to attach sockets for monitoring
    ProxyHandler.server = self.server
    # Header filters
    # Restricted headers are picked from framework/config/framework_config.cfg
    # These headers are removed from the response obtained from webserver, before sending it to browser
    restricted_response_headers = self.config.get_val("PROXY_RESTRICTED_RESPONSE_HEADERS").split(",")
    ProxyHandler.restricted_response_headers = restricted_response_headers
    # These headers are removed from request obtained from browser, before sending it to webserver
    restricted_request_headers = self.config.get_val("PROXY_RESTRICTED_REQUEST_HEADERS").split(",")
    ProxyHandler.restricted_request_headers = restricted_request_headers
    # HTTP Auth options
    if self.db_config.get("HTTP_AUTH_HOST") != "None":
        self.application.http_auth = True
        # All the variables are lists
        self.application.http_auth_hosts = self.db_config.get("HTTP_AUTH_HOST").strip().split(',')
        self.application.http_auth_usernames = self.db_config.get("HTTP_AUTH_USERNAME").strip().split(',')
        self.application.http_auth_passwords = self.db_config.get("HTTP_AUTH_PASSWORD").strip().split(',')
        self.application.http_auth_modes = self.db_config.get("HTTP_AUTH_MODE").strip().split(',')
    else:
        self.application.http_auth = False
def initialize(self, outbound_options=[], outbound_auth=""):
    """Initialize the proxy process

    Order matters: the tornado application must exist before attributes
    are attached to it, and cache/SSL folders are prepared before the
    HTTP server is created.

    NOTE(review): 'outbound_options=[]' is a mutable default argument;
    never mutated here so benign, but 'None' would be safer.

    :param outbound_options: Outbound proxy options
    :type outbound_options: `list`
    :param outbound_auth: Authentication string
    :type outbound_auth: `str`
    :return: None
    :rtype: None
    """
    # The tornado application, which is used to pass variables to request handler
    self.application = tornado.web.Application(
        handlers=[(r'.*', ProxyHandler)],
        debug=False,
        gzip=True,
    )
    self.config = self.get_component("config")
    self.db_config = self.get_component("db_config")
    # All required variables in request handler
    # Required variables are added as attributes to application, so that request handler can access these
    self.application.core = self.get_component("core")
    self.application.inbound_ip = self.db_config.get('INBOUND_PROXY_IP')
    self.application.inbound_port = int(self.db_config.get('INBOUND_PROXY_PORT'))
    self.instances = self.db_config.get("INBOUND_PROXY_PROCESSES")
    # Proxy CACHE
    # Cache related settings, including creating required folders according to cache folder structure
    self.application.cache_dir = self.db_config.get("INBOUND_PROXY_CACHE_DIR")
    # Clean possible older cache directory.
    if os.path.exists(self.application.cache_dir):
        FileOperations.rm_tree(self.application.cache_dir)
    FileOperations.make_dirs(self.application.cache_dir)
    # SSL MiTM
    # SSL certs, keys and other settings (os.path.expanduser because they are stored in users home directory
    # ~/.owtf/proxy)
    self.application.ca_cert = os.path.expanduser(self.db_config.get('CA_CERT'))
    self.application.ca_key = os.path.expanduser(self.db_config.get('CA_KEY'))
    # To stop OWTF from breaking for our beloved users :P
    try:
        self.application.ca_key_pass = FileOperations.open(
            os.path.expanduser(self.db_config.get('CA_PASS_FILE')),
            'r',
            owtf_clean=False).read().strip()
    except IOError:
        self.application.ca_key_pass = "******"  # XXX: Legacy CA key pass for older versions.
    self.application.proxy_folder = os.path.dirname(self.application.ca_cert)
    self.application.certs_folder = os.path.expanduser(self.db_config.get('CERTS_FOLDER'))
    try:  # Ensure CA.crt and Key exist.
        assert os.path.exists(self.application.ca_cert)
        assert os.path.exists(self.application.ca_key)
    except AssertionError:
        self.get_component("error_handler").abort_framework(
            "Files required for SSL MiTM are missing."
            " Please run the install script")
    try:  # If certs folder missing, create that.
        assert os.path.exists(self.application.certs_folder)
    except AssertionError:
        FileOperations.make_dirs(self.application.certs_folder)
    # Blacklist (or) Whitelist Cookies
    # Building cookie regex to be used for cookie filtering for caching
    if self.db_config.get('WHITELIST_COOKIES') == 'None':
        cookies_list = self.db_config.get('BLACKLIST_COOKIES').split(',')
        self.application.cookie_blacklist = True
    else:
        cookies_list = self.db_config.get('WHITELIST_COOKIES').split(',')
        self.application.cookie_blacklist = False
    if self.application.cookie_blacklist:
        regex_cookies_list = [cookie + "=([^;]+;?)" for cookie in cookies_list]
    else:
        # NOTE(review): iterates db_config.get('COOKIES_LIST') rather than the
        # 'cookies_list' built above; if get() returns a string this walks it
        # character by character — looks like a bug, confirm intent.
        regex_cookies_list = ["(" + cookie + "=[^;]+;?)" for cookie in self.db_config.get('COOKIES_LIST')]
    regex_string = '|'.join(regex_cookies_list)
    self.application.cookie_regex = re.compile(regex_string)
    # Outbound Proxy
    # Outbound proxy settings to be used inside request handler
    if outbound_options:
        if len(outbound_options) == 3:
            # [type, ip, port]
            self.application.outbound_proxy_type = outbound_options[0]
            self.application.outbound_ip = outbound_options[1]
            self.application.outbound_port = int(outbound_options[2])
        else:
            # [ip, port] -> assume plain http proxy
            self.application.outbound_proxy_type = "http"
            self.application.outbound_ip = outbound_options[0]
            self.application.outbound_port = int(outbound_options[1])
    else:
        self.application.outbound_ip = None
        self.application.outbound_port = None
        self.application.outbound_proxy_type = None
    if outbound_auth:
        self.application.outbound_username, self.application.outbound_password = outbound_auth.split(":")
    else:
        self.application.outbound_username = None
        self.application.outbound_password = None
    self.server = tornado.httpserver.HTTPServer(self.application)
    # server has to be a class variable, because it is used inside request handler to attach sockets for monitoring
    ProxyHandler.server = self.server
    # Header filters
    # Restricted headers are picked from framework/config/framework_config.cfg
    # These headers are removed from the response obtained from webserver, before sending it to browser
    restricted_response_headers = self.config.get_val("PROXY_RESTRICTED_RESPONSE_HEADERS").split(",")
    ProxyHandler.restricted_response_headers = restricted_response_headers
    # These headers are removed from request obtained from browser, before sending it to webserver
    restricted_request_headers = self.config.get_val("PROXY_RESTRICTED_REQUEST_HEADERS").split(",")
    ProxyHandler.restricted_request_headers = restricted_request_headers
    # HTTP Auth options
    if self.db_config.get("HTTP_AUTH_HOST") != "None":
        self.application.http_auth = True
        # All the variables are lists
        self.application.http_auth_hosts = self.db_config.get("HTTP_AUTH_HOST").strip().split(',')
        self.application.http_auth_usernames = self.db_config.get("HTTP_AUTH_USERNAME").strip().split(',')
        self.application.http_auth_passwords = self.db_config.get("HTTP_AUTH_PASSWORD").strip().split(',')
        self.application.http_auth_modes = self.db_config.get("HTTP_AUTH_MODE").strip().split(',')
    else:
        self.application.http_auth = False