def delete_all_poutput(session, filter_data, target_id=None):
    """Delete all plugin output rows matching the filter, plus their on-disk folders.

    .note:: Here keeping filter_data optional is very risky

    :param filter_data: Filter data
    :type filter_data: `dict`
    :param target_id: target ID
    :type target_id: `int`
    :return: None
    :rtype: None
    """
    # for_delete = True: empty dict will match all results
    query = poutput_gen_query(session, filter_data, target_id, for_delete=True)
    # Remove any folders recorded for the matched plugin outputs first.
    for plugin in query.all():
        if plugin.output_path:  # path may be unset in the db
            full_path = os.path.join(get_output_dir_target(), plugin.output_path)
            if os.path.exists(full_path):
                FileOperations.rm_tree(full_path)
    # Folders are gone -- now drop the rows themselves and persist.
    query.delete()
    session.commit()
def get_test_groups_config(file_path):
    """Reads the test groups from a config file

    .note:: This needs to be a list instead of a dictionary to preserve order in python < 2.7

    :param file_path: The path to the config file
    :type file_path: `str`
    :return: List of test groups
    :rtype: `list`
    """
    test_groups = []
    config_file = FileOperations.open(file_path, 'r').read().splitlines()
    for line in config_file:
        # Skip blank lines (the original `'#' == line[0]` test raised
        # IndexError on them) as well as comment lines.
        if not line.strip() or line.startswith('#'):
            continue
        try:
            code, priority, descrip, hint, url = line.strip().split(' | ')
        except ValueError:
            abort_framework(
                "Problem in Test Groups file: '%s' -> Cannot parse line: %s" % (file_path, line))
            # Guard in case abort_framework does not terminate: without this,
            # stale values from the previous iteration would be appended.
            continue
        if len(descrip) < 2:
            descrip = hint
        if len(hint) < 2:
            hint = ""
        test_groups.append({
            'code': code,
            'priority': priority,
            'descrip': descrip,
            'hint': hint,
            'url': url
        })
    return test_groups
def add_body(self, message, text):
    """Attach a plain-text body to *message*.

    If *text* is a path to an existing file, the file's contents become the
    body; otherwise *text* itself is used verbatim.

    :param message: MIME message object to attach the body to
    :param text: Body text, or a path to a file containing it
    :return: None
    :rtype: None
    """
    # If a file has been specified as Body, then set Body to file contents.
    if os.path.isfile(text):
        body = FileOperations.open(text).read().strip()
    else:
        body = text
    # BUG FIX: MIMEText's second positional argument is the MIME *subtype*;
    # passing the message object produced a bogus Content-Type header.
    message.attach(MIMEText.MIMEText(body))
def target_service(self, nmap_file, service):
    """Services for a target

    Scans a gnmap file for hosts exposing *service* on an open port.

    :param nmap_file: Path to nmap file
    :type nmap_file: `str`
    :param service: Service to get
    :type service: `str`
    :return: "host:port:protocol##"-joined matches
    :rtype: `str`
    """
    ports_for_service = self.get_ports_for_service(service, "")
    f = FileOperations.open(nmap_file.strip())
    response = ""
    for host_ports in re.findall('Host: (.*?)\tPorts: (.*?)[\t\n]', f.read()):
        host = host_ports[0].split(' ')[0]  # Remove junk at the end
        ports = host_ports[1].split(',')
        for port_info in ports:
            if len(port_info) < 1:
                continue
            chunk = port_info.split('/')
            port = chunk[0].strip()
            port_state = chunk[1].strip()
            # No point in wasting time probing closed/filtered ports!!
            # (nmap sometimes adds these to the gnmap file for some reason ..)
            if port_state in ['closed', 'filtered']:
                continue
            try:
                prot = chunk[2].strip()
            except IndexError:
                # BUG FIX: was a bare `except:`; only a missing protocol
                # field is expected here.
                continue
            if port in ports_for_service:
                response += "%s:%s:%s##" % (host, port, prot)
    f.close()
    return response
def dns_sweep(self, file_with_ips, file_prefix):
    """Do a DNS sweep

    Finds live DNS servers via nmap, then attempts a zone transfer against
    each discovered domain on each server.

    :param file_with_ips: Path of file with IP addresses
    :type file_with_ips: `str`
    :param file_prefix: File name prefix
    :type file_prefix: `str`
    :return: None
    :rtype: None
    """
    logging.info("Finding misconfigured DNS servers that might allow zone transfers among live ips ..")
    self.shell.shell_exec("nmap -PN -n -sS -p 53 -iL %s -oA %s" % (file_with_ips, file_prefix))
    # Step 2 - Extract IPs
    dns_servers = "%s.dns_server.ips" % file_prefix
    self.shell.shell_exec('grep \"53/open/tcp\" %s.gnmap | cut -f 2 -d \" \" > %s' % (file_prefix, dns_servers))
    file = FileOperations.open(dns_servers)
    domain_names = "%s.domain_names" % file_prefix
    self.shell.shell_exec("rm -f %s" % domain_names)
    num_dns_servers = 0
    for line in file:
        if line.strip('\n'):
            dns_server = line.strip('\n')
            # Ask each DNS server for its own reverse-lookup domain name(s).
            self.shell.shell_exec("host %s %s | grep 'domain name' | cut -f 5 -d' ' | cut -f 2,3,4,5,6,7 -d. "
                                  "| sed 's/\.$//' >> %s" % (dns_server, dns_server, domain_names))
            num_dns_servers += 1
    try:
        file = FileOperations.open(domain_names, owtf_clean=False)
    except IOError:
        return
    # NOTE(review): `dns_server` below is the last server from the loop above;
    # if no servers were found the domains file is expected to be empty.
    for line in file:
        domain = line.strip('\n')
        raw_axfr = "%s.%s.%s.axfr.raw" % (file_prefix, dns_server, domain)
        self.shell.shell_exec("host -l %s %s | grep %s > %s" % (domain, dns_server, domain, raw_axfr))
        success = self.shell.shell_exec("wc -l %s | cut -f 1 -d ' '" % raw_axfr)
        if success > 3:
            # BUG FIX: "$dns_server" was a literal shell-variable leftover in
            # the log message; interpolate the actual server.
            logging.info("Attempting zone transfer on %s using domain %s.. Success!" % (dns_server, domain))
            axfr = "%s.%s.%s.axfr" % (file_prefix, dns_server, domain)
            self.shell.shell_exec("rm -f %s" % axfr)
            logging.info(self.shell.shell_exec("grep 'has address' %s | cut -f 1,4 -d ' ' | sort -k 2 -t ' ' "
                                               "| sed 's/ /#/g'" % raw_axfr))
        else:
            # BUG FIX: this branch previously logged "Success!" even though
            # the zone transfer failed.
            logging.info("Attempting zone transfer on %s using domain %s.. Failed" % (dns_server, domain))
            self.shell.shell_exec("rm -f %s" % raw_axfr)
    if num_dns_servers == 0:
        return
def add_attachment(self, message, attachment):
    """Attach the file at *attachment* to *message* as a base64-encoded blob.

    :param message: MIME message object to attach the file to
    :param attachment: Path of the file to attach; falsy means no attachment
    :return: True when a file was attached, False when *attachment* is empty
    :rtype: `bool`
    """
    if not attachment:
        return False
    payload = MIMEBase.MIMEBase('application', 'octet-stream')
    payload.set_payload(FileOperations.open(attachment, 'rb').read())
    # base64 encode the Binary Blob.
    Encoders.encode_base64(payload)
    # Binary Blob headers.
    payload.add_header(
        'Content-Disposition',
        'attachment; filename="{}"'.format(os.path.basename(attachment)))
    message.attach(payload)
    return True
def get_resources_from_file(resource_file):
    """Fetch resources for a file

    :param resource_file: Path to the resource file
    :type resource_file: `str`
    :return: Resources as a set of (type, name, resource) tuples
    :rtype: `set`
    """
    resources = set()
    # To remove stupid '\n' at the end
    config_file = FileOperations.open(resource_file, 'r').read().splitlines()
    for line in config_file:
        # Skip blank lines (the original `'#' == line[0]` test raised
        # IndexError on them) as well as comment lines.
        if not line or line.startswith('#'):
            continue
        try:
            # Renamed from `type`, which shadowed the builtin.
            res_type, res_name, resource = line.split('_____')
        except ValueError:
            logging.info("ERROR: The delimiter is incorrect in this line at Resource File: %s" % str(line.split('_____')))
        else:
            resources.add((res_type, res_name, resource))
    return resources
def dump_output_file(self, filename, contents, plugin, relative_path=False):
    """Write *contents* to *filename* inside the plugin's output directory.

    :param filename: Name of the file
    :type filename: `str`
    :param contents: Contents of the file
    :type contents: `str`
    :param plugin: Plugin
    :type plugin: `dict`
    :param relative_path: return a path relative to the target output dir
    :type relative_path: `bool`
    :return: Absolute (or relative, when requested) path to the file
    :rtype: `str`
    """
    target_dir = self.get_plugin_output_dir(plugin)
    dumped_path = FileOperations.dump_file(filename, contents, target_dir)
    if not relative_path:
        return dumped_path
    return os.path.relpath(dumped_path, get_output_dir_target())
def load_framework_config_file(default, fallback, root_dir, owtf_pid):
    """Load the configuration into a global dictionary.

    :param default: Preferred configuration file path
    :type default: `str`
    :param fallback: Path used when *default* does not exist
    :type fallback: `str`
    :param root_dir: Framework root dir, substituted for FRAMEWORK_DIR in values
    :type root_dir: `str`
    :param owtf_pid: PID substituted for OWTF_PID in values
    :type owtf_pid: `int`
    :return: None
    :rtype: None
    """
    config_path = default
    if not os.path.isfile(config_path):
        config_path = fallback
    logging.info("Loading config from: {}..".format(config_path))
    config_file = FileOperations.open(config_path, 'r')
    config_handler.set_val('FRAMEWORK_DIR', root_dir)  # Needed Later.
    for line in config_file:
        try:
            # Skip blank lines (the original `key[0]` indexing raised
            # IndexError on them) and comment lines.
            stripped = line.strip()
            if not stripped or stripped.startswith('#'):
                continue
            key = line.split(':')[0]
            value = line.replace("{}: ".format(key), "").strip()
            config_handler.set_val(key, multi_replace(value, {'FRAMEWORK_DIR': root_dir, 'OWTF_PID': str(owtf_pid)}))
        except ValueError:
            abort_framework("Problem in config file: {} -> Cannot parse line: {}".format(config_path, line))
def get_ports_for_service(self, service, protocol):
    """Get ports for different services

    :param service: Service name
    :type service: `str`
    :param protocol: Protocol
    :type protocol: `str`
    :return: List of ports
    :rtype: `list`
    """
    # BUG FIX: the compiled pattern was discarded and the raw string passed
    # to re.findall on every line; compile once and reuse it.
    regexp = re.compile('(.*?)\t(.*?/.*?)\t(.*?)($|\t)(#.*){0,1}')
    # Renamed from `list`, which shadowed the builtin.
    ports = []
    f = FileOperations.open(self.get_nmap_services_file())
    for line in f.readlines():
        if line.lower().find(service) >= 0:
            match = regexp.findall(line)
            if match:
                port = match[0][1].split('/')[0]
                prot = match[0][1].split('/')[1]
                if (not protocol or protocol == prot) and port not in ports:
                    ports.append(port)
    f.close()
    return ports
def check_mount_point_existence(self, options):
    """Create the SMB mount point directory when it does not already exist.

    :param options: Plugin options containing the 'SMB_MOUNT_POINT' path
    :type options: `dict`
    :return: None
    :rtype: None
    """
    mount_point = options['SMB_MOUNT_POINT']
    if not os.path.exists(mount_point):
        FileOperations.make_dirs(mount_point)
def InitPluginOutputDir(self, PluginInfo):
    """Resolve and pre-create the output directory for a plugin.

    The directory is created up-front so that plugin scripts can cd to it :)

    :param PluginInfo: Plugin descriptor used to derive the output path
    :return: Path of the (now existing) plugin output directory
    :rtype: `str`
    """
    output_dir = self.SetConfigPluginOutputDir(PluginInfo)
    FileOperations.create_missing_dirs(output_dir)
    return output_dir
def initialize(self, outbound_options=[], outbound_auth=""):
    """Initialize the proxy process

    Wires up the tornado application (cache, SSL MiTM material, cookie
    filtering, outbound proxy, HTTP auth) and creates the HTTP server.

    :param outbound_options: Outbound proxy options
    :type outbound_options: `list`
    :param outbound_auth: Authentication string
    :type outbound_auth: `str`
    :return: None
    :rtype: None
    """
    # NOTE(review): `outbound_options=[]` is a mutable default; it is only
    # read here, so it is harmless, but worth cleaning up.
    # The tornado application, which is used to pass variables to request handler
    self.application = tornado.web.Application(
        handlers=[(r'.*', ProxyHandler)],
        debug=False,
        gzip=True,
    )
    # All required variables in request handler
    # Required variables are added as attributes to application, so that request handler can access these
    self.application.inbound_ip = INBOUND_PROXY_IP
    self.application.inbound_port = int(INBOUND_PROXY_PORT)
    self.instances = INBOUND_PROXY_PROCESSES
    # Disable console logging
    self.logger.disable_console_logging()
    # Proxy CACHE
    # Cache related settings, including creating required folders according to cache folder structure
    self.application.cache_dir = INBOUND_PROXY_CACHE_DIR
    # Clean possible older cache directory.
    if os.path.exists(self.application.cache_dir):
        FileOperations.rm_tree(self.application.cache_dir)
    FileOperations.make_dirs(self.application.cache_dir)
    # SSL MiTM
    # SSL certs, keys and other settings (os.path.expanduser because they are stored in users home directory
    # ~/.owtf/proxy)
    self.application.ca_cert = os.path.expanduser(CA_CERT)
    self.application.ca_key = os.path.expanduser(CA_KEY)
    # To stop OWTF from breaking for our beloved users :P
    try:
        self.application.ca_key_pass = FileOperations.open(
            os.path.expanduser(CA_PASS_FILE), 'r',
            owtf_clean=False).read().strip()
    except IOError:
        # Fall back to the default passphrase when no pass file exists.
        self.application.ca_key_pass = "******"  # XXX: Legacy CA key pass for older versions.
    self.application.proxy_folder = os.path.dirname(
        self.application.ca_cert)
    self.application.certs_folder = os.path.expanduser(CERTS_FOLDER)
    try:  # Ensure CA.crt and Key exist.
        assert os.path.exists(self.application.ca_cert)
        assert os.path.exists(self.application.ca_key)
    except AssertionError:
        abort_framework(
            "Files required for SSL MiTM are missing.Please run the install script"
        )
    try:  # If certs folder missing, create that.
        assert os.path.exists(self.application.certs_folder)
    except AssertionError:
        FileOperations.make_dirs(self.application.certs_folder)
    # Blacklist (or) Whitelist Cookies
    # Building cookie regex to be used for cookie filtering for caching
    if WHITELIST_COOKIES == 'None':
        cookies_list = BLACKLIST_COOKIES.split(',')
        self.application.cookie_blacklist = True
    else:
        cookies_list = WHITELIST_COOKIES.split(',')
        self.application.cookie_blacklist = False
    # Blacklist mode matches listed cookies; whitelist mode captures them.
    if self.application.cookie_blacklist:
        regex_cookies_list = [
            cookie + "=([^;]+;?)" for cookie in cookies_list
        ]
    else:
        regex_cookies_list = [
            "(" + cookie + "=[^;]+;?)" for cookie in cookies_list
        ]
    regex_string = '|'.join(regex_cookies_list)
    self.application.cookie_regex = re.compile(regex_string)
    # Outbound Proxy
    # Outbound proxy settings to be used inside request handler
    if outbound_options:
        # Three options means an explicit proxy type; two means plain http.
        if len(outbound_options) == 3:
            self.application.outbound_proxy_type = outbound_options[0]
            self.application.outbound_ip = outbound_options[1]
            self.application.outbound_port = int(outbound_options[2])
        else:
            self.application.outbound_proxy_type = "http"
            self.application.outbound_ip = outbound_options[0]
            self.application.outbound_port = int(outbound_options[1])
    else:
        self.application.outbound_ip = None
        self.application.outbound_port = None
        self.application.outbound_proxy_type = None
    if outbound_auth:
        # Expected format is "username:password".
        self.application.outbound_username, self.application.outbound_password = outbound_auth.split(
            ":")
    else:
        self.application.outbound_username = None
        self.application.outbound_password = None
    self.server = tornado.httpserver.HTTPServer(self.application)
    # server has to be a class variable, because it is used inside request handler to attach sockets for monitoring
    ProxyHandler.server = self.server
    # Header filters
    # These headers are removed from the response obtained from webserver, before sending it to browser
    ProxyHandler.restricted_response_headers = PROXY_RESTRICTED_RESPONSE_HEADERS
    # These headers are removed from request obtained from browser, before sending it to webserver
    ProxyHandler.restricted_request_headers = PROXY_RESTRICTED_REQUEST_HEADERS
    # HTTP Auth options
    if HTTP_AUTH_HOST is not None:
        self.application.http_auth = True
        # All the variables are lists
        self.application.http_auth_hosts = HTTP_AUTH_HOST.strip().split(
            ',')
        self.application.http_auth_usernames = HTTP_AUTH_USERNAME.strip(
        ).split(',')
        self.application.http_auth_passwords = HTTP_AUTH_PASSWORD.strip(
        ).split(',')
        self.application.http_auth_modes = HTTP_AUTH_MODE.strip().split(
            ',')
    else:
        self.application.http_auth = False
""" owtf ~~~~~ """ from owtf.utils.file import FileOperations, get_logs_dir from .db.database import get_scoped_session __version__ = '2.3b' __release__ = 'MacOWTF' print("""\033[92m _____ _ _ _ _____ _____ | | | | |_ _| __| | | | | | | | | | __| |_____|_____| |_| |__| @owtfp http://owtf.org Version: {0} Release: {1} \033[0m""".format(__version__, __release__)) db = get_scoped_session() FileOperations.create_missing_dirs(get_logs_dir())