def create_temp_storage_dirs(self):
    """Create a temporary directory in /tmp with pid suffix.

    BUG fix: the original only descended into the per-PID subdirectory when
    /tmp/owtf did NOT exist, so a pre-existing /tmp/owtf left the
    /tmp/owtf/<pid> directory uncreated. Build the full path up front and
    create it (make_dirs creates intermediate directories) when missing.
    """
    tmp_dir = os.path.join('/tmp', 'owtf', str(self.config.OwtfPid))
    if not os.path.exists(tmp_dir):
        FileOperations.make_dirs(tmp_dir)
def create_temp_storage_dirs(self):
    """Create a temporary directory in /tmp with pid suffix.

    BUG fix: the per-PID subdirectory was only created when /tmp/owtf was
    missing; if /tmp/owtf already existed the pid directory was never made.
    Building the complete path first makes creation unconditional on the
    parent's existence (make_dirs handles intermediate directories).
    """
    tmp_dir = os.path.join('/tmp', 'owtf', str(self.config.OwtfPid))
    if not os.path.exists(tmp_dir):
        FileOperations.make_dirs(tmp_dir)
def GetRecordConfig(self):
    """Build the configuration dict for the zest script recorder.

    :return: dict with ROOT_DIR, zest create/runner script paths and the
        (freshly created) directory where recorded scripts are stored.
    """
    root = self.config.RootDir
    record_config = {
        'ROOT_DIR': root,
        'CREATE_SCRIPT_PATH': os.path.join(root, "zest", "zest_create.sh"),
        'RUNNER_SCRIPT_PATH': os.path.join(root, "zest", "zest_runner.sh"),
        'ZEST_DIR': os.path.join(root, self.config.FrameworkConfigGet("OUTPUT_PATH"),
                                 "misc", "recorded_scripts"),
    }
    # Commented-out OUTPUT_DIR/TARGET_DB entries from an older revision removed.
    FileOperations.create_missing_dirs(record_config['ZEST_DIR'])
    return record_config
def create_dirs(self):
    """Create every directory owtf needs at startup.

    Any directory which needs to be created at the start of owtf
    should be placed inside here. No hardcoding of paths please.
    """
    # Logs folder creation.
    logs_dir = self.config.FrameworkConfigGetLogsDir()
    if not os.path.exists(logs_dir):
        FileOperations.create_missing_dirs(logs_dir)
    # Temporary storage directories creation.
    self.create_temp_storage_dirs()
def dns_sweep(self, file_with_ips, file_prefix):
    """Hunt for misconfigured DNS servers that allow zone transfers.

    :param file_with_ips: path to a file with live IPs, one per line.
    :param file_prefix: prefix for all output files produced by this scan.
    """
    logging.info(
        "Finding misconfigured DNS servers that might allow zone transfers among live ips ..")
    self.shell.shell_exec("nmap -PN -n -sS -p 53 -iL %s -oA %s" % (file_with_ips, file_prefix))
    # Step 2 - Extract IPs of hosts with 53/tcp open.
    dns_servers = "%s.dns_server.ips" % file_prefix
    self.shell.shell_exec('grep "53/open/tcp" %s.gnmap | cut -f 2 -d " " > %s'
                          % (file_prefix, dns_servers))
    file = FileOperations.open(dns_servers)
    domain_names = "%s.domain_names" % file_prefix
    self.shell.shell_exec("rm -f %s" % domain_names)
    num_dns_servers = 0
    for line in file:
        if line.strip('\n'):
            dns_server = line.strip('\n')
            self.shell.shell_exec(
                "host %s %s | grep 'domain name' | cut -f 5 -d' ' | cut -f 2,3,4,5,6,7 -d. "
                "| sed 's/\\.$//' >> %s" % (dns_server, dns_server, domain_names))
            num_dns_servers += 1
    try:
        file = FileOperations.open(domain_names, owtf_clean=False)
    except IOError:
        return
    # NOTE(review): dns_server below is the LAST server from the loop above --
    # behavior preserved from the original; confirm this is intended.
    for line in file:
        domain = line.strip('\n')
        raw_axfr = "%s.%s.%s.axfr.raw" % (file_prefix, dns_server, domain)
        self.shell.shell_exec("host -l %s %s | grep %s > %s"
                              % (domain, dns_server, domain, raw_axfr))
        # BUG fix: wc -l output is text; the old str > int comparison was
        # meaningless (always True under Python 2 ordering rules).
        success = int(self.shell.shell_exec("wc -l %s | cut -f 1 -d ' '" % raw_axfr))
        if success > 3:
            # BUG fix: "$dns_server" was a literal, never interpolated.
            logging.info("Attempting zone transfer on %s using domain %s.. Success!"
                         % (dns_server, domain))
            axfr = "%s.%s.%s.axfr" % (file_prefix, dns_server, domain)
            self.shell.shell_exec("rm -f %s" % axfr)
            logging.info(self.shell.shell_exec(
                "grep 'has address' %s | cut -f 1,4 -d ' ' | sort -k 2 -t ' ' "
                "| sed 's/ /#/g'" % raw_axfr))
        else:
            # BUG fix: the failure branch previously logged "Success!".
            logging.info("Attempting zone transfer on %s using domain %s.. Failed."
                         % (dns_server, domain))
            self.shell.shell_exec("rm -f %s" % raw_axfr)
    if num_dns_servers == 0:
        return
def dns_sweep(self, file_with_ips, file_prefix):
    """Hunt for misconfigured DNS servers that allow zone transfers.

    :param file_with_ips: path to a file with live IPs, one per line.
    :param file_prefix: prefix for all output files produced by this scan.
    """
    logging.info(
        "Finding misconfigured DNS servers that might allow zone transfers among live ips ..")
    self.shell.shell_exec("nmap -PN -n -sS -p 53 -iL %s -oA %s" % (file_with_ips, file_prefix))
    # Step 2 - Extract IPs of hosts with 53/tcp open.
    dns_servers = "%s.dns_server.ips" % file_prefix
    self.shell.shell_exec('grep "53/open/tcp" %s.gnmap | cut -f 2 -d " " > %s'
                          % (file_prefix, dns_servers))
    file = FileOperations.open(dns_servers)
    domain_names = "%s.domain_names" % file_prefix
    self.shell.shell_exec("rm -f %s" % domain_names)
    num_dns_servers = 0
    for line in file:
        if line.strip('\n'):
            dns_server = line.strip('\n')
            self.shell.shell_exec(
                "host %s %s | grep 'domain name' | cut -f 5 -d' ' | cut -f 2,3,4,5,6,7 -d. "
                "| sed 's/\\.$//' >> %s" % (dns_server, dns_server, domain_names))
            num_dns_servers += 1
    try:
        file = FileOperations.open(domain_names, owtf_clean=False)
    except IOError:
        return
    # NOTE(review): dns_server below is the LAST server from the loop above --
    # behavior preserved from the original; confirm this is intended.
    for line in file:
        domain = line.strip('\n')
        raw_axfr = "%s.%s.%s.axfr.raw" % (file_prefix, dns_server, domain)
        self.shell.shell_exec("host -l %s %s | grep %s > %s"
                              % (domain, dns_server, domain, raw_axfr))
        # BUG fix: wc -l output is text; coerce before the numeric comparison.
        success = int(self.shell.shell_exec("wc -l %s | cut -f 1 -d ' '" % raw_axfr))
        if success > 3:
            # BUG fix: "$dns_server" was a literal, never interpolated.
            logging.info("Attempting zone transfer on %s using domain %s.. Success!"
                         % (dns_server, domain))
            axfr = "%s.%s.%s.axfr" % (file_prefix, dns_server, domain)
            self.shell.shell_exec("rm -f %s" % axfr)
            logging.info(self.shell.shell_exec(
                "grep 'has address' %s | cut -f 1,4 -d ' ' | sort -k 2 -t ' ' "
                "| sed 's/ /#/g'" % raw_axfr))
        else:
            # BUG fix: both branches previously logged "Success!"; this is the failure path.
            logging.info("Attempting zone transfer on %s using domain %s.. Failed."
                         % (dns_server, domain))
            self.shell.shell_exec("rm -f %s" % raw_axfr)
    if num_dns_servers == 0:
        return
def GetRecordConfig(self):
    """Assemble and return the zest recorder path configuration.

    Also ensures the recorded-scripts directory exists on disk.
    """
    root_dir = self.config.RootDir
    record_config = {}
    record_config['ROOT_DIR'] = root_dir
    record_config['CREATE_SCRIPT_PATH'] = os.path.join(root_dir, "zest", "zest_create.sh")
    record_config['RUNNER_SCRIPT_PATH'] = os.path.join(root_dir, "zest", "zest_runner.sh")
    record_config['ZEST_DIR'] = os.path.join(
        root_dir,
        self.config.FrameworkConfigGet("OUTPUT_PATH"),
        "misc",
        "recorded_scripts")
    FileOperations.create_missing_dirs(record_config['ZEST_DIR'])
    return record_config
def GetTargetConfig(self, target_id):
    """Build the zest path/host configuration for the target *target_id*.

    Side effect: switches the active target via SetTarget and creates the
    target's zest directory if missing.
    """
    target_config = {}
    self.target.SetTarget(target_id)
    root_dir = self.config.RootDir
    target_config['ROOT_DIR'] = root_dir
    target_config['OUTPUT_DIR'] = os.path.join(root_dir, self.target.PathConfig['url_output'])
    target_config['ZEST_DIR'] = os.path.join(target_config['OUTPUT_DIR'], "zest")
    target_config['CREATE_SCRIPT_PATH'] = os.path.join(root_dir, "zest", "zest_create.sh")
    target_config['RUNNER_SCRIPT_PATH'] = os.path.join(root_dir, "zest", "zest_runner.sh")
    host = self.target.GetTargetConfigForID(target_id)['host_name']
    port = self.target.GetTargetConfigForID(target_id)['port_number']
    target_config['HOST_AND_PORT'] = host + ":" + port
    FileOperations.create_missing_dirs(target_config['ZEST_DIR'])
    return target_config
def GetRecordConfig(self):
    """Return the zest recorder configuration as a dict.

    Keys: ROOT_DIR, CREATE_SCRIPT_PATH, RUNNER_SCRIPT_PATH, ZEST_DIR.
    The ZEST_DIR is created on disk if it does not already exist.
    """
    root = self.config.RootDir
    zest_dir = os.path.join(root, self.config.FrameworkConfigGet("OUTPUT_PATH"),
                            "misc", "recorded_scripts")
    record_config = {
        'ROOT_DIR': root,
        'CREATE_SCRIPT_PATH': os.path.join(root, "zest", "zest_create.sh"),
        'RUNNER_SCRIPT_PATH': os.path.join(root, "zest", "zest_runner.sh"),
        'ZEST_DIR': zest_dir,
    }
    FileOperations.create_missing_dirs(zest_dir)
    return record_config
def target_service(self, nmap_file, service):
    """Extract 'host:port:protocol##' entries from a gnmap file for *service*.

    :param nmap_file: path to an nmap .gnmap output file.
    :param service: service name whose known ports should be matched.
    :return: concatenated "host:port:proto##" string (may be empty).
    """
    ports_for_service = self.get_ports_for_service(service, "")
    f = FileOperations.open(nmap_file.strip())
    response = ""
    for host_ports in re.findall('Host: (.*?)\tPorts: (.*?)[\t\n]', f.read()):
        host = host_ports[0].split(' ')[0]  # Remove junk at the end.
        for port_info in host_ports[1].split(','):
            if len(port_info) < 1:
                continue
            chunk = port_info.split('/')
            port = chunk[0].strip()
            port_state = chunk[1].strip()
            # No point in wasting time probing closed/filtered ports!!
            # (nmap sometimes adds these to the gnmap file for some reason ..)
            if port_state in ['closed', 'filtered']:
                continue
            try:
                prot = chunk[2].strip()
            except IndexError:
                # BUG fix: was a bare `except:`; only the missing-protocol
                # field should be tolerated here.
                continue
            if port in ports_for_service:
                response += "%s:%s:%s##" % (host, port, prot)
    f.close()
    return response
def AddBody(self, message, text):
    """Attach *text* (or the contents of the file named by *text*) as the body.

    :param message: MIME message object supporting .attach().
    :param text: literal body text, or a path to a file whose contents
        become the body.
    """
    # If a file has been specified as Body, then set Body to file contents.
    if os.path.isfile(text):
        body = FileOperations.open(text).read().strip()
    else:
        body = text
    # BUG fix: MIMEText's second argument is the MIME *subtype* string;
    # passing the message object produced a bogus content type.
    message.attach(MIMEText.MIMEText(body))
def GetTestGroupsFromFile(self, file_path):
    """Parse the test-groups config file into an ordered list of dicts.

    This needs to be a list instead of a dictionary to preserve order in
    python < 2.7.

    :param file_path: path to the ' | '-delimited test groups file.
    :return: list of {'code', 'priority', 'descrip', 'hint', 'url'} dicts.
    """
    TestGroups = []
    ConfigFile = FileOperations.open(file_path, 'r').read().splitlines()
    for line in ConfigFile:
        # BUG fix: blank lines made `line[0]` raise IndexError.
        if not line or line[0] == '#':
            continue  # Skip blanks and comments.
        try:
            Code, Priority, Descrip, Hint, URL = line.strip().split(' | ')
        except ValueError:
            self.error_handler.FrameworkAbort(
                "Problem in Test Groups file: '%s' -> Cannot parse line: %s"
                % (file_path, line))
        if len(Descrip) < 2:
            Descrip = Hint
        if len(Hint) < 2:
            Hint = ""
        TestGroups.append({
            'code': Code,
            'priority': Priority,
            'descrip': Descrip,
            'hint': Hint,
            'url': URL
        })
    return TestGroups
def DumpOutputFile(self, Filename, Contents, Plugin, RelativePath=False):
    """Write *Contents* into the plugin's output directory.

    :return: the dumped file's path, relative to the targets output dir
        when RelativePath is truthy, absolute otherwise.
    """
    save_dir = self.GetPluginOutputDir(Plugin)
    abs_path = FileOperations.dump_file(Filename, Contents, save_dir)
    if not RelativePath:
        return abs_path
    return os.path.relpath(abs_path, self.config.GetOutputDirForTargets())
def DeleteAll(self, filter_data, target_id=None):
    """Delete plugin results matching *filter_data* plus their output folders.

    Here keeping filter_data optional is very risky: an empty dict will
    match (and therefore delete) ALL results.
    """
    query = self.GenerateQueryUsingSession(filter_data, target_id, for_delete=True)
    # Delete the folders created for these plugins.
    for plugin in query.all():
        # First check if path exists in db.
        if plugin.output_path:
            output_path = os.path.join(
                self.config.GetOutputDirForTargets(), plugin.output_path)
            if os.path.exists(output_path):
                FileOperations.rm_tree(output_path)
    # When folders are removed delete the results from db.
    query.delete()  # Idiom fix: result was assigned to an unused local.
    self.db.session.commit()
def GetTargetConfig(self, target_id):
    """Return zest script paths and host:port info for *target_id*.

    Switches the active target and ensures the target zest dir exists.
    """
    target_config = {}
    self.target.SetTarget(target_id)
    target_config['ROOT_DIR'] = self.config.RootDir
    target_config['OUTPUT_DIR'] = os.path.join(
        target_config['ROOT_DIR'], self.target.PathConfig['url_output'])
    target_config['ZEST_DIR'] = os.path.join(target_config['OUTPUT_DIR'], "zest")
    target_config['CREATE_SCRIPT_PATH'] = os.path.join(
        target_config['ROOT_DIR'], "zest", "zest_create.sh")
    target_config['RUNNER_SCRIPT_PATH'] = os.path.join(
        target_config['ROOT_DIR'], "zest", "zest_runner.sh")
    target_config['HOST_AND_PORT'] = "%s:%s" % (
        self.target.GetTargetConfigForID(target_id)['host_name'],
        self.target.GetTargetConfigForID(target_id)['port_number'])
    FileOperations.create_missing_dirs(target_config['ZEST_DIR'])
    return target_config
def DeleteAll(self, filter_data, target_id=None):
    """Delete plugin results matching *filter_data* and their output folders.

    Here keeping filter_data optional is very risky: an empty filter dict
    matches -- and deletes -- every result.
    """
    query = self.GenerateQueryUsingSession(filter_data, target_id, for_delete=True)
    # Delete the folders created for these plugins.
    for plugin in query.all():
        # First check if path exists in db.
        if plugin.output_path:
            output_path = os.path.join(
                self.config.GetOutputDirForTargets(), plugin.output_path)
            if os.path.exists(output_path):
                FileOperations.rm_tree(output_path)
    # When folders are removed delete the results from db.
    query.delete()  # Idiom fix: previously bound to an unused `results` local.
    self.db.session.commit()
def pnh_log_file(self):
    """Ensure the PNH events file exists, creating it empty when missing.

    :raises IOError: re-raised (after logging) if the file cannot be created.
    """
    self.path = self.config.FrameworkConfigGet('PNH_EVENTS_FILE')
    self.mode = "w"
    try:
        # Idiom fix: replaced `if isfile: pass / else: create` with a
        # direct negative test.
        if not os.path.isfile(self.path):
            with FileOperations.open(self.path, self.mode, owtf_clean=False):
                pass
    except IOError as e:
        OWTFLogger.log("I/O error ({0}): {1}".format(e.errno, e.strerror))
        raise
def AddAttachment(self, message, attachment):
    """Attach the file at *attachment* to *message* as a base64 binary part.

    :return: True when a part was attached, False for an empty attachment.
    """
    if not attachment:
        return False
    part = MIMEBase.MIMEBase('application', 'octet-stream')
    part.set_payload(FileOperations.open(attachment, 'rb').read())
    # base64 encode the binary blob.
    Encoders.encode_base64(part)
    # Binary blob headers.
    part.add_header('Content-Disposition',
                    'attachment; filename="%s"' % os.path.basename(attachment))
    message.attach(part)
    return True
def GetResourcesFromFile(self, resource_file):
    """Load (Type, Name, Resource) triples from a '_____'-delimited file.

    :return: set of 3-tuples; malformed lines are reported and skipped.
    """
    resources = set()
    # splitlines() also removes the trailing '\n' on each line.
    ConfigFile = FileOperations.open(resource_file, 'r').read().splitlines()
    for line in ConfigFile:
        # BUG fix: blank lines made `line[0]` raise IndexError.
        if not line or line[0] == '#':
            continue  # Skip blank and comment lines.
        try:
            Type, Name, Resource = line.split('_____')
            resources.add((Type, Name, Resource))
        except ValueError:
            cprint("ERROR: The delimiter is incorrect in this line at Resource File: %s"
                   % str(line.split('_____')))
    return resources
def DeriveOutputSettingsFromURL(self, target_URL):
    """Derive and store all per-target output paths from *target_URL*.

    Populates host/port/url output directories and report paths in the
    config store, creates the host output directory (unless simulating)
    and initialises the URL analysis DBs.
    """
    # Set the output directory.
    self.Set(
        'host_output',
        self.Get('OUTPUT_PATH') + "/" + self.Get('host_ip'))
    # Set the output directory.
    self.Set(
        'port_output',
        self.Get('host_output') + "/" + self.Get('port_number'))
    # Sanitise the URL into a directory-name-safe identifier.
    URL_info_ID = target_URL.replace('/','_').replace(':','')
    # Set the URL output directory (plugins will save their data here).
    self.Set(
        'url_output',
        self.Get('port_output') + "/" + URL_info_ID + "/")
    # Set the partial results path.
    self.Set('partial_url_output_path', self.Get('url_output')+'partial')
    self.Set(
        'PARTIAL_REPORT_REGISTER',
        self.Get('partial_url_output_path') + "/partial_report_register.txt")
    # Tested in FF 8: Different directory = Different localStorage!! -> All
    # localStorage-dependent reports must be on the same directory.
    # IMPORTANT: For localStorage to work Url reports must be on the same
    # directory.
    self.Set(
        'HTML_DETAILED_REPORT_PATH',
        self.Get('OUTPUT_PATH') + "/" + URL_info_ID + ".html")
    # IMPORTANT: For localStorage to work Url reports must be on the same
    # directory.
    self.Set(
        'URL_REPORT_LINK_PATH',
        self.Get('OUTPUT_PATH') + "/index.html")
    # Skip filesystem writes entirely in simulation mode.
    if not self.Get('SIMULATION'):
        FileOperations.create_missing_dirs(self.Get('host_output'))
    # URL Analysis DBs
    # URL DBs: Distintion between vetted, confirmed-to-exist, in
    # transaction DB URLs and potential URLs.
    self.InitHTTPDBs(self.Get('url_output'))
def write_event(self, content, mode):
    """Write *content* plus a trailing newline to the PNH events file.

    :return: True on success, False on IOError, None when the file is
        missing or not writable.
    """
    self.content = content
    self.mode = mode
    self.file_path = self.config.FrameworkConfigGet('PNH_EVENTS_FILE')
    # Guard clause: only proceed when the file exists and is writable.
    if not (os.path.isfile(self.file_path) and os.access(self.file_path, os.W_OK)):
        return None
    try:
        with FileOperations.open(self.file_path, self.mode, owtf_clean=False) as log_file:
            log_file.write(self.content)
            log_file.write("\n")
        return True
    except IOError:
        return False
def GetResourcesFromFile(self, resource_file):
    """Load (Type, Name, Resource) triples from a '_____'-delimited file.

    :return: set of 3-tuples; malformed lines are reported and skipped.
    """
    resources = set()
    # splitlines() removes the trailing '\n' on each line.
    ConfigFile = FileOperations.open(resource_file, 'r').read().splitlines()
    for line in ConfigFile:
        # BUG fix: a blank line made `line[0]` raise IndexError.
        if not line or line[0] == '#':
            continue  # Skip blank and comment lines.
        try:
            Type, Name, Resource = line.split('_____')
            resources.add((Type, Name, Resource))
        except ValueError:
            cprint("ERROR: The delimiter is incorrect in this line at Resource File: "
                   + str(line.split('_____')))
    return resources
def get_ports_for_service(self, service, protocol):
    """Look up the ports registered for *service* in the nmap services file.

    :param service: substring matched (lowercased) against each line.
    :param protocol: 'tcp'/'udp' to filter, or empty/None to accept any.
    :return: list of unique port numbers as strings, in file order.
    """
    # BUG fix: the original called re.compile() but discarded the result and
    # re-parsed the pattern for every matching line; compile once and use it.
    pattern = re.compile('(.*?)\t(.*?/.*?)\t(.*?)($|\t)(#.*){0,1}')
    ports = []  # Renamed from `list`, which shadowed the builtin.
    f = FileOperations.open(self.get_nmap_services_file())
    for line in f.readlines():
        if line.lower().find(service) >= 0:
            match = pattern.findall(line)
            if match:
                port = match[0][1].split('/')[0]
                prot = match[0][1].split('/')[1]
                if (not protocol or protocol == prot) and port not in ports:
                    ports.append(port)
    f.close()
    return ports
def DeriveOutputSettingsFromURL(self, target_URL):
    """Derive and persist every per-target output path from *target_URL*.

    Creates the host output directory (unless in simulation mode) and
    initialises the URL analysis DBs as a final step.
    """
    # Host- and port-level output directories.
    self.Set('host_output', self.Get('OUTPUT_PATH') + "/" + self.Get('host_ip'))
    self.Set('port_output', self.Get('host_output') + "/" + self.Get('port_number'))
    # Directory-name-safe identifier derived from the URL.
    url_dir_name = target_URL.replace('/', '_').replace(':', '')
    # URL output directory: plugins save their data here.
    self.Set('url_output', self.Get('port_output') + "/" + url_dir_name + "/")
    # Partial results path + its register file.
    self.Set('partial_url_output_path', self.Get('url_output') + 'partial')
    self.Set('PARTIAL_REPORT_REGISTER',
             self.Get('partial_url_output_path') + "/partial_report_register.txt")
    # Tested in FF 8: different directory = different localStorage! All
    # localStorage-dependent reports must live in the SAME directory.
    self.Set('HTML_DETAILED_REPORT_PATH',
             self.Get('OUTPUT_PATH') + "/" + url_dir_name + ".html")
    self.Set('URL_REPORT_LINK_PATH', self.Get('OUTPUT_PATH') + "/index.html")
    if not self.Get('SIMULATION'):
        FileOperations.create_missing_dirs(self.Get('host_output'))
    # URL DBs: distinction between vetted/confirmed-to-exist URLs (in the
    # transaction DB) and merely potential URLs.
    self.InitHTTPDBs(self.Get('url_output'))
def GetTestGroupsFromFile(self, file_path):
    """Parse the test-groups config file into an ordered list of dicts.

    This needs to be a list instead of a dictionary to preserve order in
    python < 2.7.
    """
    TestGroups = []
    ConfigFile = FileOperations.open(file_path, 'r').read().splitlines()
    for line in ConfigFile:
        # BUG fix: blank lines crashed `line[0]` with IndexError.
        if not line or line[0] == '#':
            continue  # Skip blanks and comments.
        try:
            Code, Priority, Descrip, Hint, URL = line.strip().split(' | ')
        except ValueError:
            self.error_handler.FrameworkAbort(
                "Problem in Test Groups file: '" + file_path
                + "' -> Cannot parse line: " + line)
        if len(Descrip) < 2:
            Descrip = Hint
        if len(Hint) < 2:
            Hint = ""
        TestGroups.append({'code': Code, 'priority': Priority, 'descrip': Descrip,
                           'hint': Hint, 'url': URL})
    return TestGroups
def _get_db_settings(self):
    """Create DB settings according to the configuration file.

    :return: dict mapping stripped keys to stripped values.
    """
    config_path = os.path.expanduser(
        self.config.FrameworkConfigGet('DATABASE_SETTINGS_FILE'))
    settings = {}
    with FileOperations.open(config_path, 'r') as f:
        for line in f:
            line = line.rstrip()
            # Ignore empty/comment lines.
            if not line or line.startswith('#'):
                continue
            try:
                # BUG fix: split on the FIRST ':' only so values containing
                # colons (e.g. connection URIs) no longer raise ValueError.
                key, value = line.split(':', 1)
                settings[key.strip()] = value.strip()
            except ValueError:
                self.error_handler.FrameworkAbort(
                    "Problem in config file: '%s' -> Cannot parse line: %s"
                    % (config_path, line))
    return settings
def LoadFrameworkConfigFromFile(self, config_path):
    """Load the configuration file into the global settings dictionary."""
    if 'framework_config' not in config_path:
        cprint("Loading Config from: %s.." % config_path)
    config_file = FileOperations.open(config_path, 'r')
    self.Set('FRAMEWORK_DIR', self.RootDir)  # Needed Later.
    for line in config_file:
        try:
            # BUG fix: blank lines previously produced junk keys like '\n'.
            if not line.strip():
                continue
            key = line.split(':')[0]
            if key[0] == '#':  # Ignore comment lines.
                continue
            # NOTE(review): replace() is fragile if the key text recurs in
            # the value; preserved as-is to avoid changing parsed output.
            value = line.replace("%s: " % key, "").strip()
            self.Set(
                key,
                self.MultipleReplace(
                    value, {
                        'FRAMEWORK_DIR': self.RootDir,
                        'OWTF_PID': str(self.OwtfPid)
                    }))
        except ValueError:
            self.error_handler.FrameworkAbort(
                "Problem in config file: %s -> Cannot parse line: %s"
                % (config_path, line))
def LoadFrameworkConfigFromFile(self, config_path):
    """Load the configuration file into the global settings dictionary."""
    if 'framework_config' not in config_path:
        cprint("Loading Config from: " + config_path + " ..")
    config_file = FileOperations.open(config_path, 'r')
    self.Set('FRAMEWORK_DIR', self.RootDir)  # Needed Later.
    for line in config_file:
        try:
            # BUG fix: blank lines previously slipped through and were
            # stored under junk keys such as '\n'.
            if not line.strip():
                continue
            key = line.split(':')[0]
            if key[0] == '#':  # Ignore comment lines.
                continue
            value = line.replace(key + ": ", "").strip()
            self.Set(
                key,
                self.MultipleReplace(
                    value, {
                        'FRAMEWORK_DIR': self.RootDir,
                        'OWTF_PID': str(self.OwtfPid)}
                )
            )
        except ValueError:
            self.error_handler.FrameworkAbort(
                "Problem in config file: '" + config_path
                + "' -> Cannot parse line: " + line)
def CleanUpForTarget(self, target_URL):
    """Remove the entire output directory tree for *target_URL*."""
    target_dir = self.GetOutputDirForTarget(target_URL)
    return FileOperations.rm_tree(target_dir)
def InitPluginOutputDir(self, PluginInfo):
    """Resolve, create and return the output directory for a plugin."""
    plugin_output_dir = self.SetConfigPluginOutputDir(PluginInfo)
    # Create output dir so that scripts can cd to it :)
    FileOperations.create_missing_dirs(plugin_output_dir)
    return plugin_output_dir
def DumpOutputFile(self, Filename, Contents, Plugin, RelativePath=False):
    """Dump *Contents* under the plugin output dir and return its path.

    The path is made relative to the targets output directory when
    *RelativePath* is truthy; otherwise the absolute path is returned.
    """
    abs_path = FileOperations.dump_file(
        Filename, Contents, self.GetPluginOutputDir(Plugin))
    return (os.path.relpath(abs_path, self.config.GetOutputDirForTargets())
            if RelativePath else abs_path)
def initialize(self, outbound_options=[], outbound_auth=""):
    """Configure the Tornado proxy application, cache, SSL MiTM and filters.

    :param outbound_options: [ip, port] or [type, ip, port] for an outbound
        proxy; empty list disables outbound proxying.
        NOTE(review): mutable default [] -- safe here since it is never
        mutated, but a None sentinel would be more idiomatic.
    :param outbound_auth: "username:password" for the outbound proxy,
        or "" for none.
    """
    # The tornado application, which is used to pass variables to request handler
    self.application = tornado.web.Application(handlers=[(r'.*', ProxyHandler)], debug=False, gzip=True,)
    self.config = self.get_component("config")
    self.db_config = self.get_component("db_config")
    # All required variables in request handler
    # Required variables are added as attributes to application, so that request handler can access these
    self.application.Core = self.get_component("core")
    try:
        self.proxy_manager = self.get_component("proxy_manager")
    except ComponentNotFoundException:
        self.proxy_manager = None
    self.application.proxy_manager = self.proxy_manager
    # ctypes object allocated from shared memory to verify if proxy must inject probe code or not
    # 'i' means ctypes type is integer, initialization value is 0
    # if lock is True then a new recursive lock object is created to
    # synchronize access to the value
    self.application.Core.pnh_inject = Value('i', 0, lock=True)
    self.application.inbound_ip = self.db_config.Get('INBOUND_PROXY_IP')
    self.application.inbound_port = int(self.db_config.Get('INBOUND_PROXY_PORT'))
    if self.proxy_manager:
        self.instances = "1"  # Botnet mode needs only one proxy process.
    else:
        self.instances = self.db_config.Get("INBOUND_PROXY_PROCESSES")
    # Proxy CACHE
    # Cache related settings, including creating required folders according to cache folder structure
    self.application.cache_dir = self.db_config.Get("INBOUND_PROXY_CACHE_DIR")
    # Clean possible older cache directory.
    if os.path.exists(self.application.cache_dir):
        FileOperations.rm_tree(self.application.cache_dir)
    FileOperations.make_dirs(self.application.cache_dir)
    # SSL MiTM
    # SSL certs, keys and other settings (os.path.expanduser because they are stored in users home directory
    # ~/.owtf/proxy)
    self.application.ca_cert = os.path.expanduser(self.db_config.Get('CA_CERT'))
    self.application.ca_key = os.path.expanduser(self.db_config.Get('CA_KEY'))
    # To stop OWTF from breaking for our beloved users :P
    try:
        self.application.ca_key_pass = FileOperations.open(
            os.path.expanduser(self.db_config.Get('CA_PASS_FILE')),
            'r',
            owtf_clean=False).read().strip()
    except IOError:
        self.application.ca_key_pass = "******"  # XXX: Legacy CA key pass for older versions.
    self.application.proxy_folder = os.path.dirname(self.application.ca_cert)
    self.application.certs_folder = os.path.expanduser(self.db_config.Get('CERTS_FOLDER'))
    try:  # Ensure CA.crt and Key exist.
        assert os.path.exists(self.application.ca_cert)
        assert os.path.exists(self.application.ca_key)
    except AssertionError:
        self.get_component("error_handler").FrameworkAbort(
            "Files required for SSL MiTM are missing. Please run the install script")
    try:  # If certs folder missing, create that.
        assert os.path.exists(self.application.certs_folder)
    except AssertionError:
        FileOperations.make_dirs(self.application.certs_folder)
    # Blacklist (or) Whitelist Cookies
    # Building cookie regex to be used for cookie filtering for caching
    if self.db_config.Get('WHITELIST_COOKIES') == 'None':
        cookies_list = self.db_config.Get('BLACKLIST_COOKIES').split(',')
        self.application.cookie_blacklist = True
    else:
        cookies_list = self.db_config.Get('WHITELIST_COOKIES').split(',')
        self.application.cookie_blacklist = False
    if self.application.cookie_blacklist:
        regex_cookies_list = [cookie + "=([^;]+;?)" for cookie in cookies_list]
    else:
        # NOTE(review): iterates the raw COOKIES_LIST value, not cookies_list
        # -- looks suspicious but preserved; confirm against callers.
        regex_cookies_list = ["(" + cookie + "=[^;]+;?)" for cookie in self.db_config.Get('COOKIES_LIST')]
    regex_string = '|'.join(regex_cookies_list)
    self.application.cookie_regex = re.compile(regex_string)
    # Outbound Proxy
    # Outbound proxy settings to be used inside request handler
    if outbound_options:
        if len(outbound_options) == 3:
            self.application.outbound_proxy_type = outbound_options[0]
            self.application.outbound_ip = outbound_options[1]
            self.application.outbound_port = int(outbound_options[2])
        else:
            self.application.outbound_proxy_type = "http"
            self.application.outbound_ip = outbound_options[0]
            self.application.outbound_port = int(outbound_options[1])
    else:
        self.application.outbound_ip = None
        self.application.outbound_port = None
        self.application.outbound_proxy_type = None
    if outbound_auth:
        self.application.outbound_username, self.application.outbound_password = outbound_auth.split(":")
    else:
        self.application.outbound_username = None
        self.application.outbound_password = None
    self.server = tornado.httpserver.HTTPServer(self.application)
    # server has to be a class variable, because it is used inside request handler to attach sockets for monitoring
    ProxyHandler.server = self.server
    # Header filters
    # Restricted headers are picked from framework/config/framework_config.cfg
    # These headers are removed from the response obtained from webserver, before sending it to browser
    restricted_response_headers = self.config.FrameworkConfigGet("PROXY_RESTRICTED_RESPONSE_HEADERS").split(",")
    ProxyHandler.restricted_response_headers = restricted_response_headers
    # These headers are removed from request obtained from browser, before sending it to webserver
    restricted_request_headers = self.config.FrameworkConfigGet("PROXY_RESTRICTED_REQUEST_HEADERS").split(",")
    ProxyHandler.restricted_request_headers = restricted_request_headers
    # HTTP Auth options
    if self.db_config.Get("HTTP_AUTH_HOST") != "None":
        self.application.http_auth = True
        # All the variables are lists
        self.application.http_auth_hosts = self.db_config.Get("HTTP_AUTH_HOST").strip().split(',')
        self.application.http_auth_usernames = self.db_config.Get("HTTP_AUTH_USERNAME").strip().split(',')
        self.application.http_auth_passwords = self.db_config.Get("HTTP_AUTH_PASSWORD").strip().split(',')
        self.application.http_auth_modes = self.db_config.Get("HTTP_AUTH_MODE").strip().split(',')
    else:
        self.application.http_auth = False
def CreateOutputDirForTarget(self, target_URL):
    """Ensure the output directory for *target_URL* exists on disk."""
    target_dir = self.GetOutputDirForTarget(target_URL)
    FileOperations.create_missing_dirs(target_dir)
def check_mount_point_existence(self, options):
    """Create the SMB mount point directory when it does not exist yet.

    :param options: dict containing an 'SMB_MOUNT_POINT' path.
    """
    mount_point = options['SMB_MOUNT_POINT']
    if os.path.exists(mount_point):
        return
    FileOperations.make_dirs(mount_point)
def CleanUpForTarget(self, target_URL):
    """Wipe the whole output tree belonging to *target_URL*."""
    doomed_dir = self.GetOutputDirForTarget(target_URL)
    return FileOperations.rm_tree(doomed_dir)
def initialize(self, outbound_options=[], outbound_auth=""):
    """Configure the Tornado proxy application, cache, SSL MiTM and filters.

    :param outbound_options: [ip, port] or [type, ip, port] for an outbound
        proxy; empty list disables outbound proxying.
        NOTE(review): mutable default [] -- safe here since it is never
        mutated, but a None sentinel would be more idiomatic.
    :param outbound_auth: "username:password" for the outbound proxy,
        or "" for none.
    """
    # The tornado application, which is used to pass variables to request handler
    self.application = tornado.web.Application(
        handlers=[(r'.*', ProxyHandler)],
        debug=False,
        gzip=True,
    )
    self.config = self.get_component("config")
    self.db_config = self.get_component("db_config")
    # All required variables in request handler
    # Required variables are added as attributes to application, so that request handler can access these
    self.application.Core = self.get_component("core")
    try:
        self.proxy_manager = self.get_component("proxy_manager")
    except ComponentNotFoundException:
        self.proxy_manager = None
    self.application.proxy_manager = self.proxy_manager
    # ctypes object allocated from shared memory to verify if proxy must inject probe code or not
    # 'i' means ctypes type is integer, initialization value is 0
    # if lock is True then a new recursive lock object is created to
    # synchronize access to the value
    self.application.Core.pnh_inject = Value('i', 0, lock=True)
    self.application.inbound_ip = self.db_config.Get('INBOUND_PROXY_IP')
    self.application.inbound_port = int(
        self.db_config.Get('INBOUND_PROXY_PORT'))
    if self.proxy_manager:  # Botnet mode needs only one proxy process
        self.instances = "1"
    else:
        self.instances = self.db_config.Get("INBOUND_PROXY_PROCESSES")
    # Proxy CACHE
    # Cache related settings, including creating required folders according to cache folder structure
    self.application.cache_dir = self.db_config.Get(
        "INBOUND_PROXY_CACHE_DIR")
    # Clean possible older cache directory.
    if os.path.exists(self.application.cache_dir):
        FileOperations.rm_tree(self.application.cache_dir)
    FileOperations.make_dirs(self.application.cache_dir)
    # SSL MiTM
    # SSL certs, keys and other settings (os.path.expanduser because they are stored in users home directory ~/.owtf/proxy )
    self.application.ca_cert = os.path.expanduser(
        self.db_config.Get('CA_CERT'))
    self.application.ca_key = os.path.expanduser(
        self.db_config.Get('CA_KEY'))
    try:  # To stop owtf from breaking for our beloved users :P
        self.application.ca_key_pass = FileOperations.open(
            os.path.expanduser(self.db_config.Get('CA_PASS_FILE')),
            'r',
            owtf_clean=False).read().strip()
    except IOError:
        self.application.ca_key_pass = "******"
    self.application.proxy_folder = os.path.dirname(
        self.application.ca_cert)
    self.application.certs_folder = os.path.expanduser(
        self.db_config.Get('CERTS_FOLDER'))
    try:  # Ensure CA.crt and Key exist
        assert os.path.exists(self.application.ca_cert)
        assert os.path.exists(self.application.ca_key)
    except AssertionError:
        # Reconstructed: this string literal was split across lines in the source.
        self.get_component("error_handler").FrameworkAbort(
            "Files required for SSL MiTM are missing. Please run the install script"
        )
    try:  # If certs folder missing, create that
        assert os.path.exists(self.application.certs_folder)
    except AssertionError:
        FileOperations.make_dirs(self.application.certs_folder)
    # Blacklist (or) Whitelist Cookies
    # Building cookie regex to be used for cookie filtering for caching
    if self.db_config.Get('WHITELIST_COOKIES') == 'None':
        cookies_list = self.db_config.Get('BLACKLIST_COOKIES').split(',')
        self.application.cookie_blacklist = True
    else:
        cookies_list = self.db_config.Get('WHITELIST_COOKIES').split(',')
        self.application.cookie_blacklist = False
    if self.application.cookie_blacklist:
        regex_cookies_list = [
            cookie + "=([^;]+;?)" for cookie in cookies_list
        ]
    else:
        # NOTE(review): iterates the raw COOKIES_LIST value rather than
        # cookies_list -- looks suspicious but preserved; confirm intent.
        regex_cookies_list = [
            "(" + cookie + "=[^;]+;?)"
            for cookie in self.db_config.Get('COOKIES_LIST')
        ]
    regex_string = '|'.join(regex_cookies_list)
    self.application.cookie_regex = re.compile(regex_string)
    # Outbound Proxy
    # Outbound proxy settings to be used inside request handler
    if outbound_options:
        if len(outbound_options) == 3:
            self.application.outbound_proxy_type = outbound_options[0]
            self.application.outbound_ip = outbound_options[1]
            self.application.outbound_port = int(outbound_options[2])
        else:
            self.application.outbound_proxy_type = "http"
            self.application.outbound_ip = outbound_options[0]
            self.application.outbound_port = int(outbound_options[1])
    else:
        self.application.outbound_ip, self.application.outbound_port, self.application.outbound_proxy_type = None, None, None
    if outbound_auth:
        self.application.outbound_username, self.application.outbound_password = outbound_auth.split(
            ":")
    else:
        self.application.outbound_username, self.application.outbound_password = None, None
    # Server has to be global, because it is used inside request handler to attach sockets for monitoring
    global server
    server = tornado.httpserver.HTTPServer(self.application)
    self.server = server
    # Header filters
    # Restricted headers are picked from framework/config/framework_config.cfg
    # These headers are removed from the response obtained from webserver, before sending it to browser
    global restricted_response_headers
    restricted_response_headers = self.config.FrameworkConfigGet(
        "PROXY_RESTRICTED_RESPONSE_HEADERS").split(",")
    # These headers are removed from request obtained from browser, before sending it to webserver
    global restricted_request_headers
    restricted_request_headers = self.config.FrameworkConfigGet(
        "PROXY_RESTRICTED_REQUEST_HEADERS").split(",")
    # HTTP Auth options
    if self.db_config.Get("HTTP_AUTH_HOST") != "None":
        self.application.http_auth = True
        # All the variables are lists
        self.application.http_auth_hosts = self.db_config.Get(
            "HTTP_AUTH_HOST").strip().split(',')
        self.application.http_auth_usernames = self.db_config.Get(
            "HTTP_AUTH_USERNAME").strip().split(',')
        self.application.http_auth_passwords = self.db_config.Get(
            "HTTP_AUTH_PASSWORD").strip().split(',')
        self.application.http_auth_modes = self.db_config.Get(
            "HTTP_AUTH_MODE").strip().split(',')
    else:
        self.application.http_auth = False
def check_mount_point_existence(self, options):
    """Make sure the SMB mount point directory exists, creating it if needed.

    :param options: dict holding the 'SMB_MOUNT_POINT' path.
    """
    smb_mount = options['SMB_MOUNT_POINT']
    missing = not os.path.exists(smb_mount)
    if missing:
        FileOperations.make_dirs(smb_mount)
def InitPluginOutputDir(self, PluginInfo):
    """Return the plugin's output directory, creating it first.

    The directory must exist so that plugin scripts can cd into it.
    """
    output_dir = self.SetConfigPluginOutputDir(PluginInfo)
    FileOperations.create_missing_dirs(output_dir)
    return output_dir
def CreateOutputDirForTarget(self, target_URL):
    """Create the per-target output directory when it is missing."""
    FileOperations.create_missing_dirs(
        self.GetOutputDirForTarget(target_URL))