def load_mappings_from_file(session, default, fallback):
    """Load plugin-code mappings from a config file into the DB.

    Falls back to ``fallback`` when ``default`` does not exist on disk.

    .note:: This needs to be a list instead of a dictionary to preserve
            order in python < 2.7

    :param session: SQLAlchemy session used to persist the mappings.
    :param default: Preferred path to the mappings config file.
    :type default: `str`
    :param fallback: Path used when ``default`` is missing.
    :type fallback: `str`
    :return: None
    :rtype: None
    """
    file_path = default
    if not os.path.isfile(file_path):
        file_path = fallback
    logging.info("Loading Mapping from: %s..", file_path)
    config_parser = parser.RawConfigParser()
    # Otherwise all the keys are converted to lowercase xD
    config_parser.optionxform = str
    if not os.path.isfile(file_path):  # check if the mapping file exists
        abort_framework("Mapping file not found at: %s" % file_path)
    config_parser.read(file_path)
    for owtf_code in config_parser.sections():
        mappings = {}
        category = None
        for mapping_type, data in config_parser.items(owtf_code):
            if mapping_type != 'category':
                # Record every mapping type seen (module-level registry).
                if mapping_type not in mapping_types:
                    mapping_types.append(mapping_type)
                # Each value encodes "code_____name".
                mapped_code, mapped_name = data.split('_____')
                mappings[mapping_type] = [mapped_code, mapped_name]
            else:
                category = data
        session.merge(models.Mapping(owtf_code=owtf_code, mappings=json.dumps(mappings), category=category))
    session.commit()
def get_test_groups_config(file_path):
    """Read the test groups from a config file.

    .note:: This needs to be a list instead of a dictionary to preserve
            order in python < 2.7

    :param file_path: The path to the config file
    :type file_path: `str`
    :return: List of test groups
    :rtype: `list`
    """
    test_groups = []
    config_file = FileOperations.open(file_path, 'r').read().splitlines()
    for line in config_file:
        line = line.strip()
        # Skip blank lines (the old `line[0]` check raised IndexError on
        # them) and comment lines.
        if not line or line.startswith('#'):
            continue
        try:
            code, priority, descrip, hint, url = line.split(' | ')
        except ValueError:
            abort_framework("Problem in Test Groups file: '%s' -> Cannot parse line: %s" % (file_path, line))
        # A too-short description falls back to the hint text.
        if len(descrip) < 2:
            descrip = hint
        if len(hint) < 2:
            hint = ""
        test_groups.append({
            'code': code,
            'priority': priority,
            'descrip': descrip,
            'hint': hint,
            'url': url
        })
    return test_groups
def start_proxy():
    """Start the inbound HTTP(S) proxy and its supporting processes.

    Verifies the inbound proxy port is free, then launches the proxy
    process and the transaction logger.

    :return: None
    :rtype: None
    """
    # Check if the inbound port is in use before launching anything.
    try:
        temp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        temp_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        temp_socket.bind((INBOUND_PROXY_IP, INBOUND_PROXY_PORT))
        temp_socket.close()
    except socket.error:
        abort_framework("Inbound proxy address already in use")
    # If everything is fine.
    proxy_process = ProxyProcess()
    # logging.warn is deprecated; logging.warning is the supported spelling.
    logging.warning(
        "{0}:{1} <-- HTTP(S) Proxy to which requests can be directed".
        format(INBOUND_PROXY_IP, str(INBOUND_PROXY_PORT)))
    proxy_process.initialize(USE_OUTBOUND_PROXY, OUTBOUND_PROXY_AUTH)
    transaction_logger = TransactionLogger(cache_dir=INBOUND_PROXY_CACHE_DIR)
    transaction_logger.initialize()
    proxy_process.start()
    logging.debug("Starting transaction logger process")
    transaction_logger.start()
    logging.debug("Proxy transaction's log file at %s", PROXY_LOG)
def load_config_db_file(session, default, fallback):
    """Load DB config settings from a file into ``ConfigSetting`` rows.

    Falls back to ``fallback`` when ``default`` does not exist.  Settings
    already marked dirty in the DB are left untouched.

    :param session: SQLAlchemy session used to persist the settings.
    :param default: Preferred path to the config file.
    :type default: `str`
    :param fallback: Path used when ``default`` is missing.
    :type fallback: `str`
    :return: None
    :rtype: None
    """
    file_path = default
    if not os.path.isfile(file_path):
        file_path = fallback
    # Lazy %-style args instead of eager string interpolation.
    logging.info("Loading Configuration from: %s..", file_path)
    config_parser = parser.RawConfigParser()
    config_parser.optionxform = str  # Otherwise all the keys are converted to lowercase xD
    if not os.path.isfile(file_path):  # check if the config file exists
        abort_framework("Config file not found at: %s" % file_path)
    config_parser.read(file_path)
    for section in config_parser.sections():
        for key, value in config_parser.items(section):
            old_config_obj = session.query(models.ConfigSetting).get(key)
            # Do not clobber values the user has edited (dirty flag).
            if not old_config_obj or not old_config_obj.dirty:
                if not key.endswith("_DESCRIP"):  # _DESCRIP are help values
                    config_obj = models.ConfigSetting(key=key, value=value, section=section)
                    # If _DESCRIP at the end, then use it as help text
                    if config_parser.has_option(section, "%s_DESCRIP" % key):
                        config_obj.descrip = config_parser.get(section, "%s_DESCRIP" % key)
                    session.merge(config_obj)
    session.commit()
def validate_format_plugin_list(self, session, plugin_codes):
    """Validate the plugin codes by checking if they exist.

    :param session: SQLAlchemy session for plugin lookups.
    :param list plugin_codes: OWTF plugin codes to be validated.
    :return: validated plugin codes.
    :rtype: list
    """
    # Ensure there is always a list to iterate from! :)
    if not plugin_codes:
        return []
    valid_plugin_codes = []
    plugins_by_group = get_plugins_by_group(session=session, plugin_group=self.plugin_group)
    for code in plugin_codes:
        found = False
        for plugin in plugins_by_group:  # Processing Loop
            # A code matches either the plugin's code or its name.
            if code in [plugin['code'], plugin['name']]:
                valid_plugin_codes.append(plugin['code'])
                found = True
                break
        if not found:
            # Fixed missing space between the two adjacent string literals
            # (previously rendered as "...to seeavailable plugin...").
            abort_framework("The code '%s' is not a valid plugin, please use the -l option to see "
                            "available plugin names and codes" % code)
    return valid_plugin_codes  # Return list of Codes
def authenticate(self):
    """Handle the authentication handshake with the TOR control connection.

    Sends the AUTHENTICATE command with the stored password and aborts
    the framework if TOR does not reply with a success code.

    :return:
    :rtype:
    """
    # NOTE(review): send/recv use str here — assumes a py2-style text
    # socket protocol; verify against the TOR control connection setup.
    command = 'AUTHENTICATE "%s"\r\n' % self.password
    self.tor_conn.send(command)
    reply = self.tor_conn.recv(1024)
    # 250 is the success response
    if not reply.startswith('250'):
        abort_framework("Authentication Error : %s" % reply)
    else:
        logging.info("Successfully Authenticated to TOR control")
def spawn_workers(self):
    """Spawn worker processes up to the allowed limit and hand them work.

    :return: None
    :rtype: None
    """
    # Keep creating workers until the configured process cap is reached.
    while len(self.workers) < self.get_allowed_process_count():
        self.spawn_worker()
    if not self.workers:
        abort_framework("Zero worker processes created because of lack of memory")
def open_connection(self):
    """Open a new socket connection to the TOR control port.

    Aborts the framework when the TOR daemon cannot be reached.

    :return: the connected socket
    :rtype:
    """
    try:
        conn = socket.socket()
        conn.connect((self.ip, self.tor_control_port))
    except Exception as error:
        abort_framework("Can't connect to the TOR daemon : %s" % str(error))
    else:
        logging.info("Connected to TOR control")
        return conn
def io_error(*args, **kwargs):
    """Call the original function while checking for I/O errors.

    If the ``owtf_clean`` keyword argument is not explicitly passed or is
    set to ``True``, force OWTF to properly exit via ``abort_framework``.
    """
    owtf_clean = kwargs.pop('owtf_clean', True)
    try:
        return func(*args, **kwargs)
    except (OSError, IOError) as e:
        if owtf_clean:
            abort_framework("Error when calling '%s'! %s." % (func.__name__, str(e)))
        # Bare `raise` re-raises with the original traceback intact
        # (`raise e` would reset it).
        raise
def show_param_info(self, full_args_list, plugin):
    """Display parameter information for a plugin, then exit the framework.

    :param full_args_list: Full args list
    :type full_args_list: `dict`
    :param plugin: Plugin
    :type plugin: `dict`
    :return: None
    :rtype: None
    """
    logging.info("\nInformation for %s" % self.show_plugin(plugin))
    logging.info("\nDescription: %s" % str(full_args_list['Description']))
    # Mandatory args are always listed; optional ones only when present.
    self.list_args(full_args_list['mandatory'], True)
    optional_args = full_args_list['Optional']
    if optional_args:
        self.list_args(optional_args, False)
    logging.info("\nUsage: %s\n" % self.get_args_example(full_args_list))
    abort_framework("User is only viewing options, exiting")
def load_framework_config_file(default, fallback, root_dir, owtf_pid):
    """Load the framework configuration into a global dictionary.

    Falls back to ``fallback`` when ``default`` does not exist.

    :param default: Preferred configuration file path.
    :type default: `str`
    :param fallback: Path used when ``default`` is missing.
    :type fallback: `str`
    :param root_dir: Framework root directory, substituted for FRAMEWORK_DIR.
    :type root_dir: `str`
    :param owtf_pid: PID substituted for OWTF_PID in config values.
    :return: None
    :rtype: None
    """
    config_path = default
    if not os.path.isfile(config_path):
        config_path = fallback
    logging.info("Loading config from: {}..".format(config_path))
    config_file = FileOperations.open(config_path, 'r')
    config_handler.set_val('FRAMEWORK_DIR', root_dir)  # Needed Later.
    for line in config_file:
        line = line.strip()
        # Skip blank lines (the old `key[0]` test raised an uncaught
        # IndexError on them) and comment lines, before splitting.
        if not line or line.startswith('#'):
            continue
        try:
            key = line.split(':')[0]
            value = line.replace("{}: ".format(key), "").strip()
            config_handler.set_val(key, multi_replace(value, {'FRAMEWORK_DIR': root_dir, 'OWTF_PID': str(owtf_pid)}))
        except ValueError:
            abort_framework("Problem in config file: {} -> Cannot parse line: {}".format(config_path, line))
def process_http_error_code(self, error, url):
    """Translate an HTTP connection error into a loggable message.

    :param error: Error (expected to expose a ``reason`` attribute)
    :type error:
    :param url: Target URL
    :type url: `str`
    :return: Message
    :rtype: `str`
    """
    message = ""
    reason = str(error.reason)
    if reason.startswith("[Errno 111]"):
        message = "ERROR: The connection was refused!: %s" % str(error)
        self.req_count_refused += 1
    elif reason.startswith("[Errno -2]"):
        # Hostname resolution failure is fatal for the framework.
        abort_framework("ERROR: cannot resolve hostname!: %s" % str(error))
    else:
        message = "ERROR: The connection was not refused, unknown error!"
    logging.getLogger('general').info(message)
    return "%s (Requester Object): %s\n%s" % (message, url, str(sys.exc_info()))
def __init__(self, args):
    """Initialise TOR settings from *args*, substituting defaults.

    An empty string in a slot selects the default for that slot:
    [ip, port, control_port, password, time].  Invalid numeric values
    abort the framework.  Finally opens the control connection and
    authenticates.
    """
    ip, port, control_port, password, renew_time = args[0], args[1], args[2], args[3], args[4]
    self.ip = "127.0.0.1" if ip == '' else ip
    if port == '':
        self.port = 9050
    else:
        try:
            self.port = int(port)
        except ValueError:
            abort_framework("Invalid TOR port")
    if control_port == '':
        self.tor_control_port = 9051
    else:
        try:
            self.tor_control_port = int(control_port)
        except ValueError:
            abort_framework("Invalid TOR Controlport")
    self.password = "******" if password == '' else password
    if renew_time == '':
        self.time = 5
    else:
        try:
            self.time = int(renew_time)
        except ValueError:
            abort_framework("Invalid TOR Time")
    # A renewal interval below one is meaningless.
    if self.time < 1:
        abort_framework("Invalid TOR Time")
    self.tor_conn = self.open_connection()
    self.authenticate()
def initialize(self, outbound_options=None, outbound_auth=""):
    """Initialize the proxy process.

    Builds the tornado application, configures the cache, SSL MiTM
    material, cookie filtering, outbound proxy, header filters and HTTP
    auth settings on it.

    :param outbound_options: Outbound proxy options ([type, ip, port] or [ip, port])
    :type outbound_options: `list`
    :param outbound_auth: Authentication string ("username:password")
    :type outbound_auth: `str`
    :return: None
    :rtype: None
    """
    # Fixed mutable default argument ([]); None is equivalent here since
    # the list is only truth-tested and indexed, never mutated.
    if outbound_options is None:
        outbound_options = []
    # The tornado application, which is used to pass variables to request handler
    self.application = tornado.web.Application(
        handlers=[(r'.*', ProxyHandler)],
        debug=False,
        gzip=True,
    )
    # All required variables in request handler
    # Required variables are added as attributes to application, so that request handler can access these
    self.application.inbound_ip = INBOUND_PROXY_IP
    self.application.inbound_port = int(INBOUND_PROXY_PORT)
    self.instances = INBOUND_PROXY_PROCESSES
    # Disable console logging
    self.logger.disable_console_logging()
    # Proxy CACHE
    # Cache related settings, including creating required folders according to cache folder structure
    self.application.cache_dir = INBOUND_PROXY_CACHE_DIR
    # Clean possible older cache directory.
    if os.path.exists(self.application.cache_dir):
        FileOperations.rm_tree(self.application.cache_dir)
    FileOperations.make_dirs(self.application.cache_dir)
    # SSL MiTM
    # SSL certs, keys and other settings (os.path.expanduser because they are stored in users home directory
    # ~/.owtf/proxy)
    self.application.ca_cert = os.path.expanduser(CA_CERT)
    self.application.ca_key = os.path.expanduser(CA_KEY)
    # To stop OWTF from breaking for our beloved users :P
    try:
        self.application.ca_key_pass = FileOperations.open(
            os.path.expanduser(CA_PASS_FILE), 'r', owtf_clean=False).read().strip()
    except IOError:
        self.application.ca_key_pass = "******"  # XXX: Legacy CA key pass for older versions.
    self.application.proxy_folder = os.path.dirname(self.application.ca_cert)
    self.application.certs_folder = os.path.expanduser(CERTS_FOLDER)
    try:  # Ensure CA.crt and Key exist.
        assert os.path.exists(self.application.ca_cert)
        assert os.path.exists(self.application.ca_key)
    except AssertionError:
        abort_framework("Files required for SSL MiTM are missing.Please run the install script")
    try:  # If certs folder missing, create that.
        assert os.path.exists(self.application.certs_folder)
    except AssertionError:
        FileOperations.make_dirs(self.application.certs_folder)
    # Blacklist (or) Whitelist Cookies
    # Building cookie regex to be used for cookie filtering for caching
    if WHITELIST_COOKIES == 'None':
        cookies_list = BLACKLIST_COOKIES.split(',')
        self.application.cookie_blacklist = True
    else:
        cookies_list = WHITELIST_COOKIES.split(',')
        self.application.cookie_blacklist = False
    if self.application.cookie_blacklist:
        regex_cookies_list = [cookie + "=([^;]+;?)" for cookie in cookies_list]
    else:
        regex_cookies_list = ["(" + cookie + "=[^;]+;?)" for cookie in cookies_list]
    regex_string = '|'.join(regex_cookies_list)
    self.application.cookie_regex = re.compile(regex_string)
    # Outbound Proxy
    # Outbound proxy settings to be used inside request handler
    if outbound_options:
        if len(outbound_options) == 3:
            self.application.outbound_proxy_type = outbound_options[0]
            self.application.outbound_ip = outbound_options[1]
            self.application.outbound_port = int(outbound_options[2])
        else:
            self.application.outbound_proxy_type = "http"
            self.application.outbound_ip = outbound_options[0]
            self.application.outbound_port = int(outbound_options[1])
    else:
        self.application.outbound_ip = None
        self.application.outbound_port = None
        self.application.outbound_proxy_type = None
    if outbound_auth:
        self.application.outbound_username, self.application.outbound_password = outbound_auth.split(":")
    else:
        self.application.outbound_username = None
        self.application.outbound_password = None
    self.server = tornado.httpserver.HTTPServer(self.application)
    # server has to be a class variable, because it is used inside request handler to attach sockets for monitoring
    ProxyHandler.server = self.server
    # Header filters
    # These headers are removed from the response obtained from webserver, before sending it to browser
    ProxyHandler.restricted_response_headers = PROXY_RESTRICTED_RESPONSE_HEADERS
    # These headers are removed from request obtained from browser, before sending it to webserver
    ProxyHandler.restricted_request_headers = PROXY_RESTRICTED_REQUEST_HEADERS
    # HTTP Auth options
    if HTTP_AUTH_HOST is not None:
        self.application.http_auth = True
        # All the variables are lists
        self.application.http_auth_hosts = HTTP_AUTH_HOST.strip().split(',')
        self.application.http_auth_usernames = HTTP_AUTH_USERNAME.strip().split(',')
        self.application.http_auth_passwords = HTTP_AUTH_PASSWORD.strip().split(',')
        self.application.http_auth_modes = HTTP_AUTH_MODE.strip().split(',')
    else:
        self.application.http_auth = False
def process_plugin(self, session, plugin_dir, plugin, status=None):
    """Process a plugin from running to ranking.

    :param session: SQLAlchemy session used to save (partial) output.
    :param str plugin_dir: Path to the plugin directory.
    :param dict plugin: The plugin dictionary with all the information.
    :param dict status: Running status of the plugin.
    :return: The output generated by the plugin when run.
    :return: None if the plugin was not run.
    :rtype: list
    """
    if status is None:
        status = {}
    # Ensure that the plugin CAN be run before starting anything.
    if not self.plugin_can_run(session=session, plugin=plugin, show_reason=True):
        return None
    # Save how long it takes for the plugin to run.
    self.timer.start_timer('Plugin')
    plugin['start'] = self.timer.get_start_date_time('Plugin')
    # Use relative path from targets folders while saving
    plugin['output_path'] = os.path.relpath(self.get_plugin_output_dir(plugin), get_output_dir_target())
    status['AllSkipped'] = False  # A plugin is going to be run.
    plugin['status'] = 'Running'
    self.plugin_count += 1
    logging.info(
        '_' * 10 + ' %d - Target: %s -> Plugin: %s (%s/%s) ' + '_' * 10,
        self.plugin_count, target_manager.get_target_url(), plugin['title'],
        plugin['group'], plugin['type'])
    # DB empty => grep plugins will fail, skip!!
    if ('grep' == plugin['type'] and num_transactions(session) == 0):
        logging.info('Skipped - Cannot run grep plugins: The Transaction DB is empty')
        return None
    output = None
    status_msg = ''
    partial_output = []
    abort_reason = ''
    try:
        output = self.run_plugin(plugin_dir, plugin)
        status_msg = 'Successful'
        status['SomeSuccessful'] = True
    except KeyboardInterrupt:
        # Just explain why crashed.
        status_msg = 'Aborted'
        abort_reason = 'Aborted by User'
        status['SomeAborted (Keyboard Interrupt)'] = True
    except SystemExit:
        # Abort plugin processing and get out to external exception
        # handling, information saved elsewhere.
        raise SystemExit
    except PluginAbortException as PartialOutput:
        # User aborted this plugin but its partial output is preserved.
        status_msg = 'Aborted (by user)'
        partial_output = PartialOutput.parameter
        abort_reason = 'Aborted by User'
        status['SomeAborted'] = True
    except UnreachableTargetException as PartialOutput:
        status_msg = 'Unreachable Target'
        partial_output = PartialOutput.parameter
        abort_reason = 'Unreachable Target'
        status['SomeAborted'] = True
    except FrameworkAbortException as PartialOutput:
        # Framework-level abort: handled after the finally block below.
        status_msg = 'Aborted (Framework Exit)'
        partial_output = PartialOutput.parameter
        abort_reason = 'Framework Aborted'
    # TODO: Handle this gracefully
    # Replace print by logging
    finally:
        # Always record end time, status and rank, whatever happened above.
        plugin['status'] = status_msg
        plugin['end'] = self.timer.get_end_date_time('Plugin')
        plugin['owtf_rank'] = self.rank_plugin(output, self.get_plugin_output_dir(plugin))
        try:
            if status_msg == 'Successful':
                save_plugin_output(session=session, plugin=plugin, output=output)
            else:
                save_partial_output(session=session, plugin=plugin, output=partial_output, message=abort_reason)
        except SQLAlchemyError as e:
            logging.error("Exception occurred while during database transaction : \n%s", str(e))
            # NOTE(review): this += will raise TypeError when output is
            # None or a list — confirm the intended type of `output`.
            output += str(e)
    if status_msg == 'Aborted':
        user_abort('Plugin')
    if abort_reason == 'Framework Aborted':
        abort_framework("Framework abort")
    return output