Example #1
def wait_for_idle(workers, queue):
    """ Wait until fetch queue is empty and handle user interrupt """
    while len(workers) > 0:
        try:
            if queue.empty():
                # Wait until every queued item has been marked done
                queue.join()
                for worker in workers:
                    worker.kill_received = True
                workers = []
            else:
                time.sleep(0.1)  # avoid busy-waiting while fetches are in flight
        except KeyboardInterrupt:
            utils.output_raw('')
            utils.output_info('Keyboard Interrupt Received, cleaning up threads')
            # Kill remaining workers
            for worker in workers:
                worker.kill_received = True
                if worker is not None and worker.is_alive():
                    worker.join(1)

            sys.exit()
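
The shutdown handshake above relies on the Queue.task_done()/join() contract: join() only returns once every item pulled off the queue has been marked done. A minimal standalone sketch of that contract (DemoWorker and demo_queue are hypothetical names, not part of this project):

import queue
import threading
import time

demo_queue = queue.Queue()

class DemoWorker(threading.Thread):
    """Hypothetical worker following the same kill_received convention."""
    def __init__(self):
        super().__init__()
        self.kill_received = False

    def run(self):
        while not self.kill_received:
            try:
                item = demo_queue.get(timeout=0.1)
            except queue.Empty:
                continue
            time.sleep(0.01)        # simulate a fetch
            demo_queue.task_done()  # without this, join() blocks forever

workers = [DemoWorker() for _ in range(4)]
for worker in workers:
    worker.start()
for n in range(20):
    demo_queue.put(n)
demo_queue.join()  # returns once all 20 items are marked done
for worker in workers:
    worker.kill_received = True
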
Example #2
def execute():
    """ Fetch /robots.txt and add the disallowed paths as target """
    worker_template = {'url': '', 'expected_response': [200, 302], 'timeout_count': 0, 'description': 'Robots.txt entry'}
    target_url = urljoin(conf.target_host, "/robots.txt")
    fetcher = Fetcher()
    response_code, content, headers = fetcher.fetch_url(target_url, 'GET', conf.user_agent, True, conf.fetch_timeout_secs)

    if response_code in (200, 302) and content:
        if conf.debug:
            utils.output_debug(content)

        match = re.findall(r'Disallow:\s*[a-zA-Z0-9/.-]*', content)
        added = 0
        for match_obj in match:
            if '?' not in match_obj and '.' not in match_obj:
                splitted = match_obj.split(':')
                if splitted[1]:
                    path = splitted[1].strip()
                    # Skip the root path and empty entries
                    if path != '/' and path != '':
                        new_path = urljoin(conf.target_host, path)
                        current_template = dict(worker_template)
                        current_template['url'] = new_path
                        database.paths.append(current_template)
        
                        if conf.debug:
                            utils.output_debug(str(current_template))
                            
                        added += 1
                    
        if added > 0:
            utils.output_info('Robots plugin: added ' + str(added) + ' base paths using /robots.txt')
        else:
            utils.output_info('Robots plugin: no usable entries in /robots.txt')
               
    else:
        utils.output_info('Robots plugin: /robots.txt not found on target site')
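
To see what the Disallow regex actually captures, here is a quick standalone check (the sample robots.txt content is invented). Note that '?' can never appear in a match because the character class excludes it, so the pattern stops right before a query string:

import re

sample = ("User-agent: *\n"
          "Disallow: /admin/\n"
          "Disallow: /tmp\n"
          "Disallow: /search?q=\n")

matches = re.findall(r'Disallow:\s*[a-zA-Z0-9/.-]*', sample)
# ['Disallow: /admin/', 'Disallow: /tmp', 'Disallow: /search']
for match_obj in matches:
    print(match_obj.split(':')[1].strip())  # /admin/  /tmp  /search
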
Example #3
def main():
    """ Main app logic """
    # Ensure the host is of the right format
    utils.sanitize_config()

    # Load target paths
    utils.output_info('Loading target paths')
    database.paths = loaders.load_targets('data/path.lst')

    # Import and run host plugins
    utils.output_info('Executing ' + str(len(host.__all__)) + ' host plugins')
    for plugin_name in host.__all__:
        plugin = __import__("plugins.host." + plugin_name, fromlist=[plugin_name])
        if hasattr(plugin, 'execute'):
            plugin.execute()
    
    # Spawn workers
    if conf.search_files or conf.debug:
        workers = spawn_workers(conf.thread_count, output=False)
    else:
        workers = spawn_workers(conf.thread_count)

    # Fill work queue with fetch list
    utils.output_info('Probing ' + str(len(database.paths)) + ' paths')
    for item in database.paths:
        database.fetch_queue.put(item)

    # Wait for initial valid path lookup
    wait_for_idle(workers, database.fetch_queue)
    utils.output_info('Found ' + str(len(database.valid_paths)) + ' valid paths')

    if conf.debug:
        for item in database.valid_paths:
            utils.output_debug(str(item))

    if conf.search_files:
        # Load target files
        utils.output_info('Loading target files')
        database.files = loaders.load_targets('data/file.lst')
        if conf.debug:
            for item in database.files:
                utils.output_debug('Target file added: ' + str(item))

        # Combine files with '/' and all valid paths
        tmp_list = list()
        for file in database.files:
            file_copy = dict(file)
            file_copy['url'] = urljoin(conf.target_host, file_copy['url'])
            if conf.debug:
                utils.output('Adding base target: ' + str(file_copy))
            tmp_list.append(file_copy)

            for valid_url in database.valid_paths:
                file_copy = dict(file)
                file_copy['url'] = valid_url['url'] + file['url']
                if conf.debug:
                    utils.output('Adding combined target: ' + str(file_copy))
                tmp_list.append(file_copy)

        # Fill Valid path with generated urls
        for item in tmp_list:
            database.valid_paths.append(item)

        if conf.debug:
            for item in database.valid_paths:
                utils.output_debug('Path to test: ' + str(item))

        # Import and run file plugins
        utils.output_info('Executing ' + str(len(path.__all__)) + ' file plugins')
        for plugin_name in path.__all__:
            plugin = __import__("plugins.path." + plugin_name, fromlist=[plugin_name])
            if hasattr(plugin, 'execute'):
                plugin.execute()

        # Spawn workers
        workers = spawn_workers(conf.thread_count)

        # Fill work queue with fetch list
        utils.output_info('Probing ' + str(len(database.valid_paths)) + ' items...')
        for item in database.valid_paths:
            database.fetch_queue.put(item)

        # Wait for file lookup
        wait_for_idle(workers, database.fetch_queue)
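
The combination step in the conf.search_files branch is easier to follow with concrete data. A minimal sketch with invented values standing in for conf.target_host, database.valid_paths and database.files:

from urllib.parse import urljoin

target_host = 'http://example.com'                    # stand-in for conf.target_host
valid_paths = [{'url': 'http://example.com/admin/'}]  # result of the first probing pass
files = [{'url': 'config.php', 'description': 'Config file'}]

targets = []
for file in files:
    base = dict(file)
    base['url'] = urljoin(target_host, base['url'])   # http://example.com/config.php
    targets.append(base)
    for valid in valid_paths:
        combined = dict(file)
        combined['url'] = valid['url'] + file['url']  # http://example.com/admin/config.php
        targets.append(combined)
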
Example #4
    # Spawn synchronized print output worker
    print_worker = PrintWorker()
    print_worker.daemon = True
    print_worker.start()
    
    if conf.debug:
        utils.output_debug('Version: ' + str(conf.version))
        utils.output_debug('Use GET instead of HEAD: ' + str(conf.use_get))
        utils.output_debug('Fetch timeout: ' + str(conf.fetch_timeout_secs))
        utils.output_debug('Max timeouts per url: ' + str(conf.max_timeout_count))
        utils.output_debug('Worker threads: ' + str(conf.thread_count))
        utils.output_debug('Target Host: ' + str(conf.target_host))
        utils.output_debug('Using Tor: ' + str(conf.use_tor))
        utils.output_debug('Content-type Blacklisting: ' + str(conf.content_type_blacklist))
        utils.output_debug('Using User-Agent: ' + str(conf.user_agent))
     
    utils.output_info('Starting Discovery on ' + conf.target_host)

    # Handle keyboard exit before multi-thread operations
    try:
        # Launch main loop
        main()
        # Print all remaining messages
        utils.output_info('Done.\n')
        database.output_queue.join()
    except KeyboardInterrupt:
        utils.output_raw('')
        utils.output_info('Keyboard Interrupt Received')
        database.output_queue.join()
        sys.exit(0)
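
The PrintWorker class itself is not shown in this example. A minimal sketch of the synchronized-print pattern it implies, assuming it drains an output queue so that only one thread ever writes to stdout:

import queue
import threading

output_queue = queue.Queue()  # stand-in for database.output_queue

class PrintWorker(threading.Thread):
    """Hypothetical implementation: serializes output from all worker threads."""
    def run(self):
        while True:
            message = output_queue.get()
            print(message)            # only this daemon thread touches stdout
            output_queue.task_done()  # lets output_queue.join() flush pending messages

print_worker = PrintWorker()
print_worker.daemon = True  # don't block interpreter exit
print_worker.start()
output_queue.put('hello from a worker thread')
output_queue.join()  # block until the message has been printed
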
Example #5
    def run(self):
        while not self.kill_received:
            # Poll instead of blocking so kill_received is re-checked promptly
            if not database.fetch_queue.empty():
                queued = database.fetch_queue.get()
                url = urljoin(conf.target_host, queued.get('url'))
                expected = queued.get('expected_response')
                description = queued.get('description')
                content_type_blacklist = queued.get('blacklist_content_types')

                if not content_type_blacklist:
                    content_type_blacklist = []

                if conf.use_get:
                    method = 'GET'
                else:
                    method = 'HEAD'

                response_code, content, headers = self.fetcher.fetch_url(url, method, conf.user_agent, False, conf.fetch_timeout_secs)

                if conf.debug:
                    utils.output_info("Thread #" + str(self.thread_id) + ": " + str(queued))

                if response_code == 0:  # timeout
                    if queued.get('timeout_count') < conf.max_timeout_count:
                        new_timeout_count = queued.get('timeout_count') + 1
                        queued['timeout_count'] = new_timeout_count

                        if conf.debug:
                            utils.output_info('Thread #' + str(self.thread_id) + ': re-queuing ' + str(queued))

                        # Add back the timed-out item
                        database.fetch_queue.put(queued)
                    else:
                        utils.output_timeout(url)

                elif response_code in expected:
                    # Response content type (the header may be missing)
                    content_type = headers.get('content-type', '')

                    # Fuse with the current url ('/test' becomes 'url.dom/test')
                    queued['url'] = urljoin(conf.target_host, queued['url'])

                    # Show the result unless a content-type blacklist is active
                    # and this content type is blacklisted
                    if not conf.content_type_blacklist or content_type not in content_type_blacklist:
                        if self.output:
                            if response_code == 401:
                                utils.output_found('*Password Protected* ' + description + ' at: ' + url)
                            else:
                                utils.output_found(description + ' at: ' + url)

                        # Add to valid paths
                        database.valid_paths.append(queued)

                # Mark item as processed
                database.fetch_queue.task_done()
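
The timeout budget in run() guarantees termination: a timed-out item is re-queued at most conf.max_timeout_count times. A standalone trace of that logic with invented values:

import queue

fetch_queue = queue.Queue()
max_timeout_count = 3  # stand-in for conf.max_timeout_count
fetch_queue.put({'url': '/slow', 'timeout_count': 0})

while not fetch_queue.empty():
    queued = fetch_queue.get()
    response_code = 0  # pretend every fetch times out
    if response_code == 0:
        if queued['timeout_count'] < max_timeout_count:
            queued['timeout_count'] += 1
            fetch_queue.put(queued)  # retry with an incremented counter
        else:
            print('Timeout limit reached for ' + queued['url'])
    fetch_queue.task_done()
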