Example #1
def sample_root_404():
    """ Get the root 404, this has to be done as soon as possible since plugins could use this information. """
    manager = ThreadManager()
    textutils.output_info('Benchmarking root 404')

    for ext in conf.crafted_404_extensions:
        random_file = str(uuid.uuid4())
        path = conf.path_template.copy()

        if path['url'] != '/':
            path['url'] = '/' + random_file + ext
        else:
            path['url'] = random_file + ext

        # We're not using the fetch cache for 404 sampling
        database.fetch_queue.put(path)

    # Forced bogus path check
    random_file = str(uuid.uuid4())
    path = conf.path_template.copy()
    path['url'] = '/' + random_file + '/'

    # We're not using the fetch cache for 404 sampling
    database.fetch_queue.put(path)

    workers = manager.spawn_workers(len(conf.crafted_404_extensions),
                                    FetchCrafted404Worker)
    manager.wait_for_idle(workers, database.fetch_queue)
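The queued URLs are guaranteed to be missing, so whatever the workers get back characterizes the server's 404 behavior, including "soft 404" pages served with a 200 code. Below is a minimal sketch of the comparison a worker could then make; the helpers similarity and looks_like_404 are hypothetical and not part of Tachyon.

# Hypothetical follow-up: compare a probe response against sampled 404 bodies.
# Neither helper exists in Tachyon; this only illustrates the technique.
from difflib import SequenceMatcher

def similarity(a, b):
    """Ratio in [0, 1] describing how alike two response bodies are."""
    return SequenceMatcher(None, a, b).ratio()

def looks_like_404(content, crafted_404_samples, threshold=0.9):
    """Treat a body that closely matches any sampled 404 as a soft 404."""
    return any(similarity(content, sample) >= threshold
               for sample in crafted_404_samples)
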
Example #2
def load_execute_file_plugins():
    """ Import and run path plugins """
    textutils.output_info('Executing ' + str(len(file.__all__)) +
                          ' file plugins')
    for plugin_name in file.__all__:
        plugin = __import__("tachyon.plugins.file." + plugin_name,
                            fromlist=[plugin_name])
        if hasattr(plugin, 'execute'):
            plugin.execute()
Example #3
def load_execute_host_plugins():
    """ Import and run host plugins """
    textutils.output_info('Executing ' + str(len(host.__all__)) +
                          ' host plugins')
    for plugin_name in host.__all__:
        plugin = __import__("tachyon.plugins.host." + plugin_name,
                            fromlist=[plugin_name])
        if hasattr(plugin, 'execute'):
            plugin.execute()
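Examples #2 and #3 differ only in the package they scan, so the pattern generalizes. Here is a sketch of the same logic as one helper, using importlib instead of raw __import__; the load_execute_plugins name is hypothetical, not Tachyon's API.

# Hypothetical generalization of the two loaders above.
import importlib

def load_execute_plugins(package, kind):
    """Import every module listed in package.__all__ and run its execute()."""
    for plugin_name in package.__all__:
        plugin = importlib.import_module(
            'tachyon.plugins.%s.%s' % (kind, plugin_name))
        if hasattr(plugin, 'execute'):
            plugin.execute()

A plugin only needs to appear in its package's __all__ and expose a module-level execute() to be picked up, e.g. load_execute_plugins(host, 'host').
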
Example #4
def test_paths_exists():
    """ 
    Test for path existence using http codes and computed 404
    Spawn workers and turn off output for now, it would be irrelevant at this point. 
    """
    manager = ThreadManager()

    # Fill work queue with fetch list
    for path in database.paths:
        dbutils.add_path_to_fetch_queue(path)

    # Consider some file target as potential path
    for file in database.files:
        if not file.get('no_suffix'):
            file_as_path = file.copy()
            file_as_path['url'] = '/' + file_as_path['url']
            dbutils.add_path_to_fetch_queue(file_as_path)

    done_paths = []
    recursion_depth = 0

    textutils.output_debug('Cached: ' + str(database.path_cache))
    while database.fetch_queue.qsize() > 0:
        textutils.output_info('Probing ' + str(database.fetch_queue.qsize()) +
                              ' paths')

        # Wait for initial valid path lookup
        workers = manager.spawn_workers(conf.thread_count,
                                        TestPathExistsWorker)
        manager.wait_for_idle(workers, database.fetch_queue)

        recursion_depth += 1

        if not conf.recursive:
            break

        if recursion_depth >= conf.recursive_depth_limit:
            break

        for validpath in database.valid_paths:

            if validpath['url'] == '/' or validpath['url'] in done_paths:
                continue

            done_paths.append(validpath['url'])

            for path in database.paths:
                if path['url'] in ('/', ''):
                    continue
                path = path.copy()
                path['url'] = validpath['url'] + path['url']
                dbutils.add_path_to_fetch_queue(path)

    textutils.output_info('Found ' + str(len(database.valid_paths)) +
                          ' valid paths')
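When recursion is on, every known path is re-queued under each newly confirmed directory, so the queue grows multiplicatively with hits until conf.recursive_depth_limit stops it. A worked example of the combination step, with made-up values:

# Made-up values illustrating one recursive combination.
validpath = {'url': '/admin'}
path = {'url': '/backup'}

combined = path.copy()
combined['url'] = validpath['url'] + combined['url']  # '/admin' + '/backup'
assert combined['url'] == '/admin/backup'
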
Example #5
def get_session_cookies():
    """ Fetch initial session cookies """
    textutils.output_info('Fetching session cookie')
    path = conf.path_template.copy()
    path['url'] = '/'

    # We're not using the fetch cache for session cookie sampling
    fetcher = Fetcher()

    code, content, headers = fetcher.fetch_url('/', conf.user_agent, 10)
    if code == 200:
        cookies = headers.get('Set-Cookie')
        if cookies:
            database.session_cookie = cookies
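Once stored, the cookie can be replayed on later requests so the scan stays inside one server-side session. A naive sketch of that replay, assuming the raw Set-Cookie value is usable as-is (a real client would strip attributes such as Path and HttpOnly first); fetch_with_session is hypothetical, not Tachyon's Fetcher internals.

# Hypothetical replay of the captured cookie.
import urllib.request

def fetch_with_session(url, session_cookie):
    """Attach a previously captured Set-Cookie value to a follow-up request."""
    request = urllib.request.Request(url)
    if session_cookie:
        # Naive: sends the raw Set-Cookie value back verbatim.
        request.add_header('Cookie', session_cookie)
    return urllib.request.urlopen(request, timeout=10)
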
Example #6
def load_target_files(running_path):
    """ Load the target files in the database """
    textutils.output_info('Loading target files')
    database.files += loaders.load_json_resource('files')
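The snippet only shows the load; the shape of each entry can be inferred from how Example #4 consumes database.files. A hypothetical entry follows, with the field set inferred rather than copied from Tachyon's data files.

# Hypothetical entry shape, inferred from Example #4's use of
# file.get('no_suffix') and file['url']; not Tachyon's actual data.
example_file_entry = {'url': 'robots.txt', 'no_suffix': True}
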
Example #7
    textutils.output_debug('Max timeouts per url: ' +
                           str(conf.max_timeout_count))
    textutils.output_debug('Worker threads: ' + str(conf.thread_count))
    textutils.output_debug('Target Host: ' + str(conf.target_host))
    textutils.output_debug('Using Tor: ' + str(conf.use_tor))
    textutils.output_debug('Eval-able output: ' + str(conf.eval_output))
    textutils.output_debug('JSON output: ' + str(conf.json_output))
    textutils.output_debug('Using User-Agent: ' + str(conf.user_agent))
    textutils.output_debug('Search only for files: ' + str(conf.files_only))
    textutils.output_debug('Search only for subdirs: ' +
                           str(conf.directories_only))

    if conf.proxy_url:
        textutils.output_debug('Using proxy: ' + str(conf.proxy_url))

    textutils.output_info('Starting Discovery on ' + conf.base_url)

    if conf.use_tor:
        textutils.output_info('Using Tor; be patient, it WILL be slow!')
        textutils.output_info(
            'Max timeout count and url fetch timeout doubled for the occasion ;)'
        )
        conf.max_timeout_count *= 2
        conf.fetch_timeout_secs *= 2

    # Handle keyboard exit before multi-thread operations
    print_results_worker = None
    try:
        # Resolve target host to avoid multiple dns lookups
        if not conf.proxy_url:
            resolved, port = dnscache.get_host_ip(conf.target_host,
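The snippet is truncated mid-call, but the comment explains the intent: resolve the target once and reuse the answer for every worker. A minimal sketch of a memoizing resolver in that spirit; this is hypothetical, not Tachyon's actual dnscache module.

# Hypothetical memoizing resolver; not Tachyon's dnscache implementation.
import socket

_cache = {}

def get_host_ip(host, port=80):
    """Resolve a hostname once, then serve repeated lookups from the cache."""
    if host not in _cache:
        _cache[host] = socket.gethostbyname(host)
    return _cache[host], port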