Example #1
def load_execute_host_plugins():
    """ Import and run host plugins """
    textutils.output_info('Executing ' + str(len(host.__all__)) + ' host plugins')
    for plugin_name in host.__all__:
        plugin = __import__ ("plugins.host." + plugin_name, fromlist=[plugin_name])
        if hasattr(plugin , 'execute'):
             plugin.execute()
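For reference, a minimal runnable sketch of the same dynamic-import pattern using importlib.import_module, the modern equivalent of __import__ with fromlist (the package and plugin names below are hypothetical, not part of the project):

import importlib

def load_plugins(package, names):
    """ Sketch: import each plugin module and call its execute() if present """
    for name in names:
        # e.g. importlib.import_module("plugins.host.myplugin")
        module = importlib.import_module(package + "." + name)
        if hasattr(module, 'execute'):
            module.execute()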
Example #2
def sample_root_404():
    """ Get the root 404, this has to be done as soon as possible since plugins could use this information. """
    manager = ThreadManager()
    textutils.output_info('Benchmarking root 404')

    for ext in conf.crafted_404_extensions:
        random_file = str(uuid.uuid4())
        path = conf.path_template.copy()

        if path['url'] != '/':
            path['url'] = '/' + random_file + ext
        else:
            path['url'] = random_file + ext

        # We're not using the fetch cache for 404 sampling
        database.fetch_queue.put(path)

    # Forced bogus path check
    random_file = str(uuid.uuid4())
    path = conf.path_template.copy()
    path['url'] = '/' + random_file + '/'

    # We're not using the fetch cache for 404 sampling
    database.fetch_queue.put(path)

    workers = manager.spawn_workers(len(conf.crafted_404_extensions),
                                    FetchCrafted404Worker)
    manager.wait_for_idle(workers, database.fetch_queue)
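A hedged sketch of the probe URLs this sampling produces; the extensions are illustrative stand-ins for conf.crafted_404_extensions:

import uuid

extensions = ['.html', '.php', '']  # hypothetical crafted-404 extensions
for ext in extensions:
    probe = '/' + str(uuid.uuid4()) + ext
    print(probe)  # e.g. /0b7b4567-89ab-4cde-8123-456789abcdef.html, expected to 404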
Example #3
def output_stats(workers=None):
    elapsed_time = datetime.now() - database.scan_start_time
    if not elapsed_time.seconds:
        request_per_seconds = 0
    else:
        request_per_seconds = database.successful_fetch_count / elapsed_time.seconds

    if request_per_seconds:
        remaining_seconds = int(database.fetch_queue.qsize() / request_per_seconds)
    else:
        remaining_seconds = 0
    remaining_timedelta = timedelta(seconds=remaining_seconds)

    request_per_seconds = "%.2f" % request_per_seconds

    stats_string = ''.join([
        str(request_per_seconds), ' reqs/sec',
        ', Done: ', str(database.successful_fetch_count),
        ', Queued: ', str(database.fetch_queue.qsize()),
        ', Timeouts: ', str(database.total_timeouts), ' (~', str(database.latest_successful_request_time), 's)',
        ', Remaining: ', str(remaining_timedelta),
        ', Workers: ', str(len(workers)),
        ' (hit ctrl+c again to exit)'
    ])

    textutils.output_info(stats_string)
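To make the ETA arithmetic above concrete, a self-contained sketch of the same computation (function and argument names are illustrative):

from datetime import timedelta

def estimate_remaining(done_count, queued_count, elapsed_seconds):
    """ Requests/sec so far, projected onto the remaining queue """
    rate = done_count / elapsed_seconds if elapsed_seconds else 0
    return timedelta(seconds=int(queued_count / rate)) if rate else timedelta(0)

print(estimate_remaining(500, 2500, 60))  # 500 done in 60s, 2500 queued -> 0:05:00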
Example #4
def load_execute_file_plugins():
    """ Import and run path plugins """
    textutils.output_info('Executing ' + str(len(file.__all__)) + ' file plugins')
    for plugin_name in file.__all__:
        plugin = __import__ ("plugins.file." + plugin_name, fromlist=[plugin_name])
        if hasattr(plugin , 'execute'):
             plugin.execute()
Example #5
def load_execute_file_plugins():
    """ Import and run path plugins """
    textutils.output_info('Executing ' + str(len(file.__all__)) + ' file plugins')
    for plugin_name in file.__all__:
        plugin = __import__ ("plugins.file." + plugin_name, fromlist=[plugin_name])
        if hasattr(plugin , 'execute'):
             plugin.execute()
Example #6
def execute():
    """ Fetch /.svn/entries and parse for target paths """

    textutils.output_info(" - Svn Plugin: Searching for /.svn/entries")
    target_url = conf.target_base_path + "/.svn/entries"

    fetcher = Fetcher()
    response_code, content, headers = fetcher.fetch_url(
        target_url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False
    )
    if response_code in (200, 302):
        if conf.allow_download:
            textutils.output_info(" - Svn Plugin: /.svn/entries found! crawling... (will download files to output/)")
        else:
            textutils.output_info(
                " - Svn Plugin: /.svn/entries found! crawling... (use -a to download files instead of printing)"
            )
        # parse entries
        parse_svn_entries(conf.target_base_path)

        # Clean up display
        if conf.allow_download:
            textutils.output_info("")
    else:
        textutils.output_info(" - Svn Plugin: no /.svn/entries found")
Example #7
def execute():
    """ Generate common simple paths (a-z, 0-9) """
    path_added = 0
    file_added = 0

    for char in range(ord('a'), ord('z') + 1):
        add_generated_dir(chr(char))
        path_added += 1
        add_generated_file(chr(char))
        file_added += 1

    for char in range(ord('0'), ord('9') + 1):
        add_generated_dir(chr(char))
        path_added += 1
        add_generated_file(chr(char))
        file_added += 1

    for year in range(1990, date.today().year + 5):
        add_generated_dir(str(year))
        path_added += 1

    textutils.output_info(' - PathGenerator Plugin: added ' + str(path_added) +
                          ' computer generated paths.')
    textutils.output_info(' - PathGenerator Plugin: added ' + str(file_added) +
                          ' computer generated files.')
Example #8
def execute():
    """ Fetch /.svn/entries and parse for target paths """

    textutils.output_info(' - Svn Plugin: Searching for /.svn/entries')
    target_url = conf.target_base_path + "/.svn/entries"

    fetcher = Fetcher()
    response_code, content, headers = fetcher.fetch_url(
        target_url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False)
    if response_code in (200, 302):
        if conf.allow_download:
            textutils.output_info(
                ' - Svn Plugin: /.svn/entries found! crawling... (will download files to output/)'
            )
        else:
            textutils.output_info(
                ' - Svn Plugin: /.svn/entries found! crawling... (use -a to download files instead of printing)'
            )
        # parse entries
        parse_svn_entries(conf.target_base_path)

        # Clean up display
        if conf.allow_download:
            textutils.output_info('')
    else:
        textutils.output_info(' - Svn Plugin: no /.svn/entries found')
Example #9
def execute():
    """ Generate common simple paths (a-z, 0-9) """
    path_added = 0
    file_added = 0

    if "skipAlpha" not in plugin_settings:
        for char in range(ord('a'), ord('z') + 1):
            add_generated_path(chr(char))
            path_added += 1
            add_generated_file(chr(char))
            file_added += 1

    if "skipNumeric" not in plugin_settings:
        for char in range(ord('0'), ord('9') + 1):
            add_generated_path(chr(char))
            path_added += 1
            add_generated_file(chr(char))
            file_added += 1

    if "skipYear" not in plugin_settings:
        for year in range(1990, date.today().year + 5):
            add_generated_path(str(year))
            path_added += 1

    textutils.output_info(' - PathGenerator Plugin: added ' + str(path_added) +
                          ' computer generated paths.')
    textutils.output_info(' - PathGenerator Plugin: added ' + str(file_added) +
                          ' computer generated files.')
Example #10
def sample_root_404():
    """ Get the root 404, this has to be done as soon as possible since plugins could use this information. """
    manager = ThreadManager()
    textutils.output_info('Benchmarking root 404')
    
    for ext in conf.crafted_404_extensions:
        random_file = str(uuid.uuid4())
        path = conf.path_template.copy()

        if path['url'] != '/':
            path['url'] = '/' + random_file + ext
        else:
            path['url'] = random_file + ext

        # We're not using the fetch cache for 404 sampling
        database.fetch_queue.put(path)

    # Forced bogus path check
    random_file = str(uuid.uuid4())
    path = conf.path_template.copy()
    path['url'] = '/' + random_file + '/'

    # We're not using the fetch cache for 404 sampling
    database.fetch_queue.put(path)

    workers = manager.spawn_workers(len(conf.crafted_404_extensions), FetchCrafted404Worker)
    manager.wait_for_idle(workers, database.fetch_queue)
Example #11
def load_execute_host_plugins():
    """ Import and run host plugins """
    textutils.output_info('Executing ' + str(len(host.__all__)) + ' host plugins')
    for plugin_name in host.__all__:
        plugin = __import__ ("plugins.host." + plugin_name, fromlist=[plugin_name])
        if hasattr(plugin , 'execute'):
             plugin.execute()
Example #12
def execute():
    """ Generate common simple paths (a-z, 0-9) """
    path_added = 0
    file_added = 0

    if "skipAlpha" not in plugin_settings:
        for char in range(ord('a'), ord('z')+1):
            add_generated_path(chr(char))
            path_added += 1
            add_generated_file(chr(char))
            file_added += 1

    if "skipNumeric" not in plugin_settings:
        for char in range(ord('0'), ord('9')+1):
            add_generated_path(chr(char))
            path_added += 1
            add_generated_file(chr(char))
            file_added += 1

    if "skipYear" not in plugin_settings:
        for year in range(1990, date.today().year + 5):
            add_generated_path(str(year))
            path_added += 1

    textutils.output_info(' - PathGenerator Plugin: added ' + str(path_added) + ' computer generated paths.')
    textutils.output_info(' - PathGenerator Plugin: added ' + str(file_added) + ' computer generated files.')
Example #13
    def wait_for_idle(self, workers, queue):
        """ Wait until fetch queue is empty and handle user interrupt """
        while not self.kill_received and not queue.empty():
            try:
                sleep(0.1)
            except KeyboardInterrupt:
                try:
                    stats.output_stats()
                    sleep(1)
                except KeyboardInterrupt:
                    textutils.output_info('Keyboard Interrupt Received, cleaning up threads')
                    # Clean reference to sockets
                    database.connection_pool = None

                    self.kill_received = True

                    # Kill remaining workers but don't join the queue (we want to abort :))
                    for worker in workers:
                        worker.kill_received = True
                        if worker is not None and worker.is_alive():
                            worker.join(1)

                    # Kill the soft
                    sys.exit()

        # Make sure everything is done before sending control back to application
        textutils.output_debug("Threads: joining queue of size: " + str(queue.qsize()))
        queue.join()
        textutils.output_debug("Threads: join done")

        for worker in workers:
            worker.kill_received = True
            worker.join()
Example #14
    def wait_for_idle(self, workers, queue):
        """ Wait until fetch queue is empty and handle user interrupt """
        while not database.kill_received and not queue.empty():
            try:
                # Make sure everything is done before sending control back to application
                textutils.output_debug("Threads: joining queue of size: " +
                                       str(queue.qsize()))
                queue.join()
                textutils.output_debug("Threads: join done")
            except KeyboardInterrupt:
                try:
                    stats.output_stats(workers)
                    sleep(1)  # The time you have to re-press ctrl+c to kill the app.
                except KeyboardInterrupt:
                    textutils.output_info(
                        'Keyboard Interrupt Received, waiting for blocking threads to exit'
                    )
                    # Clean reference to sockets
                    database.connection_pool = None
                    database.kill_received = True
                    self.kill_workers(workers)
                    sys.exit(0)

        # Make sure we get all the worker's results before continuing the next step
        self.kill_workers(workers)
Example #15
def output_stats():
    elapsed_time = datetime.now() - database.scan_start_time
    if not elapsed_time.seconds:
        request_per_seconds = 0
    else:
        request_per_seconds = database.successful_fetch_count / elapsed_time.seconds

    if request_per_seconds:
        remaining_seconds = int(database.fetch_queue.qsize() / request_per_seconds)
    else:
        remaining_seconds = 0
    remaining_timedelta = timedelta(seconds=remaining_seconds)

    stats_string = ''.join([
        str(request_per_seconds), ' reqs/sec', ', Done: ',
        str(database.successful_fetch_count), ', Queued: ',
        str(database.fetch_queue.qsize()), ', Timeouts: ',
        str(database.total_timeouts), ' (~',
        str(database.latest_successful_request_time), 's)', ', remaining: ',
        str(remaining_timedelta), ' (hit ctrl+c again to exit)'
    ])

    textutils.output_info(stats_string)
Example #16
    def wait_for_idle(self, workers, queue):
        """ Wait until fetch queue is empty and handle user interrupt """
        while not self.kill_received and not queue.empty():
            try:
                sleep(0.1)
            except KeyboardInterrupt:
                try:
                    stats.output_stats()
                    sleep(1)
                except KeyboardInterrupt:
                    textutils.output_info(
                        'Keyboard Interrupt Received, cleaning up threads')
                    self.kill_received = True

                    # Kill remaining workers but don't join the queue (we want to abort:))
                    for worker in workers:
                        worker.kill_received = True
                        if worker is not None and worker.is_alive():
                            worker.join(1)

                    # Kill the soft
                    sys.exit()

        # Make sure everything is done before sending control back to application
        textutils.output_debug("Threads: joining queue of size: " +
                               str(queue.qsize()))
        queue.join()
        textutils.output_debug("Threads: join done")

        for worker in workers:
            worker.kill_received = True
            worker.join()
Example #17
def test_paths_exists():
    """ 
    Test for path existence using http codes and computed 404
    Spawn workers and turn off output for now, it would be irrelevant at this point. 
    """
    manager = ThreadManager()

    # Fill work queue with fetch list
    for path in database.paths:
        dbutils.add_path_to_fetch_queue(path)

    # Consider some file target as potential path
    for file in database.files:
        if not file.get('no_suffix'):
            file_as_path = file.copy()
            file_as_path['url'] = '/' + file_as_path['url']
            dbutils.add_path_to_fetch_queue(file_as_path)

    done_paths = []
    recursion_depth = 0

    textutils.output_debug('Cached: ' + str(database.path_cache))
    while database.fetch_queue.qsize() > 0:
        textutils.output_info('Probing ' + str(database.fetch_queue.qsize()) +
                              ' paths')

        # Wait for initial valid path lookup
        workers = manager.spawn_workers(conf.thread_count,
                                        TestPathExistsWorker)
        manager.wait_for_idle(workers, database.fetch_queue)

        recursion_depth += 1

        if not conf.recursive:
            break

        if recursion_depth >= conf.recursive_depth_limit:
            break

        for validpath in database.valid_paths:

            if validpath['url'] == '/' or validpath['url'] in done_paths:
                continue

            done_paths.append(validpath['url'])

            for path in database.paths:
                if path['url'] in ('/', ''):
                    continue
                path = path.copy()
                path['url'] = validpath['url'] + path['url']
                dbutils.add_path_to_fetch_queue(path)

    textutils.output_info('Found ' + str(len(database.valid_paths)) +
                          ' valid paths')
Example #18
def parse_svn_entries(url):
    description_file = 'SVN entries file'
    description_dir = 'SVN entries Dir'
    target_url = url + "/.svn/entries"
    fetcher = Fetcher()

    response_code, content, headers = fetcher.fetch_url(
        target_url,
        conf.user_agent,
        conf.fetch_timeout_secs,
        limit_len=False,
        add_headers=base_headers)

    if response_code in conf.expected_file_responses and content:
        tokens = content.decode().split('\n')
        if 'dir' in tokens:
            for pos, token in enumerate(tokens):
                if token == 'dir':
                    # Fetch more entries recursively
                    if tokens[pos - 1] != '':
                        textutils.output_debug(' - Svn Plugin: Found dir: ' +
                                               url + '/' + tokens[pos - 1])

                        if conf.allow_download:
                            textutils.output_info(
                                ' - Svn Plugin: Downloading: ' + url + '/' +
                                tokens[pos - 1] + '\r')
                        else:
                            textutils.output_found(description_dir + ' at: ' +
                                                   url + '/' + tokens[pos - 1])

                        # Parse next
                        parse_svn_entries(url + "/" + tokens[pos - 1])

                elif token == 'file':
                    textutils.output_debug(' - Svn Plugin: Found file: ' +
                                           url + '/' + tokens[pos - 1])
                    if conf.allow_download:
                        textutils.output_info(' - Svn Plugin: Downloading: ' +
                                              url + '/' + tokens[pos - 1] +
                                              '\r')
                        # Fetch text-base file
                        path = url + "/.svn/text-base" + '/' + tokens[
                            pos - 1] + ".svn-base"
                        fetcher = Fetcher()
                        response_code, content, headers = fetcher.fetch_url(
                            path,
                            conf.user_agent,
                            conf.fetch_timeout_secs,
                            limit_len=False)
                        save_file(url + '/' + tokens[pos - 1], content)
                    else:
                        textutils.output_found(description_file + ' at: ' +
                                               url + '/' + tokens[pos - 1])
Example #19
def test_paths_exists():
    """ 
    Test for path existence using http codes and computed 404
    Spawn workers and turn off output for now, it would be irrelevant at this point. 
    """
    manager = ThreadManager()
    
    # Fill work queue with fetch list
    for path in database.paths:
        dbutils.add_path_to_fetch_queue(path)

    # Consider some file target as potential path
    for file in database.files:
        if not file.get('no_suffix'):
            file_as_path = file.copy()
            file_as_path['url'] = '/' + file_as_path['url']
            dbutils.add_path_to_fetch_queue(file_as_path)

    done_paths = []
    recursion_depth = 0

    textutils.output_debug('Cached: ' + str(database.path_cache))
    while database.fetch_queue.qsize() > 0:
        textutils.output_info('Probing ' + str(database.fetch_queue.qsize()) + ' paths')

        # Wait for initial valid path lookup
        workers = manager.spawn_workers(conf.thread_count, TestPathExistsWorker)
        manager.wait_for_idle(workers, database.fetch_queue)

        recursion_depth += 1
        
        if not conf.recursive:
            break
        
        if recursion_depth >= conf.recursive_depth_limit:
            break    
        
        for validpath in database.valid_paths:
            
            if validpath['url'] == '/' or validpath['url'] in done_paths:
                continue
            
            done_paths.append(validpath['url'])
            
            for path in database.paths:
                if path['url'] in ('/', ''):
                    continue
                path = path.copy()
                path['url'] = validpath['url'] + path['url']
                dbutils.add_path_to_fetch_queue(path)

    textutils.output_info('Found ' + str(len(database.valid_paths)) + ' valid paths')
Example #20
def get_session_cookies():
    """ Fetch initial session cookies """
    textutils.output_info('Fetching session cookie')
    path = conf.path_template.copy()
    path['url'] = '/'

    # We're not using the fetch cache for session cookie sampling
    fetcher = Fetcher()

    code, content, headers = fetcher.fetch_url('/', conf.user_agent, 10)
    if code == 200:
        cookies = headers.get('Set-Cookie')
        if cookies:
            database.session_cookie = cookies
Example #21
def execute():
    """ Fetch /.svn/entries and parse for target paths """
    current_template = dict(conf.path_template)
    current_template['description'] = '/.svn/entries found directory'

    target_url = urljoin(conf.target_base_path, "/.svn/entries")
    fetcher = Fetcher()
    response_code, content, headers = fetcher.fetch_url(target_url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False)

    if response_code in (200, 302) and content:
        added = 0
        try:
            tree = ElementTree.fromstring(content)
            entry_tags = tree.iter()
            if entry_tags:
                for entry in entry_tags:
                    kind = entry.attrib.get("kind")
                    if kind and kind == "dir":
                        current_template = current_template.copy()
                        current_template['url'] = '/' + entry.attrib["name"]
                        database.paths.append(current_template)
                        added += 1

        except Exception:
            textutils.output_info(' - Svn Plugin: no usable entries in /.svn/entries')
        else:
            if added > 0:
                textutils.output_info(' - Svn Plugin: added ' + str(added) + ' base paths using /.svn/entries')
            else:
                textutils.output_info(' - Svn Plugin: no usable entries in /.svn/entries')
    else:
        textutils.output_info(' - Svn Plugin: no /.svn/entries found')
Example #22
def get_session_cookies():
    """ Fetch initial session cookies """
    textutils.output_info('Fetching session cookie')
    path = conf.path_template.copy()
    path['url'] = '/'

    # We're not using the fetch cache for session cookie sampling
    fetcher = Fetcher()

    code, content, headers = fetcher.fetch_url('/', conf.user_agent, 10)
    if code == 200:
        cookies = headers.get('Set-Cookie')
        if cookies:
            database.session_cookie = cookies
Example #23
def execute():
    """ Fetch /.svn/entries and parse for target paths """

    textutils.output_info(' - Svn Plugin: Searching for /.svn/entries')
    target_url = conf.target_base_path + "/.svn/entries"

    fetcher = Fetcher()
    response_code, content, headers = fetcher.fetch_url(
        target_url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False)
    svn_legacy = True

    if response_code in conf.expected_file_responses and content:

        if conf.allow_download:
            textutils.output_info(
                ' - Svn Plugin: /.svn/entries found! crawling... (will download files to output/)'
            )
        else:
            textutils.output_info(
                ' - Svn Plugin: /.svn/entries found! crawling... (use -a to download files instead of printing)'
            )

        # test for version 1.7+
        target_url = conf.target_base_path + "/.svn/wc.db"
        fetcher = Fetcher()
        response_code, content, headers = fetcher.fetch_url(
            target_url,
            conf.user_agent,
            conf.fetch_timeout_secs,
            limit_len=False)

        #if response_code in conf.expected_file_responses and content:
        #    textutils.output_info(' - Svn Plugin: SVN 1.7+ detected, parsing wc.db')
        #    svn_legacy = False
        #    save_file(conf.target_base_path + '/wc.db', content)

        # Process index
        if svn_legacy:
            # parse entries
            parse_svn_entries(conf.target_base_path)
        else:
            parse_svn_17_db(conf.target_base_path + '/wc.db')

        # Clean up display
        if conf.allow_download:
            textutils.output_info('')
    else:
        textutils.output_info(' - Svn Plugin: no /.svn/entries found')
Example #24
def load_cookie_file(afile):
    """
    Loads the supplied cookie file
    """
    if not afile:
        return None

    try:
        with open(afile, 'r') as cookie_file:
            content = cookie_file.read()
            content = content.replace('Cookie: ', '')
            content = content.replace('\n', '')
            return content
    except IOError:
        textutils.output_info('Supplied cookie file not found, will use server provided cookies')
        return None
Example #25
def output_stats():
    # NOTE: this creates a new Lock on every call, so it does not actually
    # serialize concurrent callers; a shared module-level lock would be needed.
    lock = Lock()
    lock.acquire()

    average_timeouts = database.timeouts / max(database.item_count, 1)
    estimated_future_timeouts = average_timeouts * database.fetch_queue.qsize()
    estimated_total_remaining = int(estimated_future_timeouts + database.fetch_queue.qsize())
    total_requests = database.item_count + database.timeouts
    elapsed_time = datetime.now() - database.scan_start_time
    time_per_request = elapsed_time / max(total_requests, 1)  # average time spent per request
    remaining = time_per_request * estimated_total_remaining

    textutils.output_info(str(total_requests / max(elapsed_time.seconds, 1)) + ' reqs/sec' +
                          ', Done: ' + str(database.item_count) +
                          ', Queued: ' + str(database.fetch_queue.qsize()) +
                          ', Timeouts: ' + str(database.timeouts) +
                          ', throttle: ' + str(database.throttle_delay) +
                          's, remaining: ' + str(remaining)[:-7] +
                          ' (press ctrl+c again to exit)')

    lock.release()
Example #26
def load_cookie_file(afile):
    """
    Loads the supplied cookie file
    """
    if not afile:
        return None

    try:
        with open(afile, 'r') as cookie_file:
            content = cookie_file.read()
            content = content.replace('Cookie: ', '')
            content = content.replace('\n', '')
            return content
    except IOError:
        textutils.output_info(
            'Supplied cookie file not found, will use server provided cookies')
        return None
Example #27
def execute():
    """ Fetch /.svn/entries and parse for target paths """

    textutils.output_info(' - Svn Plugin: Searching for /.svn/entries')
    target_url = conf.target_base_path + "/.svn/entries"

    fetcher = Fetcher()
    response_code, content, headers = fetcher.fetch_url(target_url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False)
    svn_legacy = True

    if not isinstance(content, str):
        content = content.decode('utf-8', 'ignore')

    if response_code in conf.expected_file_responses and content:

        if conf.allow_download:
            textutils.output_info(' - Svn Plugin: /.svn/entries found! crawling... (will download files to output/)')
        else:
            textutils.output_info(' - Svn Plugin: /.svn/entries found! crawling... (use -a to download files instead of printing)')
       
        # test for version 1.7+
        target_url = conf.target_base_path + "/.svn/wc.db"
        fetcher = Fetcher()
        response_code, content, headers = fetcher.fetch_url(target_url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False)

        #if response_code in conf.expected_file_responses and content:
        #    textutils.output_info(' - Svn Plugin: SVN 1.7+ detected, parsing wc.db')
        #    svn_legacy = False
        #    save_file(conf.target_base_path + '/wc.db', content)

        # Process index
        if svn_legacy:
            # parse entries
            parse_svn_entries(conf.target_base_path)
        # else:
        #     parse_svn_17_db(conf.target_base_path + '/wc.db')

        # Clean up display
        if conf.allow_download:
            textutils.output_info('')
    else:
        textutils.output_info(' - Svn Plugin: no /.svn/entries found')
Example #28
    def wait_for_idle(self, workers, queue):
        """ Wait until fetch queue is empty and handle user interrupt """
        while not database.kill_received and not queue.empty():
            try:
                sleep(0.1)
            except KeyboardInterrupt:
                try:
                    stats.output_stats()
                    sleep(1)
                except KeyboardInterrupt:
                    textutils.output_info(
                        'Keyboard Interrupt Received, cleaning up threads')
                    # Clean reference to sockets
                    database.connection_pool = None
                    database.kill_received = True

                    # Kill remaining workers but don't join the queue (we want to abort :))
                    for worker in workers:
                        if worker is not None and worker.is_alive():
                            worker.kill_received = True
                            worker.join(0)

                    # Set leftover queue items done in case of a kill
                    while not queue.empty():
                        queue.get()
                        queue.task_done()

                    break

        # Make sure everything is done before sending control back to application
        textutils.output_debug("Threads: joining queue of size: " +
                               str(queue.qsize()))
        queue.join()
        textutils.output_debug("Threads: join done")

        # Make sure we get all the worker's results before continuing the next step
        for worker in workers:
            if worker is not None and worker.is_alive():
                worker.kill_received = True
                worker.join()
Example #29
    def wait_for_idle(self, workers, queue):
        """ Wait until fetch queue is empty and handle user interrupt """
        while not database.kill_received and not queue.empty():
            try:
                # Make sure everything is done before sending control back to application
                textutils.output_debug("Threads: joining queue of size: " + str(queue.qsize()))
                queue.join()
                textutils.output_debug("Threads: join done")
            except KeyboardInterrupt:
                try:
                    stats.output_stats(workers)
                    sleep(1)  # The time you have to re-press ctrl+c to kill the app.
                except KeyboardInterrupt:
                    textutils.output_info('Keyboard Interrupt Received, waiting for blocking threads to exit')
                    # Clean reference to sockets
                    database.connection_pool = None
                    database.kill_received = True
                    self.kill_workers(workers)
                    sys.exit(0)

        # Make sure we get all the worker's results before continuing the next step
        self.kill_workers(workers)
Example #30
def parse_svn_entries(url):
    description_file = "SVN entries file at"
    description_dir = "SVN entries Dir at"
    target_url = url + "/.svn/entries"
    fetcher = Fetcher()
    response_code, content, headers = fetcher.fetch_url(
        target_url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False
    )

    if response_code in (200, 302) and content:
        tokens = content.split("\n")
        if "dir" in tokens:
            for pos, token in enumerate(tokens):
                if token == "dir":
                    # Fetch more entries recursively
                    if tokens[pos - 1] != "":
                        textutils.output_debug(" - Svn Plugin: Found dir: " + url + "/" + tokens[pos - 1])

                        if conf.allow_download:
                            textutils.output_info(" - Svn Plugin: Downloading: " + url + "/" + tokens[pos - 1] + "\r")
                        else:
                            textutils.output_found(description_dir + " at: " + url + "/" + tokens[pos - 1])

                        # Parse next
                        parse_svn_entries(url + "/" + tokens[pos - 1])

                elif token == "file":
                    textutils.output_debug(" - Svn Plugin: Found file: " + url + "/" + tokens[pos - 1])
                    if conf.allow_download:
                        textutils.output_info(" - Svn Plugin: Downloading: " + url + "/" + tokens[pos - 1] + "\r")
                        # Fetch text-base file
                        path = url + "/.svn/text-base" + "/" + tokens[pos - 1] + ".svn-base"
                        fetcher = Fetcher()
                        response_code, content, headers = fetcher.fetch_url(
                            path, conf.user_agent, conf.fetch_timeout_secs, limit_len=False
                        )
                        save_file(url + "/" + tokens[pos - 1], content)
                    else:
                        textutils.output_found(description_file + " at: " + url + "/" + tokens[pos - 1])
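To clarify the token-per-line layout the parser walks (the entry name sits on the line before each 'dir' or 'file' marker), a hedged sample of a legacy pre-1.7 .svn/entries file with invented names:

# Hypothetical legacy .svn/entries excerpt:
#   admin
#   dir
#   index.php
#   file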
Example #31
def parse_svn_entries(url):
    description_file = 'SVN entries file'
    description_dir = 'SVN entries Dir'
    target_url = url + "/.svn/entries"
    fetcher = Fetcher()

    response_code, content, headers = fetcher.fetch_url(target_url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False, add_headers=base_headers)
    if not isinstance(content, str):
        content = content.decode('utf-8', 'ignore')

    if response_code in conf.expected_file_responses and content:
        tokens = content.split('\n')
        if 'dir' in tokens:
            for pos, token in enumerate(tokens):
                if token == 'dir':
                    # Fetch more entries recursively
                    if tokens[pos-1] != '':
                        textutils.output_debug(' - Svn Plugin: Found dir: ' + url + '/' + tokens[pos-1])

                        if conf.allow_download:
                            textutils.output_info(' - Svn Plugin: Downloading: ' + url + '/' + tokens[pos-1] + '\r')
                        else:
                            textutils.output_found(description_dir + ' at: ' + url + '/' + tokens[pos-1])

                        # Parse next
                        parse_svn_entries(url + "/" + tokens[pos-1])

                elif token == 'file':
                    textutils.output_debug(' - Svn Plugin: Found file: ' + url + '/' + tokens[pos-1])
                    if conf.allow_download:
                        textutils.output_info(' - Svn Plugin: Downloading: ' + url + '/' + tokens[pos-1] + '\r')
                        # Fetch text-base file
                        path = url + "/.svn/text-base" + '/' + tokens[pos-1] + ".svn-base"
                        fetcher = Fetcher()
                        response_code, content, headers = fetcher.fetch_url(path, conf.user_agent,
                                                                            conf.fetch_timeout_secs, limit_len=False)
                        save_file(url + '/' + tokens[pos-1], content)
                    else:
                        textutils.output_found(description_file + ' at: ' + url + '/' + tokens[pos-1])
Example #32
def output_stats():
    # NOTE: this creates a new Lock on every call, so it does not actually
    # serialize concurrent callers; a shared module-level lock would be needed.
    lock = Lock()
    lock.acquire()

    average_timeouts = database.timeouts / max(database.item_count, 1)
    estimated_future_timeouts = average_timeouts * database.fetch_queue.qsize()
    estimated_total_remaining = int(estimated_future_timeouts +
                                    database.fetch_queue.qsize())
    total_requests = database.item_count + database.timeouts
    elapsed_time = datetime.now() - database.scan_start_time
    time_per_request = elapsed_time / max(total_requests, 1)  # average time spent per request
    remaining = time_per_request * estimated_total_remaining

    textutils.output_info(
        str(total_requests / max(elapsed_time.seconds, 1)) + ' reqs/sec' +
        ', Done: ' + str(database.item_count) + ', Queued: ' +
        str(database.fetch_queue.qsize()) + ', Timeouts: ' +
        str(database.timeouts) + ', throttle: ' +
        str(database.throttle_delay) + "s, remaining: " + str(remaining)[:-7] +
        " (press ctrl+c again to exit)")

    lock.release()
Example #33
    def wait_for_idle(self, workers, queue):
        """ Wait until fetch queue is empty and handle user interrupt """
        while not database.kill_received and not queue.empty():
            try:
                sleep(0.1)
            except KeyboardInterrupt:
                try:
                    stats.output_stats()
                    sleep(1)
                except KeyboardInterrupt:
                    textutils.output_info('Keyboard Interrupt Received, cleaning up threads')
                    # Clean reference to sockets
                    database.connection_pool = None
                    database.kill_received = True

                    # Kill remaining workers but don't join the queue (we want to abort :))
                    for worker in workers:
                        if worker is not None and worker.is_alive():
                            worker.kill_received = True
                            worker.join(0)

                    # Set leftover queue items done in case of a kill
                    while not queue.empty():
                        queue.get()
                        queue.task_done()

                    break

        # Make sure everything is done before sending control back to application
        textutils.output_debug("Threads: joining queue of size: " + str(queue.qsize()))
        queue.join()
        textutils.output_debug("Threads: join done")

        # Make sure we get all the worker's results before continuing the next step
        for worker in workers:
            if worker is not None and worker.is_alive():
                worker.kill_received = True
                worker.join()
Example #34
def execute():
    """ Generate common simple paths (a-z, 0-9) """
    path_added = 0
    file_added = 0

    for char in range(ord('a'), ord('z') + 1):
        add_generated_dir(chr(char))
        path_added += 1
        add_generated_file(chr(char))
        file_added += 1

    for char in range(ord('0'), ord('9') + 1):
        add_generated_dir(chr(char))
        path_added += 1
        add_generated_file(chr(char))
        file_added += 1

    for year in range(1990, date.today().year + 5):
        add_generated_dir(str(year))
        path_added += 1

    textutils.output_info(' - PathGenerator Plugin: added ' + str(path_added) + ' computer generated paths.')
    textutils.output_info(' - PathGenerator Plugin: added ' + str(file_added) + ' computer generated files.')
Example #35
def execute():
    """ Fetch sitemap.xml and add each entry as a target """

    current_template = dict(conf.path_template)
    current_template['description'] = 'sitemap.xml entry'

    target_url = urljoin(conf.target_base_path, "/sitemap.xml")
    fetcher = Fetcher()
    response_code, content, headers = fetcher.fetch_url(
        target_url,
        conf.user_agent,
        conf.fetch_timeout_secs,
        limit_len=False,
        add_headers={})

    if not isinstance(content, str):
        content = content.decode('utf-8', 'ignore')

    if response_code in (200, 302) and content:

        regexp = re.compile(r'(?im).*<url>\s*<loc>(.*)</loc>\s*</url>.*')
        matches = re.findall(regexp, content)

        textutils.output_debug("SitemapXML plugin")

        added = 0
        for match in matches:
            if not isinstance(match, str):
                match = match.decode('utf-8', 'ignore')
            parsed = urlparse(match)
            if parsed.path:
                new_path = parsed.path
            else:
                continue

            # Remove trailing /
            if new_path.endswith('/'):
                new_path = new_path[:-1]

            if add_path(new_path):
                added += 1

            textutils.output_debug(" - Added: %s from /sitemap.xml" % new_path)

        if added > 0:
            textutils.output_info(' - SitemapXML Plugin: added %d base paths '
                                  'using /sitemap.xml' % added)
        else:
            textutils.output_info(' - SitemapXML Plugin: no usable entries '
                                  'in /sitemap.xml')

    else:
        textutils.output_info(
            ' - SitemapXML Plugin: /sitemap.xml not found on '
            'target site')
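For reference, a hedged sample of the sitemap.xml shape the regular expression above targets (URLs are invented):

# <?xml version="1.0" encoding="UTF-8"?>
# <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
#   <url><loc>http://example.com/blog/</loc></url>
#   <url><loc>http://example.com/about</loc></url>
# </urlset>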
Example #36
def execute():
    """ Fetch /robots.txt and add the disallowed paths as target """
    current_template = dict(conf.path_template)
    current_template['description'] = 'Robots.txt entry'

    target_url = urljoin(conf.target_base_path, "/robots.txt")

    fetcher = Fetcher()
    response_code, content, headers = fetcher.fetch_url(
        target_url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False)
    if isinstance(content, str):
        content = content.encode('utf-8')

    if response_code in (200, 302) and content:
        if not isinstance(content, str):
            content = content.decode('utf-8', 'ignore')
        matches = re.findall(r'Disallow:\s*/[a-zA-Z0-9-/\r]+\n', content)
        textutils.output_debug(content)

        added = 0
        for match in matches:
            # Filter out some characters
            match = filter(lambda c: c not in ' *?.\n\r\t', match)

            if match:
                match = ''.join(match)

            # Split on ':'
            splitted = match.split(':')
            if splitted[1]:
                target_path = splitted[1]
                textutils.output_debug(target_path)

                # Remove trailing /
                if target_path.endswith('/'):
                    target_path = target_path[:-1]

                current_template = current_template.copy()
                current_template['url'] = target_path
                database.paths.append(current_template)
                textutils.output_debug(' - Robots Plugin Added: ' +
                                       str(target_path) + ' from robots.txt')
                added += 1

        if added > 0:
            textutils.output_info(' - Robots Plugin: added ' + str(added) +
                                  ' base paths using /robots.txt')
        else:
            textutils.output_info(
                ' - Robots Plugin: no usable entries in /robots.txt')

    else:
        textutils.output_info(
            ' - Robots Plugin: /robots.txt not found on target site')
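As a usage note, a minimal hedged sketch of extracting Disallow paths from robots.txt text with a single regular expression (sample content is invented):

import re

robots_txt = "User-agent: *\nDisallow: /admin/\nDisallow: /backup/\n"
paths = [p.rstrip('/') for p in re.findall(r'Disallow:\s*(/\S*)', robots_txt)]
print(paths)  # ['/admin', '/backup']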
Example #37
def execute():
    """ Fetch sitemap.xml and add each entry as a target """

    current_template = dict(conf.path_template)
    current_template['description'] = 'sitemap.xml entry'

    target_url = urljoin(conf.target_base_path, "/sitemap.xml")
    fetcher = Fetcher()
    response_code, content, headers = fetcher.fetch_url(target_url,
                                                        conf.user_agent,
                                                        conf.fetch_timeout_secs,
                                                        limit_len=False,
                                                        add_headers={}
                                                        )

    if not isinstance(content, str):
        content = content.decode('utf-8', 'ignore')

    if response_code in (200, 302) and content:

        regexp = re.compile(r'(?im).*<url>\s*<loc>(.*)</loc>\s*</url>.*')
        matches = re.findall(regexp, content)

        textutils.output_debug("SitemapXML plugin")

        added = 0
        for match in matches:
            new_path = match.decode().split(conf.target_host)[1]

            # Remove trailing /
            if new_path.endswith('/'):
                new_path = new_path[:-1]   

            add_path(new_path)
            add_file(new_path)

            textutils.output_debug(" - Added: %s from /sitemap.xml" % new_path)

            added += 1

        if added > 0:
            textutils.output_info(' - SitemapXML Plugin: added %d base paths '
                                  'using /sitemap.xml' % added)
        else:
            textutils.output_info(' - SitemapXML Plugin: no usable entries '
                                  'in /sitemap.xml')
               
    else:
        textutils.output_info(' - SitemapXML Plugin: /sitemap.xml not found on '
                              'target site')
Example #38
def execute():
    """ Fetch /robots.txt and add the disallowed paths as target """
    current_template = dict(conf.path_template)
    current_template['description'] = 'Robots.txt entry'
    
    target_url = urljoin(conf.target_base_path, "/robots.txt")

    fetcher = Fetcher()
    response_code, content, headers = fetcher.fetch_url(target_url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False)
    if isinstance(content, str):
        content = content.encode('utf-8')

    if response_code in (200, 302) and content:
        if not isinstance(content, str):
            content = content.decode('utf-8', 'ignore')
        matches = re.findall(r'Disallow:\s*/[a-zA-Z0-9-/\r]+\n', content)
        textutils.output_debug(content)

        added = 0
        for match in matches:
            # Filter out some characters
            match = filter(lambda c: c not in ' *?.\n\r\t', match)

            if match:
                match = ''.join(match)
            
            # Split on ':'               
            splitted = match.split(':')
            if splitted[1]:
                target_path = splitted[1]
                textutils.output_debug(target_path)
                
                # Remove trailing /
                if target_path.endswith('/'):
                    target_path = target_path[:-1]   

                current_template = current_template.copy()
                current_template['url'] = target_path
                database.paths.append(current_template)
                textutils.output_debug(' - Robots Plugin Added: ' + str(target_path) + ' from robots.txt')
                added += 1
                    
        if added > 0:
            textutils.output_info(' - Robots Plugin: added ' + str(added) + ' base paths using /robots.txt')
        else:
            textutils.output_info(' - Robots Plugin: no usable entries in /robots.txt')
               
    else:
        textutils.output_info(' - Robots Plugin: /robots.txt not found on target site')
Example #39
def execute():
    """ Fetch /.svn/entries and parse for target paths """
    current_template = dict(conf.path_template)
    current_template['description'] = '/.svn/entries found directory'

    target_url = urljoin(conf.target_base_path, "/.svn/entries")
    fetcher = Fetcher()
    response_code, content, headers = fetcher.fetch_url(
        target_url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False)

    if response_code in (200, 302) and content:
        added = 0
        try:
            tree = ElementTree.fromstring(content)
            entry_tags = tree.iter()
            if entry_tags:
                for entry in entry_tags:
                    kind = entry.attrib.get("kind")
                    if kind and kind == "dir":
                        current_template = current_template.copy()
                        current_template['url'] = '/' + entry.attrib["name"]
                        database.paths.append(current_template)
                        added += 1

        except Exception:
            textutils.output_info(
                ' - Svn Plugin: no usable entries in /.svn/entries')
        else:
            if added > 0:
                textutils.output_info(' - Svn Plugin: added ' + str(added) +
                                      ' base paths using /.svn/entries')
            else:
                textutils.output_info(
                    ' - Svn Plugin: no usable entries in /.svn/entries')
    else:
        textutils.output_info(' - Svn Plugin: no /.svn/entries found')
Example #40
def load_target_paths():
    """ Load the target paths in the database """
    textutils.output_info('Loading target paths')
    # Add paths
    database.paths += loaders.load_targets('data/path.lst')
Example #41
    conf.target_base_path = parsed_path
    conf.is_ssl = is_ssl

    textutils.output_debug('Version: ' + str(conf.version))
    textutils.output_debug('Max timeouts per url: ' +
                           str(conf.max_timeout_count))
    textutils.output_debug('Worker threads: ' + str(conf.thread_count))
    textutils.output_debug('Target Host: ' + str(conf.target_host))
    textutils.output_debug('Using Tor: ' + str(conf.use_tor))
    textutils.output_debug('Eval-able output: ' + str(conf.eval_output))
    textutils.output_debug('Using User-Agent: ' + str(conf.user_agent))
    textutils.output_debug('Search only for files: ' + str(conf.files_only))
    textutils.output_debug('Search only for subdirs: ' +
                           str(conf.directories_only))

    textutils.output_info('Starting Discovery on ' + conf.target_host)

    if conf.use_tor:
        textutils.output_info('Using Tor, be patient it WILL be slow!')
        textutils.output_info(
            'Max timeout count and url fetch timeout doubled for the occasion ;)'
        )
        conf.max_timeout_count *= 2
        conf.fetch_timeout_secs *= 2

    # Handle keyboard exit before multi-thread operations
    try:
        # Resolve target host to avoid multiple dns lookups
        resolved, port = dnscache.get_host_ip(parsed_host, parsed_port)

        # Benchmark target host
Example #42
    def run(self):
        while not self.kill_received:
            try:
                queued = database.fetch_queue.get(block=False)
                url = conf.target_base_path + queued.get('url')
                description = queued.get('description')
                textutils.output_debug("Testing directory: " + url + " " + str(queued))

                stats.update_stats(url)

                # Add trailing / for paths
                if not url.endswith('/') and url != '/':
                    url += '/'

                # Fetch directory
                start_time = datetime.now()
                response_code, content, headers = self.fetcher.fetch_url(url, conf.user_agent, database.latest_successful_request_time, limit_len=False)
                end_time = datetime.now()

                # Fetch '/' but don't submit it to more logging/existence tests
                if queued.get('url') == '/':
                    if queued not in database.valid_paths:
                        database.valid_paths.append(queued)

                    database.fetch_queue.task_done()
                    continue

                if response_code == 500:
                    textutils.output_debug("HIT 500 on: " + str(queued))

                # handle timeout
                if response_code in conf.timeout_codes:
                    handle_timeout(queued, url, self.thread_id, output=self.output)
                elif response_code == 404 and detect_tomcat_fake_404(content):
                    database.valid_paths.append(queued)
                    textutils.output_found('Tomcat redirect, ' + description + ' at: ' + conf.base_url + url, {
                        "description": description,
                        "url": conf.base_url + url,
                        "code": response_code,
                        "special": "tomcat-redirect",
                        "severity": queued.get('severity'),
                    })
                elif response_code in conf.expected_path_responses:
                    # Compare content with generated 404 samples
                    is_valid_result = test_valid_result(content)

                    if is_valid_result:
                        # Test if behavior is ok.
                        normal_behavior = test_behavior(content)
                    else:
                        # We don't compute behavior on invalid results
                        normal_behavior = True

                    if normal_behavior and database.behavior_error:
                        textutils.output_info('Normal behavior seems to be restored.')
                        database.behavior_error = False

                    if is_valid_result and not normal_behavior:
                        # We don't declare a behavior change until the current hit has exceeded the maximum
                        # chances it can get.
                        if not database.behavior_error and queued.get('behavior_chances', 0) >= conf.max_behavior_tries:
                            textutils.output_info('Behavior change detected! Results may '
                                                  'be incomplete or tachyon may never exit.')
                            textutils.output_debug('Chances taken: ' + str(queued.get('behavior_chances', 0)))
                            textutils.output_debug(queued.get('url'))
                            database.behavior_error = True

                    # If we find a valid result but the behavior buffer is not full, we give the
                    # url a chance and increase its chance count. We consider this a false behavior test,
                    # since an incomplete behavior buffer could give false positives.
                    # Additionally, if the fetch queue is empty and we're still not in global behavior error, we
                    # consider all the remaining hits as valid, as they are hits that were given a chance.
                    if is_valid_result and len(database.behavioral_buffer) < conf.behavior_queue_size \
                            and not database.behavior_error and database.fetch_queue.qsize() != 0:
                        if not queued.get('behavior_chances'):
                            queued['behavior_chances'] = 1
                        else:
                            queued['behavior_chances'] += 1

                        if queued['behavior_chances'] < conf.max_behavior_tries:
                            textutils.output_debug('Time for a chance')
                            textutils.output_debug('Chance left to target ' + queued.get('url') + ', re-queuing ' +
                                                   ' qsize: ' + str(database.fetch_queue.qsize()) +
                                                   ' chances: ' + str(queued.get('behavior_chances')))
                            database.fetch_queue.put(queued)
                            database.fetch_queue.task_done()
                            continue
                        else:
                            textutils.output_debug('Chances count busted! ' + queued.get('url') +
                                                   ' qsize: ' + str(database.fetch_queue.qsize()))

                    elif response_code == 401:
                        # Output the result, but don't keep the url since we can't poke inside a protected folder
                        textutils.output_found('Password Protected - ' + description + ' at: ' + conf.base_url + url, {
                            "description": description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "severity": queued.get('severity'),
                        })
                    # At this point, we have a valid result and the behavioral buffer is full.
                    # The behavior of the hit has been taken into account and the app is not in global behavior error
                    elif is_valid_result:
                        # Add path to valid_path for future actions
                        database.valid_paths.append(queued)

                        # If we reach this point, all edge-cases should be handled and all subsequent requests
                        # should be benchmarked against this new behavior
                        reset_behavior_database()

                        if response_code == 500:
                            textutils.output_found('ISE, ' + description + ' at: ' + conf.base_url + url, {
                                "description": description,
                                "url": conf.base_url + url,
                                "code": response_code,
                                "severity": queued.get('severity'),
                            })
                        elif response_code == 403:
                            textutils.output_found('*Forbidden* ' + description + ' at: ' + conf.base_url + url, {
                                "description": description,
                                "url": conf.base_url + url,
                                "code": response_code,
                                "severity": queued.get('severity'),
                            })
                        else:
                            textutils.output_found(description + ' at: ' + conf.base_url + url, {
                                "description": description,
                                "url": conf.base_url + url,
                                "code": response_code,
                                "severity": queued.get('severity'),
                            })

                elif response_code in conf.redirect_codes:
                    if queued.get('handle_redirect', True):
                        location = headers.get('location')
                        if location:
                            handle_redirects(queued, location)

                # Stats
                if response_code not in conf.timeout_codes:
                    stats.update_processed_items()
                    compute_request_time(start_time, end_time)

                # Mark item as processed
                database.fetch_queue.task_done()
            except Empty:
                continue
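The re-queue logic above is easier to follow in isolation. Below is a minimal, runnable sketch of the chance-counting mechanism, with MAX_BEHAVIOR_TRIES standing in for conf.max_behavior_tries (an assumption; the real value lives in tachyon's config):

from queue import Queue

MAX_BEHAVIOR_TRIES = 3  # assumed stand-in for conf.max_behavior_tries

def give_chance_or_bust(queued, fetch_queue):
    """Re-queue a suspicious hit until its chance count is exhausted."""
    queued['behavior_chances'] = queued.get('behavior_chances', 0) + 1
    if queued['behavior_chances'] < MAX_BEHAVIOR_TRIES:
        fetch_queue.put(queued)  # give the url another try later
        return True              # caller would `continue` here
    return False                 # chances busted: declare the behavior change

fetch_queue = Queue()
item = {'url': '/admin/'}
while give_chance_or_bust(item, fetch_queue):
    item = fetch_queue.get()
print(item['behavior_chances'])  # 3

Each re-queue defers the verdict until the hit has been retried against a fuller behavior buffer.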
Ejemplo n.º 43
0
    
    textutils.output_debug('Version: ' + str(conf.version))
    textutils.output_debug('Max timeouts per url: ' + str(conf.max_timeout_count))
    textutils.output_debug('Worker threads: ' + str(conf.thread_count))
    textutils.output_debug('Target Host: ' + str(conf.target_host))
    textutils.output_debug('Using Tor: ' + str(conf.use_tor))
    textutils.output_debug('Eval-able output: ' + str(conf.eval_output))
    textutils.output_debug('JSON output: ' + str(conf.json_output))
    textutils.output_debug('Using User-Agent: ' + str(conf.user_agent))
    textutils.output_debug('Search only for files: ' + str(conf.files_only))
    textutils.output_debug('Search only for subdirs: ' + str(conf.directories_only))

    if conf.proxy_url:
        textutils.output_debug('Using proxy: ' + str(conf.proxy_url))

    textutils.output_info('Starting Discovery on ' + conf.target_host)
    
    if conf.use_tor:
        textutils.output_info('Using Tor, be patient it WILL be slow!')
        textutils.output_info('Max timeout count and url fetch timeout doubled for the occasion ;)')
        conf.max_timeout_count *= 2
        conf.fetch_timeout_secs *= 2

    # Handle keyboard exit before multi-thread operations
    print_results_worker = None
    try:
        # Resolve target host to avoid multiple dns lookups
        if not conf.proxy_url:
            resolved, port = dnscache.get_host_ip(conf.target_host, conf.target_port)

        # Disable urllib3's SSL warnings (globally)
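The snippet ends on a truncated comment. Silencing urllib3's certificate warnings globally is usually a one-liner; a sketch of what such a block typically does, not a copy of the cut-off original:

import urllib3

# Suppress the InsecureRequestWarning emitted for unverified HTTPS requests
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)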
Ejemplo n.º 44
0
    def run(self):
        while not self.kill_received:
            try:
                # Non-blocking get since we use the queue as a ring buffer
                queued = database.fetch_queue.get(block=False)
                url = conf.target_base_path + queued.get('url')
                description = queued.get('description')
                match_string = queued.get('match_string')

                textutils.output_debug("Testing: " + url + " " + str(queued))
                stats.update_stats(url)

                # Fetch the target url
                start_time = datetime.now()
                if match_string:
                    response_code, content, headers = self.fetcher.fetch_url(url, conf.user_agent, database.latest_successful_request_time, limit_len=False)
                    # Make sure we always match the string against string content
                    if not isinstance(content, str):
                        content = content.decode('utf-8', 'ignore')
                else:
                    response_code, content, headers = self.fetcher.fetch_url(url, conf.user_agent, database.latest_successful_request_time)
                end_time = datetime.now()

                # handle timeout
                if response_code in conf.timeout_codes:
                    handle_timeout(queued, url, self.thread_id, output=self.output)
                elif response_code == 500:
                    textutils.output_found('ISE, ' + description + ' at: ' + conf.base_url + url, {
                        "description": description,
                        "url": conf.base_url + url,
                        "code": response_code,
                        "severity": queued.get('severity'),
                    })
                elif response_code in conf.expected_file_responses:
                    # Test if result is valid
                    is_valid_result = test_valid_result(content, is_file=True)

                    if is_valid_result:
                        # Test if behavior is ok.
                        normal_behavior = test_behavior(content)
                        textutils.output_debug('Normal behavior ' + str(normal_behavior) + ' ' + str(response_code))
                    else:
                        normal_behavior = True

                    # Reset the behavior error flag when normal behavior is restored
                    if normal_behavior and database.behavior_error:
                        textutils.output_info('Normal behavior seems to be restored.')
                        database.behavior_error = False

                    if is_valid_result and not normal_behavior:
                        # Looks like the new behavior is now the norm. It's a false positive.
                        # Additionally, we report a behavior change to the user at this point.
                        if not database.behavior_error:
                            textutils.output_info('Behavior change detected! Results may '
                                                  'be incomplete or tachyon may never exit.')
                            textutils.output_debug('Chances taken: ' + str(queued.get('behavior_chances', 0)))
                            textutils.output_debug(queued.get('url'))
                            database.behavior_error = True

                    # If we find a valid result but the behavior buffer is not full, we give the
                    # url a chance and increase its chance count, treating the behavior test as
                    # inconclusive, since an incomplete behavior buffer could yield false positives.
                    # Additionally, if the fetch queue is empty and we're still not in global behavior
                    # error, we consider all the remaining hits valid, as they already had their chance.
                    elif is_valid_result and len(database.behavioral_buffer) < conf.behavior_queue_size \
                            and not database.behavior_error and database.fetch_queue.qsize() != 0:
                        if not queued.get('behavior_chances'):
                            queued['behavior_chances'] = 1
                        else:
                            queued['behavior_chances'] += 1

                        if queued['behavior_chances'] < conf.max_behavior_tries:
                            textutils.output_debug('Chance left to target, re-queuing')
                            database.fetch_queue.put(queued)
                    elif is_valid_result:
                        # Make sure we base our next analysis on that positive hit
                        reset_behavior_database()

                        if len(content) == 0:
                            textutils.output_found('Empty ' + description + ' at: ' + conf.base_url + url, {
                                "description": "Empty " + description,
                                "url": conf.base_url + url,
                                "code": response_code,
                                "severity": 'info',
                            })
                        else:
                            textutils.output_found(description + ' at: ' + conf.base_url + url, {
                                "description": description,
                                "url": conf.base_url + url,
                                "code": response_code,
                                "severity": queued.get('severity'),
                            })
                    elif match_string and re.search(re.escape(match_string), content, re.I):
                        textutils.output_found("String-Matched " + description + ' at: ' + conf.base_url + url, {
                            "description": description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "string": match_string,
                            "severity": queued.get('severity'),
                        })

                elif response_code in conf.redirect_codes:
                    if queued.get('handle_redirect', True):
                        location = headers.get('location')
                        if location:
                            handle_redirects(queued, location)

                # Stats
                if response_code not in conf.timeout_codes:
                    stats.update_processed_items()
                    compute_request_time(start_time, end_time)

                # Mark item as processed
                database.fetch_queue.task_done()
            except Empty:
                continue
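The match_string branch above relies on re.escape so that the needle is matched literally rather than as a regex. A standalone illustration (the content and needle here are made up):

import re

content = "Index of /backup - Apache Server"
match_string = "index of /"

# re.escape neutralizes any regex metacharacters in match_string;
# re.I makes the search case-insensitive, as in the worker above.
if re.search(re.escape(match_string), content, re.I):
    print("String-Matched")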
Ejemplo n.º 45
0
def execute():
    """ This plugin process the hostname to generate host and filenames relatives to it """
    target = conf.target_host

    # Strip characters to derive the names a human would likely use for the host:
    # host.host.host.com -> hosthosthost.com, host.com, hostcom, host, /host.ext
    # We don't test for domain.dom/domain since "cp * ./sitename" is unlikely to happen (questionable)
    added = 0

    # http://oksala.org -> oksala.org
    target = target.replace('http://', '')
    target = target.replace('https://', '')

    # Remove subpath
    first_slash = target.find('/')
    if first_slash > 0:
        target = target[0:first_slash]

    target = target.replace('/', '')
    new_target = conf.path_template.copy()
    new_target['url'] = target
    new_target['description'] = "HostProcessor generated filename"
    database.files.append(new_target)
    textutils.output_debug(" - HostProcessor Plugin added: " + str(new_target))
    added += 1

    # www.oksala.org -> oksala.org
    target = target.replace('www.', '')
    new_target = conf.path_template.copy()
    new_target['url'] = target
    new_target['description'] = "HostProcessor generated filename"
    database.files.append(new_target)
    textutils.output_debug(" - HostProcessor Plugin added: " + str(new_target))
    added += 1

    # oksala.org -> oksala
    dom_pos = target.rfind('.')
    nodom_target = target[0:dom_pos]
    new_target = conf.path_template.copy()
    new_target['url'] = nodom_target
    new_target['description'] = "HostProcessor generated filename"
    database.files.append(new_target)
    textutils.output_debug(" - HostProcessor Plugin added: " + str(new_target))
    added += 1

    # shortdom (blabla.ok.ok.test.com -> test)
    new_target = conf.path_template.copy()
    dom_pos = target.rfind('.')
    if dom_pos > 0:
        nodom_target = target[0:dom_pos]
        start_pos = nodom_target.rfind('.')
        if start_pos > 0:
            short_dom = nodom_target[start_pos + 1:]
        else:
            short_dom = nodom_target

        new_target['url'] = short_dom
        new_target['description'] = "HostProcessor generated filename"
        database.files.append(new_target)
        textutils.output_debug(" - HostProcessor Plugin added: " +
                               str(new_target))
        added += 1

        new_target = new_target.copy()
        new_target['url'] = short_dom + 'admin'
        database.files.append(new_target)
        textutils.output_debug(" - HostProcessor Plugin added: " +
                               str(new_target))
        added += 1

        new_target = new_target.copy()
        new_target['url'] = short_dom + '-admin'
        database.files.append(new_target)
        textutils.output_debug(" - HostProcessor Plugin added: " +
                               str(new_target))
        added += 1

    # flatten subdomains
    target = target.replace('.', '')
    new_target = conf.path_template.copy()
    new_target['url'] = target
    new_target['description'] = "HostProcessor generated filename"
    database.files.append(new_target)
    textutils.output_debug(" - HostProcessor Plugin added: " + str(new_target))
    added += 1

    textutils.output_info(" - HostProcessor Plugin: added " + str(added) +
                          " new filenames")
Ejemplo n.º 46
0
    def run(self):
        while not self.kill_received:
            try:
                queued = database.fetch_queue.get(block=False)
                url = conf.target_base_path + queued.get('url')
                description = queued.get('description')
                textutils.output_debug("Testing directory: " + url + " " +
                                       str(queued))

                stats.update_stats(url)

                # Add trailing / for paths
                if not url.endswith('/') and url != '/':
                    url += '/'

                # Fetch directory
                start_time = datetime.now()
                response_code, content, headers = self.fetcher.fetch_url(
                    url,
                    conf.user_agent,
                    database.latest_successful_request_time,
                    limit_len=False)
                end_time = datetime.now()

                # Fetch '/' but don't submit it to more logging/existence tests
                if queued.get('url') == '/':
                    if queued not in database.valid_paths:
                        database.valid_paths.append(queued)

                    database.fetch_queue.task_done()
                    continue

                if response_code == 500:
                    textutils.output_debug("HIT 500 on: " + str(queued))

                # handle timeout
                if response_code in conf.timeout_codes:
                    handle_timeout(queued,
                                   url,
                                   self.thread_id,
                                   output=self.output)
                elif response_code == 404 and detect_tomcat_fake_404(content):
                    database.valid_paths.append(queued)
                    textutils.output_found(
                        'Tomcat redirect, ' + description + ' at: ' +
                        conf.base_url + url, {
                            "description": description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "special": "tomcat-redirect",
                            "severity": queued.get('severity'),
                        })
                elif response_code in conf.expected_path_responses:
                    # Compare content with generated 404 samples
                    is_valid_result = test_valid_result(content)

                    if is_valid_result:
                        # Test if behavior is ok.
                        normal_behavior = test_behavior(content)
                    else:
                        # We don't compute behavior on invalid results
                        normal_behavior = True

                    if normal_behavior and database.behavior_error:
                        textutils.output_info(
                            'Normal behavior seems to be restored.')
                        database.behavior_error = False

                    if is_valid_result and not normal_behavior:
                        # We don't declare a behavior change until the current hit has used up the
                        # maximum number of chances it can get.
                        if not database.behavior_error and queued.get(
                                'behavior_chances',
                                0) >= conf.max_behavior_tries:
                            textutils.output_info(
                                'Behavior change detected! Results may '
                                'be incomplete or tachyon may never exit.')
                            textutils.output_debug(
                                'Chances taken: ' +
                                str(queued.get('behavior_chances', 0)))
                            textutils.output_debug(queued.get('url'))
                            database.behavior_error = True

                    # If we find a valid result but the behavior buffer is not full, we give the
                    # url a chance and increase its chance count, treating the behavior test as
                    # inconclusive, since an incomplete behavior buffer could yield false positives.
                    # Additionally, if the fetch queue is empty and we're still not in global behavior
                    # error, we consider all the remaining hits valid, as they already had their chance.
                    if is_valid_result and len(database.behavioral_buffer) < conf.behavior_queue_size \
                            and not database.behavior_error and database.fetch_queue.qsize() != 0:
                        if not queued.get('behavior_chances'):
                            queued['behavior_chances'] = 1
                        else:
                            queued['behavior_chances'] += 1

                        if queued['behavior_chances'] < conf.max_behavior_tries:
                            textutils.output_debug('Time for a chance')
                            textutils.output_debug(
                                'Chance left to target ' + queued.get('url') +
                                ', re-queuing ' + ' qsize: ' +
                                str(database.fetch_queue.qsize()) +
                                ' chances: ' +
                                str(queued.get('behavior_chances')))
                            database.fetch_queue.put(queued)
                            database.fetch_queue.task_done()
                            continue
                        else:
                            textutils.output_debug(
                                'Chances count busted! ' + queued.get('url') +
                                ' qsize: ' + str(database.fetch_queue.qsize()))

                    elif response_code == 401:
                        # Output result, but don't keep the url since we can't poke in protected folder
                        textutils.output_found(
                            'Password Protected - ' + description + ' at: ' +
                            conf.base_url + url, {
                                "description": description,
                                "url": conf.base_url + url,
                                "code": response_code,
                                "severity": queued.get('severity'),
                            })
                    # At this point, we have a valid result and the behavioral buffer is full.
                    # The hit's behavior has been taken into account and the app is not in global behavior error.
                    elif is_valid_result:
                        # Add path to valid_path for future actions
                        database.valid_paths.append(queued)

                        # If we reach this point, all edge-cases should be handled and all subsequent requests
                        # should be benchmarked against this new behavior
                        reset_behavior_database()

                        if response_code == 500:
                            textutils.output_found(
                                'ISE, ' + description + ' at: ' +
                                conf.base_url + url, {
                                    "description": description,
                                    "url": conf.base_url + url,
                                    "code": response_code,
                                    "severity": queued.get('severity'),
                                })
                        elif response_code == 403:
                            textutils.output_found(
                                '*Forbidden* ' + description + ' at: ' +
                                conf.base_url + url, {
                                    "description": description,
                                    "url": conf.base_url + url,
                                    "code": response_code,
                                    "severity": queued.get('severity'),
                                })
                        else:
                            textutils.output_found(
                                description + ' at: ' + conf.base_url + url, {
                                    "description": description,
                                    "url": conf.base_url + url,
                                    "code": response_code,
                                    "severity": queued.get('severity'),
                                })

                elif response_code in conf.redirect_codes:
                    if queued.get('handle_redirect', True):
                        location = headers.get('location')
                        if location:
                            handle_redirects(queued, location)

                # Stats
                if response_code not in conf.timeout_codes:
                    stats.update_processed_items()
                    compute_request_time(start_time, end_time)

                # Mark item as processed
                database.fetch_queue.task_done()
            except Empty:
                continue
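Note the queue-consumption pattern shared by all the workers: a non-blocking get() with Empty caught, so the loop can poll kill_received between items. Reduced to its core, with the directory worker's trailing-slash handling included:

from queue import Queue, Empty

q = Queue()
q.put({'url': '/docs'})

while True:
    try:
        item = q.get(block=False)
    except Empty:
        break  # the real worker continues and re-checks kill_received instead

    url = item['url']
    if not url.endswith('/') and url != '/':
        url += '/'   # directories are always probed with a trailing slash
    print(url)       # /docs/
    q.task_done()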
Ejemplo n.º 47
0
def execute():
    """ This plugin process the hostname to generate host and filenames relatives to it """
    target = conf.target_host

    # Strip characters to derive the names a human would likely use for the host:
    # host.host.host.com -> hosthosthost.com, host.com, hostcom, host, /host.ext
    # We don't test for domain.dom/domain since "cp * ./sitename" is unlikely to happen (questionable)
    added = 0

    # http://oksala.org -> oksala.org
    target = target.replace('http://', '')
    target = target.replace('https://', '')

    # Remove subpath
    first_slash = target.find('/')
    if first_slash > 0:
        target = target[0:first_slash]

    target = target.replace('/', '')
    new_target = dict(conf.path_template)
    new_target['url'] = target
    new_target['description'] = "HostProcessor generated filename"
    if new_target not in database.files:
        database.files.append(new_target)
        textutils.output_debug(" - HostProcessor Plugin added: " + str(new_target))
        added += 1

    # www.oksala.org -> oksala.org
    target = target.replace('www.', '')
    new_target = dict(conf.path_template)
    new_target['url'] = target
    new_target['description'] = "HostProcessor generated filename"
    if new_target not in database.files:
        database.files.append(new_target)
        textutils.output_debug(" - HostProcessor Plugin added: " + str(new_target))
        added += 1

    # oksala.org -> oksala
    dom_pos = target.rfind('.')
    nodom_target = target[0:dom_pos]
    new_target = dict(conf.path_template)
    new_target['url'] = nodom_target
    new_target['description'] = "HostProcessor generated filename"
    if new_target not in database.files:
        database.files.append(new_target)
        textutils.output_debug(" - HostProcessor Plugin added: " + str(new_target))
        added += 1

    # shortdom (blabla.ok.ok.test.com -> test)
    new_target = dict(conf.path_template)
    dom_pos = target.rfind('.')
    if dom_pos > 0:
        nodom_target = target[0:dom_pos]
        start_pos = nodom_target.rfind('.')
        if start_pos > 0:
            short_dom = nodom_target[start_pos+1:]
        else:
            short_dom = nodom_target

        new_target['url'] = short_dom
        new_target['description'] = "HostProcessor generated filename"
        if new_target not in database.files:
            database.files.append(new_target)
            textutils.output_debug(" - HostProcessor Plugin added: " + str(new_target))
            added += 1

        new_target = dict(new_target)
        new_target['url'] = short_dom + 'admin'
        if new_target not in database.files:
            database.files.append(new_target)
            textutils.output_debug(" - HostProcessor Plugin added: " + str(new_target))
            added += 1

        new_target = dict(new_target)
        new_target['url'] = short_dom + '-admin'
        if new_target not in database.files:
            database.files.append(new_target)
            textutils.output_debug(" - HostProcessor Plugin added: " + str(new_target))
            added += 1

    # flatten subdomains
    target = target.replace('.', '')
    new_target = dict(conf.path_template)
    new_target['url'] = target
    new_target['description'] = "HostProcessor generated filename"
    if new_target not in database.files:
        database.files.append(new_target)
        textutils.output_debug(" - HostProcessor Plugin added: " + str(new_target))
        added += 1

    textutils.output_info(" - HostProcessor Plugin: added " + str(added) + " new filenames")
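This variant differs from Ejemplo n.º 45 mainly in the membership test before each append. The test works because the generated entries are plain dicts, which Python compares by value; a short demonstration of the dedup effect:

files = []
template = {'url': 'test', 'description': 'HostProcessor generated filename'}

for _ in range(2):
    candidate = dict(template)
    if candidate not in files:   # dict equality compares keys and values
        files.append(candidate)

print(len(files))  # 1: the second, identical entry was skipped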
Ejemplo n.º 48
0
def load_target_paths():
    """ Load the target paths in the database """
    textutils.output_info('Loading target paths')
    database.paths += loaders.load_targets('data/path.lst')
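loaders.load_targets itself is not shown in these examples. Judging by how its return value is used (a list concatenated onto database.paths), a minimal loader of that shape could look like the sketch below; the dict keys mirror conf.path_template as used elsewhere in this document, but this is an assumption, not tachyon's actual implementation:

def load_targets_sketch(filename):
    """Hypothetical wordlist loader: one path per line, '#' for comments."""
    targets = []
    with open(filename) as wordlist:
        for line in wordlist:
            line = line.strip()
            if line and not line.startswith('#'):
                targets.append({'url': line, 'description': 'wordlist entry'})
    return targets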
Ejemplo n.º 49
0
def load_target_files():
    """ Load the target files in the database """
    textutils.output_info('Loading target files')
    database.files += loaders.load_targets('data/file.lst')
Ejemplo n.º 50
0
def load_target_paths(running_path):
    """ Load the target paths in the database """
    textutils.output_info('Loading target paths')
    database.paths += loaders.load_targets(running_path + '/data/path.lst')
Ejemplo n.º 51
0
def load_target_files(running_path):
    """ Load the target files in the database """
    textutils.output_info('Loading target files')
    database.files += loaders.load_targets(running_path + '/data/file.lst')
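Adding the running_path parameter makes the wordlists relocatable: the caller anchors them to the install directory instead of the current working directory. One common way for the caller to derive it (an assumption; the source of running_path is not shown here):

import os

# Directory containing the running script, independent of the cwd
running_path = os.path.dirname(os.path.abspath(__file__))
load_target_files(running_path)  # reads <running_path>/data/file.lst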
Ejemplo n.º 52
0
    def run(self):
        while not self.kill_received:
            try:
                # Non-blocking get since we use the queue as a ring buffer
                queued = database.fetch_queue.get(block=False)
                url = conf.target_base_path + queued.get('url')
                description = queued.get('description')
                match_string = queued.get('match_string')

                textutils.output_debug("Testing: " + url + " " + str(queued))
                stats.update_stats(url)

                # Fetch the target url
                start_time = datetime.now()
                if match_string:
                    response_code, content, headers = self.fetcher.fetch_url(
                        url,
                        conf.user_agent,
                        database.latest_successful_request_time,
                        limit_len=False)
                    # Make sure we always match the string against string content
                    if not isinstance(content, str):
                        content = content.decode('utf-8', 'ignore')
                else:
                    response_code, content, headers = self.fetcher.fetch_url(
                        url, conf.user_agent,
                        database.latest_successful_request_time)
                end_time = datetime.now()

                # handle timeout
                if response_code in conf.timeout_codes:
                    handle_timeout(queued,
                                   url,
                                   self.thread_id,
                                   output=self.output)
                elif response_code == 500:
                    textutils.output_found(
                        'ISE, ' + description + ' at: ' + conf.base_url + url,
                        {
                            "description": description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "severity": queued.get('severity'),
                        })
                elif response_code in conf.expected_file_responses:
                    # Test if result is valid
                    is_valid_result = test_valid_result(content, is_file=True)

                    if is_valid_result:
                        # Test if behavior is ok.
                        normal_behavior = test_behavior(content)
                        textutils.output_debug('Normal behavior ' +
                                               str(normal_behavior) + ' ' +
                                               str(response_code))
                    else:
                        normal_behavior = True

                    # Reset the behavior error flag when normal behavior is restored
                    if normal_behavior and database.behavior_error:
                        textutils.output_info(
                            'Normal behavior seems to be restored.')
                        database.behavior_error = False

                    if is_valid_result and not normal_behavior:
                        # Looks like the new behavior is now the norm. It's a false positive.
                        # Additionally, we report a behavior change to the user at this point.
                        if not database.behavior_error:
                            textutils.output_info(
                                'Behavior change detected! Results may '
                                'be incomplete or tachyon may never exit.')
                            textutils.output_debug(
                                'Chances taken: ' +
                                str(queued.get('behavior_chances', 0)))
                            textutils.output_debug(queued.get('url'))
                            database.behavior_error = True

                    # If we find a valid result but the behavior buffer is not full, we give the
                    # url a chance and increase its chance count, treating the behavior test as
                    # inconclusive, since an incomplete behavior buffer could yield false positives.
                    # Additionally, if the fetch queue is empty and we're still not in global behavior
                    # error, we consider all the remaining hits valid, as they already had their chance.
                    elif is_valid_result and len(database.behavioral_buffer) < conf.behavior_queue_size \
                            and not database.behavior_error and database.fetch_queue.qsize() != 0:
                        if not queued.get('behavior_chances'):
                            queued['behavior_chances'] = 1
                        else:
                            queued['behavior_chances'] += 1

                        if queued['behavior_chances'] < conf.max_behavior_tries:
                            textutils.output_debug(
                                'Chance left to target, re-queuing')
                            database.fetch_queue.put(queued)
                    elif is_valid_result:
                        # Make sure we base our next analysis on that positive hit
                        reset_behavior_database()

                        if len(content) == 0:
                            textutils.output_found(
                                'Empty ' + description + ' at: ' +
                                conf.base_url + url, {
                                    "description": "Empty " + description,
                                    "url": conf.base_url + url,
                                    "code": response_code,
                                    "severity": 'info',
                                })
                        else:
                            textutils.output_found(
                                description + ' at: ' + conf.base_url + url, {
                                    "description": description,
                                    "url": conf.base_url + url,
                                    "code": response_code,
                                    "severity": queued.get('severity'),
                                })
                    elif match_string and re.search(re.escape(match_string),
                                                    content, re.I):
                        textutils.output_found(
                            "String-Matched " + description + ' at: ' +
                            conf.base_url + url, {
                                "description": description,
                                "url": conf.base_url + url,
                                "code": response_code,
                                "string": match_string,
                                "severity": queued.get('severity'),
                            })

                elif response_code in conf.redirect_codes:
                    if queued.get('handle_redirect', True):
                        location = headers.get('location')
                        if location:
                            handle_redirects(queued, location)

                # Stats
                if response_code not in conf.timeout_codes:
                    stats.update_processed_items()
                    compute_request_time(start_time, end_time)

                # Mark item as processed
                database.fetch_queue.task_done()
            except Empty:
                continue
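Every worker brackets its fetch with datetime.now() and hands the pair to compute_request_time, which feeds database.latest_successful_request_time, the adaptive timeout passed back into fetch_url. compute_request_time's internals are not shown in these examples; the measurement itself reduces to:

from datetime import datetime
import time

start_time = datetime.now()
time.sleep(0.1)  # stand-in for the actual HTTP fetch
end_time = datetime.now()

elapsed = (end_time - start_time).total_seconds()
print('request took %.3fs' % elapsed)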