Example 1
async def execute(hammertime):
    """ Generate common simple paths (a-z, 0-9) """
    path_added = 0
    file_added = 0

    if "skipAlpha" not in plugin_settings:
        for char in range(ord('a'), ord('z')+1):
            add_generated_path(chr(char))
            path_added += 1
            add_generated_file(chr(char))
            file_added += 1

    if "skipNumeric" not in plugin_settings:
        for char in range(ord('0'), ord('9')+1):
            add_generated_path(chr(char))
            path_added += 1
            add_generated_file(chr(char))
            file_added += 1

    if "skipYear" not in plugin_settings:
        for year in range(1990, date.today().year + 5):
            add_generated_path(str(year))
            path_added += 1

    textutils.output_info(' - PathGenerator Plugin: added ' + str(path_added) + ' computer generated paths.')
    textutils.output_info(' - PathGenerator Plugin: added ' + str(file_added) + ' computer generated files.')
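For reference, the same generation rules can be exercised standalone. In this minimal sketch, plugin_settings and the generated list are hypothetical stand-ins for the plugin's real settings lookup and the add_generated_path/add_generated_file helpers:

from datetime import date

plugin_settings = []  # hypothetical stand-in, e.g. ["skipYear"]
generated = []        # stands in for add_generated_path/add_generated_file

if "skipAlpha" not in plugin_settings:
    generated += [chr(c) for c in range(ord('a'), ord('z') + 1)]
if "skipNumeric" not in plugin_settings:
    generated += [chr(c) for c in range(ord('0'), ord('9') + 1)]
if "skipYear" not in plugin_settings:
    generated += [str(y) for y in range(1990, date.today().year + 5)]

print(generated)  # 'a'..'z', then '0'..'9', then '1990'..(current year + 4)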
Example 2
async def test_paths_exists(hammertime, *, recursive=False, depth_limit=2, accumulator):
    """
    Test for path existence using http codes and computed 404.
    Output is turned off for now; it would be irrelevant at this point.
    """

    check_closed(hammertime)

    path_generator = PathGenerator()
    fetcher = DirectoryFetcher(conf.base_url, hammertime, accumulator=accumulator)

    paths_to_fetch = path_generator.generate_paths(use_valid_paths=False)

    if len(paths_to_fetch) > 0:
        textutils.output_info('Probing %d paths' % len(paths_to_fetch))

    await fetcher.fetch_paths(paths_to_fetch)

    if recursive:
        recursion_depth = 0
        while recursion_depth < depth_limit:
            recursion_depth += 1
            paths_to_fetch = path_generator.generate_paths(use_valid_paths=True)
            await fetcher.fetch_paths(paths_to_fetch)

    count = len(database.valid_paths) - 1  # Removing one as it is the root path
    textutils.output_info('Found %d valid paths' % count)
Example 3
async def get_session_cookies(hammertime):
    """ Fetch the root path in a single request so aiohttp will use the returned cookies in all future requests. """
    try:
        textutils.output_info('Fetching session cookie')
        path = '/'
        await hammertime.request(conf.base_url + path)
    except RejectRequest:
        textutils.output_info('Request for website root failed.')
Example 4
async def after_response(self, entry):
    if self._has_behavior_changed(entry):
        if self._is_normal_behavior_restored(entry):
            output_info("Normal behavior seems to be restored.")
        else:
            output_info(
                "Behavior change detected! Results may be incomplete or tachyon may never exit."
            )
    self.is_behavior_normal = not entry.result.error_behavior
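The method reports only on state transitions, then records the latest observation. A standalone sketch of that edge-detection pattern, assuming _has_behavior_changed compares the stored flag against the new sample (an inference from the last line above); the error_behavior values are made up:

is_behavior_normal = True
for error_behavior in [False, True, True, False]:
    if is_behavior_normal == error_behavior:  # stored flag contradicts the new sample
        print("restored" if not error_behavior else "behavior change detected")
    is_behavior_normal = not error_behavior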
Example 5
def load_execute_file_plugins():
    """ Import and run path plugins """
    textutils.output_info('Executing ' + str(len(file.__all__)) +
                          ' file plugins')
    for plugin_name in file.__all__:
        plugin = __import__("tachyon.plugins.file." + plugin_name,
                            fromlist=[plugin_name])
        if hasattr(plugin, 'execute'):
            plugin.execute()
Example 6
async def load_execute_host_plugins(hammertime):
    """ Import and run host plugins """
    textutils.output_info('Executing ' + str(len(host.__all__)) +
                          ' host plugins')
    for plugin_name in host.__all__:
        plugin = __import__("tachyon.plugins.host." + plugin_name,
                            fromlist=[plugin_name])
        if hasattr(plugin, 'execute'):
            await plugin.execute(hammertime)
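In both loaders, the fromlist argument makes __import__ return the leaf plugin module rather than the top-level tachyon package. importlib.import_module gives the same result more directly; a minimal demonstration using a standard-library module in place of a plugin:

import importlib

plugin = importlib.import_module("json.tool")  # stands in for a plugin module
if hasattr(plugin, "main"):
    print(plugin.__name__, "exposes main()")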
Example 7
def load_cookie_file(filename):
    try:
        with open(filename, 'r') as cookie_file:
            content = cookie_file.read()
            content = content.replace('Cookie: ', '')
            content = content.replace('\n', '')
            return content
    except IOError:
        textutils.output_info('Supplied cookie file not found, will use server provided cookies')
        return None
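The stripping is easy to verify in isolation; the exported header line below is made up:

content = "Cookie: sessionid=abc123; theme=dark\n"
content = content.replace('Cookie: ', '').replace('\n', '')
print(content)  # sessionid=abc123; theme=dark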
Example 8
async def test_file_exists(hammertime):
    """ Test for file existence using http codes and computed 404 """
    fetcher = FileFetcher(conf.base_url, hammertime)
    generator = FileGenerator()
    database.valid_paths = generator.generate_files()
    textutils.output_info('Probing ' + str(len(database.valid_paths)) +
                          ' files')
    if len(database.valid_paths) > 0:
        hammertime.heuristics.add(RejectStatusCode({401, 403}))
        await fetcher.fetch_files(database.valid_paths)
Example 9
async def load_execute_host_plugins(hammertime):
    """ Import and run host plugins """
    count = len(host.__all__)
    if count == 0:
        return

    textutils.output_info('Executing %d host plugins' % count)
    for plugin_name in host.__all__:
        plugin = __import__("tachyon.plugins.host." + plugin_name, fromlist=[plugin_name])
        if hasattr(plugin, 'execute'):
            await plugin.execute(hammertime)
Example 10
def load_execute_file_plugins():
    """ Import and run path plugins """
    count = len(file.__all__)
    if count == 0:
        return

    textutils.output_info('Executing %d file plugins' % count)
    for plugin_name in file.__all__:
        plugin = __import__("tachyon.plugins.file." + plugin_name, fromlist=[plugin_name])
        if hasattr(plugin, 'execute'):
            plugin.execute()
Example 11
async def test_file_exists(hammertime, accumulator, skip_root=False):
    """ Test for file existence using http codes and computed 404 """

    check_closed(hammertime)

    fetcher = FileFetcher(conf.base_url, hammertime, accumulator=accumulator)
    generator = FileGenerator()
    files_to_fetch = generator.generate_files(skip_root=skip_root)

    count = len(files_to_fetch)

    textutils.output_info('Probing %d files' % count)
    if count > 0:
        hammertime.heuristics.add(RejectStatusCode({401, 403}))
        await fetcher.fetch_files(files_to_fetch)
Example 12
async def scan(hammertime,
               *,
               cookies=None,
               directories_only=False,
               files_only=False,
               plugins_only=False,
               **kwargs):
    if cookies is not None:
        set_cookies(hammertime, cookies)
    else:
        await get_session_cookies(hammertime)

    await load_execute_host_plugins(hammertime)
    if not plugins_only:
        if not files_only:
            await test_paths_exists(hammertime, **kwargs)
        if not directories_only:
            textutils.output_info('Generating file targets')
            load_execute_file_plugins()
            await test_file_exists(hammertime)
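The three flags gate which scan phases run. A quick trace of that gating logic with illustrative phase names (not part of the original code):

def phases(directories_only=False, files_only=False, plugins_only=False):
    run = ["host plugins"]
    if not plugins_only:
        if not files_only:
            run.append("paths")
        if not directories_only:
            run.append("files")
    return run

print(phases())                       # ['host plugins', 'paths', 'files']
print(phases(plugins_only=True))      # ['host plugins']
print(phases(directories_only=True))  # ['host plugins', 'paths']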
Example 13
async def parse_svn_entries(url, hammertime):
    description_file = 'SVN entries file'
    description_dir = "SVN entries Dir"
    target_url = url + "/.svn/entries"

    try:
        entry = await hammertime.request(target_url)
        tokens = entry.response.content.split('\n')
        if 'dir' in tokens:
            for pos, token in enumerate(tokens):
                if token == 'dir':
                    # Fetch more entries recursively
                    if tokens[pos - 1] != '':
                        if conf.allow_download:
                            textutils.output_info(
                                ' - Svn Plugin: Downloading: ' + url + '/' +
                                tokens[pos - 1] + '\r')
                        else:
                            textutils.output_found(description_dir + ' at: ' +
                                                   url + '/' + tokens[pos - 1])

                        # Parse next
                        await parse_svn_entries(url + "/" + tokens[pos - 1],
                                                hammertime)

                elif token == 'file':
                    if conf.allow_download:
                        textutils.output_info(' - Svn Plugin: Downloading: ' +
                                              url + '/' + tokens[pos - 1] +
                                              '\r')
                        # Fetch text-base file
                        path = url + "/.svn/text-base" + '/' + tokens[
                            pos - 1] + ".svn-base"
                        entry = await hammertime.request(path)
                        save_file(url + '/' + tokens[pos - 1],
                                  entry.response.content)
                    else:
                        textutils.output_found(description_file + ' at: ' +
                                               url + '/' + tokens[pos - 1])
    except (RejectRequest, StopRequest):
        pass
Example 14
async def execute(hammertime):
    """ Fetch /.svn/entries and parse for target paths """

    textutils.output_info(' - Svn Plugin: Searching for /.svn/entries')
    target_url = urljoin(conf.base_url, "/.svn/entries")
    svn_legacy = True

    try:
        entry = await hammertime.request(target_url)
        if conf.allow_download:
            textutils.output_info(
                ' - Svn Plugin: /.svn/entries found! crawling... (will download files to output/)'
            )
        else:
            textutils.output_info(
                ' - Svn Plugin: /.svn/entries found! crawling... (use -a to download files instead of printing)'
            )

        # test for version 1.7+
        target_url = urljoin(conf.base_url, "/.svn/wc.db")
        entry = await hammertime.request(target_url)

        #if response_code in conf.expected_file_responses and content:
        #    textutils.output_info(' - Svn Plugin: SVN 1.7+ detected, parsing wc.db')
        #    svn_legacy = False
        #    save_file(conf.target_base_path + '/wc.db', content)

        # Process index
        if svn_legacy:
            # parse entries
            await parse_svn_entries(conf.base_url, hammertime)
        #else:
        #  parse_svn_17_db(conf.target_base_path + '/wc.db')

        # Clean up display
        if conf.allow_download:
            textutils.output_info('')
    except (StopRequest, RejectRequest):
        textutils.output_info(' - Svn Plugin: no /.svn/entries found')
Example 15
async def test_paths_exists(hammertime, *, recursive=False, depth_limit=2):
    """
    Test for path existence using http codes and computed 404.
    Output is turned off for now; it would be irrelevant at this point.
    """

    path_generator = PathGenerator()
    fetcher = DirectoryFetcher(conf.base_url, hammertime)

    paths_to_fetch = path_generator.generate_paths(use_valid_paths=False)

    textutils.output_info('Probing %d paths' % len(paths_to_fetch))
    await fetcher.fetch_paths(paths_to_fetch)

    if recursive:
        recursion_depth = 0
        while recursion_depth < depth_limit:
            recursion_depth += 1
            paths_to_fetch = path_generator.generate_paths(
                use_valid_paths=True)
            await fetcher.fetch_paths(paths_to_fetch)
    textutils.output_info('Found ' + str(len(database.valid_paths)) +
                          ' valid paths')
Example 16
def main(*, target_host, cookie_file, json_output, max_retry_count,
         plugin_settings, proxy, user_agent, vhost, depth_limit,
         directories_only, files_only, plugins_only, recursive,
         allow_download):

    if not json_output:
        print_program_header()

    # Ensure the host is of the right format and set it in config
    parsed_url = urlparse(target_host)
    # Set conf values
    conf.target_host = parsed_url.netloc
    conf.base_url = "%s://%s" % (parsed_url.scheme, parsed_url.netloc)

    textutils.init_log(json_output)
    textutils.output_info('Starting Discovery on ' + conf.base_url)

    conf.allow_download = allow_download
    for option in plugin_settings:
        plugin, value = option.split(':', 1)
        conf.plugin_settings[plugin].append(value)

    hammertime = None
    try:
        root_path = conf.path_template.copy()
        root_path['url'] = '/'
        database.valid_paths.append(root_path)
        load_target_paths()
        load_target_files()
        conf.cookies = loaders.load_cookie_file(cookie_file)
        conf.user_agent = user_agent
        conf.proxy_url = proxy
        conf.forge_vhost = vhost
        loop = custom_event_loop()
        hammertime = loop.run_until_complete(
            configure_hammertime(cookies=conf.cookies,
                                 proxy=conf.proxy_url,
                                 retry_count=max_retry_count,
                                 user_agent=conf.user_agent,
                                 vhost=conf.forge_vhost))
        loop.run_until_complete(
            scan(hammertime,
                 cookies=conf.cookies,
                 directories_only=directories_only,
                 files_only=files_only,
                 plugins_only=plugins_only,
                 depth_limit=depth_limit,
                 recursive=recursive))

        textutils.output_info('Scan completed')

    except (KeyboardInterrupt, asyncio.CancelledError):
        textutils.output_error('Keyboard Interrupt Received')
    except OfflineHostException:
        textutils.output_error("Target host seems to be offline.")
    finally:
        if hammertime is not None:
            textutils.output_info(format_stats(hammertime.stats))
        textutils.flush()
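The plugin:value options above are split on the first colon only, so values may themselves contain colons. A standalone trace, modeling conf.plugin_settings as a defaultdict(list) (an assumption about conf; the option strings are examples):

from collections import defaultdict

plugin_settings = defaultdict(list)
for option in ["PathGenerator:skipAlpha", "Svn:url:http://example.com"]:
    plugin, value = option.split(':', 1)
    plugin_settings[plugin].append(value)

print(dict(plugin_settings))
# {'PathGenerator': ['skipAlpha'], 'Svn': ['url:http://example.com']}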
Example 17
async def stat_on_input(hammertime):
    import os
    import sys
    from datetime import datetime, timedelta

    if sys.stdin is None or not sys.stdin.readable() or not sys.stdin.isatty():
        return

    loop = asyncio.get_event_loop()
    reader = asyncio.StreamReader()
    reader_protocol = asyncio.StreamReaderProtocol(reader)

    await loop.connect_read_pipe(lambda: reader_protocol, sys.stdin)

    expiry = datetime.now()
    while True:
        await reader.readline()

        # Throttle stats printing
        if expiry < datetime.now():
            textutils.output_info(format_stats(hammertime.stats))
            expiry = datetime.now() + timedelta(seconds=2)

        if sys.stdin.seekable():
            sys.stdin.seek(-1, os.SEEK_END)
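The expiry variable implements a simple throttle: however often input arrives, the stats print at most once per two-second window. The pattern in isolation:

from datetime import datetime, timedelta

expiry = datetime.now()
for _ in range(1000):
    if expiry < datetime.now():
        print("stats printed")  # at most once per window
        expiry = datetime.now() + timedelta(seconds=2)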
Example 18
async def execute(hammertime):
    """ Fetch /robots.txt and add the disallowed paths as target """
    current_template = dict(conf.path_template)
    current_template['description'] = 'Robots.txt entry'

    target_url = urljoin(conf.base_url, "/robots.txt")

    try:
        entry = await hammertime.request(target_url)
        matches = re.findall(r'Disallow:\s*/[a-zA-Z0-9-/\r]+\n',
                             entry.response.content)

        added = 0
        for match in matches:
            # Filter out some characters; a Python 3 filter object is always
            # truthy, so join the filtered characters unconditionally
            match = ''.join(c for c in match if c not in ' *?.\n\r\t')

            # Split on ':'
            splitted = match.split(':')
            if splitted[1]:
                target_path = splitted[1]

                # Remove trailing /
                if target_path.endswith('/'):
                    target_path = target_path[:-1]

                current_template = current_template.copy()
                current_template['url'] = target_path
                database.paths.append(current_template)
                added += 1

        if added > 0:
            textutils.output_info(' - Robots Plugin: added ' + str(added) +
                                  ' base paths using /robots.txt')
        else:
            textutils.output_info(
                ' - Robots Plugin: no usable entries in /robots.txt')
    except (StopRequest, RejectRequest):
        textutils.output_info(
            ' - Robots Plugin: /robots.txt not found on target site')
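The extraction above can be traced against a small inline robots.txt; this self-contained sketch reuses the same regex and character filter:

import re

robots = "User-agent: *\nDisallow: /admin/\nDisallow: /private\n"
matches = re.findall(r'Disallow:\s*/[a-zA-Z0-9-/\r]+\n', robots)
paths = []
for match in matches:
    match = ''.join(c for c in match if c not in ' *?.\n\r\t')
    target_path = match.split(':')[1]
    if target_path.endswith('/'):
        target_path = target_path[:-1]
    paths.append(target_path)
print(paths)  # ['/admin', '/private']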
Example 19
async def execute(hammertime):
    """ Fetch sitemap.xml and add each entry as a target """

    current_template = dict(conf.path_template)
    current_template['description'] = 'sitemap.xml entry'

    target_url = urljoin(conf.base_url, "/sitemap.xml")

    try:
        entry = await hammertime.request(target_url)

        regexp = re.compile('(?im).*<url>\\s*<loc>(.*)</loc>\\s*</url>.*')
        matches = re.findall(regexp, entry.response.content)

        added = 0
        for match in matches:
            if not isinstance(match, str):
                match = match.decode('utf-8', 'ignore')
            parsed = urlparse(match)
            if parsed.path:
                new_path = parsed.path
            else:
                continue

            # Remove trailing /
            if new_path.endswith('/'):
                new_path = new_path[:-1]

            if add_path(new_path):
                added += 1

        if added > 0:
            textutils.output_info(' - SitemapXML Plugin: added %d base paths '
                                  'using /sitemap.xml' % added)
        else:
            textutils.output_info(' - SitemapXML Plugin: no usable entries '
                                  'in /sitemap.xml')
    except (StopRequest, RejectRequest):
        textutils.output_info(' - SitemapXML Plugin: /sitemap.xml not found on '
                              'target site')
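Likewise, the <loc> extraction can be traced against a small inline sitemap:

import re
from urllib.parse import urlparse

sitemap = "<urlset><url><loc>http://example.com/blog/</loc></url></urlset>"
regexp = re.compile('(?im).*<url>\\s*<loc>(.*)</loc>\\s*</url>.*')
for match in re.findall(regexp, sitemap):
    parsed = urlparse(match)
    if parsed.path:
        print(parsed.path.rstrip('/'))  # /blog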
Example 20
async def scan(hammertime, *, accumulator,
               cookies=None, directories_only=False, files_only=False, plugins_only=False,
               **kwargs):

    if cookies is not None:
        set_cookies(hammertime, cookies)
    else:
        await get_session_cookies(hammertime)

    await load_execute_host_plugins(hammertime)

    await drain(hammertime)

    if not plugins_only:
        if not directories_only:
            textutils.output_info('Generating file targets for target root')
            load_execute_file_plugins()
            await test_file_exists(hammertime, accumulator=accumulator)

        if not files_only:
            await test_paths_exists(hammertime, accumulator=accumulator, **kwargs)

            if not directories_only:
                textutils.output_info('Generating file targets')
                load_execute_file_plugins()
                await test_file_exists(hammertime, accumulator=accumulator, skip_root=True)

    check_closed(hammertime)

    validator = ReFetch(hammertime)
    if await validator.is_valid(Entry.create(conf.base_url + "/")):
        textutils.output_info("Re-validating prior results.")
        await accumulator.revalidate(validator)
    else:
        textutils.output_error("Re-validation aborted. Target no longer appears to be up.")

    check_closed(hammertime)
Example 21
async def on_request_successful(self, entry):
    if not self.has_error and entry.result.error_behavior:
        self.has_error = True
        output_info(self.MESSAGE)
Example 22
async def execute(hammertime):
    """ This plugin process the hostname to generate host and filenames relatives to it """
    target = conf.target_host

    # Strip characters to derive the names a human would likely use for the domain:
    # host.host.host.com -> hosthosthost.com, host.com, hostcom, host, /host.ext
    # We don't test for domain.dom/domain since "cp * ./sitename" is unlikely to happen (questionable)
    added = 0

    # http://oksala.org -> oksala.org
    target = target.replace('http://', '')
    target = target.replace('https://', '')

    # Remove subpath
    first_slash = target.find('/')
    if first_slash > 0:
        target = target[0:first_slash]

    target = target.replace('/', '')
    new_target = conf.path_template.copy()
    new_target['url'] = target
    new_target['description'] = "HostProcessor generated filename"
    database.files.append(new_target)
    added += 1

    # www.oksala.org -> oksala.org
    target = target.replace('www.', '')
    new_target = conf.path_template.copy()
    new_target['url'] = target
    new_target['description'] = "HostProcessor generated filename"
    database.files.append(new_target)
    added += 1

    # oksala.org -> oksala
    dom_pos = target.rfind('.')
    nodom_target = target[0:dom_pos]
    new_target = conf.path_template.copy()
    new_target['url'] = nodom_target
    new_target['description'] = "HostProcessor generated filename"
    database.files.append(new_target)
    added += 1

    # shortdom (blabla.ok.ok.test.com -> test)
    new_target = conf.path_template.copy()
    dom_pos = target.rfind('.')
    if dom_pos > 0:
        nodom_target = target[0:dom_pos]
        start_pos = nodom_target.rfind('.')
        if start_pos > 0:
            short_dom = nodom_target[start_pos + 1:]
        else:
            short_dom = nodom_target

        new_target['url'] = short_dom
        new_target['description'] = "HostProcessor generated filename"
        database.files.append(new_target)
        added += 1

        new_target = new_target.copy()
        new_target['url'] = short_dom + 'admin'
        database.files.append(new_target)
        added += 1

        new_target = new_target.copy()
        new_target['url'] = short_dom + '-admin'
        database.files.append(new_target)
        added += 1

    # flatten subdomains
    target = target.replace('.', '')
    new_target = conf.path_template.copy()
    new_target['url'] = target
    new_target['description'] = "HostProcessor generated filename"
    database.files.append(new_target)
    added += 1

    textutils.output_info(" - HostProcessor Plugin: added " + str(added) +
                          " new filenames")
Example 23
def main(*, target_host, cookie_file, json_output, max_retry_count, plugin_settings, proxy, user_agent, vhost,
         depth_limit, directories_only, files_only, plugins_only, recursive, allow_download, confirmation_factor,
         har_output_dir):

    output_manager = textutils.init_log(json_output)
    output_manager.output_header()

    # Ensure the host is of the right format and set it in config
    parsed_url = urlparse(target_host)
    if not parsed_url.scheme:
        parsed_url = urlparse("http://%s" % target_host)

    if not parsed_url.netloc:
        output_manager.output_error("Invald URL provided.")
        return

    # Set conf values
    conf.target_host = parsed_url.netloc
    conf.base_url = "%s://%s" % (parsed_url.scheme, parsed_url.netloc)

    hammertime = None
    accumulator = ResultAccumulator(output_manager=output_manager)

    output_manager.output_info('Starting Discovery on ' + conf.base_url)

    conf.allow_download = allow_download
    for option in plugin_settings:
        plugin, value = option.split(':', 1)
        conf.plugin_settings[plugin].append(value)

    try:
        root_path = conf.path_template.copy()
        root_path['url'] = '/'
        database.valid_paths.append(root_path)
        load_target_paths()
        load_target_files()
        conf.cookies = loaders.load_cookie_file(cookie_file)
        conf.user_agent = user_agent
        conf.proxy_url = proxy
        conf.forge_vhost = vhost
        loop = custom_event_loop()
        hammertime = loop.run_until_complete(
            configure_hammertime(cookies=conf.cookies, proxy=conf.proxy_url, retry_count=max_retry_count,
                                 user_agent=conf.user_agent, vhost=conf.forge_vhost,
                                 confirmation_factor=confirmation_factor,
                                 har_output_dir=har_output_dir))
        loop.create_task(stat_on_input(hammertime))
        loop.run_until_complete(scan(hammertime, accumulator=accumulator,
                                     cookies=conf.cookies, directories_only=directories_only,
                                     files_only=files_only, plugins_only=plugins_only, depth_limit=depth_limit,
                                     recursive=recursive))

        output_manager.output_info('Scan completed')

    except (KeyboardInterrupt, asyncio.CancelledError):
        output_manager.output_error('Keyboard Interrupt Received')
    except (OfflineHostException, StopRequest):
        output_manager.output_error("Target host seems to be offline.")
    except ImportError as e:
        output_manager.output_error("Additional module is required for the requested options: %s" % e)
    finally:
        if hammertime is not None:
            textutils.output_info(format_stats(hammertime.stats))

        output_manager.flush()
Example 24
def load_target_paths():
    """ Load the target paths in the database """
    textutils.output_info('Loading target paths')
    database.paths += loaders.load_json_resource('paths')
Example 25
def load_target_files():
    """ Load the target files in the database """
    textutils.output_info('Loading target files')
    database.files += loaders.load_json_resource('files')