Example #1
0
def parse_svn_entries(url):
    description_file = 'SVN entries file'
    description_dir = 'SVN entries Dir'
    target_url = url + "/.svn/entries"
    fetcher = Fetcher()

    response_code, content, headers = fetcher.fetch_url(
        target_url,
        conf.user_agent,
        conf.fetch_timeout_secs,
        limit_len=False,
        add_headers=base_headers)

    if response_code in conf.expected_file_responses and content:
        tokens = content.decode().split('\n')
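        # In the pre-1.7 SVN entries format, each entry's name appears on the
        # line just before its kind marker ('dir' or 'file'), hence tokens[pos - 1] below.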
        if 'dir' in tokens:
            for pos, token in enumerate(tokens):
                if token == 'dir':
                    # Fetch more entries recursively
                    if tokens[pos - 1] != '':
                        textutils.output_debug(' - Svn Plugin: Found dir: ' +
                                               url + '/' + tokens[pos - 1])

                        if conf.allow_download:
                            textutils.output_info(
                                ' - Svn Plugin: Downloading: ' + url + '/' +
                                tokens[pos - 1] + '\r')
                        else:
                            textutils.output_found(description_dir + ' at: ' +
                                                   url + '/' + tokens[pos - 1])

                        # Parse next
                        parse_svn_entries(url + "/" + tokens[pos - 1])

                elif token == 'file':
                    textutils.output_debug(' - Svn Plugin: Found file: ' +
                                           url + '/' + tokens[pos - 1])
                    if conf.allow_download:
                        textutils.output_info(' - Svn Plugin: Downloading: ' +
                                              url + '/' + tokens[pos - 1] +
                                              '\r')
                        # Fetch text-base file
                        path = (url + "/.svn/text-base/" +
                                tokens[pos - 1] + ".svn-base")
                        fetcher = Fetcher()
                        response_code, content, headers = fetcher.fetch_url(
                            path,
                            conf.user_agent,
                            conf.fetch_timeout_secs,
                            limit_len=False)
                        save_file(url + '/' + tokens[pos - 1], content)
                    else:
                        textutils.output_found(description_file + ' at: ' +
                                               url + '/' + tokens[pos - 1])
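
A minimal sketch, using the names from the example above, of how a plugin entry point might kick off the recursive walk; execute() and the choice of starting URL are illustrative assumptions, not the project's confirmed wiring:

def execute():
    # Hypothetical entry point: start the recursive .svn/entries walk at the
    # target root and let parse_svn_entries() descend into any discovered dirs.
    parse_svn_entries(conf.target_host)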
Example #2
0
    def run(self):
        while not self.kill_received:
            try:
                # Non-Blocking get since we use the queue as a ringbuffer
                queued = database.fetch_queue.get(False)
                url = conf.target_base_path + queued.get('url')
                description = queued.get('description')
                match_string = queued.get('match_string')

                textutils.output_debug("Testing: " + url + " " + str(queued))
                stats.update_stats(url)

                # Fetch the target url
                start_time = datetime.now()
                if match_string:
                    response_code, content, headers = self.fetcher.fetch_url(
                        url,
                        conf.user_agent,
                        database.latest_successful_request_time,
                        limit_len=False)
                else:
                    response_code, content, headers = self.fetcher.fetch_url(
                        url, conf.user_agent,
                        database.latest_successful_request_time)
                end_time = datetime.now()

                # handle timeout
                if response_code in conf.timeout_codes:
                    handle_timeout(queued,
                                   url,
                                   self.thread_id,
                                   output=self.output)
                elif response_code == 500:
                    textutils.output_found('ISE, ' + description + ' at: ' +
                                           conf.target_host + url)
                elif response_code in conf.expected_file_responses:
                    # If the CRC mismatches and we have an expected code, we found a valid link
                    if match_string and re.search(re.escape(match_string),
                                                  content, re.I):
                        textutils.output_found("String-Matched " +
                                               description + ' at: ' +
                                               conf.target_host + url)
                    elif test_valid_result(content):
                        textutils.output_found(description + ' at: ' +
                                               conf.target_host + url)

                elif response_code in conf.redirect_codes:
                    location = headers.get('location')
                    if location:
                        handle_redirects(queued, location)

                # Stats
                if response_code not in conf.timeout_codes:
                    stats.update_processed_items()
                    compute_request_time(start_time, end_time)

                # Mark item as processed
                database.fetch_queue.task_done()
            except Empty:
                continue
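
The worker above passes database.latest_successful_request_time to fetch_url() as its timeout and feeds the start/end timestamps into compute_request_time(). A minimal sketch of how such an adaptive timeout could be maintained; the update rule and the 1.5x padding are assumptions, not the project's actual helper:

def compute_request_time(start_time, end_time):
    # Hypothetical update rule: remember the slowest successful request seen so
    # far, padded a little, and use it as the timeout for subsequent fetches.
    elapsed = (end_time - start_time).total_seconds()
    database.latest_successful_request_time = max(
        database.latest_successful_request_time, elapsed * 1.5)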
Example #3
0
def parse_svn_entries(url):
    description_file = 'SVN entries file'
    description_dir = 'SVN entries Dir'
    target_url = url + "/.svn/entries"
    fetcher = Fetcher()

    response_code, content, headers = fetcher.fetch_url(target_url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False, add_headers=base_headers)
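    # fetch_url() may hand back bytes under Python 3; normalize to text so the
    # token splitting below works on either return type.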
    if not isinstance(content, str):
        content = content.decode('utf-8', 'ignore')

    if response_code in conf.expected_file_responses and content:
        tokens = content.split('\n')
        if 'dir' in tokens:
            for pos, token in enumerate(tokens):
                if token == 'dir':
                    # Fetch more entries recursively
                    if tokens[pos-1] != '':
                        textutils.output_debug(' - Svn Plugin: Found dir: ' + url + '/' + tokens[pos-1])

                        if conf.allow_download:
                            textutils.output_info(' - Svn Plugin: Downloading: ' + url + '/' + tokens[pos-1] + '\r')
                        else:
                            textutils.output_found(description_dir + ' at: ' + url + '/' + tokens[pos-1])

                        # Parse next
                        parse_svn_entries(url + "/" + tokens[pos-1])

                elif token == 'file':
                    textutils.output_debug(' - Svn Plugin: Found file: ' + url + '/' + tokens[pos-1])
                    if conf.allow_download:
                        textutils.output_info(' - Svn Plugin: Downloading: ' + url + '/' + tokens[pos-1] + '\r')
                        # Fetch text-base file
                        path = url + "/.svn/text-base" + '/' + tokens[pos-1] + ".svn-base"
                        fetcher = Fetcher()
                        response_code, content, headers = fetcher.fetch_url(path, conf.user_agent,
                                                                            conf.fetch_timeout_secs, limit_len=False)
                        save_file(url + '/' + tokens[pos-1], content)
                    else:
                        textutils.output_found(description_file + ' at: ' + url + '/' + tokens[pos-1])
Example #4
0
def parse_svn_entries(url):
    description_file = "SVN entries file at"
    description_dir = "SVN entries Dir at"
    target_url = url + "/.svn/entries"
    fetcher = Fetcher()
    response_code, content, headers = fetcher.fetch_url(
        target_url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False
    )

    if response_code in (200, 302) and content:
        tokens = content.split("\n")
        if "dir" in tokens:
            for pos, token in enumerate(tokens):
                if token == "dir":
                    # Fetch more entries recursively
                    if tokens[pos - 1] != "":
                        textutils.output_debug(" - Svn Plugin: Found dir: " + url + "/" + tokens[pos - 1])

                        if conf.allow_download:
                            textutils.output_info(" - Svn Plugin: Downloading: " + url + "/" + tokens[pos - 1] + "\r")
                        else:
                            textutils.output_found(description_dir + " at: " + url + "/" + tokens[pos - 1])

                        # Parse next
                        parse_svn_entries(url + "/" + tokens[pos - 1])

                elif token == "file":
                    textutils.output_debug(" - Svn Plugin: Found file: " + url + "/" + tokens[pos - 1])
                    if conf.allow_download:
                        textutils.output_info(" - Svn Plugin: Downloading: " + url + "/" + tokens[pos - 1] + "\r")
                        # Fetch text-base file
                        path = url + "/.svn/text-base" + "/" + tokens[pos - 1] + ".svn-base"
                        fetcher = Fetcher()
                        response_code, content, headers = fetcher.fetch_url(
                            path, conf.user_agent, conf.fetch_timeout_secs, limit_len=False
                        )
                        save_file(url + "/" + tokens[pos - 1], content)
                    else:
                        textutils.output_found(description_file + " at: " + url + "/" + tokens[pos - 1])
Example #5
0
    def run(self):
        while not self.kill_received:
            try:
                # Non-Blocking get since we use the queue as a ringbuffer
                queued = database.fetch_queue.get(False)
                url = conf.target_base_path + queued.get("url")
                description = queued.get("description")
                match_string = queued.get("match_string")

                textutils.output_debug("Testing: " + url + " " + str(queued))
                stats.update_stats(url)

                # Fetch the target url
                start_time = datetime.now()
                if match_string:
                    response_code, content, headers = self.fetcher.fetch_url(
                        url, conf.user_agent, database.latest_successful_request_time, limit_len=False
                    )
                else:
                    response_code, content, headers = self.fetcher.fetch_url(
                        url, conf.user_agent, database.latest_successful_request_time
                    )
                end_time = datetime.now()

                # handle timeout
                if response_code in conf.timeout_codes:
                    handle_timeout(queued, url, self.thread_id, output=self.output)
                elif response_code == 500:
                    textutils.output_found("ISE, " + description + " at: " + conf.target_host + url)
                elif response_code in conf.expected_file_responses:
                    # If the CRC mismatches and we have an expected code, we found a valid link
                    if match_string and re.search(re.escape(match_string), content, re.I):
                        textutils.output_found("String-Matched " + description + " at: " + conf.target_host + url)
                    elif test_valid_result(content):
                        textutils.output_found(description + " at: " + conf.target_host + url)

                elif response_code in conf.redirect_codes:
                    location = headers.get("location")
                    if location:
                        handle_redirects(queued, location)

                # Stats
                if response_code not in conf.timeout_codes:
                    stats.update_processed_items()
                    compute_request_time(start_time, end_time)

                # Mark item as processed
                database.fetch_queue.task_done()
            except Empty:
                continue
Example #6
0
    def run(self):
        while not self.kill_received:
            try:
                queued = database.fetch_queue.get(False)
                url = conf.target_base_path + queued.get("url")
                description = queued.get("description")
                textutils.output_debug("Testing directory: " + url + " " + str(queued))

                stats.update_stats(url)

                # Add trailing / for paths
                if not url.endswith("/") and url != "/":
                    url += "/"

                # Fetch directory
                start_time = datetime.now()
                response_code, content, headers = self.fetcher.fetch_url(
                    url, conf.user_agent, database.latest_successful_request_time, limit_len=False
                )
                end_time = datetime.now()

                # Fetch '/' but don't submit it to more logging/existence tests
                if queued.get("url") == "/":
                    if queued not in database.valid_paths:
                        database.valid_paths.append(queued)

                    database.fetch_queue.task_done()
                    continue

                if response_code == 500:
                    textutils.output_debug("HIT 500 on: " + str(queued))

                # handle timeout
                if response_code in conf.timeout_codes:
                    handle_timeout(queued, url, self.thread_id, output=self.output)
                elif response_code == 404 and detect_tomcat_fake_404(content):
                    database.valid_paths.append(queued)
                    textutils.output_found("Tomcat redirect, " + description + " at: " + conf.target_host + url)
                elif response_code in conf.expected_path_responses:
                    # Compare content with generated 404 samples
                    is_valid_result = test_valid_result(content)

                    # Skip subfile testing if forbidden
                    if response_code == 401:
                        # Output result, but don't keep the url since we can't poke in protected folder
                        textutils.output_found("Password Protected - " + description + " at: " + conf.target_host + url)
                    elif is_valid_result:
                        # Add path to valid_path for future actions
                        database.valid_paths.append(queued)

                        if response_code == 500:
                            textutils.output_found("ISE, " + description + " at: " + conf.target_host + url)
                        elif response_code == 403:
                            textutils.output_found("*Forbidden* " + description + " at: " + conf.target_host + url)
                        else:
                            textutils.output_found(description + " at: " + conf.target_host + url)

                elif response_code in conf.redirect_codes:
                    location = headers.get("location")
                    if location:
                        handle_redirects(queued, location)

                # Stats
                if response_code not in conf.timeout_codes:
                    stats.update_processed_items()
                    compute_request_time(start_time, end_time)

                # Mark item as processed
                database.fetch_queue.task_done()
            except Empty:
                continue
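
A rough sketch of the "generated 404 samples" comparison that test_valid_result() performs in the workers above; the real helper lives elsewhere in the project, and database.bad_content_samples is an assumed name used only to illustrate the idea:

def test_valid_result(content, is_file=False):
    # Hypothetical check: a hit only counts as valid if its body matches none
    # of the fake-404 bodies sampled from the target earlier in the scan.
    # (is_file is accepted for signature compatibility but unused here.)
    for sample in database.bad_content_samples:  # assumed list of 404 bodies
        if content == sample:
            return False
    return True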
Example #7
0
    def run(self):
        while not self.kill_received:
            try:
                # Non-Blocking get since we use the queue as a ringbuffer
                queued = database.fetch_queue.get(False)
                url = conf.target_base_path + queued.get('url')
                description = queued.get('description')
                match_string = queued.get('match_string')

                textutils.output_debug("Testing: " + url + " " + str(queued))
                stats.update_stats(url)

                # Throttle if needed
                # if throttle.get_throttle() > 0:
                #     sleep(throttle.get_throttle())

                # Fetch the target url
                timeout = False
                if match_string:
                    response_code, content, headers = self.fetcher.fetch_url(url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False)
                else:
                    response_code, content, headers = self.fetcher.fetch_url(url, conf.user_agent, conf.fetch_timeout_secs)

                # handle timeout
                if response_code in conf.timeout_codes:
                    handle_timeout(queued, url, self.thread_id, output=self.output)
                    throttle.increase_throttle_delay()
                    timeout = True
                elif response_code in conf.expected_file_responses:
                    # Compare content with generated 404 samples
                    is_valid_result = test_valid_result(content)

                    # If the CRC mismatches and we have an expected code, we found a valid link
                    if is_valid_result:
                        # Content Test if match_string provided
                        if match_string and re.search(re.escape(match_string), content, re.I):
                            # Add path to valid_path for future actions
                            database.valid_paths.append(queued)
                            textutils.output_found("String-Matched " + description + ' at: ' + conf.target_host + url)
                        elif not match_string:
                            if response_code == 500:
                                textutils.output_found('ISE, ' + description + ' at: ' + conf.target_host + url)    
                            else:
                                textutils.output_found(description + ' at: ' + conf.target_host + url)
                            
                            # Add path to valid_path for future actions
                            database.valid_paths.append(queued)

                elif response_code in conf.redirect_codes:
                    location = headers.get('location')
                    if location:
                        handle_redirects(queued, location)

                # Decrease throttle delay if needed
                if not timeout:
                    throttle.decrease_throttle_delay()

                # Mark item as processed
                stats.update_processed_items()
                database.fetch_queue.task_done()
            except Empty:
                continue
Example #8
0
    def run(self):
        while not self.kill_received:
            try:
                queued = database.fetch_queue.get(False)
                url = conf.target_base_path + queued.get('url')
                description = queued.get('description')
                textutils.output_debug("Testing directory: " + url + " " + str(queued))

                stats.update_stats(url)

                # Throttle if needed
                # if throttle.get_throttle() > 0:
                #     sleep(throttle.get_throttle())

                # Add trailing / for paths
                if not url.endswith('/') and url != '/':
                    url += '/'

                # Fetch directory
                timeout = False
                response_code, content, headers = self.fetcher.fetch_url(url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False)

                # Fetch '/' but don't submit it to more logging/existence tests
                if queued.get('url') == '/':
                    if queued not in database.valid_paths:
                        database.valid_paths.append(queued)

                    database.fetch_queue.task_done()
                    continue

                if response_code == 500:
                    textutils.output_debug("HIT 500 on: " + str(queued))

                # handle timeout
                if response_code in conf.timeout_codes:
                    handle_timeout(queued, url, self.thread_id, output=self.output)
                    # increase throttle delay
                    throttle.increase_throttle_delay()
                    timeout = True
                elif response_code in conf.expected_path_responses:
                    # Compare content with generated 404 samples
                    is_valid_result = test_valid_result(content)

                    # Skip subfile testing if forbidden
                    if response_code == 401:
                        # Output result, but don't keep the url since we can't poke in protected folder
                        textutils.output_found('Password Protected - ' + description + ' at: ' + conf.target_host + url)
                    elif is_valid_result:
                        # Add path to valid_path for future actions
                        database.valid_paths.append(queued)

                        if response_code == 500:
                            textutils.output_found('ISE, ' + description + ' at: ' + conf.target_host + url)    
                        elif response_code == 403:
                            textutils.output_found('*Forbidden* ' + description + ' at: ' + conf.target_host + url)
                        else:
                            textutils.output_found(description + ' at: ' + conf.target_host + url)

                elif response_code in conf.redirect_codes:
                    location = headers.get('location')
                    if location:
                        handle_redirects(queued, location)

                # Decrease throttle delay if needed
                if not timeout:
                    throttle.decrease_throttle_delay()

                # Mark item as processed
                stats.update_processed_items()
                database.fetch_queue.task_done()
            except Empty:
                continue
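
Examples #7 and #8 back off through a shared throttle on timeouts and relax it again on successes. A small sketch of what such a throttle object could look like; the class, step and cap values are assumptions for illustration:

class Throttle:
    def __init__(self, step=0.1, max_delay=5.0):
        self.delay = 0.0          # current inter-request delay in seconds
        self.step = step          # how much to back off per timeout
        self.max_delay = max_delay

    def increase_throttle_delay(self):
        self.delay = min(self.delay + self.step, self.max_delay)

    def decrease_throttle_delay(self):
        self.delay = max(self.delay - self.step, 0.0)

    def get_throttle(self):
        return self.delay

throttle = Throttle()  # module-level instance, as the workers above expect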
Example #9
0
    def run(self):
        while not self.kill_received:
            try:
                # Non-Blocking get since we use the queue as a ringbuffer
                queued = database.fetch_queue.get(block=False)
                url = conf.target_base_path + queued.get('url')
                description = queued.get('description')
                match_string = queued.get('match_string')

                textutils.output_debug("Testing: " + url + " " + str(queued))
                stats.update_stats(url)

                # Fetch the target url
                start_time = datetime.now()
                if match_string:
                    response_code, content, headers = self.fetcher.fetch_url(
                        url,
                        conf.user_agent,
                        database.latest_successful_request_time,
                        limit_len=False)
                    # Make sure we always match string against a string content
                    if not isinstance(content, str):
                        content = content.decode('utf-8', 'ignore')
                else:
                    response_code, content, headers = self.fetcher.fetch_url(
                        url, conf.user_agent,
                        database.latest_successful_request_time)
                end_time = datetime.now()

                # handle timeout
                if response_code in conf.timeout_codes:
                    handle_timeout(queued,
                                   url,
                                   self.thread_id,
                                   output=self.output)
                elif response_code == 500:
                    textutils.output_found(
                        'ISE, ' + description + ' at: ' + conf.base_url + url,
                        {
                            "description": description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "severity": queued.get('severity'),
                        })
                elif response_code in conf.expected_file_responses:
                    # Test if result is valid
                    is_valid_result = test_valid_result(content, is_file=True)

                    if is_valid_result:
                        # Test if behavior is ok.
                        normal_behavior = test_behavior(content)
                        textutils.output_debug('Normal behavior ' +
                                               str(normal_behavior) + ' ' +
                                               str(response_code))
                    else:
                        normal_behavior = True

                    # Reset behavior chance when we detect a new state
                    if normal_behavior and database.behavior_error:
                        textutils.output_info(
                            'Normal behavior seems to be restored.')
                        database.behavior_error = False

                    if is_valid_result and not normal_behavior:
                        # Looks like the new behavior is now the norm. It's a false positive.
                        # Additionally, we report a behavior change to the user at this point.
                        if not database.behavior_error:
                            textutils.output_info(
                                'Behavior change detected! Results may '
                                'be incomplete or tachyon may never exit.')
                            textutils.output_debug(
                                'Chances taken: ' +
                                str(queued.get('behavior_chances', 0)))
                            textutils.output_debug(queued.get('url'))
                            database.behavior_error = True

                    # If we find a valid result but the behavior buffer is not full, we give a chance to the
                    # url and increase its chance count. We consider this a false behavior test.
                    # We do this since an incomplete behavior buffer could give false positives
                    # Additionally, if the fetch queue is empty and we're still not in global behavior error, we
                    # consider all the remaining hits as valid, as they are hits that were given a chance.
                    elif is_valid_result and len(database.behavioral_buffer) < conf.behavior_queue_size \
                            and not database.behavior_error and database.fetch_queue.qsize() != 0:
                        if not queued.get('behavior_chances'):
                            queued['behavior_chances'] = 1
                        else:
                            queued['behavior_chances'] += 1

                        if queued['behavior_chances'] < conf.max_behavior_tries:
                            textutils.output_debug(
                                'Chance left to target, re-queuing')
                            database.fetch_queue.put(queued)
                    elif is_valid_result:
                        # Make sure we base our next analysis on that positive hit
                        reset_behavior_database()

                        if len(content) == 0:
                            textutils.output_found(
                                'Empty ' + description + ' at: ' +
                                conf.base_url + url, {
                                    "description": "Empty " + description,
                                    "url": conf.base_url + url,
                                    "code": response_code,
                                    "severity": 'info',
                                })
                        else:
                            textutils.output_found(
                                description + ' at: ' + conf.base_url + url, {
                                    "description": description,
                                    "url": conf.base_url + url,
                                    "code": response_code,
                                    "severity": queued.get('severity'),
                                })
                    elif match_string and re.search(re.escape(match_string),
                                                    content, re.I):
                        textutils.output_found(
                            "String-Matched " + description + ' at: ' +
                            conf.base_url + url, {
                                "description": description,
                                "url": conf.base_url + url,
                                "code": response_code,
                                "string": match_string,
                                "severity": queued.get('severity'),
                            })

                elif response_code in conf.redirect_codes:
                    if queued.get('handle_redirect', True):
                        location = headers.get('location')
                        if location:
                            handle_redirects(queued, location)

                # Stats
                if response_code not in conf.timeout_codes:
                    stats.update_processed_items()
                    compute_request_time(start_time, end_time)

                # Mark item as processed
                database.fetch_queue.task_done()
            except Empty:
                continue
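
The behavior logic above relies on test_behavior() filling database.behavioral_buffer (both names appear in the worker). A hedged sketch of one way that check could work; the "all recent bodies identical" heuristic is an assumption, not the project's verbatim rule:

def test_behavior(content):
    # Keep a rolling window of recent response bodies; once the window is full
    # and every entry is identical, the target is probably answering everything
    # with the same catch-all page, i.e. behavior is no longer "normal".
    database.behavioral_buffer.append(content)
    if len(database.behavioral_buffer) > conf.behavior_queue_size:
        database.behavioral_buffer.pop(0)
    if len(database.behavioral_buffer) < conf.behavior_queue_size:
        return True  # not enough samples yet to judge
    return len(set(database.behavioral_buffer)) > 1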
Example #10
0
    def run(self):
        while not self.kill_received:
            try:
                queued = database.fetch_queue.get(block=False)
                url = conf.target_base_path + queued.get('url')
                description = queued.get('description')
                textutils.output_debug("Testing directory: " + url + " " +
                                       str(queued))

                stats.update_stats(url)

                # Add trailing / for paths
                if not url.endswith('/') and url != '/':
                    url += '/'

                # Fetch directory
                start_time = datetime.now()
                response_code, content, headers = self.fetcher.fetch_url(
                    url,
                    conf.user_agent,
                    database.latest_successful_request_time,
                    limit_len=False)
                end_time = datetime.now()

                # Fetch '/' but don't submit it to more logging/existence tests
                if queued.get('url') == '/':
                    if queued not in database.valid_paths:
                        database.valid_paths.append(queued)

                    database.fetch_queue.task_done()
                    continue

                if response_code == 500:
                    textutils.output_debug("HIT 500 on: " + str(queued))

                # handle timeout
                if response_code in conf.timeout_codes:
                    handle_timeout(queued,
                                   url,
                                   self.thread_id,
                                   output=self.output)
                elif response_code == 404 and detect_tomcat_fake_404(content):
                    database.valid_paths.append(queued)
                    textutils.output_found(
                        'Tomcat redirect, ' + description + ' at: ' +
                        conf.base_url + url, {
                            "description": description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "special": "tomcat-redirect",
                            "severity": queued.get('severity'),
                        })
                elif response_code in conf.expected_path_responses:
                    # Compare content with generated 404 samples
                    is_valid_result = test_valid_result(content)

                    if is_valid_result:
                        # Test if behavior is ok.
                        normal_behavior = test_behavior(content)
                    else:
                        # We don't compute behavior on invalid results
                        normal_behavior = True

                    if normal_behavior and database.behavior_error:
                        textutils.output_info(
                            'Normal behavior seems to be restored.')
                        database.behavior_error = False

                    if is_valid_result and not normal_behavior:
                        # We don't declare a behavior change until the current hit has exceeded the maximum
                        # chances it can get.
                        if not database.behavior_error and queued.get(
                                'behavior_chances',
                                0) >= conf.max_behavior_tries:
                            textutils.output_info(
                                'Behavior change detected! Results may '
                                'be incomplete or tachyon may never exit.')
                            textutils.output_debug(
                                'Chances taken: ' +
                                str(queued.get('behavior_chances', 0)))
                            textutils.output_debug(queued.get('url'))
                            database.behavior_error = True

                    # If we find a valid result but the behavior buffer is not full, we give a chance to the
                    # url and increase its chance count. We consider this a false behavior test.
                    # We do this since an incomplete behavior buffer could give false positives
                    # Additionally, if the fetch queue is empty and we're still not in global behavior error, we
                    # consider all the remaining hits as valid, as they are hits that were given a chance.
                    if is_valid_result and len(database.behavioral_buffer) < conf.behavior_queue_size \
                            and not database.behavior_error and database.fetch_queue.qsize() != 0:
                        if not queued.get('behavior_chances'):
                            queued['behavior_chances'] = 1
                        else:
                            queued['behavior_chances'] += 1

                        if queued['behavior_chances'] < conf.max_behavior_tries:
                            textutils.output_debug('Time for a chance')
                            textutils.output_debug(
                                'Chance left to target ' + queued.get('url') +
                                ', re-queuing ' + ' qsize: ' +
                                str(database.fetch_queue.qsize()) +
                                ' chances: ' +
                                str(queued.get('behavior_chances')))
                            database.fetch_queue.put(queued)
                            database.fetch_queue.task_done()
                            continue
                        else:
                            textutils.output_debug(
                                'Chances count busted! ' + queued.get('url') +
                                ' qsize: ' + str(database.fetch_queue.qsize()))

                    elif response_code == 401:
                        # Output result, but don't keep the url since we can't poke in protected folder
                        textutils.output_found(
                            'Password Protected - ' + description + ' at: ' +
                            conf.base_url + url, {
                                "description": description,
                                "url": conf.base_url + url,
                                "code": response_code,
                                "severity": queued.get('severity'),
                            })
                    # At this point, we have a valid result and the behavioral buffer is full.
                    # The behavior of the hit has been taken into account and the app is not in global behavior error
                    elif is_valid_result:
                        # Add path to valid_path for future actions
                        database.valid_paths.append(queued)

                        # If we reach this point, all edge-cases should be handled and all subsequent requests
                        # should be benchmarked against this new behavior
                        reset_behavior_database()

                        if response_code == 500:
                            textutils.output_found(
                                'ISE, ' + description + ' at: ' +
                                conf.base_url + url, {
                                    "description": description,
                                    "url": conf.base_url + url,
                                    "code": response_code,
                                    "severity": queued.get('severity'),
                                })
                        elif response_code == 403:
                            textutils.output_found(
                                '*Forbidden* ' + description + ' at: ' +
                                conf.base_url + url, {
                                    "description": description,
                                    "url": conf.base_url + url,
                                    "code": response_code,
                                    "severity": queued.get('severity'),
                                })
                        else:
                            textutils.output_found(
                                description + ' at: ' + conf.base_url + url, {
                                    "description": description,
                                    "url": conf.base_url + url,
                                    "code": response_code,
                                    "severity": queued.get('severity'),
                                })

                elif response_code in conf.redirect_codes:
                    if queued.get('handle_redirect', True):
                        location = headers.get('location')
                        if location:
                            handle_redirects(queued, location)

                # Stats
                if response_code not in conf.timeout_codes:
                    stats.update_processed_items()
                    compute_request_time(start_time, end_time)

                # Mark item as processed
                database.fetch_queue.task_done()
            except Empty:
                continue
Example #11
0
    def run(self):
        while not self.kill_received:
            try:
                queued = database.fetch_queue.get(False)
                url = conf.target_base_path + queued.get('url')
                description = queued.get('description')
                textutils.output_debug("Testing directory: " + url + " " +
                                       str(queued))

                stats.update_stats(url)

                # Add trailing / for paths
                if not url.endswith('/') and url != '/':
                    url += '/'

                # Fetch directory
                start_time = datetime.now()
                response_code, content, headers = self.fetcher.fetch_url(
                    url,
                    conf.user_agent,
                    database.latest_successful_request_time,
                    limit_len=False)
                end_time = datetime.now()

                # Fetch '/' but don't submit it to more logging/existence tests
                if queued.get('url') == '/':
                    if queued not in database.valid_paths:
                        database.valid_paths.append(queued)

                    database.fetch_queue.task_done()
                    continue

                if response_code == 500:
                    textutils.output_debug("HIT 500 on: " + str(queued))

                # handle timeout
                if response_code in conf.timeout_codes:
                    handle_timeout(queued,
                                   url,
                                   self.thread_id,
                                   output=self.output)
                elif response_code == 404 and detect_tomcat_fake_404(content):
                    database.valid_paths.append(queued)
                    textutils.output_found('Tomcat redirect, ' + description +
                                           ' at: ' + conf.target_host + url)
                elif response_code in conf.expected_path_responses:
                    # Compare content with generated 404 samples
                    is_valid_result = test_valid_result(content)

                    # Skip subfile testing if forbidden
                    if response_code == 401:
                        # Output result, but don't keep the url since we can't poke in protected folder
                        textutils.output_found('Password Protected - ' +
                                               description + ' at: ' +
                                               conf.target_host + url)
                    elif is_valid_result:
                        # Add path to valid_path for future actions
                        database.valid_paths.append(queued)

                        if response_code == 500:
                            textutils.output_found('ISE, ' + description +
                                                   ' at: ' + conf.target_host +
                                                   url)
                        elif response_code == 403:
                            textutils.output_found('*Forbidden* ' +
                                                   description + ' at: ' +
                                                   conf.target_host + url)
                        else:
                            textutils.output_found(description + ' at: ' +
                                                   conf.target_host + url)

                elif response_code in conf.redirect_codes:
                    location = headers.get('location')
                    if location:
                        handle_redirects(queued, location)

                # Stats
                if response_code not in conf.timeout_codes:
                    stats.update_processed_items()
                    compute_request_time(start_time, end_time)

                # Mark item as processed
                database.fetch_queue.task_done()
            except Empty:
                continue
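
Several of the directory workers special-case 404 responses through detect_tomcat_fake_404(). A sketch of the idea, assuming a simple marker-string check; the marker and the helper body are illustrative guesses, not the project's implementation:

def detect_tomcat_fake_404(content):
    # Hypothetical check: default Tomcat error pages name the container in the
    # body even when the status code is 404, which betrays a real application path.
    if isinstance(content, bytes):
        return b'Apache Tomcat' in content
    return 'Apache Tomcat' in content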
Example #12
0
    def run(self):
        while not self.kill_received:
            try:
                # Non-Blocking get since we use the queue as a ringbuffer
                queued = database.fetch_queue.get(block=False)
                url = conf.target_base_path + queued.get('url')
                description = queued.get('description')
                match_string = queued.get('match_string')

                textutils.output_debug("Testing: " + url + " " + str(queued))
                stats.update_stats(url)

                # Fetch the target url
                start_time = datetime.now()
                if match_string:
                    response_code, content, headers = self.fetcher.fetch_url(url, conf.user_agent, database.latest_successful_request_time, limit_len=False)
                    # Make sure we always match string against a string content
                    if not isinstance(content, str):
                        content = content.decode('utf-8', 'ignore')
                else:
                    response_code, content, headers = self.fetcher.fetch_url(url, conf.user_agent, database.latest_successful_request_time)
                end_time = datetime.now()

                # handle timeout
                if response_code in conf.timeout_codes:
                    handle_timeout(queued, url, self.thread_id, output=self.output)
                elif response_code == 500:
                    textutils.output_found('ISE, ' + description + ' at: ' + conf.base_url + url, {
                        "description": description,
                        "url": conf.base_url + url,
                        "code": response_code,
                        "severity": queued.get('severity'),
                    })
                elif response_code in conf.expected_file_responses:
                    # Test if result is valid
                    is_valid_result = test_valid_result(content, is_file=True)

                    if is_valid_result:
                        # Test if behavior is ok.
                        normal_behavior = test_behavior(content)
                        textutils.output_debug('Normal behavior ' + str(normal_behavior) + ' ' + str(response_code))
                    else:
                        normal_behavior = True

                    # Reset behavior chance when we detect a new state
                    if normal_behavior and database.behavior_error:
                        textutils.output_info('Normal behavior seems to be restored.')
                        database.behavior_error = False

                    if is_valid_result and not normal_behavior:
                        # Looks like the new behavior is now the norm. It's a false positive.
                        # Additionally, we report a behavior change to the user at this point.
                        if not database.behavior_error:
                            textutils.output_info('Behavior change detected! Results may '
                                                  'be incomplete or tachyon may never exit.')
                            textutils.output_debug('Chances taken: ' + str(queued.get('behavior_chances', 0)))
                            textutils.output_debug(queued.get('url'))
                            database.behavior_error = True

                    # If we find a valid result but the behavior buffer is not full, we give a chance to the
                    # url and increase its chance count. We consider this a false behavior test.
                    # We do this since an incomplete behavior buffer could give false positives
                    # Additionally, if the fetch queue is empty and we're still not in global behavior error, we
                    # consider all the remaining hits as valid, as they are hits that were given a chance.
                    elif is_valid_result and len(database.behavioral_buffer) < conf.behavior_queue_size \
                            and not database.behavior_error and database.fetch_queue.qsize() != 0:
                        if not queued.get('behavior_chances'):
                            queued['behavior_chances'] = 1
                        else:
                            queued['behavior_chances'] += 1

                        if queued['behavior_chances'] < conf.max_behavior_tries:
                            textutils.output_debug('Chance left to target, re-queuing')
                            database.fetch_queue.put(queued)
                    elif is_valid_result:
                        # Make sure we base our next analysis on that positive hit
                        reset_behavior_database()

                        if len(content) == 0:
                            textutils.output_found('Empty ' + description + ' at: ' + conf.base_url + url, {
                                "description": "Empty " + description,
                                "url": conf.base_url + url,
                                "code": response_code,
                                "severity": 'info',
                            })
                        else:
                            textutils.output_found(description + ' at: ' + conf.base_url + url, {
                                "description": description,
                                "url": conf.base_url + url,
                                "code": response_code,
                                "severity": queued.get('severity'),
                            })
                    elif match_string and re.search(re.escape(match_string), content, re.I):
                        textutils.output_found("String-Matched " + description + ' at: ' + conf.base_url + url, {
                            "description": description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "string": match_string,
                            "severity": queued.get('severity'),
                        })

                elif response_code in conf.redirect_codes:
                    if queued.get('handle_redirect', True):
                        location = headers.get('location')
                        if location:
                            handle_redirects(queued, location)

                # Stats
                if response_code not in conf.timeout_codes:
                    stats.update_processed_items()
                    compute_request_time(start_time, end_time)

                # Mark item as processed
                database.fetch_queue.task_done()
            except Empty:
                continue
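
After accepting a hit, the workers call reset_behavior_database() so later responses are benchmarked against post-hit behavior. A one-line sketch under the assumption that the behavioral buffer is a plain list:

def reset_behavior_database():
    # Drop the accumulated behavior samples; subsequent responses rebuild them.
    del database.behavioral_buffer[:]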
Example #13
0
    def run(self):
        while not self.kill_received:
            try:
                queued = database.fetch_queue.get(block=False)
                url = conf.target_base_path + queued.get('url')
                description = queued.get('description')
                textutils.output_debug("Testing directory: " + url + " " + str(queued))

                stats.update_stats(url)

                # Add trailing / for paths
                if not url.endswith('/') and url != '/':
                    url += '/'

                # Fetch directory
                start_time = datetime.now()
                response_code, content, headers = self.fetcher.fetch_url(url, conf.user_agent, database.latest_successful_request_time, limit_len=False)
                end_time = datetime.now()

                # Fetch '/' but don't submit it to more logging/existence tests
                if queued.get('url') == '/':
                    if queued not in database.valid_paths:
                        database.valid_paths.append(queued)

                    database.fetch_queue.task_done()
                    continue

                if response_code == 500:
                    textutils.output_debug("HIT 500 on: " + str(queued))

                # handle timeout
                if response_code in conf.timeout_codes:
                    handle_timeout(queued, url, self.thread_id, output=self.output)
                elif response_code == 404 and detect_tomcat_fake_404(content):
                    database.valid_paths.append(queued)
                    textutils.output_found('Tomcat redirect, ' + description + ' at: ' + conf.base_url + url, {
                        "description": description,
                        "url": conf.base_url + url,
                        "code": response_code,
                        "special": "tomcat-redirect",
                        "severity": queued.get('severity'),
                    })
                elif response_code in conf.expected_path_responses:
                    # Compare content with generated 404 samples
                    is_valid_result = test_valid_result(content)

                    if is_valid_result:
                        # Test if behavior is ok.
                        normal_behavior = test_behavior(content)
                    else:
                        # We don't compute behavior on invalid results
                        normal_behavior = True

                    if normal_behavior and database.behavior_error:
                        textutils.output_info('Normal behavior seems to be restored.')
                        database.behavior_error = False

                    if is_valid_result and not normal_behavior:
                        # We don't declare a behavior change until the current hit has exceeded the maximum
                        # chances it can get.
                        if not database.behavior_error and queued.get('behavior_chances', 0) >= conf.max_behavior_tries:
                            textutils.output_info('Behavior change detected! Results may '
                                                  'be incomplete or tachyon may never exit.')
                            textutils.output_debug('Chances taken: ' + str(queued.get('behavior_chances', 0)))
                            textutils.output_debug(queued.get('url'))
                            database.behavior_error = True

                    # If we find a valid result but the behavior buffer is not full, we give a chance to the
                    # url and increase its chance count. We consider this a false behavior test.
                    # We do this since an incomplete behavior buffer could give false positives
                    # Additionally, if the fetch queue is empty and we're still not in global behavior error, we
                    # consider all the remaining hits as valid, as they are hits that were given a chance.
                    if is_valid_result and len(database.behavioral_buffer) < conf.behavior_queue_size \
                            and not database.behavior_error and database.fetch_queue.qsize() != 0:
                        if not queued.get('behavior_chances'):
                            queued['behavior_chances'] = 1
                        else:
                            queued['behavior_chances'] += 1

                        if queued['behavior_chances'] < conf.max_behavior_tries:
                            textutils.output_debug('Time for a chance')
                            textutils.output_debug('Chance left to target ' + queued.get('url') + ', re-queuing ' +
                                                   ' qsize: ' + str(database.fetch_queue.qsize()) +
                                                   ' chances: ' + str(queued.get('behavior_chances')))
                            database.fetch_queue.put(queued)
                            database.fetch_queue.task_done()
                            continue
                        else:
                            textutils.output_debug('Chances count busted! ' + queued.get('url') +
                                                   ' qsize: ' + str(database.fetch_queue.qsize()))

                    elif response_code == 401:
                        # Output result, but don't keep the url since we can't poke in protected folder
                        textutils.output_found('Password Protected - ' + description + ' at: ' + conf.base_url + url, {
                            "description": description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "severity": queued.get('severity'),
                        })
                    # At this point, we have a valid result and the behavioral buffer is full.
                    # The behavior of the hit has been taken into account and the app is not in global behavior error
                    elif is_valid_result:
                        # Add path to valid_path for future actions
                        database.valid_paths.append(queued)

                        # If we reach this point, all edge-cases should be handled and all subsequent requests
                        # should be benchmarked against this new behavior
                        reset_behavior_database()

                        if response_code == 500:
                            textutils.output_found('ISE, ' + description + ' at: ' + conf.base_url + url, {
                                "description": description,
                                "url": conf.base_url + url,
                                "code": response_code,
                                "severity": queued.get('severity'),
                            })
                        elif response_code == 403:
                            textutils.output_found('*Forbidden* ' + description + ' at: ' + conf.base_url + url, {
                                "description": description,
                                "url": conf.base_url + url,
                                "code": response_code,
                                "severity": queued.get('severity'),
                            })
                        else:
                            textutils.output_found(description + ' at: ' + conf.base_url + url, {
                                "description": description,
                                "url": conf.base_url + url,
                                "code": response_code,
                                "severity": queued.get('severity'),
                            })

                elif response_code in conf.redirect_codes:
                    if queued.get('handle_redirect', True):
                        location = headers.get('location')
                        if location:
                            handle_redirects(queued, location)

                # Stats
                if response_code not in conf.timeout_codes:
                    stats.update_processed_items()
                    compute_request_time(start_time, end_time)

                # Mark item as processed
                database.fetch_queue.task_done()
            except Empty:
                continue
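
The tail of this worker gives an anomalous hit a limited number of extra attempts by pushing it back on the shared fetch queue before giving up. Below is a minimal, self-contained sketch of that re-queue-with-chances idea; the function name, MAX_BEHAVIOR_TRIES, and the queue argument are illustrative stand-ins for conf.max_behavior_tries and database.fetch_queue, not part of the original code.

from queue import Queue

MAX_BEHAVIOR_TRIES = 3  # assumed stand-in for conf.max_behavior_tries

def requeue_with_chances(item, fetch_queue):
    """Give an anomalous hit a bounded number of extra attempts."""
    # Track how many chances this item has already used
    item['behavior_chances'] = item.get('behavior_chances', 0) + 1
    if item['behavior_chances'] < MAX_BEHAVIOR_TRIES:
        fetch_queue.put(item)  # back on the ring buffer for a later pass
        return True            # caller should task_done() and continue
    return False               # chances exhausted, treat the result as final

# Example usage with a plain Queue standing in for database.fetch_queue
queue = Queue()
requeue_with_chances({'url': '/admin/'}, queue)
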
Example #14
0
    def run(self):
        while not self.kill_received:
            try:
                # Non-Blocking get since we use the queue as a ringbuffer
                queued = database.fetch_queue.get(False)
                url = conf.target_base_path + queued.get('url')
                description = queued.get('description')
                match_string = queued.get('match_string')

                textutils.output_debug("Testing: " + url + " " + str(queued))
                stats.update_stats(url)

                # Throttle if needed
                #if throttle.get_throttle() > 0:
                #   sleep(throttle.get_throttle())

                # Fetch the target url
                timeout = False
                if match_string:
                    response_code, content, headers = self.fetcher.fetch_url(
                        url,
                        conf.user_agent,
                        conf.fetch_timeout_secs,
                        limit_len=False)
                else:
                    response_code, content, headers = self.fetcher.fetch_url(
                        url, conf.user_agent, conf.fetch_timeout_secs)

                # handle timeout
                if response_code in conf.timeout_codes:
                    handle_timeout(queued,
                                   url,
                                   self.thread_id,
                                   output=self.output)
                    throttle.increase_throttle_delay()
                    timeout = True
                elif response_code in conf.expected_file_responses:
                    # Compare content with generated 404 samples
                    is_valid_result = test_valid_result(content)

                    # If the CRC mismatches and we have an expected code, we found a valid link
                    if is_valid_result:
                        # Content Test if match_string provided
                        if match_string and re.search(re.escape(match_string),
                                                      content, re.I):
                            # Add path to valid_path for future actions
                            database.valid_paths.append(queued)
                            textutils.output_found("String-Matched " +
                                                   description + ' at: ' +
                                                   conf.target_host + url)
                        elif not match_string:
                            if response_code == 500:
                                textutils.output_found('ISE, ' + description +
                                                       ' at: ' +
                                                       conf.target_host + url)
                            else:
                                textutils.output_found(description + ' at: ' +
                                                       conf.target_host + url)

                            # Add path to valid_path for future actions
                            database.valid_paths.append(queued)

                elif response_code in conf.redirect_codes:
                    location = headers.get('location')
                    if location:
                        handle_redirects(queued, location)

                # Decrease throttle delay if needed
                if not timeout:
                    throttle.decrease_throttle_delay()

                # Mark item as processed
                stats.update_processed_items()
                database.fetch_queue.task_done()
            except Empty:
                continue
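
Example #14 decides whether a 200-style response is a real hit by comparing the body against pre-generated 404 samples (test_valid_result). The sketch below shows one plausible shape for such a soft-404 check; looks_like_real_hit and known_404_hashes are assumed names, and hashing whole bodies is only one possible comparison strategy, not necessarily the original one.

import hashlib

def looks_like_real_hit(content, known_404_hashes):
    """Return True only if the body does not match any sampled fake-404 page."""
    # content is expected to be the raw response body as bytes
    digest = hashlib.md5(content).hexdigest()
    return digest not in known_404_hashes

A scanner would typically populate known_404_hashes at startup by requesting a few random, nonexistent paths and hashing whatever the server returns for them.
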
Example #15
0
    def run(self):
        while not self.kill_received:
            try:
                queued = database.fetch_queue.get(False)
                url = conf.target_base_path + queued.get('url')
                description = queued.get('description')
                textutils.output_debug("Testing directory: " + url + " " +
                                       str(queued))

                stats.update_stats(url)

                # Throttle if needed
                # if throttle.get_throttle() > 0:
                #  sleep(throttle.get_throttle())

                # Add trailing / for paths
                if url[-1] != '/' and url != '/':
                    url += '/'

                # Fetch directory
                timeout = False
                response_code, content, headers = self.fetcher.fetch_url(
                    url,
                    conf.user_agent,
                    conf.fetch_timeout_secs,
                    limit_len=False)

                # Fetch '/' but don't submit it to more logging/existence tests
                if queued.get('url') == '/':
                    if queued not in database.valid_paths:
                        database.valid_paths.append(queued)

                    database.fetch_queue.task_done()
                    continue

                if response_code == 500:
                    textutils.output_debug("HIT 500 on: " + str(queued))

                # handle timeout
                if response_code in conf.timeout_codes:
                    handle_timeout(queued,
                                   url,
                                   self.thread_id,
                                   output=self.output)
                    # increase throttle delay
                    throttle.increase_throttle_delay()
                    timeout = True
                elif response_code in conf.expected_path_responses:
                    # Compare content with generated 404 samples
                    is_valid_result = test_valid_result(content)

                    # Skip subfile testing if forbidden
                    if response_code == 401:
                        # Output result, but don't keep the url since we can't poke inside the protected folder
                        textutils.output_found('Password Protected - ' +
                                               description + ' at: ' +
                                               conf.target_host + url)
                    elif is_valid_result:
                        # Add path to valid_path for future actions
                        database.valid_paths.append(queued)

                        if response_code == 500:
                            textutils.output_found('ISE, ' + description +
                                                   ' at: ' + conf.target_host +
                                                   url)
                        elif response_code == 403:
                            textutils.output_found('*Forbidden* ' +
                                                   description + ' at: ' +
                                                   conf.target_host + url)
                        else:
                            textutils.output_found(description + ' at: ' +
                                                   conf.target_host + url)

                elif response_code in conf.redirect_codes:
                    location = headers.get('location')
                    if location:
                        handle_redirects(queued, location)

                # Decrease throttle delay if needed
                if not timeout:
                    throttle.decrease_throttle_delay()

                # Mark item as processed
                stats.update_processed_items()
                database.fetch_queue.task_done()
            except Empty:
                continue
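
Both of the last two workers adjust a shared throttle: the delay grows after a timeout and shrinks after any other response. The sketch below shows one plausible shape for such a throttle object; the step size and ceiling are assumptions and do not come from the original throttle module.

class Throttle:
    """Adaptive delay: back off on timeouts, speed back up on successes."""

    def __init__(self, step=0.05, max_delay=2.0):
        self.delay = 0.0
        self.step = step
        self.max_delay = max_delay

    def increase_throttle_delay(self):
        # Called when a request times out
        self.delay = min(self.delay + self.step, self.max_delay)

    def decrease_throttle_delay(self):
        # Called after any non-timeout response
        self.delay = max(self.delay - self.step, 0.0)

    def get_throttle(self):
        return self.delay
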