def run(self):
    """Worker loop: pull candidate file URLs off the fetch queue and probe them."""
    while not self.kill_received:
        try:
            # Non-blocking get; the queue doubles as a ring buffer.
            entry = database.fetch_queue.get(False)
            target_url = conf.target_base_path + entry.get('url')
            desc = entry.get('description')
            pattern = entry.get('match_string')
            textutils.output_debug("Testing: " + target_url + " " + str(entry))
            stats.update_stats(target_url)

            # When a match string is supplied we need the full body,
            # so disable the fetcher's length cap for that case only.
            fetch_kwargs = {'limit_len': False} if pattern else {}
            start_time = datetime.now()
            response_code, content, headers = self.fetcher.fetch_url(
                target_url, conf.user_agent,
                database.latest_successful_request_time, **fetch_kwargs)
            end_time = datetime.now()

            if response_code in conf.timeout_codes:
                # Timed out: hand the entry back to the retry machinery.
                handle_timeout(entry, target_url, self.thread_id, output=self.output)
            elif response_code == 500:
                textutils.output_found('ISE, ' + desc + ' at: ' + conf.target_host + target_url)
            elif response_code in conf.expected_file_responses:
                # A hit is reported either on a literal string match or when
                # the content differs from the known crafted-404 samples.
                if pattern and re.search(re.escape(pattern), content, re.I):
                    textutils.output_found("String-Matched " + desc + ' at: ' + conf.target_host + target_url)
                elif test_valid_result(content):
                    textutils.output_found(desc + ' at: ' + conf.target_host + target_url)
            elif response_code in conf.redirect_codes:
                location = headers.get('location')
                if location:
                    handle_redirects(entry, location)

            # Only non-timeout requests feed the processed/timing stats.
            if response_code not in conf.timeout_codes:
                stats.update_processed_items()
                compute_request_time(start_time, end_time)

            # Mark item as processed.
            database.fetch_queue.task_done()
        except Empty:
            continue
def run(self):
    """Worker loop: fetch known-bogus URLs to sample the server's crafted 404s.

    Each sampled body is truncated to conf.file_sample_len, stripped of
    trailing whitespace and stored in database.crafted_404s so later
    workers can tell real content from soft-404 pages.
    """
    while not self.kill_received:
        try:
            # Non-Blocking get since we use the queue as a ringbuffer
            queued = database.fetch_queue.get(False)
            url = conf.target_base_path + queued.get('url')
            textutils.output_debug("Fetching crafted 404: " + str(url))
            stats.update_stats(url)

            # Fetch the target url
            start_time = datetime.now()
            response_code, content, headers = self.fetcher.fetch_url(
                url, conf.user_agent, database.latest_successful_request_time)
            end_time = datetime.now()

            # Handle fetch timeouts by re-adding the url back to the global fetch queue
            # if timeout count is under max timeout count.
            # BUGFIX: was "response_code is 0 or response_code is 500" — identity
            # comparison on ints is unreliable (CPython interns only small ints,
            # so "is 500" is effectively always False); use equality instead.
            if response_code in (0, 500):
                handle_timeout(queued, url, self.thread_id, output=self.output)
            elif response_code in conf.expected_file_responses:
                # The server responded with whatever code but 404 or invalid stuff (500). We take a sample
                if not content:
                    crafted_404 = ""  # empty file, still a forged 404
                elif len(content) < conf.file_sample_len:
                    crafted_404 = content[0:len(content) - 1]
                else:
                    crafted_404 = content[0:conf.file_sample_len - 1]

                # Edge case control: normalize trailing newlines/spaces
                crafted_404 = crafted_404.strip('\r\n ')
                database.crafted_404s.append(crafted_404)

                # Exception case for root 404, since it's used as a model for other directories
                textutils.output_debug("Computed and saved a sample 404 for: " + str(queued) + ": " + crafted_404)
            elif response_code in conf.redirect_codes:
                location = headers.get('location')
                if location:
                    handle_redirects(queued, location)

            # Stats (timeouts are excluded from processed-item accounting)
            if response_code not in conf.timeout_codes:
                stats.update_processed_items()
                compute_request_time(start_time, end_time)

            # Dequeue item
            database.fetch_queue.task_done()
        except Empty:
            continue

    textutils.output_debug("Thread #" + str(self.thread_id) + " killed.")
def run(self):
    """Worker loop: sample crafted-404 responses, normalizing content to str.

    Fetches known-bogus URLs and stores a truncated, stripped sample of
    each response body in database.crafted_404s for later comparison
    against real responses.
    """
    while not self.kill_received:
        try:
            # Non-Blocking get since we use the queue as a ringbuffer
            queued = database.fetch_queue.get(False)
            url = conf.target_base_path + queued.get('url')
            textutils.output_debug("Fetching crafted 404: " + str(url))
            stats.update_stats(url)

            # Fetch the target url
            start_time = datetime.now()
            response_code, content, headers = self.fetcher.fetch_url(
                url, conf.user_agent, database.latest_successful_request_time)
            end_time = datetime.now()

            # Handle fetch timeouts by re-adding the url back to the global fetch queue
            # if timeout count is under max timeout count.
            # BUGFIX: was "response_code is 0 or response_code is 500" — identity
            # comparison on ints is unreliable (CPython interns only small ints,
            # so "is 500" is effectively always False); use equality instead.
            if response_code in (0, 500):
                handle_timeout(queued, url, self.thread_id, output=self.output)
            elif response_code in conf.expected_file_responses:
                # Encoding edge case: must be a string to be compared to the 404 fingerprint
                if not isinstance(content, str):
                    content = content.decode('utf-8', 'ignore')

                # The server responded with whatever code but 404 or invalid stuff (500). We take a sample
                if not content:
                    crafted_404 = ""  # empty file, still a forged 404
                elif len(content) < conf.file_sample_len:
                    crafted_404 = content[0:len(content) - 1]
                else:
                    crafted_404 = content[0:conf.file_sample_len - 1]

                crafted_404 = crafted_404.strip('\r\n ')
                database.crafted_404s.append(crafted_404)

                # Exception case for root 404, since it's used as a model for other directories
                textutils.output_debug("Computed and saved a sample 404 for: " + str(queued) + ": " + crafted_404)
            elif response_code in conf.redirect_codes:
                # Only follow the redirect when the entry allows it.
                if queued.get('handle_redirect', True):
                    location = headers.get('location')
                    if location:
                        handle_redirects(queued, location)

            # Stats (timeouts excluded)
            if response_code not in conf.timeout_codes:
                stats.update_processed_items()
                compute_request_time(start_time, end_time)

            # Dequeue item
            database.fetch_queue.task_done()
        except Empty:
            continue

    textutils.output_debug("Thread #" + str(self.thread_id) + " killed.")
def run(self):
    """Consume the fetch queue and probe each candidate file URL."""
    while not self.kill_received:
        try:
            # Non-blocking read: the queue acts as a ring buffer.
            work_item = database.fetch_queue.get(False)
            request_url = conf.target_base_path + work_item.get("url")
            label = work_item.get("description")
            needle = work_item.get("match_string")
            textutils.output_debug("Testing: " + request_url + " " + str(work_item))
            stats.update_stats(request_url)

            # Time the fetch; a match string requires the whole body,
            # so the length cap is lifted in that branch.
            start_time = datetime.now()
            if needle:
                response_code, content, headers = self.fetcher.fetch_url(
                    request_url, conf.user_agent,
                    database.latest_successful_request_time, limit_len=False)
            else:
                response_code, content, headers = self.fetcher.fetch_url(
                    request_url, conf.user_agent,
                    database.latest_successful_request_time)
            end_time = datetime.now()

            site_url = conf.target_host + request_url
            if response_code in conf.timeout_codes:
                handle_timeout(work_item, request_url, self.thread_id, output=self.output)
            elif response_code == 500:
                textutils.output_found("ISE, " + label + " at: " + site_url)
            elif response_code in conf.expected_file_responses:
                # Literal match takes priority, then the crafted-404 comparison.
                if needle and re.search(re.escape(needle), content, re.I):
                    textutils.output_found("String-Matched " + label + " at: " + site_url)
                elif test_valid_result(content):
                    textutils.output_found(label + " at: " + site_url)
            elif response_code in conf.redirect_codes:
                location = headers.get("location")
                if location:
                    handle_redirects(work_item, location)

            # Timeouts don't count toward processed items or timing stats.
            if response_code not in conf.timeout_codes:
                stats.update_processed_items()
                compute_request_time(start_time, end_time)

            database.fetch_queue.task_done()
        except Empty:
            continue
def run(self):
    """Worker loop: sample crafted 404s with adaptive request throttling.

    Timeouts grow the global throttle delay; successful requests shrink
    it. Sampled bodies are truncated and stored in database.crafted_404s.
    """
    while not self.kill_received:
        try:
            # Non-Blocking get since we use the queue as a ringbuffer
            queued = database.fetch_queue.get(False)
            url = conf.target_base_path + queued.get('url')
            textutils.output_debug("Fetching crafted 404: " + str(url))
            stats.update_stats(url)

            # Fetch the target url
            timeout = False
            response_code, content, headers = self.fetcher.fetch_url(url, conf.user_agent, conf.fetch_timeout_secs)

            # Handle fetch timeouts by re-adding the url back to the global fetch queue
            # if timeout count is under max timeout count.
            # BUGFIX: was "response_code is 0 or response_code is 500" — identity
            # comparison on ints is unreliable (CPython interns only small ints,
            # so "is 500" is effectively always False); use equality instead.
            if response_code in (0, 500):
                handle_timeout(queued, url, self.thread_id, output=self.output)
                # increase throttle delay
                throttle.increase_throttle_delay()
                timeout = True
            elif response_code in conf.expected_file_responses:
                # The server responded with whatever code but 404 or invalid stuff (500). We take a sample
                if len(content) < conf.file_sample_len:
                    crafted_404 = content[0:len(content) - 1]
                else:
                    crafted_404 = content[0:conf.file_sample_len - 1]
                database.crafted_404s.append(crafted_404)
                # Exception case for root 404, since it's used as a model for other directories
                textutils.output_debug("Computed and saved a sample 404 for: " + str(queued) + ": " + crafted_404)
            elif response_code in conf.redirect_codes:
                location = headers.get('location')
                if location:
                    handle_redirects(queued, location)

            # Decrease throttle delay if needed
            if not timeout:
                throttle.decrease_throttle_delay()

            # Dequeue item
            stats.update_processed_items()
            database.fetch_queue.task_done()
        except Empty:
            continue

    textutils.output_debug("Thread #" + str(self.thread_id) + " killed.")
def run(self):
    """Consume the fetch queue and probe each candidate directory path."""
    while not self.kill_received:
        try:
            task = database.fetch_queue.get(False)
            dir_url = conf.target_base_path + task.get("url")
            label = task.get("description")
            textutils.output_debug("Testing directory: " + dir_url + " " + str(task))
            stats.update_stats(dir_url)

            # Directories are always probed with a trailing slash.
            if dir_url != "/" and not dir_url.endswith("/"):
                dir_url += "/"

            # Full fetch (no length cap) so the content checks see everything.
            start_time = datetime.now()
            response_code, content, headers = self.fetcher.fetch_url(
                dir_url, conf.user_agent,
                database.latest_successful_request_time, limit_len=False)
            end_time = datetime.now()

            # The root path is only recorded as valid, never analyzed further.
            if task.get("url") == "/":
                if task not in database.valid_paths:
                    database.valid_paths.append(task)
                database.fetch_queue.task_done()
                continue

            if response_code == 500:
                textutils.output_debug("HIT 500 on: " + str(task))

            if response_code in conf.timeout_codes:
                handle_timeout(task, dir_url, self.thread_id, output=self.output)
            elif response_code == 404 and detect_tomcat_fake_404(content):
                # Tomcat's fake 404 page still reveals that the path exists.
                database.valid_paths.append(task)
                textutils.output_found("Tomcat redirect, " + label + " at: " + conf.target_host + dir_url)
            elif response_code in conf.expected_path_responses:
                # Does the body differ from our sampled crafted-404 pages?
                looks_real = test_valid_result(content)
                if response_code == 401:
                    # Report the protected folder, but don't descend into it.
                    textutils.output_found("Password Protected - " + label + " at: " + conf.target_host + dir_url)
                elif looks_real:
                    database.valid_paths.append(task)
                    if response_code == 500:
                        textutils.output_found("ISE, " + label + " at: " + conf.target_host + dir_url)
                    elif response_code == 403:
                        textutils.output_found("*Forbidden* " + label + " at: " + conf.target_host + dir_url)
                    else:
                        textutils.output_found(label + " at: " + conf.target_host + dir_url)
            elif response_code in conf.redirect_codes:
                location = headers.get("location")
                if location:
                    handle_redirects(task, location)

            if response_code not in conf.timeout_codes:
                stats.update_processed_items()
                compute_request_time(start_time, end_time)

            database.fetch_queue.task_done()
        except Empty:
            continue
def run(self):
    """Worker loop: test queued file URLs, adapting the throttle on timeouts.

    Pulls entries from the fetch queue, fetches each candidate file and
    reports string-matched or otherwise valid hits. The global throttle
    delay grows on timeouts and shrinks on successful requests.
    """
    while not self.kill_received:
        try:
            # Non-Blocking get since we use the queue as a ringbuffer
            queued = database.fetch_queue.get(False)
            url = conf.target_base_path + queued.get('url')
            description = queued.get('description')
            match_string = queued.get('match_string')
            textutils.output_debug("Testing: " + url + " " + str(queued))
            stats.update_stats(url)

            # Fetch the target url; a match string needs the full body,
            # so the length cap is disabled for that case.
            # (Removed long-dead commented-out throttle/sleep code here.)
            timeout = False
            if match_string:
                response_code, content, headers = self.fetcher.fetch_url(
                    url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False)
            else:
                response_code, content, headers = self.fetcher.fetch_url(
                    url, conf.user_agent, conf.fetch_timeout_secs)

            # handle timeout: re-queue (up to the retry cap) and back off
            if response_code in conf.timeout_codes:
                handle_timeout(queued, url, self.thread_id, output=self.output)
                throttle.increase_throttle_delay()
                timeout = True
            elif response_code in conf.expected_file_responses:
                # Compare content with generated 404 samples
                is_valid_result = test_valid_result(content)
                # If the CRC missmatch, and we have an expected code, we found a valid link
                if is_valid_result:
                    # Content Test if match_string provided
                    if match_string and re.search(re.escape(match_string), content, re.I):
                        # Add path to valid_path for future actions
                        database.valid_paths.append(queued)
                        textutils.output_found("String-Matched " + description + ' at: ' + conf.target_host + url)
                    elif not match_string:
                        if response_code == 500:
                            textutils.output_found('ISE, ' + description + ' at: ' + conf.target_host + url)
                        else:
                            textutils.output_found(description + ' at: ' + conf.target_host + url)
                        # Add path to valid_path for future actions
                        database.valid_paths.append(queued)
            elif response_code in conf.redirect_codes:
                location = headers.get('location')
                if location:
                    handle_redirects(queued, location)

            # Decrease throttle delay if needed (successful request)
            if not timeout:
                throttle.decrease_throttle_delay()

            # Mark item as processed
            stats.update_processed_items()
            database.fetch_queue.task_done()
        except Empty:
            continue
def run(self):
    """Worker loop: test queued directory paths, adapting the throttle on timeouts.

    Fetches each candidate directory, compares the body against crafted-404
    samples and reports valid, protected or forbidden paths.
    """
    while not self.kill_received:
        try:
            queued = database.fetch_queue.get(False)
            url = conf.target_base_path + queued.get('url')
            description = queued.get('description')
            textutils.output_debug("Testing directory: " + url + " " + str(queued))
            stats.update_stats(url)

            # Add trailing / for paths.
            # BUGFIX: the original tested "url[:-1] != '/'" (the string with its
            # last char dropped), which appended an extra '/' to urls already
            # ending in '/'. Test the actual suffix, as the sibling workers do.
            # (Removed long-dead commented-out throttle/sleep code here.)
            if not url.endswith('/') and url != '/':
                url += '/'

            # Fetch directory (full content, no length cap)
            timeout = False
            response_code, content, headers = self.fetcher.fetch_url(
                url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False)

            # Fetch '/' but don't submit it to more logging/existance tests
            if queued.get('url') == '/':
                if queued not in database.valid_paths:
                    database.valid_paths.append(queued)
                database.fetch_queue.task_done()
                continue

            if response_code == 500:
                textutils.output_debug("HIT 500 on: " + str(queued))

            # handle timeout
            if response_code in conf.timeout_codes:
                handle_timeout(queued, url, self.thread_id, output=self.output)
                # increase throttle delay
                throttle.increase_throttle_delay()
                timeout = True
            elif response_code in conf.expected_path_responses:
                # Compare content with generated 404 samples
                is_valid_result = test_valid_result(content)
                # Skip subfile testing if forbidden
                if response_code == 401:
                    # Output result, but don't keep the url since we can't poke in protected folder
                    textutils.output_found('Password Protected - ' + description + ' at: ' + conf.target_host + url)
                elif is_valid_result:
                    # Add path to valid_path for future actions
                    database.valid_paths.append(queued)
                    if response_code == 500:
                        textutils.output_found('ISE, ' + description + ' at: ' + conf.target_host + url)
                    elif response_code == 403:
                        textutils.output_found('*Forbidden* ' + description + ' at: ' + conf.target_host + url)
                    else:
                        textutils.output_found(description + ' at: ' + conf.target_host + url)
            elif response_code in conf.redirect_codes:
                location = headers.get('location')
                if location:
                    handle_redirects(queued, location)

            # Decrease throttle delay if needed
            if not timeout:
                throttle.decrease_throttle_delay()

            # Mark item as processed
            stats.update_processed_items()
            database.fetch_queue.task_done()
        except Empty:
            continue
def run(self):
    """Worker loop: test queued file URLs against the target.

    Pulls entries from the global fetch queue, fetches each candidate
    file and reports hits (valid content, string match, ISE), while
    tracking server behavior changes to suppress false positives.
    """
    while not self.kill_received:
        try:
            # Non-Blocking get since we use the queue as a ringbuffer
            queued = database.fetch_queue.get(block=False)
            url = conf.target_base_path + queued.get('url')
            description = queued.get('description')
            match_string = queued.get('match_string')
            textutils.output_debug("Testing: " + url + " " + str(queued))
            stats.update_stats(url)

            # Fetch the target url, timing the request for the timing stats.
            start_time = datetime.now()
            if match_string:
                # Full body needed for string matching: disable the length cap.
                response_code, content, headers = self.fetcher.fetch_url(
                    url, conf.user_agent, database.latest_successful_request_time, limit_len=False)
                # Make sure we always match string against a string content
                if not isinstance(content, str):
                    content = content.decode('utf-8', 'ignore')
            else:
                response_code, content, headers = self.fetcher.fetch_url(
                    url, conf.user_agent, database.latest_successful_request_time)
            end_time = datetime.now()

            # handle timeout
            if response_code in conf.timeout_codes:
                handle_timeout(queued, url, self.thread_id, output=self.output)
            elif response_code == 500:
                textutils.output_found('ISE, ' + description + ' at: ' + conf.base_url + url, {
                    "description": description,
                    "url": conf.base_url + url,
                    "code": response_code,
                    "severity": queued.get('severity'),
                })
            elif response_code in conf.expected_file_responses:
                # Test if result is valid (does not look like a crafted 404)
                is_valid_result = test_valid_result(content, is_file=True)

                if is_valid_result:
                    # Test if behavior is ok.
                    normal_behavior = test_behavior(content)
                    textutils.output_debug('Normal behavior ' + str(normal_behavior) + ' ' + str(response_code))
                else:
                    # Behavior is not computed on invalid results.
                    normal_behavior = True

                # Reset behavior chance when we detect a new state
                if normal_behavior and database.behavior_error:
                    textutils.output_info('Normal behavior seems to be restored.')
                    database.behavior_error = False

                if is_valid_result and not normal_behavior:
                    # Looks like the new behavior is now the norm. It's a false positive.
                    # Additionally, we report a behavior change to the user at this point.
                    if not database.behavior_error:
                        textutils.output_info('Behavior change detected! Results may '
                                              'be incomplete or tachyon may never exit.')
                        textutils.output_debug('Chances taken: ' + str(queued.get('behavior_chances', 0)))
                        textutils.output_debug(queued.get('url'))
                        database.behavior_error = True
                # If we find a valid result but the behavior buffer is not full, we give a chance to the
                # url and increase it's chances count. We consider this a false behavior test.
                # We do this since an incomplete behavior buffer could give false positives
                # Additionally, if the fetch queue is empty and we're still not in global behavior error, we
                # consider all the remaining hits as valid, as they are hits that were given a chance.
                elif is_valid_result and len(database.behavioral_buffer) < conf.behavior_queue_size \
                        and not database.behavior_error and database.fetch_queue.qsize() != 0:
                    if not queued.get('behavior_chances'):
                        queued['behavior_chances'] = 1
                    else:
                        queued['behavior_chances'] += 1
                    if queued['behavior_chances'] < conf.max_behavior_tries:
                        textutils.output_debug('Chance left to target, re-queuing')
                        database.fetch_queue.put(queued)
                elif is_valid_result:
                    # Make sure we base our next analysis on that positive hit
                    reset_behavior_database()
                    if len(content) == 0:
                        # Valid but empty file: reported at 'info' severity only.
                        textutils.output_found('Empty ' + description + ' at: ' + conf.base_url + url, {
                            "description": "Empty " + description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "severity": 'info',
                        })
                    else:
                        textutils.output_found(description + ' at: ' + conf.base_url + url, {
                            "description": description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "severity": queued.get('severity'),
                        })
                elif match_string and re.search(re.escape(match_string), content, re.I):
                    # NOTE(review): only reached when the content failed the 404-sample
                    # test; a literal string match still counts as a find.
                    textutils.output_found("String-Matched " + description + ' at: ' + conf.base_url + url, {
                        "description": description,
                        "url": conf.base_url + url,
                        "code": response_code,
                        "string": match_string,
                        "severity": queued.get('severity'),
                    })
            elif response_code in conf.redirect_codes:
                # Redirects are followed only when the entry allows it.
                if queued.get('handle_redirect', True):
                    location = headers.get('location')
                    if location:
                        handle_redirects(queued, location)

            # Stats: timeouts are excluded from processed-item accounting.
            if response_code not in conf.timeout_codes:
                stats.update_processed_items()
                compute_request_time(start_time, end_time)

            # Mark item as processed
            database.fetch_queue.task_done()
        except Empty:
            continue
def run(self):
    """Worker loop: test queued directory paths against the target.

    Fetches each candidate directory, compares the response against the
    crafted-404 samples and reports valid, protected or forbidden paths,
    with behavior-change tracking to limit false positives.
    """
    while not self.kill_received:
        try:
            # Non-blocking get; the queue is used as a ringbuffer.
            queued = database.fetch_queue.get(block=False)
            url = conf.target_base_path + queued.get('url')
            description = queued.get('description')
            textutils.output_debug("Testing directory: " + url + " " + str(queued))
            stats.update_stats(url)

            # Add trailing / for paths
            if not url.endswith('/') and url != '/':
                url += '/'

            # Fetch directory (full content, no length cap)
            start_time = datetime.now()
            response_code, content, headers = self.fetcher.fetch_url(
                url, conf.user_agent, database.latest_successful_request_time, limit_len=False)
            end_time = datetime.now()

            # Fetch '/' but don't submit it to more logging/existance tests
            if queued.get('url') == '/':
                if queued not in database.valid_paths:
                    database.valid_paths.append(queued)
                database.fetch_queue.task_done()
                continue

            if response_code == 500:
                textutils.output_debug("HIT 500 on: " + str(queued))

            # handle timeout
            if response_code in conf.timeout_codes:
                handle_timeout(queued, url, self.thread_id, output=self.output)
            elif response_code == 404 and detect_tomcat_fake_404(content):
                # Tomcat serves a recognizable fake 404; treat it as a hit.
                database.valid_paths.append(queued)
                textutils.output_found('Tomcat redirect, ' + description + ' at: ' + conf.base_url + url, {
                    "description": description,
                    "url": conf.base_url + url,
                    "code": response_code,
                    "special": "tomcat-redirect",
                    "severity": queued.get('severity'),
                })
            elif response_code in conf.expected_path_responses:
                # Compare content with generated 404 samples
                is_valid_result = test_valid_result(content)

                if is_valid_result:
                    # Test if behavior is ok.
                    normal_behavior = test_behavior(content)
                else:
                    # We don't compute behavior on invalid results
                    normal_behavior = True

                # Reset the global behavior-error flag on a normal response.
                if normal_behavior and database.behavior_error:
                    textutils.output_info('Normal behavior seems to be restored.')
                    database.behavior_error = False

                if is_valid_result and not normal_behavior:
                    # We don't declare a behavior change until the current hit has exceeded the maximum
                    # chances it can get.
                    if not database.behavior_error and queued.get('behavior_chances', 0) >= conf.max_behavior_tries:
                        textutils.output_info('Behavior change detected! Results may '
                                              'be incomplete or tachyon may never exit.')
                        textutils.output_debug('Chances taken: ' + str(queued.get('behavior_chances', 0)))
                        textutils.output_debug(queued.get('url'))
                        database.behavior_error = True

                # If we find a valid result but the behavior buffer is not full, we give a chance to the
                # url and increase it's chances count. We consider this a false behavior test.
                # We do this since an incomplete behavior buffer could give false positives
                # Additionally, if the fetch queue is empty and we're still not in global behavior error, we
                # consider all the remaining hits as valid, as they are hits that were given a chance.
                if is_valid_result and len(database.behavioral_buffer) < conf.behavior_queue_size \
                        and not database.behavior_error and database.fetch_queue.qsize() != 0:
                    if not queued.get('behavior_chances'):
                        queued['behavior_chances'] = 1
                    else:
                        queued['behavior_chances'] += 1
                    if queued['behavior_chances'] < conf.max_behavior_tries:
                        textutils.output_debug('Time for a chance')
                        textutils.output_debug('Chance left to target ' + queued.get('url') + ', re-queuing ' +
                                               ' qsize: ' + str(database.fetch_queue.qsize()) +
                                               ' chances: ' + str(queued.get('behavior_chances')))
                        database.fetch_queue.put(queued)
                        database.fetch_queue.task_done()
                        continue
                    else:
                        textutils.output_debug('Chances count busted! ' + queued.get('url') +
                                               ' qsize: ' + str(database.fetch_queue.qsize()))
                elif response_code == 401:
                    # Output result, but don't keep the url since we can't poke in protected folder
                    textutils.output_found('Password Protected - ' + description + ' at: ' + conf.base_url + url, {
                        "description": description,
                        "url": conf.base_url + url,
                        "code": response_code,
                        "severity": queued.get('severity'),
                    })
                # At this point, we have a valid result and the behavioral buffer is full.
                # The behavior of the hit has been taken in account and the app is not in global behavior error
                elif is_valid_result:
                    # Add path to valid_path for future actions
                    database.valid_paths.append(queued)
                    # If we reach this point, all edge-cases should be handled and all subsequent requests
                    # should be benchmarked against this new behavior
                    reset_behavior_database()
                    if response_code == 500:
                        textutils.output_found('ISE, ' + description + ' at: ' + conf.base_url + url, {
                            "description": description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "severity": queued.get('severity'),
                        })
                    elif response_code == 403:
                        textutils.output_found('*Forbidden* ' + description + ' at: ' + conf.base_url + url, {
                            "description": description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "severity": queued.get('severity'),
                        })
                    else:
                        textutils.output_found(description + ' at: ' + conf.base_url + url, {
                            "description": description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "severity": queued.get('severity'),
                        })
            elif response_code in conf.redirect_codes:
                # Redirects are followed only when the entry allows it.
                if queued.get('handle_redirect', True):
                    location = headers.get('location')
                    if location:
                        handle_redirects(queued, location)

            # Stats: timeouts are excluded from processed-item accounting.
            if response_code not in conf.timeout_codes:
                stats.update_processed_items()
                compute_request_time(start_time, end_time)

            # Mark item as processed
            database.fetch_queue.task_done()
        except Empty:
            continue
def run(self):
    """Drain the fetch queue, probing each queued directory for existence."""
    while not self.kill_received:
        try:
            entry = database.fetch_queue.get(False)
            target = conf.target_base_path + entry.get('url')
            info = entry.get('description')
            textutils.output_debug("Testing directory: " + target + " " + str(entry))
            stats.update_stats(target)

            # Normalize: every probed directory carries a trailing slash.
            if target != '/' and not target.endswith('/'):
                target += '/'

            # Fetch with the length cap disabled so validity checks get the whole body.
            start_time = datetime.now()
            response_code, content, headers = self.fetcher.fetch_url(
                target, conf.user_agent,
                database.latest_successful_request_time, limit_len=False)
            end_time = datetime.now()

            # Root is only registered as valid; no further analysis on it.
            if entry.get('url') == '/':
                if entry not in database.valid_paths:
                    database.valid_paths.append(entry)
                database.fetch_queue.task_done()
                continue

            if response_code == 500:
                textutils.output_debug("HIT 500 on: " + str(entry))

            if response_code in conf.timeout_codes:
                handle_timeout(entry, target, self.thread_id, output=self.output)
            elif response_code == 404 and detect_tomcat_fake_404(content):
                # A Tomcat-style fake 404 still means the path actually exists.
                database.valid_paths.append(entry)
                textutils.output_found('Tomcat redirect, ' + info + ' at: ' + conf.target_host + target)
            elif response_code in conf.expected_path_responses:
                # Does the body differ from our sampled crafted-404 pages?
                is_hit = test_valid_result(content)
                if response_code == 401:
                    # Report, but don't recurse into a protected folder.
                    textutils.output_found('Password Protected - ' + info + ' at: ' + conf.target_host + target)
                elif is_hit:
                    database.valid_paths.append(entry)
                    if response_code == 500:
                        textutils.output_found('ISE, ' + info + ' at: ' + conf.target_host + target)
                    elif response_code == 403:
                        textutils.output_found('*Forbidden* ' + info + ' at: ' + conf.target_host + target)
                    else:
                        textutils.output_found(info + ' at: ' + conf.target_host + target)
            elif response_code in conf.redirect_codes:
                location = headers.get('location')
                if location:
                    handle_redirects(entry, location)

            if response_code not in conf.timeout_codes:
                stats.update_processed_items()
                compute_request_time(start_time, end_time)

            database.fetch_queue.task_done()
        except Empty:
            continue
def run(self):
    """Worker loop: test queued file URLs against the target.

    Pulls entries from the global fetch queue, fetches each candidate
    file and reports hits (valid content, string match, ISE), while
    tracking server behavior changes to suppress false positives.
    """
    while not self.kill_received:
        try:
            # Non-Blocking get since we use the queue as a ringbuffer
            queued = database.fetch_queue.get(block=False)
            url = conf.target_base_path + queued.get('url')
            description = queued.get('description')
            match_string = queued.get('match_string')
            textutils.output_debug("Testing: " + url + " " + str(queued))
            stats.update_stats(url)

            # Fetch the target url, timing the request for the timing stats.
            start_time = datetime.now()
            if match_string:
                # Full body needed for string matching: disable the length cap.
                response_code, content, headers = self.fetcher.fetch_url(
                    url, conf.user_agent, database.latest_successful_request_time, limit_len=False)
                # Make sure we always match string against a string content
                if not isinstance(content, str):
                    content = content.decode('utf-8', 'ignore')
            else:
                response_code, content, headers = self.fetcher.fetch_url(
                    url, conf.user_agent, database.latest_successful_request_time)
            end_time = datetime.now()

            # handle timeout
            if response_code in conf.timeout_codes:
                handle_timeout(queued, url, self.thread_id, output=self.output)
            elif response_code == 500:
                textutils.output_found('ISE, ' + description + ' at: ' + conf.base_url + url, {
                    "description": description,
                    "url": conf.base_url + url,
                    "code": response_code,
                    "severity": queued.get('severity'),
                })
            elif response_code in conf.expected_file_responses:
                # Test if result is valid (does not look like a crafted 404)
                is_valid_result = test_valid_result(content, is_file=True)

                if is_valid_result:
                    # Test if behavior is ok.
                    normal_behavior = test_behavior(content)
                    textutils.output_debug('Normal behavior ' + str(normal_behavior) + ' ' + str(response_code))
                else:
                    # Behavior is not computed on invalid results.
                    normal_behavior = True

                # Reset behavior chance when we detect a new state
                if normal_behavior and database.behavior_error:
                    textutils.output_info('Normal behavior seems to be restored.')
                    database.behavior_error = False

                if is_valid_result and not normal_behavior:
                    # Looks like the new behavior is now the norm. It's a false positive.
                    # Additionally, we report a behavior change to the user at this point.
                    if not database.behavior_error:
                        textutils.output_info('Behavior change detected! Results may '
                                              'be incomplete or tachyon may never exit.')
                        textutils.output_debug('Chances taken: ' + str(queued.get('behavior_chances', 0)))
                        textutils.output_debug(queued.get('url'))
                        database.behavior_error = True
                # If we find a valid result but the behavior buffer is not full, we give a chance to the
                # url and increase it's chances count. We consider this a false behavior test.
                # We do this since an incomplete behavior buffer could give false positives
                # Additionally, if the fetch queue is empty and we're still not in global behavior error, we
                # consider all the remaining hits as valid, as they are hits that were given a chance.
                elif is_valid_result and len(database.behavioral_buffer) < conf.behavior_queue_size \
                        and not database.behavior_error and database.fetch_queue.qsize() != 0:
                    if not queued.get('behavior_chances'):
                        queued['behavior_chances'] = 1
                    else:
                        queued['behavior_chances'] += 1
                    if queued['behavior_chances'] < conf.max_behavior_tries:
                        textutils.output_debug('Chance left to target, re-queuing')
                        database.fetch_queue.put(queued)
                elif is_valid_result:
                    # Make sure we base our next analysis on that positive hit
                    reset_behavior_database()
                    if len(content) == 0:
                        # Valid but empty file: reported at 'info' severity only.
                        textutils.output_found('Empty ' + description + ' at: ' + conf.base_url + url, {
                            "description": "Empty " + description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "severity": 'info',
                        })
                    else:
                        textutils.output_found(description + ' at: ' + conf.base_url + url, {
                            "description": description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "severity": queued.get('severity'),
                        })
                elif match_string and re.search(re.escape(match_string), content, re.I):
                    # NOTE(review): only reached when the content failed the 404-sample
                    # test; a literal string match still counts as a find.
                    textutils.output_found("String-Matched " + description + ' at: ' + conf.base_url + url, {
                        "description": description,
                        "url": conf.base_url + url,
                        "code": response_code,
                        "string": match_string,
                        "severity": queued.get('severity'),
                    })
            elif response_code in conf.redirect_codes:
                # Redirects are followed only when the entry allows it.
                if queued.get('handle_redirect', True):
                    location = headers.get('location')
                    if location:
                        handle_redirects(queued, location)

            # Stats: timeouts are excluded from processed-item accounting.
            if response_code not in conf.timeout_codes:
                stats.update_processed_items()
                compute_request_time(start_time, end_time)

            # Mark item as processed
            database.fetch_queue.task_done()
        except Empty:
            continue
def run(self):
    """Worker loop: probe directory paths from the global fetch queue.

    Pulls queued path entries, fetches them with a trailing slash, and
    classifies the response: timeouts are re-queued via handle_timeout,
    Tomcat fake 404s and expected response codes are validated against the
    generated 404 samples and the behavioral buffer, and redirects are
    delegated to handle_redirects. Runs until self.kill_received is set.
    """
    while not self.kill_received:
        try:
            # Non-blocking get: raises Empty when the queue is drained,
            # handled at the bottom of the loop so the thread can re-check
            # kill_received instead of blocking forever.
            queued = database.fetch_queue.get(block=False)
            url = conf.target_base_path + queued.get('url')
            description = queued.get('description')
            textutils.output_debug("Testing directory: " + url + " " + str(queued))
            stats.update_stats(url)

            # Add trailing / for paths
            if not url.endswith('/') and url != '/':
                url += '/'

            # Fetch directory; limit_len=False because we need the full body
            # for the 404-sample and behavior comparisons below.
            start_time = datetime.now()
            response_code, content, headers = self.fetcher.fetch_url(url, conf.user_agent,
                                                                     database.latest_successful_request_time,
                                                                     limit_len=False)
            end_time = datetime.now()

            # Fetch '/' but don't submit it to more logging/existance tests
            if queued.get('url') == '/':
                if queued not in database.valid_paths:
                    database.valid_paths.append(queued)
                database.fetch_queue.task_done()
                continue

            if response_code == 500:
                textutils.output_debug("HIT 500 on: " + str(queued))

            # handle timeout: handle_timeout re-queues the item when its
            # timeout count is still under the maximum.
            if response_code in conf.timeout_codes:
                handle_timeout(queued, url, self.thread_id, output=self.output)
            elif response_code == 404 and detect_tomcat_fake_404(content):
                # Tomcat serves a recognizable 404 page for paths that exist
                # but redirect; treat it as a hit.
                database.valid_paths.append(queued)
                textutils.output_found('Tomcat redirect, ' + description + ' at: ' + conf.base_url + url, {
                    "description": description,
                    "url": conf.base_url + url,
                    "code": response_code,
                    "special": "tomcat-redirect",
                    "severity": queued.get('severity'),
                })
            elif response_code in conf.expected_path_responses:
                # Compare content with generated 404 samples
                is_valid_result = test_valid_result(content)

                if is_valid_result:
                    # Test if behavior is ok.
                    normal_behavior = test_behavior(content)
                else:
                    # We don't compute behavior on invalid results
                    normal_behavior = True

                if normal_behavior and database.behavior_error:
                    textutils.output_info('Normal behavior seems to be restored.')
                    database.behavior_error = False

                if is_valid_result and not normal_behavior:
                    # We don't declare a behavior change until the current hit has exceeded the maximum
                    # chances it can get.
                    if not database.behavior_error and queued.get('behavior_chances', 0) >= conf.max_behavior_tries:
                        textutils.output_info('Behavior change detected! Results may '
                                              'be incomplete or tachyon may never exit.')
                        textutils.output_debug('Chances taken: ' + str(queued.get('behavior_chances', 0)))
                        textutils.output_debug(queued.get('url'))
                        database.behavior_error = True

                # If we find a valid result but the behavior buffer is not full, we give a chance to the
                # url and increase it's chances count. We consider this a false behavior test.
                # We do this since an incomplete behavior buffer could give false positives
                # Additionally, if the fetch queue is empty and we're still not in global behavior error, we
                # consider all the remaining hits as valid, as they are hits that were given a chance.
                if is_valid_result and len(database.behavioral_buffer) < conf.behavior_queue_size \
                        and not database.behavior_error and database.fetch_queue.qsize() != 0:
                    if not queued.get('behavior_chances'):
                        queued['behavior_chances'] = 1
                    else:
                        queued['behavior_chances'] += 1

                    if queued['behavior_chances'] < conf.max_behavior_tries:
                        textutils.output_debug('Time for a chance')
                        textutils.output_debug('Chance left to target ' + queued.get('url') + ', re-queuing ' +
                                               ' qsize: ' + str(database.fetch_queue.qsize()) +
                                               ' chances: ' + str(queued.get('behavior_chances')))
                        # Re-queue for another attempt; task_done balances the
                        # original get() so queue accounting stays correct.
                        database.fetch_queue.put(queued)
                        database.fetch_queue.task_done()
                        continue
                    else:
                        textutils.output_debug('Chances count busted! ' + queued.get('url') +
                                               ' qsize: ' + str(database.fetch_queue.qsize()))
                elif response_code == 401:
                    # Output result, but don't keep the url since we can't poke in protected folder
                    textutils.output_found('Password Protected - ' + description + ' at: ' + conf.base_url + url, {
                        "description": description,
                        "url": conf.base_url + url,
                        "code": response_code,
                        "severity": queued.get('severity'),
                    })
                # At this point, we have a valid result and the behavioral buffer is full.
                # The behavior of the hit has been taken in account and the app is not in global behavior error
                elif is_valid_result:
                    # Add path to valid_path for future actions
                    database.valid_paths.append(queued)

                    # If we reach this point, all edge-cases should be handled and all subsequent requests
                    # should be benchmarked against this new behavior
                    reset_behavior_database()

                    if response_code == 500:
                        textutils.output_found('ISE, ' + description + ' at: ' + conf.base_url + url, {
                            "description": description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "severity": queued.get('severity'),
                        })
                    elif response_code == 403:
                        textutils.output_found('*Forbidden* ' + description + ' at: ' + conf.base_url + url, {
                            "description": description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "severity": queued.get('severity'),
                        })
                    else:
                        textutils.output_found(description + ' at: ' + conf.base_url + url, {
                            "description": description,
                            "url": conf.base_url + url,
                            "code": response_code,
                            "severity": queued.get('severity'),
                        })
            elif response_code in conf.redirect_codes:
                if queued.get('handle_redirect', True):
                    location = headers.get('location')
                    if location:
                        handle_redirects(queued, location)

            # Stats: timeouts were re-queued, so they are not counted as
            # processed and don't skew the request-time average.
            if response_code not in conf.timeout_codes:
                stats.update_processed_items()
                compute_request_time(start_time, end_time)

            # Mark item as processed
            database.fetch_queue.task_done()
        except Empty:
            continue
def run(self):
    """Worker loop: probe file urls from the global fetch queue.

    Pulls queued file entries, fetches each url (with an unbounded body
    read when a match_string must be searched), and reports hits that
    pass the generated-404 validity test. Timeouts are re-queued via
    handle_timeout and raise the global throttle delay; successful
    fetches lower it. Runs until self.kill_received is set.
    """
    while not self.kill_received:
        try:
            # Non-Blocking get since we use the queue as a ringbuffer
            queued = database.fetch_queue.get(False)
            url = conf.target_base_path + queued.get('url')
            description = queued.get('description')
            match_string = queued.get('match_string')
            textutils.output_debug("Testing: " + url + " " + str(queued))
            stats.update_stats(url)

            # Fetch the target url. limit_len=False only when a match_string
            # is present: the regex search needs the complete body.
            timeout = False
            if match_string:
                response_code, content, headers = self.fetcher.fetch_url(
                    url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False)
            else:
                response_code, content, headers = self.fetcher.fetch_url(
                    url, conf.user_agent, conf.fetch_timeout_secs)

            # handle timeout: handle_timeout re-queues the item, so it must
            # not be treated as completed work below.
            if response_code in conf.timeout_codes:
                handle_timeout(queued, url, self.thread_id, output=self.output)
                throttle.increase_throttle_delay()
                timeout = True
            elif response_code in conf.expected_file_responses:
                # Compare content with generated 404 samples
                is_valid_result = test_valid_result(content)

                # If the CRC missmatch, and we have an expected code, we found a valid link
                if is_valid_result:
                    # Content Test if match_string provided
                    if match_string and re.search(re.escape(match_string), content, re.I):
                        # Add path to valid_path for future actions
                        database.valid_paths.append(queued)
                        textutils.output_found("String-Matched " + description + ' at: ' + conf.target_host + url)
                    elif not match_string:
                        if response_code == 500:
                            textutils.output_found('ISE, ' + description + ' at: ' + conf.target_host + url)
                        else:
                            textutils.output_found(description + ' at: ' + conf.target_host + url)

                        # Add path to valid_path for future actions
                        database.valid_paths.append(queued)
            elif response_code in conf.redirect_codes:
                location = headers.get('location')
                if location:
                    handle_redirects(queued, location)

            if not timeout:
                # Successful round-trip: relax the throttle and count the item.
                throttle.decrease_throttle_delay()
                # BUG FIX: previously called unconditionally, so a timed-out
                # item (re-queued by handle_timeout) was still counted as
                # processed, inflating the processed count. Sibling workers
                # guard this with the same timeout check.
                stats.update_processed_items()

            # Mark item as processed
            database.fetch_queue.task_done()
        except Empty:
            continue
def run(self):
    """Worker loop: probe directory paths from the global fetch queue.

    Pulls queued path entries, fetches them with a trailing slash, and
    classifies the response (timeout, 401/403/500, valid hit, redirect).
    Timeouts are re-queued via handle_timeout and raise the global
    throttle delay; successful fetches lower it. Runs until
    self.kill_received is set.
    """
    while not self.kill_received:
        try:
            # Non-blocking get; Empty is handled at the bottom of the loop.
            queued = database.fetch_queue.get(False)
            url = conf.target_base_path + queued.get('url')
            description = queued.get('description')
            textutils.output_debug("Testing directory: " + url + " " + str(queued))
            stats.update_stats(url)

            # Add trailing / for paths.
            # BUG FIX: the original tested url[:-1] != '/' (the all-but-last
            # character slice), which is true even for urls already ending in
            # '/' (e.g. 'admin/'[:-1] == 'admin'), so a second slash was
            # appended ('admin//'). Test the actual suffix instead, matching
            # the sibling directory worker.
            if not url.endswith('/') and url != '/':
                url += '/'

            # Fetch directory; full body needed for the 404-sample comparison.
            timeout = False
            response_code, content, headers = self.fetcher.fetch_url(
                url, conf.user_agent, conf.fetch_timeout_secs, limit_len=False)

            # Fetch '/' but don't submit it to more logging/existance tests
            if queued.get('url') == '/':
                if queued not in database.valid_paths:
                    database.valid_paths.append(queued)
                database.fetch_queue.task_done()
                continue

            if response_code == 500:
                textutils.output_debug("HIT 500 on: " + str(queued))

            # handle timeout: handle_timeout re-queues the item, so it must
            # not be treated as completed work below.
            if response_code in conf.timeout_codes:
                handle_timeout(queued, url, self.thread_id, output=self.output)
                throttle.increase_throttle_delay()
                timeout = True
            elif response_code in conf.expected_path_responses:
                # Compare content with generated 404 samples
                is_valid_result = test_valid_result(content)

                # Skip subfile testing if forbidden
                if response_code == 401:
                    # Output result, but don't keep the url since we can't poke in protected folder
                    textutils.output_found('Password Protected - ' + description + ' at: ' + conf.target_host + url)
                elif is_valid_result:
                    # Add path to valid_path for future actions
                    database.valid_paths.append(queued)

                    if response_code == 500:
                        textutils.output_found('ISE, ' + description + ' at: ' + conf.target_host + url)
                    elif response_code == 403:
                        textutils.output_found('*Forbidden* ' + description + ' at: ' + conf.target_host + url)
                    else:
                        textutils.output_found(description + ' at: ' + conf.target_host + url)
            elif response_code in conf.redirect_codes:
                location = headers.get('location')
                if location:
                    handle_redirects(queued, location)

            if not timeout:
                # Successful round-trip: relax the throttle and count the item.
                throttle.decrease_throttle_delay()
                # BUG FIX: previously called unconditionally, so a timed-out
                # item (re-queued by handle_timeout) was still counted as
                # processed, inflating the processed count.
                stats.update_processed_items()

            # Mark item as processed
            database.fetch_queue.task_done()
        except Empty:
            continue