def handle_exception(self, phase, plugin_name, fuzzable_request, _exception):
    """
    Get the exception information, and put it into the output queue
    then, the strategy will get the items from the output queue and
    handle the exceptions.

    :param phase: The scan phase (e.g. 'crawl', 'audit') in which the
                  exception was raised
    :param plugin_name: The plugin that generated the exception
    :param fuzzable_request: The fuzzable request that was sent as input to
                             the plugin when the exception was raised
    :param _exception: The exception object
    """
    # Only the traceback is needed here; the exception instance itself is
    # received as _exception, so the type/value from exc_info() are unused
    _, _, tb = sys.exc_info()

    enabled_plugins = pprint_plugins(self._w3af_core)

    # Snapshot the core status at the moment of failure so the consumer
    # of the queue can report which plugin/request triggered the error.
    # log=False: avoid writing a (misleading) "running plugin" log entry
    status = CoreStatus(self._w3af_core)
    status.set_running_plugin(phase, plugin_name, log=False)
    status.set_current_fuzzable_request(phase, fuzzable_request)

    # store_tb=False keeps the ExceptionData picklable (see the
    # serialization tests: a stored traceback breaks cPickle.dumps)
    exception_data = ExceptionData(status, _exception, tb,
                                   enabled_plugins, store_tb=False)
    self._out_queue.put(exception_data)
def test_serialize_deserialize(self):
    """
    An ExceptionData created with store_tb=False must survive a pickle
    round-trip and produce identical JSON afterwards.
    """
    try:
        raise KeyError
    # 'except ... as e' is valid from Python 2.6 on and required in
    # Python 3; the old 'except Exception, e' form is Python 2 only
    except Exception as e:
        # Only the traceback is needed; type/value are unused
        _, _, tb = sys.exc_info()

    enabled_plugins = '{}'
    fr = self.get_fuzzable_request()

    core = w3afCore()
    status = CoreStatus(core)
    status.set_running_plugin('audit', 'sqli', log=False)
    status.set_current_fuzzable_request('audit', fr)

    exception_data = ExceptionData(status, e, tb, enabled_plugins,
                                   store_tb=False)

    pickled_ed = cPickle.dumps(exception_data)
    unpickled_ed = cPickle.loads(pickled_ed)

    self.assertEqual(exception_data.to_json(), unpickled_ed.to_json())
def test_fail_traceback_serialize(self):
    """
    Storing the live traceback (store_tb=True) makes ExceptionData
    unpicklable: cPickle.dumps must raise TypeError.
    """
    try:
        raise KeyError
    # 'except ... as e' is valid from Python 2.6 on and required in
    # Python 3; the old 'except Exception, e' form is Python 2 only
    except Exception as e:
        # Only the traceback is needed; type/value are unused
        _, _, tb = sys.exc_info()

    enabled_plugins = '{}'
    fr = self.get_fuzzable_request()

    core = w3afCore()
    status = CoreStatus(core)
    status.set_running_plugin('audit', 'sqli', log=False)
    status.set_current_fuzzable_request('audit', fr)

    exception_data = ExceptionData(status, e, tb, enabled_plugins,
                                   store_tb=True)

    self.assertRaises(TypeError, cPickle.dumps, exception_data)
def test_without_traceback(self):
    """
    An ExceptionData built with tb=None must still pickle cleanly and
    keep its JSON representation stable across the round-trip.
    """
    fr = self.get_fuzzable_request()
    core = w3afCore()

    status = CoreStatus(core)
    status.set_running_plugin('audit', 'sqli', log=False)
    status.set_current_fuzzable_request('audit', fr)

    # No traceback at all: pass None for tb
    exception_data = ExceptionData(status, KeyError(), None, '{}',
                                   store_tb=False)

    restored = cPickle.loads(cPickle.dumps(exception_data))

    self.assertEqual(exception_data.to_json(), restored.to_json())
def create_eta_table(scan):
    """
    For each ETA log entry we find in the log we need to add a row to the
    output table which has:

        * Timestamp
        * Phase
        * Queue input speed
        * Queue output speed
        * Queue size
        * Used adjustment known
        * Used adjustment unknown
        * Proposed adjustment known
        * Proposed adjustment unknown

    :param scan: A file pointer to the scan log
    :return: None, the table is printed to the console
    """
    start_epoch = get_first_timestamp(scan)

    #
    # First pass over the log: when did crawl, audit and grep finish?
    #
    scan.seek(0)
    phase_end_timestamps = {}

    for line in scan:
        if CRAWL_INFRA_FINISHED in line:
            phase_end_timestamps[CRAWL] = get_line_epoch(line) - start_epoch

        if 'seconds to join' not in line:
            continue

        if JOIN_TIMES.search(line) is None:
            continue

        lowered = line.lower()

        if AUDIT in lowered:
            phase_end_timestamps[AUDIT] = get_line_epoch(line) - start_epoch

        if GREP in lowered:
            phase_end_timestamps[GREP] = get_line_epoch(line) - start_epoch

    #
    # Second pass: collect every crawl/audit/grep ETA estimation
    #
    scan.seek(0)
    calculated_etas = []

    for line in scan:
        match = CALCULATED_ETA.search(line)
        if not match:
            continue

        timestamp = get_line_epoch(line) - start_epoch

        # The core logs 'None' when no ETA could be computed; map it to 0.0
        raw_eta = match.group(2)
        eta = 0.0 if raw_eta == 'None' else float(raw_eta)

        phase = match.group(1).strip().lower()
        speed_in = float(match.group(3))
        speed_out = float(match.group(4))
        pending = int(match.group(5))
        used_adj_known = float(match.group(6))
        used_adj_unknown = float(match.group(7))
        used_average = 'true' in match.group(8).lower()

        # Without an end timestamp for the phase we can't judge the ETA
        if phase not in phase_end_timestamps:
            continue

        calculated_etas.append(CalculatedETA(phase,
                                             eta,
                                             speed_in,
                                             speed_out,
                                             pending,
                                             used_adj_known,
                                             used_adj_unknown,
                                             used_average,
                                             timestamp,
                                             phase_end_timestamps[phase]))

    status = CoreStatus(None, None)
    status.start()

    # Print one table per phase
    for phase in (GREP, AUDIT, CRAWL):
        print(phase)
        print('=' * len(phase))
        print('')

        table_data = [TABLE_HEADER]

        for ce in calculated_etas:
            if ce.phase != phase:
                continue

            adj_known, adj_unknown = ce.calculate_perfect_adjustments()
            adjustment = Adjustment(known=adj_known, unknown=adj_unknown)
            recalculated_eta = status.calculate_eta(ce.input_speed,
                                                    ce.output_speed,
                                                    ce.queue_size,
                                                    _type=phase,
                                                    adjustment=adjustment)

            table_data.append([ce.timestamp,
                               ce.phase_end_timestamp,
                               ce.phase_end_timestamp - ce.timestamp,
                               ce.eta,
                               ce.get_delta(),
                               ce.input_speed,
                               ce.output_speed,
                               ce.queue_size,
                               ce.adjustment_known,
                               ce.adjustment_unknown,
                               ce.adjustment_average,
                               #'%.2f' % adj_known,
                               #'%.2f' % adj_unknown,
                               #'%.2f' % recalculated_eta,
                               'TBD',
                               'TBD',
                               'TBD'])

        print(AsciiTable(table_data).table)
        print('')
        print('')
def create_eta_table(scan):
    """
    For each ETA log entry we find in the log we need to add a row to the
    output table which has:

     * Timestamp
     * Phase
     * Queue input speed
     * Queue output speed
     * Queue size
     * Used adjustment known
     * Used adjustment unknown
     * Proposed adjustment known
     * Proposed adjustment unknown

    :param scan: A file pointer to the scan log
    :return: None, the table is printed to the console

    NOTE(review): this function is a duplicate of the create_eta_table
    defined earlier in this module; at import time the later definition
    shadows the earlier one. Consider removing one of the two copies.
    """
    first_timestamp = get_first_timestamp(scan)

    #
    # Find the end times for crawl, audit, grep
    #
    scan.seek(0)
    phase_end_timestamps = {}

    for line in scan:
        # Crawl/infrastructure has its own dedicated "finished" marker
        if CRAWL_INFRA_FINISHED in line:
            phase_end_timestamps[CRAWL] = get_line_epoch(line) - first_timestamp

        # Audit and grep end times come from the consumer "join" lines
        if 'seconds to join' not in line:
            continue

        match = JOIN_TIMES.search(line)
        if match:
            if AUDIT in line.lower():
                phase_end_timestamps[AUDIT] = get_line_epoch(line) - first_timestamp

            if GREP in line.lower():
                phase_end_timestamps[GREP] = get_line_epoch(line) - first_timestamp

    #
    # Find the crawl, audit and grep progress estimations
    #
    scan.seek(0)
    calculated_etas = []

    for line in scan:
        match = CALCULATED_ETA.search(line)
        if not match:
            continue

        timestamp = get_line_epoch(line) - first_timestamp

        # The log prints 'None' when no ETA was available; treat as 0.0
        eta = match.group(2)
        if eta == 'None':
            eta = '0.0'
        eta = float(eta)

        phase = match.group(1).strip().lower()
        input_speed = float(match.group(3))
        output_speed = float(match.group(4))
        queue_size = int(match.group(5))
        adjustment_known = float(match.group(6))
        adjustment_unknown = float(match.group(7))
        adjustment_average = 'true' in match.group(8).lower()

        # Without a recorded end time for this phase the ETA can't be
        # compared against reality, so skip the entry
        if phase not in phase_end_timestamps:
            continue

        phase_end_timestamp = phase_end_timestamps[phase]

        calculated_eta = CalculatedETA(phase,
                                       eta,
                                       input_speed,
                                       output_speed,
                                       queue_size,
                                       adjustment_known,
                                       adjustment_unknown,
                                       adjustment_average,
                                       timestamp,
                                       phase_end_timestamp)
        calculated_etas.append(calculated_eta)

    status = CoreStatus(None, None)
    status.start()

    # Print the tables!
    for phase in (GREP, AUDIT, CRAWL):
        print(phase)
        print('=' * len(phase))
        print('')

        table_data = [TABLE_HEADER]

        for calculated_eta in calculated_etas:
            if calculated_eta.phase != phase:
                continue

            # Proposed adjustments / recalculated ETA are computed but the
            # table rows still print 'TBD' (see commented-out cells below)
            adj_known, adj_unknown = calculated_eta.calculate_perfect_adjustments()
            adjustment = Adjustment(known=adj_known, unknown=adj_unknown)
            recalculated_eta = status.calculate_eta(calculated_eta.input_speed,
                                                    calculated_eta.output_speed,
                                                    calculated_eta.queue_size,
                                                    _type=phase,
                                                    adjustment=adjustment)

            data = [calculated_eta.timestamp,
                    calculated_eta.phase_end_timestamp,
                    calculated_eta.phase_end_timestamp - calculated_eta.timestamp,
                    calculated_eta.eta,
                    calculated_eta.get_delta(),
                    calculated_eta.input_speed,
                    calculated_eta.output_speed,
                    calculated_eta.queue_size,
                    calculated_eta.adjustment_known,
                    calculated_eta.adjustment_unknown,
                    calculated_eta.adjustment_average,
                    #'%.2f' % adj_known,
                    #'%.2f' % adj_unknown,
                    #'%.2f' % recalculated_eta,
                    'TBD',
                    'TBD',
                    'TBD']

            table_data.append(data)

        table = AsciiTable(table_data)
        print(table.table)
        print('')
        print('')
def test_queue_status_not_started(self):
    """
    Before the scan starts, every crawl/audit queue metric getter must
    return None.
    """
    core = w3afCore()
    status = CoreStatus(core)

    crawl_getters = (status.get_crawl_input_speed,
                     status.get_crawl_output_speed,
                     status.get_crawl_qsize,
                     status.get_crawl_current_fr,
                     status.get_crawl_eta)

    audit_getters = (status.get_audit_input_speed,
                     status.get_audit_output_speed,
                     status.get_audit_qsize,
                     status.get_audit_current_fr,
                     status.get_audit_eta)

    for getter in crawl_getters + audit_getters:
        self.assertEqual(getter(), None)

    # Cleanup: don't leak the core's worker pool between tests
    core.worker_pool.terminate_join()
def test_simple(self):
    """
    Walk the status object through start / pause / resume / stop and
    verify the human-readable status string at each step, for both the
    crawl and audit phases.
    """
    status = CoreStatus(Mock())

    # Freshly created: stopped and not running
    self.assertEqual(status.get_status(), STOPPED)
    self.assertFalse(status.is_running())

    status.start()
    self.assertTrue(status.is_running())

    status.set_current_fuzzable_request('crawl', 'unittest_fr')
    status.set_running_plugin('crawl', 'unittest_plugin')

    crawl_status = 'Crawling unittest_fr using crawl.unittest_plugin'
    self.assertEqual(status.get_status(), crawl_status)

    # Pausing replaces the status string; resuming restores it
    status.pause(True)
    self.assertEqual(status.get_status(), PAUSED)

    status.pause(False)
    self.assertEqual(status.get_status(), crawl_status)

    status.set_current_fuzzable_request('audit', 'unittest_fr_audit')
    status.set_running_plugin('audit', 'unittest_plugin_audit')

    both_status = ('Crawling unittest_fr using crawl.unittest_plugin\n'
                   'Auditing unittest_fr_audit using audit.unittest_plugin_audit')
    self.assertEqual(status.get_status(), both_status)

    status.stop()
    self.assertEqual(status.get_status(), STOPPED)
    self.assertFalse(status.is_running())