def dump_data(w3af_core):
    """
    Collect the current scan status counters (request totals, queue speeds
    and queue sizes) into a ``data`` dict. If any status query fails, the
    exception and its traceback are stored in ``data`` instead.

    :param w3af_core: The w3af core instance whose ``status`` is queried.
    """
    s = w3af_core.status

    try:
        data = {'Requests sent': consecutive_number_generator.get(),
                'Requests per minute': s.get_rpm(),
                'Crawl input queue input speed': s.get_crawl_input_speed(),
                'Crawl input queue output speed': s.get_crawl_output_speed(),
                'Crawl input queue size': s.get_crawl_qsize(),
                'Crawl output queue size': s.get_crawl_output_qsize(),
                'Audit input queue input speed': s.get_audit_input_speed(),
                'Audit input queue output speed': s.get_audit_output_speed(),
                'Audit input queue size': s.get_audit_qsize(),
                # Bug fix: this key used to repeat get_audit_qsize() (copy-paste
                # of the line above); query the grep queue instead.
                'Grep input queue size': s.get_grep_qsize(),
                'Core worker pool input queue size': s.get_core_worker_pool_queue_size(),
                'Output manager input queue size': om.manager.get_in_queue().qsize(),
                'Cache stats': get_parser_cache_stats()}
    except Exception as e:
        # `as e` matches the modern form already used elsewhere in this file
        # and is forward-compatible with Python 3.
        exc_type, exc_value, exc_tb = sys.exc_info()
        tback = traceback.format_exception(exc_type, exc_value, exc_tb)
        data = {'Exception': str(e),
                'Traceback': tback}
def dump_data(w3af_core):
    """
    Collect the current scan status counters (request totals, queue speeds,
    queue sizes and worker pool queue sizes) into a ``data`` dict. If any
    status query fails, the exception and its traceback are stored in
    ``data`` instead.

    :param w3af_core: The w3af core instance whose ``status`` is queried.
    """
    s = w3af_core.status

    try:
        data = {'Requests sent': consecutive_number_generator.get(),
                'Requests per minute': s.get_rpm(),
                'Crawl input queue input speed': s.get_crawl_input_speed(),
                'Crawl input queue output speed': s.get_crawl_output_speed(),
                'Crawl input queue size': s.get_crawl_qsize(),
                'Crawl output queue size': s.get_crawl_output_qsize(),
                'Crawl worker pool input queue size': s.get_crawl_worker_pool_queue_size(),
                'Audit input queue input speed': s.get_audit_input_speed(),
                'Audit input queue output speed': s.get_audit_output_speed(),
                'Audit input queue size': s.get_audit_qsize(),
                'Audit worker pool input queue size': s.get_audit_worker_pool_queue_size(),
                # Bug fix: this key used to repeat get_audit_qsize() (copy-paste
                # of the audit line); query the grep queue instead.
                'Grep input queue size': s.get_grep_qsize(),
                'Core worker pool input queue size': s.get_core_worker_pool_queue_size(),
                'Output manager input queue size': om.manager.get_in_queue().qsize(),
                'Cache stats': get_parser_cache_stats()}
    except Exception as e:
        # `as e` matches the modern form already used elsewhere in this file
        # and is forward-compatible with Python 3.
        exc_type, exc_value, exc_tb = sys.exc_info()
        tback = traceback.format_exception(exc_type, exc_value, exc_tb)
        data = {'Exception': str(e),
                'Traceback': tback}
def get_rpm(self):
    """
    :return: The number of HTTP requests per minute performed since the
             start of the scan.

    :raise RuntimeError: If called before start() has set the scan start
                         time (``self._start_time_epoch``).
    """
    if self._start_time_epoch is None:
        # Bug fix: the message used to reference get_run_time(); callers of
        # this method would be pointed at the wrong function name.
        raise RuntimeError('Can NOT call get_rpm before start().')

    now = time.time()
    diff = now - self._start_time_epoch
    run_time = diff / 60.0

    if not run_time:
        # Robustness: avoid ZeroDivisionError when called at the exact same
        # instant the scan started. Zero requests/minute is the honest answer.
        return 0

    return int(consecutive_number_generator.get() / run_time)
def dump_data(w3af_core):
    """
    Collect the current scan status counters (request totals, crawl/audit
    queue speeds and sizes) into a ``data`` dict. Failures are reported to
    stdout instead of being raised.

    :param w3af_core: The w3af core instance whose ``status`` is queried.
    """
    s = w3af_core.status

    try:
        data = {'Requests sent': consecutive_number_generator.get(),
                'Requests per minute': s.get_rpm(),
                'Crawl queue input speed': s.get_crawl_input_speed(),
                'Crawl queue output speed': s.get_crawl_output_speed(),
                'Crawl queue size': s.get_crawl_qsize(),
                'Audit queue input speed': s.get_audit_input_speed(),
                'Audit queue output speed': s.get_audit_output_speed(),
                'Audit queue size': s.get_audit_qsize()}
    except Exception as e:
        # `except ... as` replaces the legacy comma syntax; it is the form
        # already used by the newer dump_data variant in this file and is
        # forward-compatible with Python 3.
        print('Failed to retrieve status data: "%s"' % e)
def dump_data(w3af_core):
    """
    Collect the current scan status counters (request totals, crawl/audit
    queue speeds and sizes) into a ``data`` dict. Failures are reported to
    stdout instead of being raised.

    :param w3af_core: The w3af core instance whose ``status`` is queried.
    """
    s = w3af_core.status

    try:
        data = {
            'Requests sent': consecutive_number_generator.get(),
            'Requests per minute': s.get_rpm(),
            'Crawl queue input speed': s.get_crawl_input_speed(),
            'Crawl queue output speed': s.get_crawl_output_speed(),
            'Crawl queue size': s.get_crawl_qsize(),
            'Audit queue input speed': s.get_audit_input_speed(),
            'Audit queue output speed': s.get_audit_output_speed(),
            'Audit queue size': s.get_audit_qsize()
        }
    except Exception as e:
        # `except ... as` replaces the legacy comma syntax; it is the form
        # already used by the newer dump_data variant in this file and is
        # forward-compatible with Python 3.
        print('Failed to retrieve status data: "%s"' % e)
def dump_data(w3af_core):
    """
    Serialize the current scan status counters to a JSON file.

    Queries the core status for request totals, queue speeds and queue
    sizes; on any failure the exception and traceback are dumped instead,
    so an output file is always written.

    :param w3af_core: The w3af core instance whose ``status`` is queried.
    """
    s = w3af_core.status

    try:
        data = {
            'Requests sent': consecutive_number_generator.get(),
            'Requests per minute': s.get_rpm(),
            'Crawl input queue input speed': s.get_crawl_input_speed(),
            'Crawl input queue output speed': s.get_crawl_output_speed(),
            'Crawl input queue size': s.get_crawl_qsize(),
            'Crawl output queue size': s.get_crawl_output_qsize(),
            'Audit input queue input speed': s.get_audit_input_speed(),
            'Audit input queue output speed': s.get_audit_output_speed(),
            'Audit input queue size': s.get_audit_qsize(),
            # Bug fix: this key used to repeat get_audit_qsize() (copy-paste
            # of the line above); query the grep queue instead.
            'Grep input queue size': s.get_grep_qsize(),
            'Core worker pool input queue size': s.get_core_worker_pool_queue_size(),
            'Output manager input queue size': om.manager.get_in_queue().qsize(),
            'Cache stats': get_parser_cache_stats()
        }
    except Exception as e:
        exc_type, exc_value, exc_tb = sys.exc_info()
        tback = traceback.format_exception(exc_type, exc_value, exc_tb)
        data = {'Exception': str(e),
                'Traceback': tback}

    json_data = json.dumps(data, indent=4)

    output_file = PROFILING_OUTPUT_FMT % get_filename_fmt()

    # Bug fix: file(...) is a Python-2-only builtin and the handle was never
    # closed; a context manager guarantees the file is flushed and closed.
    with open(output_file, 'w') as output_fh:
        output_fh.write(json_data)
def get_sent_request_count(self):
    """
    :return: How many HTTP requests have been sent so far during this scan.
    """
    sent_so_far = consecutive_number_generator.get()
    return sent_so_far