def test_save_current_to_new_profile(self):
    """Saving the current configuration to a new profile keeps the plugin
    selection, including plugins that were explicitly disabled."""
    w3af_core = w3afCore()
    w3af_core.profiles.use_profile('OWASP_TOP10', workdir='.')

    audit = w3af_core.plugins.get_enabled_plugins('audit')

    # Disable the last audit plugin before saving the new profile
    disabled_plugin = audit[-1]
    audit = audit[:-1]
    w3af_core.plugins.set_plugins(audit, 'audit')

    enabled = w3af_core.plugins.get_enabled_plugins('audit')
    self.assertEqual(set(enabled), set(audit))
    self.assertNotIn(disabled_plugin, enabled)

    w3af_core.profiles.save_current_to_new_profile('unittest-OWASP_TOP10')

    # Get a new, clean instance of the core.
    w3af_core = w3afCore()
    audit = w3af_core.plugins.get_enabled_plugins('audit')
    self.assertEqual(audit, [])

    w3af_core.profiles.use_profile('unittest-OWASP_TOP10')
    enabled_plugins = w3af_core.plugins.get_all_enabled_plugins()

    self.assertNotIn(disabled_plugin, enabled_plugins['audit'])
    self.assertIn('credit_cards', enabled_plugins['grep'])
    self.assertIn('private_ip', enabled_plugins['grep'])
    self.assertIn('dns_wildcard', enabled_plugins['infrastructure'])
    self.assertIn('web_spider', enabled_plugins['crawl'])

    # Cleanup: remove the profile created by this test
    w3af_core.profiles.remove_profile('unittest-OWASP_TOP10')
def test_remove_profile(self):
    """A removed profile can no longer be loaded."""
    core = w3afCore()
    core.profiles.save_current_to_new_profile('unittest-remove')
    core.profiles.remove_profile('unittest-remove')

    self.assertRaises(BaseFrameworkException,
                      core.profiles.use_profile,
                      'unittest-remove')
def test_strategy_exception(self):
    """
    When the strategy's fuzzable request router raises, start() must
    propagate the exception AND terminate the consumer threads.
    """
    core = w3afCore()

    # Point the scan at the test target
    target = core.target.get_options()
    target['target'].set_value(self.TARGET_URL)
    core.target.set_options(target)

    core.plugins.set_plugins(['sqli'], 'audit')
    core.plugins.init_plugins()

    core.verify_environment()
    core.scan_start_hook()

    strategy = CoreStrategy(core)
    # Force a failure inside the strategy; wrap terminate so we can assert
    # that it was invoked during cleanup
    strategy._fuzzable_request_router = Mock(side_effect=Exception)
    strategy.terminate = Mock(wraps=strategy.terminate)

    self.assertRaises(Exception, strategy.start)

    # Now test that those threads are being terminated
    self.assertEqual(strategy.terminate.called, True)

    # Stub out the exploit phase so scan_end_hook() returns quickly
    core.exploit_phase_prerequisites = lambda: 42
    core.scan_end_hook()

    self._assert_thread_names()
def test_use_all_profiles(self):
    """
    This test catches the errors in my profiles that generate these
    messages:

        The profile you are trying to load (web_infrastructure) seems to be
        outdated, this is a common issue which happens when the framework
        is updated and one of its plugins adds/removes one of the
        configuration parameters referenced by a profile, or the plugin is
        removed all together.

        The profile was loaded but some of your settings might have been
        lost. This is the list of issues that were found:

        - Setting the options for plugin "infrastructure.server_header"
          raised an exception due to unknown configuration parameters.

        We recommend you review the specific plugin configurations, apply
        the required changes and save the profile in order to update it and
        avoid this message. If this warning does not disappear you can
        manually edit the profile file to fix it.
    """
    w3af_core = w3afCore()
    valid, invalid = w3af_core.profiles.get_profile_list('.')

    # assertGreater gives a useful failure message (was assertTrue(len > 5))
    self.assertGreater(len(valid), 5)
    self.assertEqual(len(invalid), 0)

    # Loading each shipped profile must not raise nor warn
    for profile_inst in valid:
        profile_name = profile_inst.get_name()
        w3af_core.profiles.use_profile(profile_name, workdir='.')
def test_strategy_exception(self):
    """
    When joining the strategy's consumers raises, start() must propagate
    the exception AND terminate the consumer threads.
    """
    core = w3afCore()

    # Point the scan at the test target
    target = core.target.get_options()
    target["target"].set_value(self.TARGET_URL)
    core.target.set_options(target)

    core.plugins.set_plugins(["sqli"], "audit")
    core.plugins.init_plugins()

    core.verify_environment()
    core.scan_start_hook()

    strategy = w3af_core_strategy(core)
    # Force a failure when the strategy joins its consumers; wrap terminate
    # so we can assert that it was invoked during cleanup
    strategy.join_all_consumers = Mock(side_effect=Exception)
    strategy.terminate = Mock(wraps=strategy.terminate)

    self.assertRaises(Exception, strategy.start)

    # Now test that those threads are being terminated
    self.assertEqual(strategy.terminate.called, True)

    # Stub out the exploit phase so scan_end_hook() returns quickly
    core.exploit_phase_prerequisites = lambda: 42
    core.scan_end_hook()

    self._assert_thread_names()
def test_error_handling(self):
    """
    An output plugin that raises while writing a message must be captured
    by the core's exception handler instead of crashing the output manager.
    """
    class InvalidPlugin(object):
        # Output plugin stub: information() always fails, everything else
        # is a no-op

        def information(self, msg, new_line=True):
            raise Exception('Test')

        def debug(self, *args, **kwargs):
            pass

        def error(self, msg, new_line=True):
            pass

        def get_name(self):
            return 'InvalidPlugin'

    invalid_plugin = InvalidPlugin()

    w3af_core = w3afCore()

    # Inject the broken plugin directly into the output manager
    om.out._output_plugin_instances = [invalid_plugin, ]

    om.out.information('abc')
    # Flush queued messages so information() is actually called
    om.out.process_all_messages()

    exc_list = w3af_core.exception_handler.get_all_exceptions()
    self.assertEqual(len(exc_list), 1, exc_list)

    edata = exc_list[0]
    self.assertEqual(str(edata.exception), 'Test')
def test_kb_list_shells_rfi_port_scan_2181(self):
    """
    A PortScanShell stored in the KB must round-trip through
    kb.get_all_shells() with its core references re-bound.

    :see: https://github.com/andresriancho/w3af/issues/2181
    """
    w3af_core = w3afCore()
    vuln = MockVuln()

    url = URL('http://moth/?a=1')
    freq = FuzzableRequest(url)
    exploit_mutant = QSMutant.create_mutants(freq, [''], [], False, {})[0]

    shell = PortScanShell(vuln, w3af_core.uri_opener, w3af_core.worker_pool,
                          exploit_mutant)
    kb.append('a', 'b', shell)

    shells = kb.get_all_shells(w3af_core=w3af_core)
    self.assertEqual(len(shells), 1)
    unpickled_shell = shells[0]

    self.assertEqual(shell, unpickled_shell)
    # The opener/pool must be the live core objects, not copies
    self.assertIs(unpickled_shell._uri_opener, w3af_core.uri_opener)
    self.assertIs(unpickled_shell.worker_pool, w3af_core.worker_pool)
    self.assertEqual(unpickled_shell._exploit_mutant, exploit_mutant)

    w3af_core.quit()
def test_save_current_to_new_profile(self):
    """Saving the current configuration to a new profile keeps the plugin
    selection, including plugins that were explicitly disabled."""
    self.core.profiles.use_profile('OWASP_TOP10', workdir='.')

    audit = self.core.plugins.get_enabled_plugins('audit')

    # Disable the last audit plugin before saving the new profile
    disabled_plugin = audit[-1]
    audit = audit[:-1]
    self.core.plugins.set_plugins(audit, 'audit')

    enabled = self.core.plugins.get_enabled_plugins('audit')
    self.assertEqual(set(enabled), set(audit))
    self.assertTrue(disabled_plugin not in enabled)

    new_profile_name = 'save-current-new'
    self.core.profiles.save_current_to_new_profile(new_profile_name)

    # Get a new, clean instance of the core.
    clean_core = w3afCore()
    audit = clean_core.plugins.get_enabled_plugins('audit')
    self.assertEqual(audit, [])

    clean_core.profiles.use_profile(new_profile_name)
    enabled_plugins = clean_core.plugins.get_all_enabled_plugins()

    self.assertNotIn(disabled_plugin, enabled_plugins['audit'])
    self.assertIn('credit_cards', enabled_plugins['grep'])
    self.assertIn('private_ip', enabled_plugins['grep'])
    self.assertIn('dns_wildcard', enabled_plugins['infrastructure'])
    self.assertIn('web_spider', enabled_plugins['crawl'])

    # cleanup
    clean_core.profiles.remove_profile(new_profile_name)
    clean_core.worker_pool.terminate_join()
def test_alert_if_target_is_301_all_internal_redir(self):
    """
    Tests that no info is created if the site redirects internally
    """
    core = w3afCore()

    # Every request to w3af.com 301-redirects to another w3af.com URL,
    # i.e. the redirect never leaves the target domain
    httpretty.register_uri(httpretty.GET,
                           re.compile("w3af.com/(.*)"),
                           body='301',
                           status=301,
                           adding_headers={'Location': 'http://w3af.com/xyz'})

    target = core.target.get_options()
    target['target'].set_value('http://w3af.com/')
    core.target.set_options(target)

    core.plugins.set_plugins(['sqli'], 'audit')
    core.plugins.init_plugins()

    core.verify_environment()
    core.scan_start_hook()

    strategy = CoreStrategy(core)
    strategy.start()

    # No 'core' info should have been recorded for internal redirects
    infos = kb.get('core', 'core')
    self.assertEqual(len(infos), 0, infos)
def test_kb_list_shells_xpath_2181(self):
    """
    An XPathReader shell stored in the KB must round-trip through
    kb.get_all_shells() with its core references re-bound and its
    configuration preserved.

    :see: https://github.com/andresriancho/w3af/issues/2181
    """
    w3af_core = w3afCore()
    vuln = MockVuln()

    str_delim = '&'
    true_cond = ''
    use_difflib = False
    is_error_response = IsErrorResponse(vuln, w3af_core.uri_opener,
                                        use_difflib)

    shell = XPathReader(vuln, w3af_core.uri_opener, w3af_core.worker_pool,
                        str_delim, true_cond, is_error_response)
    kb.append('a', 'b', shell)

    shells = kb.get_all_shells(w3af_core=w3af_core)
    self.assertEqual(len(shells), 1)
    unpickled_shell = shells[0]

    self.assertEqual(shell, unpickled_shell)
    # The opener/pool must be the live core objects, not copies
    self.assertIs(unpickled_shell._uri_opener, w3af_core.uri_opener)
    self.assertIs(unpickled_shell.worker_pool, w3af_core.worker_pool)
    self.assertEqual(unpickled_shell.STR_DELIM, shell.STR_DELIM)
    self.assertEqual(unpickled_shell.TRUE_COND, shell.TRUE_COND)
    self.assertEqual(unpickled_shell.is_error_resp.use_difflib, use_difflib)
    self.assertEqual(unpickled_shell.is_error_resp.url_opener,
                     w3af_core.uri_opener)

    w3af_core.quit()
def test_multiple_instances(self):
    """
    Just making sure nothing crashes if I have more than 1 instance
    of w3afCore
    """
    instances = [w3afCore() for _ in xrange(5)]
def test_use_profile_variable_replace(self):
    """Variables in a profile (e.g. file paths) are replaced on load."""
    core = w3afCore()
    core.profiles.use_profile('OWASP_TOP10', workdir='.')

    plugin_opts = core.plugins.get_plugin_options('audit',
                                                  'ssl_certificate')
    self.assertEqual(plugin_opts['caFileName'].get_value(),
                     self.INPUT_FILE)
def setUp(self):
    """Create the core with os.listdir patched for plugin discovery."""
    super(TestW3afCorePlugins, self).setUp()

    # Patch os.listdir before creating the core so plugin discovery uses
    # the listdir_remove_fs side effect (defined elsewhere in this module)
    self.listdir_patch = patch("os.listdir")
    self.listdir_mock = self.listdir_patch.start()
    self.listdir_mock.side_effect = listdir_remove_fs

    self.core = w3afCore()
def test_get_plugin_instAll(self):
    """get_plugin_inst() returns a plugin whose name matches for every
    plugin of every type."""
    w3af_core = w3afCore()

    # 'attack' is not returned by get_plugin_types(), so chain it in
    for plugin_type in itertools.chain(w3af_core.plugins.get_plugin_types(),
                                       ['attack']):
        for plugin_name in w3af_core.plugins.get_plugin_list(plugin_type):
            plugin_inst = w3af_core.plugins.get_plugin_inst(plugin_type,
                                                            plugin_name)
            self.assertEqual(plugin_inst.get_name(), plugin_name)
def __init__(self, torNodes=None):
    """
    Plugin to load the W3AF context in Tortazo.

    :param torNodes: Optional list of TOR nodes to run W3AF against.
                     Defaults to an empty list. (A ``None`` sentinel is
                     used instead of ``[]`` to avoid the shared
                     mutable-default-argument pitfall.)
    """
    if torNodes is None:
        torNodes = []

    BasePlugin.__init__(self, torNodes, 'w3afPlugin')
    self.setPluginDetails('w3afPlugin',
                          'Plugin to load the W3AF context in Tortazo. You can execute W3AF against the TOR deep web.',
                          '1.0',
                          'Adastra: @jdaanial')

    if len(torNodes) > 0:
        self.info("[*] w3afPlugin Initialized!")

    # Fresh core with all plugins discovered but none enabled
    self.w3afCorePlugin = w3afCore()
    self.w3afCorePlugin.plugins.init_plugins()
    self.w3afCorePlugin.plugins.zero_enabled_plugins()
    self.miscSettings = MiscSettings()
def __initRoot(self, do_upd):
    """
    Root menu init routine.

    :param do_upd: Passed as ``force`` to the console updater before the
                   core is created.
    """
    cons_upd = ConsoleUIUpdater(force=do_upd)
    cons_upd.update()
    # Core initialization
    self._w3af = w3afCore()
    self._w3af.plugins.set_plugins(['console'], 'output')
def setUp(self):
    """Create a core and one instance of every grep plugin."""
    self.url_str = 'http://moth/'
    self.url_inst = URL(self.url_str)

    self._w3af = w3afCore()
    self._plugins = [self._w3af.plugins.get_plugin_inst('grep', pname)
                     for pname in self._w3af.plugins.get_plugin_list('grep')]
def test_plugin_options(self):
    """Options set on a plugin are returned unchanged by the core."""
    w3af_core = w3afCore()
    plugin_inst = w3af_core.plugins.get_plugin_inst('crawl', 'web_spider')
    options_1 = plugin_inst.get_options()

    # Round-trip the options through set/get on the plugin manager
    w3af_core.plugins.set_plugin_options('crawl', 'web_spider', options_1)
    options_2 = w3af_core.plugins.get_plugin_options('crawl', 'web_spider')

    self.assertEqual(options_1, options_2)
def test_init_plugins(self):
    """init_plugins() instantiates exactly the enabled plugins."""
    w3af_core = w3afCore()
    enabled = ['web_spider']
    w3af_core.plugins.set_plugins(enabled, 'crawl')
    w3af_core.plugins.init_plugins()

    self.assertEqual(len(w3af_core.plugins.plugins['crawl']), 1,
                     w3af_core.plugins.plugins['crawl'])

    plugin_inst = list(w3af_core.plugins.plugins['crawl'])[0]
    self.assertEqual(plugin_inst.get_name(), 'web_spider')
def test_enable_all(self):
    """The 'all' directive enables every plugin of the given type."""
    w3af_core = w3afCore()
    enabled = ['all']
    w3af_core.plugins.set_plugins(enabled, 'crawl')
    w3af_core.plugins.init_plugins()

    self.assertEqual(set(w3af_core.plugins.get_enabled_plugins('crawl')),
                     set(w3af_core.plugins.get_plugin_list('crawl')))
    # Same length check guards against duplicates in the enabled list
    self.assertEqual(len(w3af_core.plugins.get_enabled_plugins('crawl')),
                     len(w3af_core.plugins.get_plugin_list('crawl')))
def start_scan_helper(target_urls, scan_profile, scan_info_setup):
    """
    Create a new instance of w3afCore, save it to SCANS and run core.start()

    :param target_urls: List of target URL strings to scan
    :param scan_profile: The contents of a profile configuration
    :param scan_info_setup: Event to set when the scan started
    :return: The instance of w3afCore.
    """
    scan_info = ScanInfo()
    SCANS[get_new_scan_id()] = scan_info
    scan_info.w3af_core = w3af_core = w3afCore()
    scan_info.target_urls = target_urls
    scan_info.output = RESTAPIOutput()
    # Signal the caller that the scan bookkeeping is in place
    scan_info_setup.set()

    scan_profile_file_name, profile_path = create_temp_profile(scan_profile)

    # Clear all current output plugins
    om.manager.set_output_plugins([])

    try:
        # Load the profile with the core and plugin config
        w3af_core.profiles.use_profile(scan_profile_file_name,
                                       workdir=profile_path)

        # Override the target that's set in the profile
        target_options = w3af_core.target.get_options()
        target_option = target_options['target']
        target_option.set_value([URL(u) for u in target_urls])
        w3af_core.target.set_options(target_options)

        w3af_core.plugins.init_plugins()

        # Add the REST API output plugin
        om.manager.set_output_plugin_inst(scan_info.output)

        # Start the scan!
        w3af_core.verify_environment()
        w3af_core.start()
    except Exception, e:
        om.out.error('Scan start failed, trace=%s' % format_exc())

        # Mark the scan as stopped/failed so the REST API reports it
        w3af_status = w3af_core.status
        w3af_status.stop()
        if w3af_status._err_code == '0':
            w3af_status._err_code = ErrorCode.SCANNER_START_FAILED
        scan_info.exception = e

        try:
            w3af_core.stop()
        except AttributeError:
            # Reduce some exceptions found during interpreter shutdown
            pass
def test_enable_dependency_same_type(self):
    """Enabling php_eggs pulls in its same-type dependency server_header."""
    w3af_core = w3afCore()
    enabled_infra = ['php_eggs', ]
    w3af_core.plugins.set_plugins(enabled_infra, 'infrastructure')
    w3af_core.plugins.init_plugins()

    # init_plugins() resolves dependencies, so server_header is added
    enabled_infra.append('server_header')
    self.assertEqual(
        set(w3af_core.plugins.get_enabled_plugins('infrastructure')),
        set(enabled_infra))
def test_get_all_enabled_plugins(self):
    """get_all_enabled_plugins() reflects what was set per plugin type."""
    w3af_core = w3afCore()
    enabled_audit = ['sqli', 'xss']
    enabled_grep = ['private_ip']
    w3af_core.plugins.set_plugins(enabled_audit, 'audit')
    w3af_core.plugins.set_plugins(enabled_grep, 'grep')

    all_enabled = w3af_core.plugins.get_all_enabled_plugins()

    self.assertEqual(enabled_audit, all_enabled['audit'])
    self.assertEqual(enabled_grep, all_enabled['grep'])
def test_use_profile(self):
    """Loading OWASP_TOP10 enables the expected well-known plugins."""
    w3af_core = w3afCore()
    w3af_core.profiles.use_profile('OWASP_TOP10', workdir='.')

    enabled_plugins = w3af_core.plugins.get_all_enabled_plugins()

    # assertIn gives a useful failure message (was assertTrue(x in y))
    self.assertIn('sqli', enabled_plugins['audit'])
    self.assertIn('credit_cards', enabled_plugins['grep'])
    self.assertIn('private_ip', enabled_plugins['grep'])
    self.assertIn('dns_wildcard', enabled_plugins['infrastructure'])
    self.assertIn('web_spider', enabled_plugins['crawl'])
def test_enable_not_web_spider_all(self):
    """'!web_spider' combined with 'all' enables everything except
    web_spider."""
    w3af_core = w3afCore()
    enabled = ['!web_spider', 'all']
    w3af_core.plugins.set_plugins(enabled, 'crawl')
    w3af_core.plugins.init_plugins()

    all_plugins = w3af_core.plugins.get_plugin_list('crawl')
    # Work on a copy so the list held by the plugin manager is not mutated
    all_plugins = all_plugins[:]
    all_plugins.remove('web_spider')

    self.assertEqual(set(w3af_core.plugins.get_enabled_plugins('crawl')),
                     set(all_plugins))
def test_spider_with_time_limit(self):
    """
    Run the same crawl twice with different max_discovery_time limits and
    verify that the longer limit discovers more URLs and takes longer.
    """
    #
    # First scan
    #
    cf.cf.save('max_discovery_time', 1)

    cfg = self._run_configs['basic']

    start_time = time.time()
    self._scan(self.target_url, cfg['plugins'])
    end_time = time.time()

    first_scan_time = end_time - start_time

    len_first_urls = len(self.kb.get_all_known_urls())
    self.assertGreater(len_first_urls, 500)
    self.assertLess(first_scan_time, 120)

    # Cleanup: fresh core and KB for the second scan
    self.w3afcore.quit()
    self.kb.cleanup()
    self.w3afcore = w3afCore()

    #
    # Second scan
    #
    cf.cf.save('max_discovery_time', 2)

    cfg = self._run_configs['basic']

    start_time = time.time()
    self._scan(self.target_url, cfg['plugins'])
    end_time = time.time()

    second_scan_time = end_time - start_time

    len_second_urls = len(self.kb.get_all_known_urls())
    self.assertGreater(len_second_urls, 900)
    self.assertGreater(len_second_urls, len_first_urls)
    self.assertLess(second_scan_time, 150)

    # The setup delta is the time it takes w3af to setup the scan, and
    # finish once the should_stop_scan method returns true. The 60 in the
    # next line is the initial scan time of 1 minute
    setup_delta = first_scan_time - 60

    # Scan should take at least the setup time, 2 minutes which is the time
    # delay and because the setup_delta might be a little bit off, we just
    # substract some seconds from it
    at_least_takes = setup_delta + 120 - 10

    self.assertGreater(second_scan_time, at_least_takes)
def test_enable_all_all(self):
    """Enabling 'all' for every plugin type enables the full plugin list
    of each type."""
    core = w3afCore()

    for ptype in core.plugins.get_plugin_types():
        core.plugins.set_plugins(['all', ], ptype)
    core.plugins.init_plugins()

    for ptype in core.plugins.get_plugin_types():
        enabled = core.plugins.get_enabled_plugins(ptype)
        available = core.plugins.get_plugin_list(ptype)

        self.assertEqual(set(enabled), set(available))
        self.assertEqual(len(enabled), len(available))
def test_enable_all_but_two(self):
    """Multiple '!name' exclusions combined with 'all' are honored."""
    w3af_core = w3afCore()
    enabled = ['all', '!web_spider', '!archive_dot_org']
    w3af_core.plugins.set_plugins(enabled, 'crawl')
    w3af_core.plugins.init_plugins()

    all_plugins = w3af_core.plugins.get_plugin_list('crawl')
    # Work on a copy so the list held by the plugin manager is not mutated
    all_plugins = all_plugins[:]
    all_plugins.remove('web_spider')
    all_plugins.remove('archive_dot_org')

    self.assertEqual(set(w3af_core.plugins.get_enabled_plugins('crawl')),
                     set(all_plugins))
def setUp(self):
    """Reset the KB, create a fresh core and (optionally) mock HTTP."""
    self.kb.cleanup()
    self.w3afcore = w3afCore()

    if self.MOCK_RESPONSES:
        httpretty.enable()

        # Register mocked responses for the target's scheme/host/port
        url = URL(self.target_url)
        domain = url.get_domain()
        proto = url.get_protocol()
        port = url.get_port()

        self._register_httpretty_uri(proto, domain, port)
def test_cant_start_new_thread_bug(self):
    """
    This tests that https://github.com/andresriancho/w3af/issues/56 was
    properly fixed after the change in how sqlite threads were managed.
    """
    core = w3afCore()
    valid, _ = core.profiles.get_profile_list('.')

    # Repeatedly load every valid profile; before the fix this exhausted
    # the thread limit
    for _ in xrange(10):
        for profile in valid:
            core.profiles.use_profile(profile.get_name(), workdir='.')
def setUp(self):
    """Reset the KB, create a fresh core and (optionally) mock HTTP,
    validating the configured target URL first."""
    self.kb.cleanup()
    self.w3afcore = w3afCore()

    # Counters used by request-callback based tests
    self.request_callback_call_count = 0
    self.request_callback_match = 0

    if self.MOCK_RESPONSES:
        httpretty.reset()
        httpretty.enable()

        try:
            url = URL(self.target_url)
        except ValueError, ve:
            msg = ('When using MOCK_RESPONSES you need to set the'
                   ' target_url attribute to a valid URL, exception was:'
                   ' "%s".')
            raise Exception(msg % ve)

        domain = url.get_domain()
        proto = url.get_protocol()
        port = url.get_port()

        self._register_httpretty_uri(proto, domain, port)
def test_kb_list_shells_sqlmap_2181(self):
    """
    A SQLMapShell stored in the KB must round-trip through
    kb.get_all_shells() with its core references re-bound, including the
    one held by the sqlmap wrapper's proxy.

    Also very related with test_pickleable_shells
    :see: https://github.com/andresriancho/w3af/issues/2181
    """
    w3af_core = w3afCore()
    target = Target(URL('http://w3af.org/'))
    sqlmap_wrapper = SQLMapWrapper(target, w3af_core.uri_opener)

    sqlmap_shell = SQLMapShell(MockVuln(), w3af_core.uri_opener,
                               w3af_core.worker_pool, sqlmap_wrapper)
    kb.append('a', 'b', sqlmap_shell)

    shells = kb.get_all_shells(w3af_core=w3af_core)
    self.assertEqual(len(shells), 1)
    unpickled_shell = shells[0]

    self.assertEqual(sqlmap_shell, unpickled_shell)
    # The opener/pool must be the live core objects, not copies
    self.assertIs(unpickled_shell._uri_opener, w3af_core.uri_opener)
    self.assertIs(unpickled_shell.worker_pool, w3af_core.worker_pool)
    self.assertIs(unpickled_shell.sqlmap.proxy._uri_opener,
                  w3af_core.uri_opener)

    w3af_core.quit()
def test_kb_list_shells_eval_2181(self):
    """
    An EvalShell stored in the KB must round-trip through
    kb.get_all_shells() with its core references re-bound and its
    shellcode generator preserved.

    :see: https://github.com/andresriancho/w3af/issues/2181
    """
    w3af_core = w3afCore()

    shellcodes = get_shell_code('php', 'ls')
    shellcode_generator = shellcodes[0][2]

    shell = EvalShell(MockVuln(), w3af_core.uri_opener,
                      w3af_core.worker_pool, shellcode_generator)
    kb.append('a', 'b', shell)

    shells = kb.get_all_shells(w3af_core=w3af_core)
    self.assertEqual(len(shells), 1)
    unpickled_shell = shells[0]

    self.assertEqual(shell, unpickled_shell)
    # The opener/pool must be the live core objects, not copies
    self.assertIs(unpickled_shell._uri_opener, w3af_core.uri_opener)
    self.assertIs(unpickled_shell.worker_pool, w3af_core.worker_pool)
    self.assertEqual(unpickled_shell.shellcode_generator.args,
                     shell.shellcode_generator.args)

    w3af_core.quit()
def test_kb_list_shells_file_read_2181(self):
    """
    A FileReaderShell stored in the KB must round-trip through
    kb.get_all_shells() with its core references re-bound and its
    header/footer lengths preserved.

    :see: https://github.com/andresriancho/w3af/issues/2181
    """
    w3af_core = w3afCore()

    header_len, footer_len = 1, 1
    vuln = MockVuln()

    shell = FileReaderShell(vuln, w3af_core.uri_opener,
                            w3af_core.worker_pool, header_len, footer_len)
    kb.append('a', 'b', shell)

    shells = kb.get_all_shells(w3af_core=w3af_core)
    self.assertEqual(len(shells), 1)
    unpickled_shell = shells[0]

    self.assertEqual(shell, unpickled_shell)
    # The opener/pool must be the live core objects, not copies
    self.assertIs(unpickled_shell._uri_opener, w3af_core.uri_opener)
    self.assertIs(unpickled_shell.worker_pool, w3af_core.worker_pool)
    self.assertEqual(unpickled_shell._header_length, shell._header_length)
    self.assertEqual(unpickled_shell._footer_length, shell._footer_length)

    w3af_core.quit()
def test_kb_list_shells_os_commanding_2181(self):
    """
    An OSCommandingShell stored in the KB must round-trip through
    kb.get_all_shells() with its core references re-bound and its exploit
    strategy preserved.

    :see: https://github.com/andresriancho/w3af/issues/2181
    """
    w3af_core = w3afCore()

    vuln = MockVuln()
    vuln['separator'] = '&'
    vuln['os'] = 'linux'
    strategy = BasicExploitStrategy(vuln)

    shell = OSCommandingShell(strategy, w3af_core.uri_opener,
                              w3af_core.worker_pool)
    kb.append('a', 'b', shell)

    shells = kb.get_all_shells(w3af_core=w3af_core)
    self.assertEqual(len(shells), 1)
    unpickled_shell = shells[0]

    self.assertEqual(shell, unpickled_shell)
    # The opener/pool must be the live core objects, not copies
    self.assertIs(unpickled_shell._uri_opener, w3af_core.uri_opener)
    self.assertIs(unpickled_shell.worker_pool, w3af_core.worker_pool)
    self.assertEqual(unpickled_shell.strategy.vuln, vuln)

    w3af_core.quit()
def test_should_grep_speed(self):
    """
    This method tests the performance of the should_grep method

    This method is usually run as:

        kernprof -o nose.lprof -v -l nosetests -s -v
        w3af/core/controllers/core_helpers/consumers/tests/test_grep.py

    Remember to:

        * Specify a valid file in HTTP_FILE (generated during a scan)
        * Decorate the methods you want to analyze with @profile
    """
    # Silently skip when no captured HTTP traffic file is available
    if not os.path.exists(self.HTTP_FILE):
        return

    grep_plugins = [code_disclosure()]
    core = w3afCore()
    grep_consumer = grep(grep_plugins, core)

    for count, (request, response) in enumerate(
            iter_http_request_responses(self.HTTP_FILE)):

        # Lazily set target_domains from the first request seen
        if not cf.cf.get('target_domains'):
            cf.cf.save('target_domains',
                       {request.get_uri().get_domain()})

        grep_consumer.should_grep(request, response)

        # Periodically repeat the same call to exercise the cache path
        if count % self.CACHE_TEST_EVERY == 0:
            for _ in xrange(self.CACHE_TESTS):
                grep_consumer.should_grep(request, response)

        if count >= self.MAX_REQUEST_RESPONSE:
            break
def test_serialize_deserialize(self):
    """ExceptionData must survive a cPickle round-trip: the unpickled
    instance produces the same JSON as the original."""
    # Raise a real exception so we have a live traceback to store
    try:
        raise KeyError
    except Exception, e:
        except_type, except_class, tb = sys.exc_info()

    enabled_plugins = '{}'
    fr = self.get_fuzzable_request()

    core = w3afCore()
    status = CoreStatus(core)
    status.set_running_plugin('audit', 'sqli', log=False)
    status.set_current_fuzzable_request('audit', fr)

    # store_tb=False: the traceback object itself is not picklable
    exception_data = ExceptionData(status, e, tb, enabled_plugins,
                                   store_tb=False)

    pickled_ed = cPickle.dumps(exception_data)
    unpickled_ed = cPickle.loads(pickled_ed)

    self.assertEqual(exception_data.to_json(), unpickled_ed.to_json())
def test_render_simple(self):
    """Render ScanStatus to XML for a freshly started scan and compare
    against the expected markup verbatim."""
    w3af_core = w3afCore()
    w3af_core.status.start()
    w3af_core.status.set_running_plugin('crawl', 'web_spider')

    status = w3af_core.status.get_status_as_dict()
    total_urls = 150

    x = xml_file()
    scan_status = ScanStatus(x._get_jinja2_env(), status, total_urls)
    xml = scan_status.to_string()

    self.maxDiff = None

    # Expected template output; must match byte-for-byte
    expected = (u'<scan-status>\n'
                u' <status>Running</status>\n'
                u' <is-paused>False</is-paused>\n'
                u' <is-running>True</is-running>\n'
                u'\n'
                u' <active-plugin>\n'
                u' <crawl>web_spider</crawl>\n'
                u' <audit>None</audit>\n'
                u' </active-plugin>\n'
                u'\n'
                u' <current-request>\n'
                u' <crawl>None</crawl>\n'
                u' <audit>None</audit>\n'
                u' </current-request>\n'
                u'\n'
                u' <queues>\n'
                u' <crawl>\n'
                u' <input-speed>None</input-speed>\n'
                u' <output-speed>None</output-speed>\n'
                u' <length>None</length>\n'
                u' </crawl>\n'
                u'\n'
                u' <audit>\n'
                u' <input-speed>None</input-speed>\n'
                u' <output-speed>None</output-speed>\n'
                u' <length>None</length>\n'
                u' </audit>\n'
                u'\n'
                u' <grep>\n'
                u' <input-speed>None</input-speed>\n'
                u' <output-speed>None</output-speed>\n'
                u' <length>None</length>\n'
                u' </grep>\n'
                u' </queues>\n'
                u'\n'
                u' <eta>\n'
                u' <crawl>None</crawl>\n'
                u' <audit>None</audit>\n'
                u' <grep>None</grep>\n'
                u' </eta>\n'
                u'\n'
                u' <rpm>0</rpm>\n'
                u'\n'
                u' <total-urls>150</total-urls>\n'
                u'</scan-status>')

    self.assertEqual(xml, expected)
    self.assertValidXML(xml)
def test_set_plugins_negative_without_raise(self):
    """With raise_on_error=False unknown plugin names are returned
    instead of raising."""
    core = w3afCore()
    enabled = ['fake', ]

    unknown = core.plugins.set_plugins(enabled, 'output',
                                       raise_on_error=False)
    self.assertEqual(enabled, unknown)

    # Must not raise even though 'fake' does not exist
    core.plugins.init_plugins()
def test_set_plugins_negative(self):
    """Enabling an unknown plugin raises ValueError by default."""
    core = w3afCore()
    with self.assertRaises(ValueError):
        core.plugins.set_plugins(['fake', ], 'output')
def test_set_plugins(self):
    """set_plugins() round-trips through get_enabled_plugins()."""
    w3af_core = w3afCore()
    enabled = ['sqli', ]
    w3af_core.plugins.set_plugins(enabled, 'audit')
    retrieved = w3af_core.plugins.get_enabled_plugins('audit')
    self.assertEqual(enabled, retrieved)
def test_get_plugin_inst(self):
    """get_plugin_inst() returns an instance with the requested name."""
    w3af_core = w3afCore()
    plugin_inst = w3af_core.plugins.get_plugin_inst('audit', 'sqli')
    self.assertEqual(plugin_inst.get_name(), 'sqli')
def test_get_plugin_listAudit(self):
    """The audit plugin list contains at least the well-known plugins."""
    core = w3afCore()
    audit_plugins = set(core.plugins.get_plugin_list('audit'))
    self.assertTrue(audit_plugins.issuperset({'sqli', 'xss', 'eval'}))
def start_scan():
    """
    Starts a new w3af scan

    Receive a JSON containing:
        - A list with the target URLs
        - The profile (eg. the content of fast_scan.pw3af)

    :return: A JSON containing:
        - The URL to the newly created scan (eg. /scans/1)
        - The newly created scan ID (eg. 1)
    """
    if not request.json or not 'scan_profile' in request.json:
        abort(400, 'Expected scan_profile in JSON object')

    if not request.json or not 'target_urls' in request.json:
        abort(400, 'Expected target_urls in JSON object')

    scan_profile = request.json['scan_profile']
    target_urls = request.json['target_urls']

    #
    # First make sure that there are no other scans running, remember that
    # this REST API is an MVP and we can only run one scan at the time
    # (for now)
    #
    scan_infos = SCANS.values()
    if not all([si is None for si in scan_infos]):
        abort(400,
              'This version of the REST API does not support'
              ' concurrent scans. Remember to DELETE finished scans'
              ' before starting a new one.')

    #
    # Before trying to start a new scan we verify that the scan profile is
    # valid and return an informative error if it's not
    #
    scan_profile_file_name, profile_path = create_temp_profile(scan_profile)
    w3af_core = w3afCore()

    try:
        w3af_core.profiles.use_profile(scan_profile_file_name,
                                       workdir=profile_path)
    except BaseFrameworkException as bfe:
        abort(400, str(bfe))
    finally:
        # The temp profile was only needed for validation; remove it
        remove_temp_profile(scan_profile_file_name)

    #
    # Now that we know that the profile is valid I verify the scan target
    # info
    #
    if target_urls is None or not len(target_urls):
        abort(400, 'No target URLs specified')

    # Each target string must parse as a URL
    for target_url in target_urls:
        try:
            URL(target_url)
        except ValueError:
            abort(400, 'Invalid URL: "%s"' % target_url)

    target_options = w3af_core.target.get_options()
    target_option = target_options['target']
    try:
        target_option.set_value([URL(u) for u in target_urls])
        w3af_core.target.set_options(target_options)
    except BaseFrameworkException as bfe:
        abort(400, str(bfe))

    # Register the scan bookkeeping before launching the worker
    scan_id = get_new_scan_id()
    scan_info = ScanInfo()
    scan_info.w3af_core = w3af_core
    scan_info.target_urls = target_urls
    scan_info.profile_path = scan_profile_file_name
    scan_info.output = RESTAPIOutput()
    SCANS[scan_id] = scan_info

    #
    # Finally, start the scan in a different thread
    #
    args = (scan_info, )
    t = Process(target=start_scan_helper, name='ScanThread', args=args)
    t.daemon = True
    t.start()

    return jsonify({'message': 'Success',
                    'id': scan_id,
                    'href': '/scans/%s' % scan_id}), 201
def setUp(self):
    # Each test gets its own fresh core instance
    self.w3afcore = w3afCore()
def test_get_plugin_types(self):
    """get_plugin_types() returns exactly the known plugin categories."""
    w3af_core = w3afCore()
    plugin_types = w3af_core.plugins.get_plugin_types()
    expected = set(['grep', 'output', 'mangle', 'audit', 'crawl',
                    'evasion', 'bruteforce', 'auth', 'infrastructure'])
    self.assertEqual(set(plugin_types), expected)
def test_multiple_instances(self):
    """Just making sure nothing crashes if I have more than 1 instance of
    w3afCore"""
    instances = [w3afCore() for _ in xrange(5)]
def test_remove_profile_not_exists(self):
    """Removing a nonexistent profile raises BaseFrameworkException."""
    core = w3afCore()
    with self.assertRaises(BaseFrameworkException):
        core.profiles.remove_profile('not-exists')
def setUp(self):
    """Create the w3afCore instance used by the profile tests."""
    super(TestCoreProfiles, self).setUp()
    self.core = w3afCore()
def start_w3af_core(exception_handler):
    """
    Instantiate a w3afCore, routing any construction failure to the
    given handler.

    :param exception_handler: Callable invoked with the exception when
                              w3afCore() raises; if falsy, failures are
                              silently ignored.
    """
    try:
        w3afCore()
    except Exception, e:
        if exception_handler:
            exception_handler(e)
def test_get_plugin_listCrawl(self):
    """The crawl plugin list contains at least the well-known plugins."""
    core = w3afCore()
    crawl_plugins = set(core.plugins.get_plugin_list('crawl'))
    self.assertTrue(crawl_plugins.issuperset({'web_spider', 'spider_man'}))
def setUp(self):
    # BaseConsumer(consumer_plugins, w3af_core, thread_name): no plugins,
    # a fresh core and a recognizable thread name for assertions
    self.bc = BaseConsumer([], w3afCore(), 'TestConsumer')
def setUp(self):
    # Start every test from a clean knowledge base and a fresh core
    kb.cleanup()
    self.w3af = w3afCore()
def test_plugin_options_invalid(self):
    """Passing None as the plugin options raises TypeError."""
    core = w3afCore()
    with self.assertRaises(TypeError):
        core.plugins.set_plugin_options('crawl', 'web_spider', None)
def test_render_simple(self):
    """Render ScanStatus (including the known-URLs tree) to XML for a
    freshly started scan and compare against the expected markup
    verbatim."""
    w3af_core = w3afCore()
    w3af_core.status.start()
    w3af_core.status.set_running_plugin('crawl', 'web_spider')

    status = w3af_core.status.get_status_as_dict()

    # Build a small URL tree: nested directories plus leaf files
    known_urls = URLTree()
    known_urls.add_url(URL('http://w3af.org/'))
    known_urls.add_url(URL('http://w3af.org/foo/'))
    known_urls.add_url(URL('http://w3af.org/foo/abc.html'))
    known_urls.add_url(URL('http://w3af.org/foo/bar/'))
    known_urls.add_url(URL('http://w3af.org/123.txt'))

    total_urls = 150

    x = xml_file()
    scan_status = ScanStatus(x._get_jinja2_env(), status, total_urls,
                             known_urls)
    xml = scan_status.to_string()

    self.maxDiff = None

    # Expected template output; must match byte-for-byte
    expected = (
        u'<scan-status>\n'
        u' <status>Running</status>\n'
        u' <is-paused>False</is-paused>\n'
        u' <is-running>True</is-running>\n'
        u'\n'
        u' <active-plugin>\n'
        u' <crawl>web_spider</crawl>\n'
        u' <audit>None</audit>\n'
        u' </active-plugin>\n'
        u'\n'
        u' <current-request>\n'
        u' <crawl>None</crawl>\n'
        u' <audit>None</audit>\n'
        u' </current-request>\n'
        u'\n'
        u' <queues>\n'
        u' <crawl>\n'
        u' <input-speed>0</input-speed>\n'
        u' <output-speed>0</output-speed>\n'
        u' <length>0</length>\n'
        u' <processed-tasks>0</processed-tasks>\n'
        u' </crawl>\n'
        u'\n'
        u' <audit>\n'
        u' <input-speed>0</input-speed>\n'
        u' <output-speed>0</output-speed>\n'
        u' <length>0</length>\n'
        u' <processed-tasks>0</processed-tasks>\n'
        u' </audit>\n'
        u'\n'
        u' <grep>\n'
        u' <input-speed>0</input-speed>\n'
        u' <output-speed>0</output-speed>\n'
        u' <length>0</length>\n'
        u' <processed-tasks>None</processed-tasks>\n'
        u' </grep>\n'
        u' </queues>\n'
        u'\n'
        u' <eta>\n'
        u' <crawl>0 seconds.</crawl>\n'
        u' <audit>0 seconds.</audit>\n'
        u' <grep>0 seconds.</grep>\n'
        u' <all>0 seconds.</all>\n'
        u' </eta>\n'
        u'\n'
        u' <rpm>0</rpm>\n'
        u' <sent-request-count>0</sent-request-count>\n'
        u' <progress>100</progress>\n'
        u'\n'
        u' <total-urls>150</total-urls>\n'
        u' <known-urls> \n'
        u' <node url="http://w3af.org">\n'
        u' \n'
        u' <node url="foo">\n'
        u' \n'
        u' <node url="bar" /> \n'
        u' <node url="abc.html" />\n'
        u' \n'
        u' </node> \n'
        u' <node url="123.txt" />\n'
        u' \n'
        u' </node>\n'
        u' </known-urls>\n'
        u'</scan-status>')

    self.assertEqual(xml, expected)
    self.assertValidXML(xml)