class TestDotNetErrorsWithColonInURL(PluginTest):
    """
    Regression test: dot_net_errors must not blow up when url-joining a
    filename that contains an url-encoded colon (sample%3a.aspx).
    """

    target_url = 'http://httpretty'

    MOCK_RESPONSES = [
        MockResponse('http://httpretty/',
                     body='<a href="sample%3a.aspx">sample</a>',
                     method='GET',
                     status=200),

        MockResponse('http://httpretty/sample.aspx',
                     body='Hello world',
                     method='GET',
                     status=200),

        MockResponse('http://httpretty/sample~.aspx',
                     body='<h2> <i>Runtime Error</i> </h2></span>...',
                     method='GET',
                     status=200),
    ]

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'infrastructure': (PluginConfig('dot_net_errors'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward',
                                        True,
                                        PluginConfig.BOOL)),)
            }
        }
    }

    def test_dot_net_errors_with_colon_in_url(self):
        #
        # This test is here to check that no exceptions are raised in
        # dot_net_errors._generate_urls() while url-joining the filename that
        # contains the colon (sample%3a.aspx above)
        #
        run_cfg = self._run_configs['cfg']
        self._scan(run_cfg['target'], run_cfg['plugins'])

        # The mocked responses do not contain a real .NET error page that
        # matches, so the KB must stay empty.
        infos = self.kb.get('dot_net_errors', 'dot_net_errors')
        self.assertEqual(len(infos), 0, infos)
class TestCORSOriginScan(PluginTest):
    """Scan the moth CORS test directory and verify the two expected vulns."""

    # Test scripts host/port and web context root
    target_url = 'http://moth/w3af/audit/cors/'

    # Originator for tests cases
    originator = 'http://moth/'

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'audit': (PluginConfig('cors_origin',
                                       ('origin_header_value',
                                        originator,
                                        PluginConfig.STR),
                                       ('expected_http_response_code',
                                        200,
                                        PluginConfig.INT)),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward',
                                        True,
                                        PluginConfig.BOOL)),),
            }
        }
    }

    @attr('ci_fails')
    def test_scan(self):
        run_cfg = self._run_configs['cfg']
        self._scan(run_cfg['target'], run_cfg['plugins'])

        vulns = self.kb.get('cors_origin', 'cors_origin')
        self.assertEqual(2, len(vulns), vulns)

        # Both findings carry the same name
        expected_names = ['Insecure Access-Control-Allow-Origin',
                          'Insecure Access-Control-Allow-Origin']
        self.assertEqual([v.get_name() for v in vulns], expected_names)

        # And both live under the scan target
        for vuln in vulns:
            self.assertTrue(
                vuln.get_url().url_string.startswith(self.target_url))
class GenericFormAuthTest(PluginTest):
    """Shared fixtures for the form_auth bruteforce tests."""

    BASE_PATH = os.path.join(ROOT_PATH, 'plugins', 'tests', 'bruteforce')

    # Wordlists: one user file that should yield no hits, one that should,
    # and a small password list shared by both.
    small_users_negative = os.path.join(BASE_PATH, 'small-users-negative.txt')
    small_users_positive = os.path.join(BASE_PATH, 'small-users-positive.txt')
    small_passwords = os.path.join(BASE_PATH, 'small-passwords.txt')

    # NOTE(review): users_file is declared as STR while passwd_file uses
    # INPUT_FILE — looks inconsistent, confirm against form_auth's options.
    basic_config = {
        'crawl': (PluginConfig('web_spider',
                               ('only_forward',
                                True,
                                PluginConfig.BOOL)),),

        'bruteforce': (PluginConfig('form_auth',
                                    ('users_file',
                                     small_users_positive,
                                     PluginConfig.STR),
                                    ('passwd_file',
                                     small_passwords,
                                     PluginConfig.INPUT_FILE),
                                    ('use_profiling',
                                     False,
                                     PluginConfig.BOOL)),),
    }
class TestDetailedBasic(PluginTest):
    """
    Authenticate with the `detailed` auth plugin and verify that the XSS
    behind the login form is found (i.e. the session was actually used).
    """

    target_url = get_moth_http('/auth/auth_1/')

    auth_url = URL(target_url + 'login_form.py')
    check_url = URL(target_url + 'post_auth_xss.py')
    check_string = 'or read your input'
    data_format = '%u=%U&%p=%P&Login=Login'

    _run_config = {
        'target': target_url,
        'plugins': {
            'crawl': (PluginConfig('web_spider',
                                   ('only_forward',
                                    True,
                                    PluginConfig.BOOL),
                                   ('ignore_regex',
                                    '.*logout.*',
                                    PluginConfig.STR)),),
            'audit': (PluginConfig('xss',),),
            'auth': (PluginConfig('detailed',
                                  ('username',
                                   '*****@*****.**',
                                   PluginConfig.STR),
                                  ('password',
                                   'passw0rd',
                                   PluginConfig.STR),
                                  ('username_field',
                                   'username',
                                   PluginConfig.STR),
                                  ('password_field',
                                   'password',
                                   PluginConfig.STR),
                                  ('data_format',
                                   data_format,
                                   PluginConfig.STR),
                                  ('auth_url',
                                   auth_url,
                                   PluginConfig.URL),
                                  ('method',
                                   'POST',
                                   PluginConfig.STR),
                                  ('check_url',
                                   check_url,
                                   PluginConfig.URL),
                                  ('check_string',
                                   check_string,
                                   PluginConfig.STR),
                                  ('follow_redirects',
                                   False,
                                   PluginConfig.BOOL)),),
        }
    }

    def test_post_auth_xss(self):
        self._scan(self._run_config['target'], self._run_config['plugins'])

        xss_vulns = self.kb.get('xss', 'xss')
        self.assertEqual(len(xss_vulns), 1, xss_vulns)

        found = xss_vulns[0]
        self.assertEqual(found.get_name(), 'Cross site scripting vulnerability')
        self.assertEqual(found.get_token_name(), 'text')
class TestClamAVScan(PluginTest):
    """Crawl the clamav test directory and verify the EICAR samples flag."""

    target_url = get_moth_http('/grep/clamav/')

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'grep': (PluginConfig('clamav'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward',
                                        True,
                                        PluginConfig.BOOL)),)
            }
        },
    }

    def setUp(self):
        self.plugin = clamav()
        super(TestClamAVScan, self).setUp()

    def tearDown(self):
        super(TestClamAVScan, self).tearDown()
        self.plugin.end()

    def test_found_vuln(self):
        """
        Test to validate case in which malware is identified while crawling.
        """
        # Configure and run test case
        run_cfg = self._run_configs['cfg']
        self._scan(run_cfg['target'], run_cfg['plugins'])

        findings = kb.kb.get('clamav', 'malware')
        self.assertEqual(len(findings), 4)

        # Every EICAR variant must be reported with the same name/desc
        expected_files = ('eicar.com.txt',
                          'eicar.com',
                          'eicarcom2.zip',
                          'eicar_com.zip')

        for finding in findings:
            self.assertIn(finding.get_url().get_file_name(), expected_files)
            self.assertEqual(finding.get_name(), 'Malware identified')
            self.assertIn('ClamAV identified malware', finding.get_desc())
class TestWebDiff(PluginTest):
    """Compare a local directory against the remote web_diff test site."""

    target_url = 'http://moth/w3af/crawl/web_diff/'

    local_dir = os.path.join(ROOT_PATH, 'plugins', 'tests', 'crawl',
                             'web_diff')

    _run_configs = {
        'basic': {
            'target': target_url,
            'plugins': {
                'crawl': (PluginConfig('web_diff',
                                       ('content', True, PluginConfig.BOOL),
                                       ('local_dir',
                                        local_dir,
                                        PluginConfig.STR),
                                       ('remote_url_path',
                                        URL(target_url),
                                        PluginConfig.URL),
                                       ('banned_ext',
                                        'php,foo,bar',
                                        PluginConfig.LIST)),)
            }
        },
    }

    @attr('ci_fails')
    def test_compare(self):
        run_cfg = self._run_configs['basic']

        # Capture everything the plugin prints through the output manager
        with patch('w3af.plugins.crawl.web_diff.om.out') as om_mock:
            self._scan(run_cfg['target'], run_cfg['plugins'])

            expected_calls = [
                call.information('The following files exist in the local'
                                 ' directory and in the remote server:'),
                call.information(
                    u'- http://moth/w3af/crawl/web_diff/456.html'),
                call.information(
                    u'- http://moth/w3af/crawl/web_diff/exclude.php'),
                call.information(
                    u'- http://moth/w3af/crawl/web_diff/123.html'),
                call.information(
                    u'- http://moth/w3af/crawl/web_diff/index.html'),
                call.information('The following files exist in the local'
                                 ' directory and in the remote server and'
                                 ' their contents match:'),
                call.information(
                    u'- http://moth/w3af/crawl/web_diff/123.html'),
                call.information(
                    u'- http://moth/w3af/crawl/web_diff/index.html'),
                call.information("The following files exist in the local"
                                 " directory and in the remote server but"
                                 " their contents don't match:"),
                call.information(
                    u'- http://moth/w3af/crawl/web_diff/456.html'),
                call.information('Match files: 4 of 4'),
                call.information('Match contents: 2 of 3')
            ]

            for expected_call in expected_calls:
                self.assertIn(expected_call, om_mock.mock_calls)
class TestHTMLCommentsIntegration(PluginTest):
    """
    One comment hides HTML code and another contains an interesting keyword;
    the html_comments grep plugin must report each exactly once.
    """

    target_url = 'http://httpretty'

    MOCK_RESPONSES = [
        MockResponse('http://httpretty/',
                     body=('<!-- secret password123 -->'
                           '<!-- <a href="/x"></a> -->'),
                     method='GET',
                     status=200)
    ]

    _run_configs = {
        'cfg1': {
            'target': target_url,
            'plugins': {
                'grep': (PluginConfig('html_comments'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward',
                                        True,
                                        PluginConfig.BOOL)),)
            }
        }
    }

    def test_found_vuln(self):
        run_cfg = self._run_configs['cfg1']
        self._scan(run_cfg['target'], run_cfg['plugins'])

        html_hides = self.kb.get('html_comments', 'html_comment_hides_html')
        interesting = self.kb.get('html_comments', 'interesting_comments')

        self.assertEqual(1, len(html_hides), html_hides)
        self.assertEqual(1, len(interesting), interesting)

        self.assertEqual(interesting[0].get_name(),
                         'Interesting HTML comment')
        self.assertEqual(html_hides[0].get_name(),
                         'HTML comment contains HTML code')
class TestLang(PluginTest):
    """Verify that the lang grep plugin identifies es/en pages correctly."""

    langs_url = get_moth_http('/grep/lang/%s.html')

    _run_configs = {
        # Point the scanner directly at a single language page
        'direct': {
            'target': None,
            'plugins': {
                'grep': (PluginConfig('lang'),),
            }
        },
        # Reach the language pages through the spider
        'crawl': {
            'target': get_moth_http('/grep/'),
            'plugins': {
                'grep': (PluginConfig('lang'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward',
                                        True,
                                        PluginConfig.BOOL)),)
            }
        }
    }

    def _scan_and_read_lang(self, config_name, lang_code):
        # Shared driver: scan the page for lang_code and return the KB result
        run_cfg = self._run_configs[config_name]
        self._scan(self.langs_url % lang_code, run_cfg['plugins'])
        return self.kb.raw_read('lang', 'lang')

    def test_id_es(self):
        self.assertEqual('es', self._scan_and_read_lang('direct', 'es'))

    def test_id_en(self):
        self.assertEqual('en', self._scan_and_read_lang('direct', 'en'))

    def test_id_en_crawl(self):
        self.assertEqual('en', self._scan_and_read_lang('crawl', 'en'))
class TestSVN(PluginTest):
    """
    find_dvcs must parse a leaked .svn/wc.db, discover the pristine copy it
    references, and extract further paths (seris/changelog.rst) from it.
    """

    # Read the sample subversion working-copy database once at class
    # definition time. Use a context manager so the descriptor is closed
    # immediately — the original file(...).read() leaked it until GC.
    # 'rb' because wc.db is a binary SQLite file (str == bytes on Python 2).
    with open(os.path.join(ROOT_PATH, 'plugins', 'tests', 'crawl',
                           'find_dvcs', 'sample-wc.db'), 'rb') as _wc_db_file:
        WC_DB = _wc_db_file.read()
    del _wc_db_file

    SECRET = 'Secret contents here!'

    MOCK_RESPONSES = [
        MockResponse('http://mock/', 'root'),
        MockResponse('http://mock/.svn/wc.db', WC_DB),
        MockResponse(
            'http://mock/.svn/pristine/96/96acedb8cc77c893b90d1ce37c7119fd0c0fba00.svn-base',
            SECRET),
        MockResponse('http://mock/seris/changelog.rst', SECRET)
    ]

    target_url = 'http://mock'

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'crawl': (
                    PluginConfig('find_dvcs'),
                    PluginConfig('web_spider',
                                 ('only_forward', True, PluginConfig.BOOL)),
                )
            }
        }
    }

    def test_wc_db(self):
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        # Every mocked URL — and nothing else — must be known to the KB
        url_list = kb.kb.get_all_known_urls()
        self.assertEqual({u.url_string for u in url_list},
                         {m.url for m in self.MOCK_RESPONSES})
class TestOldMothBlindSQLI(PluginTest):
    """Full blind_sqli run against the old moth test suite."""

    base_path = '/w3af/audit/blind_sql_injection/'
    target_url = get_w3af_moth_http(base_path)

    config = {
        'audit': (PluginConfig('blind_sqli'),),
        'crawl': (PluginConfig('web_spider',
                               ('only_forward',
                                True,
                                PluginConfig.BOOL),
                               ('ignore_regex',
                                '.*(asp|aspx)',
                                PluginConfig.STR)),),
    }

    def test_found_blind_sqli_old_moth(self):
        # (file, parameter) pairs blind_sqli is expected to flag
        expected_path_param = {
            (u'bsqli_string.php', u'email'),
            (u'bsqli_integer.php', u'id'),
            (u'forms/data_receptor.php', u'user'),
            (u'completely_bsqli_single.php', u'email'),
            (u'bsqli_string_rnd.php', u'email'),
            (u'completely_bsqli_double.php', u'email'),
            (u'completely_bsqli_integer.php', u'id'),
        }

        ok_to_miss = {
            # Just the HTML to have a form
            u'forms/',
            u'forms/test_forms.html',

            # False positive tests, these must NOT be detected by blind_sqli
            u'random_500_lines.php',
            u'random_500_lines_static.php',
            u'random_50_lines.php',
            u'random_50_lines_static.php',
            u'random_5_lines.php',
            u'random_5_lines_static.php',
            u'delay_random.php',
        }

        skip_startwith = set()
        kb_addresses = {('blind_sqli', 'blind_sqli')}

        self._scan_assert(self.config,
                          expected_path_param,
                          ok_to_miss,
                          kb_addresses,
                          skip_startwith)
def test_false_positive(self):
    """
    Run every audit plugin against the target and assert that none of them
    reports a vulnerability or an info, i.e. the target produces no false
    positives.
    """
    raise SkipTest('FIXME: This test takes too long to run.')

    audit_plugin_names = self.w3afcore.plugins.get_plugin_list('audit')

    for audit_plugin in audit_plugin_names:
        run_config = {
            'target': self.target_url,
            'plugins': {
                'audit': (PluginConfig(audit_plugin),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward',
                                        True,
                                        PluginConfig.BOOL)),)
            }
        }

        # I tried to do this in the right way, with nosetest test
        # generators, but they have a bug with unittest.TestCase
        self.setUp()

        self._scan(run_config['target'], run_config['plugins'])

        vulns = [str(v) for v in self.kb.get_all_vulns()]
        infos = [str(i) for i in self.kb.get_all_infos()]

        msg_v = 'audit.%s found a vulnerability in "%s"' % (
            audit_plugin, ','.join(vulns))
        # Bug fix: this message used to say "vulnerability" even though it
        # reports infos — that made failures misleading to debug.
        msg_i = 'audit.%s found an info in "%s"' % (
            audit_plugin, ','.join(infos))

        self.assertEqual(len(vulns), 0, msg_v)
        self.assertEqual(len(infos), 0, msg_i)

        # Same nosetest generator workaround as setUp() above
        self.tearDown()
class TestRetireJSNotAnalyzeHTMLContentType(PluginTest):
    """
    retirejs must NOT analyze a response whose content type is text/html,
    even if the body is a known-vulnerable JavaScript library.
    """

    target_url = 'http://httpretty'

    # This is a vulnerable version of JQuery
    JQUERY_VULN = os.path.join(ROOT_PATH, 'plugins', 'tests', 'grep',
                               'retirejs', 'jquery.js')

    # Read the script body once and close the handle immediately — the
    # original file(...).read() leaked the file descriptor until GC.
    with open(JQUERY_VULN) as _jquery_file:
        JQUERY_VULN_BODY = _jquery_file.read()
    del _jquery_file

    INDEX = '<html><script src="/js/jquery.js"></script></html>'

    MOCK_RESPONSES = [MockResponse('http://httpretty/',
                                   body=INDEX,
                                   method='GET',
                                   status=200),

                      # Note the (wrong on purpose) text/html content type
                      MockResponse('http://httpretty/js/jquery.js',
                                   body=JQUERY_VULN_BODY,
                                   method='GET',
                                   status=200,
                                   content_type='text/html'),
                      ]

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'grep': (PluginConfig('retirejs'),),
                'crawl': (
                    PluginConfig('web_spider',
                                 ('only_forward', True, PluginConfig.BOOL)),
                )
            }
        }
    }

    def test_is_vulnerable_not_detected(self):
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        # No findings: the HTML content type must prevent JS analysis
        vulns = self.kb.get('retirejs', 'js')
        self.assertEqual(len(vulns), 0, vulns)
class TestXML0x0B(PluginTest):
    """
    xml_file must emit well-formed XML even when a finding contains the
    0x0b (vertical tab) binary character, which is invalid in XML 1.0.
    """

    target_url = 'http://0x0b-path-binary/'

    TEST_FILE = os.path.join(ROOT_PATH, 'plugins', 'tests', 'output',
                             'data', '0x0b.html')

    # Read the fixture once and close the handle immediately — the original
    # file(...).read() leaked the file descriptor until GC.
    with open(TEST_FILE) as _test_file:
        TEST_BODY = _test_file.read()
    del _test_file

    MOCK_RESPONSES = [
        MockResponse(url='http://0x0b-path-binary/',
                     body=TEST_BODY,
                     content_type='text/plain',
                     method='GET',
                     status=200),
    ]

    FILENAME = 'output-unittest.xml'

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'grep': (PluginConfig('path_disclosure'),),
                'output': (
                    PluginConfig(
                        'xml_file',
                        ('output_file', FILENAME, PluginConfig.STR)),
                )
            },
        }
    }

    def test_binary_0x0b_handling_in_xml(self):
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        self.assertEqual(len(self.kb.get_all_findings()), 1)

        # Parsing the report proves the plugin escaped/stripped the 0x0b
        try:
            tree = ElementTree.parse(self.FILENAME)
            tree.getroot()
        except Exception as e:
            self.fail('Generated invalid XML: "%s"' % e)
class TestGeneric(PluginTest):
    """
    The generic audit plugin reports unhandled errors, but must stay quiet
    when a more specific plugin (sqli) already claims the bug.
    """

    target_url = 'http://moth/w3af/audit/sql_injection/select/sql_injection_integer.php'

    _run_configs = {
        # generic alone: the error page is reported as a generic finding
        'generic_only': {
            'target': target_url + '?id=1',
            'plugins': {
                'audit': (PluginConfig('generic'),),
            }
        },
        # generic + sqli: sqli takes precedence, generic reports nothing
        'generic_sqli': {
            'target': target_url + '?id=1',
            'plugins': {
                'audit': (PluginConfig('generic'),
                          PluginConfig('sqli'),),
            }
        }
    }

    @attr('ci_fails')
    def test_found_generic(self):
        run_cfg = self._run_configs['generic_only']
        self._scan(run_cfg['target'], run_cfg['plugins'])

        generic_vulns = self.kb.get('generic', 'generic')
        self.assertEqual(1, len(generic_vulns))

        # Now some tests around specific details of the found vuln
        found = generic_vulns[0]
        self.assertEqual('Unhandled error in web application',
                         found.get_name())
        self.assertEqual(self.target_url, str(found.get_url()))

    @attr('ci_fails')
    def test_found_generic_not_reported(self):
        run_cfg = self._run_configs['generic_sqli']
        self._scan(run_cfg['target'], run_cfg['plugins'])

        generic_vulns = self.kb.get('generic', 'generic')
        self.assertEqual(0, len(generic_vulns))
class TestSpecialChars(PluginTest):
    """
    This test verifies that a fix for the bug identified while scanning
    demo.testfire.net is still working as expected. The issue was that the
    site had a form that looked like:

    <form action="/xyz">
        <input name="foo" value="bar+spam" type="hidden">
        <input name="eggs" type="text">
        ...
    </form>

    And when trying to send a request to that form the "+" in the value
    was sent as %20. The input was an .NET's EVENTVALIDATION thus it was
    impossible to find any bugs in the "eggs" parameter.

    Please note that this is a functional test and a unittest (which does
    not verify that everything works as expected) can be found at
    test_form.py
    """
    target_url = get_moth_http('/core/encoding_spaces/')

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'audit': (PluginConfig('xss'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward',
                                        True,
                                        PluginConfig.BOOL)),),
            }
        }
    }

    def test_special_chars(self):
        run_cfg = self._run_configs['cfg']
        self._scan(run_cfg['target'], run_cfg['plugins'])

        xss_vulns = self.kb.get('xss', 'xss')
        self.assertEqual(len(xss_vulns), 2, xss_vulns)
class TestPHPEggs(PluginTest):
    """
    Fingerprint PHP via its easter-egg URLs; the md5 of each egg body is
    mocked so the version lookup matches a known PHP release.
    """

    target_url = 'http://mock/'

    MOCK_RESPONSES = [
        MockResponse('http://mock/?=PHPB8B5F2A0-3C92-11d3-A3A9-4C7B08C10000',
                     '1'),
        MockResponse('http://mock/?=PHPE9568F34-D428-11d2-A769-00AA001ACF42',
                     '2', content_type='image/png'),
        MockResponse('http://mock/?=PHPE9568F35-D428-11d2-A769-00AA001ACF42',
                     '3', content_type='image/png'),
        MockResponse('http://mock/?=PHPE9568F36-D428-11d2-A769-00AA001ACF42',
                     '4', content_type='image/png')
    ]

    _run_configs = {
        'cfg': {
            'target': None,
            'plugins': {
                'infrastructure': (PluginConfig('php_eggs'),)
            }
        }
    }

    def test_php_eggs_fingerprinted(self):
        run_cfg = self._run_configs['cfg']

        # Map each mocked body to the hash of a real PHP 5.3.x egg so the
        # plugin's fingerprint database yields a match.
        body_to_hash = {
            '1': 'a4c057b11fa0fba98c8e26cd7bb762a8',
            '2': 'c48b07899917dfb5d591032007041ae3',
            '3': 'fb3bbd9ccc4b3d9e0b3be89c5ff98a14',
            '4': '7675f1d01c927f9e6a4752cf182345a2',
        }

        with patch('w3af.plugins.infrastructure.php_eggs.md5_hash') as md5mock:
            md5mock.side_effect = body_to_hash.get
            self._scan(self.target_url, run_cfg['plugins'])

        eggs = self.kb.get('php_eggs', 'eggs')
        self.assertEqual(len(eggs), 4, eggs)

        for egg in eggs:
            self.assertIn('PHP Egg', egg.get_name())

        php_version = self.kb.get('php_eggs', 'version')
        self.assertEqual(len(php_version), 1, php_version)

        php_version = php_version[0]
        self.assertEqual(php_version['version'], [u'5.3.2', u'5.3.1'])
class WAFTest(object):
    """Shared configuration for the fingerprint_WAF test cases."""

    domain = 'httpretty-mock'
    target_url = 'http://%s/' % domain

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'infrastructure': (PluginConfig('fingerprint_WAF'),)
            }
        }
    }
def test_spiderman_https(self):
    """Run spider_man on a free port against the HTTPS moth target."""
    listen_port = get_unused_port()

    spider_man_cfg = PluginConfig('spider_man',
                                  ('listen_port',
                                   listen_port,
                                   PluginConfig.INT))

    run_config = {'target': get_moth_https(),
                  'plugins': {'crawl': (spider_man_cfg,)}}

    self.generic_spiderman_run(run_config, get_moth_https, listen_port)
class TestFormExclusions(PluginTest):
    """
    This is an integration test for form exclusions

    :see: https://github.com/andresriancho/w3af/issues/15161
    """
    target_url = 'http://mock/'

    scan_config = {
        'target': target_url,
        'plugins': {
            'crawl': (PluginConfig('web_spider'),)
        }
    }

    MOCK_RESPONSES = [
        MockResponse(
            'http://mock/',
            '<html>'
            ''
            '<form action="/out/" method="POST">'
            '<input name="x" /></form>'
            ''
            '<form action="/in/" method="POST">'
            '<input name="x" /></form>'
            ''
            '</html>'),
        MockResponse('http://mock/out/', 'Thanks.', method='POST'),
        MockResponse('http://mock/in/', 'Thanks.', method='POST')
    ]

    def test_form_exclusions(self):
        # Exclude any form whose action matches /out.*
        user_value = '[{"action": "/out.*"}]'

        cf.cf.save('form_id_list', FormIDMatcherList(user_value))
        cf.cf.save('form_id_action', EXCLUDE)

        self._scan(self.scan_config['target'], self.scan_config['plugins'])

        # Define the expected/desired output: /out/ must be filtered out
        expected_files = ['', '/in/']
        base = URL(self.target_url)
        expected_urls = set(base.url_join(end).url_string
                            for end in expected_files)

        # pylint: disable=E1101
        # Pylint fails to detect the object types that come out of the KB
        known_urls = self.kb.get_all_known_urls()
        found_urls = set(str(u).decode('utf-8') for u in known_urls)

        self.assertEqual(found_urls, expected_urls)

        # revert any changes to the default so we don't affect other tests
        cf.cf.save('form_id_list', FormIDMatcherList('[]'))
        cf.cf.save('form_id_action', EXCLUDE)
class TestExportRequests(PluginTest):
    """
    export_requests must write every known fuzzable request to the output
    file in a form that round-trips through FuzzableRequest.from_base64.
    """

    target_url = get_moth_http('/grep/form_autocomplete/')

    OUTPUT_FILE = 'output-fr.b64'

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward',
                                        True,
                                        PluginConfig.BOOL)),),
                'output': (PluginConfig(
                    'export_requests',
                    ('output_file', OUTPUT_FILE, PluginConfig.STR)),)
            }
        },
    }

    def test_export_requests(self):
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        freq = self.kb.get_all_known_fuzzable_requests()

        self.assertTrue(os.path.exists(self.OUTPUT_FILE))

        # set() comparison is order-independent already; the original
        # wrapped each side in a pointless sorted() call.
        self.assertEqual(set(freq),
                         set(self._get_fuzzable_requests_from_file()))

    def _get_fuzzable_requests_from_file(self):
        """Yield the requests stored in the output file, one per line."""
        # with-statement closes the handle; the original file(...) loop
        # leaked the descriptor until GC.
        with open(self.OUTPUT_FILE) as output_file:
            for line in output_file:
                yield FuzzableRequest.from_base64(line)

    def tearDown(self):
        super(TestExportRequests, self).tearDown()
        try:
            os.remove(self.OUTPUT_FILE)
        except OSError:
            # File was never created (scan failed early) — nothing to clean
            pass
class TestLFI(PluginTest):
    """Crawl the local_file_read directory and verify both LFI findings."""

    target_url = get_moth_http('/audit/local_file_read/')

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'audit': (PluginConfig('lfi'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward',
                                        True,
                                        PluginConfig.BOOL)),)
            }
        }
    }

    def test_found_lfi(self):
        # Run the scan
        run_cfg = self._run_configs['cfg']
        self._scan(run_cfg['target'], run_cfg['plugins'])

        # (file, parameter) pairs the plugin must flag
        expected = {
            ('local_file_read.py', 'file'),
            ('local_file_read_full_path.py', 'file'),
        }

        lfi_vulns = self.kb.get('lfi', 'lfi')
        self.assertEqual(len(expected), len(lfi_vulns), lfi_vulns)

        # Every finding carries the canonical name
        for vuln in lfi_vulns:
            self.assertEqual("Local file inclusion vulnerability",
                             vuln.get_name())

        found = set((v.get_url().get_file_name(),
                     v.get_mutant().get_var()) for v in lfi_vulns)
        self.assertEqual(expected, found)
class TestXPATH(PluginTest):
    """Crawl the xpath test directory and verify the four injections."""

    target_url = get_moth_http('/audit/xpath/')

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'audit': (PluginConfig('xpath'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward',
                                        True,
                                        PluginConfig.BOOL)),)
            }
        }
    }

    def test_found_xpath(self):
        # Run the scan
        run_cfg = self._run_configs['cfg']
        self._scan(run_cfg['target'], run_cfg['plugins'])

        # Assert the general results
        xpath_vulns = self.kb.get('xpath', 'xpath')
        self.assertEqual(4, len(xpath_vulns), xpath_vulns)

        for vuln in xpath_vulns:
            self.assertEqual("XPATH injection vulnerability",
                             vuln.get_name(), xpath_vulns)

        # Verify the specifics about the vulnerabilities
        expected = {(u'xpath-attr-double.py', 'text'),
                    (u'xpath-attr-tag.py', 'text'),
                    (u'xpath-attr-or.py', 'text'),
                    (u'xpath-attr-single.py', 'text')}

        found = {(v.get_url().get_file_name(),
                  v.get_mutant().get_token_name()) for v in xpath_vulns}

        self.assertEqual(expected, found)
class TestFileUploadShell(ExecExploitTest):
    """Find and then exploit the insecure file upload, twice: once from a
    scan and once from a vulnerability template."""

    file_upload_url = 'http://moth/w3af/audit/file_upload/'

    _run_configs = {
        'cfg': {
            'target': file_upload_url,
            'plugins': {
                'audit': (
                    PluginConfig(
                        'file_upload',
                        ('extensions',
                         'gif,html,bmp,jpg,png,txt',
                         PluginConfig.LIST)
                    ),)
            },
        }
    }

    @attr('ci_fails')
    def test_found_exploit_file_upload(self):
        # Run the scan
        run_cfg = self._run_configs['cfg']
        self._scan(run_cfg['target'], run_cfg['plugins'])

        # Assert the general results
        upload_vulns = self.kb.get('file_upload', 'file_upload')
        self.assertEqual(1, len(upload_vulns))

        found = upload_vulns[0]
        self.assertEqual("Insecure file upload", found.get_name())

        # Exploit the finding through the file_upload shell
        self._exploit_vuln(found.get_id(), 'file_upload')

    @attr('ci_fails')
    def test_from_template(self):
        # Build the vulnerability by hand instead of scanning for it
        fut = FileUploadTemplate()
        options = fut.get_options()
        options['url'].set_value('http://moth/w3af/audit/file_upload/uploader.php')
        options['data'].set_value('uploadedfile=&MAX_FILE_SIZE=10000000')
        options['file_vars'].set_value('uploadedfile')
        options['file_dest'].set_value('http://moth/w3af/audit/file_upload/uploads/')
        options['vulnerable_parameter'].set_value('uploadedfile')
        fut.set_options(options)

        fut.store_in_kb()

        stored = self.kb.get(*fut.get_kb_location())[0]
        self._exploit_vuln(stored.get_id(), 'file_upload')
class TestAllowedMethods(PluginTest):
    """
    Note that this is a smoke test because the code in allowed_methods
    calls custom/special methods on the remote server using ExtendedUrllib
    and that's something we want to make sure works.
    """

    modsecurity_url = 'http://modsecurity/'
    moth_url = 'http://moth/'

    _run_configs = {
        'cfg': {
            'target': None,
            'plugins': {
                'infrastructure': (PluginConfig('allowed_methods'),)
            }
        }
    }

    @attr('ci_fails')
    def test_moth(self):
        """
        test_moth in test_allowed_methods, test the "default" configuration
        for Apache+PHP.
        """
        run_cfg = self._run_configs['cfg']
        self._scan(self.moth_url, run_cfg['plugins'])

        infos = self.kb.get('allowed_methods', 'custom-configuration')
        self.assertEqual(len(infos), 1, infos)

        info = infos[0]

        expected_prefix = ('The remote Web server has a custom configuration,'
                           ' in which any not implemented')
        self.assertTrue(info.get_desc().startswith(expected_prefix))
        self.assertEqual(info.get_name(),
                         'Non existent methods default to GET')

    @attr('ci_fails')
    def test_modsecurity(self):
        """
        test_modsecurity in test_allowed_methods, test a different
        configuration:

        RewriteEngine on
        RewriteCond %{THE_REQUEST} !^(POST|GET)\ /.*\ HTTP/1\.1$
        RewriteRule .* - [F]
        """
        run_cfg = self._run_configs['cfg']
        self._scan(self.modsecurity_url, run_cfg['plugins'])

        infos = self.kb.get('allowed_methods', 'custom-configuration')
        self.assertEqual(len(infos), 0, infos)
class TestHttpInBody(PluginTest):
    """http_in_body must flag pages that embed raw HTTP traffic."""

    target_url = get_moth_http('/grep/http_in_body/index.html')

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'grep': (PluginConfig('http_in_body'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward',
                                        True,
                                        PluginConfig.BOOL)),)
            }
        }
    }

    def _assert_single_info(self, kb_key, expected_file, expected_name):
        # Shared checks: exactly one finding per KB key, at the expected
        # URL, informational severity, canonical name.
        infos = self.kb.get('http_in_body', kb_key)
        self.assertEqual(1, len(infos), infos)

        info = infos[0]
        self.assertEqual(get_moth_http(expected_file), str(info.get_url()))
        self.assertEqual(severity.INFORMATION, info.get_severity())
        self.assertEqual(expected_name, info.get_name())

    def test_found_vuln(self):
        run_cfg = self._run_configs['cfg']
        self._scan(run_cfg['target'], run_cfg['plugins'])

        self._assert_single_info('request',
                                 '/grep/http_in_body/http_request.html',
                                 'HTTP Request in HTTP body')

        self._assert_single_info('response',
                                 '/grep/http_in_body/http_response.html',
                                 'HTTP Response in HTTP body')
class TestEventValidation(PluginTest):
    """Verify the findings produced by the dot_net_event_validation grep."""

    dot_net_event_validation_url = get_moth_http(
        '/grep/dot_net_event_validation/')

    _run_configs = {
        'cfg1': {
            'target': dot_net_event_validation_url,
            'plugins': {
                'grep': (PluginConfig('dot_net_event_validation'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward',
                                        True,
                                        PluginConfig.BOOL)),)
            }
        }
    }

    def test_found_vuln(self):
        run_cfg = self._run_configs['cfg1']
        self._scan(run_cfg['target'], run_cfg['plugins'])

        vulns = self.kb.get('dot_net_event_validation',
                            'dot_net_event_validation')
        self.assertEqual(3, len(vulns), vulns)

        # Identify each finding by its file name plus the last 30 chars of
        # the description (without the id, which varies between runs).
        expected_vulns = {
            ('event_validation.html', 'decode the viewstate contents.'),
            ('without_event_validation.html', 'decode the viewstate contents.'),
            ('without_event_validation.html', 'r should be manually verified.'),
        }

        found_vulns = set()
        for vuln in vulns:
            desc_ending = vuln.get_desc(with_id=False)[-30:]
            found_vulns.add((vuln.get_url().get_file_name(), desc_ending))

        self.assertEqual(expected_vulns, found_vulns)
class TestGetEmails(PluginTest):
    """With only_target_domain disabled, only external emails are found."""

    get_emails_url = get_moth_http('/grep/get_emails/')

    _run_configs = {
        'cfg1': {
            'target': get_emails_url,
            'plugins': {
                'grep': (PluginConfig('get_emails',
                                      ('only_target_domain',
                                       False,
                                       PluginConfig.BOOL)),),
                'crawl': (
                    PluginConfig('web_spider',
                                 ('only_forward', True, PluginConfig.BOOL)),
                )
            }
        }
    }

    def test_found_emails(self):
        run_cfg = self._run_configs['cfg1']
        self._scan(run_cfg['target'], run_cfg['plugins'])

        # Nothing on the target's own domain
        target_emails = self.kb.get('emails', 'emails')
        self.assertEqual(len(target_emails), 0)

        expected = {u'*****@*****.**',
                    u'*****@*****.**',
                    u'*****@*****.**',
                    u'*****@*****.**'}

        external_info_sets = self.kb.get('emails', 'external_emails')
        self.assertEqual(len(external_info_sets), len(expected))

        found_emails = set(info_set.get_attribute('mail')
                           for info_set in external_info_sets)
        self.assertEqual(found_emails, expected)
class TestCrossDomainJS(PluginTest):
    """cross_domain_js groups findings by the foreign script's domain."""

    target_url = get_moth_http('/grep/cross_domain_js/')

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'grep': (PluginConfig('cross_domain_js'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward',
                                        True,
                                        PluginConfig.BOOL)),)
            }
        }
    }

    def test_found_vuln(self):
        run_cfg = self._run_configs['cfg']
        self._scan(run_cfg['target'], run_cfg['plugins'])

        info_sets = self.kb.get('cross_domain_js', 'cross_domain_js')
        self.assertEqual(2, len(info_sets), info_sets)

        # One info-set per foreign domain, all with the same name
        found_domains = {i.get_attribute('domain') for i in info_sets}
        self.assertEqual(found_domains, {'moth', 'www.w3af.org'})

        found_names = {i.get_name() for i in info_sets}
        self.assertEqual(found_names, {'Cross-domain javascript source'})

        expected_files = {
            'cross_domain_script_mixed.html',
            'cross_domain_script_with_type.html',
            'cross_domain_script.html'
        }

        found_files = {info.get_url().get_file_name()
                       for info_set in info_sets
                       for info in info_set.infos}

        self.assertEqual(expected_files, found_files)
class TestSpecialCharacterInURL(PluginTest):
    """
    xml_file must emit well-formed XML when the finding's URL contains
    percent-encoded non-ASCII characters and the body contains unicode.
    """

    target_url = u'http://hello.se/%C3%93%C3%B6'

    MOCK_RESPONSES = [
        MockResponse(url=target_url,
                     body=u'hi there á! /var/www/site/x.php path',
                     content_type='text/plain',
                     method='GET',
                     status=200),
    ]

    FILENAME = 'output-unittest.xml'

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'grep': (PluginConfig('path_disclosure'),),
                'output': (
                    PluginConfig(
                        'xml_file',
                        ('output_file', FILENAME, PluginConfig.STR)),
                )
            },
        }
    }

    def test_special_character_in_url_handling(self):
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        self.assertEqual(len(self.kb.get_all_findings()), 1)

        # Parsing the report proves the special characters were encoded
        try:
            tree = ElementTree.parse(self.FILENAME)
            tree.getroot()
        except Exception as e:
            self.fail('Generated invalid XML: "%s"' % e)
class TestGlobalRedirect(PluginTest):
    """Verify that all five open-redirect variants are found."""

    target_url = get_moth_http('/audit/global_redirect/')

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'audit': (PluginConfig('global_redirect'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward',
                                        True,
                                        PluginConfig.BOOL)),)
            }
        },
    }

    def test_found_redirect(self):
        run_cfg = self._run_configs['cfg']
        self._scan(run_cfg['target'], run_cfg['plugins'])

        redirect_vulns = self.kb.get('global_redirect', 'global_redirect')

        # Every finding carries the canonical name
        self.assertEqual(
            all(['Insecure redirection' == vuln.get_name()
                 for vuln in redirect_vulns]),
            True)

        # Verify the specifics about the vulnerabilities
        expected_endings = [('redirect-javascript.py', 'url'),
                            ('redirect-meta.py', 'url'),
                            ('redirect-302.py', 'url'),
                            ('redirect-header-302.py', 'url'),
                            ('redirect-302-filtered.py', 'url')]

        found = set((str(v.get_url()), v.get_var())
                    for v in redirect_vulns)
        expected = set(((self.target_url + end), param)
                       for (end, param) in expected_endings)

        self.assertEqual(found, expected)