Пример #1
0
    def test_do_req_through_proxy(self):
        """Responses fetched through the proxy must match direct responses.

        The Date header is excluded from the comparison (the two requests
        may span a second boundary) and Transfer-Encoding/Content-Length
        are excluded because the proxy may re-frame the message body.
        """
        resp_body = self.proxy_opener.open(get_moth_http()).read()

        # Basic check
        self.assertTrue(len(resp_body) > 0)

        # Get response using the proxy
        proxy_resp = self.proxy_opener.open(get_moth_http())
        # Get it without any proxy
        direct_resp = urllib2.urlopen(get_moth_http())

        # Must be equal
        self.assertEqual(direct_resp.read(), proxy_resp.read())

        direct_resp_headers = dict(direct_resp.info())
        proxy_resp_headers = dict(proxy_resp.info())

        # pop() with a default instead of del: del raised KeyError (a test
        # ERROR masking the real assertion) when a server build didn't send
        # one of these headers.
        for headers in (direct_resp_headers, proxy_resp_headers):
            headers.pop('date', None)

        direct_resp_headers.pop('transfer-encoding', None)
        proxy_resp_headers.pop('content-length', None)

        self.assertEqual(direct_resp_headers, proxy_resp_headers)
Пример #2
0
    def test_cache(self):
        """GET the same URL twice; both responses must contain the marker."""
        for _ in range(2):
            target = URL(get_moth_http())
            response = self.uri_opener.GET(target)
            self.assertIn(self.MOTH_MESSAGE, response.body)
Пример #3
0
    def test_qs_params(self):
        """Query string parameters must reach the server and be echoed back."""
        for payload in ('123456abc', 'root:x:0'):
            target = URL(get_moth_http('/audit/xss/simple_xss.py?text=%s' % payload))
            response = self.uri_opener.GET(target, cache=False)
            self.assertIn(payload, response.body)
Пример #4
0
 def test_blacklist_handler_pass(self):
     """A non-blacklisted URL must pass through the BlacklistHandler."""
     target = get_moth_http()

     request = urllib2.Request(target)
     request.url_object = URL(target)

     opener = urllib2.build_opener(BlacklistHandler)
     response = opener.open(request)

     self.assertEqual(response.code, 200)
Пример #5
0
 def test_proxy_req_ok(self):
     """The proxied response body must equal the directly fetched one.

     The content check mirrors the previous test on purpose: a failure
     here (but not there) points at a problem in the proxy's start() or
     stop(), which run during setUp and tearDown.
     """
     via_proxy = self.proxy_opener.open(get_moth_http()).read()
     direct = urllib2.urlopen(get_moth_http()).read()
     self.assertEqual(direct, via_proxy)
Пример #6
0
 def test_history_access(self):
     """After a one-loop scan, request #1 must be loadable from history."""
     self.count_plugin.loops = 1
     self.w3afcore.start()

     item = HistoryItem()
     self.assertTrue(item.load(1))
     self.assertEqual(item.id, 1)

     expected_url = get_moth_http()
     self.assertEqual(item.get_request().get_uri().url_string, expected_url)
     self.assertEqual(item.get_response().get_uri().url_string, expected_url)
Пример #7
0
 def test_request_trapped_send(self):
     """End-to-end check of the proxy trap: a request issued through the
     proxy is intercepted, inspected, forwarded unmodified, and the
     client finally receives the real response.
     """
     def send_request(proxy_opener, result_queue):
         # Runs in a worker thread: blocks until the proxy releases the
         # trapped request, then hands the response to the test thread.
         response = proxy_opener.open(get_moth_http())
         result_queue.put(response)
     
     # Trap mode must be enabled BEFORE the worker thread sends anything
     self._proxy.set_trap(True)
     
     result_queue = Queue.Queue()
     send_thread = threading.Thread(target=send_request, args=(self.proxy_opener,
                                                               result_queue))
     send_thread.start()
     # Give the worker time to reach the proxy and get trapped
     time.sleep(0.5)
     
     request = self._proxy.get_trapped_request()
     
     # The trapped request must be the one the worker sent
     self.assertEqual(request.get_url().url_string, get_moth_http())
     self.assertEqual(request.get_method(), 'GET')
     
     # Forward the trapped request as-is so the worker thread unblocks
     self._proxy.send_raw_request(request, request.dump_request_head(),
                                  request.get_data())
     
     response = result_queue.get()
     
     self.assertEqual(response.code, 200)
     self.assertIn(self.MOTH_MESSAGE, response.read())
Пример #8
0
 def send(uri_opener, output):
     """GET the moth homepage and put the response (or None on error) in output.

     :param uri_opener: An opener exposing a GET(url) method
     :param output: Queue-like object receiving the response or None
     """
     url = URL(get_moth_http())
     try:
         http_response = uri_opener.GET(url)
     except Exception:
         # A bare "except:" also swallowed SystemExit/KeyboardInterrupt;
         # Exception keeps the best-effort behavior without hiding those.
         output.put(None)
     else:
         output.put(http_response)
Пример #9
0
    def test_found_exploit_blind_sqli_form_GET(self):
        """
        Reproduce bug https://github.com/andresriancho/w3af/issues/262
        "it appears that you have provided tainted parameter values"
        """
        # Scan the form-based blind SQL injection test page
        target = get_moth_http('/audit/blind_sqli/blind_where_integer_form_get.py')
        cfg = self._run_configs['blind_sqli']
        self._scan(target, cfg['plugins'])

        # Assert the general results
        vulns = self.kb.get('blind_sqli', 'blind_sqli')

        self.assertEquals(1, len(vulns))
        vuln = vulns[0]

        # The injection must be in the form's "q" parameter
        self.assertEquals("Blind SQL injection vulnerability", vuln.get_name())
        self.assertEquals('q', vuln.get_mutant().get_token_name())
        self.assertEquals('blind_where_integer_form_get.py',
                          vuln.get_url().get_file_name())

        vuln_to_exploit_id = vuln.get_id()

        #
        #   Execute the exploit: sqlmap must handle the vulnerability found
        #   above without the "tainted parameter values" error of issue #262
        #
        plugin = self.w3afcore.plugins.get_plugin_inst('attack', 'sqlmap')

        #   Assert success
        self.assertTrue(plugin.can_exploit(vuln_to_exploit_id))
        exploit_result = plugin.exploit(vuln_to_exploit_id)
        self.assertEqual(len(exploit_result), 1, exploit_result)
Пример #10
0
def get_test_profile(profile=FAST_TEST_PROFILE):
    """Return (profile, target_url) with the canned moth address replaced.

    Both the profile text and PROFILE_URL point at http://127.0.0.1:8000/ ;
    rewrite them to wherever the moth daemon is actually listening.
    """
    moth_root = get_moth_http('/')

    rewritten_target = PROFILE_URL.replace('http://127.0.0.1:8000/', moth_root)
    rewritten_profile = profile.replace('http://127.0.0.1:8000/', moth_root)

    return rewritten_profile, rewritten_target
Пример #11
0
 def send_request(proxy_opener, result_queue):
     """Open the moth URL through the proxy; queue the error if dropped.

     Runs in a helper thread: when the user drops the trapped HTTP
     request, the local proxy answers 403, which urllib2 raises as an
     HTTPError that we hand back to the test thread.
     """
     try:
         proxy_opener.open(get_moth_http())
     except urllib2.HTTPError as he:
         # "except E as e" replaces the Python-2-only "except E, e" form
         # (the "as" form works on Python 2.6+ as well)
         result_queue.put(he)
Пример #12
0
    def test_http_port_specification_via_proxy(self):
        """A request sent via the configured proxy increments its counter."""
        self.assertEqual(self._proxy.total_handled_requests, 0)

        response = self.uri_opener.GET(URL(get_moth_http()), cache=False)

        self.assertIn(self.MOTH_MESSAGE, response.body)
        self.assertEqual(self._proxy.total_handled_requests, 1)
Пример #13
0
    def test_post(self):
        """POSTed form data must be echoed back in the response body."""
        payload = '123456abc'

        form = URLEncodedForm()
        form['text'] = [payload]

        target = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        response = self.uri_opener.POST(target, form, cache=False)
        self.assertIn(payload, response.body)
 def test_gzip(self):
     """The gzip test page must be served with a compressed encoding."""
     target = URL(get_moth_http('/core/gzip/gzip.html'))
     response = self.uri_opener.GET(target, cache=False)

     encoding, _ = response.get_headers().iget('content-encoding', '')
     is_compressed = any(algo in encoding for algo in ('gzip', 'compress'))
     self.assertTrue(is_compressed, encoding)
Пример #15
0
 def test_get_wait_time(self):
     """Responses from the extended urllib must carry a measured wait time.

     The value must differ from DEFAULT_WAIT_TIME, proving it was set by
     the opener rather than left at the default.
     """
     response = self.uri_opener.GET(URL(get_moth_http()), cache=False)
     self.assertNotEqual(response.get_wait_time(), DEFAULT_WAIT_TIME)
Пример #16
0
 def prepare_script(self):
     """Render the script template against the moth URL and return its path.

     The rendered script goes to a NamedTemporaryFile that is NOT deleted
     on close, so the caller can hand the returned filename to the script
     runner.
     """
     # open() + context manager instead of the deprecated file() builtin,
     # whose handle was never closed.
     with open(self.SCRIPT) as template_fp:
         script = template_fp.read() % {'moth': get_moth_http()}

     fhandler = tempfile.NamedTemporaryFile(prefix='spider_long-',
                                            suffix='.w3af',
                                            dir=tempfile.tempdir,
                                            delete=False)
     fhandler.write(script)
     fhandler.close()
     return fhandler.name
Пример #17
0
 def test_basic(self):
     """A plain GET must return the moth message and a valid response id."""
     response = self.uri_opener.GET(URL(get_moth_http()), cache=False)

     self.assertIn(self.MOTH_MESSAGE, response.body)

     # Response ids start at 1 and must always be present
     self.assertGreaterEqual(response.id, 1)
     self.assertNotEqual(response.id, None)
Пример #18
0
    def test_deflate(self):
        """The deflate test page must arrive deflate-encoded and readable."""
        target = URL(get_moth_http('/core/deflate/deflate.html'))
        response = self.uri_opener.GET(target, cache=False)

        encoding, _ = response.get_headers().iget('content-encoding', '')
        self.assertIn('deflate', encoding)
        self.assertIn('View HTTP response headers.', response.get_body())
Пример #19
0
 def test_redirect_handler(self):
     """The HTTP30XHandler must surface the 302 instead of following it."""
     target = URL(get_moth_http('/audit/global_redirect/redirect-header-302.py?url=/'))

     opener = urllib2.build_opener(HTTP30XHandler)
     response = opener.open(urllib2.Request(target.url_string))

     self.assertEqual(response.code, FOUND)
Пример #20
0
    def test_post_special_chars(self):
        """POSTed values with markup and non-ASCII chars must survive intact."""
        payload = u'abc<def>"-á-'

        form = URLEncodedForm()
        form['text'] = [payload]

        target = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        response = self.uri_opener.POST(target, form, cache=False)
        self.assertIn(payload, response.body)
Пример #21
0
    def test_spiderman_http(self):
        """Run the generic spider_man flow against the plain HTTP moth."""
        port = get_unused_port()

        spider_man = PluginConfig("spider_man",
                                  ("listen_port", port, PluginConfig.INT))
        run_config = {"target": get_moth_http(),
                      "plugins": {"crawl": (spider_man,)}}

        self.generic_spiderman_run(run_config, get_moth_http, port)
Пример #22
0
    def test_get_cookies(self):
        """A Set-Cookie response must populate the opener's cookie jar."""
        # Jar starts empty
        self.assertEqual(len(list(self.uri_opener.get_cookies())), 0)

        url_sends_cookie = URL(get_moth_http('/core/cookies/set-cookie.py'))
        self.uri_opener.GET(url_sends_cookie, cache=False)

        # Materialize the jar once instead of iterating it three times
        cookies = list(self.uri_opener.get_cookies())
        self.assertEqual(len(cookies), 1)

        cookie = cookies[0]
        self.assertEqual('127.0.0.1', cookie.domain)
Пример #23
0
        def send_request(_id, proxy_opener, results, exceptions):
            """Open /<_id> through the proxy; queue the HTTPError if dropped.

            Runs in a helper thread. When the user drops the trapped HTTP
            request the local proxy answers 403, which urllib2 raises as an
            HTTPError that is queued for the test thread to inspect.
            """
            url = get_moth_http("/%s" % _id)

            try:
                # The response object was bound but never used
                proxy_opener.open(url, timeout=10)
            except urllib2.HTTPError as he:
                # "except E as e" replaces the Python-2-only "except E, e"
                results.put(he)
Пример #24
0
 def test_verify_vulnerability_false(self):
     """sqlmap must report a non-injectable parameter as not vulnerable."""
     not_vuln = get_moth_http('/audit/sql_injection/'
                              'where_string_single_qs.py?fake=pablo')
     target = Target(URL(not_vuln))

     self.sqlmap = SQLMapWrapper(target, self.uri_opener)

     self.assertFalse(self.sqlmap.is_vulnerable())
Пример #25
0
    def test_found_vuln(self):
        """http_in_body must flag one embedded HTTP request and one response."""
        cfg = self._run_configs["cfg"]
        self._scan(cfg["target"], cfg["plugins"])

        cases = (("request", "/grep/http_in_body/http_request.html",
                  "HTTP Request in HTTP body"),
                 ("response", "/grep/http_in_body/http_response.html",
                  "HTTP Response in HTTP body"))

        for kb_key, path, info_name in cases:
            infos = self.kb.get("http_in_body", kb_key)
            self.assertEquals(1, len(infos), infos)

            info = infos[0]
            self.assertEquals(get_moth_http(path), str(info.get_url()))
            self.assertEquals(severity.INFORMATION, info.get_severity())
            self.assertEquals(info_name, info.get_name())
Пример #26
0
 def test_redirect_handler(self):
     """The HTTP30XHandler must surface the 302 status via urllib2."""
     redirect_url = URL(get_moth_http(self.REDIRECT_URL))

     opener = urllib2.build_opener(HTTP30XHandler)
     response = opener.open(urllib2.Request(redirect_url.url_string))

     self.assertEqual(response.code, FOUND)
Пример #27
0
    def test_handler_order_pass(self):
        """Get an instance of the extended urllib and verify that the blacklist
        handler still works, even when mixed with all the other handlers."""
        # Configure the handler: /abc/def/ is blacklisted, the moth root isn't
        blocked_url = URL(get_moth_http('/abc/def/'))
        safe_url = URL(get_moth_http())
        cf.cf.save('non_targets', [blocked_url,])
        
        # Build the full opener chain AFTER saving the config so the
        # blacklist handler sees the non_targets setting
        settings = opener_settings.OpenerSettings()
        settings.build_openers()
        opener = settings.get_custom_opener()

        # Attributes the custom handlers expect on the request object
        request = HTTPRequest(safe_url)
        request.url_object = safe_url
        request.cookies = True
        request.get_from_cache = False
        response = opener.open(request)
        
        # The safe URL must go through; id == 1 shows it was the first
        # request handled by this opener
        self.assertEqual(response.code, 200)
        self.assertEqual(response.id, 1)
Пример #28
0
    def test_found_vuln(self):
        """http_in_body must flag one embedded HTTP request and one response."""
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        expectations = (('request', '/grep/http_in_body/http_request.html',
                         'HTTP Request in HTTP body'),
                        ('response', '/grep/http_in_body/http_response.html',
                         'HTTP Response in HTTP body'))

        for kb_key, path, vuln_name in expectations:
            infos = self.kb.get('http_in_body', kb_key)
            self.assertEquals(1, len(infos), infos)

            info = infos[0]
            self.assertEquals(get_moth_http(path), str(info.get_url()))
            self.assertEquals(severity.INFORMATION, info.get_severity())
            self.assertEquals(vuln_name, info.get_name())
Пример #29
0
    def test_spiderman_http(self):
        """Run the generic spider_man flow against the plain HTTP moth."""
        port = get_unused_port()

        spider_man = PluginConfig('spider_man',
                                  ('listen_port', port, PluginConfig.INT))
        run_config = {'target': get_moth_http(),
                      'plugins': {'crawl': (spider_man,)}}

        self.generic_spiderman_run(run_config, get_moth_http, port)
Пример #30
0
    def test_error_handling(self):
        """When the proxy's uri_opener is gone, requests must fail with a
        descriptive 500 error.

        urllib2 surfaces the 500 as an HTTPError, which we inspect for the
        proxy's explanatory body.
        """
        del self._proxy._master.uri_opener

        try:
            self.proxy_opener.open(get_moth_http()).read()
        except urllib2.HTTPError as hte:
            # By default urllib2 handles 500 errors as exceptions, so we
            # match against this exception object
            self.assertEqual(hte.code, 500)

            body = hte.read()
            self.assertIn('Proxy error', body)
            self.assertIn('HTTP request', body)
        else:
            # The original test silently passed when no error was raised
            self.fail('Expected a HTTP 500 error from the broken proxy')
Пример #31
0
class TestLFI(PluginTest):
    """Scan the local file read test area and verify the lfi plugin output."""

    target_url = get_moth_http('/audit/local_file_read/')

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'audit': (PluginConfig('lfi'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward', True,
                                        PluginConfig.BOOL)),),
            }
        }
    }

    def test_found_lfi(self):
        # Run the scan
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        # (file name, injected parameter) pairs we expect to find
        expected = {('local_file_read.py', 'file'),
                    ('local_file_read_full_path.py', 'file')}

        # Assert the general results
        vulns = self.kb.get('lfi', 'lfi')
        self.assertEquals(len(expected), len(vulns), vulns)

        for vuln in vulns:
            self.assertEqual("Local file inclusion vulnerability",
                             vuln.get_name())

        found = set((v.get_url().get_file_name(), v.get_mutant().get_var())
                    for v in vulns)
        self.assertEqual(expected, found)
Пример #32
0
class TestExportRequests(PluginTest):
    """Crawl the form_autocomplete area and verify that export_requests
    writes every known fuzzable request to its base64 output file."""

    target_url = get_moth_http('/grep/form_autocomplete/')

    # Single place for the output filename (was repeated four times)
    OUTPUT_FILE = 'output-fr.b64'

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'crawl':
                (PluginConfig('web_spider',
                              ('only_forward', True, PluginConfig.BOOL)), ),
                'output': (PluginConfig(
                    'export_requests',
                    ('output_file', OUTPUT_FILE, PluginConfig.STR)), )
            }
        },
    }

    def test_export_requests(self):
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        freq = self.kb.get_all_known_fuzzable_requests()

        self.assertTrue(os.path.exists(self.OUTPUT_FILE))

        # Every request known to the KB must appear in the exported file
        self.assertEquals(set(sorted(freq)),
                          set(sorted(self._get_fuzzable_requests_from_file())))

    def _get_fuzzable_requests_from_file(self):
        """Yield the FuzzableRequests stored (one base64 line each) in the
        export file."""
        # open() + context manager instead of the deprecated file() builtin,
        # whose handle was never closed
        with open(self.OUTPUT_FILE) as export_fp:
            for line in export_fp:
                yield FuzzableRequest.from_base64(line)

    def tearDown(self):
        super(TestExportRequests, self).tearDown()
        try:
            os.remove(self.OUTPUT_FILE)
        except OSError:
            # Best effort: the file only exists when the scan succeeded;
            # the bare "except:" here also hid real errors
            pass
Пример #33
0
    def setUp(self):
        """
        This is a rather complex setUp since I need to create an instance of
        the count.py plugin in memory, without copying it to any plugins
        directory since that would generate issues with other tests.
        """
        self.w3afcore = w3afCore()

        # Point the core at the moth test server
        target_opts = create_target_option_list(URL(get_moth_http()))
        self.w3afcore.target.set_options(target_opts)

        # Build the plugin instance by hand and wire it to the core's
        # opener and worker pool, as regular plugin loading would do
        plugin_inst = factory(self.PLUGIN)
        plugin_inst.set_url_opener(self.w3afcore.uri_opener)
        plugin_inst.set_worker_pool(self.w3afcore.worker_pool)

        # Register it directly in the core's plugin structures so the scan
        # believes 'count' was enabled as a crawl plugin
        self.w3afcore.plugins.plugins['crawl'] = [plugin_inst]
        self.w3afcore.plugins._plugins_names_dict['crawl'] = ['count']
        self.count_plugin = plugin_inst

        # Verify env and start the scan
        # NOTE(review): initialized=True presumably skips init_plugins(),
        # which would overwrite the hand-made instance — confirm
        self.w3afcore.plugins.initialized = True
        self.w3afcore.verify_environment()
Пример #34
0
    def test_send_mangled(self):
        """The self_reference evasion plugin must be invoked for the HTTP
        requests sent during an audit scan."""

        # Enable one evasion plugin and one audit plugin
        self.w3afcore.plugins.set_plugins(['self_reference'], 'evasion')
        self.w3afcore.plugins.set_plugins(['sqli'], 'audit')

        target_opts = create_target_option_list(URL(get_moth_http()))
        self.w3afcore.target.set_options(target_opts)

        # Verify env and start the scan
        self.w3afcore.plugins.init_plugins()
        self.w3afcore.verify_environment()

        sref = self.w3afcore.plugins.plugins['evasion'][0]

        def return_arg(request):
            # Identity pass-through: keeps the request untouched while
            # letting MagicMock count the calls
            return request

        sref.modify_request = MagicMock(side_effect=return_arg)

        self.w3afcore.start()

        # The scan sends many requests; each must have gone through the
        # evasion plugin's modify_request
        self.assertGreater(sref.modify_request.call_count, 15)
Пример #35
0
    def test_audit_plugin_timeout(self):
        """An audit plugin running longer than PLUGIN_TIMEOUT must be killed
        and the timeout must be logged."""
        plugin_inst = self.w3af.plugins.get_plugin_inst('audit', 'sqli')

        url = URL(get_moth_http('/'))
        freq = FuzzableRequest(url)

        def delay(x, y):
            """
            According to the stopit docs it can't kill a thread running an
            atomic python function such as time.sleep() , so I have to create
            a function like this. I don't mind, since it's realistic with what
            we do in w3af anyways.
            """
            total_delay = 3.0

            for _ in xrange(100):
                time.sleep(total_delay / 100)

        # Replace the real audit logic with the 3 second delay above
        plugin_inst.audit = delay

        mod = 'w3af.core.controllers.plugins.audit_plugin.%s'

        # 2s timeout < 3s delay, so the plugin must be killed
        mock_plugin_timeout = 2
        msg = '[timeout] The "%s" plugin took more than %s seconds to'\
              ' complete the analysis of "%s", killing it!'

        error = msg % (plugin_inst.get_name(), mock_plugin_timeout,
                       freq.get_url())

        # Patch the output manager (to capture the log call) and the
        # PLUGIN_TIMEOUT property (to shrink the timeout for this test)
        with patch(mod % 'om.out') as om_mock,\
             patch(mod % 'AuditPlugin.PLUGIN_TIMEOUT', new_callable=PropertyMock) as timeout_mock:

            timeout_mock.return_value = mock_plugin_timeout
            plugin_inst.audit_with_copy(freq, None)

            # The timeout message must have been sent to the debug log
            self.assertIn(call.debug(error), om_mock.mock_calls)

        # Just to make sure we didn't affect the class attribute with our test
        self.assertEqual(plugin_inst.PLUGIN_TIMEOUT, 5 * 60)
Пример #36
0
class TestFindJBoss(PluginTest):
    """Run the find_jboss infrastructure plugin against the moth server."""

    target_url = get_moth_http()

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {'infrastructure': (PluginConfig('find_jboss'),)},
        }
    }

    @attr('ci_fails')
    def test_find_jboss(self):
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        infos = self.kb.get('find_jboss', 'find_jboss')
        self.assertEqual(1, len(infos), infos)

        self.assertEqual('JMX Invoker enabled without Auth',
                         infos[0].get_name())
Пример #37
0
class TestXPATH(PluginTest):
    """Audit the xpath test area and verify the reported injections."""

    target_url = get_moth_http('/audit/xpath/')

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'audit': (PluginConfig('xpath'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward', True,
                                        PluginConfig.BOOL)),),
            }
        }
    }

    def test_found_xpath(self):
        # Run the scan
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        # (file name, injected token) pairs we expect to find
        expected = set([(u'xpath-attr-double.py', 'text'),
                        (u'xpath-attr-tag.py', 'text'),
                        (u'xpath-attr-or.py', 'text'),
                        (u'xpath-attr-single.py', 'text')])

        # Assert the general results
        vulns = self.kb.get('xpath', 'xpath')
        self.assertEquals(len(expected), len(vulns), vulns)

        for vuln in vulns:
            self.assertEqual("XPATH injection vulnerability",
                             vuln.get_name(), vulns)

        found = set((v.get_url().get_file_name(),
                     v.get_mutant().get_token_name()) for v in vulns)
        self.assertEquals(expected, found)
Пример #38
0
class TestMemcachei(PluginTest):
    """Audit memcache_value.py and verify the memcachei plugin finding."""

    target_url = get_moth_http('/audit/memcache_injection/memcache_value.py')

    _run_configs = {
        'cfg': {
            'target': target_url + '?key=x',
            'plugins': {'audit': (PluginConfig('memcachei'),)},
        }
    }

    @attr('ci_fails')
    def test_found_memcachei(self):
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        vulns = self.kb.get('memcachei', 'memcachei')
        self.assertEquals(1, len(vulns))

        # Check the details of the reported vulnerability
        vuln = vulns[0]
        self.assertEquals("Memcache injection vulnerability", vuln.get_name())
        self.assertEquals(self.target_url, str(vuln.get_url()))
Пример #39
0
class TestHttpInBody(PluginTest):
    """Crawl the http_in_body area and verify both grep findings."""

    target_url = get_moth_http('/grep/http_in_body/index.html')

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'grep': (PluginConfig('http_in_body'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward', True,
                                        PluginConfig.BOOL)),),
            }
        }
    }

    def test_found_vuln(self):
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        expectations = (('request', '/grep/http_in_body/http_request.html',
                         'HTTP Request in HTTP body'),
                        ('response', '/grep/http_in_body/http_response.html',
                         'HTTP Response in HTTP body'))

        for kb_key, path, info_name in expectations:
            infos = self.kb.get('http_in_body', kb_key)
            self.assertEquals(1, len(infos), infos)

            info = infos[0]
            self.assertEquals(get_moth_http(path), str(info.get_url()))
            self.assertEquals(severity.INFORMATION, info.get_severity())
            self.assertEquals(info_name, info.get_name())
Пример #40
0
class TestGetEmails(PluginTest):
    """Crawl the get_emails test pages and verify e-mail collection."""

    get_emails_url = get_moth_http('/grep/get_emails/')

    _run_configs = {
        'cfg1': {
            'target': get_emails_url,
            'plugins': {
                'grep': (PluginConfig('get_emails',
                                      ('only_target_domain',
                                       False,
                                       PluginConfig.BOOL)),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward', True,
                                        PluginConfig.BOOL)),),
            }
        }
    }

    def test_found_emails(self):
        cfg = self._run_configs['cfg1']
        self._scan(cfg['target'], cfg['plugins'])

        # No e-mails on the target domain itself
        self.assertEqual(len(self.kb.get('emails', 'emails')), 0)

        expected = {u'*****@*****.**',
                    u'*****@*****.**',
                    u'*****@*****.**',
                    u'*****@*****.**'}

        info_sets = self.kb.get('emails', 'external_emails')
        self.assertEqual(len(info_sets), len(expected))

        collected = set(i.get_attribute('mail') for i in info_sets)
        self.assertEqual(collected, expected)
Пример #41
0
class TestCrossDomainJS(PluginTest):
    """Crawl the cross_domain_js area and verify the grep plugin output."""

    target_url = get_moth_http('/grep/cross_domain_js/')

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'grep': (PluginConfig('cross_domain_js'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward', True,
                                        PluginConfig.BOOL)),),
            }
        }
    }

    def test_found_vuln(self):
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        info_sets = self.kb.get('cross_domain_js', 'cross_domain_js')
        self.assertEquals(2, len(info_sets), info_sets)

        domains = set(i.get_attribute('domain') for i in info_sets)
        self.assertEqual(domains, {'moth', 'www.w3af.org'})

        names = set(i.get_name() for i in info_sets)
        self.assertEqual(names, {'Cross-domain javascript source'})

        expected_files = {'cross_domain_script_mixed.html',
                          'cross_domain_script_with_type.html',
                          'cross_domain_script.html'}

        found_files = set()
        for info_set in info_sets:
            for info in info_set.infos:
                found_files.add(info.get_url().get_file_name())

        self.assertEqual(expected_files, found_files)
Пример #42
0
class TestDigitSum(PluginTest):
    """Verify that digit_sum discovers neighbour URLs by varying digits."""

    target_url = get_moth_http('/crawl/digit_sum/')

    _run_config = {
        'target': None,
        'plugins': {'crawl': (PluginConfig('digit_sum',),)}
    }

    def test_found_fname(self):
        # Digits embedded in the file name get incremented/decremented
        self._scan(self.target_url + 'index-3-1.html',
                   self._run_config['plugins'])

        expected = ('index-3-1.html', 'index-2-1.html')

        found = set(str(u) for u in self.kb.get_all_known_urls())
        self.assertEquals(found,
                          set(self.target_url + end for end in expected))

    def test_found_qs(self):
        # Digits in the query string get varied as well
        self._scan(self.target_url + 'index1.py?id=22',
                   self._run_config['plugins'])

        expected = (
            'index1.py?id=22',
            'index1.py?id=21',
            # These last two look very uninteresting, but please take
            # a look at the comment in digit_sum._do_request()
            'index1.py?id=23',
            'index1.py?id=20')

        found = set(str(fr.get_uri())
                    for fr in self.kb.get_all_known_fuzzable_requests())
        self.assertEquals(found,
                          set(self.target_url + end for end in expected))
class TestEventValidation(PluginTest):
    """Verify dot_net_event_validation findings on the test pages."""

    dot_net_event_validation_url = get_moth_http(
        '/grep/dot_net_event_validation/')

    _run_configs = {
        'cfg1': {
            'target': dot_net_event_validation_url,
            'plugins': {
                'grep': (PluginConfig('dot_net_event_validation'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward', True,
                                        PluginConfig.BOOL)),),
            }
        }
    }

    def test_found_vuln(self):
        cfg = self._run_configs['cfg1']
        self._scan(cfg['target'], cfg['plugins'])

        vulns = self.kb.get('dot_net_event_validation',
                            'dot_net_event_validation')
        self.assertEquals(3, len(vulns), vulns)

        expected = set([
            ('event_validation.html', 'decode the viewstate contents.'),
            ('without_event_validation.html',
             'decode the viewstate contents.'),
            ('without_event_validation.html', 'r should be manually verified.')
        ])

        # Compare on (file name, last 30 chars of the description)
        found = set()
        for vuln in vulns:
            desc_ending = vuln.get_desc(with_id=False)[-30:]
            found.add((vuln.get_url().get_file_name(), desc_ending))

        self.assertEqual(expected, found)
Пример #44
0
class TestDirectoryIndexing(PluginTest):
    """Verify that directory_indexing flags the listing test page."""

    dir_indexing_url = get_moth_http('/grep/directory_indexing/index.html')

    _run_configs = {
        'cfg1': {
            'target': dir_indexing_url,
            'plugins': {'grep': (PluginConfig('directory_indexing'),)},
        }
    }

    def test_found_vuln(self):
        cfg = self._run_configs['cfg1']
        self._scan(cfg['target'], cfg['plugins'])

        vulns = self.kb.get('directory_indexing', 'directory')
        self.assertEquals(1, len(vulns))

        vuln = vulns[0]
        self.assertEquals(self.dir_indexing_url, str(vuln.get_url()))
        self.assertEquals(severity.LOW, vuln.get_severity())
        self.assertEquals('Directory indexing', vuln.get_name())
Пример #45
0
    def close_all_sockets(self, wait):
        """Open one keep-alive connection, wait, then verify that
        HTTPHandler.close_all() actually closes this process' sockets.

        :param wait: Seconds to sleep between the request and close_all();
                     callers use it to test fresh vs idle connections.
        """
        keep_alive_http = HTTPHandler()

        uri_opener = urllib2.build_opener(keep_alive_http)

        # One request is enough to put a connection in the keep-alive pool
        request = HTTPRequest(URL(get_moth_http()))
        response = uri_opener.open(request)
        response.read()

        time.sleep(wait)

        # Count this process' open connections before closing the pool
        # pylint: disable=E1101
        pid = os.getpid()
        p = psutil.Process(pid)
        connections_before = p.get_connections()

        keep_alive_http.close_all()

        # Give the OS a moment to tear the sockets down
        time.sleep(1)
        connections_after = p.get_connections()
        # pylint: enable=E1101

        self.assertLess(len(connections_after), len(connections_before))
Пример #46
0
    def test_SQL_scan(self):
        """Drive a full console session that scans a known SQL injection and
        assert on the console output."""
        target = get_moth_http('/audit/sql_injection/where_string_single_qs.py')
        qs = '?uname=pablo'
        # The exact command sequence a user would type into the w3af console:
        # configure output plugins, enable sqli + web_spider + path_disclosure,
        # set the target, start the scan and exit.
        commands_to_run = ['plugins',
                           'output console,text_file',
                           'output config text_file',
                           'set output_file %s' % self.OUTPUT_FILE,
                           'set http_output_file %s' % self.OUTPUT_HTTP_FILE,
                           'set verbose True', 'back',
                           'output config console',
                           'set verbose False', 'back',
                           'audit sqli',
                           'crawl web_spider',
                           'crawl config web_spider',
                           'set only_forward True', 'back',
                           'grep path_disclosure',
                           'back',
                           'target',
                           'set target %s%s' % (target, qs), 'back',
                           'start',
                           'exit']

        # Message prefixes that must show up in the console output
        expected = ('SQL injection in ',
                    'A SQL error was found in the response supplied by ',
                    'Found 1 URLs and 1 different injections points',
                    'Scan finished')

        self.console = ConsoleUI(commands=commands_to_run, do_upd=False)
        self.console.sh()

        assert_result, msg = self.startswith_expected_in_output(expected)
        self.assertTrue(assert_result, msg)

        # No tracebacks or missing-file errors may appear in the output
        found_errors = self.error_in_output(['No such file or directory',
                                             'Exception'])

        self.assertFalse(found_errors)
Пример #47
0
class TestGlobalRedirect(PluginTest):
    """
    Integration test: crawl /audit/global_redirect/ and verify that the
    global_redirect audit plugin flags every open-redirect test script.
    """

    target_url = get_moth_http('/audit/global_redirect/')

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'audit': (PluginConfig('global_redirect'), ),
                'crawl':
                (PluginConfig('web_spider',
                              ('only_forward', True, PluginConfig.BOOL)), )
            }
        },
    }

    def test_found_redirect(self):
        """All findings must be 'Insecure redirection' on the expected URLs."""
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        vulns = self.kb.get('global_redirect', 'global_redirect')

        # assertTrue + generator is clearer than comparing the result of
        # all([...]) against True with the deprecated assertEquals alias
        self.assertTrue(
            all('Insecure redirection' == vuln.get_name() for vuln in vulns))

        # Verify the specifics about the vulnerabilities
        EXPECTED = [('redirect-javascript.py', 'url'),
                    ('redirect-meta.py', 'url'), ('redirect-302.py', 'url'),
                    ('redirect-header-302.py', 'url'),
                    ('redirect-302-filtered.py', 'url')]

        found = [(str(v.get_url()), v.get_var()) for v in vulns]
        expected = [((self.target_url + end), param)
                    for (end, param) in EXPECTED]

        # Discovery order is not deterministic, compare as sets
        self.assertEqual(set(found), set(expected))
Пример #48
0
class TestSQLI(PluginTest):
    """
    Integration test: scan a known SQL injection point with the sqli audit
    plugin and verify the details of the reported vulnerability.
    """

    target_url = get_moth_http('/audit/sql_injection/where_integer_qs.py')

    _run_configs = {
        'cfg': {
            'target': target_url + '?id=1',
            'plugins': {
                'audit': (PluginConfig('sqli'), ),
            }
        }
    }

    def test_found_sqli(self):
        """Exactly one sqli vuln with the expected error, db and URL."""
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])
        vulns = self.kb.get('sqli', 'sqli')

        # Pass vulns as the failure message so a mismatch shows what was
        # actually found; assertEqual instead of the deprecated assertEquals
        self.assertEqual(1, len(vulns), vulns)

        # Now some tests around specific details of the found vuln
        vuln = vulns[0]
        self.assertEqual("syntax error", vuln['error'])
        self.assertEqual("Unknown database", vuln['db'])
        self.assertEqual(self.target_url, str(vuln.get_url()))
Пример #49
0
class TestGlobalRedirect(PluginTest):
    """
    Scan the global_redirect test directory and verify that all the
    expected insecure redirect vulnerabilities are reported.
    """

    target_url = get_moth_http('/audit/global_redirect/')

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'audit': (PluginConfig('global_redirect'),),
                'crawl': (PluginConfig('web_spider',
                                       ('only_forward',
                                        True,
                                        PluginConfig.BOOL)),),
            }
        },
    }

    def test_found_redirect(self):
        """Run the scan and check names and injection points of the vulns."""
        run_config = self._run_configs['cfg']
        self._scan(run_config['target'], run_config['plugins'])

        vulns = self.kb.get('global_redirect', 'global_redirect')

        self.assertAllVulnNamesEqual('Insecure redirection', vulns)

        # Every vulnerable test script injects through the "url" parameter
        vulnerable_scripts = ('redirect-javascript.py',
                              'redirect-meta.py',
                              'redirect-302.py',
                              'redirect-header-302.py',
                              'redirect-302-filtered.py')
        expected = [(script, 'url') for script in vulnerable_scripts]

        self.assertExpectedVulnsFound(expected, vulns)
Пример #50
0
class TestOSCommanding(PluginTest):
    """
    Integration test: crawl /audit/os_commanding/ and verify the three
    expected OS command injection vulnerabilities are found.
    """

    target_url = get_moth_http('/audit/os_commanding/')

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'audit': (PluginConfig('os_commanding'), ),
                'crawl':
                (PluginConfig('web_spider',
                              ('only_forward', True, PluginConfig.BOOL)), )
            }
        }
    }

    def test_found_osc(self):
        """Three os_commanding vulns on the expected files/parameters."""
        # Run the scan
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        # Assert the general results; assertEqual instead of the deprecated
        # assertEquals alias
        vulns = self.kb.get('os_commanding', 'os_commanding')
        self.assertEqual(3, len(vulns), vulns)

        # Generator inside all() avoids building a temporary list
        self.assertTrue(
            all("OS commanding vulnerability" == v.get_name()
                for v in vulns))

        # Verify the specifics about the vulnerabilities
        EXPECTED = [('trivial_osc.py', 'cmd'), ('param_osc.py', 'param'),
                    ('blind_osc.py', 'cmd')]

        found_vulns = [(v.get_url().get_file_name(), v.get_mutant().get_var())
                       for v in vulns]

        # Discovery order is not deterministic, compare as sets
        self.assertEqual(set(EXPECTED), set(found_vulns))
Пример #51
0
    def test_two_scans(self):
        """
        Run two consecutive scans (SQL injection, then XSS) in a single
        console session and verify that both produce their expected output
        and that no errors appear.
        """
        def output_settings():
            # Output plugin configuration shared by both scans
            return ['plugins',
                    'output console,text_file',
                    'output config text_file',
                    'set output_file %s' % self.OUTPUT_FILE,
                    'set http_output_file %s' % self.OUTPUT_HTTP_FILE,
                    'set verbose True', 'back',
                    'output config console',
                    'set verbose False', 'back']

        def crawl_grep_settings():
            # Crawl and grep plugin configuration shared by both scans
            return ['crawl web_spider',
                    'crawl config web_spider',
                    'set only_forward True', 'back',
                    'grep path_disclosure',
                    'back']

        def target_settings(url, query):
            # Point the scanner at url+query
            return ['target',
                    'set target %s%s' % (url, query), 'back']

        sqli_target = get_moth_http('/audit/sql_injection/where_string_single_qs.py')
        scan_commands_1 = (output_settings() +
                           ['audit sqli'] +
                           crawl_grep_settings() +
                           target_settings(sqli_target, '?uname=pablo') +
                           ['start'])

        expected_1 = ('SQL injection in ',
                      'A SQL error was found in the response supplied by ',
                      'Found 1 URLs and 1 different injections points',
                      'Scan finished')

        xss_target = get_moth_http('/audit/xss/simple_xss.py')
        scan_commands_2 = (output_settings() +
                           ['audit xss'] +
                           crawl_grep_settings() +
                           ['plugins output'] +
                           target_settings(xss_target, '?text=1') +
                           ['start',
                            'exit'])

        expected_2 = ('A Cross Site Scripting vulnerability was found at',
                      'Scan finished')

        self.console = ConsoleUI(commands=scan_commands_1 + scan_commands_2,
                                 do_upd=False)
        self.console.sh()

        # Both scans must have printed their expected findings
        for expected in (expected_1, expected_2):
            assert_result, msg = self.startswith_expected_in_output(expected)
            self.assertTrue(assert_result, msg)

        found_errors = self.error_in_output(['No such file or directory',
                                             'Exception'])
        self.assertFalse(found_errors)
Пример #52
0
 def test_single_quote(self):
     """Blind SQLi detection on a single-quoted string qs parameter."""
     url = get_moth_http('/audit/blind_sqli/where_string_single_qs.py')
     self._scan_single_quote(url, '?uname=pablo')
Пример #53
0
 def test_single_quote_non_true_value_as_init(self):
     """Same injection point, but starting from the value 'foobar39'
     instead of 'pablo'."""
     url = get_moth_http('/audit/blind_sqli/where_string_single_qs.py')
     self._scan_single_quote(url, '?uname=foobar39')
Пример #54
0
class TestStrategy(unittest.TestCase):
    """
    Tests for w3af_core_strategy: a full scan run must find the expected
    vulnerability, and the worker threads must be created and torn down at
    the right moments.
    """

    # Known SQL injection point on the moth test server
    TARGET_URL = get_moth_http('/audit/sql_injection/'
                               'where_integer_qs.py?id=1')

    def test_strategy_run(self):
        """
        Run a sqli scan through the strategy and verify that WorkerThreads
        exist after scan_start_hook(), that exactly one sqli vuln reaches
        the KB, and that only the expected threads survive scan_end_hook().
        """
        core = w3afCore()

        target = core.target.get_options()
        target['target'].set_value(self.TARGET_URL)
        core.target.set_options(target)

        core.plugins.set_plugins(['sqli'], 'audit')
        core.plugins.init_plugins()

        core.verify_environment()
        core.scan_start_hook()

        def verify_threads_running(functor):
            # Runs at wrap time (right after scan_start_hook): asserts that
            # a WorkerThread is already alive, records that the check ran,
            # and returns the original teardown callable unchanged.
            thread_names = [t.name for t in threading.enumerate()]
            self.assertIn('WorkerThread', thread_names)
            self.called_teardown_audit = True
            return functor

        self.called_teardown_audit = False

        strategy = w3af_core_strategy(core)
        strategy._teardown_audit = verify_threads_running(
            strategy._teardown_audit)

        strategy.start()

        # Now test that those threads are being terminated
        self.assertTrue(self.called_teardown_audit)

        vulns = kb.get('sqli', 'sqli')
        self.assertEqual(len(vulns), 1, vulns)

        # Tell the core that we've finished, this should kill the WorkerThreads
        core.exploit_phase_prerequisites = lambda: 42
        core.scan_end_hook()

        self._assert_thread_names()

    def _assert_thread_names(self):
        """
        Assert that the only threads alive in this process are the known
        long-lived ones (main thread plus the w3af service threads).
        """
        alive = {thread.name for thread in threading.enumerate()}

        expected = {'MainThread', 'SQLiteExecutor', 'OutputManager',
                    'QueueFeederThread'}

        self.assertEqual(alive, expected)

    def test_strategy_exception(self):
        """
        If a consumer raises during the scan, strategy.start() must let the
        exception propagate and still call terminate() to clean up.
        """
        core = w3afCore()

        target = core.target.get_options()
        target['target'].set_value(self.TARGET_URL)
        core.target.set_options(target)

        core.plugins.set_plugins(['sqli'], 'audit')
        core.plugins.init_plugins()

        core.verify_environment()
        core.scan_start_hook()

        strategy = w3af_core_strategy(core)
        # Force a failure inside the strategy main loop
        strategy.join_all_consumers = Mock(side_effect=Exception)

        # wraps=... keeps the real terminate behaviour while recording calls
        strategy.terminate = Mock(wraps=strategy.terminate)

        self.assertRaises(Exception, strategy.start)

        # Now test that those threads are being terminated
        self.assertEqual(strategy.terminate.called, True)

        core.exploit_phase_prerequisites = lambda: 42
        core.scan_end_hook()

        self._assert_thread_names()

    def test_strategy_verify_target_server(self):
        core = w3afCore()

        # TODO: Change 2312 by an always closed/non-http port
        INVALID_TARGET = 'http://localhost:2312/'

        target = core.target.get_options()
        target['target'].set_value(INVALID_TARGET)
        core.target.set_options(target)

        core.plugins.set_plugins([
            'sqli',
        ], 'audit')
        core.plugins.init_plugins()

        core.verify_environment()
        core.scan_start_hook()

        strategy = w3af_core_strategy(core)

        try:
            strategy.start()
        except ScanMustStopException, wmse:
            message = str(wmse)
            self.assertIn('Please verify your target configuration', message)
        else:
Пример #55
0
class TestSQLMapWrapper(unittest.TestCase):
    """
    Integration tests for SQLMapWrapper: build Target objects pointing at
    the moth test server, drive sqlmap through the wrapper, and check both
    vulnerability detection and the enumeration helpers (dbs, tables,
    users, dump, direct).
    """

    # GET request with a known SQL injection in the 'uname' parameter
    SQLI_GET = get_moth_http('/audit/sql_injection/'
                             'where_string_single_qs.py?uname=pablo')

    # The same injection point, served over HTTPS
    SSL_SQLI_GET = get_moth_https('/audit/sql_injection/'
                                  'where_string_single_qs.py?uname=pablo')

    # POST endpoint with an injectable parameter (see DATA_POST)
    SQLI_POST = get_moth_http('/audit/sql_injection/where_integer_form.py')

    # POST body used together with SQLI_POST
    DATA_POST = 'text=1'

    def setUp(self):
        # Fresh wrapper per test, targeting the vulnerable GET URL; some
        # tests replace self.sqlmap with a wrapper for a different target
        uri = URL(self.SQLI_GET)
        target = Target(uri)

        self.uri_opener = ExtendedUrllib()

        self.sqlmap = SQLMapWrapper(target, self.uri_opener, debug=True)

    def tearDown(self):
        # Shut down the HTTP client and remove the wrapper's temp data
        self.uri_opener.end()
        self.sqlmap.cleanup()

    @classmethod
    def setUpClass(cls):
        # Remove sqlmap's session/output directory so no results are read
        # from a previous run's cache
        output_dir = os.path.join(SQLMapWrapper.SQLMAP_LOCATION, 'output')
        if os.path.exists(output_dir):
            shutil.rmtree(output_dir)

    @classmethod
    def tearDownClass(cls):
        # Doing this in both setupclass and teardownclass in order to be sure
        # that a ctrl+c doesn't break it
        output_dir = os.path.join(SQLMapWrapper.SQLMAP_LOCATION, 'output')
        if os.path.exists(output_dir):
            shutil.rmtree(output_dir)

    def test_verify_vulnerability(self):
        """sqlmap confirms the injection in the default GET target."""
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)

    def test_verify_vulnerability_ssl(self):
        """The same injection must also be confirmed over HTTPS."""
        uri = URL(self.SSL_SQLI_GET)
        target = Target(uri)

        self.uri_opener = ExtendedUrllib()

        self.sqlmap = SQLMapWrapper(target, self.uri_opener)
        vulnerable = self.sqlmap.is_vulnerable()
        # last_stdout is passed as msg to show sqlmap's output on failure
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

    def test_verify_vulnerability_false(self):
        """A parameter that is not injectable must not be reported."""
        not_vuln = get_moth_http('/audit/sql_injection/'
                                 'where_string_single_qs.py?fake=pablo')
        uri = URL(not_vuln)
        target = Target(uri)

        self.sqlmap = SQLMapWrapper(target, self.uri_opener)

        vulnerable = self.sqlmap.is_vulnerable()
        self.assertFalse(vulnerable)

    def test_verify_vulnerability_POST(self):
        """Injection in a POST body parameter is detected too."""
        target = Target(URL(self.SQLI_POST), self.DATA_POST)

        self.sqlmap = SQLMapWrapper(target, self.uri_opener)

        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

    def test_wrapper_invalid_url(self):
        """Passing a plain string instead of a Target raises TypeError."""
        self.assertRaises(TypeError, SQLMapWrapper, self.SQLI_GET,
                          self.uri_opener)

    def test_stds(self):
        """run_sqlmap_with_pipes exposes file-like std streams and the cmd."""
        uri = URL(self.SQLI_GET)
        target = Target(uri)

        self.sqlmap = SQLMapWrapper(target, self.uri_opener)

        prms = [
            '--batch',
        ]
        cmd, process = self.sqlmap.run_sqlmap_with_pipes(prms)

        self.assertIsInstance(process.stdout, file)
        self.assertIsInstance(process.stderr, file)
        self.assertIsInstance(process.stdin, file)
        self.assertIsInstance(cmd, basestring)

        self.assertIn('sqlmap.py', cmd)

    def test_target_basic(self):
        """A URL-only Target turns into a single --url parameter."""
        target = Target(URL(self.SQLI_GET))
        params = target.to_params()

        self.assertEqual(params, ["--url=%s" % self.SQLI_GET])

    def test_target_post_data(self):
        """A Target with post-data adds a --data parameter."""
        target = Target(URL(self.SQLI_GET), self.DATA_POST)
        params = target.to_params()

        self.assertEqual(
            params, ["--url=%s" % self.SQLI_GET,
                     "--data=%s" % self.DATA_POST])

    def test_no_coloring(self):
        """By default the wrapper disables sqlmap's colored output."""
        params = self.sqlmap.get_wrapper_params()
        self.assertIn('--disable-coloring', params)

    def test_always_batch(self):
        """The wrapper always runs sqlmap in non-interactive --batch mode."""
        params = self.sqlmap.get_wrapper_params()
        self.assertIn('--batch', params)

    def test_use_proxy(self):
        """sqlmap traffic is routed through a local proxy on 127.0.0.1."""
        params = self.sqlmap.get_wrapper_params()

        self.assertTrue(
            any(i.startswith('--proxy=http://127.0.0.1:') for i in params))

    def test_enable_coloring(self):
        """coloring=True removes the --disable-coloring parameter."""
        uri = URL(self.SQLI_GET)
        target = Target(uri)

        sqlmap = SQLMapWrapper(target, self.uri_opener, coloring=True)
        params = sqlmap.get_wrapper_params()
        self.assertNotIn('--disable-coloring', params)

    def test_dbs(self):
        """dbs() runs; SQLite backends can't enumerate databases."""
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)

        cmd, process = self.sqlmap.dbs()
        output = process.stdout.read()

        self.assertIn('on SQLite it is not possible to enumerate databases',
                      output)

    def test_tables(self):
        """tables() lists the Django tables of the moth SQLite database."""
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)

        cmd, process = self.sqlmap.tables()
        output = process.stdout.read()

        self.assertIn('auth_group_permissions', output)
        self.assertIn('Database: SQLite_masterdb', output)
        self.assertIn('django_content_type', output)

    def test_users(self):
        """users() runs; SQLite backends can't enumerate users."""
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)

        cmd, process = self.sqlmap.users()
        output = process.stdout.read()

        self.assertIn('on SQLite it is not possible to enumerate the users',
                      output)

    def test_dump(self):
        """dump() extracts table contents from the target database."""
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)

        cmd, process = self.sqlmap.dump()
        output = process.stdout.read()

        self.assertIn('django_session', output)
        self.assertIn('auth_user_user_permissions', output)

    def test_sqlmap(self):
        """direct() forwards raw sqlmap arguments ('--tables' here)."""
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

        cmd, process = self.sqlmap.direct('--tables')
        output = process.stdout.read()

        self.assertIn('django_session', output)
        self.assertIn('auth_user_user_permissions', output)

        # COLUMN_PRIVILEGES is a MySQL information_schema table; it must not
        # appear for a SQLite backend
        self.assertNotIn('COLUMN_PRIVILEGES', output)
Пример #56
0
class TestFormAuth(PluginTest):
    """
    Integration tests for the form_auth bruteforce plugin: guessable login
    forms (POST, GET and password-only) must yield credentials, while an
    impossible form must yield none.
    """

    # Directory that holds the small user/password wordlists used below
    BASE_PATH = os.path.join(ROOT_PATH, 'plugins', 'tests', 'bruteforce')

    # Wordlist without the valid user (negative tests)
    small_users_negative = os.path.join(BASE_PATH, 'small-users-negative.txt')
    # Wordlist that contains the valid user (positive tests)
    small_users_positive = os.path.join(BASE_PATH, 'small-users-positive.txt')
    small_passwords = os.path.join(BASE_PATH, 'small-passwords.txt')

    # Target forms on the moth test server
    target_post_url = get_moth_http('/bruteforce/form/guessable_login_form.py')
    target_get_url = get_moth_http(
        '/bruteforce/form/guessable_login_form_get.py')
    target_password_only_url = get_moth_http(
        '/bruteforce/form/guessable_pass_only.py')
    target_negative_url = get_moth_http('/bruteforce/form/impossible.py')

    target_web_spider_url = get_moth_http('/bruteforce/form/')

    # Scan configuration whose wordlists contain the valid credentials
    positive_test = {
        'target': None,
        'plugins': {
            'crawl': (PluginConfig(
                'web_spider',
                ('only_forward', True, PluginConfig.BOOL),
            ), ),
            'bruteforce': (PluginConfig(
                'form_auth',
                ('usersFile', small_users_positive, PluginConfig.STR),
                ('passwdFile', small_passwords, PluginConfig.INPUT_FILE),
                ('useProfiling', False, PluginConfig.BOOL),
            ), ),
        }
    }

    # Same configuration but with a user wordlist that can never match
    negative_test = {
        'target': None,
        'plugins': {
            'crawl': (PluginConfig(
                'web_spider',
                ('only_forward', True, PluginConfig.BOOL),
            ), ),
            'bruteforce': (PluginConfig(
                'form_auth',
                ('usersFile', small_users_negative, PluginConfig.STR),
                ('passwdFile', small_passwords, PluginConfig.INPUT_FILE),
                ('useProfiling', False, PluginConfig.BOOL),
            ), )
        }
    }

    @attr('smoke')
    def test_found_credentials_post(self):
        """Bruteforce a POST login form and find admin/1234."""
        self._scan(self.target_post_url, self.positive_test['plugins'])

        # Assert the general results
        vulns = self.kb.get('form_auth', 'auth')
        self.assertEquals(len(vulns), 1)

        vuln = vulns[0]

        self.assertEquals(vuln.get_name(), 'Guessable credentials')
        self.assertEquals(vuln.get_url().url_string, self.target_post_url)
        self.assertEquals(vuln['user'], 'admin')
        self.assertEquals(vuln['pass'], '1234')

    def test_found_credentials_get(self):
        """Bruteforce a GET login form and find admin/admin."""
        self._scan(self.target_get_url, self.positive_test['plugins'])

        # Assert the general results
        vulns = self.kb.get('form_auth', 'auth')
        self.assertEquals(len(vulns), 1)

        vuln = vulns[0]

        self.assertEquals(vuln.get_name(), 'Guessable credentials')
        self.assertEquals(vuln.get_url().url_string, self.target_get_url)
        self.assertEquals(vuln['user'], 'admin')
        self.assertEquals(vuln['pass'], 'admin')

    def test_found_credentials_password_only(self):
        """Bruteforce a form that only asks for a password."""
        self._scan(self.target_password_only_url,
                   self.positive_test['plugins'])

        # Assert the general results
        vulns = self.kb.get('form_auth', 'auth')
        self.assertEquals(len(vulns), 1, vulns)

        vuln = vulns[0]

        self.assertEquals(vuln.get_name(), 'Guessable credentials')
        self.assertEquals(vuln.get_url().url_string,
                          self.target_password_only_url)
        self.assertEquals(vuln['user'], 'password-only-form')
        self.assertEquals(vuln['pass'], '1234')

    def test_negative(self):
        """With a non-matching user wordlist no credentials are reported."""
        self._scan(self.target_negative_url, self.negative_test['plugins'])

        # Assert the general results
        vulns = self.kb.get('form_auth', 'auth')
        self.assertEquals(len(vulns), 0)
Пример #57
0
class TestHTMLOutput(PluginTest):
    """
    Run an XSS scan with the html_file output plugin enabled, then check
    that the generated HTML report lists the same vulnerable URLs as the
    KB and that it is well-formed XHTML.
    """

    target_url = get_moth_http('/audit/xss/')
    OUTPUT_FILE = 'output-unittest.html'

    _run_configs = {
        'cfg': {
            'target': target_url,
            'plugins': {
                'audit':
                (PluginConfig('xss', ('checkStored', True, PluginConfig.BOOL),
                              ('numberOfChecks', 3, PluginConfig.INT)), ),
                'crawl':
                (PluginConfig('web_spider',
                              ('only_forward', True, PluginConfig.BOOL)), ),
                'output': (PluginConfig(
                    'html_file',
                    ('output_file', OUTPUT_FILE, PluginConfig.STR)), )
            },
        }
    }

    def test_found_xss(self):
        """URLs in the HTML report must match the XSS vulns in the KB."""
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        xss_vulns = self.kb.get('xss', 'xss')
        file_vulns = self._from_html_get_vulns()

        self.assertGreaterEqual(len(xss_vulns), 2)

        self.assertEqual(set(sorted([v.get_url() for v in xss_vulns])),
                         set(sorted([v.get_url() for v in file_vulns])))

        self._validate_xhtml()

    def _from_html_get_vulns(self):
        """Parse the HTML report and return one MockVuln per reported URL."""
        vuln_url_re = re.compile('<li>Vulnerable URL: <a href="(.*?)">')
        vulns = []

        # Context manager instead of a bare file() call so the report
        # handle is always closed (the original leaked it)
        with open(self.OUTPUT_FILE) as report:
            for line in report:
                mo = vuln_url_re.search(line)
                if mo:
                    url = URL(mo.group(1))
                    v = MockVuln('TestCase', None, 'High', 1, 'plugin')
                    v.set_url(url)
                    vulns.append(v)

        return vulns

    def _validate_xhtml(self):
        """Fail the test if the generated report is not well-formed XHTML."""
        parser = etree.XMLParser()

        def generate_msg(parser):
            # Turn the lxml error log into a readable failure message
            msg = 'XHTML parsing errors:\n'
            for error in parser.error_log:
                msg += '\n    %s (line: %s, column: %s)' % (
                    error.message, error.line, error.column)
            return msg

        # Read with a context manager so the handle is closed even on error
        with open(self.OUTPUT_FILE) as report:
            content = report.read()

        try:
            parser = etree.XML(content, parser)
        except etree.XMLSyntaxError:
            self.fail(generate_msg(parser))
        else:
            if hasattr(parser, 'error_log'):
                self.assertFalse(len(parser.error_log), generate_msg(parser))

    def tearDown(self):
        super(TestHTMLOutput, self).tearDown()
        try:
            os.remove(self.OUTPUT_FILE)
        except OSError:
            # The report may not exist if the scan failed early; only ignore
            # filesystem errors instead of swallowing everything with a bare
            # except
            pass
Пример #58
0
class TestXMLOutput(PluginTest):
    """
    Run a sqli scan with the xml_file output plugin enabled and verify that
    the XML report matches the KB findings and validates against the XSD.
    """

    target_url = get_moth_http('/audit/sql_injection/where_integer_qs.py')

    FILENAME = 'output-unittest.xml'
    XSD = os.path.join(ROOT_PATH, 'plugins', 'output', 'xml_file', 'report.xsd')

    _run_configs = {
        'cfg': {
            'target': target_url + '?id=3',
            'plugins': {
                'audit': (PluginConfig('sqli'),),
                'output': (
                    PluginConfig(
                        'xml_file',
                        ('output_file', FILENAME, PluginConfig.STR)),
                )
            },
        }
    }

    def test_found_vuln(self):
        """URL, name and plugin name in the report must match the KB."""
        cfg = self._run_configs['cfg']
        self._scan(cfg['target'], cfg['plugins'])

        kb_vulns = self.kb.get('sqli', 'sqli')
        file_vulns = self._from_xml_get_vulns()

        self.assertEqual(len(kb_vulns), 1, kb_vulns)

        self.assertEqual(
            set(sorted([v.get_url() for v in kb_vulns])),
            set(sorted([v.get_url() for v in file_vulns]))
        )

        self.assertEqual(
            set(sorted([v.get_name() for v in kb_vulns])),
            set(sorted([v.get_name() for v in file_vulns]))
        )

        self.assertEqual(
            set(sorted([v.get_plugin_name() for v in kb_vulns])),
            set(sorted([v.get_plugin_name() for v in file_vulns]))
        )

        # Context manager instead of bare file() so the handle is closed
        with open(self.FILENAME) as report:
            xml_content = report.read()
        self.assertEqual(validate_xml(xml_content, self.XSD), '')

    def _from_xml_get_vulns(self):
        """Parse the XML report through XMLParser and return its vulns."""
        xp = XMLParser()
        parser = etree.XMLParser(target=xp)
        with open(self.FILENAME) as report:
            vulns = etree.fromstring(report.read(), parser)
        return vulns

    def tearDown(self):
        super(TestXMLOutput, self).tearDown()
        try:
            os.remove(self.FILENAME)
        except OSError:
            # The report may not exist if the scan failed early; only ignore
            # filesystem errors instead of a bare except that hides bugs
            pass

    def test_error_null_byte(self):
        """
        The xml_file plugin must not crash when logging a message that
        contains a null byte.

        https://github.com/andresriancho/w3af/issues/12924
        """
        plugin_instance = xml_file()
        plugin_instance.error('\0')
        plugin_instance.flush()
Пример #59
0
 def test_special_char_header(self):
     """A non-ASCII header value must survive the request/response cycle."""
     cookie_value = u'name=ábc'
     url = URL(get_moth_http('/core/headers/echo-headers.py'))
     response = self.uri_opener.GET(url,
                                    cache=False,
                                    headers=Headers([('Cookie',
                                                      cookie_value)]))
     self.assertIn(cookie_value, response.body)
Пример #60
0
 def test_pause_stop(self):
     """After stop(), GET must raise even if the opener was paused first."""
     self.uri_opener.pause(True)
     self.uri_opener.stop()
     self.assertRaises(ScanMustStopByUserRequest,
                       self.uri_opener.GET,
                       URL(get_moth_http()))