Example No. 1
    def test_repr(self):
        url = "http://www.w3af.com/"
        fr = FuzzableRequest(URL(url))

        self.assertEqual(repr(fr), '<fuzzable request | GET | %s>' % url)
Example No. 2
    def test_clean_reference_simple(self):
        self.assertEqual(
            self.vdb._clean_reference(URL('http://w3af.org/')),
            u'http://w3af.org/')
Example No. 3
    def test_clean_reference_int(self):
        self.assertEqual(
            self.vdb._clean_reference(URL('http://w3af.org/index.php?id=2')),
            u'http://w3af.org/index.php?id=number')
Example No. 4
    def test_increasing_delay_on_errors(self):
        expected_log = {
            0: False,
            70: False,
            40: False,
            10: False,
            80: False,
            50: False,
            20: False,
            90: False,
            60: False,
            30: False,
            100: False
        }
        self.assertEqual(self.uri_opener._sleep_log, expected_log)

        return_empty_daemon = UpperDaemon(EmptyTCPHandler)
        return_empty_daemon.start()
        return_empty_daemon.wait_for_start()

        port = return_empty_daemon.get_port()

        # No retries means that the test is easier to read/understand
        self.uri_opener.settings.set_max_http_retries(0)

        # We want to keep going, don't test the _should_stop_scan here.
        self.uri_opener._should_stop_scan = lambda x: False

        url = URL('http://127.0.0.1:%s/' % port)
        http_exception_count = 0
        loops = 100

        # Patch time.sleep so the test doesn't actually wait; the mock records
        # the delays so they can be checked below
        with patch('w3af.core.data.url.extended_urllib.time.sleep') as sleepm:
            for i in xrange(loops):
                try:
                    self.uri_opener.GET(url, cache=False)
                except HTTPRequestException:
                    http_exception_count += 1
                except Exception, e:
                    msg = 'Not expecting: "%s"'
                    self.assertTrue(False, msg % e.__class__.__name__)
                else:
                    self.assertTrue(False, 'Expecting HTTPRequestException')

            self.assertEqual(loops - 1, i)

            # Note that the timeouts are increasing based on the error rate and
            # SOCKET_ERROR_DELAY
            expected_calls = [
                call(1.5),
                call(3.0),
                call(4.5),
                call(6.0),
                call(7.5),
                call(9.0),
                call(10.5),
                call(12.0),
                call(13.5)
            ]

            expected_log = {
                0: False,
                70: True,
                40: True,
                10: True,
                80: True,
                50: True,
                20: True,
                90: True,
                60: True,
                30: True,
                100: False
            }
            self.assertEqual(expected_calls, sleepm.call_args_list)
            self.assertEqual(http_exception_count, 100)
            self.assertEqual(self.uri_opener._sleep_log, expected_log)

            # This one should also clear the log
            try:
                self.uri_opener.GET(url, cache=False)
            except HTTPRequestException:
                pass
            else:
                self.assertTrue(False, 'Expected HTTPRequestException')

            # The log was cleared, all values should be False
            self.assertTrue(
                all([not v for v in self.uri_opener._sleep_log.values()]))
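The nine expected sleep calls above grow linearly, which is consistent with sleeping one extra SOCKET_ERROR_DELAY for each 10% error-rate bucket that is reached for the first time (the 0% and 100% buckets stay False in the sleep log). A minimal sketch of that arithmetic, assuming SOCKET_ERROR_DELAY is 1.5 seconds, a value inferred from the call list rather than read from the w3af source:

# Hedged sketch: reproduces the expected_calls list from the test above,
# assuming SOCKET_ERROR_DELAY == 1.5 and one sleep per newly-reached
# 10%..90% error-rate bucket (0% and 100% never trigger a sleep here).
SOCKET_ERROR_DELAY = 1.5  # assumed value, not taken from the w3af source

expected_delays = []
for bucket in xrange(10, 100, 10):
    expected_delays.append(SOCKET_ERROR_DELAY * (bucket / 10))

assert expected_delays == [1.5, 3.0, 4.5, 6.0, 7.5, 9.0, 10.5, 12.0, 13.5]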
Example No. 5
import w3af.core.data.constants.ports as ports

from w3af.core.controllers.plugins.crawl_plugin import CrawlPlugin
from w3af.core.controllers.daemons.proxy import Proxy, w3afProxyHandler
from w3af.core.controllers.exceptions import RunOnce, ProxyException
from w3af.core.controllers.misc.decorators import runonce

from w3af.core.data.options.opt_factory import opt_factory
from w3af.core.data.options.option_list import OptionList
from w3af.core.data.parsers.url import URL
from w3af.core.data.dc.headers import Headers

# Cohny changed the original http://w3af/spider_man?terminate
# to http://127.7.7.7/spider_man?terminate because in Opera we got
# an error if we used the original one! Thanks Cohny!
TERMINATE_URL = URL('http://127.7.7.7/spider_man?terminate')


class spider_man(CrawlPlugin):
    """
    SpiderMan is a local proxy that will collect new URLs.

    :author: Andres Riancho ([email protected])
    :author: Alexander Berezhnoy < alexander.berezhnoy |at| gmail.com >
    """
    def __init__(self):
        CrawlPlugin.__init__(self)
        self._first_captured_request = True

        # User configured parameters
        self._listen_address = '127.0.0.1'
Example No. 6
    def test_https_via_proxy(self):
        raise SkipTest(TODO_183)

        url = URL(get_moth_https())
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)
Example No. 7
    def test_POST_via_proxy(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        http_response = self.uri_opener.POST(url, data='text=123456abc', cache=False)
        self.assertIn('123456abc', http_response.body)
Example No. 8
            pos_x = screen_width - popup_width

        pos_y = cell_y_ + 3
        if pos_y + popup_height > screen_height:
            pos_y = cell_y - 3 - popup_height

        return (pos_x, pos_y)


def main():
    gtk.main()

if __name__ == "__main__":

    from w3af.core.data.parsers.url import URL
    url_instance = URL('http://a/index.html')

    #    We create the data
    data = [
        HTTPResponse(200, 'my data1 looks like this and has no errors',
                     {}, url_instance, url_instance, id=1),
        HTTPResponse(200, 'errors? i like errors like this one: SQL',
                     {}, url_instance, url_instance, id=2),
        HTTPResponse(200, 'my data is really happy', {},
                     url_instance, url_instance, id=3),
        HTTPResponse(
            200, 'my data1 loves me', {}, url_instance, url_instance, id=4),
        HTTPResponse(
            200, 'my data likes me', {}, url_instance, url_instance, id=5)
    ]
Example No. 9
    def _create_http_response(self, domain, body, is_404):
        url = URL('http://%s/%s' % (domain, FAILED_FILENAME if is_404 else ''))
        resp = HTTPResponse(200, body, self.empty_headers, url, url)
        return resp
Example No. 10
# Anchor templates
A_LINK_RELATIVE = u'<a href="/index.php">XXX</a>'
A_LINK_ABSOLUTE = u'<a href="www.w3af.com/home.php">XXX</a>'
A_LINK_FRAGMENT = u'<a href="#mark">XXX</a>'

# Other templates
BASE_TAG = u"""
<base href="http://www.w3afbase.com">
<base target="_blank">
"""
META_REFRESH = u"""<meta http-equiv="refresh" content="600">"""
META_REFRESH_WITH_URL = u"""
<meta http-equiv="refresh" content="2;url=http://crawler.w3af.com/">"""

URL_INST = URL('http://w3af.com')


def _build_http_response(url, body_content, headers=Headers()):
    if 'content-type' not in headers:
        headers['content-type'] = 'text/html'
    return HTTPResponse(200, body_content, headers, url, url, charset='utf-8')


# We subclass SGMLParser to prevent the parsing process from running
# while init'ing the parser instance


class _SGMLParser(SGMLParser):
    def __init__(self, http_resp):
        # Save "_parse" reference
Example No. 11
    def test_baseurl(self):
        body = HTML_DOC % {'head': BASE_TAG, 'body': ''}
        resp = _build_http_response(URL_INST, body)
        p = _SGMLParser(resp)
        p._parse(resp)
        self.assertEquals(URL('http://www.w3afbase.com/'), p._base_url)
Example No. 12
    def setUp(self):
        kb.kb.cleanup()
        self.plugin = private_ip()
        self.url = URL('http://www.w3af.com/')
        self.request = FuzzableRequest(self.url)
Example No. 13
    def setUp(self):
        self.url = URL('http://w3af.com')
        self.headers = Headers([(u'content-type', u'text/html')])
        self.dpc = ParserCache()
Example No. 14
    def setUp(self):
        self.url = URL('http://w3af.com/a/b/c.php')
Example No. 15
    def wsdl_url_generator(self, url_string):
        for wsdl_parameter in self.WSDL:
            url_to_request = url_string + wsdl_parameter
            url_instance = URL(url_to_request)
            yield url_instance
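The generator above appends each candidate WSDL parameter to the URL string it receives and yields a URL instance per candidate. A standalone sketch of the same pattern, where the suffix list is a hypothetical stand-in for the plugin's real WSDL constant:

# Hedged sketch: WSDL_SUFFIXES is an illustrative stand-in for the plugin's
# real WSDL attribute, not its actual value.
from w3af.core.data.parsers.url import URL

WSDL_SUFFIXES = ('?wsdl', '?WSDL')  # hypothetical suffixes


def wsdl_url_generator_sketch(url_string):
    for wsdl_parameter in WSDL_SUFFIXES:
        yield URL(url_string + wsdl_parameter)

# for url_instance in wsdl_url_generator_sketch('http://w3af.org/ws/svc.php'):
#     print url_instance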
Example No. 16
    def setUp(self):
        self.url = URL('http://www.w3af.com/')
Example No. 17
    def test_http_port_specification_via_proxy(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)
Example No. 18
    def _scan(self,
              target,
              plugins,
              debug=False,
              assert_exceptions=True,
              verify_targets=True):
        """
        Setup env and start scan. Typically called from children's
        test methods.

        :param target: The target to scan.
        :param plugins: PluginConfig objects to activate and setup before
            the test runs.
        """
        if not isinstance(target, (basestring, tuple)):
            raise TypeError('Expected basestring or tuple in scan target.')

        if isinstance(target, tuple):
            target = tuple([URL(u) for u in target])

        elif isinstance(target, basestring):
            target = (URL(target), )

        if verify_targets:
            self._verify_targets_up(target)

        target_opts = create_target_option_list(*target)
        self.w3afcore.target.set_options(target_opts)

        # Enable plugins to be tested
        for ptype, plugincfgs in plugins.items():
            self.w3afcore.plugins.set_plugins([p.name for p in plugincfgs],
                                              ptype)

            for pcfg in plugincfgs:

                if pcfg.name == 'all':
                    continue

                plugin_instance = self.w3afcore.plugins.get_plugin_inst(
                    ptype, pcfg.name)
                default_option_list = plugin_instance.get_options()
                unit_test_options = pcfg.options

                for option in default_option_list:
                    if option.get_name() not in unit_test_options:
                        unit_test_options.add(option)

                self.w3afcore.plugins.set_plugin_options(
                    ptype, pcfg.name, unit_test_options)

        # Enable text output plugin for debugging
        environ_debug = os.environ.get('DEBUG', '0') == '1'
        if debug or environ_debug:
            self._configure_debug()

        # Verify env and start the scan
        self.w3afcore.plugins.init_plugins()
        self.w3afcore.verify_environment()
        self.w3afcore.start()

        #
        # I want to make sure that we don't have *any hidden* exceptions in our
        # tests. This was in tearDown before, but moved here because I was
        # getting failed assertions in my test code that were because of
        # exceptions in the scan and they were hidden.
        #
        if assert_exceptions:
            exception_handler = self.w3afcore.exception_handler
            caught_exceptions = exception_handler.get_all_exceptions()
            msg = self._pprint_exception_summary(caught_exceptions)
            self.assertEqual(len(caught_exceptions), 0, msg)
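Example No. 21 below shows a real caller; as a minimal, hypothetical illustration of the expected arguments (a target URL plus a dict that maps plugin types to tuples of PluginConfig objects), a child test class might look like this:

# Hedged sketch of a typical caller; the class name, target path and plugin
# selection are made up for illustration.
class TestXSSSmoke(PluginTest):

    target_url = get_moth_http('/audit/xss/')

    def test_scan_finds_xss(self):
        self._scan(self.target_url, {'audit': (PluginConfig('xss'),)})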
Example No. 19
    def test_offline_port_via_proxy(self):
        url = URL('http://127.0.0.1:8181/')
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertEqual(http_response.get_code(), 400)
Example No. 20
    def setUp(self):
        self.url = URL('http://www.w3af.com/')
        cf.cf.save('fuzzable_headers', [])
        cf.cf.save('form_fuzzing_mode', 'tmb')
Example No. 21
class TestGeneric(PluginTest):

    base_url = get_moth_http('/auth/auth_1/')
    demo_testfire = 'http://demo.testfire.net/bank/'

    _run_config = {
        'target': base_url,
        'plugins': {
            'crawl': (
                PluginConfig(
                    'web_spider',
                    ('only_forward', True, PluginConfig.BOOL),
                    ('ignore_regex', '.*logout.*', PluginConfig.STR)),
            ),
            'audit': (
                PluginConfig('xss'),
            ),
            'auth': (
                PluginConfig(
                    'generic',
                    ('username', '*****@*****.**', PluginConfig.STR),
                    ('password', 'passw0rd', PluginConfig.STR),
                    ('username_field', 'username', PluginConfig.STR),
                    ('password_field', 'password', PluginConfig.STR),
                    ('auth_url', URL(base_url + 'login_form.py'),
                     PluginConfig.URL),
                    ('check_url', URL(base_url + 'post_auth_xss.py'),
                     PluginConfig.URL),
                    ('check_string', 'read your input', PluginConfig.STR)),
            ),
        }
    }

    demo_testfire_net = {
        'target': demo_testfire,
        'plugins': {
            'crawl': (
                PluginConfig(
                    'web_spider',
                    ('only_forward', True, PluginConfig.BOOL),
                    ('ignore_regex', '.*logout.*', PluginConfig.STR),
                    ('follow_regex', '.*queryxpath.*', PluginConfig.STR)),
            ),
            'auth': (
                PluginConfig(
                    'generic',
                    ('username', 'admin', PluginConfig.STR),
                    ('password', 'admin', PluginConfig.STR),
                    ('username_field', 'uid', PluginConfig.STR),
                    ('password_field', 'passw', PluginConfig.STR),
                    ('auth_url', URL(demo_testfire + 'login.aspx'),
                     PluginConfig.URL),
                    ('check_url', URL(demo_testfire + 'main.aspx'),
                     PluginConfig.URL),
                    ('check_string', 'View Recent Transactions',
                     PluginConfig.STR)),
            ),
        }
    }

    @attr('smoke')
    def test_post_auth_xss(self):
        self._scan(self._run_config['target'], self._run_config['plugins'])

        vulns = self.kb.get('xss', 'xss')

        self.assertEquals(len(vulns), 1, vulns)

        vuln = vulns[0]
        self.assertEquals(vuln.get_name(), 'Cross site scripting vulnerability')
        self.assertEquals(vuln.get_token_name(), 'text')
        self.assertEquals(vuln.get_url().get_path(),
                          '/auth/auth_1/post_auth_xss.py')

    @attr('internet')
    def test_demo_testfire_net(self):
        # We don't control the demo.testfire.net domain, so we'll check if its
        # up before doing anything else
        uri_opener = ExtendedUrllib()
        login_url = URL(self.demo_testfire + 'login.aspx')
        try:
            res = uri_opener.GET(login_url)
        except:
            raise SkipTest('demo.testfire.net is unreachable!')
        else:
            if 'Online Banking Login' not in res.body:
                raise SkipTest('demo.testfire.net has changed!')

        self._scan(self.demo_testfire_net['target'],
                   self.demo_testfire_net['plugins'])

        urls = self.kb.get_all_known_urls()
        url_strings = set(str(u) for u in urls)

        self.assertTrue(self.demo_testfire + 'queryxpath.aspx' in url_strings)
        self.assertTrue(
            self.demo_testfire + 'queryxpath.aspx.cs' in url_strings)
Example No. 22
    def test_raise_on_local_domain(self):
        url = URL('http://moth/')
        fr = FuzzableRequest(url, method='GET')
        ado = archive_dot_org()
        self.assertRaises(RunOnce, ado.crawl_wrapper, fr)
Example No. 23
    def __init__(self):
        self.name = ''
        self.url = URL('http://host.tld/')
        self.data = parse_qs('')
        self.method = 'GET'
        self.vulnerable_parameter = ''
Example No. 24
def http_request_parser(head, postdata):
    """
    This function parses HTTP Requests from a string to a FuzzableRequest.

    :param head: The head of the request.
    :param postdata: The post data of the request
    :return: A FuzzableRequest object with all the corresponding information
        that was sent in head and postdata

    :author: Andres Riancho ([email protected])

    """
    # Parse the request head, the strip() helps us deal with the \r (if any)
    splitted_head = head.split('\n')
    splitted_head = [h.strip() for h in splitted_head if h]

    if not splitted_head:
        msg = 'The HTTP request is invalid.'
        raise BaseFrameworkException(msg)

    # Get method, uri, version
    method_uri_version = splitted_head[0]
    first_line = method_uri_version.split(' ')
    if len(first_line) == 3:
        # Ok, we have something like "GET /foo HTTP/1.0". This is the best case
        # for us!
        method, uri, version = first_line

    elif len(first_line) < 3:
        msg = 'The HTTP request has an invalid <method> <uri> <version>: "%s"'
        raise BaseFrameworkException(msg % method_uri_version)

    elif len(first_line) > 3:
        # GET /hello world.html HTTP/1.0
        # Mostly because we are permissive... we are going to try to parse
        # the request...
        method = first_line[0]
        version = first_line[-1]
        uri = ' '.join(first_line[1:-1])

    check_version_syntax(version)

    # If we got here, we have a nice method, uri, version first line
    # Now we parse the headers (easy!) and finally we send the request
    headers_str = splitted_head[1:]
    headers_inst = Headers()
    for header in headers_str:
        one_splitted_header = header.split(':', 1)
        if len(one_splitted_header) == 1:
            msg = 'The HTTP request has an invalid header: "%s".'
            raise BaseFrameworkException(msg % header)

        header_name = one_splitted_header[0].strip()
        header_value = one_splitted_header[1].strip()
        if header_name in headers_inst:
            headers_inst[header_name] += ', ' + header_value
        else:
            headers_inst[header_name] = header_value

    host, _ = headers_inst.iget('host', None)
    
    try:
        uri = URL(check_uri_syntax(uri, host))
    except ValueError, ve:
        raise BaseFrameworkException(str(ve))
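A minimal usage sketch for this parser; the import path is an assumption based on the w3af package layout seen elsewhere in these examples, and the result relies on the FuzzableRequest return value described in the docstring:

# Hedged usage sketch; import path assumed, not verified against the source.
from w3af.core.data.parsers.http_request_parser import http_request_parser

head = ('POST /login.php HTTP/1.1\r\n'
        'Host: w3af.org\r\n'
        'Content-Type: application/x-www-form-urlencoded\r\n')
postdata = 'user=admin&passwd=s3cr3t'

fuzzable_request = http_request_parser(head, postdata)
# Expected: a FuzzableRequest for http://w3af.org/login.php with method POST
# and the post-data above.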
Example No. 25
class test_http_vs_https_dist(unittest.TestCase):
    """
    :author: Javier Andalia <jandalia =at= gmail.com>
    """

    test_url = URL('http://host.tld')
    tracedict = {
        'localhost': {
            1: ('192.168.1.1', False),
            3: ('200.115.195.33', False),
            5: ('207.46.47.14', True)
        }
    }

    def setUp(self):
        kb.kb.cleanup()

    def test_discover_override_port(self):
        plugininst = hvshsdist.http_vs_https_dist()
        # pylint: disable=E0202
        # An attribute set in plugins.tests.infrastructure.
        # test_http_vs_https_dist (line 53) hides this method
        plugininst._has_permission = MagicMock(return_value=True)

        url = URL('https://host.tld:4444/')
        fuzz_req = FuzzableRequest(url)

        # HTTPS and HTTP responses, with one different hop
        tracedict1 = copy.deepcopy(self.tracedict)
        tracedict2 = copy.deepcopy(self.tracedict)
        tracedict2['localhost'][3] = ('200.200.0.0', False)
        self._mock_traceroute(tracedict1, tracedict2)

        # Mock output manager. Ensure that is called with the proper desc.
        om.out.information = MagicMock(return_value=True)
        plugininst.discover(fuzz_req)

        result = ('Routes to target "host.tld" using ports 80 and 4444 are different:\n'\
                  '  TCP trace to host.tld:80\n    0 192.168.1.1\n    1 200.200.0.0\n    2 207.46.47.14\n'\
                  '  TCP trace to host.tld:4444\n    0 192.168.1.1\n    1 200.115.195.33\n    2 207.46.47.14')
        om.out.information.assert_called_once_with(result)

    def test_discover_eq_routes(self):
        plugininst = hvshsdist.http_vs_https_dist()
        plugininst._has_permission = MagicMock(return_value=True)

        url = URL('https://host.tld:80/')
        fuzz_req = FuzzableRequest(url)

        # HTTPS and HTTP responses, with the same hops
        tracedict1 = copy.deepcopy(self.tracedict)
        tracedict2 = copy.deepcopy(self.tracedict)
        self._mock_traceroute(tracedict1, tracedict2)

        # Mock output manager. Ensure that is called with the proper desc.
        om.out.information = MagicMock(
            side_effect=ValueError('Unexpected call.'))
        plugininst.discover(fuzz_req)

        infos = kb.kb.get('http_vs_https_dist', 'http_vs_https_dist')
        self.assertEqual(len(infos), 1)

        info = infos[0]
        self.assertEqual('HTTP traceroute', info.get_name())
        self.assertTrue('are the same' in info.get_desc())

    def test_discover_diff_routes(self):
        plugininst = hvshsdist.http_vs_https_dist()
        plugininst._has_permission = MagicMock(return_value=True)

        url = URL('https://host.tld/')
        fuzz_req = FuzzableRequest(url)

        # HTTPS and HTTP responses, with one different hop
        tracedict1 = copy.deepcopy(self.tracedict)
        tracedict2 = copy.deepcopy(self.tracedict)
        tracedict2['localhost'][3] = ('200.200.0.0', False)
        self._mock_traceroute(tracedict1, tracedict2)

        # Mock output manager. Ensure that is called with the proper desc.
        om.out.information = MagicMock(return_value=True)
        plugininst.discover(fuzz_req)

        result = ('Routes to target "host.tld" using ports 80 and 443 are different:\n'\
                  '  TCP trace to host.tld:80\n    0 192.168.1.1\n    1 200.200.0.0\n    2 207.46.47.14\n'\
                  '  TCP trace to host.tld:443\n    0 192.168.1.1\n    1 200.115.195.33\n    2 207.46.47.14')
        om.out.information.assert_called_once_with(result)

    def test_discover_runonce(self):
        """ Discovery routine must be executed only once. Upcoming calls should
        fail"""
        url = URL('https://host.tld/')
        fuzz_req = FuzzableRequest(url)

        plugininst = hvshsdist.http_vs_https_dist()
        plugininst._has_permission = MagicMock(side_effect=[True, True])

        plugininst.discover(fuzz_req)
        self.assertRaises(RunOnce, plugininst.discover, fuzz_req)

    def test_not_root_user(self):
        plugininst = hvshsdist.http_vs_https_dist()

        plugininst._has_permission = MagicMock(return_value=False)

        with patch('w3af.plugins.infrastructure.http_vs_https_dist.om.out'
                   ) as om_mock:
            plugininst.discover(None)
            ecall = call.error(hvshsdist.PERM_ERROR_MSG)
            self.assertIn(ecall, om_mock.mock_calls)

    def _mock_traceroute(self, trace_resp_1, trace_resp_2):
        """
        Helper method: Mocks scapy 'traceroute' function
        """
        https_tracerout_obj_1 = Mock()
        https_tracerout_obj_1.get_trace = MagicMock(return_value=trace_resp_1)
        resp_tuple_1 = (https_tracerout_obj_1, None)

        https_tracerout_obj_2 = Mock()
        https_tracerout_obj_2.get_trace = MagicMock(return_value=trace_resp_2)
        resp_tuple_2 = (https_tracerout_obj_2, None)

        hvshsdist.traceroute = create_autospec(
            hvshsdist.traceroute, side_effect=[resp_tuple_1, resp_tuple_2])
Example No. 26
    def get_url(self):
        return URL('http://w3af.com/a/b/c.php')
Example No. 27
    def test_clean_reference_directory_file(self):
        self.assertEqual(
            self.vdb._clean_reference(URL('http://w3af.org/foo/index.php')),
            u'http://w3af.org/foo/index.php')
Example No. 28
    def setUp(self):
        self.fuzzer_config = {'fuzz_cookies': True}
        self.payloads = ['abc', 'def']
        self.url = URL('http://moth/')
Example No. 29
    def test_clean_reference_int_str(self):
        self.assertEqual(
            self.vdb._clean_reference(
                URL('http://w3af.org/index.php?id=2&foo=bar')),
            u'http://w3af.org/index.php?id=number&foo=string')
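Taken together, the _clean_reference tests (Examples No. 2, 3, 27 and 29) suggest that query-string values are normalized by type: numeric values collapse to 'number', everything else to 'string', and references without a query string pass through unchanged. A standalone sketch of that idea, not w3af's actual implementation:

# Hedged sketch of the normalization these tests exercise; this is NOT the
# real _clean_reference, only the same idea in standalone form.
from urlparse import urlparse, parse_qsl  # Python 2, matching the code base


def clean_reference_sketch(reference):
    parsed = urlparse(reference)
    if not parsed.query:
        return reference

    cleaned = []
    for name, value in parse_qsl(parsed.query):
        placeholder = 'number' if value.isdigit() else 'string'
        cleaned.append('%s=%s' % (name, placeholder))

    base = '%s://%s%s' % (parsed.scheme, parsed.netloc, parsed.path)
    return base + '?' + '&'.join(cleaned)

# clean_reference_sketch('http://w3af.org/index.php?id=2&foo=bar')
# -> 'http://w3af.org/index.php?id=number&foo=string'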
Example No. 30
    def test_str_qs(self):
        fr = FuzzableRequest(URL("http://www.w3af.com/?id=3"))
        expected = 'Method: GET | http://www.w3af.com/ | Query string: (id)'
        self.assertEqual(str(fr), expected)