Example #1
    def from_httplib_resp(cls, httplibresp, original_url=None):
        """
        Factory function. Build a HTTPResponse object from a
        httplib.HTTPResponse instance
    
        :param httplibresp: httplib.HTTPResponse instance
        :param original_url: Optional 'url_object' instance.
    
        :return: A HTTPResponse instance
        """
        resp = httplibresp
        code, msg, hdrs, body = (resp.code, resp.msg, resp.info(), resp.read())
        hdrs = Headers(hdrs.items())

        if original_url:
            url_inst = URL(resp.geturl(), original_url.encoding)
            url_inst = url_inst.url_decode()
        else:
            url_inst = original_url = URL(resp.geturl())

        httplib_time = DEFAULT_WAIT_TIME
        if hasattr(httplibresp, 'get_wait_time'):
            # This is defined in the keep alive http response object
            httplib_time = httplibresp.get_wait_time()

        if isinstance(resp, urllib2.HTTPError):
            # This is possible because in errors.py I do:
            # err = urllib2.HTTPError(req.get_full_url(), code, msg, hdrs, resp)
            charset = getattr(resp.fp, 'encoding', None)
        else:
            # The encoding attribute is only set on CachedResponse instances
            charset = getattr(resp, 'encoding', None)
        
        return cls(code, body, hdrs, url_inst, original_url,
                   msg, charset=charset, time=httplib_time)
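A minimal usage sketch, assuming the snippet above is the w3af HTTPResponse classmethod and that a plain urllib2 response is at hand (the URL and variable names below are illustrative, not taken from the original):

    import urllib2

    # Any urllib2/httplib-style response exposes .code, .msg, .info(), .read()
    # and .geturl(), which is everything the factory reads.
    raw_resp = urllib2.urlopen('http://w3af.com/')  # hypothetical request
    http_response = HTTPResponse.from_httplib_resp(raw_resp)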
Example #2
    def do_ALL(self):
        global global_first_request
        if global_first_request:
            global_first_request = False
            om.out.information(
                'The user is navigating through the spider_man proxy.')

        # Convert to url_object
        path = URL(self.path)

        if path == TERMINATE_URL:
            om.out.information('The user terminated the spider_man session.')
            self._send_end()
            self._spider_man.stop_proxy()
            return

        om.out.debug("[spider_man] Handling request: %s %s" %
                    (self.command, path))
        #   Send this information to the plugin so it can send it to the core
        freq = self._create_fuzzable_request()
        self._spider_man.append_fuzzable_request(freq)

        grep = True
        if path.get_domain() != self.server.w3afLayer.target_domain:
            grep = False

        try:
            response = self._send_to_server(grep=grep)
        except Exception, e:
            self._send_error(e)
Example #3
 def test_url_join_case03(self):
     u = URL('http://w3af.com/def/jkl/')
     self.assertEqual(u.url_join('/def/abc.html').url_string,
                      u'http://w3af.com/def/abc.html')
     
     self.assertEqual(u.url_join('def/abc.html').url_string,
                      u'http://w3af.com/def/jkl/def/abc.html')
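As a hedged aside, url_join appears to follow standard RFC 3986 relative-reference resolution, so parent references should collapse against the last directory as in a browser; the '../' case below is an assumption based on that, not taken from the test above:

    base = URL('http://w3af.com/def/jkl/')
    # Expected under RFC 3986 semantics: u'http://w3af.com/def/abc.html'
    joined = base.url_join('../abc.html').url_string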
Example #4
 def test_simplest_url(self):
     u = URL('http://w3af.com/foo/bar.txt')
     
     self.assertEqual(u.path, '/foo/bar.txt')
     self.assertEqual(u.scheme, 'http')
     self.assertEqual(u.get_file_name(), 'bar.txt')
     self.assertEqual(u.get_extension(), 'txt')
Example #5
 def test_default_proto(self):
     """
     http is the default protocol, so we can provide URLs with no proto
     """
     u = URL("w3af.com")
     self.assertEqual(u.get_domain(), "w3af.com")
     self.assertEqual(u.get_protocol(), "http")
Example #6
    def test_simplest_url(self):
        u = URL("http://w3af.com/foo/bar.txt")

        self.assertEqual(u.path, "/foo/bar.txt")
        self.assertEqual(u.scheme, "http")
        self.assertEqual(u.get_file_name(), "bar.txt")
        self.assertEqual(u.get_extension(), "txt")
Example #7
 def test_url_join_case01(self):
     u = URL('http://w3af.com/foo.bar')
     self.assertEqual(u.url_join('abc.html').url_string,
                      u'http://w3af.com/abc.html')
     
     self.assertEqual(u.url_join('/abc.html').url_string,
                      u'http://w3af.com/abc.html')
Example #8
 def test_encode_decode(self):
     """Encode and Decode should be able to run one on the result of the
     other and return the original"""
     original = URL(u"https://w3af.com:443/file.asp?id=1%202")
     encoded = original.url_encode()
     decoded = URL(encoded).url_decode()
     self.assertEqual(original, decoded)
Example #9
    def test_can_be_pickled(self):
        # Pickle a URL object that contains a cache
        u = URL('http://www.w3af.com/')
        domain_path = u.get_domain_path()

        cPickle.dumps(u)
        cPickle.dumps(domain_path)
Example #10
 def test_remove_fragment(self):
     u = URL('http://w3af.com/foo/bar.txt?id=3#foobar')
     self.assertEqual(u.remove_fragment().url_string,
                      u'http://w3af.com/foo/bar.txt?id=3')
     
     u = URL('http://w3af.com/foo/bar.txt#foobar')
     self.assertEqual(u.remove_fragment().url_string,
                      u'http://w3af.com/foo/bar.txt')
Example #11
    def test_from_url_keep_form(self):
        o = URL('http://w3af.com/foo/bar.txt')
        o.querystring = URLEncodedForm()

        u = URL.from_URL(o)
        self.assertIsInstance(u.querystring, URLEncodedForm)
        self.assertIsNot(u.querystring, o.querystring)
        self.assertEqual(u.querystring, o.querystring)
Example #12
    def http_request(self, req):
        url_instance = URL(req.get_full_url())
        url_instance.set_param(self._url_parameter)

        new_request = HTTPRequest(url_instance, headers=req.headers,
                                  origin_req_host=req.get_origin_req_host(),
                                  unverifiable=req.is_unverifiable())
        return new_request
Example #13
    def test_memoized(self):
        u = URL('http://www.w3af.com/')
        self.assertEqual(u._cache, dict())

        domain_path = u.get_domain_path()
        self.assertNotEqual(u._cache, dict())
        self.assertIn(domain_path, u._cache.values())

        second_domain_path = u.get_domain_path()
        self.assertIs(domain_path, second_domain_path)

        self.assertIsInstance(domain_path, URL)
        self.assertIsInstance(second_domain_path, URL)
Example #14
    def setUp(self):
        self.kb.cleanup()
        self.w3afcore = w3afCore()
        
        if self.MOCK_RESPONSES:
            httpretty.enable()
            
            url = URL(self.target_url)
            domain = url.get_domain()
            proto = url.get_protocol()
            port = url.get_port()

            self._register_httpretty_uri(proto, domain, port)
Example #15
    def test_memoized(self):
        u = URL('http://www.w3af.com/')
        self.assertEqual(u._cache, dict())

        url = u.uri2url()
        self.assertNotEqual(u._cache, dict())
        self.assertIn(url, u._cache.values())

        second_url = u.uri2url()
        self.assertIs(url, second_url)

        self.assertIsInstance(url, URL)
        self.assertIsInstance(second_url, URL)
Example #16
 def test_from_url(self):
     o = URL('http://w3af.com/foo/bar.txt')
     u = URL.from_URL(o)
     
     self.assertEqual(u.path, '/foo/bar.txt')
     self.assertEqual(u.scheme, 'http')
     self.assertEqual(u.get_file_name(), 'bar.txt')
     self.assertEqual(u.get_extension(), 'txt')
     
     o = URL('w3af.com')
     u = URL.from_URL(o)
     self.assertEqual(u.get_domain(), 'w3af.com')
     self.assertEqual(u.get_protocol(), 'http')
Example #17
    def test_from_url(self):
        o = URL("http://w3af.com/foo/bar.txt")
        u = URL.from_URL(o)

        self.assertEqual(u.path, "/foo/bar.txt")
        self.assertEqual(u.scheme, "http")
        self.assertEqual(u.get_file_name(), "bar.txt")
        self.assertEqual(u.get_extension(), "txt")

        o = URL("w3af.com")
        u = URL.from_URL(o)
        self.assertEqual(u.get_domain(), "w3af.com")
        self.assertEqual(u.get_protocol(), "http")
Example #18
    def test_set_params(self):
        u = URL("http://w3af.com/;id=1")
        u.set_param("file=2")

        self.assertEqual(u.get_params_string(), "file=2")

        u = URL("http://w3af.com/xyz.txt;id=1?file=2")
        u.set_param("file=3")

        self.assertEqual(u.get_params_string(), "file=3")
        self.assertEqual(u.get_path_qs(), "/xyz.txt;file=3?file=2")
Example #19
 def test_set_params(self):
     u = URL('http://w3af.com/;id=1')
     u.set_param('file=2')
     
     self.assertEqual(u.get_params_string(), 'file=2')
     
     u = URL('http://w3af.com/xyz.txt;id=1?file=2')
     u.set_param('file=3')
     
     self.assertEqual(u.get_params_string(), 'file=3')
     self.assertEqual(u.get_path_qs(), '/xyz.txt;file=3?file=2')
Example #20
    def test_redirect_uri_relative(self):
        ws = web_spider()
        body = ''
        url = URL('http://www.w3af.org')
        redir_url = '/redir'
        headers = Headers([('content-type', 'text/html'),
                           ('uri', redir_url)])
        resp = HTTPResponse(200, body, headers, url, url)

        gen = ws._headers_url_generator(resp, None)

        extracted_data = [i for i in gen]
        expected_data = [(url.url_join(redir_url), None, resp, False)]

        self.assertEqual(extracted_data, expected_data)
Example #21
    def test_phishtank_match_last_url(self):
        phishtank_inst = self.w3afcore.plugins.get_plugin_inst('crawl',
                                                               'phishtank')

        vuln_url = URL(self.get_last_vulnerable_url())
        phishtank_inst.crawl(FuzzableRequest(vuln_url))

        vulns = self.kb.get('phishtank', 'phishtank')

        self.assertEqual(len(vulns), 1, vulns)
        vuln = vulns[0]

        self.assertEqual(vuln.get_name(), 'Phishing scam')
        self.assertEqual(vuln.get_severity(), MEDIUM)
        self.assertEqual(vuln.get_url().get_domain(), vuln_url.get_domain())
Example #22
    def met_search(self, query):
        """
        Query a Public Key Server.

        This method is based on the pks.py file from the massive enumeration
        toolset, coded by pdp and released under GPL v2.
        """
        url = URL(u'http://pgp.mit.edu:11371/pks/lookup')
        url.querystring = [(u'op', [u'index']), (u'search', [query])]

        try:
            response = self._uri_opener.GET(url, headers=self._headers,
                                            cache=True, grep=False)
        except HTTPRequestException:
            # Very naive exception handling for the case where we can't reach
            # the PKS server (it's down, blocking us, bad internet connection)
            return []

        content = response.get_body()

        content = re.sub('(<.*?>|&lt;|&gt;)', '', content)

        results = []
        accounts = []

        for line in content.split('\n')[2:]:
            if not line.strip():
                continue

            tokens = line.split()

            if len(tokens) >= 5:
                email = tokens[-1]
                name = ' '.join(tokens[3:-1])

                if SGMLParser.EMAIL_RE.match(email):

                    account = email.split('@')[0]
                    domain = email.split('@')[1]

                    if domain == query:
                        if account not in accounts:
                            accounts.append(account)
                            
                            pksr = PKSResult(name, account, domain, response.id)
                            results.append(pksr)

        return results
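For reference, the querystring assignment above sets two key/value pairs; assuming the usual key=value&... rendering, the request issued by self._uri_opener.GET would look roughly like this (the query value is a hypothetical domain):

    query = 'w3af.com'  # hypothetical search domain
    url = URL(u'http://pgp.mit.edu:11371/pks/lookup')
    url.querystring = [(u'op', [u'index']), (u'search', [query])]
    # Expected rendering (assumption): http://pgp.mit.edu:11371/pks/lookup?op=index&search=w3af.com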
Example #23
class xssed_dot_com(InfrastructurePlugin):
    """
    Search in xssed.com to find xssed pages.

    :author: Nicolas Crocfer ([email protected])
    :author: Raul Siles: set "." in front of the root domain to limit search
    """
    def __init__(self):
        InfrastructurePlugin.__init__(self)

        #
        #   Could change in time,
        #
        self._xssed_url = URL("http://www.xssed.com")
        self._fixed = "<img src='http://data.xssed.org/images/fixed.gif'>&nbsp;FIXED</th>"

    @runonce(exc_class=RunOnce)
    def discover(self, fuzzable_request):
        """
        Search in xssed.com and parse the output.

        :param fuzzable_request: A fuzzable_request instance that contains
                                    (among other things) the URL to test.
        """
        target_domain = fuzzable_request.get_url().get_root_domain()

        try:
            check_url = self._xssed_url.url_join(
                "/search?key=." + target_domain)
            response = self._uri_opener.GET(check_url)
        except BaseFrameworkException, e:
            msg = 'An exception was raised while running xssed_dot_com'\
                  ' plugin. Exception: "%s".' % e
            om.out.debug(msg)
        else:
Example #24
    def test_from_parts(self):
        u = URL.from_parts("http", "w3af.com", "/foo/bar.txt", None, "a=b", "frag")

        self.assertEqual(u.path, "/foo/bar.txt")
        self.assertEqual(u.scheme, "http")
        self.assertEqual(u.get_file_name(), "bar.txt")
        self.assertEqual(u.get_extension(), "txt")
Example #25
    def __init__(self):
        InfrastructurePlugin.__init__(self)

        #
        #   Could change in time,
        #
        self._xssed_url = URL("http://www.xssed.com")
        self._fixed = "<img src='http://data.xssed.org/images/fixed.gif'>&nbsp;FIXED</th>"
Example #26
 def test_from_parts(self):
     u = URL.from_parts('http', 'w3af.com', '/foo/bar.txt', None, 'a=b',
                        'frag')
     
     self.assertEqual(u.path, '/foo/bar.txt')
     self.assertEqual(u.scheme, 'http')
     self.assertEqual(u.get_file_name(), 'bar.txt')
     self.assertEqual(u.get_extension(), 'txt')
Example #27
    def test_ssl_fail_when_requesting_moth_http(self):
        """
        https://github.com/andresriancho/w3af/issues/7989

        This test takes considerable time to run since it needs to time out the
        SSL connection for each SSL protocol.
        """
        # Note that here I'm using httpS <<---- "S" and that I'm connecting to
        # the net location (host:port) of an HTTP server.
        http_url = URL(get_moth_http())
        test_url = URL('https://%s' % http_url.get_net_location())

        self.uri_opener.settings.set_max_http_retries(0)

        self.assertRaises(HTTPRequestException,
                          self.uri_opener.GET,
                          test_url,
                          timeout=1)
Example #28
    def met_search(self, query):
        """
        Query a Public Key Server.

        This method is based on the pks.py file from the massive enumeration toolset,
        coded by pdp and released under GPL v2.
        """
        url = URL(u'http://pgp.mit.edu:11371/pks/lookup')
        url.querystring = {u'op': u'index', u'search': query}

        response = self._uri_opener.GET(url, headers=self._headers,
                                        cache=True, grep=False)
        content = response.get_body()

        content = re.sub('(<.*?>|&lt;|&gt;)', '', content)

        results = []
        accounts = []

        for line in content.split('\n')[2:]:
            if not line.strip():
                continue

            tokens = line.split()

            if len(tokens) >= 5:
                email = tokens[-1]
                name = ' '.join(tokens[3:-1])

                # Copy+paste from baseparser.py
                email_regex = '([A-Z0-9\._%-]{1,45}@([A-Z0-9\.-]{1,45}\.){1,10}[A-Z]{2,4})'
                if re.match(email_regex, email, re.IGNORECASE):

                    account = email.split('@')[0]
                    domain = email.split('@')[1]

                    if domain == query:
                        if account not in accounts:
                            accounts.append(account)
                            
                            pksr = PKSResult(name, account, domain, response.id)
                            results.append(pksr)

        return results
Example #29
 def test_get_path_qs(self):
     u = URL(u'https://w3af.com:443/xyz/123/456/789/')
     self.assertEqual(u.get_path(), u'/xyz/123/456/789/')
     
     u = URL(u'https://w3af.com:443/xyz/123/456/789/')
     self.assertEqual(u.get_path_qs(), u'/xyz/123/456/789/')
     
     u = URL(u'https://w3af.com:443/xyz/file.asp')
     self.assertEqual(u.get_path_qs(), u'/xyz/file.asp')
     
     u = URL(u'https://w3af.com:443/xyz/file.asp?id=1')
     self.assertEqual(u.get_path_qs(), u'/xyz/file.asp?id=1')
Example #30
    def test_get_path_qs(self):
        u = URL(u"https://w3af.com:443/xyz/123/456/789/")
        self.assertEqual(u.get_path(), u"/xyz/123/456/789/")

        u = URL(u"https://w3af.com:443/xyz/123/456/789/")
        self.assertEqual(u.get_path_qs(), u"/xyz/123/456/789/")

        u = URL(u"https://w3af.com:443/xyz/file.asp")
        self.assertEqual(u.get_path_qs(), u"/xyz/file.asp")

        u = URL(u"https://w3af.com:443/xyz/file.asp?id=1")
        self.assertEqual(u.get_path_qs(), u"/xyz/file.asp?id=1")
Example #31
    def test_delete(self):
        i = random.randint(1, 499)

        url = URL('http://w3af.com/a/b/c.php')
        request = HTTPRequest(url, data='a=1')
        hdr = Headers([('Content-Type', 'text/html')])
        res = HTTPResponse(200, '<html>', hdr, url, url)
        res.set_id(i)

        h1 = HistoryItem()
        h1.request = request
        h1.response = res
        h1.save()

        fname = h1._get_fname_for_id(i)
        self.assertTrue(os.path.exists(fname))

        h1.delete(i)

        self.assertRaises(DBException, h1.read, i)
        self.assertFalse(os.path.exists(fname))
Example #32
    def test_analyze_cookies_fingerprint(self):
        body = ''
        url = URL('http://www.w3af.com/')
        headers = Headers({
            'content-type': 'text/html',
            'Set-Cookie': 'PHPSESSID=d98238ab39de038'
        }.items())
        response = HTTPResponse(200, body, headers, url, url, _id=1)
        request = FuzzableRequest(url, method='GET')

        self.plugin.grep(request, response)

        security = kb.kb.get('analyze_cookies', 'security')

        self.assertEqual(len(kb.kb.get('analyze_cookies', 'cookies')), 1)
        self.assertEqual(len(security), 2)
        self.assertEqual(len(kb.kb.get('analyze_cookies', 'invalid-cookies')),
                         0)

        msg = 'The remote platform is: "PHP"'
        self.assertTrue(any([True for i in security if msg in i.get_desc()]))
Example #33
    def test_error_handling_disable_per_request(self):
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        self.uri_opener.settings.set_configured_timeout(1)
        self.uri_opener.clear_timeout()
        self.uri_opener._retry = Mock()

        url = URL('http://127.0.0.1:%s/' % port)

        try:
            self.uri_opener.GET(url, error_handling=False)
        except HTTPRequestException:
            self.assertEqual(self.uri_opener._retry.call_count, 0)
        else:
            self.assertTrue(False, 'Exception not raised')

        self.uri_opener.settings.set_default_values()
Example #34
    def test_bruteforcer_default(self):
        url = URL('http://www.w3af.org/')

        bf = user_password_bruteforcer(url)

        expected_combinations = [
            ('prueba1', '123abc'),
            ('test', 'freedom'),
            ('user', 'letmein'),
            ('www.w3af.org', 'master'),  # URL feature
            ('admin', '7emp7emp'),  # l337 feature
            ('user1', ''),  # No password
            ('user1', 'user1')  # User eq password
        ]
        generated = []

        for (user, pwd) in bf.generator():
            generated.append((user, pwd))

        for expected_comb in expected_combinations:
            self.assertTrue(expected_comb in generated)
Example #35
    def test_handler_order_block(self):
        """Get an instance of the extended urllib and verify that the blacklist
        handler still works, even when mixed with all the other handlers."""
        # Configure the handler
        blocked_url = URL(get_moth_http('/abc/def/'))
        cf.cf.save('non_targets', [
            blocked_url,
        ])

        settings = opener_settings.OpenerSettings()
        settings.build_openers()
        opener = settings.get_custom_opener()

        request = HTTPRequest(blocked_url)
        request.url_object = blocked_url
        request.cookies = True
        request.get_from_cache = False
        response = opener.open(request)

        self.assertEqual(response.code, NO_CONTENT)
        self.assertEqual(response.id, 1)
Example #36
    def close_all_sockets(self, wait):
        keep_alive_http = HTTPHandler()

        uri_opener = urllib2.build_opener(keep_alive_http)

        request = HTTPRequest(URL(get_moth_http()))
        response = uri_opener.open(request)
        response.read()

        time.sleep(wait)

        pid = os.getpid()
        p = psutil.Process(pid)
        connections_before = p.get_connections()

        keep_alive_http.close_all()

        time.sleep(1)
        connections_after = p.get_connections()

        self.assertLess(len(connections_after), len(connections_before))
Example #37
    def test_kb_list_shells_file_upload_2181(self):
        """
        :see: https://github.com/andresriancho/w3af/issues/2181
        """
        w3af_core = w3afCore()
        exploit_url = URL('http://w3af.org/')

        shell = FileUploadShell(MockVuln(), w3af_core.uri_opener,
                                w3af_core.worker_pool, exploit_url)
        kb.append('a', 'b', shell)

        shells = kb.get_all_shells(w3af_core=w3af_core)
        self.assertEqual(len(shells), 1)
        unpickled_shell = shells[0]

        self.assertEqual(shell, unpickled_shell)
        self.assertIs(unpickled_shell._uri_opener, w3af_core.uri_opener)
        self.assertIs(unpickled_shell.worker_pool, w3af_core.worker_pool)
        self.assertEqual(unpickled_shell._exploit_url, shell._exploit_url)

        w3af_core.quit()
Example #38
    def test_special_url_characters(self):
        initial_url = 'http://w3af.org/' \
                      '?__VIEWSTATE=/' \
                      '&__EVENTVALIDATION=\\X+W=='\
                      '&_ctl0:TextBox1=%s'

        url = URL(initial_url % '')
        freq = FuzzableRequest(url)
        generated_mutants = create_mutants(freq, self.payloads)

        decoded_url = 'http://w3af.org/' \
                      '?__VIEWSTATE=/' \
                      '&__EVENTVALIDATION=\\X%%20W=='\
                      '&_ctl0:TextBox1=%s'

        expected_urls = [decoded_url % 'abc', decoded_url % 'def']
        generated_urls = [str(m.get_uri()) for m in generated_mutants]

        self.assertEqual(generated_urls, expected_urls)
        self.assertAllInstance(generated_mutants, QSMutant)
        self.assertAllHaveTokens(generated_mutants)
Example #39
 def start(self, tag, attrib):
     """
     <vulnerability id="[87]" method="GET" name="Cross site scripting vulnerability"
                    plugin="xss" severity="Medium" url="http://moth/w3af/audit/xss/simple_xss_no_script_2.php"
                    var="text">
     """
     if tag == 'vulnerability':
         name = attrib['name']
         plugin = attrib['plugin']
         
         v = MockVuln(name, None, 'High', 1, plugin)
         v.set_url(URL(attrib['url']))
         
         self.vulns.append(v)
     
     # <body content-encoding="text">
     elif tag == 'body':
         content_encoding = attrib['content-encoding']
         
         assert content_encoding == 'text'
         self._inside_body = True
Example #40
    def test_basic(self):
        url = URL('http://www.w3af.org')
        request = HTTPRequest(url, cache=True)

        cache = CacheHandler()
        self.assertEqual(cache.default_open(request), None)

        response = FakeHttplibHTTPResponse(200, 'OK', 'spameggs', Headers(),
                                           url.url_string)
        cache.http_response(request, response)

        cached_response = cache.default_open(request)

        self.assertIsInstance(cached_response, SQLCachedResponse)

        self.assertEqual(cached_response.code, response.code)
        self.assertEqual(cached_response.msg, response.msg)
        self.assertEqual(cached_response.read(), response.read())
        self.assertEqual(Headers(cached_response.info().items()),
                         response.info())
        self.assertEqual(cached_response.geturl(), response.geturl())
Example #41
    def test_analyze_cookies_no_httponly(self):
        body = ''
        url = URL('http://www.w3af.com/')
        headers = Headers({
            'content-type': 'text/html',
            'Set-Cookie': 'abc=def'
        }.items())
        response = HTTPResponse(200, body, headers, url, url, _id=1)
        request = FuzzableRequest(url, method='GET')

        self.plugin.grep(request, response)

        security = kb.kb.get('analyze_cookies', 'security')

        self.assertEqual(len(kb.kb.get('analyze_cookies', 'cookies')), 1)
        self.assertEqual(len(security), 1)
        self.assertEqual(len(kb.kb.get('analyze_cookies', 'invalid-cookies')),
                         0)

        msg = 'A cookie without the HttpOnly flag'
        self.assertTrue(any([True for i in security if msg in i.get_desc()]))
Example #42
    def test_is_token_checked_false(self):
        """
        This covers the case where there is a token but for some reason it
        is NOT verified by the web application.
        """
        generator = URL('http://moth/w3af/audit/csrf/vulnerable-token-ignored/')
        http_response = self.uri_opener.GET(generator)
        
        # Please note that this freq holds a fresh/valid CSRF token
        cookie = Cookie.from_http_response(http_response)
        freq = FuzzableRequest(generator, cookie=cookie)

        # FIXME:
        # And I use this token here to get the original response, and if the
        # application is properly developed, that token will be invalidated
        # and that's where this algorithm fails.
        original_response = self.uri_opener.send_mutant(freq)
        
        token = {'token': 'cc2544ba4af772c31bc3da928e4e33a8'}
        checked = self.csrf_plugin._is_token_checked(freq, token, original_response)
        self.assertFalse(checked)
Example #43
    def test_json_mutant_create_mutants(self):
        freq = JSONPostDataRequest(URL('http://www.w3af.com/?id=3'))
        freq.set_dc({"a": "b", "c": "d"})

        generated_mutants = JSONMutant.create_mutants(freq, self.payloads, [],
                                                      False,
                                                      self.fuzzer_config)

        self.assertEqual(len(generated_mutants), 4, generated_mutants)

        m0 = generated_mutants[0]
        self.assertEqual(m0.get_data(), '{"a": "abc", "c": "d"}')

        m1 = generated_mutants[1]
        self.assertEqual(m1.get_data(), '{"a": "53", "c": "d"}')

        m2 = generated_mutants[2]
        self.assertEqual(m2.get_data(), '{"a": "b", "c": "abc"}')

        m3 = generated_mutants[3]
        self.assertEqual(m3.get_data(), '{"a": "b", "c": "53"}')
Example #44
    def test_discover_diff_routes(self):
        plugininst = hvshsdist.http_vs_https_dist()
        plugininst._has_permission = MagicMock(return_value=True)

        url = URL('https://host.tld/')
        fuzz_req = FuzzableRequest(url)

        # HTTPS and HTTP responses, with one different hop
        tracedict1 = copy.deepcopy(self.tracedict)
        tracedict2 = copy.deepcopy(self.tracedict)
        tracedict2['localhost'][3] = ('200.200.0.0', False)
        self._mock_traceroute(tracedict1, tracedict2)

        # Mock output manager. Ensure that is called with the proper desc.
        om.out.information = MagicMock(return_value=True)
        plugininst.discover(fuzz_req)

        result = ('Routes to target "host.tld" using ports 80 and 443 are different:\n'\
                  '  TCP trace to host.tld:80\n    0 192.168.1.1\n    1 200.200.0.0\n    2 207.46.47.14\n'\
                  '  TCP trace to host.tld:443\n    0 192.168.1.1\n    1 200.115.195.33\n    2 207.46.47.14')
        om.out.information.assert_called_once_with(result)
Example #45
 def test_not_find_credit_cards(self):
     invalid_cards = (
         'b71449635402848',  # Start with a letter
         '356 600 20203605 05',
         # Spaces in incorrect locations
         '35660020203605054',  # Extra number added at the end
         '13566002020360505',
         # Extra number added at the beginning
         # Not a credit card at all
         '_c3E6E547C-BFB7-4897-86EA-882A04BDE274_kDF867BE9-DEC5-0FFF-6629-127552370B17',
     )
     for card in invalid_cards:
         body = '<A href="#123">%s</A>' % card
         url = URL('http://www.w3af.com/')
         headers = Headers([('content-type', 'text/html')])
         response = HTTPResponse(200, body, headers, url, url, _id=1)
         request = FuzzableRequest(url, method='GET')
         self.plugin.grep(request, response)
         self.assertEquals(len(kb.kb.get('credit_cards', 'credit_cards')),
                           0)
         kb.kb.clear('credit_cards', 'credit_cards')
Example #46
    def test_from_info(self):
        url = URL('http://moth/')

        inst1 = MockInfo()
        inst1.set_uri(url)
        inst1['eggs'] = 'spam'

        inst2 = Info.from_info(inst1)

        self.assertNotEqual(id(inst1), id(inst2))
        self.assertIsInstance(inst2, Info)

        self.assertEqual(inst1.get_uri(), inst2.get_uri())
        self.assertEqual(inst1.get_uri(), url)
        self.assertEqual(inst2.get_uri(), url)
        self.assertEqual(inst2['eggs'], 'spam')
        self.assertEqual(inst1.get_url(), inst2.get_url())
        self.assertEqual(inst1.get_method(), inst2.get_method())
        self.assertEqual(inst1.get_dc(), inst2.get_dc())
        self.assertEqual(inst1.get_var(), inst2.get_var())
        self.assertEqual(inst1.get_to_highlight(), inst2.get_to_highlight())
Example #47
    def test_ghdb_match(self):

        call_count = 0

        def generate_google_result(*args):
            global call_count
            call_count += 1
            if call_count == 52:

                return [
                    google_result,
                ]
            else:
                return []

        pmodule = 'w3af.plugins.crawl.ghdb.%s'
        with patch(pmodule % 'is_private_site') as private_site_mock:
            with patch.object(google, 'get_n_results') as google_mock_method:

                # Mock
                private_site_mock.return_value = False

                google_result = GoogleResult(
                    URL('http://moth/w3af/crawl/ghdb/'))
                google_mock_method.side_effect = [[], ] * 50 + [[google_result, ]] +\
                                                 [[], ] * 50000

                # Scan
                cfg = self._run_configs['cfg']
                self._scan(self.private_url, cfg['plugins'])

        # Assert
        vulns = self.kb.get('ghdb', 'vuln')
        self.assertEqual(len(vulns), 1, vulns)

        vuln = vulns[0]
        self.assertEqual(vuln.get_url().url_string,
                         'http://moth/w3af/crawl/ghdb/')
        self.assertEqual(vuln.get_severity(), severity.MEDIUM)
        self.assertEqual(vuln.get_name(), 'Google hack database match')
Example #48
    def test_mutant_creation_file(self):
        form_params = FormParameters()
        form_params.add_input([("name", "username"), ("value", "default")])
        form_params.add_file_input([("name", "file_upload")])

        form = MultipartContainer(form_params)
        freq = FuzzableRequest(URL('http://www.w3af.com/upload'),
                               post_data=form,
                               method='POST')

        payloads = [file(__file__)]
        created_mutants = PostDataMutant.create_mutants(
            freq, payloads, [
                'file_upload',
            ], False, self.fuzzer_config)

        self.assertEqual(len(created_mutants), 1, created_mutants)

        mutant = created_mutants[0]

        self.assertIsInstance(mutant.get_token().get_value(), file)
        self.assertEqual(mutant.get_dc()['username'][0], 'default')
Example #49
    def test_from_mutant(self):
        url = URL('http://moth/?a=1&b=2')
        payloads = ['abc', 'def']

        freq = FuzzableRequest(url)
        fuzzer_config = {}

        created_mutants = QSMutant.create_mutants(freq, payloads, [], False,
                                                  fuzzer_config)

        mutant = created_mutants[0]

        inst = Info.from_mutant('TestCase', 'desc' * 30, 1, 'plugin_name',
                                mutant)

        self.assertIsInstance(inst, Info)

        self.assertEqual(inst.get_uri(), mutant.get_uri())
        self.assertEqual(inst.get_url(), mutant.get_url())
        self.assertEqual(inst.get_method(), mutant.get_method())
        self.assertEqual(inst.get_dc(), mutant.get_dc())
        self.assertIsInstance(inst.get_dc(), QueryString)
Example #50
    def test_3234(self):
        """
        is_404 can not handle URLs with : in path #3234

        :see: https://github.com/andresriancho/w3af/issues/3234
        """
        # setup
        httpretty.register_uri(httpretty.GET,
                               re.compile("w3af.com/(.*)"),
                               body="404 found",
                               status=404)

        url = URL('http://w3af.com/d:a')
        resp = HTTPResponse(200, 'body', Headers(), url, url)

        # setup, just to make some config settings values default
        core = w3afCore()
        core.scan_start_hook()

        # test
        db = fingerprint_404_singleton()
        self.assertFalse(db._is_404_with_extra_request(resp, 'body'))
Example #51
    def test_parse_db_line_basic_w3af_scan_database(self):
        """
        This test reads a line from the w3af scan database and parses it; its
        objective is to make sure that we can read both formats (or, better yet,
        that both files, the one from nikto and the one we have, are in the same
        format).
        
        https://github.com/andresriancho/w3af/issues/317
        """
        config = Config([], [], [], [], [])
        url = URL('http://moth/')
        pykto_inst = self.w3afcore.plugins.get_plugin_inst('crawl', 'pykto')
        nikto_parser = NiktoTestParser(pykto_inst._extra_db_file, config, url)

        # Go through all the lines
        generator = nikto_parser.test_generator()
        nikto_tests = [i for (i, ) in generator]

        self.assertLess(len(nikto_parser.ignored), 30,
                        len(nikto_parser.ignored))

        self.assertEqual(len(nikto_tests), 3)

        nikto_test = nikto_tests[0]

        self.assertEqual(nikto_test.id, '900001')
        self.assertEqual(nikto_test.osvdb, '0')
        self.assertEqual(nikto_test.tune, '3')
        self.assertEqual(nikto_test.uri.url_string, 'http://moth/debug.seam')
        self.assertEqual(nikto_test.method, 'GET')
        self.assertIsInstance(nikto_test.match_1, type(re.compile('')))
        self.assertEqual(nikto_test.match_1_or, None)
        self.assertEqual(nikto_test.match_1_and, None)
        self.assertEqual(nikto_test.fail_1, None)
        self.assertEqual(nikto_test.fail_2, None)
        self.assertEqual(nikto_test.message,
                         'JBoss Seam Debug Page is available.')
        self.assertEqual(nikto_test.data, '')
        self.assertEqual(nikto_test.headers, '')
Example #52
    def test_strange_headers_positive(self):
        body = 'Hello world'
        url = URL('http://www.w3af.com/')
        headers = Headers([('content-type', 'text/html'),
                           ('hello-world', 'yes!')])
        request = FuzzableRequest(url, method='GET')

        resp_positive = HTTPResponse(200, body, headers, url, url, _id=1)
        self.plugin.grep(request, resp_positive)

        info_sets = kb.kb.get('strange_headers', 'strange_headers')
        self.assertEquals(len(info_sets), 1)

        info = info_sets[0]
        expected_desc = u'The remote web server sent 1 HTTP responses with' \
                        u' the uncommon response header "hello-world", one' \
                        u' of the received header values is "yes!". The' \
                        u' first ten URLs which sent the uncommon header' \
                        u' are:\n - http://www.w3af.com/\n'
        self.assertEqual(info.get_name(), 'Strange header')
        self.assertEqual(info.get_url(), url)
        self.assertEqual(info.get_desc(), expected_desc)
Example #53
    def test_analyze_cookies_secure_over_http(self):
        body = ''
        url = URL('http://www.w3af.com/')
        headers = Headers({
            'content-type': 'text/html',
            'Set-Cookie': 'abc=def; secure;'
        }.items())
        response = HTTPResponse(200, body, headers, url, url, _id=1)
        request = FuzzableRequest(url, method='GET')

        self.plugin.grep(request, response)

        false_secure = kb.kb.get('analyze_cookies', 'false_secure')

        self.assertEqual(len(kb.kb.get('analyze_cookies', 'cookies')), 1)
        self.assertEqual(len(false_secure), 1)
        self.assertEqual(len(kb.kb.get('analyze_cookies', 'invalid-cookies')),
                         0)

        msg = 'A cookie marked with the secure flag'
        self.assertTrue(
            any([True for i in false_secure if msg in i.get_desc()]))
Example #54
    def test_ntlm_auth_valid_creds(self):

        self.uri_opener = ExtendedUrllib()

        settings = OpenerSettings()
        options = settings.get_options()
        ntlm_domain = options['ntlm_auth_domain']
        ntlm_user = options['ntlm_auth_user']
        ntlm_pass = options['ntlm_auth_passwd']
        ntlm_url = options['ntlm_auth_url']

        ntlm_domain.set_value('moth')
        ntlm_user.set_value('admin')
        ntlm_pass.set_value('admin')
        ntlm_url.set_value('http://moth/w3af/core/ntlm_auth/ntlm_v1/')

        settings.set_options(options)
        self.uri_opener.settings = settings

        url = URL("http://moth/w3af/core/ntlm_auth/ntlm_v1/")
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('You are admin from MOTH/', http_response.body)
Example #55
    def test_audit_plugin_timeout(self):
        plugin_inst = self.w3af.plugins.get_plugin_inst('audit', 'sqli')

        url = URL(get_moth_http('/'))
        freq = FuzzableRequest(url)

        def delay(x, y):
            """
            According to the stopit docs it can't kill a thread running an
            atomic Python function such as time.sleep(), so I have to create
            a function like this. I don't mind, since it's realistic compared
            to what we do in w3af anyway.
            """
            total_delay = 3.0

            for _ in xrange(100):
                time.sleep(total_delay / 100)

        plugin_inst.audit = delay

        mod = 'w3af.core.controllers.plugins.audit_plugin.%s'

        mock_plugin_timeout = 2
        msg = '[timeout] The "%s" plugin took more than %s seconds to'\
              ' complete the analysis of "%s", killing it!'

        error = msg % (plugin_inst.get_name(), mock_plugin_timeout,
                       freq.get_url())

        with patch(mod % 'om.out') as om_mock,\
             patch(mod % 'AuditPlugin.PLUGIN_TIMEOUT', new_callable=PropertyMock) as timeout_mock:

            timeout_mock.return_value = mock_plugin_timeout
            plugin_inst.audit_with_copy(freq, None)

            self.assertIn(call.debug(error), om_mock.mock_calls)

        # Just to make sure we didn't affect the class attribute with our test
        self.assertEqual(plugin_inst.PLUGIN_TIMEOUT, 5 * 60)
Example #56
    def setUp(self):
        """
        This is a rather complex setUp since I need to create an instance of
        the count.py plugin in memory, without copying it to any plugins
        directory since that would generate issues with other tests.
        """
        self.w3afcore = w3afCore()

        target_opts = create_target_option_list(URL(get_moth_http()))
        self.w3afcore.target.set_options(target_opts)

        plugin_inst = factory(self.PLUGIN)
        plugin_inst.set_url_opener(self.w3afcore.uri_opener)
        plugin_inst.set_worker_pool(self.w3afcore.worker_pool)

        self.w3afcore.plugins.plugins['crawl'] = [plugin_inst]
        self.w3afcore.plugins._plugins_names_dict['crawl'] = ['count']
        self.count_plugin = plugin_inst

        # Verify env and start the scan
        self.w3afcore.plugins.initialized = True
        self.w3afcore.verify_environment()
Example #57
    def test_analyze_cookies_collect(self):
        body = ''
        url = URL('http://www.w3af.com/')
        headers = Headers({
            'content-type': 'text/html',
            'Set-Cookie': 'abc=def'
        }.items())
        response = HTTPResponse(200, body, headers, url, url, _id=1)
        request = FuzzableRequest(url, method='GET')
        self.plugin.grep(request, response)

        headers = Headers({
            'content-type': 'text/html',
            'Set-Cookie': '123=456'
        }.items())
        response = HTTPResponse(200, body, headers, url, url, _id=1)
        request = FuzzableRequest(url, method='GET')
        self.plugin.grep(request, response)

        self.assertEqual(len(kb.kb.get('analyze_cookies', 'cookies')), 2)
        self.assertEqual(len(kb.kb.get('analyze_cookies', 'invalid-cookies')),
                         0)
Example #58
    def test_demo_testfire_net(self):
        # We don't control the demo.testfire.net domain, so we'll check if it's
        # up before doing anything else
        uri_opener = ExtendedUrllib()
        login_url = URL(self.demo_testfire + 'login.aspx')
        try:
            res = uri_opener.GET(login_url)
        except:
            raise SkipTest('demo.testfire.net is unreachable!')
        else:
            if not 'Online Banking Login' in res.body:
                raise SkipTest('demo.testfire.net has changed!')

        self._scan(self.demo_testfire_net['target'],
                   self.demo_testfire_net['plugins'])

        urls = self.kb.get_all_known_urls()
        url_strings = set(str(u) for u in urls)

        self.assertTrue(self.demo_testfire + 'queryxpath.aspx' in url_strings)
        self.assertTrue(self.demo_testfire +
                        'queryxpath.aspx.cs' in url_strings)
Example #59
    def _send_to_server(self, grep=False):
        """
        Send a request that arrived from the browser to the remote web server.

        Important variables used here:
            - self.headers : Stores the headers for the request
            - self.rfile : A file like object that stores the post_data
            - self.path : Stores the URL that was requested by the browser
        """
        self.headers['Connection'] = 'close'

        # See HTTPWrapperClass
        if hasattr(self.server, 'chainedHandler'):
            base_path = "https://" + self.server.chainedHandler.path
            path = base_path + self.path
        else:
            path = self.path

        uri_instance = URL(path)

        #
        # Do the request to the remote server
        #
        post_data = None
        if 'content-length' in self.headers.dict:
            # most likely a POST request
            post_data = self._get_post_data()

        http_method = getattr(self._uri_opener, self.command)
        headers = Headers(self.headers.items())

        try:
            res = http_method(uri_instance,
                              data=post_data,
                              headers=headers,
                              grep=grep)
        except BaseFrameworkException, w:
            om.out.error('The proxy request failed, error: ' + str(w))
            raise w
Example #60
    def _create_file(self):
        """
        Create a randomly named PHP file with random PHP content, to be used in
        the remote file inclusion test.

        :return: The file content to be served via the webserver.

        Please note that the generated code works both in PHP and JSP without
        any issues, since PHP will run everything between "<?" and "?>" and
        JSP will run code between "<%" and "%>".

        TODO: make this code compatible with: asp/aspx, jsp, js (nodejs), pl,
              py, rb, etc. Some code snippets that might help to achieve this
              task:

        asp_code = 'response.write("%s");\n response.write("%s");' % (
            rand1, rand2)
        asp_code = '<% \n '+asp_code+'\n %>'
        """
        with self._plugin_lock:
            # First, generate the php file to be included.
            rfi_result_part_1 = rand1 = rand_alnum(9)
            rfi_result_part_2 = rand2 = rand_alnum(9)
            rfi_result = rand1 + rand2

            filename = rand_alnum(8)
            php_jsp_code = '<? echo "%s"; echo "%s"; ?>'
            php_jsp_code += '<%% out.print("%s"); out.print("%s"); %%>'
            php_jsp_code = php_jsp_code % (rand1, rand2, rand1, rand2)

            # Define the required parameters
            netloc = self._listen_address + ':' + str(self._listen_port)
            path = '/' + filename
            rfi_url = URL.from_parts('http', netloc, path, None, None, None)

            rfi_data = RFIData(rfi_url, rfi_result_part_1, rfi_result_part_2,
                               rfi_result)

            return php_jsp_code, rfi_data
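For clarity, the string formatting above (note the %% escapes) yields a single PHP/JSP polyglot; a standalone reproduction with hypothetical random values:

    # rand1/rand2 are made-up stand-ins for rand_alnum(9) output
    rand1, rand2 = 'AAAAAAAAA', 'BBBBBBBBB'
    php_jsp_code = '<? echo "%s"; echo "%s"; ?>'
    php_jsp_code += '<%% out.print("%s"); out.print("%s"); %%>'
    php_jsp_code = php_jsp_code % (rand1, rand2, rand1, rand2)
    # Result: <? echo "AAAAAAAAA"; echo "BBBBBBBBB"; ?><% out.print("AAAAAAAAA"); out.print("BBBBBBBBB"); %>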