Example #1
0
File: spider_man.py — Project: weisst/w3af
 def crawl(self, freq):
    """Start the spider-man proxy that captures the user's browsing.

    :param freq: The fuzzable request that triggered this plugin (unused
                 here; the real crawling happens through the proxy).
    """
    try:
        # Spin up the proxy on the configured address/port, wired to our
        # URL opener and a freshly created proxy handler.
        self._proxy = Proxy(self._listen_address, self._listen_port,
                            self._uri_opener, self.create_p_h())
    except w3afProxyException as proxy_error:
        # Most likely the port is already in use; report it to the user.
        om.out.error('%s' % proxy_error)
Example #2
0
    def _start_proxy(self, uri_opener):
        """Start a local proxy and save its URL to self.local_proxy_url.

        The wrapper uses that URL in calls to sqlmap.py so the traffic goes
        through our proxy (which has the user configuration, logging, etc).
        Up to 25 consecutive ports starting at SQLMAP_PROXY are tried.

        :param uri_opener: The URL opener the proxy forwards requests through.
        :return: None. A w3afProxyException is raised if no port in the
                 range could be bound.
        """
        host = '127.0.0.1'

        last_error = None

        for port in xrange(SQLMAP_PROXY, SQLMAP_PROXY + 25):
            try:
                self.proxy = Proxy(host, port, uri_opener)
            except w3afProxyException as pe:
                # Port probably in use; remember the failure and try the next.
                last_error = pe
            else:
                self.proxy.start()
                self.local_proxy_url = 'http://%s:%s/' % (host, port)
                return

        # BUG FIX: the original silently returned None here, breaking the
        # documented contract. Fail loudly with the last bind error instead.
        raise last_error
Example #3
0
    def setUp(self):
        """Start a local proxy daemon and point a fresh uri_opener at it."""
        self.uri_opener = ExtendedUrllib()

        # Bind to port 0 so the OS picks any free port for us.
        self._proxy = Proxy('127.0.0.1', 0, ExtendedUrllib(), w3afProxyHandler)
        self._proxy.start()
        self._proxy.wait_for_start()

        proxy_port = self._proxy.get_port()

        # Route all uri_opener traffic through the freshly started proxy.
        proxy_settings = OpenerSettings()
        opts = proxy_settings.get_options()
        opts['proxy_address'].set_value('127.0.0.1')
        opts['proxy_port'].set_value(proxy_port)

        proxy_settings.set_options(opts)
        self.uri_opener.settings = proxy_settings