def _isBlacklisted( self, uri ):
    '''
    Check whether the given URI was configured by the user as a non-target.

    This is the lowest layer inside w3af where the non-target configuration
    is enforced.

    @parameter uri: The URI that is about to be requested.
    @return: True if the URI matches a configured non-target URL.
    '''
    configured_non_targets = cf.cf.getData('nonTargets') or []
    # Compare URLs (URI minus query string), not full URIs.
    requested_url = urlParser.uri2url( uri )

    for non_target in configured_non_targets:
        if requested_url == urlParser.uri2url( non_target ):
            msg = 'The URL you are trying to reach was configured as a non-target. ( '
            msg += uri +' ). Returning an empty response.'
            om.out.debug( msg )
            return True

    return False
def _gen_url_to_include( self, file_content, extension ):
    '''
    Generate the URL to include, based on the configuration it will return a
    URL pointing to a XSS bug, or a URL pointing to our local webserver.

    @parameter file_content: The payload to serve/echo back when the
                             generated URL is requested.
    @parameter extension: Unused here; kept for interface compatibility.
    @return: The URL to include.
    @raise w3afException: If the payload file cannot be written to the webroot.
    '''
    if self._use_XSS_vuln and self._xss_vuln:
        # Reuse a previously found XSS vulnerability: build a URL whose
        # vulnerable parameter echoes back our payload.
        url = urlParser.uri2url(self._xss_vuln.getURL())
        data_container = self._xss_vuln.getDc()
        data_container = data_container.copy()
        data_container[self._xss_vuln.getVar()] = file_content
        url_to_include = url + '?' + str(data_container)
        return url_to_include
    else:
        # Write the php to the webroot so our local webserver can serve it.
        filename = createRandAlNum()
        try:
            # FIX: use a context manager so the file handle is always closed
            # (the previous code leaked the handle when write() failed), and
            # catch Exception instead of a bare except so KeyboardInterrupt
            # and SystemExit are not swallowed.
            with open(os.path.join(get_home_dir(), 'webroot', filename), 'w') as file_handler:
                file_handler.write(file_content)
        except Exception:
            raise w3afException('Could not create file in webroot.')
        else:
            url_to_include = 'http://' + self._listen_address +':'
            url_to_include += str(self._listen_port) +'/' + filename
            return url_to_include
def _get_images( self, fuzzable_request ):
    '''
    Fetch the page, collect every <img> tag source and hash the image bodies.

    @parameter fuzzable_request: The request to modify
    @return: A map with the img src as a key, and a hash of the image
             contents as the value
    '''
    result = {}

    try:
        response = self._urlOpener.GET( fuzzable_request.getURI(), useCache=False )
    except:
        om.out.debug('Failed to retrieve the page for finding captchas.')
        return result

    # Do not use dpCache here, it's no good.
    #dp = dpCache.dpc.getDocumentParserFor( response )
    try:
        parser = documentParser.documentParser( response )
    except w3afException:
        return result

    img_src_list = [ urlParser.uri2url(ref) for ref in parser.getReferencesOfTag('img') ]

    for img_src in img_src_list:
        # TODO: Use self._tm.startFunction
        try:
            image_response = self._urlOpener.GET( img_src, useCache=False )
        except:
            om.out.debug('Failed to retrieve the image for finding captchas.')
        else:
            if image_response.is_image():
                result[ img_src ] = sha.new(image_response.getBody()).hexdigest()

    return result
def end(self): om.out.debug("davShellObj is going to delete the webshell that was uploaded before.") url_to_del = urlParser.uri2url(self._exploit_url) try: self._urlOpener.DELETE(url_to_del) except w3afException, e: om.out.error("davShellObj cleanup failed with exception: " + str(e))
def urlReplace(self, parameter="", value="", newValue=""):
    '''
    Build a URL where the vulnerable parameter carries falseValue + newValue.

    @parameter newValue: The payload appended to the mutant's falseValue.
    @return: The resulting URL (with a decoded query string when the mutant
             has a data container, the plain mutant URL otherwise).
    '''
    mutant = self._vuln.getMutant()
    mutant.setModValue( self._vuln['falseValue'] + newValue )

    if mutant.getDc() == '':
        return mutant.getURL()

    decoded_qs = urllib.unquote_plus( str( mutant.getDc() ) )
    return urlParser.uri2url( mutant.getURL() ) + '?' + decoded_qs
def getPage(self, url):
    """
    Connect to the target url or proxy and return the target url page.

    @parameter url: The URL (possibly including a query string) to request.
    @return: The body of the HTTP response.
    @raise w3afException: When the server answers with a 5xx status code.
    """
    mutant = self._vuln.getMutant()
    mutant.setDc( urlParser.getQueryString( url ) )
    mutant.setURL( urlParser.uri2url( url ) )

    response = self._sendMutant( mutant, analyze=False )

    # FIX: the previous check used "in range(500, 599)", which silently
    # missed status code 599 (range's end is exclusive), and the message
    # hard-coded "500" for every 5xx response. Compare against the full
    # 5xx range and report the actual code.
    if 500 <= response.getCode() <= 599:
        raise w3afException('getPage request returned an HTTP error '
                            + str(response.getCode()) + '.')

    return response.getBody()
def are_variants( url_a , url_b ):
    '''
    This function analyzes if two URLs are variants. Two requests are
    variants if:
        - They have the same URL
        - They have the same method
        - They have the same parameters
        - The values for each parameter have the same type (int / string)

    @parameter url_a: The URL we want to analyze
    @parameter url_b: The other URL we want to analyze
    @return: True if the URLs are variants.
    '''
    def _to_qs_request( url ):
        # Build a query-string request object from a full URL.
        request = httpQsRequest.httpQsRequest()
        request.setURL( urlParser.uri2url( url ) )
        request.setDc( urlParser.getQueryString( url ) )
        return request

    return _to_qs_request( url_a ).is_variant_of( _to_qs_request( url_b ) )
def __init__( self, code, read , info, geturl, originalUrl, msg='OK', id=None, time=0.2): ''' @parameter time: The time between the request and the response. ''' # A nice and comfortable default self._charset = 'utf-8' self._content_type = '' self._dom = None self._clear_text_body = None # Set the URL variables # The URL that we really GET'ed self._realurl = uri2url( originalUrl ) self._uri = originalUrl # The URL where we were redirected (may be the same as originalUrl when no redirect) self._redirectedURL = geturl self._redirectedURI = uri2url( geturl ) # Set the rest self.setCode(code) # Save the type for fast access, so I don't need to calculate the type each time # someone calls the "is_text_or_html" method. This attributes are set in the # setHeaders() method. self._is_text_or_html_response = False self._is_swf_response = False self._is_pdf_response = False self._is_image_response = False self.setHeaders(info) self.setBody(read) self._msg = msg self._time = time # A unique id identifier for the response self.id = id self._fromCache = False
def _generateShell( self, vuln ):
    '''
    @parameter vuln: The vuln to exploit.
    @return: True if the user can start using the proxy.
    '''
    # Configure the proxy with the details taken from the vulnerability.
    self._url = urlParser.uri2url( vuln.getURL() )
    self._method = vuln.getMethod()
    self._exploitData = vuln.getDc()
    self._variable = vuln.getVar()

    self.start2()

    proxy_endpoint = self._proxyAddress + ':' + str(self._proxyPort)
    return proxy_rfi_shell( proxy_endpoint )
def _return_without_eval( self, parameters, uri ):
    '''
    Request a crafted URL (base URL plus a random alphanumeric token, and
    optionally the test's query string) using the HTTP method named in
    *parameters*.

    @parameter parameters: A 5-tuple: (server, query, expected_response,
                           method, desc).
    @parameter uri: The URI the crafted request is derived from.
    @return: False when *uri* is just a domain path (nothing to test).
    '''
    # If the URI is a bare domain path there is nothing useful to mangle.
    if urlParser.getDomainPath( uri ) == uri:
        return False

    (server, query , expected_response, method , desc) = parameters
    # Resolve the HTTP method name (e.g. 'GET') to the urlOpener callable.
    function_reference = getattr( self._urlOpener , method )

    # Append a random 7-char alphanumeric token to the base URL, and keep
    # the query string from the test definition if one exists.
    url = urlParser.uri2url( uri )
    url += createRandAlNum( 7 )
    if urlParser.getQueryString( query ):
        url = url + '?' + str( urlParser.getQueryString( query ) )
    try:
        response = function_reference( url )
    except KeyboardInterrupt,e:
        raise e
    # NOTE(review): the visible chunk ends inside this try block — the
    # response handling / success path presumably continues below; confirm
    # against the full source file.
def setOptions(self, optionsMap):
    """
    This method sets all the options that are configured using the user interface
    generated by the framework using the result of getOptions().

    @parameter optionsMap: A map with the options for the plugin.
    @return: No value is returned.
    """
    self._url = urlParser.uri2url(optionsMap["url"].getValue())

    chosen_method = optionsMap["method"].getValue()
    if chosen_method not in ["GET", "POST"]:
        raise w3afException("Unknown method.")
    self._method = chosen_method

    self._data = optionsMap["data"].getValue()
    self._injvar = optionsMap["injvar"].getValue()
    self._equAlgorithm = optionsMap["equAlgorithm"].getValue()
    self._equalLimit = optionsMap["equalLimit"].getValue()
    self._goodSamaritan = optionsMap["goodSamaritan"].getValue()
    self._generateOnlyOne = optionsMap["generateOnlyOne"].getValue()
def setOptions( self, optionsMap ):
    '''
    This method sets all the options that are configured using the user interface
    generated by the framework using the result of getOptions().

    @parameter optionsMap: A map with the options for the plugin.
    @return: No value is returned.
    '''
    self._url = urlParser.uri2url( optionsMap['url'].getValue() )

    method = optionsMap['method'].getValue()
    if method in ['GET', 'POST']:
        self._method = method
    else:
        raise w3afException('Unknown method.')

    # The remaining options map one-to-one onto private attributes.
    for option_name in ('data', 'injvar', 'equAlgorithm', 'equalLimit',
                        'goodSamaritan', 'generateOnlyOne'):
        setattr(self, '_' + option_name, optionsMap[option_name].getValue())
def discover(self, fuzzableRequest ):
    '''
    If url not in _tested, append a ?wsdl and check the response.

    @parameter fuzzableRequest: A fuzzableRequest instance that contains
                                (among other things) the URL to test.
    '''
    url = urlParser.uri2url( fuzzableRequest.getURL() )

    if url not in self._already_tested:
        self._already_tested.add( url )

        # Fire one threaded request per candidate WSDL parameter.
        for wsdl_parameter in self._get_WSDL():
            self._tm.startFunction( target=self._do_request,
                                    args=( url + wsdl_parameter, ),
                                    ownerObj=self )

        # Wait for all threads to finish
        self._tm.join( self )

    return self._new_fuzzable_requests
def setURI( self, uri ):
    '''Store the full URI and cache its query-less URL form.'''
    self._url = uri2url( uri )
    self._uri = uri
def setURL( self, url ):
    '''Store the query-less form of the given URL.'''
    bare_url = uri2url( url )
    self._url = bare_url
def setURI( self, uri ):
    '''
    Set the URI: extract the query string into the data container,
    store a space-encoded copy of the URI, and cache the bare URL.
    '''
    self._url = urlParser.uri2url( uri )
    self._dc = urlParser.getQueryString(uri)
    # Encode naked spaces so the stored URI is request-safe.
    self._uri = uri.replace(' ', '%20')
# most likely a POST request postData = self._getPostData() try: httpCommandMethod = getattr( self._urlOpener, self.command ) res = httpCommandMethod( path, data=postData, headers=self.headers ) except w3afException, w: om.out.error('The proxy request failed, error: ' + str(w) ) except Exception, e: raise e else: return res else: # most likely a GET request url = uri2url( path ) qs = getQueryString( self.path ) try: httpCommandMethod = getattr( self._urlOpener, self.command ) res = httpCommandMethod( url, data=str(qs), headers=self.headers, grepResult=grep ) except w3afException, w: traceback.print_exc() om.out.error('The proxy request failed, error: ' + str(w) ) raise w except Exception, e: traceback.print_exc() raise e else: return res def _sendError( self, exceptionObj, trace=None ):
def discover(self, fuzzableRequest ):
    '''
    Nothing strange, just do some GET requests to the eggs and analyze the response.

    @parameter fuzzableRequest: A fuzzableRequest instance that contains
                                (among other things) the URL to test.
    @raise w3afRunOnce: When the plugin has already reported its findings,
                        so the framework removes it from the run list.
    '''
    if not self._exec:
        # This will remove the plugin from the discovery plugins to be runned.
        raise w3afRunOnce()
    else:
        # Get the extension of the URL (.html, .php, .. etc)
        ext = urlParser.getExtension( fuzzableRequest.getURL() )

        # Only perform this analysis if we haven't already analyzed this type of extension
        # OR if we get an URL like http://f00b5r/4/ (Note that it has no extension)
        # This logic will perform some extra tests... but we won't miss some special cases
        # Also, we aren't doing something like "if 'php' in ext:" because we never depend
        # on something so changable as extensions to make decisions.
        if ext == '' or ext not in self._already_analyzed_ext:
            # Init some internal variables
            GET_results = []
            original_response = self._urlOpener.GET( fuzzableRequest.getURL(), useCache=True )

            # Perform the GET requests to see if we have a phpegg
            for egg, egg_desc in self._get_eggs():
                egg_URL = urlParser.uri2url( fuzzableRequest.getURL() ) + egg
                try:
                    response = self._urlOpener.GET( egg_URL, useCache=True )
                except KeyboardInterrupt,e:
                    raise e
                except w3afException, w3:
                    raise w3
                else:
                    GET_results.append( (response, egg_desc, egg_URL) )

            #
            # Now I analyze if this is really a PHP eggs thing, or simply a response that
            # changes a lot on each request. Before, I had something like this:
            #
            # if relative_distance(original_response.getBody(), response.getBody()) < 0.1:
            #
            # But I got some reports about false positives with this approach, so now I'm
            # changing it to something a little bit more specific.
            images = 0
            not_images = 0
            for response, egg_desc, egg_URL in GET_results:
                if 'image' in response.getContentType():
                    images += 1
                else:
                    not_images += 1

            # NOTE(review): 3 + 1 presumably matches the shape of the egg list
            # returned by self._get_eggs() (three image eggs plus one non-image
            # one) — confirm against _get_eggs().
            if images == 3 and not_images == 1:
                #
                # The remote web server has expose_php = On. Report all the findings.
                #
                for response, egg_desc, egg_URL in GET_results:
                    i = info.info()
                    i.setPluginName(self.getName())
                    i.setName('PHP Egg - ' + egg_desc)
                    i.setURL( egg_URL )
                    desc = 'The PHP framework running on the remote server has a "'
                    desc += egg_desc +'" easter egg, access to the PHP egg is possible'
                    desc += ' through the URL: "'+ egg_URL + '".'
                    i.setDesc( desc )
                    kb.kb.append( self, 'eggs', i )
                    om.out.information( i.getDesc() )

                # Only run once.
                self._exec = False

                # analyze the info to see if we can identify the version
                self._analyze_egg( GET_results )

            # Now we save the extension as one of the already analyzed
            if ext != '':
                self._already_analyzed_ext.add(ext)
def setURL( self , url ):
    '''
    Store the query-less, space-encoded form of the given URL as both
    the URL and the URI.
    '''
    cleaned = urlParser.uri2url( url ).replace(' ', '%20')
    self._url = cleaned
    self._uri = cleaned