def store_in_cache(request, response):
    '''
    Persist a request/response pair as a HistoryItem.
    
    @parameter request: The urllib2 request object that was sent.
    @parameter response: The raw response object to store.
    '''
    history_item = HistoryItem()

    # Build the fuzzable request from the urllib2 request object,
    # merging the regular headers with the unredirected ones.
    all_headers = dict(request.headers)
    all_headers.update(request.unredirected_hdrs)

    history_item.request = createFuzzableRequestRaw(
                                       method=request.get_method(),
                                       url=request.url_object,
                                       postData=str(request.get_data() or ''),
                                       headers=all_headers)

    # Extract everything we need from the raw response
    code = response.code
    msg = response.msg
    hdrs = response.info()
    url = response.geturl()
    body = response.read()
    response_id = response.id

    # BUGBUG: This is where I create/log the responses that always have
    # 0.2 as the time!
    url_instance = url_object( url )
    history_item.response = httpResponse.httpResponse(code, body, hdrs,
                                                      url_instance,
                                                      request.url_object,
                                                      msg=msg,
                                                      id=response_id,
                                                      alias=gen_hash(request))

    # Now save them
    try:
        history_item.save()
    except KeyboardInterrupt:
        # Re-raise so Ctrl+C still aborts; everything else propagates anyway
        raise
def httpRequestParser(head, postdata):
    '''
    This function parses HTTP Requests from a string to a fuzzableRequest.
    
    @parameter head: The head of the request.
    @parameter postdata: The post data of the request
    @return: A fuzzableRequest object with all the corresponding information
        that was sent in head and postdata
    
    @author: Andres Riancho ( [email protected] )
    '''
    # Split the head into non-empty, stripped lines
    request_lines = [line.strip() for line in head.split('\n') if line]

    if not request_lines:
        msg = 'The HTTP request is invalid.'
        raise w3afException(msg)

    # The first line must look like "<method> <uri> <version>"
    request_line = request_lines[0]
    tokens = request_line.split(' ')

    if len(tokens) < 3:
        msg = 'The HTTP request has an invalid <method> <uri> <version> token: "'
        msg += request_line +'".'
        raise w3afException(msg)

    if len(tokens) == 3:
        # Ok, we have something like "GET /foo HTTP/1.0". This is the best case for us!
        method, uri, version = tokens
    else:
        # Something like "GET /hello world.html HTTP/1.0".
        # Mostly because we are permissive... we glue the middle tokens back
        # into the URI and try to send the request anyway.
        method = tokens[0]
        version = tokens[-1]
        uri = ' '.join(tokens[1:-1])

    checkVersionSintax(version)

    # If we got here, we have a nice method, uri, version first line.
    # Now we parse the remaining lines as "Name: value" headers.
    headersDict = {}
    for header_line in request_lines[1:]:
        try:
            name, value = header_line.split(':', 1)
        except ValueError:
            raise w3afException('The HTTP request has an invalid header: "'
                                + header_line + '"')
        headersDict[name.strip()] = value.strip()

    # Find the Host header (if any); checkURISintax uses it to complete
    # relative URIs.
    host = ''
    for header_name in headersDict:
        if header_name.lower() == 'host':
            host = headersDict[header_name]

    uri = checkURISintax(uri, host)
    return createFuzzableRequestRaw(method, uri, postdata, headersDict)
def _grepResult(self, request, response):
    '''
    Run every loaded grep plugin over a request/response pair.

    Only analyzes responses whose domain is in the configured target
    domains; everything else is ignored.

    @parameter request: The urllib2 request object that was sent.
    @parameter response: The response to analyze with the grep plugins.
    @return: None
    '''
    # The grep process is all done in another thread. This improves the
    # speed of all w3af.
    if len( self._grepPlugins ) and urlParser.getDomain( request.get_full_url() ) in cf.cf.getData('targetDomains'):
        
        # I'll create a fuzzable request based on the urllib2 request object
        fuzzReq = createFuzzableRequestRaw( request.get_method(), request.get_full_url(),
                                            request.get_data(), request.headers )
        
        for grep_plugin in self._grepPlugins:
            #
            #   For debugging, do not remove, only comment out if needed.
            #
            # NOTE(review): the worker is currently called synchronously in
            # this thread; the threadpool variant below is kept commented out.
            self._grep_worker( grep_plugin, fuzzReq, response )
            
            # TODO: Analyze if creating a different threadpool for grep workers speeds up the whole process
            #targs = (grep_plugin, fuzzReq, response)
            #self._tm.startFunction( target=self._grep_worker, args=targs, ownerObj=self, restrict=False )
        
        # Wait for any work owned by this object to finish
        self._tm.join( self )
def mangleRequest(self, request ):
    '''
    This method mangles the request.
    
    @param request: This is the request to mangle.
    @return: A mangled version of the request.
    '''
    # Apply each (regex, replacement) pair to the request body
    mangled_body = request.getData()
    for search_re, replacement in self._req_body_manglers:
        mangled_body = search_re.sub(replacement, mangled_body)

    # Headers are mangled as one flat string, then parsed back into a dict
    mangled_head = headersToString(request.getHeaders())
    for search_re, replacement in self._req_head_manglers:
        mangled_head = search_re.sub(replacement, mangled_head)
    mangled_headers = stringToHeaders(mangled_head)

    # Rebuild the fuzzable request with the mangled body and headers
    return createFuzzableRequestRaw(request.getMethod(), request.getURL(),
                                    mangled_body, mangled_headers)
def _log_request_response( self, request, response ):
    '''
    Send the request and the response to the output manager.
    
    @parameter request: The urllib2 request object that was sent.
    @parameter response: Either an httpResponse object or a raw urllib2
        response that still needs to be wrapped.
    @return: None
    '''
    method = request.get_method()
    url = request.get_full_url()
    postData = request.get_data()

    # FIX: merge the unredirected headers into a *copy* of request.headers.
    # The previous code did `headers = request.headers` and then assigned
    # into it, mutating the request object's own header dict as a side
    # effect of merely logging it.
    headers = dict(request.headers)
    headers.update(request.unredirected_hdrs)

    fr = createFuzzableRequestRaw(method, url, postData, headers)

    if isinstance(response, httpResponse.httpResponse):
        res = response
    else:
        # Wrap the raw urllib2 response into an httpResponse object
        code, msg, hdrs = response.code, response.msg, response.info()
        url = response.geturl()
        body = response.read()
        # renamed from `id` to avoid shadowing the builtin
        resp_id = response.id
        # BUGBUG: This is where I create/log the responses that always have 0.2 as the time!
        res = httpResponse.httpResponse( code, body, hdrs, url, url, msg=msg, id=resp_id)

    om.out.logHttp( fr, res )
def _log_request_response(self, request, response):
    '''
    Send the request and the response to the output manager.
    '''
    # Merge the regular and the unredirected headers into one dict copy
    merged_headers = dict(request.headers)
    merged_headers.update(request.unredirected_hdrs)

    fuzzable_request = createFuzzableRequestRaw(method=request.get_method(),
                                                url=request.url_object,
                                                postData=request.get_data(),
                                                headers=merged_headers)

    if isinstance(response, httpResponse.httpResponse):
        http_resp = response
    else:
        # Wrap the raw urllib2 response in an httpResponse object
        body = response.read()
        # BUGBUG: This is where I create/log the responses that always have 0.2 as the time!
        url_instance = url_object( response.geturl() )
        http_resp = httpResponse.httpResponse(response.code, body,
                                              response.info(),
                                              request.url_object,
                                              url_instance,
                                              msg=response.msg,
                                              id=response.id)

    om.out.logHttp(fuzzable_request, http_resp)
def _objs_from_log( self, req_file ):
    '''
    This code was largely copied from Bernardo Damele's sqlmap[0] . See
    __feedTargetsDict() in lib/core/options.py. So credits belong to the
    sqlmap project.

    [0] http://sqlmap.sourceforge.net/
    
    @author Patrick Hof

    @parameter req_file: Path to a WebScarab-style request log; entries are
        separated by a line of '=' characters.
    @return: A list of fuzzable request objects, one per GET/POST entry
        found in the log (image requests are skipped).
    '''
    res = []
    
    # NOTE(review): the file handle is never closed here — presumably
    # acceptable for a short-lived import, but confirm.
    fp = open( req_file, "r" )
    fread = fp.read()
    fread = fread.replace( "\r", "" )
    
    # Each request/response entry in the log is delimited by this marker
    req_res_list = fread.split( "======================================================" )
    
    port = None
    scheme = None
    
    for request in req_res_list:
        # Try to learn the scheme and port from the timestamp line that
        # precedes the request, e.g. "12:34:56  http://host:80/..."
        if scheme is None:
            scheme_port = re.search( "\d\d[\:|\.]\d\d[\:|\.]\d\d\s+(http[\w]*)\:\/\/.*?\:([\d]+)", request, re.I )
            if scheme_port:
                scheme = scheme_port.group( 1 )
                port = scheme_port.group( 2 )
        
        # Only process entries that actually contain a GET/POST request line
        if not re.search ( "^[\n]*(GET|POST).*?\sHTTP\/", request, re.I ):
            continue
        
        # Skip requests for images
        if re.search( "^[\n]*(GET|POST).*?\.(gif|jpg|png)\sHTTP\/", request, re.I ):
            continue
        
        method = None
        url = None
        postdata = None
        host = None
        headers = {}
        get_post_req = False
        
        lines = request.split( "\n" )
        
        for line in lines:
            if len( line ) == 0 or line == "\n":
                continue
            
            if line.startswith( "GET " ) or line.startswith( "POST " ):
                if line.startswith( "GET " ):
                    index = 4
                else:
                    index = 5
                
                # Everything between the method and " HTTP/" is the URL
                url = line[index:line.index(" HTTP/")]
                method = line[:index-1]
                get_post_req = True
            
            # XXX do we really need this? This is from the sqlmap code.
            # 'data' would be 'postdata' here. I can't figure out why this
            # is needed. Does WebScarab occasionally split requests to a new
            # line if they are overly long, so that we need to search for
            # GET parameters even after the URL was parsed? But that
            # wouldn't make sense with the way 'url' is set in line 168.
            #
            # GET parameters
            # elif "?" in line and "=" in line and ": " not in line:
            #     data = line
            
            # Parse headers
            elif ": " in line:
                key, value = line.split(": ", 1)
                headers[key] = value
                
                # Remember the Host header so relative URLs can be completed
                if key.lower() == 'host':
                    host = value
            
            # POST parameters
            elif method is not None and method == "POST" and "=" in line:
                postdata = line
        
        if get_post_req:
            # Complete relative URLs using the host header plus the
            # scheme/port learned from the timestamp line (with defaults)
            if not url.startswith( "http" ):
                url = "%s://%s:%s%s" % ( scheme or "http", host, port or "80", url )
                scheme = None
                port = None
            
            url_instance = url_object(url)
            res.append( createFuzzableRequestRaw( method, url_instance, postdata, headers ) )
    
    return res
>>> pdr.getData() {'id': ['1']} >>> str(pdr.getData()) 'id=1' ''' try: (method, uri, postdata) = csv_row except ValueError, value_error: msg = 'The file format is incorrect, an error was found while parsing: "' msg += str(csv_row) + '". Exception: "' + str(value_error) + '".' om.out.error( msg ) else: # Create the obj based on the information uri = url_object( uri ) if uri.is_valid_domain(): return createFuzzableRequestRaw( method, uri, postdata, {} ) def _objs_from_log( self, req_file ): ''' This code was largely copied from Bernardo Damele's sqlmap[0] . See __feedTargetsDict() in lib/core/options.py. So credits belong to the sqlmap project. [0] http://sqlmap.sourceforge.net/ @author Patrick Hof ''' res = [] fp = open( req_file, "r" ) fread = fp.read() fread = fread.replace( "\r", "" )
def httpRequestParser(head, postdata):
    '''
    This function parses HTTP Requests from a string to a fuzzableRequest.
    
    @parameter head: The head of the request.
    @parameter postdata: The post data of the request
    @return: A fuzzableRequest object with all the corresponding information
        that was sent in head and postdata
    
    @author: Andres Riancho ( [email protected] )

    >>> httpRequestParser('200 http://www.w3af.com/ HTTP/1.0', 'foo=bar')
    <postdata fuzzable request | 200 | http://www.w3af.com/ >

    >>> httpRequestParser('200 http://www.w3af.com/ HTTP/1.0', '')
    <QS fuzzable request | 200 | http://www.w3af.com/ >

    >>> httpRequestParser('200 / HTTP/1.0', '')
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    w3afException: You have to specify the complete URI, including the protocol and the host. Invalid URI: / 

    >>> httpRequestParser('ABCDEF', '')
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    w3afException: The HTTP request has an invalid <method> <uri> <version> token: "ABCDEF".

    >>> head = "200 http://www.w3af.com/ HTTP/1.0"
    >>> head += '\\nHost: www.w3af.com'
    >>> httpRequestParser( head, 'foo=bar')
    <postdata fuzzable request | 200 | http://www.w3af.com/ >
    '''
    # Parse the request head: keep only non-empty lines, stripped
    splitted_head = head.split('\n')
    splitted_head = [h.strip() for h in splitted_head if h]
    
    if not splitted_head:
        msg = 'The HTTP request is invalid.'
        raise w3afException(msg)
    
    # Get method, uri, version from the request line
    metUriVer = splitted_head[0]
    firstLine = metUriVer.split(' ')
    if len(firstLine) == 3:
        # Ok, we have something like "GET /foo HTTP/1.0". This is the best case for us!
        method, uri, version = firstLine
    elif len(firstLine) < 3:
        msg = 'The HTTP request has an invalid <method> <uri> <version> token: "'
        msg += metUriVer +'".'
        raise w3afException(msg)
    elif len(firstLine) > 3:
        # GET /hello world.html HTTP/1.0
        # Mostly because we are permissive... we are going to try to send the request...
        method = firstLine[0]
        version = firstLine[-1]
        uri = ' '.join( firstLine[1:-1] )
    
    checkVersionSintax(version)
    
    # If we got here, we have a nice method, uri, version first line
    # Now we parse the headers (easy!) and finally we send the request
    headers = splitted_head[1:]
    headersDict = {}
    for header in headers:
        one_splitted_header = header.split(':', 1)
        if len(one_splitted_header) == 1:
            raise w3afException('The HTTP request has an invalid header: "' + header + '"')
        headersDict[ one_splitted_header[0].strip() ] = one_splitted_header[1].strip()
    
    # The Host header (if present) is used to complete relative URIs
    host = ''
    for headerName in headersDict:
        if headerName.lower() == 'host':
            host = headersDict[headerName]
    
    uri = checkURISintax(uri, host)
    fuzzReq = createFuzzableRequestRaw(method, url_object(uri), postdata, headersDict)
    return fuzzReq