def setURI(self, uri):
    """
    Set this fuzzable request's URI and derive its base URL from it.

    @param uri: A urlParser.url_object instance pointing at the target URI.
    @raise ValueError: If uri is not a url_object instance.
    """
    if not isinstance(uri, url_object):
        # Fixed copy-paste in the diagnostic: this method is setURI and the
        # parameter is "uri" (the old text said setURL / "url").
        msg = 'The "uri" parameter of setURI @ fuzzableRequest'
        msg += ' must be of urlParser.url_object type.'
        raise ValueError(msg)

    # Spaces are not valid inside a URI; percent-encode them so downstream
    # HTTP machinery receives a well-formed URI string.
    self._uri = url_object(uri.url_string.replace(' ', '%20'))
    self._url = self._uri.uri2url()
def setURI(self, uri):
    """
    Store *uri* as this request's URI and keep the derived base URL in sync.

    @param uri: A urlParser.url_object instance.
    @raise ValueError: When *uri* is not a url_object.
    """
    if isinstance(uri, url_object):
        # Percent-encode spaces so the stored URI is well-formed.
        encoded = uri.url_string.replace(" ", "%20")
        self._uri = url_object(encoded)
        self._url = self._uri.uri2url()
    else:
        raise ValueError('The "url" parameter of setURL @ fuzzableRequest'
                         " must be of urlParser.url_object type.")
def _w3af_site_test_inclusion(self, freq):
    """
    Check for RFI using the official w3af site.

    @param freq: A fuzzableRequest object
    @return: None, everything is saved to the kb
    """
    # Known-content page hosted on the project site: when this marker text
    # shows up in a response, the remote file was included.
    self._rfi_result = 'w3af is goood!'
    self._rfi_url = url_object(
        'http://w3af.sourceforge.net/w3af/remoteFileInclude.html')
    # Delegate the actual probing to the shared inclusion-test helper.
    self._test_inclusion(freq)
def _w3af_site_test_inclusion(self, freq):
    """
    Run the remote-file-inclusion check against the official w3af site.

    @param freq: A fuzzableRequest object
    @return: None, everything is saved to the kb
    """
    # Fixed probe page plus the marker string that proves inclusion worked.
    probe = 'http://w3af.sourceforge.net/w3af/remoteFileInclude.html'
    self._rfi_url = url_object(probe)
    self._rfi_result = 'w3af is goood!'

    # All the real work (and kb reporting) happens in the shared helper.
    self._test_inclusion(freq)
def request_to_fuzzable_request(req):
    """
    Convert one of our crawler's requests into the matching w3af
    fuzzable request (httpPostDataRequest for POST, httpQsRequest for GET).
    """
    target = url_object(req.webrequest.getUrl().toString())

    if not req.isPOST:
        # GET: the parameters travel inside the URI's query string.
        get_request = httpQsRequest()
        get_request.setURI(target)
        return get_request

    # POST: parameters travel in the body, modeled as a data container
    # mapping each name to a list of values.
    post_request = httpPostDataRequest()
    post_request.setURL(target)
    params = dataContainer()
    for pair in req.webrequest.getRequestParameters():
        params[pair.getName()] = [pair.getValue()]
    post_request.setDc(params)
    return post_request
def response_to_w3af_response(response):
    """
    Convert one of our crawler's response objects into a w3af
    httpResponse instance.
    """
    # The URL that was originally requested, dug out of the crawler's
    # request/response pair.
    requested = url_object(
        response.page.reqresp.request.webrequest.getUrl().toString())

    header_map = {}
    for pair in response.webresponse.getResponseHeaders():
        header_map[pair.getName()] = pair.getValue()

    # w3af takes the same URL twice: the requested URL and the final one.
    return httpResponse(response.code, response.content, header_map,
                        requested, requested, time=response.time)
def response_to_w3af_response(response):
    """
    Build a w3af httpResponse from our crawler's response object.
    """
    status = response.code
    body = response.content
    original_url = url_object(
        response.page.reqresp.request.webrequest.getUrl().toString())

    # Flatten the name/value header pairs into a plain dict.
    headers = dict((nv.getName(), nv.getValue())
                   for nv in response.webresponse.getResponseHeaders())

    # Requested URL and final URL are the same here (no redirect tracking).
    return httpResponse(status, body, headers, original_url,
                        original_url, time=response.time)
# Ad-hoc driver script: wires the crawler's plugin wrapper to w3af's xss
# audit plugin and fires it at a local test page.
# NOTE(review): Python 2 only (`print` statement below); the sys.path
# entries are machine-specific absolute paths — adjust before reuse.
import sys
sys.path.append("/home/adamd/research/black-box/blackbox/crawler/audit")
sys.path.append("/home/adamd/research/black-box/blackbox/crawler")
# Debug aid: show the effective module search path after the appends.
print sys.path
from fuzzableRequest import fuzzableRequest
from httpQsRequest import httpQsRequest
from urlParser import url_object
from xss import xss
from plugin_wrapper import *
# Build a GET-style fuzzable request for the hard-coded local target.
url = url_object("http://127.0.0.1/adam.php?test=blah")
fr = httpQsRequest()
fr.setURI(url)
# Instantiate the xss audit plugin in "crawler" mode and run it against
# the request; results presumably land in w3af's knowledge base — verify.
plugin = xss("crawler")
plugin.audit(fr)