Example #1
0
  def detect_csrf_token(self,messageIsRequest,message):
    """Flag in-scope requests that are missing the anti-CSRF token.

    Only requests are inspected, as that is where the token travels.
    Returns the raw HTTP request line (e.g. "GET /path HTTP/1.1") when
    an in-scope request carries no token and is not a static resource;
    returns None otherwise.
    """
    request_byte_array = message.getMessageInfo().getRequest()
    if messageIsRequest:
      requestInfo = self._helpers.analyzeRequest(request_byte_array)

      #Extract hostname from header
      hostname = webcommon.get_host_header_from_request(self, requestInfo)

      #Check if the URL is in scope. This is to eliminate stray traffic.
      if hostname and hostname[1] in urls_in_scope:
        csrf_token_value = self._helpers.getRequestParameter(request_byte_array, anticsrf_token_name)
        request_string = self._helpers.bytesToString(request_byte_array)
        request_lines = request_string.split("\n")
        # Request line is "METHOD /path HTTP/1.1"; the path is the
        # second-to-last space-separated token.
        request_line_parts = request_lines[0].split(' ')

        #If there's no token, check if it's an image, js or css file. In this case, a token isn't needed
        if not csrf_token_value:
          for extension_pattern in excluded_file_extensions:
            #Search for file extension. If you want a more complex regex..remember to compile the regex. DO.NOT.FORGET :)
            if re.search(extension_pattern, request_line_parts[-2]):
              # Static resource: no token needed, stop scanning.
              return None

          #Not to be excluded and the request doesn't contain a token
          return request_lines[0]
    def processProxyMessage(self, messageIsRequest, message):
        response_byte_array = message.getMessageInfo().getResponse()

        request_http_service = message.getMessageInfo().getHttpService()
        request_byte_array = message.getMessageInfo().getRequest()
        request_object = self._helpers.analyzeRequest(request_http_service,
                                                      request_byte_array)

        #Extract hostname from header
        hostname = webcommon.get_host_header_from_request(self, request_object)
        #hostname=BurpExtender.get_host_header_from_request(self,request_object)

        #Check if the URL is in scope. This is to eliminate stray traffic.
        if hostname and hostname[1] in urls_in_scope:
            if not messageIsRequest:
                responseInfo = self._helpers.analyzeResponse(
                    response_byte_array)

                #Extract banner from response
                banner = webcommon.get_banner_from_response(self, responseInfo)
                if banner not in unique_banners.keys():
                    unique_banners[banner] = ''
                    print banner

                #Extract platform specific content from response
                responseBody = webcommon.get_response_body(
                    self, response_byte_array, responseInfo)
                responseBody_string = self._helpers.bytesToString(responseBody)

                for platform_name in list_of_platforms:
                    regex = re.compile('.{30}%s.{30}' % platform_name,
                                       re.IGNORECASE | re.DOTALL)
                    m2 = regex.search(responseBody_string)
                    if m2:
                        print m2.group(0) + '\n' + '-' * 30 + '\n'
Example #3
0
  def registerExtenderCallbacks(self,callbacks):
    """Burp entry point: walk the proxy history and probe each request's
    directory for enabled PUT and DELETE methods.

    For every historical request: PUT a test file, fetch it back, and if
    the upload succeeded (200) try to DELETE it and verify the deletion.
    """
    global hostname
    # Get a reference to the Burp helpers object
    self._helpers = callbacks.getHelpers()
    self._callbacks = callbacks

    # set our extension name
    callbacks.setExtensionName("HTTP method test")

    # Get proxy history
    proxyhistory = callbacks.getProxyHistory()

    #Read each request in proxy history
    for request in proxyhistory:
      request_byte_array = request.getRequest()
      requestInfo = self._helpers.analyzeRequest(request_byte_array)

      #Extract hostname from header (stored in the module-level global)
      hostname = webcommon.get_host_header_from_request(self, requestInfo)

      #Test PUT for each directory in the proxy history
      filepath = BurpExtender.test_put(self, callbacks, request_byte_array, hostname, requestInfo)

      #Fetch the file that was just PUT to confirm the upload worked
      respcode = BurpExtender.check_file_existence_put(self, filepath)

      if respcode == '200':
        #Test DELETE for the file you uploaded
        BurpExtender.test_delete(self, filepath)

        #Fetch the file again. It should return a 404 if DELETE is enabled
        BurpExtender.check_file_existence_delete(self, filepath)
  def processProxyMessage(self,messageIsRequest,message):
    response_byte_array=message.getMessageInfo().getResponse()

    request_http_service=message.getMessageInfo().getHttpService()
    request_byte_array=message.getMessageInfo().getRequest()
    request_object=self._helpers.analyzeRequest(request_http_service, request_byte_array)

    #Extract hostname from header
    hostname=webcommon.get_host_header_from_request(self,request_object)
    #hostname=BurpExtender.get_host_header_from_request(self,request_object)

    #Check if the URL is in scope. This is to eliminate stray traffic.
    if hostname and hostname[1] in urls_in_scope:
       if not messageIsRequest:
         responseInfo = self._helpers.analyzeResponse(response_byte_array)

         #Extract banner from response
         banner=webcommon.get_banner_from_response(self,responseInfo)
         if banner not in unique_banners.keys():
           unique_banners[banner]=''
           print banner

         #Extract platform specific content from response
         responseBody=webcommon.get_response_body(self,response_byte_array,responseInfo)
         responseBody_string=self._helpers.bytesToString(responseBody)

         for platform_name in list_of_platforms:
           regex=re.compile('.{30}%s.{30}'%platform_name,re.IGNORECASE|re.DOTALL)
           m2=regex.search(responseBody_string)
           if m2:
             print m2.group(0)+'\n'+'-'*30+'\n'
    def setMessage(self, content, isRequest):
        """Replay *content* with and without session cookies and show a
        warning in the editor tab when the cookie-less response has the
        same length as the original (possible missing auth check).

        content  -- raw request bytes, or None to clear the tab.
        """
        if content is None:
            self._txtInput.setText(None)
            self._txtInput.setEditable(False)
        else:
            # BUG FIX: 'output' was previously only assigned inside the
            # innermost branch, raising UnboundLocalError at setText()
            # whenever no cookie was removed or the lengths differed.
            output = ''
            requestInfo = self._extender._helpers.analyzeRequest(content)
            hostname = webcommon.get_host_header_from_request(
                self, requestInfo)
            http_service = self._extender._helpers.buildHttpService(
                hostname[1], remote_listening_port, protocol)
            # Baseline: send the original request as-is.
            r1 = self._extender._callbacks.makeHttpRequest(
                http_service, content)
            r2 = r1.getResponse()
            orig_resp = self._extender._helpers.bytesToString(r2)

            # Strip session cookies; flag == 1 means one was removed.
            request_byte_array, flag, http_service = self.remove_sessioncookie_from_request(
                content, http_service)
            if flag == 1:
                new_resp = self.generate_request(request_byte_array,
                                                 http_service)
                # Same-length responses suggest the cookie wasn't needed.
                if len(orig_resp) == len(new_resp):
                    output = 'Direct requesting without cookies has the same response as the original. This might be a vuln. Here is the request that was sent:\n\n'
                    output += '-' * 20 + '\n'
                    output += self._extender._helpers.bytesToString(
                        request_byte_array)
            self._txtInput.setText(output)
            self._txtInput.setEditable(self._editable)
            self._currentMessage = content
        return
Example #6
0
  def detect_urls_in_parameters(self,messageIsRequest,message):
    #Only process requests
    if messageIsRequest:
      request_http_service=message.getMessageInfo().getHttpService()
      request_byte_array=message.getMessageInfo().getRequest()
      request_object=self._helpers.analyzeRequest(request_http_service, request_byte_array)

      #Extract hostname from header
      hostname=webcommon.get_host_header_from_request(self,request_object)

      #Check if the URL is in scope. This is to eliminate stray traffic.
      if hostname and hostname[1] in urls_in_scope:
        request_url=request_object.getUrl()
        request_parameters=request_object.getParameters()

        #Check if the value of each parameter matches a whitelist or a blacklist. Both lists are defined above as global variables.
        for param in request_parameters:
          blacklist=0
          whitelist=0
          for excluded_pattern in excluded_url_patterns:
            regex=re.compile('.*%s.*'%excluded_pattern,re.IGNORECASE)
            m2=regex.match(str(param.getValue()))
            #m3=regex.match(urllib.quote(str(param.getValue())))
            if m2:# or m3:
              blacklist=1

          #If it doesn't match a blacklist
          if blacklist == 0:
            for pattern in url_patterns:
              regex=re.compile('.*%s.*'%pattern,re.IGNORECASE)
              m1=regex.match(str(param.getValue()))
              #m4=regex.match(urllib.quote_plus(str(param.getValue())))
              if m1:# or m4:
                whitelist=1

          #If the value for the URL parameter matches a pattern print it out
          if whitelist==1:
            #The moment you detect that a URL matches a pattern you also want to fuzz it. Hence you do the following:
            # -- Check if you already sent it to Intruder
            # -- If not, mark the positions that you want scanned
            # -- Set the payload list, set any other Intruder customizations up
            # -- Send the URL to be fuzzed to Intruder
            # -- Probably fuzz it as well and save the Intruder results to be imported later
            print str(request_url)+"\t\t"+str(param_constant_type_mapping[str(param.getType())])+"\t\t"+str(param.getName())+"\t\t"+str(param.getValue())
    else:
      response_byte_array=message.getMessageInfo().getResponse()
      responseInfo = self._helpers.analyzeResponse(response_byte_array)

      responseCode=webcommon.get_response_code_from_headers(self,responseInfo)
      location=webcommon.get_location_from_headers(self,responseInfo)
      if location:
        print str(responseCode[0])+'\t\t'+str(location[1])
Example #7
0
  def processProxyMessage(self,messageIsRequest,message):
    request_http_service=message.getMessageInfo().getHttpService()
    request_byte_array=message.getMessageInfo().getRequest()
    request_object=self._helpers.analyzeRequest(request_http_service, request_byte_array)

    #Extract hostname from header
    hostname=webcommon.get_host_header_from_request(self,request_object)

    #Check if the URL is NOT in scope. We want to look at referers for the requests that are made to OTHER domains.
    if (hostname) and (hostname[1] not in urls_in_scope):
      #Extract referer from header
      referer=webcommon.get_referer_header_from_request(self,request_object)
      if referer:
        t1=referer[1].split('/')
        if t1[2] in urls_in_scope:
          print referer[1]
Example #8
0
    def setMessage(self, content, isRequest):
        """Replay *content* with its HTTP method toggled (GET <-> POST)
        and display the toggled request plus the server's response in
        the editor tab.

        content  -- raw request bytes, or None to clear the tab.
        """
        if content is None:
            self._txtInput.setText(None)
            self._txtInput.setEditable(False)
        else:
            requestInfo= self._extender._helpers.analyzeRequest(content)
            # hostname is a (header, value) pair. NOTE(review): no guard
            # for a missing Host header -- hostname[1] would raise;
            # confirm callers only pass requests that carry one.
            hostname= webcommon.get_host_header_from_request(self,requestInfo)
            http_service= self._extender._helpers.buildHttpService(hostname[1],remote_listening_port,protocol)

            # Toggle the request method and replay it over the new service.
            bytes_req= self._extender._helpers.toggleRequestMethod(content)
            r1= self._extender._callbacks.makeHttpRequest(http_service, bytes_req)
            r2= r1.getResponse()
            orig_resp= self._extender._helpers.bytesToString(r2)
            # Show the toggled request, a dashed separator, then the response.
            self._txtInput.setText(self._extender._helpers.bytesToString(bytes_req)+'-'*10+'\n'+orig_resp)
            self._txtInput.setEditable(self._editable)
            # NOTE(review): stores the response STRING rather than the
            # original `content`, unlike the sibling setMessage
            # implementation -- confirm getMessage() consumers expect this.
            self._currentMessage= orig_resp
        return
  def download_all_JS_files(self,messageIsRequest,message):
    request_byte_array=message.getMessageInfo().getRequest()
    if messageIsRequest:
      request_http_service=message.getMessageInfo().getHttpService()
      request_byte_array=message.getMessageInfo().getRequest()
      request_object=self._helpers.analyzeRequest(request_http_service, request_byte_array)

      #Extract hostname from header
      hostname=webcommon.get_host_header_from_request(self,request_object)

      #Check if the URL is in scope. This is to eliminate stray traffic.
      if hostname and hostname[1] in urls_in_scope:
        request_url=request_object.getUrl()
        if str(request_url).endswith('.js'):
          print request_url
          os.chdir(download_path)
          os.system("wget "+str(request_url))
Example #10
0
  def detect_valid_referer(self,messageIsRequest,message):
    #Only process requests as that's where the valid Referer should be 
    request_http_service=message.getMessageInfo().getHttpService()
    request_byte_array=message.getMessageInfo().getRequest()
    requestInfo=self._helpers.analyzeRequest(request_http_service, request_byte_array)
    request_url=requestInfo.getUrl()

    if messageIsRequest:
      #Extract hostname from header
      hostname=webcommon.get_host_header_from_request(self,requestInfo)

      #Check if the URL is in scope. This is to eliminate stray traffic.
      if hostname and hostname[1] in urls_in_scope:
        #Extract referer. If it's not a referer from the same site - print it out and let the engineer decide if it is unsafe.
        referer=webcommon.get_referer_header_from_request(self,requestInfo)
        if not referer[1].startswith(referer_header_value):
          print str(request_url)+'\t\t'+str(referer[1])
Example #11
0
    def remove_sessioncookie_from_request(self, messageIsRequest, message):
        """Return the request bytes with every configured session cookie
        stripped; out-of-scope requests are returned untouched.

        Side effect: updates the module-level `hostname` global.
        """
        request_byte_array = message.getMessageInfo().getRequest()
        requestInfo = self._helpers.analyzeRequest(request_byte_array)

        # Extract hostname from header
        global hostname
        hostname = webcommon.get_host_header_from_request(self, requestInfo)

        # Check if the URL is in scope. This is to eliminate stray traffic.
        if hostname and hostname[1] in urls_in_scope:
            request_string = self._helpers.bytesToString(request_byte_array)
            # Find and then remove all session cookies
            for cookie in session_cookie_names:
                regex = re.compile(r"(.*)(%s=\w+)(;*?)" % cookie, re.IGNORECASE | re.DOTALL)
                m1 = regex.match(request_string)
                if m1:
                    # BUG FIX: the matched text was previously fed back
                    # into re.sub as a PATTERN, so regex metacharacters
                    # in the matched cookie text could corrupt the
                    # substitution. Remove it as a literal string instead.
                    request_string = request_string.replace(m1.group(2), "")
                    # Restore the manipulated string to the byte array so it can be reused.
                    request_byte_array = self._helpers.stringToBytes(request_string)

        return request_byte_array
Example #12
0
  def get_all_hosts(self):
    """Return the unique scheme://host origins seen in the Burp proxy
    history, in first-seen order."""
    unique_list_of_urls = []
    seen = set()  # O(1) membership instead of rescanning the list
    # Get proxy history
    proxyhistory = self._callbacks.getProxyHistory()

    #Read each request in proxy history
    for request in proxyhistory:
      request_byte_array = request.getRequest()
      request_http_service = request.getHttpService()
      requestInfo = self._helpers.analyzeRequest(request_http_service, request_byte_array)

      # "scheme://host" is the text before the third '/' of the URL.
      url_parts = str(requestInfo.getUrl()).split('/')
      url = url_parts[0] + '//' + url_parts[2]

      # DEAD CODE FIX: the unused hostname extraction (assigned to a
      # local that was never read) has been removed.
      if url not in seen:
        seen.add(url)
        unique_list_of_urls.append(url)

    return unique_list_of_urls
Example #13
0
  def get_all_hosts(self):
    """Return the unique scheme://host origins seen in the Burp proxy
    history, in first-seen order."""
    unique_list_of_urls = []
    seen = set()  # O(1) membership instead of rescanning the list
    # Get proxy history
    proxyhistory = self._callbacks.getProxyHistory()

    #Read each request in proxy history
    for request in proxyhistory:
      request_byte_array = request.getRequest()
      request_http_service = request.getHttpService()
      requestInfo = self._helpers.analyzeRequest(request_http_service, request_byte_array)

      # "scheme://host" is the text before the third '/' of the URL.
      url_parts = str(requestInfo.getUrl()).split('/')
      url = url_parts[0] + '//' + url_parts[2]

      # DEAD CODE FIX: the unused hostname extraction (assigned to a
      # local that was never read) has been removed.
      if url not in seen:
        seen.add(url)
        unique_list_of_urls.append(url)

    return unique_list_of_urls
Example #14
0
  def remove_sessioncookie_from_request(self,messageIsRequest,message):
    """Return the request bytes with every configured session cookie
    stripped; out-of-scope requests are returned untouched.

    Side effect: updates the module-level `hostname` global.
    """
    request_byte_array = message.getMessageInfo().getRequest()
    requestInfo = self._helpers.analyzeRequest(request_byte_array)

    #Extract hostname from header
    global hostname
    hostname = webcommon.get_host_header_from_request(self, requestInfo)

    #Check if the URL is in scope. This is to eliminate stray traffic.
    if hostname and hostname[1] in urls_in_scope:
      request_string = self._helpers.bytesToString(request_byte_array)
      #Find and then remove all session cookies
      for cookie in session_cookie_names:
        regex = re.compile(r'(.*)(%s=\w+)(;*?)' % cookie, re.IGNORECASE | re.DOTALL)
        m1 = regex.match(request_string)
        if m1:
          # BUG FIX: the matched text was previously fed back into
          # re.sub as a PATTERN, so regex metacharacters in the matched
          # cookie text could corrupt the substitution. Remove it as a
          # literal string instead.
          request_string = request_string.replace(m1.group(2), '')
          #Restore the manipulated string to the byte array so it can be reused.
          request_byte_array = self._helpers.stringToBytes(request_string)

    return request_byte_array