def Engine():  # lets begin it!
    """
    Main scanning engine.

    Takes the target from user input, then runs in one of two modes:
    without --crawl it tests the single given endpoint; with --crawl it
    walks every reachable page. Each discovered form is exercised with
    Referer/Origin validation checks, cookie checks, Anti-CSRF token
    entropy/encoding analysis, token tamper checks and POST-based CSRF
    probes. Results are emitted via VulnLogger/NovulLogger and the
    post-scan Analysis().
    """
    os.system('clear')  # Clear shit from terminal :p
    banner()  # Print the banner
    banabout()  # The second banner
    web, fld = inputin()  # Take the input
    form1 = testFormx1()  # Get the form 1 ready
    form2 = testFormx2()  # Get the form 2 ready
    # For the cookies that we encounter during requests...
    Cookie0 = http.cookiejar.CookieJar()  # First as User1
    Cookie1 = http.cookiejar.CookieJar()  # Then as User2
    resp1 = build_opener(HTTPCookieProcessor(Cookie0))  # Process cookies
    resp2 = build_opener(HTTPCookieProcessor(Cookie1))  # Process cookies
    actionDone = []  # init to the done stuff
    csrf = ''  # no token initialise / invalid token
    ref_detect = 0x00  # Null Char Flag
    ori_detect = 0x00  # Null Char Flags
    form = Debugger.Form_Debugger()  # init to the form parser+token generator
    bs1 = BeautifulSoup(form1).findAll(
        'form', action=True)[0]  # make sure the stuff works properly
    bs2 = BeautifulSoup(form2).findAll('form', action=True)[0]  # same as above
    init1 = web  # First init
    resp1.open(init1)  # Makes request as User2
    resp2.open(init1)  # Make request as User1
    # Now there are 2 different modes of scanning and crawling here.
    # 1st -> Testing a single endpoint without the --crawl flag.
    # 2nd -> Testing all endpoints with the --crawl flag.
    try:
        # Implementing the first mode. [NO CRAWL]
        if not CRAWL_SITE:
            url = web
            response = Get(url).text
            try:
                verbout(O, 'Trying to parse response...')
                soup = BeautifulSoup(response)  # Parser init
            except HTMLParser.HTMLParseError:
                verbout(R, 'BeautifulSoup Error: ' + url)
            i = 0  # Init user number
            if REFERER_ORIGIN_CHECKS:
                # Referer Based Checks if True...
                verbout(
                    O, 'Checking endpoint request validation via ' +
                    color.GREY + 'Referer' + color.END + ' Checks...')
                if Referer(url):
                    ref_detect = 0x01
                verbout(O, 'Confirming the vulnerability...')
                # We have finished with Referer Based Checks, lets go for Origin Based Ones...
                verbout(
                    O, 'Confirming endpoint request validation via ' +
                    color.GREY + 'Origin' + color.END + ' Checks...')
                if Origin(url):
                    ori_detect = 0x01
            # Now lets get the forms...
            verbout(
                O, 'Retrieving all forms on ' + color.GREY + url + color.END +
                '...')
            for m in Debugger.getAllForms(
                    soup):  # iterating over all forms extracted
                verbout(O, 'Testing form:\n' + color.CYAN)
                formPrettify(m.prettify())
                verbout('', '')
                FORMS_TESTED.append('(i) ' + url + ':\n\n' + m.prettify() +
                                    '\n')
                # Forms without an "action" attribute get a synthesized one
                # pointing back at the current path component.
                try:
                    if m['action']:
                        pass
                except KeyError:
                    m['action'] = '/' + url.rsplit('/', 1)[1]
                    ErrorLogger(url, 'No standard form "action".')
                action = Parser.buildAction(
                    url, m['action'])  # get all forms which have 'action' attribute
                if not action in actionDone and action != '':  # if url returned is not a null value nor duplicate...
                    # If form submission is kept to True
                    if FORM_SUBMISSION:
                        try:
                            # NOTE: Slow connections may cause read timeouts which may result in AttributeError
                            # So the idea here is tp make requests pretending to be 3 different users.
                            # Now a series of requests will be targeted against the site with different
                            # identities. Refer to XSRFProbe wiki for more info.
                            #
                            # NOTE: Slow connections may cause read timeouts which may result in AttributeError
                            result, genpoc = form.prepareFormInputs(
                                m)  # prepare inputs as user 1
                            r1 = Post(
                                url, action,
                                result)  # make request with token values generated as user1
                            result, genpoc = form.prepareFormInputs(
                                m)  # prepare inputs as user 2
                            r2 = Post(
                                url, action,
                                result)  # again make request with token values generated as user2
                            # Go for cookie based checks
                            if COOKIE_BASED:
                                Cookie(url, r1)
                            # Go for token based entropy checks...
                            try:
                                if m['name']:
                                    query, token = Entropy(
                                        result, url, r1.headers, m.prettify(),
                                        m['action'], m['name'])
                            except KeyError:
                                query, token = Entropy(result, url, r1.headers,
                                                       m.prettify(),
                                                       m['action'])
                            # Now its time to detect the encoding type (if any) of the Anti-CSRF token.
                            fnd, detct = Encoding(token)
                            if fnd == 0x01 and detct:
                                VulnLogger(
                                    url,
                                    'Token is a string encoded value which can be probably decrypted.',
                                    '[i] Encoding: ' + detct)
                            else:
                                NovulLogger(
                                    url,
                                    'Anti-CSRF token is not a string encoded value.'
                                )
                            # Go for token parameter tamper checks.
                            if (query and token):
                                txor = Tamper(url, action, result, r2.text,
                                              query, token)
                                o2 = resp2.open(
                                    url).read()  # make request as user2
                                try:
                                    form2 = Debugger.getAllForms(BeautifulSoup(
                                        o2))[i]  # user2 gets his form
                                except IndexError:
                                    verbout(R, 'Form Index Error')
                                    ErrorLogger(url, 'Form Index Error.')
                                    continue  # Making sure program won't end here (dirty fix :( )
                                verbout(GR, 'Preparing form inputs...')
                                contents2, genpoc = form.prepareFormInputs(
                                    form2)  # prepare for form 3 as user3
                                r3 = Post(
                                    url, action,
                                    contents2)  # make request as user3 with user3's form
                                if (POST_BASED) and ((not query) or (txor)):
                                    try:
                                        if m['name']:
                                            PostBased(url, r1.text, r2.text,
                                                      r3.text, m['action'],
                                                      result, genpoc,
                                                      m.prettify(), m['name'])
                                    except KeyError:
                                        PostBased(url, r1.text, r2.text,
                                                  r3.text, m['action'], result,
                                                  genpoc, m.prettify())
                                else:
                                    print(
                                        color.GREEN +
                                        ' [+] The form was requested with a Anti-CSRF token.'
                                    )
                                    print(color.GREEN + ' [+] Endpoint ' +
                                          color.BG + ' NOT VULNERABLE ' +
                                          color.END + color.GREEN +
                                          ' to POST-Based CSRF Attacks!')
                                    NovulLogger(
                                        url,
                                        'Not vulnerable to POST-Based CSRF Attacks.'
                                    )
                        except HTTPError as msg:  # if runtime exception...
                            verbout(R, 'Exception : ' +
                                    msg.__str__())  # again exception :(
                            ErrorLogger(url, msg)
                    actionDone.append(action)  # add the stuff done
                    i += 1  # Increase user iteration
        else:
            # Implementing the 2nd mode [CRAWLING AND SCANNING].
            verbout(GR, "Initializing crawling and scanning...")
            crawler = Crawler.Handler(init1, resp1)  # Init to the Crawler handler
            while crawler.noinit():  # Until 0 urls left
                url = next(crawler)  # Go for next!
                print(C + 'Testing :> ' + color.CYAN +
                      url)  # Display what url its crawling
                try:
                    soup = crawler.process(fld)  # Start the parser
                    if not soup:
                        continue  # Making sure not to end the program yet...
                    i = 0  # Set count = 0 (user number 0, which will be subsequently incremented)
                    if REFERER_ORIGIN_CHECKS:
                        # Referer Based Checks if True...
                        verbout(
                            O, 'Checking endpoint request validation via ' +
                            color.GREY + 'Referer' + color.END + ' Checks...')
                        if Referer(url):
                            ref_detect = 0x01
                        verbout(O, 'Confirming the vulnerability...')
                        # We have finished with Referer Based Checks, lets go for Origin Based Ones...
                        verbout(
                            O, 'Confirming endpoint request validation via ' +
                            color.GREY + 'Origin' + color.END + ' Checks...')
                        if Origin(url):
                            ori_detect = 0x01
                    # Now lets get the forms...
                    verbout(
                        O, 'Retrieving all forms on ' + color.GREY + url +
                        color.END + '...')
                    for m in Debugger.getAllForms(
                            soup):  # iterating over all forms extracted
                        FORMS_TESTED.append('(i) ' + url + ':\n\n' +
                                            m.prettify() + '\n')
                        try:
                            if m['action']:
                                pass
                        except KeyError:
                            m['action'] = '/' + url.rsplit('/', 1)[1]
                            ErrorLogger(url, 'No standard "action" attribute.')
                        action = Parser.buildAction(
                            url, m['action']
                        )  # get all forms which have 'action' attribute
                        if not action in actionDone and action != '':  # if url returned is not a null value nor duplicate...
                            # If form submission is kept to True
                            if FORM_SUBMISSION:
                                try:
                                    result, genpoc = form.prepareFormInputs(
                                        m)  # prepare inputs as user 1
                                    r1 = Post(
                                        url, action,
                                        result)  # make request with token values generated as user1
                                    result, genpoc = form.prepareFormInputs(
                                        m)  # prepare inputs as user 2
                                    r2 = Post(
                                        url, action,
                                        result)  # again make request with token values generated as user2
                                    if COOKIE_BASED:
                                        Cookie(url, r1)
                                    # Go for token based entropy checks...
                                    try:
                                        if m['name']:
                                            query, token = Entropy(
                                                result, url, r1.headers,
                                                m.prettify(), m['action'],
                                                m['name'])
                                    except KeyError:
                                        query, token = Entropy(
                                            result, url, r1.headers,
                                            m.prettify(), m['action'])
                                        ErrorLogger(
                                            url, 'No standard form "name".')
                                    # Now its time to detect the encoding type (if any) of the Anti-CSRF token.
                                    fnd, detct = Encoding(token)
                                    if fnd == 0x01 and detct:
                                        VulnLogger(
                                            url,
                                            'String encoded token value. Token might be decrypted.',
                                            '[i] Encoding: ' + detct)
                                    else:
                                        NovulLogger(
                                            url,
                                            'Anti-CSRF token is not a string encoded value.'
                                        )
                                    # Go for token parameter tamper checks.
                                    if (query and token):
                                        txor = Tamper(url, action, result,
                                                      r2.text, query, token)
                                        o2 = resp2.open(
                                            url).read()  # make request as user2
                                        try:
                                            form2 = Debugger.getAllForms(
                                                BeautifulSoup(o2))[
                                                    i]  # user2 gets his form
                                        except IndexError:
                                            verbout(R, 'Form Index Error')
                                            ErrorLogger(url,
                                                        'Form Index Error.')
                                            continue  # making sure program won't end here (dirty fix :( )
                                        verbout(GR, 'Preparing form inputs...')
                                        contents2, genpoc = form.prepareFormInputs(
                                            form2)  # prepare for form 3 as user3
                                        r3 = Post(
                                            url, action,
                                            contents2)  # make request as user3 with user3's form
                                        if (POST_BASED) and ((query == '') or
                                                             (txor == True)):
                                            try:
                                                if m['name']:
                                                    PostBased(
                                                        url, r1.text, r2.text,
                                                        r3.text, m['action'],
                                                        result, genpoc,
                                                        m.prettify(),
                                                        m['name'])
                                            except KeyError:
                                                PostBased(url, r1.text,
                                                          r2.text, r3.text,
                                                          m['action'], result,
                                                          genpoc, m.prettify())
                                        else:
                                            print(
                                                color.GREEN +
                                                ' [+] The form was requested with a Anti-CSRF token.'
                                            )
                                            print(color.GREEN +
                                                  ' [+] Endpoint ' + color.BG +
                                                  ' NOT VULNERABLE ' +
                                                  color.END + color.GREEN +
                                                  ' to P0ST-Based CSRF Attacks!')
                                            NovulLogger(
                                                url,
                                                'Not vulnerable to POST-Based CSRF Attacks.'
                                            )
                                except HTTPError as msg:  # if runtime exception...
                                    verbout(
                                        color.RED, ' [-] Exception : ' +
                                        color.END +
                                        msg.__str__())  # again exception :(
                                    ErrorLogger(url, msg)
                            actionDone.append(action)  # add the stuff done
                            i += 1  # Increase user iteration
                except URLError as e:  # if again...
                    verbout(R, 'Exception at : ' + url)  # again exception -_-
                    time.sleep(0.4)
                    verbout(O, 'Moving on...')
                    ErrorLogger(url, e)
                    continue  # make sure it doesn't stop at exceptions
                # This error usually happens when some sites are protected by some load balancer
                # example Cloudflare. These domains return a 403 forbidden response in various
                # contexts. For example when making reverse DNS queries.
                except HTTPError as e:
                    if str(e.code) == '403':
                        verbout(R, 'HTTP Authentication Error!')
                        verbout(R, 'Error Code : ' + O + str(e.code))
                        ErrorLogger(url, e)
                        quit()
        GetLogger(
        )  # The scanning has finished, so now we can log out all the links ;)
        print('\n' + G + "Scan completed!" + '\n')
        Analysis()  # For Post Scan Analysis
    except KeyboardInterrupt as e:  # Incase user wants to exit :') (while crawling)
        verbout(R, 'User Interrupt!')
        time.sleep(1.5)
        Analysis()  # For Post scan Analysis
        print(R + 'Aborted!')  # say goodbye
        ErrorLogger('KeyBoard Interrupt', 'Aborted')
        GetLogger(
        )  # The scanning has interrupted, so now we can log out all the links ;)
        quit()
def process(self, root):
    """
    Fetch and parse the crawler's current URI, harvesting new same-site
    links into the visit queue.

    :param root: pattern (site root / field) that candidate links must
                 match to be queued.
    :returns: the parsed BeautifulSoup document on success, or None when
              the URL is dead, non-HTML, or fails to parse.
    """
    # Our first task is to remove urls that aren't to be scanned and have been
    # passed via the --exclude parameter.
    if EXCLUDE_DIRS:
        for link in EXCLUDE_DIRS:
            self.toVisit.remove(link)
    url = self.currentURI  # Main Url (Current)
    try:
        query = Get(url)  # Open it (to check if it exists)
        if query != None and not str(
                query.status_code).startswith('40'):  # Avoiding 40x errors
            INTERNAL_URLS.append(
                url)  # We append it to the list of valid urls
        else:
            if url in self.toVisit:
                self.toVisit.remove(url)
    except (urllib.error.HTTPError, urllib.error.URLError
            ) as msg:  # Incase there is an exception connecting to Url
        verbout(R, 'HTTP Request Error: ' + msg.__str__())
        if url in self.toVisit:
            self.toVisit.remove(url)  # Remove non-existent / errored urls
        return
    # Making sure the content type is in HTML format, so that BeautifulSoup
    # can parse it...
    if not query or not re.search('html', query.headers['Content-Type']):
        return
    # Just in case there is a redirection, we are supposed to follow it :D
    verbout(GR, 'Making request to new location...')
    if hasattr(query.headers, 'Location'):
        url = query.headers['Location']
    verbout(O, 'Reading response...')
    response = query.content  # Read the response contents
    try:
        verbout(O, 'Trying to parse response...')
        soup = BeautifulSoup(response)  # Parser init
    except HTMLParser.HTMLParseError:
        # Unparseable page: mark it visited and drop it from the queue.
        verbout(R, 'BeautifulSoup Error: ' + url)
        self.visited.append(url)
        if url in self.toVisit:
            self.toVisit.remove(url)
        return
    for m in soup.findAll('a', href=True):  # find out all href^?://*
        app = ''
        # Making sure that href is not a function or doesn't begin with http://
        if not re.match(r'javascript:', m['href']) or re.match(
                'http://', m['href']):
            app = Parser.buildUrl(url, m['href'])
        # If we get a valid link
        if app != '' and re.search(root, app):
            # Getting rid of Urls starting with '../../../..'
            while re.search(RID_DOUBLE, app):
                p = re.compile(RID_COMPILE)
                app = p.sub('/', app)
            # Getting rid of Urls starting with './'
            p = re.compile(RID_SINGLE)
            app = p.sub('', app)
            # Add new link to the queue only if its pattern has not been added yet
            uriPattern = removeIDs(app)  # remove IDs
            if self.notExist(uriPattern) and app != url:
                verbout(G, 'Added :> ' + color.BLUE +
                        app)  # display what we have got!
                self.toVisit.append(app)  # add up urls to visit
                self.uriPatterns.append(uriPattern)
    self.visited.append(url)  # add urls visited
    return soup  # go back!
def Tamper(url, action, req, body, query, para):
    '''
    Tamper the Anti-CSRF token found in a request and compare responses
    (status + content length) to detect weak token validation.

    :param url: endpoint under test.
    :param action: form action the request is POSTed to.
    :param req: dict of form parameters. NOTE: mutated in place -- the
                token parameter is overwritten and finally deleted.
    :param body: response body of the original (untampered) request.
    :param query: name of the Anti-CSRF token parameter.
    :param para: value of the Anti-CSRF token.
    :returns: True if any tampered/removed-token request was accepted
              (token validation looks broken), else False.
    '''
    verbout(color.RED, '\n +---------------------------------------+')
    verbout(color.RED, ' | Anti-CSRF Token Tamper Validation |')
    verbout(color.RED, ' +---------------------------------------+\n')
    # Null char flags (hex) -- one per tamper technique below.
    flagx1 = 0x00
    flagx2 = 0x00
    flagx3 = 0x00
    verbout(GR, 'Proceeding for CSRF attack via Anti-CSRF token tampering...')
    # First of all lets get out token from request
    if para == '':
        return True
    # Coverting the token to a raw string, cause some special
    # chars might fu*k with the operation.
    value = r'%s' % para
    # BUGFIX: the original code did `copy = req`, which merely aliased the
    # dict; by the time `copy` was logged the token had been tampered with
    # and deleted. Take a shallow snapshot *before* any mutation so the
    # vulnerability log shows the request as it was actually received.
    copy = dict(req)
    # Alright lets start...
    # [Step 1]: First we take the token and then replace a particular character
    # at a specific position (here at 4th position) and test the response body.
    #
    # Required check for checking if string at that position isn't the
    # same char we are going to replace with.
    verbout(GR, 'Tampering Token by '+color.GREY+'index replacement'+color.END+'...')
    if value[3] != 'a':
        tampvalx1 = replaceStrIndex(value, 3, 'a')
    else:
        tampvalx1 = replaceStrIndex(value, 3, 'x')
    verbout(color.BLUE, ' [+] Original Token: '+color.CYAN+value)
    verbout(color.BLUE, ' [+] Tampered Token: '+color.CYAN+tampvalx1)
    # Lets build up the request...
    req[query] = tampvalx1
    resp = Post(url, action, req)
    # If there is a 40x (Not Found) or a 50x (Internal Error) error,
    # we assume that the tamper did not work :( But if there is a 20x
    # (Accepted) or a 30x (Redirection), then we know it worked.
    #
    # Or if the previous request has same content length as this tampered
    # request, then we have the vulnerability.
    #
    # NOTE: This algorithm has lots of room for improvement.
    if ((str(resp.status_code).startswith('2') and not any(search(s, resp.text, I) for s in TOKEN_ERRORS)) or (len(body) == len(resp.text))):
        verbout(color.RED, ' [-] Anti-CSRF Token tamper by index replacement returns valid response!')
        flagx1 = 0x01
        VulnLogger(url, 'Anti-CSRF Token tamper by index replacement returns valid response.', '[i] POST Query: '+req.__str__())
    else:
        verbout(color.RED, ' [+] Token tamper in request does not return valid response!')
        NovulLogger(url, 'Anti-CSRF Token tamper by index replacement does not return valid response.')
    # [Step 2]: Second we take the token and then remove a character
    # at a specific position and test the response body.
    verbout(GR, 'Tampering Token by '+color.GREY+'index removal'+color.END+'...')
    tampvalx2 = replaceStrIndex(value, 3)
    verbout(color.BLUE, ' [+] Original Token: '+color.CYAN+value)
    verbout(color.BLUE, ' [+] Tampered Token: '+color.CYAN+tampvalx2)
    # Lets build up the request...
    req[query] = tampvalx2
    resp = Post(url, action, req)
    # Same acceptance heuristic as Step 1.
    if ((str(resp.status_code).startswith('2') and not any(search(s, resp.text, I) for s in TOKEN_ERRORS)) or (len(body) == len(resp.text))):
        verbout(color.RED, ' [-] Anti-CSRF Token tamper by index removal returns valid response!')
        flagx2 = 0x01
        VulnLogger(url, 'Anti-CSRF Token tamper by index removal returns valid response.', '[i] POST Query: '+req.__str__())
    else:
        verbout(color.RED, ' [+] Token tamper in request does not return valid response!')
        NovulLogger(url, 'Anti-CSRF Token tamper by index removal does not return valid response.')
    # [Step 3]: Third we take the token and then remove the whole
    # anticsrf token and test the response body.
    verbout(GR, 'Tampering Token by '+color.GREY+'Token removal'+color.END+'...')
    # Removing the anti-csrf token from request
    del req[query]
    verbout(color.GREY, ' [+] Removed token parameter from request!')
    # Lets build up the request...
    resp = Post(url, action, req)
    # Same acceptance heuristic as Step 1.
    if ((str(resp.status_code).startswith('2') and not any(search(s, resp.text, I) for s in TOKEN_ERRORS)) or (len(body) == len(resp.text))):
        verbout(color.RED, ' [-] Anti-CSRF Token removal returns valid response!')
        flagx3 = 0x01
        VulnLogger(url, 'Anti-CSRF Token removal returns valid response.', '[i] POST Query: '+req.__str__())
    else:
        verbout(color.RED, ' [+] Token tamper in request does not return valid response!')
        NovulLogger(url, 'Anti-CSRF Token removal does not return valid response.')
    # If any of the forgeries worked...
    if flagx1 or flagx2 or flagx3:
        verbout(color.RED, ' [+] The tampered token value works! Endpoint '+color.BR+' VULNERABLE to Replay Attacks '+color.END+'!')
        verbout(color.ORANGE, ' [-] The Tampered Anti-CSRF Token requested does NOT return a 40x or 50x response! ')
        print(color.RED+' [-] Endpoint '+color.BR+' CONFIRMED VULNERABLE '+color.END+color.RED+' to Request Forgery Attacks...')
        print(color.ORANGE+' [!] Vulnerability Type: '+color.BR+' Non-Unique Anti-CSRF Tokens in Requests '+color.END+'\n')
        VulnLogger(url, 'Anti-CSRF Tokens are not Unique. Token Reuse detected.', '[i] Request: '+str(copy))
        return True
    else:
        print(color.RED+' [-] The Tampered Anti-CSRF Token requested returns a 40x or 50x response... ')
        print(color.GREEN+' [-] Endpoint '+color.BG+' NOT VULNERABLE '+color.END+color.ORANGE+' to CSRF Attacks...')
        print(color.ORANGE+' [!] CSRF Mitigation Method: '+color.BG+' Unique Anti-CSRF Tokens '+color.END+'\n')
        NovulLogger(url, 'Unique Anti-CSRF Tokens. No token reuse.')
        return False
def Persistence(url, postq):
    '''
    Check whether session cookies issued by the server are persistent
    (carry an expiry), which is an insecure practice that eases CSRF.

    :param url: endpoint to probe with GET requests.
    :param postq: a previously obtained POST response object whose
                  cookies are inspected as well.
    '''
    verbout(color.RED, '\n +-----------------------------------+')
    verbout(color.RED, ' | Cookie Persistence Validation |')
    verbout(color.RED, ' +-----------------------------------+\n')
    # Checking if user has supplied a value.
    verbout(GR, 'Proceeding to test for ' + color.GREY + 'Cookie Persistence' + color.END + '...')
    time.sleep(0.7)
    found = 0x00  # set once any persistent cookie is observed
    # [Step 1]: Lets examine now whether cookies set by server are persistent or not.
    # For this we'll have to parse the cookies set by the server and check for the
    # time when the cookie expires.
    #
    # First its time for GET type requests. Lets prepare our request.
    cookies = []
    verbout(C, 'Proceeding to test cookie persistence via ' + color.CYAN + 'Prepared GET Requests' + color.END + '...')
    gen_headers = HEADER_VALUES
    gen_headers['User-Agent'] = 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36'
    if COOKIE_VALUE:
        for cookie in COOKIE_VALUE:
            gen_headers['Cookie'] = cookie
    verbout(GR, 'Making the request...')
    req = Get(url, headers=gen_headers)
    if req.cookies:
        for cook in req.cookies:
            if cook.expires:
                print(color.GREEN + ' [+] Persistent Cookies found in Response Headers!')
                print(color.GREY + ' [+] Cookie: ' + color.CYAN + cook.__str__())
                # cookie.expires is a unix timestamp, so we convert it to a
                # human readable format before printing.
                print(color.GREEN + ' [+] Cookie Expiry Period: ' + color.ORANGE + datetime.fromtimestamp(cook.expires).__str__())
                found = 0x01
                VulnLogger(url, 'Persistent Session Cookies Found.', '[i] Cookie: ' + req.headers.get('Set-Cookie'))
            else:
                NovulLogger(url, 'No Persistent Session Cookies.')
    if found == 0x00:
        verbout(R, 'No persistent session cookies identified on GET Type Requests!')
        verbout(C, 'Proceeding to test cookie persistence on ' + color.CYAN + 'POST Requests' + color.END + '...')
    # Now its time for POST Based requests.
    #
    # NOTE: As a standard method, every web application should supply a cookie upon a POST query.
    # It might or might not be in case of GET requests.
    if postq.cookies:
        for cookie in postq.cookies:
            if cookie.expires:
                print(color.GREEN + ' [+] Persistent Cookies found in Response Headers!')
                print(color.GREY + ' [+] Cookie: ' + color.CYAN + cookie.__str__())
                # Convert the expiry timestamp to a human readable format.
                print(color.GREEN + ' [+] Cookie Expiry Period: ' + color.ORANGE + datetime.fromtimestamp(cookie.expires).__str__())
                found = 0x01
                # BUGFIX: the original logged `req.headers` (the GET response)
                # here; this branch inspects the POST response, so log that.
                VulnLogger(url, 'Persistent Session Cookies Found.', '[i] Cookie: ' + postq.headers.get('Set-Cookie'))
                print(color.ORANGE + ' [!] Probable Insecure Practice: ' + color.BR + ' Persistent Session Cookies ' + color.END)
            else:
                NovulLogger(url, 'No Persistent Cookies.')
    if found == 0x00:
        verbout(R, 'No persistent session cookies identified upon POST Requests!')
        print(color.GREEN + ' [+] Endpoint might be ' + color.BG + ' NOT VULNERABLE ' + color.END + color.GREEN + ' to CSRF attacks!')
        print(color.GREEN + ' [+] Detected : ' + color.BG + ' No Persistent Cookies ' + color.END)
    # [Step 2]: The idea here is to try to identify cookie persistence on basis of observing
    # variations in cases of using different user-agents. For this test we have chosen 5 different
    # well used and common user-agents (as below) and then we observe the variation of set-cookie
    # header under different conditions.
    #
    # We'll test this method only when we haven't identified requests based on previous algo.
    if found != 0x01:
        verbout(C, 'Proceeding to test cookie persistence via ' + color.CYAN + 'User-Agent Alteration' + color.END + '...')
        user_agents = {
            'Chrome on Windows 8.1':
            'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36',
            'Safari on iOS':
            'Mozilla/5.0 (iPhone; CPU iPhone OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B466 Safari/600.1.4',
            'IE6 on Windows XP':
            'Mozilla/5.0 (Windows; U; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 2.0.50727)',
            'Opera on Windows 10':
            'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36 OPR/43.0.2442.991',
            'Chrome on Android':
            'Mozilla/5.0 (Linux; U; Android 2.3.1; en-us; MID Build/GINGERBREAD) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1'
        }
        verbout(GR, 'Setting custom generic headers...')
        gen_headers = HEADER_VALUES
        # BUGFIX: `resps` was never initialised in the original, causing a
        # NameError on the first append below.
        resps = []
        for name, agent in user_agents.items():
            verbout(C, 'Using User-Agent : ' + color.CYAN + name)
            verbout(GR, 'Value : ' + color.ORANGE + agent)
            gen_headers['User-Agent'] = agent
            if COOKIE_VALUE:
                for cookie in COOKIE_VALUE:
                    gen_headers['Cookie'] = cookie
            req = Get(url, headers=gen_headers)
            resps.append(req.headers.get('Set-Cookie'))
        if checkDuplicates(resps):
            verbout(G, 'Set-Cookie header does not change with varied User-Agents...')
            verbout(color.GREEN, ' [+] Possible persistent session cookies found...')
            print(color.RED + ' [+] Possible CSRF Vulnerability Detected : ' + color.ORANGE + url + '!')
            print(color.ORANGE + ' [!] Probable Insecure Practice: ' + color.BY + ' Persistent Session Cookies ' + color.END)
            VulnLogger(url, 'Persistent Session Cookies Found.', '[i] Cookie: ' + req.headers.get('Set-Cookie'))
        else:
            verbout(G, 'Set-Cookie header changes with varied User-Agents...')
            verbout(R, 'No possible persistent session cookies found...')
            verbout(color.GREEN, ' [+] Endpoint ' + color.BG + ' PROBABLY NOT VULNERABLE ' + color.END + color.GREEN + ' to CSRF attacks!')
            verbout(color.ORANGE, ' [+] Application Practice Method Detected : ' + color.BG + ' No Persistent Cookies ' + color.END)
            NovulLogger(url, 'No Persistent Cookies.')
def Get(url, headers=headers):
    '''
    The main use of this function is as a Url Requester [GET].

    :param url: the url to request.
    :param headers: headers dict to send (defaults to the module-level
                    `headers`).
    :returns: the `requests` response object, or None when the url points
              to a file/executable or any request exception occurs.
    '''
    # We do not verify the request while GET requests
    time.sleep(DELAY_VALUE)  # We make requests after the time delay
    # Making sure the url is not a file.
    # BUGFIX: the original tested membership in `(FILE_EXTENSIONS or
    # EXECUTABLES)`, which evaluates to FILE_EXTENSIONS alone (a truthy
    # container), so EXECUTABLES was never consulted. Check both.
    extension = url.split('.')[-1].lower()
    if extension in FILE_EXTENSIONS or extension in EXECUTABLES:
        FILES_EXEC.append(url)
        verbout(G, 'Found File: ' + color.BLUE + url)
        return None
    try:
        verbout(GR, 'Processing the ' + color.GREY + 'GET' + color.END + ' Request...')
        req = requests.get(url, headers=headers, timeout=TIMEOUT_VALUE, stream=False)
        # Displaying headers if DISPLAY_HEADERS is 'True'
        if DISPLAY_HEADERS:
            pheaders(req.headers)
        # Return the object
        return req
    except requests.exceptions.MissingSchema as e:
        verbout(R, 'Exception at: ' + color.GREY + url)
        verbout(R, 'Error: Invalid URL Format')
        ErrorLogger(url, e.__str__())
        return None
    except requests.exceptions.ReadTimeout as e:
        verbout(R, 'Exception at: ' + color.GREY + url)
        verbout(R, 'Error: Read Timeout. Consider increasing the timeout value via --timeout.')
        ErrorLogger(url, e.__str__())
        return None
    except requests.exceptions.HTTPError as e:  # if error
        # BUGFIX: the original referenced the undefined name `main_url`
        # here and below; report the url actually requested.
        verbout(R, "HTTP Error Encountered : " + url)
        ErrorLogger(url, e.__str__())
        return None
    except requests.exceptions.ConnectionError as e:
        verbout(R, 'Connection Aborted : ' + url)
        ErrorLogger(url, e.__str__())
        return None
    except Exception as e:
        verbout(R, "Exception Caught: " + e.__str__())
        ErrorLogger(url, e.__str__())
        return None  # if at all nothing happens :(
def prepareFormInputs(self, form):
    '''
    This method parses form types and generates strings based on their
    input types.

    :param form: a BeautifulSoup form element to walk.
    :returns: tuple (cr_input, totcr) where cr_input maps input names to
              generated/extracted values and totcr is a list of dicts
              describing each field (for PoC generation).
    '''
    verbout(O, 'Crafting inputs as form type...')
    cr_input = {}  # name -> value mapping to be POSTed
    totcr = []  # per-field descriptors for PoC generation
    verbout(GR, 'Processing ' + color.BOLD + '<input type="text" name="...')
    # get name type inputs
    for m in form.findAll('input', {'name': True, 'type': 'text'}):
        if re.search('value=', str(m).strip(),
                     re.IGNORECASE):  # Ignore case while searching for a match
            value = m['value'].encode(
                'utf8')  # make sure no encoding errors there
        else:
            value = randString()
        cr_input[m['name']] = value  # assign passed on value
        cr0 = {}
        cr0['type'] = 'text'
        cr0['name'] = m['name']
        cr0['label'] = m['name'].title()
        cr0['value'] = ''
        totcr.append(cr0)
    verbout(GR, 'Processing' + color.BOLD + ' <input type="password" name="...')
    # get password inputs
    for m in form.findAll('input', {'name': True, 'type': 'password'}):
        if re.search('value=', str(m).strip(),
                     re.IGNORECASE):  # Ignore case while searching for a match
            value = m['value'].encode(
                'utf8')  # make sure no encoding errors there
        else:
            value = randString()
        cr_input[m['name']] = value  # assign passed on value
        cr1 = {}
        cr1['type'] = 'password'
        cr1['name'] = m['name']
        cr1['label'] = 'Password'
        cr1['value'] = ''
        totcr.append(cr1)
    try:
        verbout(GR, 'Processing' + color.BOLD + ' <input type="hidden" name="...')
        # get hidden input types
        for m in form.findAll('input', {'name': True, 'type': 'hidden'}):
            if re.search('value=', m.__str__(), re.IGNORECASE
                         ):  # Ignore case while searching for a match
                value = m['value']  # make sure no encoding errors there
            else:
                value = randString()
            cr_input[m['name']] = value  # assign passed on value
            cr2 = {}
            cr2['type'] = 'hidden'
            cr2['name'] = m['name']
            cr2['label'] = ''  # Nothing since its a hidden field
            cr2['value'] = value
            totcr.append(cr2)
    except KeyboardInterrupt:
        # NOTE(review): this only blanks the last-built descriptor on a
        # user interrupt; cr2 may be unbound if interrupted before the
        # first hidden input -- confirm intent.
        cr2['value'] = ''
    verbout(GR, 'Processing ' + color.BOLD + '<input type="submit" name="...')
    # get submit buttons :D
    for m in form.findAll('input', {'name': True, 'type': 'submit'}):
        if re.search('value=', str(m).strip(),
                     re.IGNORECASE):  # Ignore case while searching for a match
            value = m['value'].encode(
                'utf8')  # make sure no encoding errors there
        else:
            value = randString()
        cr_input[m['name']] = value  # assign passed on value
    verbout(GR, 'Processing' + color.BOLD + ' <input type="checkbox" name="...')
    # get checkbox type inputs
    for m in form.findAll('input', {'name': True, 'type': 'checkbox'}):
        if re.search('value=', m.__str__(),
                     re.IGNORECASE):  # Ignore case while searching for a match
            value = m['value'].encode(
                'utf8')  # make sure no encoding errors there
        else:
            value = randString()  # assign passed on value
        cr_input[m['name']] = value  # assign discovered value
        cr3 = {}
        cr3['type'] = 'checkbox'
        cr3['name'] = m['name']
        cr3['label'] = m['name'].title()
        cr3['value'] = ''
        totcr.append(cr3)
    verbout(GR, 'Processing' + color.BOLD + ' <input type="radio" name="...')
    # get radio buttons :D
    listRadio = []  # radio group names already handled
    for m in form.findAll('input', {'name': True, 'type': 'radio'}):
        if (not m['name'] in listRadio) and re.search(
                'value=', str(m).strip(),
                re.IGNORECASE):  # Ignore case while searching for a match
            listRadio.append(m['name'])
            # NOTE(review): `value` here is left over from a previous loop
            # (it is never extracted from this radio element), and may be
            # bytes (no .encode) or unbound -- looks like a latent bug;
            # confirm before relying on radio handling.
            cr_input[m['name']] = value.encode(
                'utf8')  # make sure no encoding errors there
            cr4 = {}
            cr4['type'] = 'radio'
            cr4['name'] = m['name']
            cr4['label'] = m['name'].title()
            cr4['value'] = ''
            totcr.append(cr4)
    verbout(GR, 'Processing' + color.BOLD + ' <textarea name="...')
    # get textarea input types
    for m in form.findAll('textarea', {'name': True}):
        if len(m.contents) == 0:
            m.contents.append(randString())  # get random strings
        cr_input[m['name']] = m.contents[0].encode(
            'utf8')  # make sure no encoding errors there
        cr5 = {}
        cr5['type'] = 'text'
        cr5['name'] = m['name']
        cr5['label'] = m['name'].title()
        cr5['value'] = ''
        totcr.append(cr5)
    verbout(GR, 'Processing' + color.BOLD + ' <select name="...')
    # selection type inputs
    for m in form.findAll('select', {'name': True}):
        if m.findAll('option', value=True):
            name = m['name']  # assign passed on value
            cr_input[name] = m.findAll(
                'option', value=True)[0]['value'].encode(
                    'utf8')  # find forms fields based on value
    verbout(GR, 'Parsing final inputs...')
    return (cr_input, totcr)  # Return the form input types
def Analysis():
    '''
    Observe and analyse the patterns in which the CSRF tokens are
    generated by the server.

    Operates on the module-level REQUEST_TOKENS list gathered during the
    scan. For every pair of tokens it computes the Damerau-Levenshtein
    edit distance and alignment ratio, guesses the static/dynamic split
    of the token, and judges how forgeable the dynamic part is.
    '''
    # Analysis is only meaningful when at least two tokens were gathered.
    if len(REQUEST_TOKENS) > 1:
        print(color.RED + '\n +--------------+')
        print(color.RED + ' |   Analysis   |')
        print(color.RED + ' +--------------+\n')
        print(GR + 'Proceeding for post-scan analysis of tokens gathered...')
        verbout(G, 'A total of %s tokens was discovered during the scan' % (len(REQUEST_TOKENS)))
        # Generate all possible combinations (not considering permutations)
        # from the list of discovered tokens and derive the anti-CSRF
        # token generation pattern.
        for tokenx1, tokenx2 in itertools.combinations(REQUEST_TOKENS, 2):
            try:
                verbout(GR, 'Analysing 2 Anti-CSRF Tokens from gathered requests...')
                verbout(C, 'First Token: ' + color.ORANGE + str(tokenx1))
                verbout(C, 'Second Token: ' + color.ORANGE + str(tokenx2))
                # Calculating the edit distance via Damerau Levenshtein algorithm
                m = stringdist.rdlevenshtein(tokenx1, tokenx2)
                verbout(color.CYAN, ' [+] Edit Distance Calculated: ' + color.GREY + str(m) + '%')
                # Now its time to detect the alignment ratio
                n = stringdist.rdlevenshtein_norm(tokenx1, tokenx2)
                verbout(color.CYAN, ' [+] Alignment Ratio Calculated: ' + color.GREY + str(n))
                # If both tokens are same, then
                if tokenx1 == tokenx2:
                    verbout(C, 'Token length calculated is same: ' + color.ORANGE + 'Each %s bytes' % len(byteString(tokenx1)))
                else:
                    verbout(C, 'Token length calculated is different: ' + color.ORANGE + 'By %s bytes' % (len(byteString(tokenx1)) - len(byteString(tokenx2))))
                time.sleep(0.5)
                # Often the Anti-CSRF token is composed of two parts: one
                # remains static while the other is dynamic. For example, if
                # one request carries "837456mzy29jkd911139" and another
                # "837456mzy29jkd337221", the "837456mzy29jkd" part stays the
                # same. The idea is to detect the static and dynamic parts
                # via the DL edit distance computed above.
                if n == 0.5 or m == len(tokenx1) / 2:
                    verbout(GR, 'The tokens are composed of 2 parts (one static and other dynamic)... ')
                    p = sameSequence(tokenx1, tokenx2)
                    verbout(C, 'Static Part : ' + color.GREY + p + color.END + ' | Length: ' + str(len(p)))
                    verbout(O, 'Dynamic Part of Token 0x1: ' + color.GREY + tokenx1.replace(p, '') + color.END + ' | Length: ' + str(len(tokenx1.replace(p, ''))))
                    verbout(O, 'Dynamic Part of Token 0x2: ' + color.GREY + tokenx2.replace(p, '') + color.END + ' | Length: ' + str(len(tokenx2.replace(p, ''))))
                    # FIX: the original read `len(len(tokenx1) / 2)`, which
                    # always raises TypeError (len() of a float). The intent
                    # is: a dynamic part of roughly half the token that is
                    # <= 6 characters is weak enough to bruteforce.
                    if len(tokenx1) / 2 <= 6:
                        verbout(color.RED, ' [-] Post-Analysis reveals that token might be ' + color.BR + ' VULNERABLE ' + color.END + '!')
                        print(color.GREEN + ' [+] Possible CSRF Vulnerability Detected!')
                        print(color.ORANGE + ' [!] Vulnerability Type: ' + color.BR + ' Weak Dynamic Part of Tokens ' + color.END)
                        print(color.GREY + ' [+] Tokens can easily be ' + color.RED + ' Forged by Bruteforcing/Guessing ' + color.END + '!')
                elif n < 0.5 or m < len(tokenx1) / 2:
                    verbout(R, 'Token distance calculated is ' + color.RED + 'less than 0.5!')
                    p = sameSequence(tokenx1, tokenx2)
                    verbout(C, 'Static Part : ' + color.GREY + p + color.END + ' | Length: ' + str(len(p)))
                    verbout(O, 'Dynamic Part of Token 0x1: ' + color.GREY + tokenx1.replace(p, '') + color.END + ' | Length: ' + str(len(tokenx1.replace(p, ''))))
                    verbout(O, 'Dynamic Part of Token 0x2: ' + color.GREY + tokenx2.replace(p, '') + color.END + ' | Length: ' + str(len(tokenx2.replace(p, ''))))
                    verbout(color.RED, ' [-] Post-Analysis reveals that token might be ' + color.BR + ' VULNERABLE ' + color.END + '!')
                    print(color.GREEN + ' [+] Possible CSRF Vulnerability Detected!')
                    print(color.ORANGE + ' [!] Vulnerability Type: ' + color.BR + ' Weak Dynamic Part of Tokens ' + color.END)
                    print(color.GREY + ' [+] Tokens can easily be ' + color.RED + ' Forged by Bruteforcing/Guessing ' + color.END + '!')
                else:
                    verbout(R, 'Token distance calculated is ' + color.GREEN + 'greater than 0.5!')
                    p = sameSequence(tokenx1, tokenx2)
                    verbout(C, 'Static Part : ' + color.GREY + p + color.END + ' | Length: ' + str(len(p)))
                    verbout(O, 'Dynamic Part of Token 0x1: ' + color.GREY + tokenx1.replace(p, '') + color.END + ' | Length: ' + str(len(tokenx1.replace(p, ''))))
                    verbout(O, 'Dynamic Part of Token 0x2: ' + color.GREY + tokenx2.replace(p, '') + color.END + ' | Length: ' + str(len(tokenx2.replace(p, ''))))
                    verbout(color.RED, ' [-] Post-Analysis reveals that token might be ' + color.BG + ' NOT VULNERABLE ' + color.END + '!')
                    print(color.ORANGE + ' [!] Vulnerability Mitigation: ' + color.BG + ' Strong Dynamic Part of Tokens ' + color.END)
                    print(color.GREY + ' [+] Tokens ' + color.GREEN + ' Cannot be Forged by Bruteforcing/Guessing ' + color.END + '!')
                time.sleep(1)
            except KeyboardInterrupt:
                # Let the user skip a slow pair without aborting the analysis.
                continue
        print(C + 'Post-Scan Analysis Completed!')
def _samesite_flag(head, flag):
    '''
    Scan a response's headers for a cookie header carrying a SameSite flag.

    Mirrors the original per-step loop exactly: a cookie header whose value
    contains 'SameSite' sets the flag to 0x01; every non-cookie header sets
    it to 0x02. (The final value therefore depends on header iteration
    order, as in the original implementation.)
    '''
    for h in head:
        if 'Cookie'.lower() in h.lower():
            verbout(G, 'Found cookie header value...')
            cookieval = head[h]
            verbout(color.ORANGE, ' [+] Cookie Received: ' + color.CYAN + str(cookieval))
            m = cookieval.split(';')
            verbout(GR, 'Examining Cookie...')
            for q in m:
                if search('SameSite', q, I):
                    verbout(G, 'SameSite Flag ' + color.ORANGE + ' detected on cookie!')
                    flag = 0x01
                    q = q.split('=')[1].strip()
                    verbout(C, 'Cookie: ' + color.ORANGE + q)
                    break
        else:
            flag = 0x02
    return flag


def SameSite(url):
    '''
    This function parses and verifies the cookies with SameSite Flags.

    Three probes are made:
      1. Referer equal to the target netloc, with user-supplied cookies.
      2. A fake external Referer, without cookies.
      3. A fake external Referer, with user-supplied cookies.
    Each probe records (foundx1/2/3) whether response cookies carry a
    SameSite flag, and the combination decides the verdict printed.

    FIX: the original set `foundx3 = 0x02` and tested `foundx1 == 0x01`
    in all three steps (copy-paste); each step now tracks its own flag,
    which also makes the `0x02/0x02/0x02` verdict below reachable.
    '''
    foundx1, foundx2, foundx3 = 0x00, 0x00, 0x00
    # Step 1: check whether the server returns any SameSite flag on
    # cookies when the Referer matches the host.
    verbout(color.GREY, ' [+] Lets examine how server reacts to same referer...')
    # NOTE(review): this aliases (and mutates) the shared HEADER_VALUES
    # dict rather than copying it — kept as-is to preserve behaviour.
    gen_headers = HEADER_VALUES
    gen_headers['User-Agent'] = USER_AGENT or RandomAgent()
    verbout(GR, 'Setting Referer header same as host...')
    gen_headers['Referer'] = urlsplit(url).netloc
    if COOKIE_VALUE:
        for cook in COOKIE_VALUE:
            gen_headers['Cookie'] = cook  # last supplied cookie wins
    getreq = Get(url, headers=gen_headers)  # Making the request
    foundx1 = _samesite_flag(getreq.headers, foundx1)
    if foundx1 == 0x01:
        verbout(R, ' [+] Endpoint ' + color.ORANGE + 'SameSite Flag Cookie Validation' + color.END + ' Present!')
    # Step 2: check the security mechanisms when the Referer is different,
    # i.e. the request originates from a url other than the host.
    # (This time without the Cookie assigned.)
    verbout(color.GREY, ' [+] Lets examine how server reacts to a fake external referer...')
    gen_headers = HEADER_VALUES
    gen_headers['User-Agent'] = USER_AGENT or RandomAgent()  # Setting user-agents
    gen_headers['Referer'] = REFERER_URL  # Assigning a fake referer
    getreq = Get(url, headers=gen_headers)
    foundx2 = _samesite_flag(getreq.headers, foundx2)
    if foundx2 == 0x01:  # FIX: original tested foundx1 here
        verbout(R, ' [+] Endpoint ' + color.ORANGE + 'SameSite Flag Cookie Validation' + color.END + ' Present!')
    # Step 3: the most important step — see how the site reacts to a valid
    # (user-supplied) cookie coming from a different site, i.e. Referer set
    # to other than the host.
    #
    # TODO: Improve the logic in detection.
    verbout(color.GREY, ' [+] Lets examine how server reacts to valid cookie from a different referer...')
    gen_headers = HEADER_VALUES
    gen_headers['User-Agent'] = USER_AGENT or RandomAgent()
    gen_headers['Referer'] = REFERER_URL
    if COOKIE_VALUE:
        for cook in COOKIE_VALUE:
            gen_headers['Cookie'] = cook
    getreq = Get(url, headers=gen_headers)
    foundx3 = _samesite_flag(getreq.headers, foundx3)
    if foundx3 == 0x01:  # FIX: original tested foundx1 here
        verbout(R, 'Endpoint ' + color.ORANGE + 'SameSite Flag Cookie Validation' + color.END + ' Present!')
    # Final verdict based on the three probes.
    if (foundx1 == 0x01 and foundx3 == 0x00) and (foundx2 == 0x00 or foundx2 == 0x01):
        print(color.GREEN + ' [+] Endpoint ' + color.BG + ' NOT VULNERABLE ' + color.END + color.GREEN + ' to ANY type of CSRF attacks!')
        print(color.GREEN + ' [+] Protection Method Detected : ' + color.BG + ' SameSite Flag on Cookies ' + color.END)
    elif foundx1 == 0x02 and foundx2 == 0x02 and foundx3 == 0x02:
        print(color.GREEN + ' [+] Endpoint ' + color.BG + ' NOT VULNERABLE ' + color.END + color.GREEN + ' to CSRF attacks!')
        print(color.GREEN + ' [+] Type: ' + color.BG + ' No Cookie Set while Cross Origin Requests ' + color.END)
    else:
        verbout(R, 'Endpoint ' + color.ORANGE + 'SameSite Flag Cookie Validation' + color.END + ' Not Present!')
        verbout(R, 'Heuristic(s) reveal endpoint might be ' + color.BY + ' VULNERABLE ' + color.END + ' to CSRFs...')
        print(color.GREEN + ' [+] Possible CSRF Vulnerability Detected : ' + color.ORANGE + url + '!')
        print(color.ORANGE + ' [!] Possible Vulnerability Type: ' + color.BY + ' No SameSite Flag on Cookies ' + color.END)
def Entropy(req, url, headers, form, m_action, m_name=''):
    """
    Compare and calculate Shannon Entropy of Anti-CSRF tokens and judge
    POST-based request security.

    :param req: dict of POST parameters submitted with the form
    :param url: page url the form was found on
    :param headers: response headers of the request
    :param form: prettified HTML of the form (used for logging)
    :param m_action: the form's action attribute
    :param m_name: the form's name attribute, if any
    :return: the (query, token) pair found by Token(), or ('', '') when
             the form carries no Anti-CSRF token.
    """
    found = 0x00
    # The minimum length of a csrf token should be 5 bytes.
    min_length = 5
    # Anti-CSRF tokens longer than 256 bytes are essentially unheard of,
    # so double that is used as an upper sanity bound. This also avoids
    # mistaking multipart-upload trailing sequences for CSRF tokens — an
    # important step for decreasing false positives.
    max_length = 256 * 2
    # Shannon Entropy for a CSRF token should be at least 2.4; below that
    # the request can be forged even in presence of a token.
    min_entropy = 2.4
    # Check for common CSRF token names
    _q, para = Token(req, headers)
    # FIX: the original tested `(para and _q) == None`, comparing a
    # short-circuit expression against None with `==`. Token() returns
    # (None, None) when nothing was found, so test both explicitly.
    if _q is None or para is None:
        VulnLogger(url, 'Form Requested Without Anti-CSRF Token.', '[i] Form Requested: ' + form + '\n[i] Request Query: ' + req.__str__())
        return '', ''
    verbout(color.RED, '\n +------------------------------+')
    verbout(color.RED, ' |   Token Strength Detection   |')
    verbout(color.RED, ' +------------------------------+\n')
    # NOTE(review): this loop re-audits every token gathered so far during
    # the scan (and rebinds `para`); presumably intentional — verify.
    for para in REQUEST_TOKENS:
        # Convert the token to a string; special chars might interfere
        # with the Shannon Entropy operation.
        value = r'%s' % para
        verbout(color.CYAN, ' [!] Testing Anti-CSRF Token: ' + color.ORANGE + '%s' % (value))
        # Check length against the lower bound
        if len(value) <= min_length:
            print(color.RED + ' [-] CSRF Token Length less than 5 bytes. ' + color.ORANGE + 'Token value can be guessed/bruteforced...')
            print(color.ORANGE + ' [-] Endpoint likely ' + color.BR + ' VULNERABLE ' + color.END + color.ORANGE + ' to CSRF Attacks...')
            print(color.RED + ' [!] Vulnerability Type: ' + color.BR + ' Very Short/No Anti-CSRF Tokens ' + color.END)
            VulnLogger(url, 'Very Short Anti-CSRF Tokens.', 'Token: ' + value)
        # Check length against the upper bound
        if len(value) >= max_length:
            print(color.ORANGE + ' [+] CSRF Token Length greater than ' + color.CYAN + '256 bytes. ' + color.GREEN + 'Token value cannot be guessed/bruteforced...')
            print(color.GREEN + ' [+] Endpoint likely ' + color.BG + ' NOT VULNERABLE ' + color.END + color.GREEN + ' to CSRF Attacks...')
            print(color.GREEN + ' [!] CSRF Mitigation Method: ' + color.BG + ' Long Anti-CSRF Tokens ' + color.END)
            NovulLogger(url, 'Long Anti-CSRF tokens with Good Strength.')
            found = 0x01
        # Checking entropy
        verbout(O, 'Proceeding to calculate ' + color.GREY + 'Shannon Entropy' + color.END + ' of Token audited...')
        entropy = calcEntropy(value)
        verbout(GR, 'Calculating Entropy...')
        verbout(color.BLUE, ' [+] Entropy Calculated: ' + color.CYAN + str(entropy))
        if entropy >= min_entropy:
            verbout(color.ORANGE, ' [+] Anti-CSRF Token Entropy Calculated is ' + color.BY + ' GREATER than 2.4 ' + color.END + '... ')
            print(color.GREEN + ' [+] Endpoint ' + color.BG + ' PROBABLY NOT VULNERABLE ' + color.END + color.GREEN + ' to CSRF Attacks...')
            print(color.GREEN + ' [!] CSRF Mitigation Method: ' + color.BG + ' High Entropy Anti-CSRF Tokens ' + color.END)
            NovulLogger(url, 'High Entropy Anti-CSRF Tokens.')
            found = 0x01
        else:
            verbout(color.RED, ' [-] Anti-CSRF Token Entropy Calculated is ' + color.BY + ' LESS than 2.4 ' + color.END + '... ')
            print(color.RED + ' [-] Endpoint likely ' + color.BR + ' VULNERABLE ' + color.END + color.RED + ' to CSRF Attacks inspite of CSRF Tokens...')
            print(color.RED + ' [!] Vulnerability Type: ' + color.BR + ' Low Entropy Anti-CSRF Tokens ' + color.END)
            VulnLogger(url, 'Low Entropy Anti-CSRF Tokens.', 'Token: ' + value)
    # If no token passed the audits, print a PoC block for the form.
    if found == 0x00:
        if m_name:
            print(color.RED + '\n +---------+')
            print(color.RED + ' |   PoC   |')
            print(color.RED + ' +---------+\n')
            print(color.BLUE + ' [+] URL : ' + color.CYAN + url)
            print(color.CYAN + ' [+] Name : ' + color.ORANGE + m_name)
            print(color.GREEN + ' [+] Action : ' + color.ORANGE + m_action)
        else:  # the form has no name attribute
            print(color.RED + '\n +---------+')
            print(color.RED + ' |   PoC   |')
            print(color.RED + ' +---------+\n')
            print(color.BLUE + ' [+] URL : ' + color.CYAN + url)
            print(color.GREEN + ' [+] Action : ' + color.ORANGE + m_action)
        # Print out the params
        print(color.ORANGE + ' [+] Query : ' + color.GREY + urllib.parse.urlencode(req))
        print('')
    return (_q, para)  # Return the query paramter and anti-csrf token
def Token(req, headers):
    '''
    Check whether Anti-CSRF Tokens are present in the request.

    Searches the POST parameters for common token names first, then the
    response headers for common token header names.

    :param req: dict of POST parameters
    :param headers: response headers (mapping)
    :return: (query, param) on success, (None, None) when no token found.
    '''
    verbout(color.RED, '\n +---------------------------+')
    verbout(color.RED, ' |   Anti-CSRF Token Check   |')
    verbout(color.RED, ' +---------------------------+\n')
    param = ''  # Initializing param
    query = ''
    found = False
    # First lets have a look at config.py and see if its set
    if config.TOKEN_CHECKS:
        verbout(O, 'Parsing request for detecting anti-csrf tokens...')
        try:
            # Encode and unquote the request, then split into key=value pairs
            con = unquote(urlencode(req)).split('&')
            for c in con:
                for name in COMMON_CSRF_NAMES:  # Iterate over the list
                    qu = c.split('=')
                    # Search if the token is there in request...
                    if name.lower() in qu[0].lower():
                        verbout(color.GREEN, ' [+] The form was requested with an ' + color.BG + ' Anti-CSRF Token ' + color.END + color.GREEN + '!')
                        verbout(color.GREY, ' [+] Token Parameter: ' + color.CYAN + qu[0] + '=' + color.ORANGE + qu[1])
                        query, param = qu[0], qu[1]
                        # We are appending the token to a variable for further analysis
                        REQUEST_TOKENS.append(param)
                        found = True
                        break  # Break execution if a Anti-CSRF token is found
            # If we haven't found the Anti-CSRF token in the query,
            # search for it in the headers :)
            if not found:
                for key, value in headers.items():
                    for name in COMMON_CSRF_HEADERS:  # Iterate over the list
                        # Search if the token is there in request...
                        if name.lower() in key.lower():
                            verbout(color.GREEN, ' [+] The form was requested with an ' + color.BG + ' Anti-CSRF Token Header ' + color.END + color.GREEN + '!')
                            # FIX: original printed `qu[0]`/`qu[1]` here — a
                            # stale leftover from the query loop (wrong value,
                            # possible IndexError); log the matched header.
                            verbout(color.GREY, ' [+] Token Parameter: ' + color.CYAN + key + '=' + color.ORANGE + value)
                            query, param = key, value
                            # We are appending the token to a variable for further analysis
                            REQUEST_TOKENS.append(param)
                            break  # Break execution if a Anti-CSRF token is found
        except Exception as e:
            verbout(R, 'Request Parsing Exception!')
            verbout(R, 'Error: ' + e.__str__())
    if param:
        return (query, param)
    verbout(color.ORANGE, ' [-] The form was requested ' + color.RED + ' Without an Anti-CSRF Token ' + color.END + color.ORANGE + '...')
    print(color.RED + ' [-] Endpoint seems ' + color.BR + ' VULNERABLE ' + color.END + color.RED + ' to ' + color.BR + ' POST-Based Request Forgery ' + color.END)
    return (None, None)
def Persistence(url):
    '''
    Test for persistent session cookies: request the url with several
    different User-Agents (always sending the user-supplied cookie) and
    compare the Set-Cookie responses. Identical Set-Cookie values across
    all User-Agents suggest persistent session cookies — a CSRF risk.
    '''
    if COOKIE_VALUE:
        verbout(GR, 'Proceeding to test for ' + color.GREY + 'Cookie Persistence' + color.END + '...')
        time.sleep(0.7)
        # A spread of distinct browser identities to vary the requests.
        user_agents = {
            'Chrome on Windows 8.1':
            'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36',
            'Safari on iOS':
            'Mozilla/5.0 (iPhone; CPU iPhone OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B466 Safari/600.1.4',
            'IE6 on Windows XP':
            'Mozilla/5.0 (Windows; U; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 2.0.50727)',
            'Opera on Windows 10':
            'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36 OPR/43.0.2442.991',
            'Chrome on Android':
            'Mozilla/5.0 (Linux; U; Android 2.3.1; en-us; MID Build/GINGERBREAD) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1'
        }
        verbout(GR, 'Setting custom generic headers...')
        gen_headers = HEADER_VALUES
        # FIX: `resps` was never initialised in the original, so the first
        # append raised NameError and the whole check died.
        resps = []
        for name, agent in user_agents.items():
            verbout(C, 'Using User-Agent : ' + color.CYAN + name)
            verbout(GR, 'Value : ' + color.ORANGE + agent)
            gen_headers['User-Agent'] = agent
            for cookie in COOKIE_VALUE:
                gen_headers['Cookie'] = cookie  # last supplied cookie wins
            verbout(GR, 'Making the request...')
            req = Get(url, headers=gen_headers)
            resps.append(req.headers.get('Set-Cookie'))
        if checkDuplicates(resps):
            verbout(G, 'Set-Cookie header does not change with varied User-Agents...')
            verbout(R, 'Possible persistent session cookies found...')
            print(color.RED + ' [+] Possible CSRF Vulnerability Detected : ' + color.ORANGE + url + '!')
            print(color.ORANGE + ' [!] Possible Vulnerability Type: ' + color.BR + ' Persistent Session Cookies ' + color.END)
        else:
            verbout(G, 'Set-Cookie header changes with varied User-Agents...')
            verbout(R, 'No possible persistent session cookies found...')
            print(color.GREEN + ' [+] Endpoint might be ' + color.BG + ' NOT VULNERABLE ' + color.END + color.GREEN + ' to CSRF attacks!')
            print(color.GREEN + ' [+] Protection Method Detected : ' + color.BG + ' No Persistent Cookies ' + color.END)
    else:
        verbout(R, 'Skipping persistence checks as no cookie value supplied...')
def Engine():  # lets begin it!
    '''
    Crawl-mode scan driver: crawls the target site url by url and runs
    the Referer/Origin, cookie, entropy, tamper and POST-based forgery
    checks against every form discovered on each page.

    NOTE(review): the original file had its indentation collapsed; the
    nesting reproduced here follows the apparent control flow — confirm
    against upstream before relying on exact statement placement.
    '''
    os.system('clear')  # Clear shit from terminal :p
    banner()  # Print the banner
    banabout()  # The second banner
    web = inputin()  # Take the input
    form1 = form10()  # Get the form 1 ready
    form2 = form20()  # Get the form 2 ready
    # For the cookies that we encounter during requests...
    Cookie0 = http.cookiejar.CookieJar()  # First as User1
    Cookie1 = http.cookiejar.CookieJar()  # Then as User2
    resp1 = build_opener(HTTPCookieProcessor(Cookie0))  # Process cookies
    resp2 = build_opener(HTTPCookieProcessor(Cookie1))  # Process cookies
    actionDone = []  # init to the done stuff
    csrf = ''  # no token initialise / invalid token
    ref_detect = 0x00  # Null Char
    ori_detect = 0x00
    init1 = web  # get the starting page
    form = Debugger.Form_Debugger()  # init to the form parser+token generator
    bs1 = BeautifulSoup(form1).findAll('form', action=True)[0]  # make sure the stuff works properly
    bs2 = BeautifulSoup(form2).findAll('form', action=True)[0]  # same as above
    action = init1  # First init
    resp1.open(action)  # Makes request as User2
    resp2.open(action)  # Make request as User1
    verbout(GR, "Initializing crawling and scanning...")
    crawler = Crawler.Handler(init1, resp1)  # Init to the Crawler handler
    try:
        while crawler.noinit():  # Until 0 urls left
            url = next(crawler)  # Go for next!
            print(C + 'Crawling :> ' + color.CYAN + url)  # Display what url its crawling
            try:
                soup = crawler.process(web)  # Start the parser
                if not soup:
                    continue  # Making sure not to end the program yet...
                i = 0  # Set count = 0 (index into this page's forms for the User2 re-fetch below)
                if REFERER_ORIGIN_CHECKS:  # Referer Based Checks if True...
                    verbout(O, 'Checking endpoint request validation via ' + color.GREY + 'Referer' + color.END + ' Checks...')
                    if Referer(url):
                        ref_detect = 0x01
                    verbout(O, 'Confirming the vulnerability...')
                    # We have finished with Referer Based Checks, lets go for Origin Based Ones...
                    verbout(O, 'Confirming endpoint request validation via ' + color.GREY + 'Origin' + color.END + ' Checks...')
                    if Origin(url):
                        ori_detect = 0x01
                if COOKIE_BASED:
                    Cookie(url)
                # Now lets get the forms...
                verbout(O, 'Retrieving all forms on ' + color.GREY + url + color.END + '...')
                for m in Debugger.getAllForms(soup):  # iterating over all forms extracted
                    action = Parser.buildAction(url, m['action'])  # get all forms which have 'action' attribute
                    if not action in actionDone and action != '':  # if url returned is not a null value nor duplicate...
                        # If form submission is kept to True
                        if FORM_SUBMISSION:
                            try:
                                result = form.prepareFormInputs(m)  # prepare inputs
                                r1 = Post(url, action, result).text  # make request with token values generated as user1
                                result = form.prepareFormInputs(m)  # prepare the input types
                                r2 = Post(url, action, result).text  # again make request with token values generated as user2
                                # Go for token based entropy checks...
                                try:
                                    if m['name']:
                                        query, token = Entropy(result, url, m['action'], m['name'])
                                except KeyError:  # form has no 'name' attribute
                                    query, token = Entropy(result, url, m['action'])
                                # Go for token parameter tamper checks.
                                if (query and token):
                                    Tamper(url, action, result, r2, query, token)
                                o2 = resp2.open(url).read()  # make request as user2
                                try:
                                    form2 = Debugger.getAllForms(BeautifulSoup(o2))[i]  # user2 gets his form
                                except IndexError:  # user2's page has fewer forms than user1's
                                    verbout(R, 'Form Error')
                                    continue  # making sure program won't end here (dirty fix :( )
                                verbout(GR, 'Preparing form inputs...')
                                contents2 = form.prepareFormInputs(form2)  # prepare for form 2 as user2
                                r3 = Post(url, action, contents2).text  # make request as user3 with user2's form
                                if POST_BASED:
                                    try:
                                        if m['name']:
                                            PostBased(url, r1, r2, r3, m['action'], result, m['name'])
                                    except KeyError:  # form has no 'name' attribute
                                        PostBased(url, r1, r2, r3, m['action'], result)
                            except HTTPError as msg:  # if runtime exception...
                                verbout(R, 'Exception : ' + msg.__str__())  # again exception :(
                        actionDone.append(action)  # add the stuff done
                    i += 1  # Increase user iteration
            except URLError:  # if again...
                verbout(R, 'Exception at : ' + url)  # again exception -_-
                time.sleep(0.4)
                verbout(O, 'Moving on...')
                continue  # make sure it doesn't stop
        print('\n' + G + "Scan completed!" + '\n')
        Analysis()  # For Post Scan Analysis
    # This error usually happens when some sites are protected by some load balancer
    # example Cloudflare. These domains return a 403 forbidden response in various
    # contexts. For example when making reverse DNS queries.
    except HTTPError as e:
        if str(e.code) == '403':
            verbout(R, 'HTTP Authentication Error!')
            verbout(R, 'Error Code : ' + O + str(e.code))
            quit()
    except KeyboardInterrupt:  # incase user wants to exit ;-; (while crawling)
        verbout(R, 'User Interrupt!')
        time.sleep(1.5)
        Analysis()  # For Post scan Analysis
        print(R + 'Aborted!')  # say goodbye
        quit()
def Encoding(val): verbout(G, 'Proceeding to detect encoding of Anti-CSRF Token...')
def Origin(url):
    """
    Check if the remote web application verifies the Origin before
    processing the HTTP request.

    Makes one plain request and one with a forged Origin header (plus any
    user-supplied cookies) and compares the response body lengths. Equal
    lengths suggest the Origin is not validated.

    :param url: the endpoint to probe
    :return: True when Origin validation appears present, False otherwise.
    """
    # Make the request normally and get content
    verbout(O, 'Making request on normal basis...')
    req0x01 = Get(url)
    # Set a fake Origin along with UA (pretending to be a
    # legitimate request from a browser)
    verbout(GR, 'Setting generic headers...')
    gen_headers = HEADER_VALUES
    gen_headers['Origin'] = ORIGIN_URL
    # We put the cookie in request, if cookie supplied :D
    if COOKIE_VALUE:
        for cookie in COOKIE_VALUE:
            gen_headers['Cookie'] = cookie  # last supplied cookie wins
    # Make the request with different Origin header and get the content
    verbout(O, 'Making request with tampered headers...')
    req0x02 = Get(url, headers=gen_headers)
    # Comparing the length of the requests' responses. If both content
    # lengths are same, then the site actually does not validate Origin
    # before processing the HTTP request, which makes the site more
    # vulnerable to CSRF attacks.
    #
    # IMPORTANT NOTE: checking the Origin header does NOT protect the
    # application against all cases of CSRF, but it's a very good first
    # step. Exploiting a CSRF in an application protected this way would
    # require other vulnerabilities (XSS, open redirects) on the domain.
    #
    # TODO: This algorithm has lots of room for improvement
    if len(req0x01.content) != len(req0x02.content):
        # FIX: corrected 'Endoint' -> 'Endpoint' in the message below.
        verbout(color.GREEN, ' [+] Endpoint ' + color.ORANGE + 'Origin Validation' + color.GREEN + ' Present!')
        print(color.GREEN + ' [-] Heuristics reveal endpoint might be ' + color.BG + ' NOT VULNERABLE ' + color.END + '...')
        print(color.ORANGE + ' [+] Mitigation Method: ' + color.BG + ' Origin Based Request Validation ' + color.END)
        return True
    else:
        verbout(R, 'Endpoint ' + color.ORANGE + 'Origin Validation Not Present' + color.END + '!')
        verbout(R, 'Heuristics reveal endpoint might be ' + color.BY + ' VULNERABLE ' + color.END + ' to Origin Based CSRFs...')
        print(color.CYAN + ' [+] Possible CSRF Vulnerability Detected : ' + color.GREY + url + '!')
        print(color.ORANGE + ' [!] Possible Vulnerability Type: ' + color.BY + ' Origin Based Request Forgery ' + color.END)
        return False
def randString(): # generate random strings verbout(GR, 'Compiling strings...') return ''.join(Random().sample(string.ascii_letters, TOKEN_GENERATION_LENGTH)) # any 6 chars
def Token(req): ''' This method checks for whether Anti-CSRF Tokens are present in the request. ''' param = '' # Initializing param query = '' # First lets have a look at config.py and see if its set if TOKEN_CHECKS: verbout(O, 'Parsing request for detecting anti-csrf tokens...') try: # Lets check for the request values. But before that lets encode and unquote the request :D con = unquote(urlencode(req)).split('&') for c in con: for name in COMMON_CSRF_NAMES: qu = c.split('=') if qu[0].lower() == name.lower(): verbout( color.GREEN, ' [+] The form was requested with a ' + color.ORANGE + 'Anti-CSRF Token' + color.GREEN + '...') verbout( color.GREY, ' [+] Token Parameter: ' + color.CYAN + qu[0] + '=' + qu[1] + ' ...') query, param = qu[0], qu[1] REQUEST_TOKENS.append( param ) # We are appending the token to a variable for further analysis break # Break execution if a Anti-CSRF token is found except Exception as e: verbout(R, 'Request Parsing Execption!') verbout(R, 'Error: ' + e.__str__()) if param: return query, param verbout( color.RED, ' [-] The form was requested ' + color.BR + ' Without an Anti-CSRF Token ' + color.END + color.RED + '...') print(color.RED + ' [-] Endpoint seems ' + color.BR + ' VULNERABLE ' + color.END + color.RED + ' to ' + color.BR + ' POST-Based Request Forgery ' + color.END) return '', ''
def prepareFormInputs(self, form):
    '''
    Parse specific form input types and generate filler values for them.

    For each supported input kind (text, password, hidden, submit,
    checkbox, radio, textarea, select) the input's own `value` attribute
    is used when present, otherwise a random string is generated.

    :param form: a BeautifulSoup form element
    :return: dict mapping input names to their (bytes or str) values
    '''
    verbout(O, 'Crafting inputs as form type...')
    # NOTE(review): `input` shadows the builtin; kept as-is in this
    # documentation-only pass.
    input = {}
    verbout(O, 'Processing ' + color.BOLD + '<input type="text" name="...')
    # get name type inputs
    for m in form.findAll('input', {'name': True, 'type': 'text'}):
        if re.search(' value=', str(m), re.IGNORECASE):  # Ignore case while searching for a match
            value = m['value'].encode('utf8')  # make sure no encoding errors there
        else:
            value = randString()
        input[m['name']] = value  # assign passed on value
    verbout(O, 'Processing' + color.BOLD + ' <input type="password" name="...')
    # get password inputs
    for m in form.findAll('input', {'name': True, 'type': 'password'}):
        if re.search(' value=', str(m), re.IGNORECASE):  # Ignore case while searching for a match
            value = m['value'].encode('utf8')  # make sure no encoding errors there
        else:
            value = randString()
        input[m['name']] = value  # assign passed on value
    verbout(O, 'Processing' + color.BOLD + ' <input type="hidden" name="...')
    # get hidden input types
    for m in form.findAll('input', {'name': True, 'type': 'hidden'}):
        if re.search(' value=', str(m), re.IGNORECASE):  # Ignore case while searching for a match
            value = m['value'].encode('utf8')  # make sure no encoding errors there
        else:
            value = randString()
        input[m['name']] = value  # assign passed on value
    verbout(O, 'Processing ' + color.BOLD + '<input type="submit" name="...')
    # get submit buttons :D
    for m in form.findAll('input', {'name': True, 'type': 'submit'}):
        if re.search(' value=', str(m), re.IGNORECASE):  # Ignore case while searching for a match
            value = m['value'].encode('utf8')  # make sure no encoding errors there
        else:
            value = randString()
        input[m['name']] = value  # assign passed on value
    verbout(O, 'Processing' + color.BOLD + ' <input type="checkbox" name="...')
    # get checkbox type inputs
    for m in form.findAll('input', {'name': True, 'type': 'checkbox'}):
        if re.search(' value=', str(m), re.IGNORECASE):  # Ignore case while searching for a match
            value = m['value'].encode('utf8')  # make sure no encoding errors there
        else:
            value = randString()  # assign passed on value
        input[m['name']] = value  # assign discovered value
    verbout(O, 'Processing' + color.BOLD + ' <input type="radio" name="...')
    # get radio buttons :D
    listRadio = []
    for m in form.findAll('input', {'name': True, 'type': 'radio'}):
        if (not m['name'] in listRadio) and re.search(' value=', str(m), re.IGNORECASE):  # Ignore case while searching for a match
            listRadio.append(m['name'])
            # NOTE(review): this uses the `value` left over from the
            # previous loops rather than this radio input's own value
            # (and re-encodes it, which fails if it is already bytes) —
            # looks like a latent bug; confirm intent before changing.
            input[m['name']] = value.encode('utf8')  # make sure no encoding errors there
    verbout(O, 'Processing' + color.BOLD + ' <textarea name="...')
    # get textarea input types
    for m in form.findAll('textarea', {'name': True}):
        if len(m.contents) == 0:
            m.contents.append(randString())  # get random strings
        input[m['name']] = m.contents[0].encode('utf8')  # make sure no encoding errors there
    verbout(O, 'Processing' + color.BOLD + ' <select name="...')
    # selection type inputs
    for m in form.findAll('select', {'name': True}):
        if m.findAll('option', value=True):
            name = m['name']  # assign passed on value
            # use the first option's value as the selection
            input[name] = m.findAll('option', value=True)[0]['value'].encode('utf8')  # find forms fields based on value
    verbout(GR, 'Parsing final inputs...')
    return input  # Return the form input types
def Post(url, action, data): ''' The main use of this function is as a Form Requester [POST]. ''' time.sleep(DELAY_VALUE) # If delay param has been supplied verbout( GR, 'Processing the ' + color.GREY + 'POST' + color.END + ' Request...') main_url = urljoin(url, action) # join url and action try: # Make the POST Request. response = requests.post(main_url, headers=headers, data=data, timeout=TIMEOUT_VALUE) if DISPLAY_HEADERS: pheaders(response.headers) return response # read data content except requests.exceptions.HTTPError as e: # if error verbout(R, "HTTP Error : " + main_url) ErrorLogger(main_url, e.__str__()) return None except requests.exceptions.ConnectionError as e: verbout(R, 'Connection Aborted : ' + main_url) ErrorLogger(main_url, e.__str__()) return None except requests.exceptions.ReadTimeout as e: verbout(R, 'Exception at: ' + color.GREY + url) verbout( R, 'Error: Read Timeout. Consider increasing the timeout value via --timeout.' ) ErrorLogger(url, e.__str__()) return None except ValueError as e: # again if valuerror verbout(R, "Value Error : " + main_url) ErrorLogger(main_url, e.__str__()) return None except Exception as e: verbout(R, "Exception Caught: " + e.__str__()) ErrorLogger(main_url, e.__str__()) return None # if at all nothing happens :(
def _signed_diff_lines(base, other):
    """Return only the added/removed ('+' / '-') lines of an ndiff of two bodies.

    Line endings are kept (splitlines(True)) so whitespace-only changes count.
    Context ('  ') and hint ('? ') lines are dropped.
    """
    return [
        n for n in difflib.ndiff(base.splitlines(True), other.splitlines(True))
        # FIX: the original used re.match('\+|-', n) — a non-raw string with an
        # invalid escape sequence (SyntaxWarning on modern Python); a plain
        # startswith check is the correct, faster idiom here.
        if n.startswith(('+', '-'))
    ]


def PostBased(url, r1, r2, r3, m_action, result, genpoc, form, m_name=''):
    '''
    This method is for detecting POST-Based Request Forgeries on basis of
    fuzzy string matching and comparison based on Ratcliff-Obershelp Algorithm.

    r1/r2/r3 are the response bodies obtained for the same form submitted
    with different identities/token values; result is the POST query dict,
    genpoc the generated PoC form.
    '''
    verbout(color.RED, '\n +------------------------------+')
    verbout(color.RED, ' | POST-Based Forgery Check |')
    verbout(color.RED, ' +------------------------------+\n')
    verbout(O, 'Matching response query differences...')
    verbout(O, 'Matching results...')
    # Differences between user1 vs user2 and user1 vs user3 responses.
    result12 = _signed_diff_lines(r1, r2)
    result13 = _signed_diff_lines(r1, r3)
    # Make sure m_action has a / before it (legitimate action).
    if not m_action.startswith('/'):
        m_action = '/' + m_action
    # This logic is based purely on the assumption on the difference of various
    # requests and response body: if the user1/user2 responses differ no more
    # than the user1/user3 ones, token validation is assumed absent.
    # (very basic check)
    #
    # NOTE: The algorithm has lots of scopes of improvement...
    if len(result12) <= len(result13):
        print(color.GREEN + ' [+] CSRF Vulnerability Detected : ' +
              color.ORANGE + url + '!')
        print(color.ORANGE + ' [!] Vulnerability Type: ' + color.BR +
              ' POST-Based Request Forgery ' + color.END)
        VulnLogger(
            url, 'POST-Based Request Forgery on Forms.',
            '[i] Form: ' + form.__str__() + '\n[i] POST Query: ' +
            result.__str__() + '\n')
        time.sleep(0.3)
        verbout(O, 'PoC of response and request...')
        # The two original branches were identical except for the Name line;
        # they are merged here with a single conditional.
        print(color.RED + '\n +-----------------+')
        print(color.RED + ' | Request PoC |')
        print(color.RED + ' +-----------------+\n')
        print(color.BLUE + ' [+] URL : ' + color.CYAN + url)  # url part
        if m_name:
            print(color.CYAN + ' [+] Name : ' + color.ORANGE + m_name)  # name
        if m_action.count('/') > 1:
            print(color.GREEN + ' [+] Action : ' + color.END + '/' +
                  m_action.rsplit('/', 1)[1])  # action
        else:
            print(color.GREEN + ' [+] Action : ' + color.END + m_action)  # action
        print(color.ORANGE + ' [+] POST Query : ' + color.GREY +
              urlencode(result).strip())
        # If option --skip-poc hasn't been supplied...
        if POC_GENERATION:
            if GEN_MALICIOUS:
                # Generates a malicious CSRF form
                GenMalicious(url, genpoc.__str__())
            else:
                # Generates a normal PoC
                GenNormalPoC(url, genpoc.__str__())
def _scan_samesite(head, found_msg):
    """Scan response headers for cookies and report their SameSite status.

    Returns 0x01 when a SameSite flag is found on a received cookie,
    0x02 when a cookie header is present WITHOUT a SameSite flag, and
    0x00 when no cookie header was seen at all.
    """
    flag = 0x00
    for h in head:
        # Matches both 'Cookie' and 'Set-Cookie' header names.
        if 'cookie' in h.lower():
            verbout(G, 'Found cookie header value...')
            cookieval = head[h]
            verbout(color.ORANGE,
                    ' [+] Cookie Received: ' + color.CYAN + str(cookieval))
            verbout(GR, 'Examining Cookie...')
            for q in cookieval.split(';'):
                if search('SameSite', q, I):
                    verbout(G, found_msg)
                    flag = 0x01
                    # FIX: guard the split — a bare 'SameSite' attribute
                    # (no '=value') would have raised IndexError.
                    if '=' in q:
                        verbout(C, 'Cookie: ' + color.ORANGE +
                                q.split('=')[1].strip())
                    break
            else:
                # Cookie present, but no SameSite flag on it.
                flag = 0x02
    return flag


def SameSite(url):
    '''
    This function parses and verifies the cookies with SameSite Flags.
    '''
    verbout(color.RED, '\n +------------------------------------+')
    verbout(color.RED, ' | Cross Origin Cookie Validation |')
    verbout(color.RED, ' +------------------------------------+\n')
    # Outcome flags for the three checks below.
    foundx1 = 0x00
    foundx2 = 0x00
    foundx3 = 0x00
    # Step 1: First we check that if the server returns any SameSite flag
    # on Cookies with the same Referer as the netloc.
    verbout(color.GREY, ' [+] Lets examine how server reacts to same referer...')
    gen_headers = HEADER_VALUES
    gen_headers['User-Agent'] = USER_AGENT or RandomAgent()
    verbout(GR, 'Setting Referer header same as host...')
    # Setting the netloc as the referer for the first check.
    gen_headers['Referer'] = urlsplit(url).netloc
    if COOKIE_VALUE:
        for cook in COOKIE_VALUE:
            gen_headers['Cookie'] = cook
    getreq = Get(url, headers=gen_headers)  # Making the request
    if getreq is None:
        # FIX: Get() returns None on request failure; bail out instead of
        # crashing on .headers below.
        verbout(R, 'Request failed, skipping SameSite checks...')
        return
    head = getreq.headers
    foundx1 = _scan_samesite(
        head, 'SameSite Flag ' + color.ORANGE + ' detected on cookie!')
    if foundx1 == 0x01:
        verbout(R, ' [+] Endpoint ' + color.ORANGE +
                'SameSite Flag Cookie Validation' + color.END + ' Present!')
    # Step 2: Now we check security mechanisms when the Referer is different,
    # i.e. request originates from a url other than the host.
    # (This time without the Cookie assigned)
    verbout(color.GREY,
            ' [+] Lets examine how server reacts to a fake external referer...')
    gen_headers = HEADER_VALUES
    gen_headers['User-Agent'] = USER_AGENT or RandomAgent()  # Setting user-agents
    # Assigning a fake referer for the second check, but no cookie.
    gen_headers['Referer'] = REFERER_URL
    getreq = Get(url, headers=gen_headers)
    if getreq is None:
        verbout(R, 'Request failed, skipping SameSite checks...')
        return
    head = getreq.headers  # Getting headers from requests
    foundx2 = _scan_samesite(
        head, 'SameSite Flag ' + color.ORANGE + ' detected on cookie!')
    # BUGFIX: this check originally re-tested foundx1 (copy-paste), reporting
    # step 1's outcome instead of this step's.
    if foundx2 == 0x01:
        verbout(R, ' [+] Endpoint ' + color.ORANGE +
                'SameSite Flag Cookie Validation' + color.END + ' Present!')
    # Step 3: And finally comes the most important step. Lets see how the site
    # reacts to a valid cookie (ofc supplied by the user) coming from a
    # different site, i.e Referer set to other than host.
    # This is the most crucial part of the detection.
    #
    # TODO: Improve the logic in detection.
    verbout(
        color.GREY,
        ' [+] Lets examine how server reacts to valid cookie from a different referer...'
    )
    gen_headers = HEADER_VALUES
    gen_headers['User-Agent'] = USER_AGENT or RandomAgent()
    # Assigning a fake referer for third request, this time with cookie ;)
    gen_headers['Referer'] = REFERER_URL
    if COOKIE_VALUE:
        for cook in COOKIE_VALUE:
            gen_headers['Cookie'] = cook
    getreq = Get(url, headers=gen_headers)
    if getreq is None:
        verbout(R, 'Request failed, skipping SameSite checks...')
        return
    head = getreq.headers
    foundx3 = _scan_samesite(
        head, 'SameSite Flag ' + color.ORANGE +
        ' detected on cookie on Cross Origin Request!')
    # BUGFIX: originally tested foundx1 here as well.
    if foundx3 == 0x01:
        verbout(R, 'Endpoint ' + color.ORANGE +
                'SameSite Flag Cookie Validation' + color.END + ' is Present!')
    # Final verdict based on the three flags.
    if (foundx1 == 0x01 and foundx3 == 0x00) and (foundx2 == 0x00 or
                                                  foundx2 == 0x01):
        print(color.GREEN + ' [+] Endpoint ' + color.BG +
              ' NOT VULNERABLE to ANY type of CSRF attacks! ' + color.END)
        print(color.GREEN + ' [+] Protection Method Detected : ' + color.BG +
              ' SameSite Flag on Cookies ' + color.END)
        NovulLogger(url, 'SameSite Flag set on Cookies on Cross-Origin Requests.')
        # If a SameSite flag is set on cookies, then the application is totally
        # fool-proof against CSRF attacks unless there is some XSS stuff on it.
        # So for now the job of this application is done. We need to confirm
        # before we quit.
        oq = input(color.BLUE + ' [+] Continue scanning? (y/N) :> ')
        if oq.lower().startswith('n'):
            sys.exit('\n' + R + 'Shutting down XSRFProbe...\n')
    elif foundx1 == 0x02 and foundx2 == 0x02 and foundx3 == 0x02:
        # BUGFIX (upstream of here, in _scan_samesite): each step now sets its
        # OWN flag to 0x02; originally steps 1 and 2 overwrote foundx3, which
        # made this branch unreachable.
        print(color.GREEN + ' [+] Endpoint ' + color.BG + ' NOT VULNERABLE ' +
              color.END + color.GREEN + ' to CSRF attacks!')
        print(color.GREEN + ' [+] Type: ' + color.BG +
              ' No Cookie Set while Cross Origin Requests ' + color.END)
        NovulLogger(url, 'No cookie set on Cross-Origin Requests.')
    else:
        verbout(R, 'Endpoint ' + color.ORANGE +
                'Cross Origin Cookie Validation' + color.END + ' Not Present!')
        verbout(R, 'Heuristic(s) reveal endpoint might be ' + color.BY +
                ' VULNERABLE ' + color.END + ' to CSRFs...')
        print(color.GREEN + ' [+] Possible CSRF Vulnerability Detected : ' +
              color.ORANGE + url + '!')
        print(color.ORANGE + ' [!] Possible Vulnerability Type: ' + color.BY +
              ' No Cross Origin Cookie Validation Presence ' + color.END)
        VulnLogger(url, 'No Cookie Validation on Cross-Origin Requests.',
                   '[i] Headers: ' + str(head))
def _tamper_worked(resp, body, err_msg):
    """Heuristic check: did the server accept a tampered/removed token?

    Accepts when the response status is 2xx or 3xx AND the response body
    has the same length as the original (untampered) body. Prints err_msg
    when the server answers with a 50x.
    """
    # FIX: Post() returns None on request errors — don't crash on it.
    if resp is None:
        return False
    status = str(resp.status_code)
    if status.startswith('50'):
        verbout(color.RED, err_msg)
    # BUGFIX: the original required startswith('2') AND startswith('3'),
    # which no status code can satisfy — the detection flags could never be
    # set. The intended condition is 2xx OR 3xx.
    return (status.startswith('2') or status.startswith('3')) \
        and len(body) == len(resp.text)


def Tamper(url, action, req, body, query, para):
    '''
    The main idea behind this is to tamper the Anti-CSRF tokens found and
    check the content length for related vulnerabilities.

    req is the POST data dict, body the original response body, query the
    token field name and para the token value.
    '''
    verbout(GR, 'Proceeding for CSRF attack via Anti-CSRF token tampering...')
    # First of all lets get our token from request. Tokens shorter than 4
    # chars cannot be index-tampered at position 3 (FIX: previously an
    # IndexError), so they are skipped the same way as a missing token.
    if para == '' or len(para) < 4:
        return True
    # Converting the token to a raw string, cause some special chars might
    # fu*k with the Shannon Entropy operation.
    value = r'%s' % para
    # [Step 1]: Take the token, replace a char at a specific position and
    # test the response body. Make sure the replacement char differs from
    # the one already at that position.
    verbout(GR, 'Tampering Token by ' + color.GREY + 'index replacement' +
            color.END + '...')
    replacement = 'a' if value[3] != 'a' else 'x'
    tampvalx1 = replaceStrIndex(value, 3, replacement)
    verbout(color.BLUE, ' [+] Original Token: ' + color.CYAN + value)
    verbout(color.BLUE, ' [+] Tampered Token: ' + color.CYAN + tampvalx1)
    # Lets build up the request...
    req[query] = tampvalx1
    # Acceptance heuristic: a 40x/50x means the tamper failed; a 20x/30x with
    # an unchanged content length means the token is not properly validated.
    #
    # NOTE: This algorithm has lots of room for improvement.
    flagx1 = 0x01 if _tamper_worked(
        Post(url, action, req), body,
        ' [+] Token tamper from request causes a 50x Internal Error!') else 0x00
    # [Step 2]: Take the token, remove a char at a specific position and
    # test the response body.
    verbout(GR, 'Tampering Token by ' + color.GREY + 'index removal' +
            color.END + '...')
    tampvalx2 = replaceStrIndex(value, 3)
    verbout(color.BLUE, ' [+] Original Token: ' + color.CYAN + value)
    verbout(color.BLUE, ' [+] Tampered Token: ' + color.CYAN + tampvalx2)
    # Lets build up the request...
    req[query] = tampvalx2
    flagx2 = 0x01 if _tamper_worked(
        Post(url, action, req), body,
        ' [+] Token tamper from request causes a 50x Internal Error!') else 0x00
    # [Step 3]: Remove the whole anti-csrf token and test the response body.
    verbout(GR, 'Tampering Token by ' + color.GREY + 'Token removal' +
            color.END + '...')
    del req[query]
    verbout(G, 'Removed token from request!')
    # Lets build up the request...
    flagx3 = 0x01 if _tamper_worked(
        Post(url, action, req), body,
        ' [+] Token removal from request causes a 50x Internal Error!') else 0x00
    # If any of the forgeries worked...
    if flagx1 == 0x01 or flagx2 == 0x01 or flagx3 == 0x01:
        verbout(color.GREEN, ' [+] The tampered token value works!')
        verbout(color.GREEN, ' [-] The Tampered Anti-CSRF Token requested does NOT return a 40x or 50x response! ')
        print(color.ORANGE + ' [-] Endpoint ' + color.BR +
              ' CONFIRMED VULNERABLE ' + color.END + color.ORANGE +
              ' to Request Forgery Attacks...')
        print(color.ORANGE + ' [!] Vulnerability Type: ' + color.BG +
              ' Non-Unique Anti-CSRF Tokens in Requests ' + color.END)
    else:
        print(color.RED + ' [-] The Tampered Anti-CSRF Token requested returns a 40x or 50x response... ')
        print(color.ORANGE + ' [-] Endpoint ' + color.BG + ' NOT VULNERABLE ' +
              color.END + color.ORANGE + ' to CSRF Attacks...')
        print(color.ORANGE + ' [!] CSRF Mitigation Method: ' + color.BG +
              ' Unique Anti-CSRF Tokens ' + color.END)