Example #1
def Cookie(url, request):
    '''
    This function checks the various HTTP cookies and the
    security attributes set on them, to help prevent CSRF attacks.
    '''
    verbout(GR, 'Proceeding for cookie based checks...')
    SameSite(url)
    Persistence(url, request)
Example #2
def buildAction(url, action):
    '''
    Build an action URL from the current location and the
    form's destination (its action attribute).
    '''
    verbout(O, 'Parsing URL parameters...')
    if action and not action.startswith(
            '#'
    ):  # make sure it is not a fragment (eg. http://site.tld/index.php#search)
        return buildUrl(url, action)  # build the full url and return it!
    return url  # return the url itself if buildAction didn't identify the action
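
The fragment check above only makes sense if buildUrl resolves the action against the current page URL. A minimal standalone sketch of that behaviour, using urllib.parse.urljoin as a stand-in for the project's buildUrl helper (an assumption about what it does):

from urllib.parse import urljoin

def build_action(url, action):
    # Skip pure fragments such as http://site.tld/index.php#search
    if action and not action.startswith('#'):
        return urljoin(url, action)  # resolve the (possibly relative) action against the page URL
    return url

print(build_action('http://site.tld/dir/index.php', 'submit.php'))  # http://site.tld/dir/submit.php
print(build_action('http://site.tld/index.php', '#search'))         # http://site.tld/index.php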
Example #3
def Encoding(val):
    '''
    This function is for detecting the encoding type of
            Anti-CSRF tokens based on pre-defined
                    regular expressions.
    '''
    found = 0x00
    txt = None  # ensure a defined value is always returned
    if not val:
        return (found, None)
    verbout(color.RED, '\n +------------------------------+')
    verbout(color.RED, ' |   Token Encoding Detection   |')
    verbout(color.RED, ' +------------------------------+\n')
    verbout(GR, 'Proceeding to detect encoding of Anti-CSRF Token...')
    # So the idea right here is to detect whether the Anti-CSRF tokens
    # are encoded in some form or the other.
    #
    # Often in my experience with web applications, I have found that
    # many Anti-CSRF tokens are simply encoded or hashed values (mostly
    # MD5 or SHA*). In those cases, the tokens tend to follow a specific
    # pattern. For example, suppose every request carries an iteration
    # number: if the previous request's number is 144 and its MD5 hash
    # turns out to be 0a09c8844ba8f0936c20bd791130d6b6, the scheme is not
    # strong at all, since the next request is probably 145 and its token
    # can easily be forged, provided of course that no salt is used.
    #
    # This module aims to automate and simplify the task. ;)
    for h in HASH_DB:
        txt = hashcheck(h[0], h[1], val)
        if txt is not None:
            found = 0x01
            verbout(
                color.RED,
                '\n [+] Anti-CSRF Token is detected to be String Encoded!')
            print(color.GREEN + ' [+] Token Encoding Detected: ' + color.BG +
                  ' ' + txt + ' ' + color.END)
            print(color.ORANGE + ' [-] Endpoint likely ' + color.BR +
                  ' VULNERABLE ' + color.END + color.ORANGE +
                  ' to CSRF Attacks in spite of CSRF Tokens.')
            print(color.ORANGE + ' [!] Vulnerability Type: ' + color.BR +
                  ' String Encoded Anti-CSRF Tokens ' + color.END)
            print(
                color.RED +
                ' [-] The Tokens might be easily Decrypted and can be Forged!')
            break  # Break the execution if token encoding detected
    if found == 0x00:
        print(color.RED + '\n [-] ' + color.BR +
              ' No Token Encoding Detected. ' + color.END,
              end='\n\n')
    sleep(0.8)
    return (found, txt)
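
The comment block above describes tokens that are nothing more than a hash of a predictable counter. A minimal standalone sketch of that idea using only hashlib and re (the HASH_DB table and hashcheck helper used by Encoding are project-specific and not needed here):

import hashlib
import re

def looks_like_md5(token):
    # MD5 digests are exactly 32 hexadecimal characters
    return bool(re.fullmatch(r'[0-9a-fA-F]{32}', token))

def forge_next_token(counter):
    # If the token is merely md5(counter) with no salt, the next token is trivial to guess
    return hashlib.md5(str(counter).encode()).hexdigest()

token = hashlib.md5(b'144').hexdigest()
print(looks_like_md5(token))   # True -> worth testing against known hash formats
print(forge_next_token(145))   # candidate token for the next request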
Example #4
def inputin():
    '''
    This function parses and validates the URL supplied by the user.
    '''
    if SITE_URL:
        web = SITE_URL  # If already assigned
    if 'http' not in web:  # add protocol to site
        web = 'http://' + web
    try:
        web0 = tld.get_fld(web)
    except tld.exceptions.TldDomainNotFound:
        web0 = re.search(IP, web).group(0)
    try:
        print(O + 'Testing site ' + color.GREY + web0 + color.END +
              ' status...')
        socket.gethostbyname(web0)  # test whether site is up or not
        print(color.GREEN + ' [+] Site seems to be up!' + color.END)
    except socket.gaierror:  # if site is down
        print(R + 'Site seems to be down...')
        quit()
    # We'll test for endpoint only when the --crawl isn't supplied.
    if not CRAWL_SITE:
        try:
            print(O + 'Testing ' + color.CYAN +
                  web.split('//')[1].split('/', 1)[1] + color.END +
                  ' endpoint status...')
            requests.get(web)
            print(color.GREEN + ' [+] Endpoint seems to be up!' + color.END)
        except requests.exceptions.MissingSchema as e:
            verbout(R, 'Exception at: ' + color.GREY + web0)
            verbout(R, 'Error: Invalid URL Format')
            ErrorLogger(web0, e.__str__())
            quit()
        except requests.exceptions.HTTPError as e:
            verbout(R, "HTTP Error: " + web0)
            ErrorLogger(web0, e.__str__())
            quit()
        except requests.exceptions.ConnectionError as e:
            verbout(R, 'Connection Aborted: ' + web0)
            ErrorLogger(web0, e.__str__())
            quit()
        except Exception as e:
            verbout(R, "Exception Caught: " + e.__str__())
            ErrorLogger(web0, e.__str__())
            quit()
    if not web0.endswith('/'):
        web0 = web0 + '/'
    if web.split('//')[1] == web0:
        return web, ''
    return (web, web0)
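
A minimal standalone sketch of the same reachability logic (DNS check for the host, then a GET for the endpoint), using only the standard library and requests; SITE_URL, verbout and the colour constants from the project are omitted:

import socket
import requests
from urllib.parse import urlparse

def check_site(web):
    if not web.startswith('http'):
        web = 'http://' + web
    host = urlparse(web).hostname
    try:
        socket.gethostbyname(host)     # DNS resolves -> "site seems to be up"
    except socket.gaierror:
        raise SystemExit('Site seems to be down...')
    try:
        requests.get(web, timeout=10)  # endpoint responds -> "endpoint seems to be up"
    except requests.exceptions.RequestException as e:
        raise SystemExit('Endpoint error: %s' % e)
    return web, host

# check_site('example.com')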
Example #5
def pheaders(tup):
    '''
    This function prints out the headers received in
    the response of a request.
    '''
    verbout(GR, 'Receiving headers...\n')
    verbout(
        color.GREY, '  ' + color.UNDERLINE + 'HEADERS' + color.END +
        color.GREY + ':' + '\n')
    for key, val in tup.items():
        verbout('  ', color.CYAN + key + ': ' + color.ORANGE + val)
    verbout('', '')
Example #6
def inputin():
    '''
    This function parses and validates the URL supplied by the user.
    '''
    if SITE_URL:
        web = SITE_URL  # If already assigned
    if not web.endswith('/'):
        web = web + '/'
    if 'http' not in web:  # add protocol to site
        web = 'http://' + web
    try:
        web0 = urlparse(web).netloc
    except Exception:
        web0 = re.search(IP, web).group(0)
    try:
        print(O+'Testing site '+color.CYAN+web0+color.END+' status...')
        requests.get(web)  # test whether site is up or not
        print(color.GREEN+' [+] Site seems to be up!'+color.END)
    except requests.exceptions.RequestException:  # if site is down
        print(R+'Site seems to be down...')
        quit()
    # We'll test for endpoint only when the --crawl isn't supplied.
    if not CRAWL_SITE:
        try:
            print(O+'Testing '+color.CYAN+web.split('//')[1].split('/', 1)[1]+color.END+' endpoint status...')
            requests.get(web, verify=VERIFY_CERT)
            print(color.GREEN+' [+] Endpoint seems to be up!'+color.END)
        except requests.exceptions.RequestException as e:
            verbout(R, 'Endpoint error: '+web.split('//')[1].split('/', 1)[1])
            ErrorLogger(web0, e.__str__())
            quit()
        except Exception as e:
            verbout(R, "Exception Caught: "+e.__str__())
            ErrorLogger(web0, e.__str__())
            quit()
    if not web0.endswith('/'):
        web0 = web0 + '/'
    if web.split('//')[1] == web0:
        return web, ''
    return (web, web0)
Example #7
def GenNormalPoC(action,
                 fields,
                 method='POST',
                 encoding_type='application/x-www-form-urlencoded'):
    """
    Generate a normal CSRF PoC using basic form data
    """
    print(GR + 'Generating normal PoC Form...')
    verbout(color.RED, '\n +---------------------+')
    verbout(color.RED, ' |   Normal Form PoC   |')
    verbout(color.RED, ' +---------------------+\n' + color.CYAN)
    # Main HTML skeleton which we will use to generate the form.
    with tag('html'):
        with tag('title'):
            text('CSRF PoC')
        with tag('body'):
            with tag('h2'):
                text('Your CSRF PoC')
            # Try not to mess with this part. (1)
            with tag('form',
                     id='xsrfprobe_csrfpoc',
                     action=action,
                     enctype=encoding_type,
                     method="POST"):
                for field in literal_eval(fields):
                    with tag('label'):
                        text(field['label'].title())
                    doc.input(name=field['name'],
                              type=field['type'],
                              value=field['value'])
                # Adding the Submit Button
                doc.stag('input', value='Submit', type='submit')
            doc.stag('br')
            # Brand tag :p ...I guess...
            with tag('small'):
                text('(o) This form was generated by ')
                with tag('a', href='https://github.com/0xinfection/xsrfprobe'):
                    text('XSRFProbe')
                text('.')
    content = BeautifulSoup(doc.getvalue(), 'html.parser')
    formPrettify(indentPrettify(content))
    print('')
    # Write out the PoC file...
    if '//' in action:
        splitterfunc = action.split('//', 1)[1].replace('/', '-')
    else:
        splitterfunc = action.replace('/', '-')
    fi = open(OUTPUT_DIR + splitterfunc + '-csrf-poc.html',
              'w+',
              encoding='utf-8')
    fi.write(content.prettify())
    fi.close()
    print(G + 'PoC successfully saved under ' + color.ORANGE + OUTPUT_DIR +
          splitterfunc + '-csrf-poc.html')
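
The doc/tag/text helpers used above follow yattag's context-manager API. A minimal standalone sketch of generating and prettifying a tiny PoC form the same way, assuming yattag and beautifulsoup4 are installed (the target URL and field names are placeholders):

from yattag import Doc
from bs4 import BeautifulSoup

doc, tag, text = Doc().tagtext()
with tag('html'):
    with tag('body'):
        with tag('form', action='http://target.tld/transfer', method='POST'):
            doc.input(name='amount', type='hidden', value='1000')
            doc.stag('input', type='submit', value='Submit')

print(BeautifulSoup(doc.getvalue(), 'html.parser').prettify())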
Example #8
def GenMalicious(action,
                 fields,
                 method='POST',
                 encoding_type='application/x-www-form-urlencoded'):
    """
     Generate a malicious CSRF PoC using basic form data
    """
    print(GR, 'Generating malicious PoC Form...')
    verbout(color.RED, '\n +------------------------+')
    verbout(color.RED, ' |   Malicious Form PoC   |')
    verbout(color.RED, ' +------------------------+\n' + color.CYAN)
    # Main HTML skeleton which we will use to generate the form.
    with tag('html'):
        with tag('title'):
            text('CSRF PoC')
        with tag('body'):
            with tag('script'):
                doc.asis('alert("You have been pwned!!!")')
            # Try not to mess with this part. (1)
            with tag('form',
                     id='xsrfprobe_csrfpoc',
                     action=action,
                     enctype=encoding_type,
                     method="POST"):
                for field in literal_eval(fields):
                    if not field['value']:
                        val = input(C + 'Enter value for form field ' +
                                    color.GREEN + field['name'].title() +
                                    ' :> ' + color.CYAN)
                    else:
                        val = field['value']  # reuse the value already present in the form
                    doc.input(name=field['name'], type='hidden', value=val)
        # The idea behind this is to generate PoC forms not requiring any
        # user interaction. As soon as the page loads, the form will submit automatically.
        with tag('script'):
            # Try not to mess with this part. (2)
            doc.asis('document.getElementById("xsrfprobe_csrfpoc").submit();')
    # Brand tag :p ...I guess...
    doc.asis(
        '<!-- This form was generated by XSRFProbe (https://github.com/0xinfection/xsrfprobe) -->'
    )
    content = BeautifulSoup(doc.getvalue(), 'html.parser')
    formPrettify(indentPrettify(content))
    print('')
    # Write out the PoC file...
    if '//' in action:
        splitterfunc = action.split('//', 1)[1].replace('/', '-')
    else:
        splitterfunc = action.replace('/', '-')
    fi = open(OUTPUT_DIR + splitterfunc + '-malicious-poc.html',
              'w+',
              encoding='utf-8')
    fi.write(content.prettify())
    fi.close()
    print(G + 'PoC successfully saved under ' + color.ORANGE + OUTPUT_DIR +
          splitterfunc + '-malicious-poc.html')
Example #9
def SameSite(url):
    '''
    This function parses and verifies the cookies with
                    SameSite Flags.
    '''
    verbout(color.RED, '\n +------------------------------------+')
    verbout(color.RED, ' |   Cross Origin Cookie Validation   |')
    verbout(color.RED, ' +------------------------------------+\n')
    # Some Flags we'd need later...
    foundx1 = 0x00
    foundx2 = 0x00
    foundx3 = 0x00
    # Step 1: First we check whether the server returns any
    # SameSite flag on Cookies when the Referer is the same as the netloc
    verbout(color.GREY,
            " [+] Let's examine how the server reacts to the same referer...")
    gen_headers = HEADER_VALUES
    gen_headers['User-Agent'] = USER_AGENT or RandomAgent()
    verbout(GR, 'Setting Referer header same as host...')
    # Setting the netloc as the referer for the first check.
    gen_headers['Referer'] = urlsplit(url).netloc
    if COOKIE_VALUE:
        for cook in COOKIE_VALUE:
            gen_headers['Cookie'] = cook
    getreq = Get(url, headers=gen_headers)  # Making the request
    head = getreq.headers
    for h in head:
        #if search('cookie', h, I) or search('set-cookie', h, I):
        if 'Cookie'.lower() in h.lower():
            verbout(G, 'Found cookie header value...')
            cookieval = head[h]
            verbout(color.ORANGE,
                    ' [+] Cookie Received: ' + color.CYAN + str(cookieval))
            m = cookieval.split(';')
            verbout(GR, 'Examining Cookie...')
            for q in m:
                if search('SameSite', q, I):
                    verbout(
                        G, 'SameSite Flag ' + color.ORANGE +
                        ' detected on cookie!')
                    foundx1 = 0x01
                    q = q.split('=')[1].strip()
                    verbout(C, 'Cookie: ' + color.ORANGE + q)
                    break
        else:
            foundx3 = 0x02
    if foundx1 == 0x01:
        verbout(
            R, ' [+] Endpoint ' + color.ORANGE +
            'SameSite Flag Cookie Validation' + color.END + ' Present!')

    # Step 2: Now we check security mechanisms when the Referer is
    # different, i.e. request originates from a different url other
    # than the host. (This time without the Cookie assigned)
    verbout(
        color.GREY,
        " [+] Let's examine how the server reacts to a fake external referer...")
    gen_headers = HEADER_VALUES
    gen_headers['User-Agent'] = USER_AGENT or RandomAgent(
    )  # Setting user-agents
    # Assigning a fake referer for the second check, but no cookie.
    gen_headers['Referer'] = REFERER_URL
    getreq = Get(url, headers=gen_headers)
    head = getreq.headers  # Getting headers from requests
    for h in head:
        # If search('cookie', h, I) or search('set-cookie', h, I):
        if 'Cookie'.lower() in h.lower():
            verbout(G, 'Found cookie header value...')
            cookieval = head[h]
            verbout(color.ORANGE,
                    ' [+] Cookie Received: ' + color.CYAN + str(cookieval))
            m = cookieval.split(';')
            verbout(GR, 'Examining Cookie...')
            for q in m:
                if search('SameSite', q, I):
                    verbout(
                        G, 'SameSite Flag ' + color.ORANGE +
                        ' detected on cookie!')
                    foundx2 = 0x01
                    q = q.split('=')[1].strip()
                    verbout(C, 'Cookie: ' + color.ORANGE + q)
                    break
        else:
            foundx3 = 0x02

    if foundx2 == 0x01:
        verbout(
            R, ' [+] Endpoint ' + color.ORANGE +
            'SameSite Flag Cookie Validation' + color.END + ' Present!')

    # Step 3: And finally comes the most important step. Let's see how
    # the site reacts to a valid cookie (supplied by the user, of course)
    # coming from a different site, i.e. Referer set to other than host.
    # This is the most crucial part of the detection.
    #
    # TODO: Improve the logic in detection.
    verbout(
        color.GREY,
        " [+] Let's examine how the server reacts to a valid cookie from a different referer..."
    )
    gen_headers = HEADER_VALUES
    gen_headers['User-Agent'] = USER_AGENT or RandomAgent()
    # Assigning a fake referer for third request, this time with cookie ;)
    gen_headers['Referer'] = REFERER_URL
    if COOKIE_VALUE:
        for cook in COOKIE_VALUE:
            gen_headers['Cookie'] = cook
    getreq = Get(url, headers=gen_headers)
    head = getreq.headers
    for h in head:
        # if search('cookie', h, I) or search('set-cookie', h, I):
        if 'Cookie'.lower() in h.lower():
            verbout(G, 'Found cookie header value...')
            cookieval = head[h]
            verbout(color.ORANGE,
                    ' [+] Cookie Received: ' + color.CYAN + str(cookieval))
            m = cookieval.split(';')
            verbout(GR, 'Examining Cookie...')
            for q in m:
                if search('samesite', q.lower(), I):
                    verbout(
                        G, 'SameSite Flag ' + color.ORANGE +
                        ' detected on cookie on Cross Origin Request!')
                    foundx3 = 0x01
                    q = q.split('=')[1].strip()
                    verbout(C, 'Cookie: ' + color.ORANGE + q)
                    break
        else:
            foundx3 = 0x02

    if foundx3 == 0x01:
        verbout(
            R, 'Endpoint ' + color.ORANGE + 'SameSite Flag Cookie Validation' +
            color.END + ' is Present!')

    if (foundx1 == 0x01 and foundx3 == 0x00) and (foundx2 == 0x00
                                                  or foundx2 == 0x01):
        print(color.GREEN + ' [+] Endpoint ' + color.BG +
              ' NOT VULNERABLE to ANY type of CSRF attacks! ' + color.END)
        print(color.GREEN + ' [+] Protection Method Detected : ' + color.BG +
              ' SameSite Flag on Cookies ' + color.END)
        NovulLogger(url,
                    'SameSite Flag set on Cookies on Cross-Origin Requests.')
        # If a SameSite flag is set on cookies, then the application is totally fool-proof
        # against CSRF attacks unless there is some XSS stuff on it. So for now the job of
        # this application is done. We need to confirm before we quit.
        oq = input(color.BLUE + ' [+] Continue scanning? (y/N) :> ')
        if oq.lower().startswith('n'):
            sys.exit('\n' + R + 'Shutting down XSRFProbe...\n')
    elif foundx1 == 0x02 and foundx2 == 0x02 and foundx3 == 0x02:
        print(color.GREEN + ' [+] Endpoint ' + color.BG + ' NOT VULNERABLE ' +
              color.END + color.GREEN + ' to CSRF attacks!')
        print(color.GREEN + ' [+] Type: ' + color.BG +
              ' No Cookie Set while Cross Origin Requests ' + color.END)
        NovulLogger(url, 'No cookie set on Cross-Origin Requests.')
    else:
        verbout(
            R, 'Endpoint ' + color.ORANGE + 'Cross Origin Cookie Validation' +
            color.END + ' Not Present!')
        verbout(
            R, 'Heuristic(s) reveal endpoint might be ' + color.BY +
            ' VULNERABLE ' + color.END + ' to CSRFs...')
        print(color.CYAN + ' [+] Possible CSRF Vulnerability Detected : ' +
              color.GREY + url + '!')
        print(color.ORANGE + ' [!] Possible Vulnerability Type: ' + color.BY +
              ' No Cross Origin Cookie Validation Presence ' + color.END)
        VulnLogger(url, 'No Cookie Validation on Cross-Origin Requests.',
                   '[i] Headers: ' + str(head))
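
The core of the three checks above is pulling a SameSite attribute out of a Set-Cookie header. A minimal standalone sketch of that parsing step, with plain string handling and no project helpers:

def samesite_flag(set_cookie_value):
    # Return the SameSite policy ('Lax', 'Strict', 'None') or None if the flag is absent
    for part in set_cookie_value.split(';'):
        name, _, value = part.strip().partition('=')
        if name.lower() == 'samesite':
            return value.strip() or None
    return None

print(samesite_flag('sessionid=abc123; Path=/; HttpOnly; SameSite=Lax'))  # Lax
print(samesite_flag('sessionid=abc123; Path=/; Secure'))                  # None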
Example #10
def randString():  # generate random strings
    verbout(GR, 'Compiling strings...')
    return ''.join(Random().sample(string.ascii_letters,
                                   TOKEN_GENERATION_LENGTH)
                   )  # any chars to be generated as form field inputs
Example #11
    def prepareFormInputs(self, form):
        '''
        This method parses form types and generates strings based
                        on their input types.
        '''
        verbout(O, 'Crafting inputs as form type...')
        cr_input = {}
        totcr = []

        verbout(GR, 'Processing ' + color.BOLD +
                '<input type="text" name="...')  # get name type inputs
        for m in form.findAll('input', {'name': True, 'type': 'text'}):
            try:
                if m['value']:  # Ignore case while searching for a match
                    value = m['value'].encode(
                        'utf8')  # make sure no encoding errors there
            except KeyError:
                value = TEXT_VALUE
            cr_input[m['name']] = value  # assign passed on value
            cr0 = {}
            cr0['type'] = 'text'
            cr0['name'] = m['name']
            cr0['label'] = m['name'].title()
            cr0['value'] = ''
            totcr.append(cr0)

        verbout(GR, 'Processing' + color.BOLD +
                ' <input type="email" name="...')  # get password inputs
        for m in form.findAll('input', {'name': True, 'type': 'email'}):
            value = EMAIL_VALUE
            if m['value']:  # Ignore case while searching for a match
                value = m['value'].encode(
                    'utf8')  # make sure no encoding errors there
            cr_input[m['name']] = value  # assign passed on value
            cr1 = {}
            cr1['type'] = 'email'
            cr1['name'] = m['name']
            cr1['label'] = 'Email'
            cr1['value'] = ''
            totcr.append(cr1)

        verbout(GR, 'Processing' + color.BOLD +
                ' <input type="password" name="...')  # get password inputs
        for m in form.findAll('input', {'name': True, 'type': 'password'}):
            try:  # Ignore case while searching for a match
                if m['value']:
                    value = m['value'].encode(
                        'utf8')  # make sure no encoding errors there
            except KeyError:
                value = randString()
            cr_input[m['name']] = value  # assign passed on value
            cr2 = {}
            cr2['type'] = 'password'
            cr2['name'] = m['name']
            cr2['label'] = 'Password'
            cr2['value'] = ''
            totcr.append(cr2)

        try:
            verbout(
                GR, 'Processing' + color.BOLD +
                ' <input type="hidden" name="...')  # get hidden input types
            for m in form.findAll('input', {'name': True, 'type': 'hidden'}):
                if re.search('value=', m.__str__(), re.IGNORECASE
                             ):  # Ignore case while searching for a match
                    value = m['value']  # make sure no encoding errors there
                else:
                    value = TEXT_VALUE
                cr_input[m['name']] = value  # assign passed on value
                cr3 = {}
                cr3['type'] = 'hidden'
                cr3['name'] = m['name']
                cr3['label'] = ''  # Nothing since its a hidden field
                cr3['value'] = value
                totcr.append(cr3)
        except KeyError:
            cr3['value'] = ''

        verbout(GR, 'Processing ' + color.BOLD +
                '<input type="submit" name="...')  # get submit buttons :D
        for m in form.findAll('input', {'name': True, 'type': 'submit'}):
            if re.search(
                    'value=',
                    str(m).strip(),
                    re.IGNORECASE):  # Ignore case while searching for a match
                value = m['value'].encode(
                    'utf8')  # make sure no encoding errors there
            else:
                value = 'Submit'
            cr_input[m['name']] = value  # assign passed on value

        verbout(
            GR, 'Processing' + color.BOLD +
            ' <input type="checkbox" name="...')  # get checkbox type inputs
        for m in form.findAll('input', {'name': True, 'type': 'checkbox'}):
            if re.search(
                    'value=', m.__str__(),
                    re.IGNORECASE):  # Ignore case while searching for a match
                value = m['value'].encode(
                    'utf8')  # make sure no encoding errors there
            else:
                value = randString()  # assign passed on value
            cr_input[m['name']] = value  # assign discovered value
            cr4 = {}
            cr4['type'] = 'checkbox'
            cr4['name'] = m['name']
            cr4['label'] = m['name'].title()
            cr4['value'] = ''
            totcr.append(cr4)

        verbout(GR, 'Processing' + color.BOLD +
                ' <input type="radio" name="...')  # get radio buttons :D
        listRadio = []
        for m in form.findAll('input', {'name': True, 'type': 'radio'}):
            if (not m['name'] in listRadio) and re.search(
                    'value=',
                    str(m).strip(),
                    re.IGNORECASE):  # Ignore case while searching for a match
                listRadio.append(m['name'])
                cr_input[m['name']] = m['value'].encode(
                    'utf8')  # use this radio button's own value attribute
                cr5 = {}
                cr5['type'] = 'radio'
                cr5['name'] = m['name']
                cr5['label'] = m['name'].title()
                cr5['value'] = ''
                totcr.append(cr5)

        verbout(GR, 'Processing' + color.BOLD +
                ' <textarea name="...')  # get textarea input types
        for m in form.findAll('textarea', {'name': True}):
            if len(m.contents) == 0:
                m.contents.append(randString())  # get random strings
            cr_input[m['name']] = m.contents[0].encode(
                'utf8')  # make sure no encoding errors there
            cr6 = {}
            cr6['type'] = 'text'
            cr6['name'] = m['name']
            cr6['label'] = m['name'].title()
            cr6['value'] = ''
            totcr.append(cr6)

        verbout(GR, 'Processing' + color.BOLD +
                ' <select name="...')  # selection type inputs
        for m in form.findAll('select', {'name': True}):
            if m.findAll('option', value=True):
                name = m['name']  # assign passed on value
                cr_input[name] = m.findAll(
                    'option', value=True)[0]['value'].encode(
                        'utf8')  # find forms fields based on value

        verbout(GR, 'Parsing final inputs...')
        return (cr_input, totcr)  # Return the form input types
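
A minimal standalone sketch of the same idea in compressed form: walk a form's named inputs with BeautifulSoup, reuse any value already present and fall back to a placeholder otherwise (beautifulsoup4 only; randString and the *_VALUE constants are project-specific):

from bs4 import BeautifulSoup

FORM = '''
<form action="/login" method="post">
  <input type="text" name="user">
  <input type="password" name="pass">
  <input type="hidden" name="csrf" value="tok123">
  <input type="submit" name="go" value="Login">
</form>
'''

def prepare_inputs(html, placeholder='test'):
    form = BeautifulSoup(html, 'html.parser').find('form')
    data = {}
    for field in form.find_all('input', {'name': True}):
        data[field['name']] = field.get('value', '') or placeholder
    return data

print(prepare_inputs(FORM))  # {'user': 'test', 'pass': 'test', 'csrf': 'tok123', 'go': 'Login'}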
Example #12
def Get(url, headers=headers):
    '''
    The main use of this function is as a
            Url Requester [GET].
    '''
    # We do not verify the request while making GET requests
    time.sleep(DELAY_VALUE)  # We make requests after the time delay
    # Making sure the url does not point to a regular file or an executable
    extension = url.split('.')[-1].lower()
    if extension in FILE_EXTENSIONS or extension in EXECUTABLES:
        FILES_EXEC.append(url)
        verbout(G, 'Found File: ' + color.BLUE + url)
        return None
    try:
        verbout(
            GR,
            'Processing the ' + color.GREY + 'GET' + color.END + ' Request...')
        req = requests.get(url,
                           headers=headers,
                           timeout=TIMEOUT_VALUE,
                           stream=False)
        # Displaying headers if DISPLAY_HEADERS is 'True'
        if DISPLAY_HEADERS:
            pheaders(req.headers)
        # Return the object
        return req
    except requests.exceptions.MissingSchema as e:
        verbout(R, 'Exception at: ' + color.GREY + url)
        verbout(R, 'Error: Invalid URL Format')
        ErrorLogger(url, e.__str__())
        return None
    except requests.exceptions.ReadTimeout as e:
        verbout(R, 'Exception at: ' + color.GREY + url)
        verbout(
            R,
            'Error: Read Timeout. Consider increasing the timeout value via --timeout.'
        )
        ErrorLogger(url, e.__str__())
        return None
    except requests.exceptions.HTTPError as e:  # if error
        verbout(R, "HTTP Error Encountered : " + url)
        ErrorLogger(url, e.__str__())
        return None
    except requests.exceptions.ConnectionError as e:
        verbout(R, 'Connection Aborted : ' + url)
        ErrorLogger(url, e.__str__())
        return None
    except Exception as e:
        verbout(R, "Exception Caught: " + e.__str__())
        ErrorLogger(url, e.__str__())
        return None
Example #13
def Post(url, action, data):
    '''
    The main use of this function is as a
           Form Requester [POST].
    '''
    time.sleep(DELAY_VALUE)  # If delay param has been supplied
    verbout(
        GR,
        'Processing the ' + color.GREY + 'POST' + color.END + ' Request...')
    main_url = urljoin(url, action)  # join url and action
    try:
        # Make the POST Request.
        response = requests.post(main_url,
                                 headers=headers,
                                 data=data,
                                 timeout=TIMEOUT_VALUE)
        if DISPLAY_HEADERS:
            pheaders(response.headers)
        return response  # read data content
    except requests.exceptions.HTTPError as e:  # if error
        verbout(R, "HTTP Error : " + main_url)
        ErrorLogger(main_url, e.__str__())
        return None
    except requests.exceptions.ConnectionError as e:
        verbout(R, 'Connection Aborted : ' + main_url)
        ErrorLogger(main_url, e.__str__())
        return None
    except requests.exceptions.ReadTimeout as e:
        verbout(R, 'Exception at: ' + color.GREY + url)
        verbout(
            R,
            'Error: Read Timeout. Consider increasing the timeout value via --timeout.'
        )
        ErrorLogger(url, e.__str__())
        return None
    except ValueError as e:  # if a ValueError is raised
        verbout(R, "Value Error : " + main_url)
        ErrorLogger(main_url, e.__str__())
        return None
    except Exception as e:
        verbout(R, "Exception Caught: " + e.__str__())
        ErrorLogger(main_url, e.__str__())
        return None  # if at all nothing happens :(
Example #14
def Persistence(url, postq):
    '''
    The main idea behind this is to check for Cookie
                    Persistence.
    '''
    verbout(color.RED, '\n +-----------------------------------+')
    verbout(color.RED, ' |   Cookie Persistence Validation   |')
    verbout(color.RED, ' +-----------------------------------+\n')
    # Checking if user has supplied a value.
    verbout(
        GR, 'Proceeding to test for ' + color.GREY + 'Cookie Persistence' +
        color.END + '...')
    time.sleep(0.7)
    found = 0x00
    # Now let the real test begin...
    #
    # [Step 1]: Lets examine now whether cookies set by server are persistent or not.
    # For this we'll have to parse the cookies set by the server and check for the
    # time when the cookie expires. Lets do it!
    #
    # First its time for GET type requests. Lets prepare our request.
    cookies = []
    verbout(
        C, 'Proceeding to test cookie persistence via ' + color.CYAN +
        'Prepared GET Requests' + color.END + '...')
    gen_headers = HEADER_VALUES
    gen_headers[
        'User-Agent'] = 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36'
    if COOKIE_VALUE:
        for cookie in COOKIE_VALUE:
            gen_headers['Cookie'] = cookie
    verbout(GR, 'Making the request...')
    req = Get(url, headers=gen_headers)
    if req.cookies:
        for cook in req.cookies:
            if cook.expires:
                print(color.GREEN +
                      ' [+] Persistent Cookies found in Response Headers!')
                print(color.GREY + ' [+] Cookie: ' + color.CYAN +
                      cook.__str__())
                # cookie.expires returns a timestamp value. I didn't know that. :( Spent over 2+ hours scratching my head
                # over this, until I stumbled upon a stackoverflow answer comment. So to decode this, we'd need to
                # convert it to a human-readable format.
                print(color.GREEN + ' [+] Cookie Expiry Period: ' +
                      color.ORANGE +
                      datetime.fromtimestamp(cook.expires).__str__())
                found = 0x01
                VulnLogger(url, 'Persistent Session Cookies Found.',
                           '[i] Cookie: ' + req.headers.get('Set-Cookie'))
            else:
                NovulLogger(url, 'No Persistent Session Cookies.')
    if found == 0x00:
        verbout(
            R,
            'No persistent session cookies identified on GET Type Requests!')
    verbout(
        C, 'Proceeding to test cookie persistence on ' + color.CYAN +
        'POST Requests' + color.END + '...')
    # Now its time for POST Based requests.
    #
    # NOTE: As a standard practice, every web application should supply a cookie upon a POST query.
    # It might or might not do so for GET requests.
    if postq.cookies:
        for cookie in postq.cookies:
            if cookie.expires:
                print(color.GREEN +
                      ' [+] Persistent Cookies found in Response Headers!')
                print(color.GREY + ' [+] Cookie: ' + color.CYAN +
                      cookie.__str__())
                # So to decode this, we'd need to convert it to a human-readable format.
                print(color.GREEN + ' [+] Cookie Expiry Period: ' +
                      color.ORANGE +
                      datetime.fromtimestamp(cookie.expires).__str__())
                found = 0x01
                VulnLogger(url, 'Persistent Session Cookies Found.',
                           '[i] Cookie: ' + req.headers.get('Set-Cookie'))
                print(color.ORANGE + ' [!] Probable Insecure Practice: ' +
                      color.BY + ' Persistent Session Cookies ' + color.END)
            else:
                NovulLogger(url, 'No Persistent Cookies.')
    if found == 0x00:
        verbout(
            R, 'No persistent session cookies identified upon POST Requests!')
        print(color.ORANGE + ' [+] Endpoint might be ' + color.BY +
              ' NOT VULNERABLE ' + color.END + color.ORANGE +
              ' to CSRF attacks!')
        print(color.ORANGE + ' [+] Detected : ' + color.BY +
              ' No Persistent Cookies ' + color.END)

    # [Step 2]: The idea here is to try to identify cookie persistence on basis of observing
    # variations in cases of using different user-agents. For this test we have chosen 5 different
    # well used and common user-agents (as below) and then we observe the variation of set-cookie
    # header under different conditions.
    #
    # We'll test this method only when we haven't identified requests based on previous algo.
    if found != 0x01:
        verbout(
            C, 'Proceeding to test cookie persistence via ' + color.CYAN +
            'User-Agent Alteration' + color.END + '...')
        user_agents = {
            'Chrome on Windows 8.1':
            'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36',
            'Safari on iOS':
            'Mozilla/5.0 (iPhone; CPU iPhone OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B466 Safari/600.1.4',
            'IE6 on Windows XP':
            'Mozilla/5.0 (Windows; U; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 2.0.50727)',
            'Opera on Windows 10':
            'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36 OPR/43.0.2442.991',
            'Chrome on Android':
            'Mozilla/5.0 (Linux; U; Android 2.3.1; en-us; MID Build/GINGERBREAD) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1'
        }
        verbout(GR, 'Setting custom generic headers...')
        gen_headers = HEADER_VALUES
        resps = []  # collect the Set-Cookie headers observed across user-agents
        for name, agent in user_agents.items():
            verbout(C, 'Using User-Agent : ' + color.CYAN + name)
            verbout(GR, 'Value : ' + color.ORANGE + agent)
            gen_headers['User-Agent'] = agent
            if COOKIE_VALUE:
                for cookie in COOKIE_VALUE:
                    gen_headers['Cookie'] = cookie
            req = Get(url, headers=gen_headers)
            # We append the response's Set-Cookie header only when one is actually supplied.
            if req.headers.get('Set-Cookie'):
                resps.append(req.headers.get('Set-Cookie'))
        if resps:
            if checkDuplicates(resps):
                verbout(
                    G,
                    'Set-Cookie header does not change with varied User-Agents...'
                )
                verbout(color.ORANGE,
                        ' [+] Possible persistent session cookies found...')
                print(color.RED +
                      ' [+] Possible CSRF Vulnerability Detected : ' +
                      color.ORANGE + url + '!')
                print(color.ORANGE + ' [!] Probable Insecure Practice: ' +
                      color.BY + ' Persistent Session Cookies ' + color.END)
                VulnLogger(url, 'Persistent Session Cookies Found.',
                           '[i] Cookie: ' + req.headers.get('Set-Cookie'))
            else:
                verbout(
                    G, 'Set-Cookie header changes with varied User-Agents...')
                verbout(R, 'No possible persistent session cookies found...')
                verbout(
                    color.ORANGE,
                    ' [+] Endpoint ' + color.BY + ' PROBABLY NOT VULNERABLE ' +
                    color.END + color.ORANGE + ' to CSRF attacks!')
                verbout(
                    color.ORANGE,
                    ' [+] Application Practice Method Detected : ' + color.BY +
                    ' No Persistent Cookies ' + color.END)
                NovulLogger(url, 'No Persistent Cookies.')
        else:
            verbout(R, 'No cookies are being set on any requests.')
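
The persistence check itself boils down to "does any response cookie carry an expiry timestamp". A minimal standalone sketch of that step with requests alone (the user-agent rotation, delays and logging above are omitted):

from datetime import datetime
import requests

def persistent_cookies(url):
    resp = requests.get(url, timeout=10)
    found = []
    for cookie in resp.cookies:
        if cookie.expires:  # session cookies carry no expiry timestamp
            found.append((cookie.name, datetime.fromtimestamp(cookie.expires)))
    return found

# for name, expiry in persistent_cookies('http://example.com'):
#     print('%s expires at %s' % (name, expiry))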
Example #15
    def process(self, root):
        # Our first task is to remove urls that aren't to be scanned and have been
        # passed via the --exclude parameter.
        if EXCLUDE_DIRS:
            for link in EXCLUDE_DIRS:
                self.toVisit.remove(link)
        url = self.currentURI  # Main Url (Current)
        try:
            query = Get(url)  # Open it (to check if it exists)
            if query is not None and not str(query.status_code).startswith('40'):  # Avoiding 40x errors
                INTERNAL_URLS.append(url)  # We append it to the list of valid urls
            else:
                if url in self.toVisit:
                    self.toVisit.remove(url)

        except (urllib.error.HTTPError, urllib.error.URLError) as msg:  # In case there is an exception connecting to the URL
            verbout(R, 'HTTP Request Error: '+msg.__str__())
            ErrorLogger(url, msg.__str__())
            if url in self.toVisit:
                self.toVisit.remove(url)  # Remove non-existent / errored urls
            return None

        # Making sure the content type is in HTML format, so that BeautifulSoup
        # can parse it...
        if not query or not re.search('html', query.headers.get('Content-Type', '')):
            return None

        # Just in case there is a redirection, we are supposed to follow it :D
        verbout(GR, 'Making request to new location...')
        if 'Location' in query.headers:
            url = query.headers['Location']
        verbout(O, 'Reading response...')
        response = query.content  # Read the response contents

        try:
            verbout(O, 'Trying to parse response...')
            soup = BeautifulSoup(response)  # Parser init

        except HTMLParser.HTMLParseError:
            verbout(R, 'BeautifulSoup Error: '+url)
            self.visited.append(url)
            if url in self.toVisit:
                self.toVisit.remove(url)
            return None

        for m in soup.findAll('a', href=True):  # find out all href^?://*
            app = ''
            # Accept the href if it is not a javascript: pseudo-link, or if it is an absolute http:// link
            if not re.match(r'javascript:', m['href']) or re.match('http://', m['href']):
                app = Parser.buildUrl(url, m['href'])

            # If we get a valid link
            if app != '' and re.search(root, app):
                # Getting rid of Urls starting with '../../../..'
                while re.search(RID_DOUBLE, app):
                    p = re.compile(RID_COMPILE)
                    app = p.sub('/', app)
                # Getting rid of Urls starting with './'
                p = re.compile(RID_SINGLE)
                app = p.sub('', app)

                # Add new link to the queue only if its pattern has not been added yet
                uriPattern = removeIDs(app)  # remove IDs
                if self.notExist(uriPattern) and app != url:
                    verbout(G, 'Added :> ' + color.BLUE + app)  # display what we have got!
                    self.toVisit.append(app)  # add up urls to visit
                    self.uriPatterns.append(uriPattern)

        self.visited.append(url)  # add urls visited
        return soup  # go back!
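
The '../..' and './' cleanup above can also be done with urllib.parse.urljoin, which already removes dot segments while resolving a relative href against the current page. A minimal standalone sketch of that alternative (the RID_* regexes and Parser.buildUrl are project-specific):

from urllib.parse import urljoin

base = 'http://site.tld/a/b/page.html'
for href in ('../../css/../index.php', './contact.php', 'javascript:void(0)'):
    if href.startswith('javascript:'):
        continue  # skip script pseudo-links, as the crawler does
    print(urljoin(base, href))
# http://site.tld/index.php
# http://site.tld/a/b/contact.php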
Example #16
def Referer(url):
    """
    Check if the remote web application verifies the Referer before
                    processing the HTTP request.
    """
    verbout(color.RED, '\n +--------------------------------------+')
    verbout(color.RED, ' |   Referer Based Request Validation   |')
    verbout(color.RED, ' +--------------------------------------+\n')
    # Make the request normally and get content
    verbout(O, 'Making request on normal basis...')
    req0x01 = Get(url)

    # Set normal headers...
    verbout(GR, 'Setting generic headers...')
    gen_headers = HEADER_VALUES

    # Set a fake Referer along with UA (pretending to be a
    # legitimate request from a browser)
    gen_headers['Referer'] = REFERER_URL

    # We put the cookie in request, if cookie supplied :D
    if COOKIE_VALUE:
        gen_headers['Cookie'] = '; '.join(cookie for cookie in COOKIE_VALUE)  # cookies are separated by '; '

    # Make the request with different referer header and get the content
    verbout(
        O, 'Making request with ' + color.CYAN + 'Tampered Referer Header' +
        color.END + '...')
    req0x02 = Get(url, headers=gen_headers)
    HEADER_VALUES.pop('Referer', None)

    # Comparing the length of the requests' responses. If both content
    # lengths are same, then the site actually does not validate referer
    # before processing the HTTP request which makes the site more
    # vulnerable to CSRF attacks.
    #
    # IMPORTANT NOTE: I'm aware that checking for the referer header does
    # NOT protect the application against all cases of CSRF, but it's a
    # very good first step. In order to exploit a CSRF in an application
    # that protects using this method an intruder would have to identify
    # other vulnerabilities, such as XSS or open redirects, in the same
    # domain.
    #
    # TODO: This algorithm has lots of room for improvement.
    if len(req0x01.content) != len(req0x02.content):
        print(color.GREEN + ' [+] Endpoint ' + color.ORANGE +
              'Referer Validation' + color.GREEN + ' Present!')
        print(color.GREEN + ' [-] Heuristics reveal endpoint might be ' +
              color.BG + ' NOT VULNERABLE ' + color.END + '...')
        print(color.ORANGE + ' [+] Mitigation Method: ' + color.BG +
              ' Referer Based Request Validation ' + color.END)
        NovulLogger(url,
                    'Presence of Referer Header based Request Validation.')
        return True
    else:
        verbout(
            R, 'Endpoint ' + color.RED + 'Referer Validation Not Present' +
            color.END + '!')
        verbout(
            R, 'Heuristics reveal endpoint might be ' + color.BY +
            ' VULNERABLE ' + color.END + ' to Origin Based CSRFs...')
        print(color.CYAN + ' [+] Possible CSRF Vulnerability Detected : ' +
              color.GREY + url + '!')
        print(color.ORANGE + ' [+] Possible Vulnerability Type: ' + color.BY +
              ' No Referer Based Request Validation ' + color.END)
        VulnLogger(url, 'No Referer Header based Request Validation presence.',
                   '[i] Response Headers: ' + str(req0x02.headers))
        return False
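
A minimal standalone sketch of the comparison this check relies on: fetch the same URL once normally and once with a spoofed Referer, then compare body lengths (requests only; the fake referer value here is a placeholder, not the project's REFERER_URL):

import requests

def referer_validated(url, fake_referer='http://attacker.example/'):
    normal = requests.get(url, timeout=10)
    spoofed = requests.get(url, headers={'Referer': fake_referer}, timeout=10)
    # Differing body lengths suggest the server treats a foreign Referer differently
    return len(normal.content) != len(spoofed.content)

# print(referer_validated('http://example.com/profile'))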
Example #17
def Token(req, headers):
    '''
    This function checks whether Anti-CSRF tokens are
    present in the request.
    '''
    verbout(color.RED, '\n +---------------------------+')
    verbout(color.RED, ' |   Anti-CSRF Token Check   |')
    verbout(color.RED, ' +---------------------------+\n')
    param = ''  # Initializing param
    query = ''
    found = False
    # First let's have a look at config.py and see if token checks are enabled
    if config.TOKEN_CHECKS:
        verbout(O, 'Parsing request for detecting anti-csrf tokens...')
        try:
            # Lets check for the request values. But before that lets encode and unquote the request :D
            con = unquote(urlencode(req)).split('&')
            for c in con:
                for name in COMMON_CSRF_NAMES:  # Iterate over the list
                    qu = c.split('=')
                    # Search if the token is there in request...
                    if name.lower() in qu[0].lower():
                        verbout(
                            color.GREEN,
                            ' [+] The form was requested with an ' + color.BG +
                            ' Anti-CSRF Token ' + color.END + color.GREEN +
                            '!')
                        verbout(
                            color.GREY, ' [+] Token Parameter: ' + color.CYAN +
                            qu[0] + '=' + color.ORANGE + qu[1])
                        query, param = qu[0], qu[1]
                        # We are appending the token to a variable for further analysis
                        discovered.REQUEST_TOKENS.append(param)
                        found = True
                        break  # Break execution if an Anti-CSRF token is found
            # If we haven't found the Anti-CSRF token in query, we'll search for it in headers :)
            if not found:
                for key, value in headers.items():
                    for name in COMMON_CSRF_HEADERS:  # Iterate over the list
                        # Search if the token is there in request...
                        if name.lower() in key.lower():
                            verbout(
                                color.GREEN,
                                ' [+] The form was requested with an ' +
                                color.BG + ' Anti-CSRF Token Header ' +
                                color.END + color.GREEN + '!')
                            verbout(
                                color.GREY,
                                ' [+] Token Parameter: ' + color.CYAN + key +
                                '=' + color.ORANGE + value)
                            query, param = key, value
                            # We are appending the token to a variable for further analysis
                            discovered.REQUEST_TOKENS.append(param)
                            break  # Break execution if an Anti-CSRF token is found
        except Exception as e:
            verbout(R, 'Request Parsing Exception!')
            verbout(R, 'Error: ' + e.__str__())
        if param:
            return (query, param)
        verbout(
            color.ORANGE, ' [-] The form was requested ' + color.RED +
            ' Without an Anti-CSRF Token ' + color.END + color.ORANGE + '...')
        print(color.RED + ' [-] Endpoint seems ' + color.BR + ' VULNERABLE ' +
              color.END + color.RED + ' to ' + color.BR +
              ' POST-Based Request Forgery ' + color.END)
        return (None, None)
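
A minimal standalone sketch of the parameter scan performed above, with a tiny stand-in for the project's COMMON_CSRF_NAMES list (the real list is much longer):

from urllib.parse import urlencode, unquote

COMMON_CSRF_NAMES = ('csrf', 'xsrf', 'token', 'authenticity_token')

def find_csrf_param(params):
    query = unquote(urlencode(params))
    for pair in query.split('&'):
        name, _, value = pair.partition('=')
        if any(tok in name.lower() for tok in COMMON_CSRF_NAMES):
            return name, value
    return None, None

print(find_csrf_param({'user': 'bob', 'csrf_token': 'deadbeef'}))  # ('csrf_token', 'deadbeef')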
Example #18
def Analysis():
    '''
    The main idea behind this is to observe and analyse
    the patterns in which the CSRF tokens are
    generated by the server.
    '''
    ctr = 0  # Counter variable set to 0
    # Checking if the number of gathered tokens is greater than 1
    if len(REQUEST_TOKENS) > 1:
        verbout(color.RED, '\n +--------------+')
        verbout(color.RED, ' |   Analysis   |')
        verbout(color.RED, ' +--------------+\n')
        print(GR + 'Proceeding for post-scan analysis of tokens gathered...')
        verbout(
            G, 'A total of %s tokens were discovered during the scan' %
            (len(REQUEST_TOKENS)))
        # The idea behind this is to generate all possible combinations (not
        # considering permutations) from the given list of discovered tokens
        # and infer the anti-CSRF token generation pattern.
        for tokenx1, tokenx2 in itertools.combinations(REQUEST_TOKENS, 2):
            try:
                verbout(
                    GR,
                    'Analysing 2 Anti-CSRF Tokens from gathered requests...')
                verbout(color.CYAN,
                        ' [+] First Token: ' + color.BLUE + tokenx1)
                verbout(
                    color.ORANGE, ' [+] Shannon Entropy: ' + color.GREEN +
                    '%s' % (calcEntropy(tokenx1)))
                verbout(color.CYAN,
                        ' [+] Second Token: ' + color.BLUE + tokenx2)
                verbout(
                    color.ORANGE, ' [+] Shannon Entropy: ' + color.GREEN +
                    '%s' % (calcEntropy(tokenx2)))
                # Calculating the edit distance via Damerau Levenshtein algorithm
                m = stringdist.rdlevenshtein(tokenx1, tokenx2)
                verbout(
                    color.CYAN, ' [+] Edit Distance Calculated: ' +
                    color.GREY + str(m))
                # Now its time to detect the alignment ratio
                n = stringdist.rdlevenshtein_norm(tokenx1, tokenx2)
                verbout(
                    color.CYAN,
                    ' [+] Alignment Ratio Calculated: ' + color.GREY + str(n))
                # If both tokens have the same length
                if len(tokenx1) == len(tokenx2):
                    verbout(
                        C, 'Token length calculated is same: ' + color.ORANGE +
                        'Each %s bytes' % len(byteString(tokenx1)))
                else:
                    verbout(
                        C, 'Token length calculated is different: ' +
                        color.ORANGE + 'By %s bytes' %
                        (len(byteString(tokenx1)) - len(byteString(tokenx2))))
                time.sleep(0.5)
                # In my experience with web security assessments, the Anti-CSRF token is often
                # composed of two parts: one remains static while the other is dynamic.
                #
                # For example, if the Anti-CSRF token for one request is "837456mzy29jkd911139"
                # and for another it is "837456mzy29jkd337221", the "837456mzy29jkd" part of the
                # token remains the same in both requests.
                #
                # The main idea behind this is to detect the static and dynamic part via DL Algorithm
                # as discussed above by calculating edit distance.
                p = sameSequence(tokenx1, tokenx2)
                tokenx01 = tokenx1.replace(p, '')
                tokenx02 = tokenx2.replace(p, '')
                if n == 0.5 or m == len(tokenx1) / 2:
                    verbout(
                        GR,
                        'The tokens are composed of 2 parts (one static and other dynamic)... '
                    )
                    verbout(
                        C, 'Static Part : ' + color.GREY + p + color.END +
                        ' | Length: ' + color.CYAN + str(len(p)))
                    verbout(
                        O, 'Dynamic Part of Token 0x1: ' + color.GREY +
                        tokenx01 + color.END + ' | Length: ' + color.CYAN +
                        str(len(tokenx01)))
                    verbout(
                        O, 'Dynamic Part of Token 0x2: ' + color.GREY +
                        tokenx02 + color.END + ' | Length: ' + color.CYAN +
                        str(len(tokenx02)))
                    if len(tokenx1) / 2 <= 6:
                        verbout(
                            color.RED,
                            ' [-] Post-Analysis reveals that token might be ' +
                            color.BR + ' VULNERABLE ' + color.END + '!')
                        print(color.RED +
                              ' [+] Possible CSRF Vulnerability Detected!')
                        print(color.ORANGE + ' [!] Vulnerability Type: ' +
                              color.BR + ' Weak Dynamic Part of Tokens ' +
                              color.END)
                        print(color.GREY + ' [+] Tokens can easily be ' +
                              color.RED + 'Forged by Bruteforcing/Guessing' +
                              color.END + '!\n')
                        VulnLogger(
                            'Analysis',
                            'Tokens can easily be Forged by Bruteforcing/Guessing.',
                            '[i] Token 1: ' + tokenx1 + '\n[i] Token 2: ' +
                            tokenx2)
                elif n < 0.5 or m < len(tokenx1) / 2:
                    verbout(
                        R, 'Token distance calculated is ' + color.RED +
                        'less than 0.5!')
                    verbout(
                        C, 'Static Part : ' + color.GREY + p + color.END +
                        ' | Length: ' + color.CYAN + str(len(p)))
                    verbout(
                        O, 'Dynamic Part of Token 0x1: ' + color.GREY +
                        tokenx01 + color.END + ' | Length: ' + color.CYAN +
                        str(len(tokenx01)))
                    verbout(
                        O, 'Dynamic Part of Token 0x2: ' + color.GREY +
                        tokenx02 + color.END + ' | Length: ' + color.CYAN +
                        str(len(tokenx02)))
                    verbout(
                        color.RED,
                        ' [-] Post-Analysis reveals that token might be ' +
                        color.BR + ' VULNERABLE ' + color.END + '!')
                    print(color.GREEN +
                          ' [+] Possible CSRF Vulnerability Detected!')
                    print(color.ORANGE + ' [!] Vulnerability Type: ' +
                          color.BR + ' Weak Dynamic Part of Tokens ' +
                          color.END)
                    print(color.GREY + ' [+] Tokens can easily be ' +
                          color.RED + 'Forged by Bruteforcing/Guessing' +
                          color.END + '!\n')
                    VulnLogger(
                        'Analysis',
                        'Tokens can easily be Forged by Bruteforcing/Guessing.',
                        '[i] Token 1: ' + tokenx1 + '\n[i] Token 2: ' +
                        tokenx2)
                else:
                    verbout(
                        R, 'Token distance calculated is ' + color.GREEN +
                        'greater than 0.5!')
                    verbout(
                        C, 'Static Part : ' + color.GREY + p + color.END +
                        ' | Length: ' + color.CYAN + str(len(p)))
                    verbout(
                        O, 'Dynamic Part of Token 0x1: ' + color.GREY +
                        tokenx01 + color.END + ' | Length: ' + color.CYAN +
                        str(len(tokenx01)))
                    verbout(
                        O, 'Dynamic Part of Token 0x2: ' + color.GREY +
                        tokenx02 + color.END + ' | Length: ' + color.CYAN +
                        str(len(tokenx02)))
                    verbout(
                        color.GREEN,
                        ' [+] Post-Analysis reveals that tokens are ' +
                        color.BG + ' NOT VULNERABLE ' + color.END + '!')
                    print(color.ORANGE + ' [!] Vulnerability Mitigation: ' +
                          color.BG + ' Strong Dynamic Part of Tokens ' +
                          color.END)
                    print(color.GREY + ' [+] Tokens ' + color.GREEN +
                          'Cannot be Forged by Bruteforcing/Guessing' +
                          color.END + '!\n')
                    NovulLogger(
                        'Analysis',
                        'Tokens cannot be Forged by Bruteforcing/Guessing.')
                time.sleep(1)
            except KeyboardInterrupt:
                ctr += 1
                continue
        print(C + 'Post-Scan Analysis Completed!')
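
A minimal standalone sketch of the two measurements this analysis leans on: Shannon entropy of a token and the shared (static) prefix of two tokens. It uses only the standard library, so the project's calcEntropy/sameSequence helpers and the stringdist package are not required; the token values are taken from the comment above:

import math
import os
from collections import Counter

def shannon_entropy(token):
    counts = Counter(token)
    return -sum((n / len(token)) * math.log2(n / len(token)) for n in counts.values())

t1 = '837456mzy29jkd911139'
t2 = '837456mzy29jkd337221'
static = os.path.commonprefix([t1, t2])
print('Entropy: %.3f bits/char' % shannon_entropy(t1))
print('Static part  : %s' % static)                   # 837456mzy29jkd
print('Dynamic parts: %s / %s' % (t1[len(static):], t2[len(static):]))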
Example #19
def Engine():  # lets begin it!

    os.system('clear')  # Clear shit from terminal :p
    banner()  # Print the banner
    banabout()  # The second banner
    web, fld = inputin()  # Take the input
    form1 = testFormx1()  # Get the form 1 ready
    form2 = testFormx2()  # Get the form 2 ready
    # For the cookies that we encounter during requests...
    Cookie0 = http.cookiejar.CookieJar()  # First as User1
    Cookie1 = http.cookiejar.CookieJar()  # Then as User2
    resp1 = build_opener(HTTPCookieProcessor(Cookie0))  # Process cookies
    resp2 = build_opener(HTTPCookieProcessor(Cookie1))  # Process cookies
    actionDone = []  # init to the done stuff
    csrf = ''  # no token initialised / invalid token
    ref_detect = 0x00  # Null char flag
    ori_detect = 0x00  # Null char flag
    form = Debugger.Form_Debugger()  # init to the form parser+token generator
    bs1 = BeautifulSoup(form1).findAll(
        'form', action=True)[0]  # make sure the stuff works properly
    bs2 = BeautifulSoup(form2).findAll('form', action=True)[0]  # same as above
    init1 = web  # First init
    resp1.open(init1)  # Make request as User1
    resp2.open(init1)  # Make request as User2

    # Now there are 2 different modes of scanning and crawling here.
    # 1st -> Testing a single endpoint without the --crawl flag.
    # 2nd -> Testing all endpoints with the --crawl flag.
    try:
        # Implementing the first mode. [NO CRAWL]
        if not CRAWL_SITE:
            url = web
            response = Get(url).text
            try:
                verbout(O, 'Trying to parse response...')
                soup = BeautifulSoup(response)  # Parser init
            except HTMLParser.HTMLParseError:
                verbout(R, 'BeautifulSoup Error: ' + url)
            i = 0  # Init user number
            if REFERER_ORIGIN_CHECKS:
                # Referer Based Checks if True...
                verbout(
                    O, 'Checking endpoint request validation via ' +
                    color.GREY + 'Referer' + color.END + ' Checks...')
                if Referer(url):
                    ref_detect = 0x01
                verbout(O, 'Confirming the vulnerability...')
                # We have finished with Referer Based Checks, lets go for Origin Based Ones...
                verbout(
                    O, 'Confirming endpoint request validation via ' +
                    color.GREY + 'Origin' + color.END + ' Checks...')
                if Origin(url):
                    ori_detect = 0x01
            # Now lets get the forms...
            verbout(
                O, 'Retrieving all forms on ' + color.GREY + url + color.END +
                '...')
            for m in Debugger.getAllForms(
                    soup):  # iterating over all forms extracted
                verbout(O, 'Testing form:\n' + color.CYAN)
                formPrettify(m.prettify())
                verbout('', '')
                FORMS_TESTED.append('(i) ' + url + ':\n\n' + m.prettify() +
                                    '\n')
                try:
                    if m['action']:
                        pass
                except KeyError:
                    m['action'] = '/' + url.rsplit('/', 1)[1]
                    ErrorLogger(url, 'No standard form "action".')
                action = Parser.buildAction(
                    url,
                    m['action'])  # get all forms which have 'action' attribute
                if action not in actionDone and action != '':  # if url returned is not a null value nor duplicate...
                    # If form submission is kept to True
                    if FORM_SUBMISSION:
                        try:
                            # NOTE: Slow connections may cause read timeouts which may result in AttributeError.
                            #
                            # The idea here is to make requests pretending to be 3 different users.
                            # A series of requests will be targeted against the site with different
                            # identities. Refer to the XSRFProbe wiki for more info.
                            result, genpoc = form.prepareFormInputs(
                                m)  # prepare inputs as user 1
                            r1 = Post(
                                url, action, result
                            )  # make request with token values generated as user1
                            result, genpoc = form.prepareFormInputs(
                                m)  # prepare inputs as user 2
                            r2 = Post(
                                url, action, result
                            )  # again make request with token values generated as user2
                            # Go for cookie based checks
                            if COOKIE_BASED:
                                Cookie(url, r1)
                            # Go for token based entropy checks...
                            try:
                                if m['name']:
                                    query, token = Entropy(
                                        result, url, r1.headers, m.prettify(),
                                        m['action'], m['name'])
                            except KeyError:
                                query, token = Entropy(result, url, r1.headers,
                                                       m.prettify(),
                                                       m['action'])
                            # Now its time to detect the encoding type (if any) of the Anti-CSRF token.
                            fnd, detct = Encoding(token)
                            if fnd == 0x01 and detct:
                                VulnLogger(
                                    url,
                                    'Token is a string encoded value which can be probably decrypted.',
                                    '[i] Encoding: ' + detct)
                            else:
                                NovulLogger(
                                    url,
                                    'Anti-CSRF token is not a string encoded value.'
                                )
                            # Go for token parameter tamper checks.
                            if (query and token):
                                txor = Tamper(url, action, result, r2.text,
                                              query, token)
                            o2 = Get(url).text  # make request as user2
                            try:
                                form2 = Debugger.getAllForms(BeautifulSoup(
                                    o2))[i]  # user2 gets his form
                            except IndexError:
                                verbout(R, 'Form Index Error')
                                ErrorLogger(url, 'Form Index Error.')
                                continue  # Making sure program won't end here (dirty fix :( )
                            verbout(GR, 'Preparing form inputs...')
                            contents2, genpoc = form.prepareFormInputs(
                                form2)  # prepare for form 3 as user3
                            r3 = Post(
                                url, action, contents2
                            )  # make request as user3 with user3's form
                            if (POST_BASED) and ((not query) or (txor)):
                                try:
                                    if m['name']:
                                        PostBased(url, r1.text, r2.text,
                                                  r3.text,
                                                  m['action'], result, genpoc,
                                                  m.prettify(), m['name'])
                                except KeyError:
                                    PostBased(url, r1.text, r2.text, r3.text,
                                              m['action'], result, genpoc,
                                              m.prettify())
                            else:
                                print(
                                    color.GREEN +
                                    ' [+] The form was requested with an Anti-CSRF token.'
                                )
                                print(color.GREEN + ' [+] Endpoint ' +
                                      color.BG + ' NOT VULNERABLE ' +
                                      color.END + color.GREEN +
                                      ' to POST-Based CSRF Attacks!')
                                NovulLogger(
                                    url,
                                    'Not vulnerable to POST-Based CSRF Attacks.'
                                )
                        except HTTPError as msg:  # if runtime exception...
                            verbout(R, 'Exception : ' +
                                    msg.__str__())  # again exception :(
                            ErrorLogger(url, msg)
                actionDone.append(action)  # add the stuff done
                i += 1  # Increase user iteration
        else:
            # Implementing the 2nd mode [CRAWLING AND SCANNING].
            verbout(GR, "Initializing crawling and scanning...")
            crawler = Crawler.Handler(init1,
                                      resp1)  # Init to the Crawler handler
            while crawler.noinit():  # Until 0 urls left
                url = next(crawler)  # Go for next!
                print(C + 'Testing :> ' + color.CYAN +
                      url)  # Display what url its crawling
                try:
                    soup = crawler.process(fld)  # Start the parser
                    if not soup:
                        continue  # Making sure not to end the program yet...
                    i = 0  # Set count = 0 (user number 0, which will be subsequently incremented)
                    if REFERER_ORIGIN_CHECKS:
                        # Referer Based Checks if True...
                        verbout(
                            O, 'Checking endpoint request validation via ' +
                            color.GREY + 'Referer' + color.END + ' Checks...')
                        if Referer(url):
                            ref_detect = 0x01
                        verbout(O, 'Confirming the vulnerability...')
                        # We have finished with Referer Based Checks, lets go for Origin Based Ones...
                        verbout(
                            O, 'Confirming endpoint request validation via ' +
                            color.GREY + 'Origin' + color.END + ' Checks...')
                        if Origin(url):
                            ori_detect = 0x01
                    # Now lets get the forms...
                    verbout(
                        O, 'Retrieving all forms on ' + color.GREY + url +
                        color.END + '...')
                    for m in Debugger.getAllForms(
                            soup):  # iterating over all forms extracted
                        FORMS_TESTED.append('(i) ' + url + ':\n\n' +
                                            m.prettify() + '\n')
                        try:
                            if m['action']:
                                pass
                        except KeyError:
                            m['action'] = '/' + url.rsplit('/', 1)[1]
                            ErrorLogger(url, 'No standard "action" attribute.')
                        action = Parser.buildAction(
                            url, m['action']
                        )  # get all forms which have 'action' attribute
                        if action not in actionDone and action != '':  # if url returned is not a null value nor duplicate...
                            # If form submission is kept to True
                            if FORM_SUBMISSION:
                                try:
                                    result, genpoc = form.prepareFormInputs(
                                        m)  # prepare inputs as user 1
                                    r1 = Post(
                                        url, action, result
                                    )  # make request with token values generated as user1
                                    result, genpoc = form.prepareFormInputs(
                                        m)  # prepare inputs as user 2
                                    r2 = Post(
                                        url, action, result
                                    )  # again make request with token values generated as user2
                                    if COOKIE_BASED:
                                        Cookie(url, r1)
                                    # Go for token based entropy checks...
                                    try:
                                        if m['name']:
                                            query, token = Entropy(
                                                result, url, r1.headers,
                                                m.prettify(), m['action'],
                                                m['name'])
                                    except KeyError:
                                        query, token = Entropy(
                                            result, url, r1.headers,
                                            m.prettify(), m['action'])
                                        ErrorLogger(
                                            url, 'No standard form "name".')
                                    # Now its time to detect the encoding type (if any) of the Anti-CSRF token.
                                    fnd, detct = Encoding(token)
                                    if fnd == 0x01 and detct:
                                        VulnLogger(
                                            url,
                                            'String encoded token value. Token might be decrypted.',
                                            '[i] Encoding: ' + detct)
                                    else:
                                        NovulLogger(
                                            url,
                                            'Anti-CSRF token is not a string encoded value.'
                                        )
                                    # Go for token parameter tamper checks.
                                    if (query and token):
                                        txor = Tamper(url, action, result,
                                                      r2.text, query, token)
                                    o2 = Get(url).text  # make request as user2
                                    try:
                                        form2 = Debugger.getAllForms(
                                            BeautifulSoup(o2))[
                                                i]  # user2 gets his form
                                    except IndexError:
                                        verbout(R, 'Form Index Error')
                                        ErrorLogger(url, 'Form Index Error.')
                                        continue  # making sure program won't end here (dirty fix :( )
                                    verbout(GR, 'Preparing form inputs...')
                                    contents2, genpoc = form.prepareFormInputs(
                                        form2)  # prepare for form 3 as user3
                                    r3 = Post(
                                        url, action, contents2
                                    )  # make request as user3 with user3's form
                                    if (POST_BASED) and ((not query) or (txor)):
                                        try:
                                            if m['name']:
                                                PostBased(
                                                    url, r1.text, r2.text,
                                                    r3.text, m['action'],
                                                    result, genpoc,
                                                    m.prettify(), m['name'])
                                        except KeyError:
                                            PostBased(url, r1.text, r2.text,
                                                      r3.text, m['action'],
                                                      result, genpoc,
                                                      m.prettify())
                                    else:
                                        print(
                                            color.GREEN +
                                            ' [+] The form was requested with an Anti-CSRF token.'
                                        )
                                        print(color.GREEN + ' [+] Endpoint ' +
                                              color.BG + ' NOT VULNERABLE ' +
                                              color.END + color.GREEN +
                                              ' to POST-Based CSRF Attacks!')
                                        NovulLogger(
                                            url,
                                            'Not vulnerable to POST-Based CSRF Attacks.'
                                        )
                                except HTTPError as msg:  # if runtime exception...
                                    verbout(
                                        color.RED,
                                        ' [-] Exception : ' + color.END +
                                        msg.__str__())  # again exception :(
                                    ErrorLogger(url, msg)
                        actionDone.append(action)  # add the stuff done
                        i += 1  # Increase user iteration
                except URLError as e:  # if again...
                    verbout(R, 'Exception at : ' + url)  # again exception -_-
                    time.sleep(0.4)
                    verbout(O, 'Moving on...')
                    ErrorLogger(url, e)
                    continue  # make sure it doesn't stop at exceptions
                # This error usually happens when some sites are protected by some load balancer
                # example Cloudflare. These domains return a 403 forbidden response in various
                # contexts. For example when making reverse DNS queries.
                except HTTPError as e:
                    if str(e.code) == '403':
                        verbout(R, 'HTTP Authentication Error!')
                        verbout(R, 'Error Code : ' + O + str(e.code))
                        ErrorLogger(url, e)
                        quit()
        GetLogger(
        )  # The scanning has finished, so now we can log out all the links ;)
        print('\n' + G + "Scan completed!" + '\n')
        Analysis()  # For Post Scan Analysis
    except KeyboardInterrupt as e:  # In case the user wants to exit :') (while crawling)
        verbout(R, 'User Interrupt!')
        time.sleep(1.5)
        Analysis()  # For Post scan Analysis
        print(R + 'Aborted!')  # say goodbye
        ErrorLogger('KeyBoard Interrupt', 'Aborted')
        GetLogger(
        )  # The scanning has interrupted, so now we can log out all the links ;)
        sys.exit(1)
    except Exception as e:
        verbout(R, e.__str__())
        ErrorLogger(url, e)
        GetLogger()
        sys.exit(1)
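Engine() keeps a separate cookie jar per identity so the same form can be fetched and submitted as different users before the entropy, encoding, tamper and POST-based checks run. The sketch below reduces that idea to its core, using two requests.Session objects and a hypothetical token field name; it is illustrative only and does not mirror Engine()'s exact flow.

import re

import requests


def token_bound_to_session(form_url, action_url, token_field='csrf_token'):
    # Return True if a token issued to session A is rejected when replayed
    # by session B, i.e. the Anti-CSRF token is actually tied to the session.
    sess_a = requests.Session()  # "User 1" with its own cookie jar
    sess_b = requests.Session()  # "User 2" with a separate cookie jar

    # Session A fetches the form and extracts its Anti-CSRF token.
    html = sess_a.get(form_url, timeout=10).text
    match = re.search(r'name="%s"\s+value="([^"]+)"' % token_field, html)
    if not match:
        return False  # no token at all, so nothing can be session-bound
    stolen_token = match.group(1)

    # Session B (a different identity) replays a token it never received.
    resp = sess_b.post(action_url, data={token_field: stolen_token}, timeout=10)

    # A 4xx/5xx rejection suggests the token is bound to the issuing session.
    return resp.status_code >= 400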
Example #20
0
def Entropy(req, url, headers, form, m_action, m_name=''):
    """
    This function has the work of comparing and
      calculating Shannon Entropy and related
           POST Based requests' security.
    """
    found = 0x00
    # The minimum length of a csrf token should be 6 bytes.
    min_length = 6

    # I have never seen a CSRF token longer than 256 bytes,
    # so the main concept here is doubling that and checking
    # to make sure we don't check parameters which are
    # files in multipart uploads or stuff like that.
    #
    # Multipart uploads usually have a trailing sequence of
    # characters which could be misunderstood as a CSRF token.
    # This is a very important step with respect to
    # decreasing [[ False Positives ]].
    max_length = 256 * 2

    # Shannon Entropy calculated for a particular CSRF token
    # should be at least 3.0. If the token entropy is less
    # than that, the application request can be easily
    # forged, making the application vulnerable even in
    # the presence of a CSRF token.
    min_entropy = 3.0

    # Check for common CSRF token names
    _q, para = Token(req, headers)
    if _q is None or para is None:
        VulnLogger(
            url, 'Form Requested Without Anti-CSRF Token.',
            '[i] Form Requested: ' + form + '\n[i] Request Query: ' +
            req.__str__())
        return '', ''
    verbout(color.RED, '\n +------------------------------+')
    verbout(color.RED, ' |   Token Strength Detection   |')
    verbout(color.RED, ' +------------------------------+\n')
    for para in REQUEST_TOKENS:
        # Converting the token to a raw string, since some special
        # chars might interfere with the Shannon Entropy operation.
        value = r'%s' % para
        verbout(
            color.CYAN,
            ' [!] Testing Anti-CSRF Token: ' + color.ORANGE + '%s' % (value))
        # Check length
        if len(value) <= min_length:
            print(color.RED + ' [-] CSRF Token Length is 6 bytes or less. ' +
                  color.ORANGE + 'Token value can be guessed/bruteforced...')
            print(color.ORANGE + ' [-] Endpoint likely ' + color.BR +
                  ' VULNERABLE ' + color.END + color.ORANGE +
                  ' to CSRF Attacks...')
            print(color.RED + ' [!] Vulnerability Type: ' + color.BR +
                  ' Very Short/No Anti-CSRF Tokens ' + color.END)
            VulnLogger(url, 'Very Short Anti-CSRF Tokens.', 'Token: ' + value)
        if len(value) >= max_length:
            print(color.ORANGE + ' [+] CSRF Token Length greater than ' +
                  color.CYAN + '512 bytes. ' + color.GREEN +
                  'Token value cannot be guessed/bruteforced...')
            print(color.GREEN + ' [+] Endpoint likely ' + color.BG +
                  ' NOT VULNERABLE ' + color.END + color.GREEN +
                  ' to CSRF Attacks...')
            print(color.GREEN + ' [!] CSRF Mitigation Method: ' + color.BG +
                  ' Long Anti-CSRF Tokens ' + color.END)
            NovulLogger(url, 'Long Anti-CSRF tokens with Good Strength.')
            found = 0x01
        # Checking entropy
        verbout(
            O, 'Proceeding to calculate ' + color.GREY + 'Shannon Entropy' +
            color.END + ' of Token audited...')
        entropy = calcEntropy(value)
        verbout(GR, 'Calculating Entropy...')
        verbout(color.BLUE,
                ' [+] Entropy Calculated: ' + color.CYAN + str(entropy))
        if entropy >= min_entropy:
            verbout(
                color.ORANGE, ' [+] Anti-CSRF Token Entropy Calculated is ' +
                color.BY + ' GREATER than 3.0 ' + color.END + '... ')
            print(color.ORANGE + ' [+] Endpoint ' + color.BY +
                  ' PROBABLY NOT VULNERABLE ' + color.END + color.ORANGE +
                  ' to CSRF Attacks...')
            print(color.ORANGE + ' [!] CSRF Mitigation Method: ' + color.BY +
                  ' High Entropy Anti-CSRF Tokens ' + color.END)
            NovulLogger(url, 'High Entropy Anti-CSRF Tokens.')
            found = 0x01
        else:
            verbout(
                color.RED, ' [-] Anti-CSRF Token Entropy Calculated is ' +
                color.BY + ' LESS than 3.0 ' + color.END + '... ')
            print(color.RED + ' [-] Endpoint likely ' + color.BR +
                  ' VULNERABLE ' + color.END + color.RED +
                  ' to CSRF Attacks in spite of CSRF Tokens...')
            print(color.RED + ' [!] Vulnerability Type: ' + color.BR +
                  ' Low Entropy Anti-CSRF Tokens ' + color.END)
            VulnLogger(url, 'Low Entropy Anti-CSRF Tokens.', 'Token: ' + value)
    if found == 0x00:
        if m_name:
            print(color.RED + '\n +---------+')
            print(color.RED + ' |   PoC   |')
            print(color.RED + ' +---------+\n')
            print(color.BLUE + ' [+] URL : ' + color.CYAN + url)
            print(color.CYAN + ' [+] Name : ' + color.ORANGE + m_name)
            print(color.GREEN + ' [+] Action : ' + color.ORANGE + m_action)
        else:  # if m_name is not there :(
            print(color.RED + '\n +---------+')
            print(color.RED + ' |   PoC   |')
            print(color.RED + ' +---------+\n')
            print(color.BLUE + ' [+] URL : ' + color.CYAN + url)
            print(color.GREEN + ' [+] Action : ' + color.ORANGE + m_action)
        # Print out the params
        print(color.ORANGE + ' [+] Query : ' + color.GREY +
              urllib.parse.urlencode(req))
        print('')
    return (_q, para)  # Return the query parameter and anti-csrf token
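The calcEntropy() helper called above is not shown in this excerpt. A minimal stand-in consistent with how it is used here, assuming the entropy is measured in bits per character so that the 3.0 threshold applies directly, could look like this:

import math
from collections import Counter


def calc_shannon_entropy(token):
    # Shannon entropy H = -sum(p * log2(p)) over the token's characters.
    if not token:
        return 0.0
    length = len(token)
    counts = Counter(token)
    return -sum((c / length) * math.log2(c / length) for c in counts.values())


# A counter-like token scores low; a random-looking hex token scores higher.
print(round(calc_shannon_entropy('000145'), 2))            # 1.79
print(round(calc_shannon_entropy('9f86d081884c7d65'), 2))  # 3.25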
Example #21
0
def PostBased(url, r1, r2, r3, m_action, result, genpoc, form, m_name=''):
    '''
    This method detects POST-Based Request Forgeries on the
        basis of fuzzy string matching and response comparison
            using the Ratcliff-Obershelp algorithm.
    '''
    verbout(color.RED, '\n +------------------------------+')
    verbout(color.RED, ' |   POST-Based Forgery Check   |')
    verbout(color.RED, ' +------------------------------+\n')
    verbout(O, 'Matching response query differences...')
    checkdiffx1 = difflib.ndiff(r1.splitlines(keepends=True), r2.splitlines(keepends=True))  # check the diff noted
    checkdiffx2 = difflib.ndiff(r1.splitlines(keepends=True), r3.splitlines(keepends=True))  # check the diff noted
    result12 = []  # an init
    verbout(O, 'Matching results...')
    for n in checkdiffx1:
        if re.match(r'\+|-', n):  # get regex matching stuff only +/-
            result12.append(n)  # append to existing list
    result13 = []  # an init
    for n in checkdiffx2:
        if re.match(r'\+|-', n):  # get regex matching stuff
            result13.append(n)  # append to existing list

    # This logic is based purely on the differences between the various
    # request and response bodies.
    # If the number of differences in result12 is less than or equal to the
    # number of differences in result13, then we have the vulnerability
    # (a very basic check).
    #
    # NOTE: The algorithm has lots of room for improvement...
    if len(result12) <= len(result13):
        print(color.GREEN+ ' [+] CSRF Vulnerability Detected : '+color.ORANGE+url+'!')
        print(color.ORANGE+' [!] Vulnerability Type: '+color.BR+' POST-Based Request Forgery '+color.END)
        VulnLogger(url, 'POST-Based Request Forgery on Forms.', '[i] Form: '+form.__str__()+'\n[i] POST Query: '+result.__str__()+'\n')
        time.sleep(0.3)
        verbout(O, 'PoC of response and request...')
        if m_name:
            print(color.RED+'\n +-----------------+')
            print(color.RED+' |   Request PoC   |')
            print(color.RED+' +-----------------+\n')
            print(color.BLUE+' [+] URL : ' +color.CYAN+url)  # url part
            print(color.CYAN+' [+] Name : ' +color.ORANGE+m_name)  # name
            if m_action.count('/') > 1:
                print(color.GREEN+' [+] Action : ' +color.END+'/'+m_action.rsplit('/', 1)[1])  # action
            else:
                print(color.GREEN+' [+] Action : ' +color.END+m_action)  # action
        else:  # if m['name'] is not there :(
            print(color.RED+'\n +-----------------+')
            print(color.RED+' |   Request PoC   |')
            print(color.RED+' +-----------------+\n')
            print(color.BLUE+' [+] URL : ' +color.CYAN+url)  # the url
            if m_action.count('/') > 1:
                print(color.GREEN+' [+] Action : ' +color.END+'/'+m_action.rsplit('/', 1)[1])  # action
            else:
                print(color.GREEN+' [+] Action : ' +color.END+m_action)  # action
        print(color.ORANGE+' [+] POST Query : '+color.GREY+ urlencode(result).strip())
        # If option --skip-poc hasn't been supplied...
        if POC_GENERATION:
            # If --malicious has been supplied
            if GEN_MALICIOUS:
                # Generates a malicious CSRF form
                GenMalicious(url, genpoc.__str__())
            else:
                # Generates a normal PoC
                GenNormalPoC(url, genpoc.__str__())
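Stripped of the colored output and PoC generation, PostBased() boils down to counting the changed lines that difflib reports for two pairs of response bodies, flagging the endpoint when a token replay (r2) is answered essentially the same way as the original submission (r1), relative to a fresh-token submission (r3). The isolated heuristic, with hypothetical response bodies, reads roughly as follows; difflib's SequenceMatcher is what implements the Ratcliff-Obershelp matching mentioned in the docstring.

import difflib
import re


def count_changed_lines(body_a, body_b):
    # Count the '+'/'-' lines produced by difflib.ndiff for two bodies.
    diff = difflib.ndiff(body_a.splitlines(keepends=True),
                         body_b.splitlines(keepends=True))
    return sum(1 for line in diff if re.match(r'\+|-', line))


def looks_forgeable(r1, r2, r3):
    # r1: first submission, r2: replay of the same token,
    # r3: submission made with a freshly fetched form/token.
    return count_changed_lines(r1, r2) <= count_changed_lines(r1, r3)


accepted = 'HTTP 200\nProfile updated\n'
rejected = 'HTTP 403\nInvalid CSRF token\n'

# Vulnerable endpoint: replaying the same token is still accepted.
print(looks_forgeable(accepted, accepted, accepted))  # True
# Safer endpoint: the replay is rejected, only the fresh token works.
print(looks_forgeable(accepted, rejected, accepted))  # False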
Example #22
0
def Tamper(url, action, req, body, query, para):
    '''
    The main idea behind this is to tamper with the Anti-CSRF tokens
          found and check the response content length for related
                      vulnerabilities.
    '''
    verbout(color.RED, '\n +---------------------------------------+')
    verbout(color.RED, ' |   Anti-CSRF Token Tamper Validation   |')
    verbout(color.RED, ' +---------------------------------------+\n')
    # Null char flags (hex)
    flagx1, destx1 = 0x00, 0x00
    flagx2, destx2 = 0x00, 0x00
    flagx3, destx3 = 0x00, 0x00
    verbout(GR, 'Proceeding for CSRF attack via Anti-CSRF token tampering...')
    # First of all lets get out token from request
    if para == '':
        return True
    # Converting the token to a raw string, since some special
    # chars might interfere with the operation.
    value = r'%s' % para
    copy = req

    # Alright lets start...
    # [Step 1]: First we take the token and then replace a particular character
    # at a specific position (here at 4th position) and test the response body.
    #
    # Required check for checking if string at that position isn't the
    # same char we are going to replace with.
    verbout(
        GR, 'Tampering Token by ' + color.GREY + 'index replacement' +
        color.END + '...')
    if value[3] != 'a':
        tampvalx1 = replaceStrIndex(value, 3, 'a')
    else:
        tampvalx1 = replaceStrIndex(value, 3, 'x')
    verbout(color.BLUE, ' [+] Original Token: ' + color.CYAN + value)
    verbout(color.BLUE, ' [+] Tampered Token: ' + color.CYAN + tampvalx1)
    # Lets build up the request...
    req[query] = tampvalx1
    resp = Post(url, action, req)

    # If there is a 40x (Client Error) or a 50x (Server Error) response,
    # we assume that the tamper did not work :( But if there is a 20x
    # (Success) or a 30x (Redirection), then we know it worked.
    #
    # Or if the previous request has same content length as this tampered
    # request, then we have the vulnerability.
    #
    # NOTE: This algorithm has lots of room for improvement.
    if str(resp.status_code).startswith('2'): destx1 = 0x01
    if not any(search(s, resp.text, I) for s in TOKEN_ERRORS): destx2 = 0x01
    if len(body) == len(resp.text): destx3 = 0x01
    if ((destx1 == 0x01 and destx2 == 0x01) or (destx3 == 0x01)):
        verbout(
            color.RED, ' [-] Anti-CSRF Token tamper by ' + color.GREY +
            'index replacement' + color.RED + ' returns valid response!')
        flagx1 = 0x01
        VulnLogger(
            url,
            'Anti-CSRF Token tamper by index replacement returns valid response.',
            '[i] POST Query: ' + req.__str__())
    else:
        verbout(
            color.RED,
            ' [+] Token tamper in request does not return valid response!')
        NovulLogger(
            url,
            'Anti-CSRF Token tamper by index replacement does not return valid response.'
        )

    # [Step 2]: Second we take the token and then remove a character
    # at a specific position and test the response body.
    verbout(
        GR, 'Tampering Token by ' + color.GREY + 'index removal' + color.END +
        '...')
    tampvalx2 = replaceStrIndex(value, 3)
    verbout(color.BLUE, ' [+] Original Token: ' + color.CYAN + value)
    verbout(color.BLUE, ' [+] Tampered Token: ' + color.CYAN + tampvalx2)
    # Lets build up the request...
    req[query] = tampvalx2
    resp = Post(url, action, req)

    # If there is a 40x (Client Error) or a 50x (Server Error) response,
    # we assume that the tamper did not work :( But if there is a 20x
    # (Success) or a 30x (Redirection), then we know it worked.
    #
    # NOTE: This algorithm has lots of room for improvement.
    if str(resp.status_code).startswith('2'): destx1 = 0x02
    if not any(search(s, resp.text, I) for s in TOKEN_ERRORS): destx2 = 0x02
    if len(body) == len(resp.text): destx3 = 0x02
    if ((destx1 == 0x02 and destx2 == 0x02) or destx3 == 0x02):
        verbout(
            color.RED, ' [-] Anti-CSRF Token tamper by ' + color.GREY +
            'index removal' + color.RED + ' returns valid response!')
        flagx2 = 0x01
        VulnLogger(
            url,
            'Anti-CSRF Token tamper by index removal returns valid response.',
            '[i] POST Query: ' + req.__str__())
    else:
        verbout(
            color.RED,
            ' [+] Token tamper in request does not return valid response!')
        NovulLogger(
            url,
            'Anti-CSRF Token tamper by index removal does not return valid response.'
        )

    # [Step 3]: Third we take the token and then remove the whole
    # anticsrf token and test the response body.
    verbout(
        GR, 'Tampering Token by ' + color.GREY + 'Token removal' + color.END +
        '...')
    # Removing the anti-csrf token from request
    del req[query]
    verbout(color.GREY, ' [+] Removed token parameter from request!')
    # Lets build up the request...
    resp = Post(url, action, req)

    # If there is a 40x (Client Error) or a 50x (Server Error) response,
    # we assume that the tamper did not work :( But if there is a 20x
    # (Success) or a 30x (Redirection), then we know it worked.
    #
    # NOTE: This algorithm has lots of room for improvement.
    if str(resp.status_code).startswith('2'): destx1 = 0x03
    if not any(search(s, resp.text, I) for s in TOKEN_ERRORS): destx2 = 0x03
    if len(body) == len(resp.text): destx3 = 0x03
    if ((destx1 == 0x03 and destx2 == 0x03) or destx3 == 0x03):
        verbout(
            color.RED, ' [-] Anti-CSRF' + color.GREY + ' Token removal' +
            color.RED + ' returns valid response!')
        flagx3 = 0x01
        VulnLogger(url, 'Anti-CSRF Token removal returns valid response.',
                   '[i] POST Query: ' + req.__str__())
    else:
        verbout(
            color.RED,
            ' [+] Token tamper in request does not return valid response!')
        NovulLogger(url,
                    'Anti-CSRF Token removal does not return valid response.')

    # If any of the forgeries worked...
    if ((flagx1 == 0x01 and flagx2 == 0x01)
            or (flagx1 == 0x01 and flagx3 == 0x01)
            or (flagx2 == 0x01 and flagx3 == 0x01)):
        verbout(
            color.RED, ' [+] The tampered token value works! Endpoint ' +
            color.BR + ' VULNERABLE to Replay Attacks ' + color.END + '!')
        verbout(
            color.ORANGE,
            ' [-] The Tampered Anti-CSRF Token requested does NOT return a 40x or 50x response! '
        )
        print(color.RED + ' [-] Endpoint ' + color.BR +
              ' CONFIRMED VULNERABLE ' + color.END + color.RED +
              ' to Request Forgery Attacks...')
        print(color.ORANGE + ' [!] Vulnerability Type: ' + color.BR +
              ' Non-Unique Anti-CSRF Tokens in Requests ' + color.END + '\n')
        VulnLogger(url,
                   'Anti-CSRF Tokens are not Unique. Token Reuse detected.',
                   '[i] Request: ' + str(copy))
        return True
    else:
        print(
            color.RED +
            ' [-] The Tampered Anti-CSRF Token requested returns a 40x or 50x response... '
        )
        print(color.GREEN + ' [-] Endpoint ' + color.BG + ' NOT VULNERABLE ' +
              color.END + color.ORANGE + ' to CSRF Attacks...')
        print(color.ORANGE + ' [!] CSRF Mitigation Method: ' + color.BG +
              ' Unique Anti-CSRF Tokens ' + color.END + '\n')
        NovulLogger(url, 'Unique Anti-CSRF Tokens. No token reuse.')
        return False
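Both tamper steps above lean on a replaceStrIndex() helper that is defined elsewhere in XSRFProbe and not shown in this excerpt. A stand-in consistent with how it is called here, replacing the character at an index when a replacement is given and dropping it otherwise, might look like this:

def replaceStrIndex(text, index, replacement=''):
    # Replace the character at `index` with `replacement`; with the default
    # empty replacement this simply removes that character.
    if index < 0 or index >= len(text):
        return text  # out-of-range index: leave the token untouched
    return text[:index] + replacement + text[index + 1:]


token = '0a09c8844ba8f0936c20bd791130d6b6'
print(replaceStrIndex(token, 3, 'a'))  # index replacement (Step 1)
print(replaceStrIndex(token, 3))       # index removal (Step 2)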