Example 1
def Origin(url):
    """
    Check if the remote web application verifies the Origin before
                    processing the HTTP request.
    """
    verbout(color.RED, '\n +-------------------------------------+')
    verbout(color.RED, ' |   Origin Based Request Validation   |')
    verbout(color.RED, ' +-------------------------------------+\n')
    # Make the request normally and get content
    verbout(O, 'Making the request normally...')
    req0x01 = Get(url)
    # Set a fake Origin along with UA (pretending to be a
    # legitimate request from a browser)
    verbout(GR, 'Setting generic headers...')
    gen_headers = dict(HEADER_VALUES)  # work on a copy so the global isn't mutated
    gen_headers['Origin'] = ORIGIN_URL

    # Include the cookie in the request, if one was supplied :D
    if COOKIE_VALUE:
        # Multiple cookies in a Cookie header are separated by '; '
        gen_headers['Cookie'] = '; '.join(COOKIE_VALUE)

    # Make the request with different Origin header and get the content
    verbout(O,'Making request with '+color.CYAN+'Tampered Origin Header'+color.END+'...')
    req0x02 = Get(url, headers=gen_headers)

    # Compare the lengths of the two responses. If both content
    # lengths are the same, the site does not validate the Origin
    # header before processing the HTTP request, which leaves it more
    # exposed to CSRF attacks.
    #
    # IMPORTANT NOTE: I'm aware that checking for the Origin header does
    # NOT protect the application against all cases of CSRF, but it's a
    # very good first step. In order to exploit a CSRF in an application
    # that protects using this method an intruder would have to identify
    # other vulnerabilities, such as XSS or open redirects, in the same
    # domain.
    #
    # TODO: This algorithm has lots of room for improvement
    if len(req0x01.content) != len(req0x02.content):
        verbout(color.GREEN,' [+] Endpoint '+color.ORANGE+'Origin Validation'+color.GREEN+' Present!')
        print(color.GREEN+' [-] Heuristics reveal endpoint might be '+color.BG+' NOT VULNERABLE '+color.END+'...')
        print(color.ORANGE+' [+] Mitigation Method: '+color.BG+' Origin Based Request Validation '+color.END+'\n')
        NovulLogger(url, 'Presence of Origin header based request validation.')
        return True
    else:
        verbout(R,'Endpoint '+color.RED+'Origin Validation Not Present'+color.END+'!')
        verbout(R,'Heuristics reveal endpoint might be '+color.BY+' VULNERABLE '+color.END+' to Origin Based CSRFs...')
        print(color.CYAN+ ' [+] Possible CSRF Vulnerability Detected : '+color.GREY+url+'!')
        print(color.ORANGE+' [!] Possible Vulnerability Type: '+color.BY+' No Origin Based Request Validation '+color.END+'\n')
        VulnLogger(url, 'No Origin header based request validation present.', '[i] Response Headers: '+str(req0x02.headers))
        return False
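
For reference, the same length-comparison heuristic can be distilled into a few lines. This is a minimal standalone sketch assuming the requests library; origin_validated and the spoofed origin URL are illustrative names, not part of XSRFProbe.

import requests

def origin_validated(url, fake_origin='https://attacker.example'):
    # Baseline request, then the same request with a spoofed Origin
    baseline = requests.get(url)
    tampered = requests.get(url, headers={'Origin': fake_origin})
    # Differing body lengths suggest the server inspects the Origin
    # header before serving the request; identical lengths suggest it
    # is ignored (a CSRF risk indicator, not proof).
    return len(baseline.content) != len(tampered.content)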
Example 2
def SameSite(url):
    '''
    This function parses and verifies the cookies
    with SameSite flags.
    '''
    verbout(color.RED, '\n +------------------------------------+')
    verbout(color.RED, ' |   Cross Origin Cookie Validation   |')
    verbout(color.RED, ' +------------------------------------+\n')
    # Some Flags we'd need later...
    foundx1 = 0x00
    foundx2 = 0x00
    foundx3 = 0x00
    # Step 1: First we check whether the server returns any SameSite
    # flag on cookies when the Referer is the same as the netloc
    verbout(color.GREY,
            " [+] Let's examine how the server reacts to the same referer...")
    gen_headers = dict(HEADER_VALUES)  # work on a copy so the global isn't mutated
    gen_headers['User-Agent'] = USER_AGENT or RandomAgent()
    verbout(GR, 'Setting Referer header same as host...')
    # Setting the netloc as the referer for the first check.
    gen_headers['Referer'] = urlsplit(url).netloc
    if COOKIE_VALUE:
        # Multiple cookies in a Cookie header are separated by '; '
        gen_headers['Cookie'] = '; '.join(COOKIE_VALUE)
    getreq = Get(url, headers=gen_headers)  # Making the request
    head = getreq.headers
    for h in head:
        if 'cookie' in h.lower():  # matches both Cookie and Set-Cookie
            verbout(G, 'Found cookie header value...')
            cookieval = head[h]
            verbout(color.ORANGE,
                    ' [+] Cookie Received: ' + color.CYAN + str(cookieval))
            m = cookieval.split(';')
            verbout(GR, 'Examining Cookie...')
            for q in m:
                if search('SameSite', q, I):
                    verbout(
                        G, 'SameSite Flag ' + color.ORANGE +
                        ' detected on cookie!')
                    foundx1 = 0x01
                    q = q.split('=')[1].strip()
                    verbout(C, 'Cookie: ' + color.ORANGE + q)
                    break
        elif foundx1 != 0x01:
            # Non-cookie header; tentatively mark this step as cookie-less
            foundx1 = 0x02
    if foundx1 == 0x01:
        verbout(
            R, ' [+] Endpoint ' + color.ORANGE +
            'SameSite Flag Cookie Validation' + color.END + ' Present!')

    # Step 2: Now we check the security mechanisms when the Referer is
    # different, i.e. the request originates from a url other than the
    # host. (This time without the Cookie assigned)
    verbout(
        color.GREY,
        " [+] Let's examine how the server reacts to a fake external referer...")
    gen_headers = dict(HEADER_VALUES)  # fresh copy of the generic headers
    gen_headers['User-Agent'] = USER_AGENT or RandomAgent()  # Setting user-agent
    # Assigning a fake referer for the second check, but no cookie.
    gen_headers['Referer'] = REFERER_URL
    getreq = Get(url, headers=gen_headers)
    head = getreq.headers  # Getting headers from requests
    for h in head:
        if 'cookie' in h.lower():  # matches both Cookie and Set-Cookie
            verbout(G, 'Found cookie header value...')
            cookieval = head[h]
            verbout(color.ORANGE,
                    ' [+] Cookie Received: ' + color.CYAN + str(cookieval))
            m = cookieval.split(';')
            verbout(GR, 'Examining Cookie...')
            for q in m:
                if search('SameSite', q, I):
                    verbout(
                        G, 'SameSite Flag ' + color.ORANGE +
                        ' detected on cookie!')
                    foundx2 = 0x01
                    q = q.split('=')[1].strip()
                    verbout(C, 'Cookie: ' + color.ORANGE + q)
                    break
        elif foundx2 != 0x01:
            # Non-cookie header; tentatively mark this step as cookie-less
            foundx2 = 0x02

    if foundx2 == 0x01:
        verbout(
            R, ' [+] Endpoint ' + color.ORANGE +
            'SameSite Flag Cookie Validation' + color.END + ' Present!')

    # Step 3: Finally comes the most important step: seeing how the
    # site reacts to a valid cookie (supplied by the user) coming from
    # a different site, i.e. with the Referer set to something other
    # than the host. This is the most crucial part of the detection.
    #
    # TODO: Improve the detection logic.
    verbout(
        color.GREY,
        " [+] Let's examine how the server reacts to a valid cookie from a different referer..."
    )
    gen_headers = dict(HEADER_VALUES)  # fresh copy of the generic headers
    gen_headers['User-Agent'] = USER_AGENT or RandomAgent()
    # Assigning a fake referer for third request, this time with cookie ;)
    gen_headers['Referer'] = REFERER_URL
    if COOKIE_VALUE:
        # Multiple cookies in a Cookie header are separated by '; '
        gen_headers['Cookie'] = '; '.join(COOKIE_VALUE)
    getreq = Get(url, headers=gen_headers)
    head = getreq.headers
    for h in head:
        if 'cookie' in h.lower():  # matches both Cookie and Set-Cookie
            verbout(G, 'Found cookie header value...')
            cookieval = head[h]
            verbout(color.ORANGE,
                    ' [+] Cookie Received: ' + color.CYAN + str(cookieval))
            m = cookieval.split(';')
            verbout(GR, 'Examining Cookie...')
            for q in m:
                if search('SameSite', q, I):
                    verbout(
                        G, 'SameSite Flag ' + color.ORANGE +
                        ' detected on cookie on Cross Origin Request!')
                    foundx3 = 0x01
                    q = q.split('=')[1].strip()
                    verbout(C, 'Cookie: ' + color.ORANGE + q)
                    break
        elif foundx3 != 0x01:
            # Non-cookie header; tentatively mark this step as cookie-less
            foundx3 = 0x02

    if foundx3 == 0x01:
        verbout(
            R, 'Endpoint ' + color.ORANGE + 'SameSite Flag Cookie Validation' +
            color.END + ' is Present on Cross-Origin Requests!')

    if foundx1 == 0x01 and foundx3 == 0x01:
        print(color.GREEN + ' [+] Endpoint ' + color.BG +
              ' NOT VULNERABLE to ANY type of CSRF attacks! ' + color.END)
        print(color.GREEN + ' [+] Protection Method Detected : ' + color.BG +
              ' SameSite Flag on Cookies ' + color.END)
        NovulLogger(url,
                    'SameSite Flag set on Cookies on Cross-Origin Requests.')
        # If a SameSite flag is set on cookies, the application is well
        # protected against CSRF attacks unless there is an XSS flaw on the
        # same origin. The job here is done; we confirm before quitting.
        oq = input(color.BLUE + ' [+] Continue scanning? (y/N) :> ')
        if oq.lower().startswith('n'):
            sys.exit('\n' + R + 'Shutting down XSRFProbe...\n')
    elif foundx1 == 0x02 and foundx2 == 0x02 and foundx3 == 0x02:
        print(color.GREEN + ' [+] Endpoint ' + color.BG + ' NOT VULNERABLE ' +
              color.END + color.GREEN + ' to CSRF attacks!')
        print(color.GREEN + ' [+] Type: ' + color.BG +
              ' No Cookie Set while Cross Origin Requests ' + color.END)
        NovulLogger(url, 'No cookie set on Cross-Origin Requests.')
    else:
        verbout(
            R, 'Endpoint ' + color.ORANGE + 'Cross Origin Cookie Validation' +
            color.END + ' Not Present!')
        verbout(
            R, 'Heuristic(s) reveal endpoint might be ' + color.BY +
            ' VULNERABLE ' + color.END + ' to CSRFs...')
        print(color.CYAN + ' [+] Possible CSRF Vulnerability Detected : ' +
              color.GREY + url + '!')
        print(color.ORANGE + ' [!] Possible Vulnerability Type: ' + color.BY +
              ' No Cross Origin Cookie Validation Presence ' + color.END)
        VulnLogger(url, 'No Cookie Validation on Cross-Origin Requests.',
                   '[i] Headers: ' + str(head))
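
Condensed, the SameSite detection above boils down to scanning the response's cookie headers for a SameSite attribute. Below is a minimal sketch assuming the requests library; samesite_flag is an illustrative helper, not part of XSRFProbe.

import re
import requests

def samesite_flag(url, headers=None):
    resp = requests.get(url, headers=headers or {})
    for name, value in resp.headers.items():
        if 'cookie' in name.lower():  # matches Set-Cookie
            for attr in value.split(';'):
                if re.search('samesite', attr, re.I):
                    # Return the declared policy: Lax, Strict or None
                    return attr.split('=')[-1].strip()
    return None  # no SameSite attribute found on any cookie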
Example 3
def Engine():  # let's begin!

    os.system('clear')  # Clear the terminal
    banner()  # Print the banner
    banabout()  # The second banner
    web, fld = inputin()  # Take the input
    form1 = testFormx1()  # Get the form 1 ready
    form2 = testFormx2()  # Get the form 2 ready
    # For the cookies that we encounter during requests...
    Cookie0 = http.cookiejar.CookieJar()  # First as User1
    Cookie1 = http.cookiejar.CookieJar()  # Then as User2
    resp1 = build_opener(HTTPCookieProcessor(Cookie0))  # Process cookies
    resp2 = build_opener(HTTPCookieProcessor(Cookie1))  # Process cookies
    actionDone = []  # list of form actions already processed
    csrf = ''  # initialise with no / invalid token
    ref_detect = 0x00  # Null Char Flag
    ori_detect = 0x00  # Null Char Flag
    form = Debugger.Form_Debugger()  # init to the form parser+token generator
    bs1 = BeautifulSoup(form1, 'html.parser').findAll(
        'form', action=True)[0]  # sanity check that the test form parses
    bs2 = BeautifulSoup(form2, 'html.parser').findAll(
        'form', action=True)[0]  # same as above
    init1 = web  # First init
    resp1.open(init1)  # Make request as User1
    resp2.open(init1)  # Make request as User2

    # Now there are 2 different modes of scanning and crawling here.
    # 1st -> Testing a single endpoint without the --crawl flag.
    # 2nd -> Testing all endpoints with the --crawl flag.
    try:
        # Implementing the first mode. [NO CRAWL]
        if not CRAWL_SITE:
            url = web
            response = Get(url).text
            try:
                verbout(O, 'Trying to parse response...')
                soup = BeautifulSoup(response, 'html.parser')  # Parser init
            except Exception:  # bs4 on Python 3 no longer raises HTMLParseError
                verbout(R, 'BeautifulSoup Error: ' + url)
            i = 0  # Init user number
            if REFERER_ORIGIN_CHECKS:
                # Referer Based Checks if True...
                verbout(
                    O, 'Checking endpoint request validation via ' +
                    color.GREY + 'Referer' + color.END + ' Checks...')
                if Referer(url):
                    ref_detect = 0x01
                verbout(O, 'Confirming the vulnerability...')
                # We have finished with Referer Based Checks, lets go for Origin Based Ones...
                verbout(
                    O, 'Confirming endpoint request validation via ' +
                    color.GREY + 'Origin' + color.END + ' Checks...')
                if Origin(url):
                    ori_detect = 0x01
            # Now lets get the forms...
            verbout(
                O, 'Retrieving all forms on ' + color.GREY + url + color.END +
                '...')
            for m in Debugger.getAllForms(
                    soup):  # iterating over all forms extracted
                verbout(O, 'Testing form:\n' + color.CYAN)
                formPrettify(m.prettify())
                verbout('', '')
                FORMS_TESTED.append('(i) ' + url + ':\n\n' + m.prettify() +
                                    '\n')
                try:
                    if m['action']:
                        pass
                except KeyError:
                    m['action'] = '/' + url.rsplit('/', 1)[1]
                    ErrorLogger(url, 'No standard form "action".')
                action = Parser.buildAction(
                    url,
                    m['action'])  # get all forms which have 'action' attribute
                if action not in actionDone and action != '':  # if url returned is neither null nor a duplicate...
                    # If form submission is kept to True
                    if FORM_SUBMISSION:
                        try:
                            # NOTE: Slow connections may cause read timeouts,
                            # which can surface as AttributeError.
                            #
                            # The idea here is to make requests pretending to
                            # be 3 different users. A series of requests will
                            # be targeted against the site with different
                            # identities. Refer to the XSRFProbe wiki for more info.
                            result, genpoc = form.prepareFormInputs(
                                m)  # prepare inputs as user 1
                            r1 = Post(
                                url, action, result
                            )  # make request with token values generated as user1
                            result, genpoc = form.prepareFormInputs(
                                m)  # prepare inputs as user 2
                            r2 = Post(
                                url, action, result
                            )  # again make request with token values generated as user2
                            # Go for cookie based checks
                            if COOKIE_BASED:
                                Cookie(url, r1)
                            # Go for token based entropy checks...
                            try:
                                if m['name']:
                                    query, token = Entropy(
                                        result, url, r1.headers, m.prettify(),
                                        m['action'], m['name'])
                            except KeyError:
                                query, token = Entropy(result, url, r1.headers,
                                                       m.prettify(),
                                                       m['action'])
                            # Now its time to detect the encoding type (if any) of the Anti-CSRF token.
                            fnd, detct = Encoding(token)
                            if fnd == 0x01 and detct:
                                VulnLogger(
                                    url,
                                    'Token is a string encoded value which can be probably decrypted.',
                                    '[i] Encoding: ' + detct)
                            else:
                                NovulLogger(
                                    url,
                                    'Anti-CSRF token is not a string encoded value.'
                                )
                            # Go for token parameter tamper checks.
                            txor = False  # default in case no token was found
                            if query and token:
                                txor = Tamper(url, action, result, r2.text,
                                              query, token)
                            o2 = Get(url).text  # make request as user2
                            try:
                                form2 = Debugger.getAllForms(BeautifulSoup(
                                    o2))[i]  # user2 gets his form
                            except IndexError:
                                verbout(R, 'Form Index Error')
                                ErrorLogger(url, 'Form Index Error.')
                                continue  # Making sure program won't end here (dirty fix :( )
                            verbout(GR, 'Preparing form inputs...')
                            contents2, genpoc = form.prepareFormInputs(
                                form2)  # prepare for form 3 as user3
                            r3 = Post(
                                url, action, contents2
                            )  # make request as user3 with user3's form
                            if POST_BASED and (not query or txor):
                                try:
                                    if m['name']:
                                        PostBased(url, r1.text, r2.text,
                                                  r3.text,
                                                  m['action'], result, genpoc,
                                                  m.prettify(), m['name'])
                                except KeyError:
                                    PostBased(url, r1.text, r2.text, r3.text,
                                              m['action'], result, genpoc,
                                              m.prettify())
                            else:
                                print(
                                    color.GREEN +
                                    ' [+] The form was requested with an Anti-CSRF token.'
                                )
                                print(color.GREEN + ' [+] Endpoint ' +
                                      color.BG + ' NOT VULNERABLE ' +
                                      color.END + color.GREEN +
                                      ' to POST-Based CSRF Attacks!')
                                NovulLogger(
                                    url,
                                    'Not vulnerable to POST-Based CSRF Attacks.'
                                )
                        except HTTPError as msg:  # if runtime exception...
                            verbout(R, 'Exception : ' +
                                    msg.__str__())  # again exception :(
                            ErrorLogger(url, msg)
                actionDone.append(action)  # add the stuff done
                i += 1  # Increase user iteration
        else:
            # Implementing the 2nd mode [CRAWLING AND SCANNING].
            verbout(GR, "Initializing crawling and scanning...")
            crawler = Crawler.Handler(init1,
                                      resp1)  # Init to the Crawler handler
            while crawler.noinit():  # Until 0 urls left
                url = next(crawler)  # Go for next!
                print(C + 'Testing :> ' + color.CYAN +
                      url)  # Display what url its crawling
                try:
                    soup = crawler.process(fld)  # Start the parser
                    if not soup:
                        continue  # Making sure not to end the program yet...
                    i = 0  # Set count = 0 (user number 0, which will be subsequently incremented)
                    if REFERER_ORIGIN_CHECKS:
                        # Referer Based Checks if True...
                        verbout(
                            O, 'Checking endpoint request validation via ' +
                            color.GREY + 'Referer' + color.END + ' Checks...')
                        if Referer(url):
                            ref_detect = 0x01
                        verbout(O, 'Confirming the vulnerability...')
                        # We have finished with Referer Based Checks, lets go for Origin Based Ones...
                        verbout(
                            O, 'Confirming endpoint request validation via ' +
                            color.GREY + 'Origin' + color.END + ' Checks...')
                        if Origin(url):
                            ori_detect = 0x01
                    # Now lets get the forms...
                    verbout(
                        O, 'Retrieving all forms on ' + color.GREY + url +
                        color.END + '...')
                    for m in Debugger.getAllForms(
                            soup):  # iterating over all forms extracted
                        FORMS_TESTED.append('(i) ' + url + ':\n\n' +
                                            m.prettify() + '\n')
                        try:
                            if m['action']:
                                pass
                        except KeyError:
                            m['action'] = '/' + url.rsplit('/', 1)[1]
                            ErrorLogger(url, 'No standard "action" attribute.')
                        action = Parser.buildAction(
                            url, m['action']
                        )  # get all forms which have 'action' attribute
                        if action not in actionDone and action != '':  # if url returned is neither null nor a duplicate...
                            # If form submission is kept to True
                            if FORM_SUBMISSION:
                                try:
                                    result, genpoc = form.prepareFormInputs(
                                        m)  # prepare inputs as user 1
                                    r1 = Post(
                                        url, action, result
                                    )  # make request with token values generated as user1
                                    result, genpoc = form.prepareFormInputs(
                                        m)  # prepare inputs as user 2
                                    r2 = Post(
                                        url, action, result
                                    )  # again make request with token values generated as user2
                                    if COOKIE_BASED:
                                        Cookie(url, r1)
                                    # Go for token based entropy checks...
                                    try:
                                        if m['name']:
                                            query, token = Entropy(
                                                result, url, r1.headers,
                                                m.prettify(), m['action'],
                                                m['name'])
                                    except KeyError:
                                        query, token = Entropy(
                                            result, url, r1.headers,
                                            m.prettify(), m['action'])
                                        ErrorLogger(
                                            url, 'No standard form "name".')
                                    # Now its time to detect the encoding type (if any) of the Anti-CSRF token.
                                    fnd, detct = Encoding(token)
                                    if fnd == 0x01 and detct:
                                        VulnLogger(
                                            url,
                                            'String encoded token value. Token might be decrypted.',
                                            '[i] Encoding: ' + detct)
                                    else:
                                        NovulLogger(
                                            url,
                                            'Anti-CSRF token is not a string encoded value.'
                                        )
                                    # Go for token parameter tamper checks.
                                    txor = False  # default in case no token was found
                                    if query and token:
                                        txor = Tamper(url, action, result,
                                                      r2.text, query, token)
                                    o2 = Get(url).text  # make request as user2
                                    try:
                                        form2 = Debugger.getAllForms(
                                            BeautifulSoup(o2))[
                                                i]  # user2 gets his form
                                    except IndexError:
                                        verbout(R, 'Form Index Error')
                                        ErrorLogger(url, 'Form Index Error.')
                                        continue  # making sure program won't end here (dirty fix :( )
                                    verbout(GR, 'Preparing form inputs...')
                                    contents2, genpoc = form.prepareFormInputs(
                                        form2)  # prepare for form 3 as user3
                                    r3 = Post(
                                        url, action, contents2
                                    )  # make request as user3 with user3's form
                                    if POST_BASED and (not query or txor):
                                        try:
                                            if m['name']:
                                                PostBased(
                                                    url, r1.text, r2.text,
                                                    r3.text, m['action'],
                                                    result, genpoc,
                                                    m.prettify(), m['name'])
                                        except KeyError:
                                            PostBased(url, r1.text, r2.text,
                                                      r3.text, m['action'],
                                                      result, genpoc,
                                                      m.prettify())
                                    else:
                                        print(
                                            color.GREEN +
                                            ' [+] The form was requested with an Anti-CSRF token.'
                                        )
                                        print(color.GREEN + ' [+] Endpoint ' +
                                              color.BG + ' NOT VULNERABLE ' +
                                              color.END + color.GREEN +
                                              ' to POST-Based CSRF Attacks!')
                                        NovulLogger(
                                            url,
                                            'Not vulnerable to POST-Based CSRF Attacks.'
                                        )
                                except HTTPError as msg:  # if runtime exception...
                                    verbout(
                                        color.RED,
                                        ' [-] Exception : ' + color.END +
                                        msg.__str__())  # again exception :(
                                    ErrorLogger(url, msg)
                        actionDone.append(action)  # add the stuff done
                        i += 1  # Increase user iteration
                except URLError as e:  # if again...
                    verbout(R, 'Exception at : ' + url)  # again exception -_-
                    time.sleep(0.4)
                    verbout(O, 'Moving on...')
                    ErrorLogger(url, e)
                    continue  # make sure it doesn't stop at exceptions
                # This error usually happens when a site is protected by a
                # load balancer such as Cloudflare. These domains return a 403
                # Forbidden response in various contexts, for example when
                # making reverse DNS queries.
                except HTTPError as e:
                    if str(e.code) == '403':
                        verbout(R, 'HTTP Authentication Error!')
                        verbout(R, 'Error Code : ' + O + str(e.code))
                        ErrorLogger(url, e)
                        quit()
        GetLogger(
        )  # The scanning has finished, so now we can log out all the links ;)
        print('\n' + G + "Scan completed!" + '\n')
        Analysis()  # For Post Scan Analysis
    except KeyboardInterrupt:  # In case the user wants to exit :') (while crawling)
        verbout(R, 'User Interrupt!')
        time.sleep(1.5)
        Analysis()  # For Post scan Analysis
        print(R + 'Aborted!')  # say goodbye
        ErrorLogger('Keyboard Interrupt', 'Aborted')
        GetLogger(
        )  # The scanning has interrupted, so now we can log out all the links ;)
        sys.exit(1)
    except Exception as e:
        verbout(R, e.__str__())
        ErrorLogger(url, e)
        GetLogger()
        sys.exit(1)
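
The core of the form test that Engine() runs in both modes can be sketched independently of the crawler: generate two independent sets of form inputs (two "users") and submit both. The helper names below, prepare_inputs and submit, are hypothetical stand-ins for Debugger.Form_Debugger().prepareFormInputs and Post.

def post_based_probe(url, action, form, prepare_inputs, submit):
    # Inputs (including any Anti-CSRF token) generated for "user 1"
    inputs1, _ = prepare_inputs(form)
    r1 = submit(url, action, inputs1)
    # Freshly generated, independent inputs for "user 2"
    inputs2, _ = prepare_inputs(form)
    r2 = submit(url, action, inputs2)
    # If both submissions are accepted despite carrying different
    # locally generated tokens, the server is likely not validating
    # the token against the session.
    return r1.status_code == 200 and r2.status_code == 200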
Example 4
    def process(self, root):
        # Our first task is to drop urls that aren't to be scanned, i.e. those
        # passed via the --exclude parameter.
        if EXCLUDE_DIRS:
            for link in EXCLUDE_DIRS:
                if link in self.toVisit:  # guard against ValueError
                    self.toVisit.remove(link)
        url = self.currentURI  # Main Url (Current)
        try:
            query = Get(url)  # Open it (to check if it exists)
            if query is not None and not str(query.status_code).startswith('40'):  # Avoiding 40x errors
                INTERNAL_URLS.append(url)  # We append it to the list of valid urls
            else:
                if url in self.toVisit:
                    self.toVisit.remove(url)

        except (urllib.error.HTTPError, urllib.error.URLError) as msg:  # In case there is an exception connecting to the url
            verbout(R, 'HTTP Request Error: '+msg.__str__())
            ErrorLogger(url, msg.__str__())
            if url in self.toVisit:
                self.toVisit.remove(url)  # Remove non-existent / errored urls
            return None

        # Making sure the content type is in HTML format, so that BeautifulSoup
        # can parse it...
        if not query or not re.search('html', query.headers.get('Content-Type', '')):
            return None

        # Just in case there is a redirection, we are supposed to follow it :D
        if 'Location' in query.headers:
            verbout(GR, 'Following redirect to new location...')
            url = query.headers['Location']
        verbout(O,'Reading response...')
        response = query.content  # Read the response contents

        try:
            verbout(O, 'Trying to parse response...')
            soup = BeautifulSoup(response, 'html.parser')  # Parser init

        except Exception:  # bs4 on Python 3 no longer raises HTMLParseError
            verbout(R, 'BeautifulSoup Error: ' + url)
            self.visited.append(url)
            if url in self.toVisit:
                self.toVisit.remove(url)
            return None

        for m in soup.findAll('a', href=True):  # find all <a> tags with an href
            app = ''
            # Skip javascript: pseudo-links; anything else may be crawlable
            if not re.match(r'javascript:', m['href']):
                app = Parser.buildUrl(url, m['href'])

            # If we get a valid link
            if app != '' and re.search(root, app):
                # Getting rid of Urls starting with '../../../..'
                while re.search(RID_DOUBLE, app):
                    p = re.compile(RID_COMPILE)
                    app = p.sub('/', app)
                # Getting rid of Urls starting with './'
                p = re.compile(RID_SINGLE)
                app = p.sub('', app)

                # Add new link to the queue only if its pattern has not been added yet
                uriPattern = removeIDs(app)  # remove IDs
                if self.notExist(uriPattern) and app != url:
                    verbout(G, 'Added :> ' + color.BLUE + app)  # display what we have got!
                    self.toVisit.append(app)  # add up urls to visit
                    self.uriPatterns.append(uriPattern)

        self.visited.append(url)  # add urls visited
        return soup  # go back!
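
The URL clean-up in the middle of process() relies on the RID_DOUBLE, RID_COMPILE and RID_SINGLE regex constants from the project's config. The stand-in patterns below are assumptions that reproduce the intent: collapse runs of '../' and strip a leading './'.

import re

def normalize_path(app):
    # Collapse any run of '../' segments down to a single '/'
    while re.search(r'\.\./', app):
        app = re.sub(r'(\.\./)+', '/', app)
    # Strip a leading './'
    return re.sub(r'^\./', '', app)

# normalize_path('../../admin/login') -> '/admin/login'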
Example 5
def Persistence(url, postq):
    '''
    The main idea behind this is to check for
    Cookie Persistence.
    '''
    verbout(color.RED, '\n +-----------------------------------+')
    verbout(color.RED, ' |   Cookie Persistence Validation   |')
    verbout(color.RED, ' +-----------------------------------+\n')
    verbout(
        GR, 'Proceeding to test for ' + color.GREY + 'Cookie Persistence' +
        color.END + '...')
    time.sleep(0.7)
    found = 0x00
    # Now let the real test begin...
    #
    # [Step 1]: Lets examine now whether cookies set by server are persistent or not.
    # For this we'll have to parse the cookies set by the server and check for the
    # time when the cookie expires. Lets do it!
    #
    # First its time for GET type requests. Lets prepare our request.
    resps = []  # Set-Cookie headers collected across requests
    verbout(
        C, 'Proceeding to test cookie persistence via ' + color.CYAN +
        'Prepared GET Requests' + color.END + '...')
    gen_headers = dict(HEADER_VALUES)  # work on a copy so the global isn't mutated
    gen_headers['User-Agent'] = ('Mozilla/5.0 (Windows NT 6.3; WOW64) '
                                 'AppleWebKit/537.36 (KHTML, like Gecko) '
                                 'Chrome/40.0.2214.115 Safari/537.36')
    if COOKIE_VALUE:
        # Multiple cookies in a Cookie header are separated by '; '
        gen_headers['Cookie'] = '; '.join(COOKIE_VALUE)
    verbout(GR, 'Making the request...')
    req = Get(url, headers=gen_headers)
    if req.cookies:
        for cook in req.cookies:
            if cook.expires:
                print(color.GREEN +
                      ' [+] Persistent Cookies found in Response Headers!')
                print(color.GREY + ' [+] Cookie: ' + color.CYAN +
                      cook.__str__())
                # cookie.expires is a Unix timestamp, so to display it we
                # convert it to a human-readable format.
                print(color.GREEN + ' [+] Cookie Expiry Period: ' +
                      color.ORANGE +
                      datetime.fromtimestamp(cook.expires).__str__())
                found = 0x01
                VulnLogger(url, 'Persistent Session Cookies Found.',
                           '[i] Cookie: ' + str(req.headers.get('Set-Cookie')))
            else:
                NovulLogger(url, 'No Persistent Session Cookies.')
    if found == 0x00:
        verbout(
            R,
            'No persistent session cookies identified on GET Type Requests!')
    verbout(
        C, 'Proceeding to test cookie persistence on ' + color.CYAN +
        'POST Requests' + color.END + '...')
    # Now its time for POST Based requests.
    #
    # NOTE: As standard practice, every web application should supply a cookie
    # upon a POST query; it may or may not do so for GET requests.
    if postq.cookies:
        for cookie in postq.cookies:
            if cookie.expires:
                print(color.GREEN +
                      ' [+] Persistent Cookies found in Response Headers!')
                print(color.GREY + ' [+] Cookie: ' + color.CYAN +
                      cookie.__str__())
                # Again, convert the expiry timestamp to a human-readable format.
                print(color.GREEN + ' [+] Cookie Expiry Period: ' +
                      color.ORANGE +
                      datetime.fromtimestamp(cookie.expires).__str__())
                found = 0x01
                VulnLogger(url, 'Persistent Session Cookies Found.',
                           '[i] Cookie: ' + str(postq.headers.get('Set-Cookie')))
                print(color.ORANGE + ' [!] Probable Insecure Practice: ' +
                      color.BY + ' Persistent Session Cookies ' + color.END)
            else:
                NovulLogger(url, 'No Persistent Cookies.')
    if found == 0x00:
        verbout(
            R, 'No persistent session cookies identified upon POST Requests!')
        print(color.ORANGE + ' [+] Endpoint might be ' + color.BY +
              ' NOT VULNERABLE ' + color.END + color.ORANGE +
              ' to CSRF attacks!')
        print(color.ORANGE + ' [+] Detected : ' + color.BY +
              ' No Persistent Cookies ' + color.END)

    # [Step 2]: The idea here is to identify cookie persistence by observing
    # how the Set-Cookie header varies when different user-agents are used.
    # For this test we have chosen 5 widely used user-agents (below) and we
    # observe the variation of the Set-Cookie header across them.
    #
    # We only run this method when the previous algorithm found nothing.
    if found != 0x01:
        verbout(
            C, 'Proceeding to test cookie persistence via ' + color.CYAN +
            'User-Agent Alteration' + color.END + '...')
        user_agents = {
            'Chrome on Windows 8.1':
            'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36',
            'Safari on iOS':
            'Mozilla/5.0 (iPhone; CPU iPhone OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B466 Safari/600.1.4',
            'IE6 on Windows XP':
            'Mozilla/5.0 (Windows; U; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 2.0.50727)',
            'Opera on Windows 10':
            'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36 OPR/43.0.2442.991',
            'Chrome on Android':
            'Mozilla/5.0 (Linux; U; Android 2.3.1; en-us; MID Build/GINGERBREAD) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1'
        }
        verbout(GR, 'Setting custom generic headers...')
        gen_headers = dict(HEADER_VALUES)  # fresh copy of the generic headers
        for name, agent in user_agents.items():
            verbout(C, 'Using User-Agent : ' + color.CYAN + name)
            verbout(GR, 'Value : ' + color.ORANGE + agent)
            gen_headers['User-Agent'] = agent
            if COOKIE_VALUE:
                # Multiple cookies in a Cookie header are separated by '; '
                gen_headers['Cookie'] = '; '.join(COOKIE_VALUE)
            req = Get(url, headers=gen_headers)
            # Record the Set-Cookie header only when the server supplies one.
            if req.headers.get('Set-Cookie'):
                resps.append(req.headers.get('Set-Cookie'))
        if resps:
            if checkDuplicates(resps):
                verbout(
                    G,
                    'Set-Cookie header does not change with varied User-Agents...'
                )
                verbout(color.ORANGE,
                        ' [+] Possible persistent session cookies found...')
                print(color.RED +
                      ' [+] Possible CSRF Vulnerability Detected : ' +
                      color.ORANGE + url + '!')
                print(color.ORANGE + ' [!] Probable Insecure Practice: ' +
                      color.BY + ' Persistent Session Cookies ' + color.END)
                VulnLogger(url, 'Persistent Session Cookies Found.',
                           '[i] Cookie: ' + req.headers.get('Set-Cookie'))
            else:
                verbout(
                    G, 'Set-Cookie header changes with varied User-Agents...')
                verbout(R, 'No possible persistent session cookies found...')
                verbout(
                    color.ORANGE,
                    ' [+] Endpoint ' + color.BY + ' PROBABLY NOT VULNERABLE ' +
                    color.END + color.ORANGE + ' to CSRF attacks!')
                verbout(
                    color.ORANGE,
                    ' [+] Application Practice Method Detected : ' + color.BY +
                    ' No Persistent Cookies ' + color.END)
                NovulLogger(url, 'No Persistent Cookies.')
        else:
            verbout(R, 'No cookies are being set on any requests.')
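
The User-Agent variation test in Step 2 reduces to a short loop: request the page under several agents and compare the Set-Cookie headers. Below is a sketch assuming the requests library; cookie_varies_with_agent and the agent strings are illustrative placeholders.

import requests

def cookie_varies_with_agent(url, agents=('AgentA/1.0', 'AgentB/2.0')):
    seen = set()
    for agent in agents:
        resp = requests.get(url, headers={'User-Agent': agent})
        set_cookie = resp.headers.get('Set-Cookie')
        if set_cookie:
            seen.add(set_cookie)
    # A single distinct value across agents hints at a persistent,
    # client-independent session cookie; more than one means the
    # cookie varies with the client.
    return len(seen) > 1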