Example #1
def request(uri, params='', PayloadIndex=0):
    skip = 1
    if args.proxy:
        # Route the request through the configured proxy
        try:
            page = requester(uri, True, params)
        except requests.exceptions.Timeout:
            print("[\033[91mTimeout\033[00m] %s" % uri)
            return skip
        except requests.exceptions.ConnectionError:
            print("%s Connection Error" % bad)
            return skip
    else:
        try:
            page = requester(uri, False, params)
        except requests.exceptions.Timeout:
            print("[\033[91mTimeout\033[00m] %s" % uri)
            return skip
        except requests.exceptions.ConnectionError:
            print("%s Connection Error" % bad)
            return skip
        except IndexError:
            # Wrap around to the first payload if the index runs past the list
            PayloadIndex = 0

    # Inspect the response against the current payload and stop on a hit
    function_break = check(page, page.request.url, file[PayloadIndex])
    PayloadIndex += 1
    if function_break:
        return skip
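This example (and the ones that follow) delegates the actual HTTP call to a requester helper defined elsewhere in the project and only shown here through its call sites. A minimal sketch of what such a wrapper might look like, assuming it takes the target URL, a proxy toggle and optional query parameters and returns a requests.Response; the proxy address, timeout and redirect handling below are placeholder assumptions, not the project's real settings:

import requests

# Hypothetical stand-in for the project's requester() helper; the signature
# is inferred from the calls above, everything else is an assumption.
def requester(uri, use_proxy, params=''):
    proxies = {'http': 'http://127.0.0.1:8080',
               'https': 'http://127.0.0.1:8080'} if use_proxy else None
    # Redirects are left unfollowed so 3xx responses stay visible to the checks
    return requests.get(uri, params=params, proxies=proxies,
                        timeout=10, allow_redirects=False)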
Example #2
def request(Uri, Foxy, Params='', PayloadIndex=0):
    skip = 1
    if Foxy:
        # Route the request through the configured proxy
        try:
            page = requester(Uri, True, Params)
        except requests.exceptions.Timeout:
            print("[\033[91mTimeout\033[00m] %s" % Uri)
            return skip
        except requests.exceptions.ConnectionError:
            print("%s Connection Error" % bad)
            return skip
    else:
        try:
            page = requester(Uri, False, Params)
        except requests.exceptions.Timeout:
            print("[\033[91mTimeout\033[00m] %s" % Uri)
            return skip
        except requests.exceptions.ConnectionError:
            print("%s Connection Error" % bad)
            return skip
        except IndexError:
            # Wrap around to the first payload if the index runs past the list
            PayloadIndex = 0

    # Run the basic CRLF checks against the current payload and stop on a hit
    function_break = BasicChecks(page, payloads[PayloadIndex], page.request.url)
    PayloadIndex += 1
    if function_break:
        return skip
Example #3
def MutlipleParameters(ParameterList, uri, PayloadIndex=0):
    print('%s Infusing payloads' % info)
    for params in ParameterList:
        if args.proxy:
            try:
                page = requester(uri, True, params)
                function_break = check(page, unquote(page.request.url),
                                       file[PayloadIndex])
                PayloadIndex += 1
                if function_break:
                    break
            except requests.exceptions.Timeout:
                print("[\033[91mTimeout\033[00m] %s" % uri)
                break
            except requests.exceptions.ConnectionError:
                print("%s Connection Error" % bad)
                break
        else:
            try:
                page = requester(uri, False, params)
                function_break = check(page, unquote(page.request.url),
                                       file[PayloadIndex])
                PayloadIndex += 1
                if function_break:
                    break
            except requests.exceptions.Timeout:
                print("[\033[91mTimeout\033[00m] %s" % uri)
                break
            except requests.exceptions.ConnectionError:
                print("%s Connection Error" % bad)
                break
            except IndexError:
                # Wrap around to the first payload if the index runs past the list
                PayloadIndex = 0
Example #4
def Empty(URLlist, PayloadIndex=0):
    print('%s Infusing payloads' % info)

    for uri in URLlist:
        if args.proxy:
            try:
                page = requester(uri, True)
            except requests.exceptions.Timeout:
                print("[\033[91mTimeout\033[00m] %s" % uri)
                break
            except requests.exceptions.ConnectionError:
                print("%s Connection Error" % bad)
                break
        else:
            try:
                page = requester(uri, False)
            except requests.exceptions.Timeout:
                print("[\033[91mTimeout\033[00m] %s" % uri)
                break
            except requests.exceptions.ConnectionError:
                print("%s Connection Error" % bad)
                break
        # Compare the response against the current payload and stop on a hit
        function_break = check(page, uri, file[PayloadIndex])
        PayloadIndex += 1
        if function_break:
            break
Example #5
def CrlfScan(url, foxy):

    print("%s Checking for CRLF Injection" % info)

    if '=' in url:
        # A parameter is present; its value must be left empty for fuzzing
        if not url.endswith('='):
            print("%s Omit the value of parameter that you wanna fuzz" % info)
            exit()
    else:
        print('%s Appending payloads just after the URL' % info)
        if not url.endswith('/'):
            url = url + '/'

    for payload in payloads:
        try:
            page = requester("%s%s" % (url, payload), foxy)

        except requests.exceptions.Timeout:
            print("[\033[91mTimeout\033[00m] %s" % url)
            break

        except requests.exceptions.ConnectionError:
            print("%s Connection Error" % bad)
            break

        except requests.exceptions.InvalidURL:
            print("%s Invalid URL structure" % bad)
            break

        # Report client-error status codes for the injected URL
        if page.status_code == 404:
            print("[\033[91m404\033[00m] %s%s" % (url, payload))

        elif page.status_code == 403:
            print("[\033[91m403\033[00m] %s%s" % (url, payload))

        elif page.status_code == 400:
            print("[\033[91m400\033[00m] %s%s" % (url, payload))

        # A canary Location header reflected in the response means the payload
        # split the response and injected a redirect
        if 'Location' in payload:
            if page.status_code in http_redirect_codes and page.headers[
                    'Location'] == "www.google.com":
                print("%s HTTP Response Splitting found: \033[1m%s\033[00m" %
                      (good, payload))

        # A canary Set-Cookie header reflected in the response means the
        # payload injected a header of its own
        elif "Set-Cookie" in payload:
            if page.status_code != 404:
                try:
                    if page.headers['Set-Cookie'] == "name=ch33ms;":
                        print(
                            "%s HTTP Response Splitting found: \033[1m%s\033[00m"
                            % (good, payload))

                except KeyError:
                    break
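CrlfScan only reports a hit when the injected header actually comes back in the response, so the global payloads list it iterates has to carry CRLF-encoded sequences that set the exact canary values checked above (www.google.com in Location, name=ch33ms; in Set-Cookie). A small illustrative setup, reusing the requester sketch from earlier; the payload strings, URL and status banners below are examples, not the project's bundled list:

# Hypothetical payloads matching the canaries CrlfScan looks for
payloads = [
    '%0d%0aLocation:%20www.google.com',
    '%0d%0aSet-Cookie:%20name=ch33ms;',
]
http_redirect_codes = list(range(300, 311))
info, good, bad = '[*]', '[+]', '[-]'   # placeholder status banners

# Target URL must end with '=' so payloads land in the empty parameter value
CrlfScan('http://example.com/page.php?next=', foxy=False)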
Example #6
def MultipleParams(ParamList, Uri, Foxy):
    PayloadIndex = 0

    print("%s Checking for CRLF Injection" % info)
    for params in ParamList:
        try:
            page = requester(Uri, Foxy, params)
            # Pair each parameter set with its corresponding payload
            func_break = BasicChecks(page, payloads[PayloadIndex],
                                     unquote(page.request.url))
            PayloadIndex += 1
            if func_break:
                break
        except requests.exceptions.Timeout:
            print("[\033[91mTimeout\033[00m] %s" % Uri)
            break
        except requests.exceptions.ConnectionError:
            print("%s Connection Error" % bad)
            break
        except requests.exceptions.InvalidURL:
            print("%s Invalid URL structure" % bad)
            break
        except KeyError:
            # Reset so the next iteration starts from the first payload again
            PayloadIndex = 0
Example #7
def NoParams(Uris, Foxy):
    PayloadIndex = 0

    for url in Uris:
        try:
            page = requester(url, Foxy)
            # Pair each fuzzed URL with its corresponding payload
            func_break = BasicChecks(page, payloads[PayloadIndex], url)
            PayloadIndex += 1
            if func_break:
                break
        except requests.exceptions.Timeout:
            print("[\033[91mTimeout\033[00m] %s" % url)
            break

        except requests.exceptions.ConnectionError:
            print("%s Connection Error" % bad)
            break

        except requests.exceptions.InvalidURL:
            print("%s Invalid URL structure" % bad)
            break

        except KeyError:
            # Reset so the next iteration starts from the first payload again
            PayloadIndex = 0
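Examples 2, 6 and 7 push the per-response logic into a BasicChecks helper that is not shown on this page. Judging from the inline checks in example 5, it most likely inspects the status code and the reflected Location / Set-Cookie headers for the same canary values; a rough reconstruction under that assumption, with the direct header lookups left to raise the KeyError the callers catch:

# Hypothetical reconstruction of BasicChecks(), mirroring CrlfScan's inline
# logic above; the real project's version may differ.
def BasicChecks(page, payload, url):
    # Flag noisy client errors for the fuzzed URL
    if page.status_code in (400, 403, 404):
        print("[\033[91m%d\033[00m] %s" % (page.status_code, url))
    # Canary reflected in the Location header -> response splitting
    if 'Location' in payload:
        if page.status_code in http_redirect_codes and \
                page.headers['Location'] == "www.google.com":
            print("%s HTTP Response Splitting found: \033[1m%s\033[00m" % (good, payload))
            return True   # a truthy return makes the callers stop fuzzing
    # Canary reflected in the Set-Cookie header -> response splitting
    elif 'Set-Cookie' in payload:
        if page.status_code != 404 and \
                page.headers['Set-Cookie'] == "name=ch33ms;":
            print("%s HTTP Response Splitting found: \033[1m%s\033[00m" % (good, payload))
            return True
    return False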
Example #8
def analyze(url):
    if '=' in url:
        # A parameter is present; its value must be left empty for fuzzing
        if not url.endswith('='):
            print("%s Omit the value of parameter that you wanna fuzz" % info)
            exit()
    else:
        print('%s Appending payloads just after the URL' % info)
        if not url.endswith('/'):
            url = url + '/'
    print('%s Infusing payloads' % info)
    if args.payload:
        file = open(args.payload, 'r')
    else:
        try:
            file = open('Oralyzer/payloads.txt', 'r')
        except FileNotFoundError:
            file = open('payloads.txt', 'r')
    urls = []
    redirect_codes = list(range(300, 311))

    # Build one candidate URL per payload line
    for payload in file:
        urls.append(url + payload.rstrip('\n'))

    for uri in urls:
        if args.proxy:
            try:
                page = requester(uri, True)
            except requests.exceptions.Timeout:
                print("[\033[91mTimeout\033[00m] %s" % url)
                break
            except requests.exceptions.ConnectionError:
                print("%s Connection Error" % bad)
                break
        else:
            try:
                page = requester(uri, False)
            except requests.exceptions.Timeout:
                print("[\033[91mTimeout\033[00m] %s" % url)
                break
            except requests.exceptions.ConnectionError:
                print("%s Connection Error" % bad)
                break

        # Look for the canary URL and redirect sinks in the response body
        soup = BeautifulSoup(page.text, 'html.parser')
        location = 'window.location' in str(soup.find_all('script'))
        href = 'location.href' in str(soup.find_all('script'))
        google = 'http://www.google.com' in str(soup.find_all('script'))
        metas = str(soup.find_all('meta'))
        meta_tag_search = "http://www.google.com" in metas

        if page.status_code in redirect_codes:
            # A 3xx response: either a meta refresh or a Location header redirect
            if meta_tag_search and "http-equiv=\"refresh\"" in metas:
                print("%s Meta Tag Redirection" % good)
                break
            else:
                print("%s Header Based Redirection : %s ▶ \033[92m%s\033[00m" %
                      (good, uri, page.headers['Location']))

        elif page.status_code == 200:
            if google:
                # The canary URL was reflected inside a script tag
                print("%s Javascript Based Redirection" % good)
                if location and href:
                    print(
                        "%s Vulnerable Source Found: \033[1mwindow.location\033[00m"
                        % good)
                    print(
                        "%s Vulnerable Source Found: \033[1mlocation.href\033[00m"
                        % good)
                elif href:
                    print(
                        "%s Vulnerable Source Found: \033[1mlocation.href\033[00m"
                        % good)
                elif location:
                    print(
                        "%s Vulnerable Source Found: \033[1mwindow.location\033[00m"
                        % good)
                print("%s Try fuzzing the URL for DOM XSS" % info)
                break

            elif location and not google:
                print(
                    "%s Vulnerable Source Found: \033[1mwindow.location\033[00m"
                    % good)
                print("%s Try fuzzing the URL for DOM XSS" % info)
                break

            if meta_tag_search and "http-equiv=\"refresh\"" in str(page.text):
                print("%s Meta Tag Redirection" % good)
                break
            elif "http-equiv=\"refresh\"" in str(
                    page.text) and not meta_tag_search:
                print("%s The page is only getting refreshed" % bad)
                break

        if page.status_code == 404:
            print("[\033[91m404\033[00m] %s" % uri)
        elif page.status_code == 403:
            print("[\033[91m403\033[00m] %s" % uri)
        elif page.status_code == 400:
            print("[\033[91m400\033[00m] %s" % uri)