Example No. 1
def fuzz_discover_helper(url, file):

    current_url = utility.S.get(url).url
    links = utility.get_links_from_html(url)
    params = utility.parse_url(url)
    inputs = utility.get_input_fields(url)
    good_urls = utility.page_guess(file, url)

    print("-------------------------------------------------------------")
    print("CURRENT URL: " + current_url)
    print("-------------------------------------------------------------")
    print("REACHABLE URLS FROM COMMON WORDS LIST: ")
    for good_url in good_urls:
        print(good_url)
    print("-------------------------------------------------------------")
    print("INPUT FIELDS: ")
    print("| NAME OF FIELD | TYPE OF FIELD |")
    for input_field in inputs:
        if input_field.get('name') is not None and input_field.get('type') is not None:
            print("| " + input_field.get('name') + " | " + input_field.get('type') + " |")
        elif input_field.get('type') is not None:
            print("| NO NAME | " + input_field.get('type') + " |")
        else:
            print("| " + (input_field.get('name') or "NO NAME") + " | NO TYPE |")
    print("-------------------------------------------------------------")
    print("URL PARAMETERS: ")
    print(params)
    print("-------------------------------------------------------------")
    print("")
    print("")

    links = utility.filter_urls_for_offsite(url, links)

    return links
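
The helpers referenced above live in the project's utility module, which is not shown here. As a point of reference, here is a minimal sketch of what get_input_fields and filter_urls_for_offsite could look like using requests and BeautifulSoup; the real implementations may differ.

# Hypothetical sketch of two utility helpers used above (not the project's actual code).
import requests
from bs4 import BeautifulSoup
from urllib.parse import urljoin, urlparse

S = requests.Session()

def get_input_fields(url):
    """Return the attribute dict of every <input> element on the page."""
    soup = BeautifulSoup(S.get(url).text, "html.parser")
    return [dict(tag.attrs) for tag in soup.find_all("input")]

def filter_urls_for_offsite(base_url, links):
    """Keep only links that resolve to the same host as base_url."""
    base_host = urlparse(base_url).netloc
    absolute = (urljoin(base_url, link) for link in links)
    return [link for link in absolute if urlparse(link).netloc == base_host]
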
Example No. 2
def fuzz_test_normal(url, common_file, vector_file, sensitive_file, slow):
    url_list = get_urls(url, common_file)

    unsanitized_list = set()
    leaked_data = set()
    start = time.perf_counter()
    isDOS = False
    HTTP_code = "none"
    for curr_url in url_list:  # For every Possible url...
        input_fields = utility.get_input_fields(curr_url)  # Get the urls inputs.
        print("-------------------------------------------------------------")
        print("CURRENT URL: " + curr_url)
        print("-------------------------------------------------------------")
        # Try every attack vector from the file against this URL's input fields.
        for vector in open(vector_file):  # for every bad input in the file
            vector = vector.strip()
            payload = payload_generator(input_fields, vector)
            errmessage = "none"
            try:
                response = utility.S.post(curr_url, params=payload, data=payload, timeout=slow)
                response.raise_for_status()  # surface 4xx/5xx replies as HTTPError
                unsanitized_list = unsanitized_list | set(checkSanitization(response, vector))
                leaked_data = leaked_data | set(checkDataLeak(response, sensitive_file))
            except requests.exceptions.HTTPError as error:
                errmessage = str(error.response.status_code)
                if errmessage == "400":
                    errmessage += ": Error, cannot process request"
                if errmessage == "403":
                    errmessage += ": Error, you do not have permission to enter"
                if errmessage == "500":
                    errmessage += ": Error, server encountered unexpected error"
                HTTP_code = errmessage
                continue
            except requests.exceptions.Timeout:
                isDOS = True
                continue

                # isDOS = checkDOS(response)
                # HTTP_code = checkHTTP(response)

        if len(unsanitized_list) > 0:  # If there is unsanitized input
            print("UNSANITIZED INPUT: ")

            for unsanitized_item in unsanitized_list:
                print(unsanitized_item)  # print each item flagged as unsanitized

            print("-------------------------------------------------------------")

        if len(leaked_data) > 0:  # If there is leaked data
            print("LEAKED DATA: ")

            for leaked_item in leaked_data:
                print(leaked_item)  # will print each bit of leaked data.

            print("-------------------------------------------------------------")

        if isDOS:  # If there is a possibility of a DOS we print that it's possible
            print("DENIAL OF SERVICE POSSIBLE ")
            print("-------------------------------------------------------------")

        if HTTP_code != "none":  # If the HTTP code isn't 'ok'
            print("URL CODE: \n" + HTTP_code)  # Print out the code.
            print("-------------------------------------------------------------")

        print()
        print()

        unsanitized_list = set()
        leaked_data = set()

    print("Time Elapsed:" + str(time.clock() - start))
Example No. 3
def fuzz_test_random(url, common_file, vector_file, sensitive_file, slow):

    url_list = get_urls(url, common_file)

    unsanitized_list = set()
    leaked_data = set()
    random_fields = []
    HTTP_code = "none"
    isDOS = False
    curr_url = random.choice(url_list)  # For a random possible URL...
    input_fields = utility.get_input_fields(curr_url)  # Get the urls inputs.

    if len(input_fields) > 0:
        # Keep the submit field (if present) and pick one other field at random.
        if input_fields[-1].get("type") == "submit" and len(input_fields) > 1:
            random_fields.append(input_fields[-1])
            rand_input = random.randint(0, len(input_fields) - 2)
        else:
            rand_input = random.randint(0, len(input_fields) - 1)

        random_fields.append(input_fields[rand_input])

    print("-------------------------------------------------------------")
    print("CURRENT URL: " + curr_url)
    print("-------------------------------------------------------------")
    # Try every attack vector from the file against the chosen input fields.
    for vector in open(vector_file):  # for every bad input in the file
        vector = vector.strip()
        payload = payload_generator(random_fields, vector)
        errmessage = "none"
        try:
            response = utility.S.post(curr_url, params=payload, data=payload, timeout=slow)
            response.raise_for_status()  # surface 4xx/5xx replies as HTTPError
            unsanitized_list = unsanitized_list | set(checkSanitization(response, vector))
            leaked_data = leaked_data | set(checkDataLeak(response, sensitive_file))
        except requests.exceptions.HTTPError as error:
            errmessage = str(error.response.status_code)
            if errmessage == "400":
                errmessage += ": Error, cannot process request"
            if errmessage == "403":
                errmessage += ": Error, you do not have permission to enter"
            if errmessage == "404":
                errmessage += ": Page not found"
            if errmessage == "500":
                errmessage += ": Error, server encountered unexpected error"
            HTTP_code = errmessage
            continue
        except requests.exceptions.Timeout:
            isDOS = True
            continue

    if len(unsanitized_list) > 0:  # If there is unsanitized input
        print("UNSANITIZED INPUT: ")

        for unsanitized_item in unsanitized_list:
            print(unsanitized_item)  # print each item flagged as unsanitized

        print("-------------------------------------------------------------")

    if len(leaked_data) > 0:  # If there is leaked data
        print("LEAKED DATA: ")

        for leaked_item in leaked_data:
            print(leaked_item)  # will print each bit of leaked data.

        print("-------------------------------------------------------------")

    if isDOS:  # If there is a possibility of a DOS we print that it's possible
        print("DENIAL OF SERVICE POSSIBLE ")
        print("-------------------------------------------------------------")

    if HTTP_code != "none":  # If the HTTP code isn't 'ok' (could make this a range)
        print("URL CODE: " + HTTP_code)  # Print out the code.
        print("-------------------------------------------------------------")