def fuzz_discover_helper(url, file):
    """Report discovery information for a single page.

    Prints the page's resolved URL, the reachable URLs guessed from the
    common-words list, every input field (name/type table), and the URL's
    query parameters, then returns the page's links filtered to the same
    site so the caller can continue crawling.

    Args:
        url: URL of the page to inspect.
        file: Path to the common-words list used by ``utility.page_guess``.

    Returns:
        List of on-site links found in the page's HTML.
    """
    # Resolved URL after any redirects (utility.S is the shared session).
    current_url = utility.S.get(url).url
    links = utility.get_links_from_html(url)
    params = utility.parse_url(url)
    inputs = utility.get_input_fields(url)
    good_urls = utility.page_guess(file, url)
    print("-------------------------------------------------------------")
    print("CURRENT URL: " + current_url)
    print("-------------------------------------------------------------")
    print("REACHABLE URLS FROM COMMON WORDS LIST: ")
    for good_url in good_urls:
        print(good_url)
    print("-------------------------------------------------------------")
    print("INPUT FIELDS: ")
    print("| NAME OF FIELD | TYPE OF FIELD |")
    for input_field in inputs:
        name = input_field.get('name')
        field_type = input_field.get('type')
        if name is not None and field_type is not None:
            print("| " + name + " | " + field_type + " |")
        elif field_type is not None:
            print("| NO NAME | " + field_type + " |")
        elif name is not None:
            print("| " + name + " | NO TYPE | ")
        else:
            # BUG FIX: the original else branch concatenated None and raised
            # TypeError whenever a field had neither a name nor a type.
            print("| NO NAME | NO TYPE | ")
    print("-------------------------------------------------------------")
    print("URL PARAMETERS: ")
    print(params)
    print("-------------------------------------------------------------")
    print("")
    print("")
    links = utility.filter_urls_for_offsite(url, links)
    return links
def get_urls(url, common_words):
    """Breadth-first crawl collecting every unique on-site URL.

    Starting from ``url``, follows direct links and pages guessed from the
    common-words list, skipping any URL containing "logout".

    Args:
        url: Base URL to start crawling from.
        common_words: Path to the common-words list for page guessing.

    Returns:
        List of unique, lower-cased URLs discovered (base URL first).

    Fixes over the original:
      * The base URL is stored lower-cased so a mixed-case start URL is not
        re-crawled when it reappears in a link (all membership checks lower
        the candidate).
      * ``utility.page_guess(common_words, url)`` is loop-invariant — it was
        recomputed and re-queued on every visited page; it is now computed
        once up front. The set of URLs discovered is unchanged.
      * Visited-set membership is O(1) via a set instead of O(n) list scans,
        and the queue is consumed by index instead of repeated ``list[1:]``
        copies.
    """
    print("GETTING ALL URLS!")
    unique_urls = [url.lower()]
    seen = set(unique_urls)
    # Guessed pages depend only on the base URL, so queue them once.
    pending = list(utility.get_links_from_html(url))
    pending.extend(utility.page_guess(common_words, url))
    index = 0
    while index < len(pending):
        candidate = pending[index]
        index += 1
        key = candidate.lower()
        if key in seen or "logout" in key:
            continue
        # Crawl the page with its original casing, keep only on-site links.
        direct_urls = utility.get_links_from_html(candidate)
        pending.extend(utility.filter_urls_for_offsite(url, direct_urls))
        seen.add(key)
        unique_urls.append(key)
    return unique_urls
def fuzz_discover(url, commonFile):
    """Run the full discovery pass against a site and print a report.

    Crawls every reachable on-site page (via ``fuzz_discover_helper``),
    skipping logout links, then prints all direct links found, the session
    cookie, and the elapsed time.

    Args:
        url: Base URL of the site to discover.
        commonFile: Path to the common-words list; aborts with a message if
            the file does not exist.

    Returns:
        None. Output is printed; returns early if ``commonFile`` is missing.
    """
    # time.clock() was removed in Python 3.8 — perf_counter() is the
    # supported monotonic replacement for elapsed-time measurement.
    start = time.perf_counter()
    try:
        # Probe the wordlist up front; the context manager closes the handle
        # immediately (the original leaked an open file object).
        with open(commonFile):
            pass
    except FileNotFoundError:
        print("INVALID FILE: " + commonFile + " was not found. Please put a valid file in the "
              "same directory as fuzz.py")
        return
    url_list = [url]
    visited = set()  # O(1) membership; the original scanned a list per URL
    possible_links = set(utility.get_links_from_html(url))
    while len(url_list) > 0:
        current = url_list[0]
        url_list = url_list[1:]
        if current in visited:
            continue
        if "logout" not in current.lower():
            new_urls = fuzz_discover_helper(current, commonFile)
            url_list = url_list + new_urls
            possible_links = possible_links | set(new_urls)
            visited.add(current)
            print()
    print("-------------------------------------------------------------")
    print("ALL DIRECT LINKS ON THE SITE")
    for link in possible_links:
        print(link)
    print("-------------------------------------------------------------")
    print("SESSION COOKIE")
    print(utility.get_cookie(url))
    print("")
    print("")
    print("ELAPSED TIME (seconds): " + str(time.perf_counter() - start))