def narrower(oldParamList, url, include, headers, GET, delay, originalResponse, originalCode, reflections, factors, threadCount):
    """Probe every candidate chunk in parallel and keep the anomalous ones.

    Each chunk in `oldParamList` is sent to quickBruter; chunks whose
    response deviates from the baseline (`originalResponse`/`originalCode`/
    `reflections`/`factors`) are re-split via slicer() and collected.

    Returns the new (narrowed) list of chunks still worth checking.
    """
    newParamList = []
    # Context manager guarantees the worker threads are cleaned up even if
    # a future raises; the original leaked the executor.
    with concurrent.futures.ThreadPoolExecutor(max_workers=threadCount) as threadpool:
        futures = (threadpool.submit(quickBruter, part, originalResponse, originalCode, reflections, factors, include, delay, headers, url, GET) for part in oldParamList)
        for i, future in enumerate(concurrent.futures.as_completed(futures)):
            anomalous = future.result()  # evaluate once instead of twice
            if anomalous:
                newParamList.extend(slicer(anomalous))
            log('%s Processing: %i/%-6i' % (info, i + 1, len(oldParamList)), mode='run')
    return newParamList
def initialize(url, include, headers, GET, delay, paramList, threadCount):
    """Scan `url` for valid HTTP parameters.

    Establishes a baseline by requesting a parameter that certainly does
    not exist, mines the page for candidate names via heuristic(), narrows
    the candidate list with narrower()/unityExtracter(), then verifies each
    survivor individually with quickBruter().

    Returns the list of verified parameter names, or {} when the target
    cannot be stabilized.
    """
    url = stabilize(url)
    if not url:
        # Target couldn't be stabilized; nothing to scan.
        return {}
    firstResponse = requester(url, include, headers, GET, delay)
    # Baseline: behaviour for a random, certainly non-existent parameter.
    originalFuzz = randomString(6)
    data = {originalFuzz: originalFuzz[::-1]}
    data.update(include)
    response = requester(url, data, headers, GET, delay)
    # How many times the bogus value is reflected back in the body.
    reflections = response.text.count(originalFuzz[::-1])
    originalResponse = response.text
    originalCode = response.status_code
    plainText = removeTags(originalResponse)
    # Factors record how stable the page is across the two baseline requests.
    factors = {'sameHTML': False, 'samePlainText': False}
    if len(firstResponse.text) == len(originalResponse):
        factors['sameHTML'] = True
    elif len(removeTags(firstResponse.text)) == len(plainText):
        factors['samePlainText'] = True
    heuristic(firstResponse.text, paramList)  # presumably appends mined names to paramList — confirm
    # (Removed dead code from the original: unused length locals, an unused
    # fuzz/data pair, and a no-op `if not foundParams: pass` branch.)
    toBeChecked = slicer(paramList, 50)
    foundParamsTemp = []
    while True:
        toBeChecked = narrower(toBeChecked, url, include, headers, GET, delay, originalResponse, originalCode, reflections, factors, threadCount)
        toBeChecked = unityExtracter(toBeChecked, foundParamsTemp)
        if not toBeChecked:
            break
    foundParams = []
    for param in foundParamsTemp:
        # Verify each candidate on its own against the baseline.
        exists = quickBruter([param], originalResponse, originalCode, reflections, factors, include, delay, headers, url, GET)
        if exists:
            foundParams.append(param)
    for each in foundParams:
        print('%s?%s' % (url, each))
    return foundParams
def narrower(request, factors, param_groups):
    """Send each parameter group through bruter() and keep anomalous groups.

    Groups whose response deviates from `factors` are re-split via slicer()
    and returned for the next narrowing round. Progress is printed in place
    unless a kill switch (mem.var['kill']) has been raised.
    """
    anomalous_params = []
    # `with` ensures the pool is shut down; the original leaked the executor.
    with ThreadPoolExecutor(max_workers=mem.var['threads']) as threadpool:
        futures = (threadpool.submit(bruter, request, factors, params) for params in param_groups)
        for i, future in enumerate(as_completed(futures)):
            anomalous = future.result()  # evaluate once instead of twice
            if anomalous:
                anomalous_params.extend(slicer(anomalous))
            if not mem.var['kill']:
                print('%s Processing chunks: %i/%-6i' % (info, i + 1, len(param_groups)), end='\r')
    return anomalous_params
def narrower(oldParamList):
    """Narrow candidate chunks by probing each with quick_bruter in parallel.

    NOTE(review): this variant reads `threadCount`, `originalResponse`,
    `originalCode`, `factors`, `include`, `delay`, `headers`, `url` and `GET`
    from the enclosing (module) scope — confirm they are defined before this
    function is called.

    Returns the re-sliced list of chunks that responded anomalously.
    """
    newParamList = []
    # Removed the original's `potenialParameters` counter: it was incremented
    # but never read. Context manager ensures the pool is shut down.
    with concurrent.futures.ThreadPoolExecutor(max_workers=threadCount) as threadpool:
        futures = (threadpool.submit(quick_bruter, part, originalResponse, originalCode, factors, include, delay, headers, url, GET) for part in oldParamList)
        for i, future in enumerate(concurrent.futures.as_completed(futures)):
            anomalous = future.result()  # evaluate once instead of twice
            if anomalous:
                newParamList.extend(slicer(anomalous))
            # \r rewrites the progress line in place
            print('%s Processing: %i/%-6i' % (info, i + 1, len(oldParamList)), end='\r')
    return newParamList
def initialize(request, wordlist):
    """Run a full parameter-discovery scan against one request description.

    Probes the target for stability, fingerprints its baseline behaviour
    with two throw-away parameters, mines the response for candidate names,
    then logic-forces the wordlist in shrinking groups and verifies every
    survivor with bruter(mode='verify').

    Returns the list of confirmed parameter names, or the string 'skipped'
    when the target is unreachable/unstable or a kill switch fires.
    """
    url = request['url']
    # Guard clauses: bail out early on anything we can't scan.
    if not url.startswith('http'):
        print('%s %s is not a valid URL' % (bad, url))
        return 'skipped'
    print('%s Probing the target for stability' % run)
    if not stable_request(url, request['headers']):
        return 'skipped'
    fuzz = randomString(6)
    response_1 = requester(request, {fuzz: fuzz[::-1]})
    print('%s Analysing HTTP response for anamolies' % run)
    fuzz = randomString(6)
    response_2 = requester(request, {fuzz: fuzz[::-1]})
    # requester signals failure by returning a plain string instead of a response
    if type(response_1) == str or type(response_2) == str:
        return 'skipped'
    factors = define(response_1, response_2, fuzz, fuzz[::-1], wordlist)
    print('%s Analysing HTTP response for potential parameter names' % run)
    found = heuristic(response_1.text, wordlist)
    if found:
        num = len(found)
        s = 's' if num > 1 else ''
        print('%s Heuristic scanner found %i parameter%s: %s' % (good, num, s, ', '.join(found)))
    print('%s Logicforcing the URL endpoint' % run)
    param_groups = slicer(populate(wordlist), int(len(wordlist) / args.chunks))
    last_params = []
    while True:
        param_groups = narrower(request, factors, param_groups)
        if mem.var['kill']:
            return 'skipped'
        param_groups = confirm(param_groups, last_params)
        if not param_groups:
            break
    confirmed_params = []
    for param in last_params:
        # Re-test each surviving parameter individually to pin the factor.
        reason = bruter(request, factors, param, mode='verify')
        if reason:
            name = list(param.keys())[0]
            confirmed_params.append(name)
            print('%s name: %s, factor: %s' % (res, name, reason))
    return confirmed_params
# Script-level heuristic phase.
# NOTE(review): `paramList`, `threadCount`, `originalResponse`, `originalCode`,
# `factors`, `include`, `reflections`, `delay`, `headers`, `url` and `GET` are
# all read from the surrounding module scope — confirm they are defined before
# this chunk runs. The `futures` generator created at the end is consumed
# outside this chunk.
print ('%s Performing heuristic level checks' % run)


def narrower(oldParamList):
    """Submit each candidate chunk to quickBruter; keep (re-sliced) the
    chunks whose response deviated from the recorded baseline."""
    newParamList = []
    potenialParameters = 0  # incremented below but never read — presumably leftover
    threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
    futures = (threadpool.submit(quickBruter, part, originalResponse, originalCode, factors, include, delay, headers, url, GET) for part in oldParamList)
    for i, result in enumerate(concurrent.futures.as_completed(futures)):
        if result.result():
            potenialParameters += 1
            newParamList.extend(slicer(result.result()))
        # \r rewrites the progress line in place
        print('%s Processing: %i/%-6i' % (info, i + 1, len(oldParamList)), end='\r')
    return newParamList


# Narrowing loop: keep probing and re-slicing chunks until none remain.
toBeChecked = slicer(paramList, 25)
foundParams = []
while True:
    toBeChecked = narrower(toBeChecked)
    # unityExtracter presumably moves single-element chunks into foundParams — confirm
    toBeChecked = unityExtracter(toBeChecked, foundParams)
    if not toBeChecked:
        break
if foundParams:
    print ('%s Heuristic found %i potenial parameters.' % (info, len(foundParams)))
    paramList = foundParams
finalResult = []
# Fan out final verification of each surviving parameter; the futures are
# consumed by code that follows this chunk.
threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
futures = (threadpool.submit(bruter, param, originalResponse, originalCode, factors, include, reflections, delay, headers, url, GET) for param in foundParams)
def initialize(url, include, headers, GET, delay, paramList, threadCount):
    """Scan a single URL for valid HTTP parameters, printing progress.

    Stabilizes the target, records baseline behaviour for a random
    non-existent parameter, mines the page for candidate names, narrows
    the candidates down with narrower()/unityExtracter(), and verifies
    each survivor individually with quickBruter().

    Returns the list of verified parameter names ({} when the target
    cannot be stabilized).
    """
    url = stabilize(url)
    if not url:
        return {}
    print('%s Analysing the content of the webpage' % run)
    firstResponse = requester(url, include, headers, GET, delay)
    print('%s Analysing behaviour for a non-existent parameter' % run)
    originalFuzz = randomString(6)
    payload = {originalFuzz: originalFuzz[::-1]}
    payload.update(include)
    response = requester(url, payload, headers, GET, delay)
    # number of times the bogus value is echoed back in the body
    reflections = response.text.count(originalFuzz[::-1])
    print('%s Reflections: %s%i%s' % (info, green, reflections, end))
    originalResponse = response.text
    originalCode = response.status_code
    print('%s Response Code: %s%i%s' % (info, green, originalCode, end))
    newLength = len(response.text)
    plainText = removeTags(originalResponse)
    plainTextLength = len(plainText)
    print('%s Content Length: %s%i%s' % (info, green, newLength, end))
    print('%s Plain-text Length: %s%i%s' % (info, green, plainTextLength, end))
    # factors record how stable the page is across the two baseline requests
    factors = {'sameHTML': False, 'samePlainText': False}
    if len(firstResponse.text) == len(originalResponse):
        factors['sameHTML'] = True
    elif len(removeTags(firstResponse.text)) == len(plainText):
        factors['samePlainText'] = True
    print('%s Parsing webpage for potential parameters' % run)
    heuristic(firstResponse.text, paramList)
    fuzz = randomString(8)
    payload = {fuzz: fuzz[::-1]}
    payload.update(include)
    print('%s Performing heuristic level checks' % run)
    toBeChecked = slicer(paramList, 50)
    foundParamsTemp = []
    while True:
        toBeChecked = narrower(toBeChecked, url, include, headers, GET, delay, originalResponse, originalCode, reflections, factors, threadCount)
        toBeChecked = unityExtracter(toBeChecked, foundParamsTemp)
        if not toBeChecked:
            break
    # keep only the candidates that still look anomalous when tested alone
    foundParams = [candidate for candidate in foundParamsTemp
                   if quickBruter([candidate], originalResponse, originalCode, reflections, factors, include, delay, headers, url, GET)]
    print('%s Scan Completed ' % info)
    for name in foundParams:
        print('%s Valid parameter found: %s%s%s' % (good, green, name, end))
    if not foundParams:
        print('%s Unable to verify existence of parameters detected by heuristic.' % bad)
    return foundParams
def initialize(url, include, headers, GET, delay, paramList, threadCount):
    """Scan `url` for valid parameters and return a list of
    {"param": ..., "reason": ...} dicts (empty when nothing is verified).

    NOTE(review): unlike sibling variants, the return value of stabilize()
    is not checked for falsiness here — confirm stabilize() cannot return
    None/'' in this version.
    """
    url = stabilize(url)
    log('%s Analysing the content of the webpage' % run)
    firstResponse = requester(url, include, headers, GET, delay)
    log('%s Analysing behaviour for a non-existent parameter' % run)
    # Baseline: send a random parameter that certainly doesn't exist.
    originalFuzz = randomString(6)
    data = {originalFuzz : originalFuzz[::-1]}
    data.update(include)
    response = requester(url, data, headers, GET, delay)
    # How many times the bogus value is echoed back in the body.
    reflections = response.text.count(originalFuzz[::-1])
    log('%s Reflections: %s%i%s' % (info, green, reflections, end))
    originalResponse = response.text
    originalCode = response.status_code
    log('%s Response Code: %s%i%s' % (info, green, originalCode, end))
    newLength = len(response.text)
    plainText = removeTags(originalResponse)
    plainTextLength = len(plainText)
    log('%s Content Length: %s%i%s' % (info, green, newLength, end))
    log('%s Plain-text Length: %s%i%s' % (info, green, plainTextLength, end))
    # Factors record how stable the page is across the two baseline requests.
    factors = {'sameHTML': False, 'samePlainText': False}
    if len(firstResponse.text) == len(originalResponse):
        factors['sameHTML'] = True
    elif len(removeTags(firstResponse.text)) == len(plainText):
        factors['samePlainText'] = True
    log('%s Parsing webpage for potential parameters' % run)
    heuristic(firstResponse.text, paramList)  # presumably appends mined names to paramList — confirm
    fuzz = randomString(8)  # NOTE(review): fuzz/data below are never used again — leftover?
    data = {fuzz : fuzz[::-1]}
    data.update(include)
    log('%s Performing heuristic level checks' % run)
    toBeChecked = slicer(paramList, 50)
    foundParams = []
    while True:
        # Narrow candidate chunks until only individual parameters remain.
        toBeChecked = narrower(toBeChecked, url, include, headers, GET, delay, originalResponse, originalCode, reflections, factors, threadCount)
        toBeChecked = unityExtracter(toBeChecked, foundParams)
        if not toBeChecked:
            break
    if foundParams:
        log('%s Heuristic found %i potential parameters.' % (info, len(foundParams)))
        paramList = foundParams
    finalResult = []
    jsonResult = []
    # Verify every surviving candidate individually, in parallel.
    threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
    futures = (threadpool.submit(bruter, param, originalResponse, originalCode, factors, include, reflections, delay, headers, url, GET) for param in foundParams)
    for i, result in enumerate(concurrent.futures.as_completed(futures)):
        if result.result():
            finalResult.append(result.result())
        log('%s Progress: %i/%i' % (info, i + 1, len(paramList)), mode='run')
    log('%s Scan Completed ' % info)
    for each in finalResult:
        # bruter() results look like {param: reason} mappings.
        for param, reason in each.items():
            log('%s Valid parameter found: %s%s%s' % (good, green, param, end))
            log('%s Reason: %s' % (info, reason))
            jsonResult.append({"param": param, "reason": reason})
    if not jsonResult:
        log('%s Unable to verify existence of parameters detected by heuristic' % bad)
    return jsonResult
def main():
    """CLI entry point: parse arguments, load the wordlist, and run the
    full parameter-discovery scan against the target URL, optionally
    exporting the results to a JSON file."""
    # NOTE(review): this triple-quoted banner appears to have been collapsed
    # onto a single line during extraction — originally a multi-line ASCII art.
    print('''%s _ /_| _ ' ( |/ /(//) %sv1.3%s _/ %s''' % (green, white, green, end))
    parser = argparse.ArgumentParser()  # defines the parser
    # Arguments that can be supplied
    parser.add_argument('-u', help='target url', dest='url', required=True)
    parser.add_argument('-d', help='request delay', dest='delay', type=int)
    parser.add_argument('-t', help='number of threads', dest='threads', type=int)
    parser.add_argument('-f', help='file path', dest='file')
    parser.add_argument('-o', help='Path for the output file', dest='output_file')
    parser.add_argument('--get', help='use get method', dest='GET', action='store_true')
    parser.add_argument('--post', help='use post method', dest='POST', action='store_true')
    parser.add_argument('--headers', help='http headers prompt', dest='headers', action='store_true')
    parser.add_argument('--include', help='include this data in every request', dest='include')
    args = parser.parse_args()  # arguments to be parsed
    url = args.url
    params_file = args.file or './db/params.txt'  # default wordlist location
    headers = args.headers
    delay = args.delay or 0
    include = args.include or {}
    threadCount = args.threads or 2
    if headers:
        # Interactive prompt for raw headers, parsed into a dict.
        headers = extract_headers(prompt())
    else:
        headers = {}
    if args.GET:
        GET = True
    else:
        GET = False
    include = get_params(include)
    paramList = []
    try:
        # NOTE(review): the file object shadows the `params_file` path variable.
        with open(params_file, 'r') as params_file:
            for line in params_file:
                paramList.append(line.strip('\n'))
    except FileNotFoundError:
        print('%s The specified file doesn\'t exist' % bad)
        quit()
    url = stabilize(url)
    print('%s Analysing the content of the webpage' % run)
    firstResponse = requester(url, include, headers, GET, delay)
    print('%s Now lets see how target deals with a non-existent parameter' % run)
    # Baseline: a random parameter that certainly doesn't exist.
    originalFuzz = random_string(6)
    data = {originalFuzz: originalFuzz[::-1]}
    data.update(include)
    response = requester(url, data, headers, GET, delay)
    # How many times the bogus value is reflected back in the body.
    reflections = response.text.count(originalFuzz[::-1])
    print('%s Reflections: %s%i%s' % (info, green, reflections, end))
    originalResponse = response.text
    originalCode = response.status_code
    print('%s Response Code: %s%i%s' % (info, green, originalCode, end))
    newLength = len(response.text)
    plainText = remove_tags(originalResponse)
    plainTextLength = len(plainText)
    print('%s Content Length: %s%i%s' % (info, green, newLength, end))
    print('%s Plain-text Length: %s%i%s' % (info, green, plainTextLength, end))
    # Factors record how stable the page is across the two baseline requests.
    factors = {'sameHTML': False, 'samePlainText': False}
    if len(firstResponse.text) == len(originalResponse):
        factors['sameHTML'] = True
    elif len(remove_tags(firstResponse.text)) == len(plainText):
        factors['samePlainText'] = True
    print('%s Parsing webpage for potential parameters' % run)
    heuristic(firstResponse.text, paramList)  # presumably appends mined names — confirm
    fuzz = random_string(8)  # NOTE(review): fuzz/data below are never used again — leftover?
    data = {fuzz: fuzz[::-1]}
    data.update(include)
    print('%s Performing heuristic level checks' % run)
    toBeChecked = slicer(paramList, 25)
    foundParams = []
    while True:
        # NOTE(review): this calls the zero-argument narrower(), which in
        # sibling versions reads originalResponse/factors/etc. from module
        # scope — here they are locals of main(); verify this actually works.
        toBeChecked = narrower(toBeChecked)
        toBeChecked = unity_extracter(toBeChecked, foundParams)
        if not toBeChecked:
            break
    if foundParams:
        print('%s Heuristic found %i potential parameters.' % (info, len(foundParams)))
        paramList = foundParams
    finalResult = []
    jsonResult = []
    # Verify every surviving candidate individually, in parallel.
    threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
    futures = (threadpool.submit(bruter, param, originalResponse, originalCode, factors, include, reflections, delay, headers, url, GET) for param in foundParams)
    for i, result in enumerate(concurrent.futures.as_completed(futures)):
        if result.result():
            finalResult.append(result.result())
        print('%s Progress: %i/%i' % (info, i + 1, len(paramList)), end='\r')
    print('%s Scan Completed' % info)
    for each in finalResult:
        # bruter() results look like {param: reason} mappings.
        for param, reason in each.items():
            print('%s Valid parameter found: %s%s%s' % (good, green, param, end))
            print('%s Reason: %s' % (info, reason))
            jsonResult.append({"param": param, "reason": reason})
    # Finally, export to json
    if args.output_file and jsonResult:
        print("Saving output to JSON file in %s" % args.output_file)
        with open(str(args.output_file), 'w') as json_output:
            json.dump(
                {"results": jsonResult},
                json_output,
                sort_keys=True,
                indent=4,
            )