def burp_export(result):
    """Replay every discovered request through a local Burp Suite proxy.

    The proxy address is read from mem.var['burp_port']; TLS verification
    is disabled because Burp intercepts HTTPS with its own certificate.
    """
    burp = 'http://' + mem.var['burp_port']
    proxies = {'http': burp, 'https': burp}
    for url, data in result.items():
        # common keyword arguments shared by all three request shapes
        kwargs = {'headers': data['headers'], 'proxies': proxies, 'verify': False}
        method = data['method']
        if method == 'GET':
            requests.get(url, params=populate(data['params']), **kwargs)
        elif method == 'POST':
            requests.post(url, data=populate(data['params']), **kwargs)
        elif method == 'JSON':
            requests.post(url, json=populate(data['params']), **kwargs)
def clean_export(result):
    """Print each result to stdout: URL for GET, URL<TAB>body for POST/JSON."""
    for url, data in result.items():
        target = url.lstrip('/')
        method = data['method']
        if method == 'JSON':
            print(target + '\t' + json.dumps(populate(data['params'])))
            continue
        qs = create_query_string(data['params'])
        # if the URL already carries a query string, append with '&' instead
        if '?' in target:
            qs = qs.replace('?', '&', 1)
        if method == 'GET':
            print(target + qs)
        elif method == 'POST':
            print(target + '\t' + qs)
def initialize(request, wordlist):
    """
    handles parameter finding process for a single request object
    returns 'skipped' (on error), list on success
    """
    url = request['url']
    # guard clauses: invalid target or unstable endpoint -> bail out early
    if not url.startswith('http'):
        print('%s %s is not a valid URL' % (bad, url))
        return 'skipped'
    print('%s Probing the target for stability' % run)
    if not stable_request(url, request['headers']):
        return 'skipped'
    # two probes with random parameters; their differences define the page
    # "factors" (what stays constant) used later to detect real parameters
    fuzz = random_str(6)
    response_1 = requester(request, {fuzz: fuzz[::-1]})
    print('%s Analysing HTTP response for anomalies' % run)
    fuzz = random_str(6)
    response_2 = requester(request, {fuzz: fuzz[::-1]})
    # requester returns a str on transport errors rather than a Response
    if isinstance(response_1, str) or isinstance(response_2, str):
        return 'skipped'
    factors = define(response_1, response_2, fuzz, fuzz[::-1], wordlist)
    print('%s Analysing HTTP response for potential parameter names' % run)
    found = heuristic(response_1.text, wordlist)
    if found:
        num = len(found)
        s = 's' if num > 1 else ''
        print('%s Heuristic scanner found %i parameter%s: %s' % (good, num, s, ', '.join(found)))
    print('%s Logicforcing the URL endpoint' % run)
    populated = populate(wordlist)
    # split the wordlist into mem.var['chunks'] roughly-equal groups
    param_groups = slicer(populated, len(wordlist) // mem.var['chunks'])
    last_params = []
    # binary-search style narrowing: keep halving groups that change the page
    while True:
        param_groups = narrower(request, factors, param_groups)
        if mem.var['kill']:
            return 'skipped'
        param_groups = confirm(param_groups, last_params)
        if not param_groups:
            break
    confirmed_params = []
    for param in last_params:
        reason = bruter(request, factors, param, mode='verify')
        if reason:
            name = next(iter(param))
            confirmed_params.append(name)
            print('%s name: %s, factor: %s' % (res, name, reason))
    return confirmed_params
def text_export(result):
    """Write results to mem.var['text_file'], one URL (plus body) per line.

    GET results are written as a full URL with query string; POST and JSON
    results as URL<TAB>body.
    """
    with open(mem.var['text_file'], 'w+', encoding='utf8') as out:
        for url, data in result.items():
            target = url.lstrip('/')
            method = data['method']
            if method == 'JSON':
                line = target + '\t' + json.dumps(populate(data['params']))
            else:
                qs = create_query_string(data['params'])
                # URL already has a query string: join with '&' instead of '?'
                if '?' in target:
                    qs = qs.replace('?', '&', 1)
                if method == 'GET':
                    line = target + qs
                elif method == 'POST':
                    line = target + '\t' + qs
                else:
                    continue  # unknown method: original wrote nothing
            out.write(line + '\n')