def burp_import(path):
    # map each URL from a Burp Suite export to the body of its recorded request
    requests = {}
    content = reader(path)
    matches = re.finditer(burp_regex, content)
    for match in matches:
        request = parse_request(match.group(4))
        url = match.group(1)
        requests[url] = request['data']
    return requests
def urls_import(path, method, headers, include):
    # build a request record for every URL listed in the given file
    requests = []
    urls = reader(path, mode='lines')
    for url in urls:
        requests.append({
            'url': url,
            'method': method,
            'headers': headers,
            'data': include
        })
    return requests
def load_modules(phase, **kwargs):
    # run every enabled module whose declared 'phase' matches the current one
    if var['modules']:
        for module in var['modules']:
            content = reader('./modules/' + module + '.py', string=True)
            phaseMatch = re.search(r'[\'"]phase[\'"] : [\'"]%s[\'"]' % phase, content)
            if phaseMatch:
                # import modules.<name> at runtime and call the function of the same name
                function_string = 'modules.' + module + '.' + module
                mod_name, func_name = function_string.rsplit('.', 1)
                mod = importlib.import_module(mod_name)
                func = getattr(mod, func_name)
                func(kwargs)
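# The loader above relies on importlib's runtime import plus a getattr lookup; a
# minimal, self-contained sketch of that pattern, using a standard-library module
# so it runs anywhere. The helper name call_by_name is made up for illustration.
import importlib

def call_by_name(module_path, func_name, *args, **kwargs):
    # import the module at runtime, fetch the function by name, and invoke it
    mod = importlib.import_module(module_path)
    func = getattr(mod, func_name)
    return func(*args, **kwargs)

# e.g. call_by_name('math', 'sqrt', 16) returns 4.0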
def burp_import(path):
    # parse a Burp Suite export and keep only requests whose response was HTML or JSON
    requests = []
    content = reader(path)
    matches = re.finditer(burp_regex, content)
    for match in matches:
        request = parse_request(match.group(4))  # group 4 holds the raw HTTP request
        headers = request['headers']
        if match.group(7) in ('HTML', 'JSON'):
            requests.append({
                'url': match.group(1),
                'method': match.group(2),
                'extension': match.group(3),
                'headers': headers,
                'include': request['data'],
                'code': match.group(5),
                'length': match.group(6),
                'mime': match.group(7)
            })
    return requests
from modes.bruteforcer import bruteforcer
from modes.crawl import crawl
from modes.scan import scan
from modes.singleFuzz import singleFuzz

# --add-headers without a value: prompt the user interactively for headers
if type(args.add_headers) == bool:
    headers = extractHeaders(prompt())
# --add-headers with a value: parse the supplied header string
elif type(args.add_headers) == str:
    headers = extractHeaders(args.add_headers)
else:
    from core.config import headers  # fall back to the default headers

core.config.globalVariables['headers'] = headers
core.config.globalVariables['checkedScripts'] = set()
core.config.globalVariables['checkedForms'] = {}
core.config.globalVariables['definitions'] = json.loads('\n'.join(reader(sys.path[0] + '/db/definitions.json')))

if path:
    paramData = converter(target, target)
elif jsonData:
    headers['Content-type'] = 'application/json'
    paramData = converter(paramData)

if args_file:
    if args_file == 'default':
        payloadList = core.config.payloads
    else:
        payloadList = list(filter(None, reader(args_file)))

seedList = []
if args_seeds:
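# extractHeaders() is used above but not defined in this snippet; a minimal sketch
# of what such a helper plausibly does, assuming it turns newline-separated
# "Name: value" pairs into a dict. Illustrative only, not the project's code.
import re

def extract_headers_sketch(raw):
    headers = {}
    # accept literal "\n" sequences as typed on the command line
    for name, value in re.findall(r'^(.+?):\s*(.*)$', raw.replace('\\n', '\n'), re.MULTILINE):
        headers[name.strip()] = value.strip()
    return headers

# extract_headers_sketch('User-Agent: xsstrike\\nCookie: a=b')
# -> {'User-Agent': 'xsstrike', 'Cookie': 'a=b'}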
def request_import(path):
    parsed = parse_request(reader(path))
    return {parsed['url']: [parsed['data']]}
def urls_import(path):
    return {url: [] for url in reader(path, mode='lines')}
skipDOM = args.skipDOM
verbose = args.verbose
blindXSS = args.blindXSS
core.config.globalVariables = vars(args)

if path:
    paramData = converter(target, target)
elif jsonData:
    paramData = converter(paramData)

if args_file:
    if args_file == 'default':
        payloadList = core.config.payloads
    else:
        payloadList = list(filter(None, reader(args_file)))

seedList = []
if args_seeds:
    seedList = list(filter(None, reader(args_seeds)))

# use the base64 encoder when --encode base64 is supplied
encoding = base64 if encode and encode == 'base64' else False

if not proxy:
    core.config.proxies = {}

if update:  # if the user has supplied --update argument
    updater()
    quit()  # quitting because files have been changed

if not target and not args_seeds:  # if the user hasn't supplied a url
import json

from core.requester import requester
from core.utils import reader, write_json

file = './db/vulners_cache.json'
database = json.loads(reader(file))


def vulners(software, version, cpe=False):
    # nothing to look up without both a product name and a version
    if not (software and version):
        return False
    # serve the verdict from the local cache if this pair was queried before
    cached = query_cache(software, version, cpe)
    if cached:
        return cached == 'vulnerable'
    kind = 'cpe' if cpe else 'software'
    data = '{"software": "%s", "version": "%s", "type" : "%s", "maxVulnerabilities" : %i}' % (
        software, version, kind, 1)
    response = requester('https://vulners.com/api/v3/burp/software/', get=False, data=data).text
    cache(software, version, response, cpe)
    if 'Nothing found for Burpsuite search request' in response:
        return False
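# query_cache() and cache() are referenced above but not defined in this snippet;
# a rough sketch of how they could work against the JSON cache loaded into
# `database`. The key format, verdict strings and write_json(file, data) signature
# are assumptions made for illustration, not the project's actual implementation.
def query_cache_sketch(software, version, cpe=False):
    key = '%s:%s:%s' % ('cpe' if cpe else 'software', software, version)
    return database.get(key)  # 'vulnerable', 'not-vulnerable' or None if unseen

def cache_sketch(software, version, response, cpe=False):
    key = '%s:%s:%s' % ('cpe' if cpe else 'software', software, version)
    verdict = 'not-vulnerable' if 'Nothing found for Burpsuite search request' in response else 'vulnerable'
    database[key] = verdict
    write_json(file, database)  # persist the updated verdicts to the cache file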
    action='store_true')
parser.add_argument('--include', help='include this data in every request',
    dest='include', default={})
args = parser.parse_args()  # arguments to be parsed

mem.var = vars(args)
mem.var['method'] = mem.var['method'].upper()

# stable mode and manual delays are incompatible with multithreading
if mem.var['stable'] or mem.var['delay']:
    mem.var['threads'] = 1

try:
    wordlist = set(reader(args.wordlist, mode='lines'))
    if mem.var['passive']:
        host = mem.var['passive']
        if host == '-':
            host = urlparse(args.url).netloc
        print('%s Collecting parameter names from passive sources for %s, it may take a while' % (run, host))
        passive_params = fetch_params(host)
        wordlist.update(passive_params)
        print('%s Collected %s parameters, added to the wordlist' % (info, len(passive_params)))
    wordlist = list(wordlist)
except FileNotFoundError:
    exit('%s The specified file for parameters doesn\'t exist' % bad)
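# fetch_params() is used above but not shown; a hedged sketch of one way a passive
# source could be queried, pulling parameter names out of URLs archived by the
# Wayback Machine CDX API. The endpoint usage and the requests dependency are
# assumptions for illustration, not necessarily how the project does it.
import requests
from urllib.parse import urlparse, parse_qs

def fetch_params_sketch(host):
    response = requests.get(
        'http://web.archive.org/cdx/search/cdx',
        params={'url': host + '/*', 'fl': 'original', 'collapse': 'urlkey'},
        timeout=30,
    )
    params = set()
    for archived_url in response.text.splitlines():
        params.update(parse_qs(urlparse(archived_url).query).keys())
    return params

# fetch_params_sketch('example.com') -> parameter names seen in archived URLs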
def request_import(path):
    return parse_request(reader(path))
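# parse_request() is the parser the importers above depend on, but it is not shown
# here; a minimal sketch of such a function, assuming it receives a raw HTTP
# request as text and returns its method, path, headers and body. Illustrative
# only, not the project's actual implementation.
def parse_request_sketch(raw):
    head, _, body = raw.replace('\r\n', '\n').partition('\n\n')
    lines = head.splitlines()
    method, path, _ = lines[0].split(' ', 2)  # e.g. "POST /search HTTP/1.1"
    headers = {}
    for line in lines[1:]:
        name, _, value = line.partition(':')
        headers[name.strip()] = value.strip()
    return {'method': method, 'url': path, 'headers': headers, 'data': body}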