# Assumed imports shared by the snippets below (the project-internal helpers --
# requester, htmlParser, filterChecker, generator, checker, dom, wafDetector,
# arjun, fuzzer, browserEngine, getUrl, getParams, verboseOutput, xsschecker,
# the colour constants, que, minEfficiency and logger -- come from the
# project's own modules; exact paths vary across the versions shown and are
# an assumption):
import copy
import re

import requests
from urllib.parse import unquote, urlparse


def crawl(scheme, host, main_url, form, blindXSS, blindPayload, headers, delay, timeout, encoding):
    if form:
        for each in form.values():
            url = each['action']
            if url:
                # startswith() checks whether the string begins with the given
                # substring and returns True or False accordingly
                if url.startswith(main_url):
                    pass
                elif url.startswith('//') and url[2:].startswith(host):
                    # url[2:] slices off the leading '//'; scheme is the
                    # default protocol (http/https)
                    url = scheme + '://' + url[2:]
                elif url.startswith('/'):
                    url = scheme + '://' + host + url
                elif re.match(r'\w', url[0]):
                    # r'\w' matches a letter, digit, underscore, or other word
                    # character (including CJK and other non-Latin scripts)
                    url = scheme + '://' + host + '/' + url
                if url not in core.config.globalVariables['checkedForms']:
                    core.config.globalVariables['checkedForms'][url] = []
                method = each['method']
                GET = True if method == 'get' else False
                inputs = each['inputs']
                paramData = {}
                for one in inputs:
                    paramData[one['name']] = one['value']
                for paramName in paramData.keys():
                    if paramName not in core.config.globalVariables['checkedForms'][url]:
                        core.config.globalVariables['checkedForms'][url].append(paramName)
                        paramsCopy = copy.deepcopy(paramData)
                        paramsCopy[paramName] = xsschecker
                        response = requester(url, paramsCopy, headers, GET, delay, timeout)
                        occurences = htmlParser(response, encoding)
                        positions = occurences.keys()
                        # verification works by checking whether generator()
                        # actually produced a payload
                        efficiencies = filterChecker(
                            url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
                        vectors = generator(occurences, response.text)
                        if vectors:
                            for confidence, vects in vectors.items():
                                try:
                                    payload = list(vects)[0]
                                    logger.vuln('Vulnerable webpage: %s%s%s' % (green, url, end))
                                    logger.vuln('Vector for %s%s%s: %s' % (green, paramName, end, payload))
                                    break
                                except IndexError:
                                    pass
                        if blindXSS and blindPayload:
                            paramsCopy[paramName] = blindPayload
                            requester(url, paramsCopy, headers, GET, delay, timeout)
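# A minimal sketch of the `form` structure that the crawl() variants consume,
# inferred from the field accesses above; the keys mirror what the project's
# HTML parser emits, and the concrete values are made up for illustration:
example_form = {
    0: {
        'action': '/search',    # resolved against scheme/host/main_url above
        'method': 'get',        # anything other than 'get' is sent as POST
        'inputs': [
            {'name': 'q', 'value': ''},
            {'name': 'lang', 'value': 'en'},
        ],
    },
}
# Hypothetical invocation; every argument value here is illustrative only:
# crawl('http', 'example.com', 'http://example.com', example_form,
#       blindXSS=False, blindPayload='', headers={}, delay=0, timeout=10,
#       encoding=None)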
def crawl(scheme, host, main_url, form, domURL, verbose, blindXSS, blindPayload, headers, delay, timeout, skipDOM, encoding):
    if domURL and not skipDOM:
        response = requester(domURL, {}, headers, True, delay, timeout).text
        highlighted = dom(response)
        if highlighted:
            print('%s Potentially vulnerable objects found at %s' % (good, domURL))
            print(red + ('-' * 60) + end)
            for line in highlighted:
                print(line)
            print(red + ('-' * 60) + end)
    if form:
        for each in form.values():
            url = each['action']
            if url:
                if url.startswith(main_url):
                    pass
                elif url.startswith('//') and url[2:].startswith(host):
                    url = scheme + '://' + url[2:]
                elif url.startswith('/'):
                    url = scheme + '://' + host + url
                elif re.match(r'\w', url[0]):
                    url = scheme + '://' + host + '/' + url
                method = each['method']
                GET = True if method == 'get' else False
                inputs = each['inputs']
                paramData = {}
                for one in inputs:
                    paramData[one['name']] = one['value']
                for paramName in paramData.keys():
                    paramsCopy = copy.deepcopy(paramData)
                    paramsCopy[paramName] = xsschecker
                    response = requester(url, paramsCopy, headers, GET, delay, timeout)
                    parsedResponse = htmlParser(response, encoding)
                    occurences = parsedResponse[0]
                    positions = parsedResponse[1]
                    efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
                    vectors = generator(occurences, response.text)
                    if vectors:
                        for confidence, vects in vectors.items():
                            try:
                                payload = list(vects)[0]
                                print('%s Vulnerable webpage: %s%s%s' % (good, green, url, end))
                                print('%s Vector for %s%s%s: %s' % (good, green, paramName, end, payload))
                                break
                            except IndexError:
                                pass
                    if blindXSS and blindPayload:
                        paramsCopy[paramName] = blindPayload
                        requester(url, paramsCopy, headers, GET, delay, timeout)
def crawl(scheme, host, main_url, form, blindXSS, blindPayload, headers, delay, timeout, encoding):
    if form:
        for each in form.values():
            url = each['action']
            if url:
                if url.startswith(main_url):
                    pass
                elif url.startswith('//') and url[2:].startswith(host):
                    url = scheme + '://' + url[2:]
                elif url.startswith('/'):
                    url = scheme + '://' + host + url
                elif re.match(r'\w', url[0]):
                    url = scheme + '://' + host + '/' + url
                if url not in core.config.globalVariables['checkedForms']:
                    core.config.globalVariables['checkedForms'][url] = []
                method = each['method']
                GET = True if method == 'get' else False
                inputs = each['inputs']
                paramData = {}
                for one in inputs:
                    paramData[one['name']] = one['value']
                for paramName in paramData.keys():
                    if paramName not in core.config.globalVariables['checkedForms'][url]:
                        core.config.globalVariables['checkedForms'][url].append(paramName)
                        paramsCopy = copy.deepcopy(paramData)
                        paramsCopy[paramName] = xsschecker
                        response = requester(url, paramsCopy, headers, GET, delay, timeout)
                        parsedResponse = htmlParser(response, encoding)
                        occurences = parsedResponse[0]
                        positions = parsedResponse[1]
                        efficiencies = filterChecker(
                            url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
                        vectors = generator(occurences, response.text)
                        if vectors:
                            for confidence, vects in vectors.items():
                                try:
                                    payload = list(vects)[0]
                                    logger.vuln('Vulnerable webpage: %s%s%s' % (green, url, end))
                                    logger.vuln('Vector for %s%s%s: %s' % (green, paramName, end, payload))
                                    break
                                except IndexError:
                                    pass
                        if blindXSS and blindPayload:
                            paramsCopy[paramName] = blindPayload
                            requester(url, paramsCopy, headers, GET, delay, timeout)
def crawl(scheme, host, main_url, form, domURL, verbose, blindXSS, blindPayload, headers, delay, timeout, skipDOM, encoding):
    if domURL and not skipDOM:
        response = requester(domURL, {}, headers, True, delay, timeout).text
        highlighted = dom(response)
        if highlighted:
            print('%s Potentially vulnerable objects found at %s' % (good, domURL))
            print(red + ('-' * 60) + end)
            for line in highlighted:
                print(line)
            print(red + ('-' * 60) + end)
    if form:
        for each in form.values():
            url = each['action']
            if url:
                if url.startswith(main_url):
                    pass
                elif url.startswith('//') and url[2:].startswith(host):
                    url = scheme + '://' + url[2:]
                elif url.startswith('/'):
                    url = scheme + '://' + host + url
                elif re.match(r'\w', url[0]):
                    url = scheme + '://' + host + '/' + url
                method = each['method']
                GET = True if method == 'get' else False
                inputs = each['inputs']
                paramData = {}
                for one in inputs:
                    paramData[one['name']] = one['value']
                for paramName in paramData.keys():
                    paramsCopy = copy.deepcopy(paramData)
                    paramsCopy[paramName] = xsschecker
                    response = requester(url, paramsCopy, headers, GET, delay, timeout)
                    parsedResponse = htmlParser(response, encoding)
                    occurences = parsedResponse[0]
                    positions = parsedResponse[1]
                    efficiencies = filterChecker(
                        url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
                    vectors = generator(occurences, response.text)
                    if vectors:
                        for confidence, vects in vectors.items():
                            try:
                                payload = list(vects)[0]
                                print('%s Vulnerable webpage: %s%s%s' % (good, green, url, end))
                                print('%s Vector for %s%s%s: %s' % (good, green, paramName, end, payload))
                                break
                            except IndexError:
                                pass
                    if blindXSS and blindPayload:
                        paramsCopy[paramName] = blindPayload
                        requester(url, paramsCopy, headers, GET, delay, timeout)
def multiTargets(scheme, host, main_url, form):
    # NOTE: target, skipDOM, headers and delay are not parameters here; in
    # this version they are module-level globals.
    signatures = set()
    for each in form.values():
        url = each['action']
        if url:
            if url.startswith(main_url):
                pass
            elif url.startswith('//') and url[2:].startswith(host):
                url = scheme + '://' + url[2:]
            elif url.startswith('/'):
                url = scheme + '://' + host + url
            elif re.match(r'\w', url[0]):
                url = scheme + '://' + host + '/' + url
            method = each['method']
            if method == 'get':
                GET = True
            else:
                GET = False
            inputs = each['inputs']
            paramData = {}
            for one in inputs:
                paramData[one['name']] = one['value']
            if target not in ''.join(signatures) and not skipDOM:
                response = requests.get(target).text
                if dom(response, silent=True):
                    print('%s Potentially vulnerable objects found' % good)
            for paramName in paramData.keys():
                signature = url + paramName
                if signature not in signatures:
                    signatures.add(signature)
                    print('%s Scanning %s%s%s, %s' % (run, green, url, end, paramName))
                    paramsCopy = copy.deepcopy(paramData)
                    paramsCopy[paramName] = xsschecker
                    response = requester(url, paramsCopy, headers, GET, delay).text
                    try:
                        occurences = htmlParser(response)
                        efficiencies = filterChecker(
                            url, paramsCopy, headers, GET, delay, occurences)
                        vectors = generator(occurences, response)
                        if vectors:
                            for confidence, vects in vectors.items():
                                try:
                                    print('%s Vector for %s: %s' % (good, paramName, list(vects)[0]))
                                    break
                                except IndexError:
                                    pass
                    except Exception as e:
                        print('%s Error: %s' % (bad, e))
def multiTargets(scheme, host, main_url, form, domURL):
    # NOTE: skipDOM, headers, delay and timeout are module-level globals in
    # this version.
    signatures = set()
    if domURL and not skipDOM:
        response = requests.get(domURL).text
        if dom(response, silent=True):
            print('%s Potentially vulnerable objects found at %s' % (good, domURL))
    if form:
        for each in form.values():
            url = each['action']
            if url:
                if url.startswith(main_url):
                    pass
                elif url.startswith('//') and url[2:].startswith(host):
                    url = scheme + '://' + url[2:]
                elif url.startswith('/'):
                    url = scheme + '://' + host + url
                elif re.match(r'\w', url[0]):
                    url = scheme + '://' + host + '/' + url
                method = each['method']
                if method == 'get':
                    GET = True
                else:
                    GET = False
                inputs = each['inputs']
                paramData = {}
                for one in inputs:
                    paramData[one['name']] = one['value']
                for paramName in paramData.keys():
                    paramsCopy = copy.deepcopy(paramData)
                    paramsCopy[paramName] = xsschecker
                    response = requester(url, paramsCopy, headers, GET, delay, timeout)
                    parsedResponse = htmlParser(response)
                    occurences = parsedResponse[0]
                    positions = parsedResponse[1]
                    efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences, timeout)
                    vectors = generator(occurences, response.text)
                    if vectors:
                        for confidence, vects in vectors.items():
                            try:
                                payload = list(vects)[0]
                                print('%s Vulnerable webpage: %s%s%s' % (good, green, url, end))
                                print('%s Vector for %s%s%s: %s' % (good, green, paramName, end, payload))
                                break
                            except IndexError:
                                pass
def scan(target, paramData, encoding, headers, delay, timeout, skipDOM, find, skip):
    GET, POST = (False, True) if paramData else (True, False)
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            response = requester('https://' + target, {}, headers, GET, delay, timeout)
            target = 'https://' + target
        except:
            target = 'http://' + target
    logger.debug('Scan target: {}'.format(target))
    response = requester(target, {}, headers, GET, delay, timeout).text
    if not skipDOM:
        logger.run('Checking for DOM vulnerabilities')
        highlighted = dom(response)
        if highlighted:
            logger.good('Potentially vulnerable objects found')
            logger.red_line(level='good')
            for line in highlighted:
                logger.no_format(line, level='good')
            logger.red_line(level='good')
    host = urlparse(target).netloc  # Extracts host out of the url
    logger.debug('Host to scan: {}'.format(host))
    url = getUrl(target, GET)
    logger.debug('Url to scan: {}'.format(url))
    params = getParams(target, paramData, GET)
    logger.debug_json('Scan parameters:', params)
    if find:
        params = arjun(url, GET, headers, delay, timeout)
    if not params:
        logger.error('No parameters to test.')
        quit()
    WAF = wafDetector(url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
    if WAF:
        logger.error('WAF detected: %s%s%s' % (green, WAF, end))
    else:
        logger.good('WAF Status: %sOffline%s' % (green, end))
    for paramName in params.keys():
        paramsCopy = copy.deepcopy(params)
        logger.info('Testing parameter: %s' % paramName)
        if encoding:
            paramsCopy[paramName] = encoding(xsschecker)
        else:
            paramsCopy[paramName] = xsschecker
        response = requester(url, paramsCopy, headers, GET, delay, timeout)
        parsedResponse = htmlParser(response, encoding)
        occurences = parsedResponse[0]
        logger.debug('Scan occurences: {}'.format(occurences))
        positions = parsedResponse[1]
        logger.debug('Scan positions: {}'.format(positions))
        if not occurences:
            logger.error('No reflection found')
            continue
        else:
            logger.info('Reflections found: %i' % len(occurences))
        logger.run('Analysing reflections')
        efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
        logger.debug('Scan efficiencies: {}'.format(efficiencies))
        logger.run('Generating payloads')
        vectors = generator(occurences, response.text)
        total = 0
        for v in vectors.values():
            total += len(v)
        if total == 0:
            logger.error('No vectors were crafted.')
            continue
        logger.info('Payloads generated: %i' % total)
        progress = 0
        for confidence, vects in vectors.items():
            for vect in vects:
                if core.config.globalVariables['path']:
                    vect = vect.replace('/', '%2F')
                loggerVector = vect
                progress += 1
                logger.run('Progress: %i/%i\r' % (progress, total))
                if confidence == 10:
                    if not GET:
                        vect = unquote(vect)
                    efficiencies = checker(url, paramsCopy, headers, GET, delay, vect, positions, timeout, encoding)
                    if not efficiencies:
                        for i in range(len(occurences)):
                            efficiencies.append(0)
                    bestEfficiency = max(efficiencies)
                    if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
                        logger.red_line()
                        logger.good('Payload: %s' % loggerVector)
                        logger.info('Efficiency: %i' % bestEfficiency)
                        logger.info('Confidence: %i' % confidence)
                        if not skip:
                            choice = input('%s Would you like to continue scanning? [y/N] ' % que).lower()
                            if choice != 'y':
                                quit()
                    elif bestEfficiency > minEfficiency:
                        logger.red_line()
                        logger.good('Payload: %s' % loggerVector)
                        logger.info('Efficiency: %i' % bestEfficiency)
                        logger.info('Confidence: %i' % confidence)
                else:
                    if re.search(r'<(a|d3|details)|lt;(a|d3|details)', vect.lower()):
                        continue
                    vect = unquote(vect)
                    if encoding:
                        paramsCopy[paramName] = encoding(vect)
                    else:
                        paramsCopy[paramName] = vect
                    response = requester(url, paramsCopy, headers, GET, delay, timeout).text
                    success = browserEngine(response)
                    if success:
                        logger.red_line()
                        logger.good('Payload: %s' % loggerVector)
                        logger.info('Efficiency: %i' % 100)
                        logger.info('Confidence: %i' % 10)
                        if not skip:
                            choice = input('%s Would you like to continue scanning? [y/N] ' % que).lower()
                            if choice != 'y':
                                quit()
    logger.no_format('')
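# Hypothetical invocation of the scan() variant above; every concrete value is
# illustrative. paramData switches scanning to POST, encoding (if given) is a
# callable applied to each payload, find=True delegates parameter discovery to
# arjun(), and skip=True suppresses the interactive continue-prompt:
# scan('http://example.com/search?q=query', paramData=None, encoding=None,
#      headers={'User-Agent': 'Mozilla/5.0'}, delay=0, timeout=10,
#      skipDOM=False, find=False, skip=True)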
def singleTarget(target, paramData, verbose, encoding):
    # NOTE: headers, delay, timeout, skipDOM, fuzz, args and minEfficiency are
    # module-level globals in this version.
    if paramData:
        GET, POST = False, True
    else:
        GET, POST = True, False
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            response = requester('https://' + target, {}, headers, GET, delay, timeout)
            target = 'https://' + target
        except:
            target = 'http://' + target
    response = requester(target, {}, headers, GET, delay, timeout).text
    if not skipDOM:
        print('%s Checking for DOM vulnerabilities' % run)
        highlighted = dom(response)
        if highlighted:
            print('%s Potentially vulnerable objects found' % good)
            print(red + ('-' * 60) + end)
            for line in highlighted:
                print(line)
            print(red + ('-' * 60) + end)
    host = urlparse(target).netloc  # Extracts host out of the url
    verboseOutput(host, 'host', verbose)
    url = getUrl(target, GET)
    verboseOutput(url, 'url', verbose)
    params = getParams(target, paramData, GET)
    verboseOutput(params, 'params', verbose)
    if args.find:
        params = arjun(url, GET, headers, delay, timeout)
    if not params:
        quit()
    WAF = wafDetector(url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
    if WAF:
        print('%s WAF detected: %s%s%s' % (bad, green, WAF, end))
    else:
        print('%s WAF Status: %sOffline%s' % (good, green, end))
    if fuzz:
        for paramName in params.keys():
            print('%s Fuzzing parameter: %s' % (info, paramName))
            paramsCopy = copy.deepcopy(params)
            paramsCopy[paramName] = xsschecker
            fuzzer(url, paramsCopy, headers, GET, delay, timeout, WAF, encoding)
        quit()
    for paramName in params.keys():
        paramsCopy = copy.deepcopy(params)
        print('%s Testing parameter: %s' % (info, paramName))
        if encoding:
            paramsCopy[paramName] = encoding(xsschecker)
        else:
            paramsCopy[paramName] = xsschecker
        response = requester(url, paramsCopy, headers, GET, delay, timeout)
        parsedResponse = htmlParser(response, encoding)
        occurences = parsedResponse[0]
        verboseOutput(occurences, 'occurences', verbose)
        positions = parsedResponse[1]
        verboseOutput(positions, 'positions', verbose)
        if not occurences:
            print('%s No reflection found' % bad)
            continue
        else:
            print('%s Reflections found: %s' % (info, len(occurences)))
        print('%s Analysing reflections' % run)
        efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
        verboseOutput(efficiencies, 'efficiencies', verbose)
        print('%s Generating payloads' % run)
        vectors = generator(occurences, response.text)
        verboseOutput(vectors, 'vectors', verbose)
        total = 0
        for v in vectors.values():
            total += len(v)
        if total == 0:
            print('%s No vectors were crafted' % bad)
            continue
        print('%s Payloads generated: %i' % (info, total))
        progress = 0
        for confidence, vects in vectors.items():
            for vect in vects:
                progress += 1
                print('%s Payloads tried [%i/%i]' % (run, progress, total), end='\r')
                if not GET:
                    vect = unquote(vect)
                efficiencies = checker(url, paramsCopy, headers, GET, delay, vect, positions, timeout, encoding)
                if not efficiencies:
                    for i in range(len(occurences)):
                        efficiencies.append(0)
                bestEfficiency = max(efficiencies)
                if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
                    print(('%s-%s' % (red, end)) * 60)
                    print('%s Payload: %s' % (good, vect))
                    print('%s Efficiency: %i' % (info, bestEfficiency))
                    print('%s Confidence: %i' % (info, confidence))
                    if not args.skip:
                        choice = input('%s Would you like to continue scanning? [y/N] ' % que).lower()
                        if choice != 'y':
                            quit()
                elif bestEfficiency > minEfficiency:
                    print(('%s-%s' % (red, end)) * 60)
                    print('%s Payload: %s' % (good, vect))
                    print('%s Efficiency: %i' % (info, bestEfficiency))
                    print('%s Confidence: %i' % (info, confidence))
def singleTarget(target, paramData):
    # NOTE: headers, delay, skipDOM, fuzz, args, skipPOC and minEfficiency are
    # module-level globals in this version.
    if paramData:
        GET, POST = False, True
    else:
        GET, POST = True, False
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            response = requests.get('https://' + target)
            target = 'https://' + target
        except:
            target = 'http://' + target
    try:
        response = requests.get(target).text
        if not skipDOM:
            print('%s Checking for DOM vulnerabilities' % run)
            if dom(response):
                print('%s Potentially vulnerable objects found' % good)
    except Exception as e:
        print('%s Unable to connect to the target' % bad)
        print('%s Error: %s' % (bad, e))
        quit()
    host = urlparse(target).netloc  # Extracts host out of the url
    url = getUrl(target, paramData, GET)
    params = getParams(target, paramData, GET)
    if args.find:
        params = arjun(url, GET, headers, delay)
    if not params:
        quit()
    WAF = wafDetector(url, {list(params.keys())[0]: xsschecker}, headers, GET, delay)
    if WAF:
        print('%s WAF detected: %s%s%s' % (bad, green, WAF, end))
    else:
        print('%s WAF Status: %sOffline%s' % (good, green, end))
    if fuzz:
        for paramName in params.keys():
            print('%s Fuzzing parameter: %s' % (info, paramName))
            paramsCopy = copy.deepcopy(params)
            paramsCopy[paramName] = xsschecker
            fuzzer(url, paramsCopy, headers, GET, delay, WAF)
        quit()
    for paramName in params.keys():
        paramsCopy = copy.deepcopy(params)
        print('%s Testing parameter: %s' % (info, paramName))
        paramsCopy[paramName] = xsschecker
        response = requester(url, paramsCopy, headers, GET, delay).text
        occurences = htmlParser(response)
        if not occurences:
            print('%s No reflection found' % bad)
            continue
        else:
            print('%s Reflections found: %s' % (info, len(occurences)))
        print('%s Analysing reflections' % run)
        efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences)
        print('%s Generating payloads' % run)
        vectors = generator(occurences, response)
        total = 0
        for v in vectors.values():
            total += len(v)
        if total == 0:
            print('%s No vectors were crafted' % bad)
            continue
        print('%s Payloads generated: %i' % (info, total))
        progress = 0
        for confidence, vects in vectors.items():
            for vect in vects:
                progress += 1
                print('%s Payloads tried [%i/%i]' % (run, progress, total), end='\r')
                if not GET:
                    vect = unquote(vect)
                efficiencies = checker(url, paramsCopy, headers, GET, delay, vect)
                if not efficiencies:
                    for i in range(len(occurences)):
                        efficiencies.append(0)
                bestEfficiency = max(efficiencies)
                if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
                    print(('%s-%s' % (red, end)) * 60)
                    print('%s Payload: %s' % (good, vect))
                    print('%s Efficiency: %i' % (info, bestEfficiency))
                    print('%s Confidence: %i' % (info, confidence))
                    if GET:
                        flatParams = flattenParams(paramName, paramsCopy, vect)
                        if '"' not in flatParams and '}' not in flatParams and not skipPOC:
                            webbrowser.open(url + flatParams)
                    choice = input('%s Would you like to continue scanning? [y/N] ' % que).lower()
                    if choice != 'y':
                        quit()
                elif bestEfficiency > minEfficiency:
                    print(('%s-%s' % (red, end)) * 60)
                    print('%s Payload: %s' % (good, vect))
                    print('%s Efficiency: %i' % (info, bestEfficiency))
                    print('%s Confidence: %i' % (info, confidence))
def scan(target, form, paramData, encoding, headers, delay, timeout, skipDOM, find, skip):
    GET, POST = (True, False)
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            response = requester('https://' + target, {}, headers, GET, delay, timeout)
            target = 'https://' + target
        except:
            target = 'http://' + target
    response = requester(target, {}, headers, GET, delay, timeout).text
    url = target + form + "/"
    params = {p: "" for p in paramData}
    vuln_params = {}
    for paramName in params.keys():
        logger.info(f"Target URL: {target + form}")
        logger.info(f"Target Param: {paramName}")
        paramsCopy = copy.deepcopy(params)
        paramsCopy[paramName] = xsschecker
        response = requester(url, paramsCopy, headers, GET, delay, timeout)
        logger.debug(f"Response: {response.text}")
        occurences = htmlParser(response, encoding)
        positions = occurences.keys()
        logger.debug('Scan occurences: {}'.format(occurences))
        if not occurences:
            logger.error('No reflection found')
            continue
        else:
            logger.info(' - Reflections found: %i' % len(occurences))
        efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
        logger.debug('Scan efficiencies: {}'.format(efficiencies))
        vectors = generator(occurences, response.text)
        total = 0
        for v in vectors.values():
            total += len(v)
        if total == 0:
            logger.error('No vectors were crafted.')
            continue
        # logger.info('Payloads generated: %i' % total)
        progress = 0
        payloads_used = []
        for confidence, vects in vectors.items():
            for vect in vects:
                loggerVector = vect
                progress += 1
                logger.run(f'Analysing Reflections - Progress: {progress}/{total}\r')
                if not GET:
                    vect = unquote(vect)
                efficiencies = checker(url, paramsCopy, headers, GET, delay, vect, positions, timeout, encoding)
                if not efficiencies:
                    for i in range(len(occurences)):
                        efficiencies.append(0)
                bestEfficiency = max(efficiencies)
                if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
                    payloads_used.append(loggerVector)
        logger.red_line(level='good')
        vuln_params.update({paramName: payloads_used})
    # logger.info(f"{url, vuln_params}")
    return url, vuln_params
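# Hypothetical call to the fork's scan() directly above; it returns the tested
# URL plus a dict mapping each parameter name to the payloads whose efficiency
# reached ~100%. The target, form path and parameter names are made up:
# url, vuln_params = scan('http://example.com', '/login', ['username', 'password'],
#                         None, {}, 0, 10, skipDOM=True, skip=True)
# for param, payloads in vuln_params.items():
#     print('%s -> %s' % (param, payloads))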
def crawl(
    scheme,
    host,
    main_url,
    form,
    blindXSS,
    blindPayload,
    headers,
    delay,
    timeout,
    encoding,
):
    if form:
        for each in form.values():
            url = each["action"]
            if url:
                if url.startswith(main_url):
                    pass
                elif url.startswith("//") and url[2:].startswith(host):
                    url = scheme + "://" + url[2:]
                elif url.startswith("/"):
                    url = scheme + "://" + host + url
                elif re.match(r"\w", url[0]):
                    url = scheme + "://" + host + "/" + url
                if url not in core.config.globalVariables["checkedForms"]:
                    core.config.globalVariables["checkedForms"][url] = []
                method = each["method"]
                GET = True if method == "get" else False
                inputs = each["inputs"]
                paramData = {}
                for one in inputs:
                    paramData[one["name"]] = one["value"]
                for paramName in paramData.keys():
                    if paramName not in core.config.globalVariables["checkedForms"][url]:
                        core.config.globalVariables["checkedForms"][url].append(paramName)
                        paramsCopy = copy.deepcopy(paramData)
                        paramsCopy[paramName] = xsschecker
                        response = requester(url, paramsCopy, headers, GET, delay, timeout)
                        occurences = htmlParser(response, encoding)
                        positions = occurences.keys()
                        efficiencies = filterChecker(
                            url,
                            paramsCopy,
                            headers,
                            GET,
                            delay,
                            occurences,
                            timeout,
                            encoding,
                        )
                        vectors = generator(occurences, response.text)
                        if vectors:
                            for confidence, vects in vectors.items():
                                try:
                                    payload = list(vects)[0]
                                    logger.vuln("Vulnerable webpage: %s%s%s" % (green, url, end))
                                    logger.vuln("Vector for %s%s%s: %s" % (green, paramName, end, payload))
                                    break
                                except IndexError:
                                    pass
                        if blindXSS and blindPayload:
                            paramsCopy[paramName] = blindPayload
                            requester(url, paramsCopy, headers, GET, delay, timeout)
def scan(target, paramData, encoding, headers, delay, timeout, skipDOM, find, skip):
    reports = {}
    GET, POST = (False, True) if paramData else (True, False)
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            response = requester('https://' + target, {}, headers, GET, delay, timeout)
            target = 'https://' + target
        except:
            target = 'http://' + target
    logger.debug('Scan target: {}'.format(target))
    response = requester(target, {}, headers, GET, delay, timeout).text
    vulnerable_code = list()
    if not skipDOM:
        logger.run('Checking for DOM vulnerabilities')
        highlighted = dom(response)
        if highlighted:
            logger.good('Potentially vulnerable objects found')
            logger.red_line(level='good')
            for line in highlighted:
                vulnerable_code.append(line)
                logger.no_format(line, level='good')
            logger.red_line(level='good')
    potential_vulnerabilities = [{"code": vulnerable_code}]
    reports["potential_vulnerabilities"] = potential_vulnerabilities
    host = urlparse(target).netloc  # Extracts host out of the url
    logger.debug('Host to scan: {}'.format(host))
    url = getUrl(target, GET)
    logger.debug('Url to scan: {}'.format(url))
    params = getParams(target, paramData, GET)
    logger.debug_json('Scan parameters:', params)
    if find:
        params = arjun(url, GET, headers, delay, timeout)
    if not params:
        logger.error('No parameters to test.')
        reports['parameter_reports'] = "No parameters to test."
        return reports
    WAF = wafDetector(url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
    if WAF:
        logger.error('WAF detected: %s%s%s' % (green, WAF, end))
    else:
        logger.good('WAF Status: %sOffline%s' % (green, end))
    paramReports = list()
    for paramName in params.keys():
        paramReport = {"parameter": None, "encoding": None, "reflection": None}
        paramReport['parameter'] = paramName
        paramsCopy = copy.deepcopy(params)
        logger.info('Testing parameter: %s' % paramName)
        if encoding:
            paramsCopy[paramName] = encoding(xsschecker)
            paramReport['encoding'] = str(encoding)
        else:
            paramsCopy[paramName] = xsschecker
        response = requester(url, paramsCopy, headers, GET, delay, timeout)
        occurences = htmlParser(response, encoding)
        positions = occurences.keys()
        logger.debug('Scan occurences: {}'.format(occurences))
        if not occurences:
            logger.error('No reflection found')
            paramReport['reflection'] = "No reflection found"
            paramReports.append(paramReport)
            continue
        else:
            logger.info('Reflections found: %i' % len(occurences))
            paramReport['reflection'] = len(occurences)
        logger.run('Analysing reflections')
        efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
        logger.debug('Scan efficiencies: {}'.format(efficiencies))
        logger.run('Generating payloads')
        vectors = generator(occurences, response.text)
        total = 0
        for v in vectors.values():
            total += len(v)
        if total == 0:
            logger.error('No vectors were crafted.')
            paramReports.append(paramReport)
            continue
        logger.info('Payloads generated: %i' % total)
        paramReport['payloads_generated'] = total
        payloadLists = list()
        progress = 0
        for confidence, vects in vectors.items():
            for vect in vects:
                payloaditem = {}
                if core.config.globalVariables['path']:
                    vect = vect.replace('/', '%2F')
                loggerVector = vect
                progress += 1
                logger.run('Progress: %i/%i\r' % (progress, total))
                if not GET:
                    vect = unquote(vect)
                try:
                    efficiencies = checker(url, paramsCopy, headers, GET, delay, vect, positions, timeout, encoding)
                except Exception as e:
                    payloaditem['error'] = str(e)
                    print("ERROR")
                    continue
                if not efficiencies:
                    for i in range(len(occurences)):
                        efficiencies.append(0)
                bestEfficiency = max(efficiencies)
                if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
                    logger.red_line()
                    logger.good('Payload: %s' % loggerVector)
                    logger.info('Efficiency: %i' % bestEfficiency)
                    logger.info('Confidence: %i' % confidence)
                    if not skip:
                        choice = input('%s Would you like to continue scanning? [y/N] ' % que).lower()
                        if choice != 'y':
                            quit()
                elif bestEfficiency > minEfficiency:
                    logger.red_line()
                    logger.good('Payload: %s' % loggerVector)
                    logger.info('Efficiency: %i' % bestEfficiency)
                    logger.info('Confidence: %i' % confidence)
                payloaditem['payload'] = loggerVector
                payloaditem['efficiency'] = bestEfficiency
                payloaditem['confidence'] = confidence
                payloadLists.append(payloaditem)
                print(payloaditem)
        paramReport['payload_reports'] = payloadLists
        logger.no_format('')
        # collect every parameter's report instead of overwriting with the
        # last one (the original assigned a single paramReport to the key)
        paramReports.append(paramReport)
    reports['parameter_reports'] = paramReports
    return reports
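# Shape of the dict returned by the report-building scan() above, assembled
# from the assignments in that snippet (all values below are illustrative):
# {
#     'potential_vulnerabilities': [{'code': ['<highlighted DOM sink>', '...']}],
#     'parameter_reports': [
#         {'parameter': 'q', 'encoding': None, 'reflection': 2,
#          'payloads_generated': 40,
#          'payload_reports': [
#              {'payload': '...', 'efficiency': 100, 'confidence': 10}]},
#     ],
# }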
def scan(target, paramData, encoding, headers, delay, timeout, skipDOM, find, skip):
    GET, POST = (False, True) if paramData else (True, False)
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith("http"):
        try:
            response = requester("https://" + target, {}, headers, GET, delay, timeout)
            target = "https://" + target
        except:
            target = "http://" + target
    logger.debug("Scan target: {}".format(target))
    response = requester(target, {}, headers, GET, delay, timeout).text
    if not skipDOM:
        logger.run("Checking for DOM vulnerabilities")
        highlighted = dom(response)
        if highlighted:
            logger.good("Potentially vulnerable objects found")
            logger.red_line(level="good")
            for line in highlighted:
                logger.no_format(line, level="good")
            logger.red_line(level="good")
    host = urlparse(target).netloc  # Extracts host out of the url
    logger.debug("Host to scan: {}".format(host))
    url = getUrl(target, GET)
    logger.debug("Url to scan: {}".format(url))
    params = getParams(target, paramData, GET)
    logger.debug_json("Scan parameters:", params)
    if find:
        params = arjun(url, GET, headers, delay, timeout)
    if not params:
        logger.error("No parameters to test.")
        quit()
    WAF = wafDetector(url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
    if WAF:
        logger.error("WAF detected: %s%s%s" % (green, WAF, end))
    else:
        logger.good("WAF Status: %sOffline%s" % (green, end))
    for paramName in params.keys():
        paramsCopy = copy.deepcopy(params)
        logger.info("Testing parameter: %s" % paramName)
        if encoding:
            paramsCopy[paramName] = encoding(xsschecker)
        else:
            paramsCopy[paramName] = xsschecker
        response = requester(url, paramsCopy, headers, GET, delay, timeout)
        occurences = htmlParser(response, encoding)
        positions = occurences.keys()
        logger.debug("Scan occurences: {}".format(occurences))
        if not occurences:
            logger.error("No reflection found")
            continue
        else:
            logger.info("Reflections found: %i" % len(occurences))
        logger.run("Analysing reflections")
        efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
        logger.debug("Scan efficiencies: {}".format(efficiencies))
        logger.run("Generating payloads")
        vectors = generator(occurences, response.text)
        total = 0
        for v in vectors.values():
            total += len(v)
        if total == 0:
            logger.error("No vectors were crafted.")
            continue
        logger.info("Payloads generated: %i" % total)
        progress = 0
        for confidence, vects in vectors.items():
            for vect in vects:
                if core.config.globalVariables["path"]:
                    vect = vect.replace("/", "%2F")
                loggerVector = vect
                progress += 1
                logger.run("Progress: %i/%i\r" % (progress, total))
                if not GET:
                    vect = unquote(vect)
                efficiencies = checker(
                    url,
                    paramsCopy,
                    headers,
                    GET,
                    delay,
                    vect,
                    positions,
                    timeout,
                    encoding,
                )
                if not efficiencies:
                    for i in range(len(occurences)):
                        efficiencies.append(0)
                bestEfficiency = max(efficiencies)
                if bestEfficiency == 100 or (vect[0] == "\\" and bestEfficiency >= 95):
                    logger.red_line()
                    logger.good("Payload: %s" % loggerVector)
                    logger.info("Efficiency: %i" % bestEfficiency)
                    logger.info("Confidence: %i" % confidence)
                    if not skip:
                        choice = input("%s Would you like to continue scanning? [y/N] " % que).lower()
                        if choice != "y":
                            quit()
                elif bestEfficiency > minEfficiency:
                    logger.red_line()
                    logger.good("Payload: %s" % loggerVector)
                    logger.info("Efficiency: %i" % bestEfficiency)
                    logger.info("Confidence: %i" % confidence)
    logger.no_format("")
def scan(target, paramData, verbose, encoding, headers, delay, timeout, skipDOM, find, skip):
    GET, POST = (False, True) if paramData else (True, False)
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            response = requester('https://' + target, {}, headers, GET, delay, timeout)
            target = 'https://' + target
        except:
            target = 'http://' + target
    response = requester(target, {}, headers, GET, delay, timeout).text
    if not skipDOM:
        print('%s Checking for DOM vulnerabilities' % run)
        highlighted = dom(response)
        if highlighted:
            print('%s Potentially vulnerable objects found' % good)
            print(red + ('-' * 60) + end)
            for line in highlighted:
                print(line)
            print(red + ('-' * 60) + end)
    host = urlparse(target).netloc  # Extracts host out of the url
    verboseOutput(host, 'host', verbose)
    url = getUrl(target, GET)
    verboseOutput(url, 'url', verbose)
    params = getParams(target, paramData, GET)
    verboseOutput(params, 'params', verbose)
    if find:
        params = arjun(url, GET, headers, delay, timeout)
    if not params:
        print('%s No parameters to test.' % bad)
        quit()
    WAF = wafDetector(url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
    if WAF:
        print('%s WAF detected: %s%s%s' % (bad, green, WAF, end))
    else:
        print('%s WAF Status: %sOffline%s' % (good, green, end))
    for paramName in params.keys():
        paramsCopy = copy.deepcopy(params)
        print('%s Testing parameter: %s' % (info, paramName))
        if encoding:
            paramsCopy[paramName] = encoding(xsschecker)
        else:
            paramsCopy[paramName] = xsschecker
        response = requester(url, paramsCopy, headers, GET, delay, timeout)
        parsedResponse = htmlParser(response, encoding)
        occurences = parsedResponse[0]
        verboseOutput(occurences, 'occurences', verbose)
        positions = parsedResponse[1]
        verboseOutput(positions, 'positions', verbose)
        if not occurences:
            print('%s No reflection found' % bad)
            continue
        else:
            print('%s Reflections found: %s' % (info, len(occurences)))
        print('%s Analysing reflections' % run)
        efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
        verboseOutput(efficiencies, 'efficiencies', verbose)
        print('%s Generating payloads' % run)
        vectors = generator(occurences, response.text)
        verboseOutput(vectors, 'vectors', verbose)
        total = 0
        for v in vectors.values():
            total += len(v)
        if total == 0:
            print('%s No vectors were crafted' % bad)
            continue
        print('%s Payloads generated: %i' % (info, total))
        progress = 0
        for confidence, vects in vectors.items():
            for vect in vects:
                if core.config.globalVariables['path']:
                    vect = vect.replace('/', '%2F')
                printVector = vect
                progress += 1
                print('%s Progress: %i/%i' % (run, progress, total), end='\r')
                if confidence == 10:
                    if not GET:
                        vect = unquote(vect)
                    efficiencies = checker(url, paramsCopy, headers, GET, delay, vect, positions, timeout, encoding)
                    if not efficiencies:
                        for i in range(len(occurences)):
                            efficiencies.append(0)
                    bestEfficiency = max(efficiencies)
                    if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
                        print(('%s-%s' % (red, end)) * 60)
                        print('%s Payload: %s' % (good, printVector))
                        print('%s Efficiency: %i' % (info, bestEfficiency))
                        print('%s Confidence: %i' % (info, confidence))
                        if not skip:
                            choice = input('%s Would you like to continue scanning? [y/N] ' % que).lower()
                            if choice != 'y':
                                quit()
                    elif bestEfficiency > minEfficiency:
                        print(('%s-%s' % (red, end)) * 60)
                        print('%s Payload: %s' % (good, printVector))
                        print('%s Efficiency: %i' % (info, bestEfficiency))
                        print('%s Confidence: %i' % (info, confidence))
                else:
                    if re.search(r'<(a|d3|details)|lt;(a|d3|details)', vect.lower()):
                        continue
                    vect = unquote(vect)
                    if encoding:
                        paramsCopy[paramName] = encoding(vect)
                    else:
                        paramsCopy[paramName] = vect
                    response = requester(url, paramsCopy, headers, GET, delay, timeout).text
                    success = browserEngine(response)
                    if success:
                        print(('%s-%s' % (red, end)) * 60)
                        print('%s Payload: %s' % (good, printVector))
                        print('%s Efficiency: %i' % (info, 100))
                        print('%s Confidence: %i' % (info, 10))
                        if not skip:
                            choice = input('%s Would you like to continue scanning? [y/N] ' % que).lower()
                            if choice != 'y':
                                quit()
    print('')