# NOTE(review): whitespace-mangled fragment — an entire multi-indent block was
# collapsed onto one physical line, so Python block structure (indentation) is
# lost and the code below is NOT runnable as-is. Recover the original layout
# from version control rather than re-indenting by hand.
# What the statements show: reports a reflected payload hit; then dispatches to
# bruteforcer()/singleTarget() when not recursive, else crawls the target with
# photon(), pads the shorter of forms/domURLs with 0 so zip() pairs them 1:1,
# and fans multiTargets() out over a ThreadPoolExecutor. The fragment ends on a
# progress-reporting `if` whose body is cut off — the fragment is incomplete.
# Assumes good/run/args/headers/level/threadCount/delay/timeout and the helpers
# (bruteforcer, singleTarget, photon, multiTargets, verboseOutput) are defined
# earlier in the file — TODO confirm once the full file is recovered.
if payload in response: print('%s %s' % (good, payload)) if not args.recursive: if args.file: bruteforcer(target, paramData, payloadList, verbose, encoding) else: singleTarget(target, paramData, verbose, encoding) else: print('%s Crawling the target' % run) scheme = urlparse(target).scheme verboseOutput(scheme, 'scheme', verbose) host = urlparse(target).netloc main_url = scheme + '://' + host crawlingResult = photon(target, headers, level, threadCount, delay, timeout) forms = crawlingResult[0] domURLs = list(crawlingResult[1]) difference = abs(len(domURLs) - len(forms)) if len(domURLs) > len(forms): for i in range(difference): forms.append(0) elif len(forms) > len(domURLs): for i in range(difference): domURLs.append(0) threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount) futures = (threadpool.submit(multiTargets, scheme, host, main_url, form, domURL, verbose) for form, domURL in zip(forms, domURLs)) for i, _ in enumerate(concurrent.futures.as_completed(futures)): if i + 1 == len(forms) or (i + 1) % threadCount == 0:
# NOTE(review): whitespace-mangled fragment — a top-of-function/script region
# (an import fused with the statements that followed it) collapsed onto one
# line; not runnable as-is. Restore from version control.
# What the statements show: reads target/delay/level/timeout/threadCount from
# `args` with fallback defaults (0, 2, 20, 2); initializes token bookkeeping
# lists; phase 1/6 crawls with photon() and reports URL/form counts; phase 2/6
# calls evaluate() to populate weakTokens/tokenDatabase/allTokens/insecureForms,
# then prints each weak token as its {url: token} single-entry dict.
# `list(d.keys())[0]` / `list(d.values())[0]` presumes each weakToken dict has
# exactly one entry — TODO confirm against evaluate()'s contract.
from core.config import headers target = args.target delay = args.delay or 0 level = args.level or 2 timeout = args.timeout or 20 threadCount = args.threads or 2 allTokens = [] weakTokens = [] tokenDatabase = [] insecureForms = [] print(' %s Phase: Crawling %s[%s1/6%s]%s' % (lightning, green, end, green, end)) dataset = photon(target, headers, level, threadCount) allForms = dataset[0] print('\r%s Crawled %i URL(s) and found %i form(s).%-10s' % (info, dataset[1], len(allForms), ' ')) print(' %s Phase: Evaluating %s[%s2/6%s]%s' % (lightning, green, end, green, end)) evaluate(allForms, weakTokens, tokenDatabase, allTokens, insecureForms) if weakTokens: print('%s Weak token(s) found' % good) for weakToken in weakTokens: url = list(weakToken.keys())[0] token = list(weakToken.values())[0] print('%s %s %s' % (info, url, token))
# NOTE(review): whitespace-mangled fragment (begins mid-`if/elif` — `quit()` is
# the tail of a branch we cannot see); indentation lost, not runnable as-is.
# What the statements show: when bestEfficiency clears minEfficiency, prints a
# divider plus payload/efficiency/confidence; then either scans a single target
# or crawls via photon() and normalizes each form's action URL against the
# site root (absolute under main_url kept; protocol-relative '//host…' given a
# scheme; root-relative '/…' prefixed with scheme+host; bare relative paths
# joined with '/'). Ends mid-block on `if method == 'get':`.
# BUG (cannot fix in a comment-only edit): the runtime string 'Cofidence' is a
# typo for 'Confidence' — fix it where this fragment's source of truth lives.
# `signatures = set()` is created but not used within this fragment — its use
# presumably follows the cut; verify before removing.
quit() elif bestEfficiency > minEfficiency: print(('%s-%s' % (red, end)) * 60) print('%s Payload: %s' % (good, vect)) print('%s Efficiency: %i' % (info, bestEfficiency)) print('%s Cofidence: %i' % (info, confidence)) if not args.recursive: singleTarget(target, paramData) else: print('%s Crawling the target' % run) scheme = urlparse(target).scheme host = urlparse(target).netloc main_url = scheme + '://' + host forms = photon(main_url, target, headers) signatures = set() for form in forms: for each in form.values(): url = each['action'] if url: if url.startswith(main_url): pass elif url.startswith('//') and url[2:].startswith(host): url = scheme + '://' + url[2:] elif url.startswith('/'): url = scheme + '://' + host + url elif re.match(r'\w', url[0]): url = scheme + '://' + host + '/' + url method = each['method'] if method == 'get':
# NOTE(review): whitespace-mangled fragment (begins mid-dispatch — the `if`
# branch that pairs with the leading `singleFuzz(...)` call and the opening
# `elif` condition's context are cut off); indentation lost, not runnable
# as-is. Restore from version control.
# What the statements show: mode dispatch — fuzz a single target, or (when not
# recursive and no seeds) bruteforce from a payload file or run scan(); else
# append `target` to seedList and, for every seed, crawl with photon(), pad the
# shorter of forms/domURLs with 0 so zip() pairs them, and fan crawl() out over
# a ThreadPoolExecutor (passing blindXSS/blindPayload/skipDOM/encoding through),
# printing carriage-return progress every threadCount completions.
# Note the padding sentinel 0 — crawl() must tolerate a 0 in place of a
# form/domURL; TODO confirm in crawl()'s definition.
singleFuzz(target, paramData, verbose, encoding, headers, delay, timeout) elif not recursive and not args_seeds: if args_file: bruteforcer(target, paramData, payloadList, verbose, encoding, headers, delay, timeout) else: scan(target, paramData, verbose, encoding, headers, delay, timeout, skipDOM, find, skip) else: if target: seedList.append(target) for target in seedList: print('%s Crawling the target' % run) scheme = urlparse(target).scheme verboseOutput(scheme, 'scheme', verbose) host = urlparse(target).netloc main_url = scheme + '://' + host crawlingResult = photon(target, headers, level, threadCount, delay, timeout) forms = crawlingResult[0] domURLs = list(crawlingResult[1]) difference = abs(len(domURLs) - len(forms)) if len(domURLs) > len(forms): for i in range(difference): forms.append(0) elif len(forms) > len(domURLs): for i in range(difference): domURLs.append(0) threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount) futures = (threadpool.submit(crawl, scheme, host, main_url, form, domURL, verbose, blindXSS, blindPayload, headers, delay, timeout, skipDOM, encoding) for form, domURL in zip(forms, domURLs)) for i, _ in enumerate(concurrent.futures.as_completed(futures)): if i + 1 == len(forms) or (i + 1) % threadCount == 0: print('%s Progress: %i/%i' % (info, i + 1, len(forms)), end='\r')
# NOTE(review): whitespace-mangled fragment (begins inside a try/except whose
# `try:` is cut off — the `except IndexError: pass` guards the `list(vects)[0]`
# lookup, presumably when no vector was found); indentation lost, not runnable
# as-is. Restore from version control.
# What the statements show: reports the vulnerable URL and the winning vector
# for paramName, then `break`s out of an enclosing loop; afterwards either
# scans a single target or crawls via photon(), pads forms/domURLs to equal
# length with 0, and fans multiTargets() out over a ThreadPoolExecutor with
# carriage-return progress every threadCount completions; final print('')
# moves past the \r progress line.
# Older revision style: space before print's paren (`print (...)`) — keep
# consistent with whichever revision this file tracks.
payload = list(vects)[0] print ('%s Vulnerable webpage: %s%s%s' % (good, green, url, end)) print ('%s Vector for %s%s%s: %s' % (good, green, paramName, end, payload)) break except IndexError: pass if not args.recursive: singleTarget(target, paramData) else: print ('%s Crawling the target' % run) scheme = urlparse(target).scheme host = urlparse(target).netloc main_url = scheme + '://' + host crawlingResult = photon(target, headers, level, threadCount) forms = crawlingResult[0] domURLs = list(crawlingResult[1]) difference = abs(len(domURLs) - len(forms)) if len(domURLs) > len(forms): for i in range(difference): forms.append(0) elif len(forms) > len(domURLs): for i in range(difference): domURLs.append(0) threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount) futures = (threadpool.submit(multiTargets, scheme, host, main_url, form, domURL) for form, domURL in zip(forms, domURLs)) for i, _ in enumerate(concurrent.futures.as_completed(futures)): if i + 1 == len(forms) or (i + 1) % threadCount == 0: print('%s Progress: %i/%i' % (info, i + 1, len(forms)), end='\r') print ('')
# NOTE(review): whitespace-mangled fragment; near-duplicate of the other
# payload-report/crawl fragment in this file but from a different revision
# (space-before-paren `print (...)`; photon(target, headers, level) signature).
# Begins mid-`if/elif` (the `choice` prompt it answers is cut off) and ends
# mid-block on `if method == 'get':` — not runnable as-is.
# What the statements show: abort unless the user answered 'y'; when
# bestEfficiency clears minEfficiency, print divider + payload/efficiency/
# confidence; then single-target scan or crawl, normalizing each form's action
# URL (absolute kept; '//host…' gets a scheme; '/…' gets scheme+host; bare
# relative paths are joined with '/').
# BUG (cannot fix in a comment-only edit): runtime string 'Cofidence' is a typo
# for 'Confidence'. `signatures = set()` unused within this fragment — its use
# presumably follows the cut; verify before removing.
if choice != 'y': quit() elif bestEfficiency > minEfficiency: print (('%s-%s' % (red, end)) * 60) print ('%s Payload: %s' % (good, vect)) print ('%s Efficiency: %i' % (info, bestEfficiency)) print ('%s Cofidence: %i' % (info, confidence)) if not args.recursive: singleTarget(target, paramData) else: print ('%s Crawling the target' % run) scheme = urlparse(target).scheme host = urlparse(target).netloc main_url = scheme + '://' + host forms = photon(target, headers, level) signatures = set() for form in forms: for each in form.values(): url = each['action'] if url: if url.startswith(main_url): pass elif url.startswith('//') and url[2:].startswith(host): url = scheme + '://' + url[2:] elif url.startswith('/'): url = scheme + '://' + host + url elif re.match(r'\w', url[0]): url = scheme + '://' + host + '/' + url method = each['method'] if method == 'get':
# NOTE(review): whitespace-mangled fragment; indentation lost, not runnable
# as-is. Restore from version control.
# What the statements show: recon/assessment pass over `dataset` (a dict keyed
# by subdomain, each value holding at least a 'schema' key). For each
# subdomain: builds its URL, then checks subdomain-takeover signatures from
# var('sub_takeover') — a match is a CNAME substring in the URL plus either a
# fingerprint string in the live response body or (on ConnectionError) an
# 'nxdomain' rule. Then fingerprints the CMS via whatcms(), crawls with
# photon(), and stores forms / all_urls / technologies / outdated_libs back
# into dataset before dumping it as indented JSON.
# `takeover` is computed but never stored or printed in this fragment —
# presumably consumed after the cut, or dead; TODO confirm.
# ETA formula `10 * 2 * len(dataset)` is a hard-coded per-subdomain estimate
# (20 s each) — presumably tuned empirically; confirm before changing.
print('%s Deploying Photon for component assessment' % run) print('%s Deploying Alpha for software fingerprinting' % run) print('%s Deploying Zetanize for identifying entry points' % run) print('%s ETA: %i seconds' % (info, 10 * 2 * len(dataset))) for subdomain in dataset: url = dataset[subdomain]['schema'] + '://' + subdomain takeover = False for each in var('sub_takeover'): for i in each['cname']: if i in url: try: response = requester(url) for i in each['fingerprint']: if i in response.text: takeover = True break except requests.exceptions.ConnectionError: if each['nxdomain']: takeover = True break break dataset[subdomain]['cms'] = whatcms(subdomain) crawled = photon(url) dataset[subdomain]['forms'] = crawled[0] dataset[subdomain]['all_urls'] = list(crawled[1]) dataset[subdomain]['technologies'] = list(crawled[2]) dataset[subdomain]['outdated_libs'] = crawled[3] print(json.dumps(dataset, indent=4))
# NOTE(review): whitespace-mangled fragment; near-duplicate of the takeover
# assessment fragment in this file, from a revision without the takeover check.
# Begins mid-try/except (the `try:` that resolves raw_subdomain is cut off —
# socket.gaierror/UnicodeError from hostname resolution are swallowed to skip
# dead subdomains); indentation lost, not runnable as-is.
# What the statements show: falls back to 'http' schema, prints the live
# subdomain, then for each dataset entry fingerprints the CMS via whatcms(),
# crawls with photon(), stores forms / all_urls / technologies / outdated_libs,
# and dumps the dataset as indented JSON. The trailing commented-out prints are
# roadmap stubs (Bolt/XSStrike/Zoom/Zeta/Hawk deployment) — dead code kept by
# the author; consider deleting at the source rather than here.
# ETA formula `10 * 2 * len(dataset)` matches the sibling fragment — keep the
# two revisions consistent if either is changed.
else: dataset[raw_subdomain]['schema'] = 'http' print('%s[✈️]%s %s' % (green, end, raw_subdomain)) except (socket.gaierror, UnicodeError): pass # print ('%s Deploying wavelet analyzing module to detect hidden targets.' % run) # print ('Wavelets analyzed [1/1]') print('%s Deploying Photon for component assessment' % run) print('%s Deploying Alpha for software fingerprinting' % run) print('%s Deploying Zetanize for identifying entry points' % run) print('%s ETA: %i seconds' % (info, 10 * 2 * len(dataset))) for subdomain in dataset: dataset[subdomain]['cms'] = whatcms(subdomain) crawled = photon(dataset[subdomain]['schema'] + '://' + subdomain) dataset[subdomain]['forms'] = crawled[0] dataset[subdomain]['all_urls'] = list(crawled[1]) dataset[subdomain]['technologies'] = list(crawled[2]) dataset[subdomain]['outdated_libs'] = crawled[3] print(json.dumps(dataset, indent=4)) # print ('%s Deploying Bolt for CSRF detection' % run) # print ('%s Deploying XSStrike for XSS detection' % run) # print ('%s Deploying Zoom to scan for camouflaged components' % run) # print ('%s Deploying Zeta to find open redirect vulnerabilities' % run) # print ('%s Deploying Hawk to find file inclusion vulnerabilities' % run) # for subdomain in dataset: # print ('%s Attacking [%s]' % (info, subdomain)) # print ('%s Attacking [%s]' % (info, subdomain))