if update:  # if the user has supplied --update argument
    updater()
    quit()  # quitting because files have been changed

if not target and not args_seeds:  # if the user hasn't supplied a url
    logger.no_format('\n' + parser.format_help().lower())
    quit()

if fuzz:  # fuzz the parameters of a single target
    singleFuzz(target, paramData, encoding, headers, delay, timeout)
elif not recursive and not args_seeds:  # plain single-target mode
    if args_file:  # a payload file was supplied, bruteforce with it
        bruteforcer(target, paramData, payloadList, encoding, headers, delay, timeout)
    else:
        scan(target, paramData, encoding, headers, delay, timeout, skipDOM, find, skip)
else:  # crawl-and-scan mode: process every target in the seed list
    if target:
        seedList.append(target)
    for target in seedList:
        logger.run('Crawling the target')
        scheme = urlparse(target).scheme
        logger.debug('Target scheme: {}'.format(scheme))
        host = urlparse(target).netloc
        main_url = scheme + '://' + host  # root URL of the target
        crawlingResult = photon(target, headers, level,
                                threadCount, delay, timeout, skipDOM)
        forms = crawlingResult[0]  # forms discovered while crawling
        domURLs = list(crawlingResult[1])  # URLs flagged for potential DOM XSS
        # pad the shorter list so forms and domURLs can be processed in parallel
        difference = abs(len(domURLs) - len(forms))
        if len(domURLs) > len(forms):