Example #1
encoding = base64 if encode == 'base64' else False  # select the base64 encoder only when --encode base64 was passed

if not proxy:  # no --proxy supplied; clear any preconfigured proxies
    core.config.proxies = {}

if update:  # if the user has supplied --update argument
    updater()
    quit()  # quitting because files have been changed

if not target and not args_seeds:  # if the user hasn't supplied a url
    logger.no_format('\n' + parser.format_help().lower())
    quit()

if fuzz:  # --fuzzer: probe a single URL with the fuzzing engine
    singleFuzz(target, paramData, encoding, headers, delay, timeout)
elif not recursive and not args_seeds:  # single-target mode
    if args_file:  # a payload list was supplied, so bruteforce with it
        bruteforcer(target, paramData, payloadList, encoding, headers, delay, timeout)
    else:
        scan(target, paramData, encoding, headers, delay, timeout, skipDOM, find, skip)
else:  # crawling mode: fold the explicit target into the seed list
    if target:
        seedList.append(target)
    for target in seedList:
        logger.run('Crawling the target')
        scheme = urlparse(target).scheme
        logger.debug('Target scheme: {}'.format(scheme))
        host = urlparse(target).netloc
        main_url = scheme + '://' + host
        crawlingResult = photon(target, headers, level,
                                threadCount, delay, timeout, skipDOM)  # remaining arguments assumed; the call is cut off in the source
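
Taken together, the branches above amount to a small mode dispatcher: fuzz one URL, bruteforce payloads from a file, scan a single target, or fall through to crawling every seed. Below is a minimal, self-contained sketch of that selection logic; dispatch() and its mode names are illustrative stand-ins, not part of XSStrike's API:

from urllib.parse import urlparse

def dispatch(target=None, seeds=(), fuzz=False, recursive=False, payload_file=None):
    # Mirrors the branch order above: fuzzing wins, then single-target mode,
    # then crawling as the fallback.
    if fuzz:
        return 'fuzz', target
    if not recursive and not seeds:
        # Single-target mode: bruteforce when a payload file was given, else scan.
        return ('bruteforce' if payload_file else 'scan'), target
    # Crawl mode: the explicit target becomes one more seed.
    seed_list = list(seeds)
    if target:
        seed_list.append(target)
    # Reduce each seed to its scheme://host root, as the loop above does.
    return 'crawl', ['{}://{}'.format(urlparse(s).scheme, urlparse(s).netloc)
                     for s in seed_list]

print(dispatch(target='https://example.com/page?q=1', recursive=True))
# -> ('crawl', ['https://example.com'])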
Example #2

This variant comes from an older release of the same file: it threads an explicit verbose flag through singleFuzz(), bruteforcer(), and scan(), and reports status with plain print() calls and verboseOutput() instead of the logger helpers in Example #1. The control flow is otherwise identical.
encoding = base64 if encode == 'base64' else False  # as above: select the base64 encoder only when requested

if not proxy:
    core.config.proxies = {}

if update:  # if the user has supplied --update argument
    updater()
    quit()  # quitting because files have been changed

if not target and not args_seeds:  # if the user hasn't supplied a url
    print('\n' + parser.format_help().lower())
    quit()

if fuzz:
    singleFuzz(target, paramData, verbose, encoding, headers, delay, timeout)
elif not recursive and not args_seeds:
    if args_file:
        bruteforcer(target, paramData, payloadList, verbose, encoding, headers, delay, timeout)
    else:
        scan(target, paramData, verbose, encoding, headers, delay, timeout, skipDOM, find, skip)
else:
    if target:
        seedList.append(target)
    for target in seedList:
        print('%s Crawling the target' % run)  # 'run' is this release's colored status prefix
        scheme = urlparse(target).scheme
        verboseOutput(scheme, 'scheme', verbose)  # logged only when --verbose is set
        host = urlparse(target).netloc
        main_url = scheme + '://' + host
        crawlingResult = photon(target, headers, level,
                                threadCount, delay, timeout, skipDOM)  # remaining arguments assumed; the call is cut off in the source
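
Both snippets are driven by XSStrike's command line. Assuming the flag names documented in the project README (they have shifted slightly between releases), a plain scan and a crawl run look like:

python xsstrike.py -u "http://example.com/search.php?q=query"
python xsstrike.py -u "http://example.com" --crawl -l 2

The first invocation takes the scan() branch; --crawl sets recursive, which routes execution into the seed loop at the end of each example.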