Example no. 1
def singleFuzz(target, paramData, verbose, encoding, headers, delay, timeout):
    GET, POST = (False, True) if paramData else (True, False)
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            response = requester('https://' + target, {}, headers, GET, delay,
                                 timeout)
            target = 'https://' + target
        except:
            target = 'http://' + target
    host = urlparse(target).netloc  # Extracts host out of the url
    verboseOutput(host, 'host', verbose)
    url = getUrl(target, GET)
    verboseOutput(url, 'url', verbose)
    params = getParams(target, paramData, GET)
    verboseOutput(params, 'params', verbose)
    if not params:
        print('%s No parameters to test.' % bad)
        quit()
    WAF = wafDetector(url, {list(params.keys())[0]: xsschecker}, headers, GET,
                      delay, timeout)
    if WAF:
        print('%s WAF detected: %s%s%s' % (bad, green, WAF, end))
    else:
        print('%s WAF Status: %sOffline%s' % (good, green, end))

    for paramName in params.keys():
        print('%s Fuzzing parameter: %s' % (info, paramName))
        paramsCopy = copy.deepcopy(params)
        paramsCopy[paramName] = xsschecker
        fuzzer(url, paramsCopy, headers, GET, delay, timeout, WAF, encoding)
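
A minimal usage sketch for the singleFuzz mode above, assuming it can be imported from the project; the import path, target URL and argument values are illustrative, not taken from the source:

# hypothetical invocation of singleFuzz (import path is an assumption)
from modes.singleFuzz import singleFuzz

headers = {'User-Agent': 'Mozilla/5.0'}       # request headers
singleFuzz(
    'http://example.com/search.php?q=query',  # target with a GET parameter
    '',                                       # empty paramData -> GET mode
    True,                                     # verbose output
    None,                                     # no payload encoder
    headers,
    0,                                        # delay between requests (seconds)
    10)                                       # per-request timeout (seconds)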
Example no. 2
def bruteforcer(target, paramData, payloadList, verbose, encoding):
    GET, POST = (False, True) if paramData else (True, False)
    host = urlparse(target).netloc  # Extracts host out of the url
    verboseOutput(host, 'host', verbose)
    url = getUrl(target, GET)
    verboseOutput(url, 'url', verbose)
    params = getParams(target, paramData, GET)
    if not params:
        print('%s No parameters to test.' % bad)
        quit()
    verboseOutput(params, 'params', verbose)
    for paramName in params.keys():
        progress = 1
        paramsCopy = copy.deepcopy(params)
        for payload in payloadList:
            print('%s Progress: %i/%i' % (run, progress, len(payloadList)),
                  end='\r')
            if encoding:
                payload = encoding(unquote(payload))
            paramsCopy[paramName] = payload
            response = requester(url, paramsCopy, headers, GET, delay,
                                 timeout).text
            if encoding:
                payload = encoding(payload)
            if payload in response:
                print('%s %s' % (good, payload))
            progress += 1
    print('')
Example no. 3
def bruteforcer(target, paramData, payloadList, verbose, encoding, headers, delay, timeout):
    GET, POST = (False, True) if paramData else (True, False)
    host = urlparse(target).netloc  # Extracts host out of the url
    verboseOutput(host, 'host', verbose)
    url = getUrl(target, GET)
    verboseOutput(url, 'url', verbose)
    params = getParams(target, paramData, GET)
    if not params:
        print('%s No parameters to test.' % bad)
        quit()
    verboseOutput(params, 'params', verbose)
    for paramName in params.keys():
        progress = 1
        paramsCopy = copy.deepcopy(params)
        for payload in payloadList:
            print('%s Bruteforcing %s[%s%s%s]%s: %i/%i' % (run, green, end, paramName, green, end, progress, len(payloadList)), end='\r')
            if encoding:
                payload = encoding(unquote(payload))
            paramsCopy[paramName] = payload
            response = requester(url, paramsCopy, headers,
                                 GET, delay, timeout).text
            if encoding:
                payload = encoding(payload)
            if payload in response:
                print('%s %s' % (good, payload))
            progress += 1
    print()
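
Note that encoding is consumed as a plain str -> str callable (each payload is URL-decoded and then passed through it), so any encoder function can be supplied. A sketch with an illustrative base64 encoder; the target, wordlist and headers below are placeholders:

import base64

def base64Encoder(string):
    # illustrative str -> str encoder, matching how encoding() is called above
    return base64.b64encode(string.encode('utf-8')).decode('utf-8')

payloadList = ['<script>alert(1)</script>', '"><svg onload=alert(1)>']
bruteforcer(
    'http://example.com/page.php?item=1',  # target with a GET parameter
    '',                                    # empty paramData -> GET mode
    payloadList,
    True,                                  # verbose
    base64Encoder,                         # encoding callable
    {'User-Agent': 'Mozilla/5.0'},         # headers
    0,                                     # delay (seconds)
    10)                                    # timeout (seconds)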
Example no. 4
def singleFuzz(target, paramData, verbose, encoding, headers, delay, timeout):
    GET, POST = (False, True) if paramData else (True, False)
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            response = requester('https://' + target, {},
                                 headers, GET, delay, timeout)
            target = 'https://' + target
        except:
            target = 'http://' + target
    host = urlparse(target).netloc  # Extracts host out of the url
    verboseOutput(host, 'host', verbose)
    url = getUrl(target, GET)
    verboseOutput(url, 'url', verbose)
    params = getParams(target, paramData, GET)
    verboseOutput(params, 'params', verbose)
    if not params:
        print('%s No parameters to test.' % bad)
        quit()
    WAF = wafDetector(
        url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
    if WAF:
        print('%s WAF detected: %s%s%s' % (bad, green, WAF, end))
    else:
        print('%s WAF Status: %sOffline%s' % (good, green, end))

    for paramName in params.keys():
        print('%s Fuzzing parameter: %s' % (info, paramName))
        paramsCopy = copy.deepcopy(params)
        paramsCopy[paramName] = xsschecker
        fuzzer(url, paramsCopy, headers, GET,
               delay, timeout, WAF, encoding)
Example no. 5
def brute(target, paramData, payloadList, verbose):
    if paramData:
        GET, POST = False, True
    else:
        GET, POST = True, False
    host = urlparse(target).netloc  # Extracts host out of the url
    verboseOutput(host, 'host', verbose)
    url = getUrl(target, paramData, GET)
    verboseOutput(url, 'url', verbose)
    params = getParams(target, paramData, GET)
    verboseOutput(params, 'params', verbose)
    for paramName in params.keys():
        paramsCopy = copy.deepcopy(params)
        for payload in payloadList:
            paramsCopy[paramName] = payload
            response = requester(url, paramsCopy, headers, GET, delay,
                                 timeout).text
            if payload in response:
                print('%s %s' % (good, payload))
Example no. 6
def singleTarget(target, paramData, verbose, encoding):
    if paramData:
        GET, POST = False, True
    else:
        GET, POST = True, False
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            response = requester('https://' + target, {}, headers, GET, delay,
                                 timeout)
            target = 'https://' + target
        except:
            target = 'http://' + target
    response = requester(target, {}, headers, GET, delay, timeout).text
    if not skipDOM:
        print('%s Checking for DOM vulnerabilities' % run)
        highlighted = dom(response)
        if highlighted:
            print('%s Potentially vulnerable objects found' % good)
            print(red + ('-' * 60) + end)
            for line in highlighted:
                print(line)
            print(red + ('-' * 60) + end)
    host = urlparse(target).netloc  # Extracts host out of the url
    verboseOutput(host, 'host', verbose)
    url = getUrl(target, GET)
    verboseOutput(url, 'url', verbose)
    params = getParams(target, paramData, GET)
    verboseOutput(params, 'params', verbose)
    if args.find:
        params = arjun(url, GET, headers, delay, timeout)
    if not params:
        quit()
    WAF = wafDetector(url, {list(params.keys())[0]: xsschecker}, headers, GET,
                      delay, timeout)
    if WAF:
        print('%s WAF detected: %s%s%s' % (bad, green, WAF, end))
    else:
        print('%s WAF Status: %sOffline%s' % (good, green, end))

    if fuzz:
        for paramName in params.keys():
            print('%s Fuzzing parameter: %s' % (info, paramName))
            paramsCopy = copy.deepcopy(params)
            paramsCopy[paramName] = xsschecker
            fuzzer(url, paramsCopy, headers, GET, delay, timeout, WAF,
                   encoding)
        quit()

    for paramName in params.keys():
        paramsCopy = copy.deepcopy(params)
        print('%s Testing parameter: %s' % (info, paramName))
        if encoding:
            paramsCopy[paramName] = encoding(xsschecker)
        else:
            paramsCopy[paramName] = xsschecker
        response = requester(url, paramsCopy, headers, GET, delay, timeout)
        parsedResponse = htmlParser(response, encoding)
        occurences = parsedResponse[0]
        verboseOutput(occurences, 'occurences', verbose)
        positions = parsedResponse[1]
        verboseOutput(positions, 'positions', verbose)
        if not occurences:
            print('%s No reflection found' % bad)
            continue
        else:
            print('%s Reflections found: %s' % (info, len(occurences)))
        print('%s Analysing reflections' % run)
        efficiencies = filterChecker(url, paramsCopy, headers, GET, delay,
                                     occurences, timeout, encoding)
        verboseOutput(efficiencies, 'efficiencies', verbose)
        print('%s Generating payloads' % run)
        vectors = generator(occurences, response.text)
        verboseOutput(vectors, 'vectors', verbose)
        total = 0
        for v in vectors.values():
            total += len(v)
        if total == 0:
            print('%s No vectors were crafted' % bad)
            continue
        print('%s Payloads generated: %i' % (info, total))
        progress = 0
        for confidence, vects in vectors.items():
            for vect in vects:
                progress += 1
                print('%s Payloads tried [%i/%i]' % (run, progress, total),
                      end='\r')
                if not GET:
                    vect = unquote(vect)
                efficiencies = checker(url, paramsCopy, headers, GET, delay,
                                       vect, positions, timeout, encoding)
                if not efficiencies:
                    for i in range(len(occurences)):
                        efficiencies.append(0)
                bestEfficiency = max(efficiencies)
                if bestEfficiency == 100 or (vect[0] == '\\'
                                             and bestEfficiency >= 95):
                    print(('%s-%s' % (red, end)) * 60)
                    print('%s Payload: %s' % (good, vect))
                    print('%s Efficiency: %i' % (info, bestEfficiency))
                    print('%s Confidence: %i' % (info, confidence))
                    if not args.skip:
                        choice = input(
                            '%s Would you like to continue scanning? [y/N] ' %
                            que).lower()
                        if choice != 'y':
                            quit()
                elif bestEfficiency > minEfficiency:
                    print(('%s-%s' % (red, end)) * 60)
                    print('%s Payload: %s' % (good, vect))
                    print('%s Efficiency: %i' % (info, bestEfficiency))
                    print('%s Confidence: %i' % (info, confidence))
Example no. 7
                                 timeout).text
            if encoding:
                payload = encoding(payload)
            if payload in response:
                print('%s %s' % (good, payload))


if not args.recursive:
    if args.file:
        bruteforcer(target, paramData, payloadList, verbose, encoding)
    else:
        singleTarget(target, paramData, verbose, encoding)
else:
    print('%s Crawling the target' % run)
    scheme = urlparse(target).scheme
    verboseOutput(scheme, 'scheme', verbose)
    host = urlparse(target).netloc
    main_url = scheme + '://' + host
    crawlingResult = photon(target, headers, level, threadCount, delay,
                            timeout)
    forms = crawlingResult[0]
    domURLs = list(crawlingResult[1])
    difference = abs(len(domURLs) - len(forms))
    if len(domURLs) > len(forms):
        for i in range(difference):
            forms.append(0)
    elif len(forms) > len(domURLs):
        for i in range(difference):
            domURLs.append(0)
    threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
    futures = (threadpool.submit(multiTargets, scheme, host, main_url, form,
Example no. 8
    quit()

if fuzz:
    singleFuzz(target, paramData, verbose, encoding, headers, delay, timeout)
elif not recursive and not args_seeds:
    if args_file:
        bruteforcer(target, paramData, payloadList, verbose, encoding, headers, delay, timeout)
    else:
        scan(target, paramData, verbose, encoding, headers, delay, timeout, skipDOM, find, skip)
else:
    if target:
        seedList.append(target)
    for target in seedList:
        print('%s Crawling the target' % run)
        scheme = urlparse(target).scheme
        verboseOutput(scheme, 'scheme', verbose)
        host = urlparse(target).netloc
        main_url = scheme + '://' + host
        crawlingResult = photon(target, headers, level,
                                threadCount, delay, timeout)
        forms = crawlingResult[0]
        domURLs = list(crawlingResult[1])
        difference = abs(len(domURLs) - len(forms))
        if len(domURLs) > len(forms):
            for i in range(difference):
                forms.append(0)
        elif len(forms) > len(domURLs):
            for i in range(difference):
                domURLs.append(0)
        threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=threadCount)
        futures = (threadpool.submit(crawl, scheme, host, main_url, form, domURL, verbose,
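
Both of the recursive snippets above pad the shorter of forms and domURLs with zeros so the two lists can be zipped and dispatched pairwise to a thread pool. A self-contained sketch of that pad-and-zip pattern with a placeholder worker function (names and data below are illustrative):

import concurrent.futures

def worker(form, domURL):
    # placeholder for the real crawl / multiTargets worker
    return form, domURL

forms = [{'action': '/login'}, {'action': '/search'}]
domURLs = ['http://example.com/a', 'http://example.com/b', 'http://example.com/c']

difference = abs(len(domURLs) - len(forms))
if len(domURLs) > len(forms):
    forms.extend([0] * difference)    # pad so zip() does not drop trailing URLs
elif len(forms) > len(domURLs):
    domURLs.extend([0] * difference)  # pad so zip() does not drop trailing forms

threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
futures = [threadpool.submit(worker, form, domURL)
           for form, domURL in zip(forms, domURLs)]
for future in concurrent.futures.as_completed(futures):
    print(future.result())
threadpool.shutdown()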
Example no. 9
def scan(target, paramData, verbose, encoding, headers, delay, timeout, skipDOM, find, skip):
    GET, POST = (False, True) if paramData else (True, False)
    # If the user hasn't supplied the root url with http(s), we will handle it
    if not target.startswith('http'):
        try:
            response = requester('https://' + target, {},
                                 headers, GET, delay, timeout)
            target = 'https://' + target
        except:
            target = 'http://' + target
    response = requester(target, {}, headers, GET, delay, timeout).text
    if not skipDOM:
        print('%s Checking for DOM vulnerabilities' % run)
        highlighted = dom(response)
        if highlighted:
            print('%s Potentially vulnerable objects found' % good)
            print(red + ('-' * 60) + end)
            for line in highlighted:
                print(line)
            print(red + ('-' * 60) + end)
    host = urlparse(target).netloc  # Extracts host out of the url
    verboseOutput(host, 'host', verbose)
    url = getUrl(target, GET)
    verboseOutput(url, 'url', verbose)
    params = getParams(target, paramData, GET)
    verboseOutput(params, 'params', verbose)
    if find:
        params = arjun(url, GET, headers, delay, timeout)
    if not params:
        print('%s No parameters to test.' % bad)
        quit()
    WAF = wafDetector(
        url, {list(params.keys())[0]: xsschecker}, headers, GET, delay, timeout)
    if WAF:
        print('%s WAF detected: %s%s%s' % (bad, green, WAF, end))
    else:
        print('%s WAF Status: %sOffline%s' % (good, green, end))

    for paramName in params.keys():
        paramsCopy = copy.deepcopy(params)
        print('%s Testing parameter: %s' % (info, paramName))
        if encoding:
            paramsCopy[paramName] = encoding(xsschecker)
        else:
            paramsCopy[paramName] = xsschecker
        response = requester(url, paramsCopy, headers, GET, delay, timeout)
        parsedResponse = htmlParser(response, encoding)
        occurences = parsedResponse[0]
        verboseOutput(occurences, 'occurences', verbose)
        positions = parsedResponse[1]
        verboseOutput(positions, 'positions', verbose)
        if not occurences:
            print('%s No reflection found' % bad)
            continue
        else:
            print('%s Reflections found: %s' % (info, len(occurences)))
        print('%s Analysing reflections' % run)
        efficiencies = filterChecker(
            url, paramsCopy, headers, GET, delay, occurences, timeout, encoding)
        verboseOutput(efficiencies, 'efficiencies', verbose)
        print('%s Generating payloads' % run)
        vectors = generator(occurences, response.text)
        verboseOutput(vectors, 'vectors', verbose)
        total = 0
        for v in vectors.values():
            total += len(v)
        if total == 0:
            print('%s No vectors were crafted' % bad)
            continue
        print('%s Payloads generated: %i' % (info, total))
        progress = 0
        for confidence, vects in vectors.items():
            for vect in vects:
                if core.config.globalVariables['path']:
                    vect = vect.replace('/', '%2F')
                printVector = vect
                progress += 1
                print('%s Progress: %i/%i' % (run, progress, total), end='\r')
                if confidence == 10:
                    if not GET:
                        vect = unquote(vect)
                    efficiencies = checker(
                        url, paramsCopy, headers, GET, delay, vect, positions, timeout, encoding)
                    if not efficiencies:
                        for i in range(len(occurences)):
                            efficiencies.append(0)
                    bestEfficiency = max(efficiencies)
                    if bestEfficiency == 100 or (vect[0] == '\\' and bestEfficiency >= 95):
                        print(('%s-%s' % (red, end)) * 60)
                        print('%s Payload: %s' % (good, printVector))
                        print('%s Efficiency: %i' % (info, bestEfficiency))
                        print('%s Confidence: %i' % (info, confidence))
                        if not skip:
                            choice = input(
                                '%s Would you like to continue scanning? [y/N] ' % que).lower()
                            if choice != 'y':
                                quit()
                    elif bestEfficiency > minEfficiency:
                        print(('%s-%s' % (red, end)) * 60)
                        print('%s Payload: %s' % (good, printVector))
                        print('%s Efficiency: %i' % (info, bestEfficiency))
                        print('%s Confidence: %i' % (info, confidence))
                else:
                    if re.search(r'<(a|d3|details)|lt;(a|d3|details)', vect.lower()):
                        continue
                    vect = unquote(vect)
                    if encoding:
                        paramsCopy[paramName] = encoding(vect)
                    else:
                        paramsCopy[paramName] = vect
                    response = requester(url, paramsCopy, headers, GET, delay, timeout).text
                    success = browserEngine(response)
                    if success:
                        print(('%s-%s' % (red, end)) * 60)
                        print('%s Payload: %s' % (good, printVector))
                        print('%s Efficiency: %i' % (info, 100))
                        print('%s Confidence: %i' % (info, 10))
                        if not skip:
                            choice = input(
                                '%s Would you like to continue scanning? [y/N] ' % que).lower()
                            if choice != 'y':
                                quit()
        print('')
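
A minimal usage sketch for the scan mode above; the argument values are illustrative assumptions, not taken from the source:

# hypothetical invocation of scan() against a single GET parameter
scan(
    'http://example.com/product.php?id=1',  # target URL
    '',                                     # empty paramData -> GET mode
    True,                                   # verbose
    None,                                   # no payload encoder
    {'User-Agent': 'Mozilla/5.0'},          # headers
    0,                                      # delay between requests (seconds)
    10,                                     # request timeout (seconds)
    False,                                  # skipDOM: keep the DOM source check
    False,                                  # find: skip parameter discovery (arjun)
    False)                                  # skip: prompt before continuing after a hit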