# Example 1
def cmd_test(args, auth, cookies):
    """Crawl the target site, then fuzz every discovered input.

    ``args`` must carry open file objects ``sensitive`` and ``vectors``
    (and optionally ``commonwords``), plus ``url``, ``blacklist``,
    ``slow`` and ``random``.  ``auth`` and ``cookies`` are forwarded
    unchanged to the crawler.
    """
    # Optional word list used to guess unlinked page URLs.
    if args.commonwords is not None:
        words = [w for w in (line.rstrip("\n") for line in args.commonwords) if w]
        args.commonwords.close()
    else:
        words = []

    # Strings whose appearance in a response signals leaked data.
    sensitive_data = [s for s in (line.rstrip("\n") for line in args.sensitive) if s]
    args.sensitive.close()

    # Attack payloads injected into forms and parameters.
    vectors = [v for v in (line.rstrip("\n") for line in args.vectors) if v]
    args.vectors.close()

    crawler = Site(words, args.blacklist)
    crawler.crawl(args.url, auth, cookies)

    fuzzer = Tester(crawler, sensitive_data, vectors, args.slow, args.random)
    if args.random:
        fuzzer.run_random()
    else:
        fuzzer.run()
# Example 2
def cmd_test(args, auth, cookies):
	"""Crawl the target site and run the fuzzer against everything found."""

	def slurp(handle):
		# Collect non-blank lines (trailing newline stripped), then
		# close the file object the argument parser opened for us.
		entries = []
		for raw in handle:
			stripped = raw.rstrip("\n")
			if stripped:
				entries.append(stripped)
		handle.close()
		return entries

	# Optional word list for guessing unlinked URLs.
	words = slurp(args.commonwords) if args.commonwords is not None else []
	# Leak indicators and attack payloads, one per line.
	sensitive_data = slurp(args.sensitive)
	vectors = slurp(args.vectors)

	crawler = Site(words, args.blacklist)
	crawler.crawl(args.url, auth, cookies)

	fuzzer = Tester(crawler, sensitive_data, vectors, args.slow, args.random)
	if args.random:
		fuzzer.run_random()
	else:
		fuzzer.run()
# Example 3
def cmd_discover(args, auth, cookies):
    """Crawl the target site and print every page, form and parameter found.

    ``args`` must carry an open file object ``commonwords`` (word list
    used to guess unlinked URLs), plus ``url`` and ``blacklist``.
    ``auth`` and ``cookies`` are forwarded unchanged to the crawler.
    """
    # Word list used to guess page URLs that are never linked to.
    words = [w for w in (line.rstrip("\n") for line in args.commonwords) if w]
    args.commonwords.close()

    crawler = Site(words, args.blacklist)
    crawler.crawl(args.url, auth, cookies)

    for page in crawler.pages.values():
        # Warn if the page wasn't retrieved successfully, but only if it
        # wasn't guessed (a failed guess is expected, not an error).
        if not page.valid:
            if not page.guessed:
                print("Couldn't fetch", page.url, "(status code:",
                      page.response_code, ")")
            continue

        print(page.url)

        if page.guessed:
            print("\t Page URL was guessed.")

        print("\t", len(page.forms), "form(s)")
        for form in page.forms:
            print("\t\t", form.action, form.method)

            for field in form.fields:
                print("\t\t\t", field.name, " : ", str(field))

        print("\t", len(page.get_parameters), "GET parameter(s)")
        for param in page.get_parameters:
            print("\t\t", param)
# Example 4
def cmd_discover(args, auth, cookies):
	"""Crawl the target and dump every discovered page, form and parameter."""
	# Non-blank lines of the common-words file feed the URL guesser.
	words = [entry for entry in (raw.rstrip("\n") for raw in args.commonwords) if entry]
	args.commonwords.close()

	site = Site(words, args.blacklist)
	site.crawl(args.url, auth, cookies)

	for page in site.pages.values():
		if not page.valid:
			# Only complain about real fetch failures; failed guesses are routine.
			if not page.guessed:
				print("Couldn't fetch", page.url, "(status code:", page.response_code, ")")
			continue

		print(page.url)
		if page.guessed:
			print("\t Page URL was guessed.")

		print("\t", len(page.forms), "form(s)")
		for form in page.forms:
			print("\t\t", form.action, form.method)
			for field in form.fields:
				print("\t\t\t", field.name, " : ", str(field))

		print("\t", len(page.get_parameters), "GET parameter(s)")
		for param in page.get_parameters:
			print("\t\t", param)